Make retry-related constants more generic
Writing through Hive can fail in ways other than just "could only be
replicated to 0 nodes instead of minReplication". Let's make the names
of constants more generic to support more cases in the future, and also
abstract what the underlying problem may be.
findepi committed Jan 30, 2023 · 1 parent 331a836 · commit bd3edaa
Showing 13 changed files with 105 additions and 98 deletions.
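For reference, below is a minimal sketch of what the renamed constants in HadoopTestUtils could look like after this change. The hunk that actually defines them is not part of the excerpt shown here, so the issue URL placeholder and the match pattern are illustrative assumptions; only the constant names and the "minReplication" message come from this commit.

package io.trino.tests.product.utils;

// Hypothetical sketch only; the real issue list and regex in the repository may differ.
public final class HadoopTestUtils
{
    private HadoopTestUtils() {}

    // Tracking issue(s) for known retryable failures (placeholder URL, assumed).
    public static final String RETRYABLE_FAILURES_ISSUES =
            "https://github.com/trinodb/trino/issues/<issue-number>";

    // Pattern matching error messages that are safe to retry. The HDFS replication
    // message below is the case the old ERROR_COMMITTING_WRITE_TO_HIVE_* names covered;
    // the more generic names leave room to add further alternatives over time.
    public static final String RETRYABLE_FAILURES_MATCH =
            "could only be replicated to 0 nodes instead of minReplication";
}

The product tests in the diff below then reference only these two names in their @Flaky(issue = ..., match = ...) annotations, so new failure modes can be folded into the match pattern without touching any call sites.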
@@ -21,8 +21,8 @@
import static com.google.common.base.Preconditions.checkArgument;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static io.trino.tests.product.TestGroups.AZURE;
-import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE;
-import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH;
+import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES;
+import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH;
import static io.trino.tests.product.utils.QueryExecutors.onHive;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static java.lang.String.format;
@@ -56,63 +56,63 @@ protected String schemaLocation()
}

@Test(groups = AZURE)
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testAddPartition()
{
super.testAddPartition();
}

@Test(groups = AZURE)
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testAddPartitionContainingCharactersThatNeedUrlEncoding()
{
super.testAddPartitionContainingCharactersThatNeedUrlEncoding();
}

@Test(groups = AZURE)
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testDropPartition()
{
super.testDropPartition();
}

@Test(groups = AZURE)
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testDropPartitionContainingCharactersThatNeedUrlEncoding()
{
super.testDropPartitionContainingCharactersThatNeedUrlEncoding();
}

@Test(groups = AZURE)
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testFullSyncPartition()
{
super.testFullSyncPartition();
}

@Test(groups = AZURE)
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testInvalidSyncMode()
{
super.testInvalidSyncMode();
}

@Test(groups = AZURE)
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testMixedCasePartitionNames()
{
super.testMixedCasePartitionNames();
}

@Test(groups = AZURE)
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testConflictingMixedCasePartitionNames()
{
@@ -27,8 +27,8 @@
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static io.trino.tests.product.TestGroups.AZURE;
-import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE;
-import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH;
+import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES;
+import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH;
import static io.trino.tests.product.utils.QueryExecutors.onHive;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static java.lang.String.format;
@@ -57,7 +57,7 @@ public void tearDown()
}

@Test(groups = AZURE)
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
public void testPathContainsSpecialCharacter()
{
String tableName = "test_path_special_character" + randomNameSuffix();
@@ -26,8 +26,8 @@
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tests.product.TestGroups.HMS_ONLY;
import static io.trino.tests.product.TestGroups.STORAGE_FORMATS;
-import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE;
-import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH;
+import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES;
+import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH;
import static io.trino.tests.product.utils.QueryExecutors.onHive;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static java.lang.String.format;
@@ -66,14 +66,14 @@ private void testInsertIntoCsvTable(String tableName, String additionalTableProp
}

@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
public void testCreateCsvTableAs()
{
testCreateCsvTableAs("");
}

@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
public void testCreateCsvTableAsWithCustomProperties()
{
testCreateCsvTableAs(", csv_escape = 'e', csv_separator = 's', csv_quote = 'q'");
@@ -98,14 +98,14 @@ private void testCreateCsvTableAs(String additionalParameters)
}

@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
public void testInsertIntoPartitionedCsvTable()
{
testInsertIntoPartitionedCsvTable("test_partitioned_csv_table", "");
}

@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
public void testInsertIntoPartitionedCsvTableWithCustomProperties()
{
testInsertIntoPartitionedCsvTable("test_partitioned_csv_table_with_custom_parameters", ", csv_escape = 'e', csv_separator = 's', csv_quote = 'q'");
@@ -132,14 +132,14 @@ private void testInsertIntoPartitionedCsvTable(String tableName, String addition
}

@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
public void testCreatePartitionedCsvTableAs()
{
testCreatePartitionedCsvTableAs("storage_formats_test_create_table_as_select_partitioned_csv", "");
}

@Test(groups = {STORAGE_FORMATS, HMS_ONLY})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
public void testCreatePartitionedCsvTableAsWithCustomParamters()
{
testCreatePartitionedCsvTableAs(
@@ -25,8 +25,8 @@
import static io.trino.tests.product.TestGroups.HIVE_PARTITIONING;
import static io.trino.tests.product.TestGroups.SMOKE;
import static io.trino.tests.product.TestGroups.TRINO_JDBC;
-import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE;
-import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH;
+import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES;
+import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;

public class TestHdfsSyncPartitionMetadata
@@ -47,63 +47,63 @@ protected String schemaLocation()
}

@Test(groups = {HIVE_PARTITIONING, SMOKE, TRINO_JDBC})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testAddPartition()
{
super.testAddPartition();
}

@Test(groups = {HIVE_PARTITIONING, SMOKE, TRINO_JDBC})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testAddPartitionContainingCharactersThatNeedUrlEncoding()
{
super.testAddPartitionContainingCharactersThatNeedUrlEncoding();
}

@Test(groups = {HIVE_PARTITIONING, SMOKE})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testDropPartition()
{
super.testDropPartition();
}

@Test(groups = {HIVE_PARTITIONING, SMOKE, TRINO_JDBC})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testDropPartitionContainingCharactersThatNeedUrlEncoding()
{
super.testDropPartitionContainingCharactersThatNeedUrlEncoding();
}

@Test(groups = {HIVE_PARTITIONING, SMOKE})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testFullSyncPartition()
{
super.testFullSyncPartition();
}

@Test(groups = {HIVE_PARTITIONING, SMOKE, TRINO_JDBC})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testInvalidSyncMode()
{
super.testInvalidSyncMode();
}

@Test(groups = {HIVE_PARTITIONING, SMOKE})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testMixedCasePartitionNames()
{
super.testMixedCasePartitionNames();
}

@Test(groups = {HIVE_PARTITIONING, SMOKE})
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
@Override
public void testConflictingMixedCasePartitionNames()
{
@@ -28,8 +28,8 @@

import static com.google.common.base.Verify.verify;
import static io.trino.tests.product.hive.util.TableLocationUtils.getTableLocation;
-import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE;
-import static io.trino.tests.product.utils.HadoopTestUtils.ERROR_COMMITTING_WRITE_TO_HIVE_MATCH;
+import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_ISSUES;
+import static io.trino.tests.product.utils.HadoopTestUtils.RETRYABLE_FAILURES_MATCH;
import static io.trino.tests.product.utils.QueryExecutors.onHive;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static java.lang.String.format;
@@ -294,7 +294,7 @@ public void testInsertPartitioned()
}

@Test
-@Flaky(issue = ERROR_COMMITTING_WRITE_TO_HIVE_ISSUE, match = ERROR_COMMITTING_WRITE_TO_HIVE_MATCH)
+@Flaky(issue = RETRYABLE_FAILURES_ISSUES, match = RETRYABLE_FAILURES_MATCH)
public void testInsertBucketed()
{
String tableName = "test_basic_statistics_bucketed_insert_presto";