From d29ed40199565c30c8f0668437d35d09e3a42348 Mon Sep 17 00:00:00 2001
From: Xiang Fu
Date: Sat, 15 Oct 2022 02:26:22 -0700
Subject: [PATCH 1/2] Fix flaky pinot tests

---
 .../plugin/pinot/client/PinotClient.java      | 171 +++++++++++-------
 ...asePinotIntegrationConnectorSmokeTest.java |  33 ++++
 .../plugin/pinot/TestingPinotCluster.java     |  20 +-
 3 files changed, 151 insertions(+), 73 deletions(-)

diff --git a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/client/PinotClient.java b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/client/PinotClient.java
index 0818e030e154..215ef09e8905 100755
--- a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/client/PinotClient.java
+++ b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/client/PinotClient.java
@@ -93,6 +93,7 @@
 import static io.trino.plugin.pinot.PinotErrorCode.PINOT_AMBIGUOUS_TABLE_NAME;
 import static io.trino.plugin.pinot.PinotErrorCode.PINOT_EXCEPTION;
 import static io.trino.plugin.pinot.PinotErrorCode.PINOT_UNABLE_TO_FIND_BROKER;
+import static io.trino.plugin.pinot.PinotErrorCode.PINOT_UNCLASSIFIED_ERROR;
 import static io.trino.plugin.pinot.PinotMetadata.SCHEMA_NAME;
 import static java.lang.String.format;
 import static java.util.Locale.ENGLISH;
@@ -117,6 +118,8 @@ public class PinotClient
     private static final String ROUTING_TABLE_API_TEMPLATE = "debug/routingTable/%s";
     private static final String TIME_BOUNDARY_API_TEMPLATE = "debug/timeBoundary/%s";
     private static final String QUERY_URL_PATH = "query/sql";
+    private static final int DEFAULT_HTTP_RETRY_COUNT = 10;
+    private static final int DEFAULT_RETRY_INTERVAL = 1000;
 
     private final List<URI> controllerUrls;
     private final HttpClient httpClient;
@@ -270,18 +273,21 @@ public List<String> getTables()
 
     protected Multimap<String, String> getAllTables()
     {
-        List<String> allTables = sendHttpGetToControllerJson(GET_ALL_TABLES_API_TEMPLATE, tablesJsonCodec).getTables();
-        ImmutableListMultimap.Builder<String, String> builder = ImmutableListMultimap.builder();
-        for (String table : allTables) {
-            builder.put(table.toLowerCase(ENGLISH), table);
-        }
-        return builder.build();
+        return doWithRetries(DEFAULT_HTTP_RETRY_COUNT, retryNumber -> {
+            List<String> allTables =
+                    sendHttpGetToControllerJson(GET_ALL_TABLES_API_TEMPLATE, tablesJsonCodec).getTables();
+            ImmutableListMultimap.Builder<String, String> builder = ImmutableListMultimap.builder();
+            for (String table : allTables) {
+                builder.put(table.toLowerCase(ENGLISH), table);
+            }
+            return builder.build();
+        });
     }
 
     public Schema getTableSchema(String table)
             throws Exception
     {
-        return sendHttpGetToControllerJson(format(TABLE_SCHEMA_API_TEMPLATE, table), schemaJsonCodec);
+        return doWithRetries(DEFAULT_HTTP_RETRY_COUNT, retryNumber -> sendHttpGetToControllerJson(format(TABLE_SCHEMA_API_TEMPLATE, table), schemaJsonCodec));
     }
 
     public List<String> getPinotTableNames()
@@ -361,63 +367,73 @@ public List getBrokers()
     @VisibleForTesting
     public List<String> getAllBrokersForTable(String table)
     {
-        ArrayList<String> brokers = sendHttpGetToControllerJson(format(TABLE_INSTANCES_API_TEMPLATE, table), brokersForTableJsonCodec)
-                .getBrokers().stream()
-                .flatMap(broker -> broker.getInstances().stream())
-                .distinct()
-                .map(brokerToParse -> {
-                    Matcher matcher = BROKER_PATTERN.matcher(brokerToParse);
-                    if (matcher.matches() && matcher.groupCount() == 2) {
-                        return pinotHostMapper.getBrokerHost(matcher.group(1), matcher.group(2));
-                    }
-                    throw new PinotException(
-                            PINOT_UNABLE_TO_FIND_BROKER,
-                            Optional.empty(),
-                            format("Cannot parse %s in the broker instance", brokerToParse));
-                })
-                .collect(Collectors.toCollection(ArrayList::new));
-        Collections.shuffle(brokers);
-        return ImmutableList.copyOf(brokers);
+        return doWithRetries(DEFAULT_HTTP_RETRY_COUNT, retryNumber -> {
+            ArrayList<String> brokers = sendHttpGetToControllerJson(format(TABLE_INSTANCES_API_TEMPLATE, table), brokersForTableJsonCodec)
+                    .getBrokers().stream()
+                    .flatMap(broker -> broker.getInstances().stream())
+                    .distinct()
+                    .map(brokerToParse -> {
+                        Matcher matcher = BROKER_PATTERN.matcher(brokerToParse);
+                        if (matcher.matches() && matcher.groupCount() == 2) {
+                            return pinotHostMapper.getBrokerHost(matcher.group(1), matcher.group(2));
+                        }
+                        throw new PinotException(
+                                PINOT_UNABLE_TO_FIND_BROKER,
+                                Optional.empty(),
+                                format("Cannot parse %s in the broker instance", brokerToParse));
+                    })
+                    .collect(Collectors.toCollection(ArrayList::new));
+            Collections.shuffle(brokers);
+            return ImmutableList.copyOf(brokers);
+        });
     }
 
     public String getBrokerHost(String table)
     {
-        try {
-            List<String> brokers = brokersForTableCache.get(table);
-            if (brokers.isEmpty()) {
-                throw new PinotException(PINOT_UNABLE_TO_FIND_BROKER, Optional.empty(), "No valid brokers found for " + table);
+        return doWithRetries(DEFAULT_HTTP_RETRY_COUNT, retryNumber -> {
+            try {
+                List<String> brokers = brokersForTableCache.get(table);
+                if (brokers.isEmpty()) {
+                    throw new PinotException(PINOT_UNABLE_TO_FIND_BROKER, Optional.empty(),
+                            "No valid brokers found for " + table, true);
+                }
+                return brokers.get(ThreadLocalRandom.current().nextInt(brokers.size()));
             }
-            return brokers.get(ThreadLocalRandom.current().nextInt(brokers.size()));
-        }
-        catch (ExecutionException e) {
-            Throwable throwable = e.getCause();
-            if (throwable instanceof PinotException) {
-                throw (PinotException) throwable;
+            catch (ExecutionException e) {
+                Throwable throwable = e.getCause();
+                if (throwable instanceof PinotException) {
+                    throw (PinotException) throwable;
+                }
+                throw new PinotException(PINOT_UNABLE_TO_FIND_BROKER, Optional.empty(),
+                        "Error when getting brokers for table " + table, true, throwable);
             }
-            throw new PinotException(PINOT_UNABLE_TO_FIND_BROKER, Optional.empty(), "Error when getting brokers for table " + table, throwable);
-        }
+        });
     }
 
     public Map<String, Map<String, List<String>>> getRoutingTableForTable(String tableName)
     {
-        Map<String, Map<String, List<String>>> routingTable = sendHttpGetToBrokerJson(tableName, format(ROUTING_TABLE_API_TEMPLATE, tableName), ROUTING_TABLE_CODEC);
-        ImmutableMap.Builder<String, Map<String, List<String>>> routingTableMap = ImmutableMap.builder();
-        for (Map.Entry<String, Map<String, List<String>>> entry : routingTable.entrySet()) {
-            String tableNameWithType = entry.getKey();
-            if (!entry.getValue().isEmpty() && tableName.equals(extractRawTableName(tableNameWithType))) {
-                ImmutableMap.Builder<String, List<String>> segmentBuilder = ImmutableMap.builder();
-                for (Map.Entry<String, List<String>> segmentEntry : entry.getValue().entrySet()) {
-                    if (!segmentEntry.getValue().isEmpty()) {
-                        segmentBuilder.put(segmentEntry.getKey(), segmentEntry.getValue());
+        return doWithRetries(DEFAULT_HTTP_RETRY_COUNT, retryNumber -> {
+            Map<String, Map<String, List<String>>> routingTable =
+                    sendHttpGetToBrokerJson(tableName, format(ROUTING_TABLE_API_TEMPLATE, tableName),
+                            ROUTING_TABLE_CODEC);
+            ImmutableMap.Builder<String, Map<String, List<String>>> routingTableMap = ImmutableMap.builder();
+            for (Map.Entry<String, Map<String, List<String>>> entry : routingTable.entrySet()) {
+                String tableNameWithType = entry.getKey();
+                if (!entry.getValue().isEmpty() && tableName.equals(extractRawTableName(tableNameWithType))) {
+                    ImmutableMap.Builder<String, List<String>> segmentBuilder = ImmutableMap.builder();
+                    for (Map.Entry<String, List<String>> segmentEntry : entry.getValue().entrySet()) {
+                        if (!segmentEntry.getValue().isEmpty()) {
+                            segmentBuilder.put(segmentEntry.getKey(), segmentEntry.getValue());
+                        }
+                    }
+                    Map<String, List<String>> segmentMap = segmentBuilder.buildOrThrow();
+                    if (!segmentMap.isEmpty()) {
+                        routingTableMap.put(tableNameWithType, segmentMap);
                     }
-            }
-                Map<String, List<String>> segmentMap = segmentBuilder.buildOrThrow();
-                if (!segmentMap.isEmpty()) {
-                    routingTableMap.put(tableNameWithType, segmentMap);
                 }
             }
-        }
-        return routingTableMap.buildOrThrow();
+            return routingTableMap.buildOrThrow();
+        });
     }
 
     public static class TimeBoundary
@@ -458,16 +474,19 @@ public Optional<String> getOfflineTimePredicate()
     public TimeBoundary getTimeBoundaryForTable(String table)
     {
-        try {
-            return sendHttpGetToBrokerJson(table, format(TIME_BOUNDARY_API_TEMPLATE, table), timeBoundaryJsonCodec);
-        }
-        catch (Exception e) {
-            String[] errorMessageSplits = e.getMessage().split(" ");
-            if (errorMessageSplits.length >= 4 && errorMessageSplits[3].equalsIgnoreCase(TIME_BOUNDARY_NOT_FOUND_ERROR_CODE)) {
-                return timeBoundaryJsonCodec.fromJson("{}");
+        return doWithRetries(DEFAULT_HTTP_RETRY_COUNT, retryNumber -> {
+            try {
+                return sendHttpGetToBrokerJson(table, format(TIME_BOUNDARY_API_TEMPLATE, table), timeBoundaryJsonCodec);
             }
-            throw e;
-        }
+            catch (Exception e) {
+                String[] errorMessageSplits = e.getMessage().split(" ");
+                if (errorMessageSplits.length >= 4 && errorMessageSplits[3].equalsIgnoreCase(
+                        TIME_BOUNDARY_NOT_FOUND_ERROR_CODE)) {
+                    return timeBoundaryJsonCodec.fromJson("{}");
+                }
+                throw e;
+            }
+        });
     }
 
     public static class QueryRequest
@@ -560,7 +579,8 @@ private BrokerResponseNative submitBrokerQueryJson(ConnectorSession session, Pin
             throw new PinotException(
                     PINOT_EXCEPTION,
                     Optional.of(query.getQuery()),
-                    format("Query %s encountered exception %s", query.getQuery(), processingExceptionMessage));
+                    format("Query %s encountered exception %s", query.getQuery(), processingExceptionMessage),
+                    true);
         }
         if (response.getNumServersQueried() == 0 || response.getNumServersResponded() == 0 || response.getNumServersQueried() > response.getNumServersResponded()) {
             throw new PinotInsufficientServerResponseException(query, response.getNumServersResponded(), response.getNumServersQueried());
         }
@@ -623,6 +643,11 @@ public static ResultsIterator fromResultTable(BrokerResponseNative brokerRespons
     }
 
     public static <T> T doWithRetries(int retries, Function<Integer, T> caller)
+    {
+        return doWithRetries(retries, caller, DEFAULT_RETRY_INTERVAL);
+    }
+
+    public static <T> T doWithRetries(int retries, Function<Integer, T> caller, int retryInterval)
     {
         PinotException firstError = null;
         checkState(retries > 0, "Invalid num of retries %s", retries);
@@ -630,12 +655,26 @@ public static <T> T doWithRetries(int retries, Function<Integer, T> caller)
             try {
                 return caller.apply(i);
             }
-            catch (PinotException e) {
-                if (firstError == null) {
-                    firstError = e;
+            catch (Exception e) {
+                if (e instanceof PinotException pinotException) {
+                    if (firstError == null) {
+                        firstError = pinotException;
+                    }
+                    if (!pinotException.isRetryable()) {
+                        throw pinotException;
+                    }
+                }
+                else {
+                    if (firstError == null) {
+                        firstError = new PinotException(PINOT_UNCLASSIFIED_ERROR, Optional.empty(),
+                                "Unexpected exception", e);
+                    }
+                }
+                try {
+                    Thread.sleep(retryInterval);
                 }
-                if (!e.isRetryable()) {
-                    throw e;
+                catch (InterruptedException ex) {
+                    // Sleep interrupted, ignore
                 }
             }
         }
diff --git a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/BasePinotIntegrationConnectorSmokeTest.java b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/BasePinotIntegrationConnectorSmokeTest.java
index 6e27e2c99d07..ec681f26e1f9 100644
--- a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/BasePinotIntegrationConnectorSmokeTest.java
+++ b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/BasePinotIntegrationConnectorSmokeTest.java
@@ -56,6 +56,7 @@
 import org.apache.pinot.spi.data.readers.RecordReader;
 import org.apache.pinot.spi.utils.builder.TableNameBuilder;
 import org.testcontainers.shaded.org.bouncycastle.util.encoders.Hex;
+import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 
 import java.io.File;
@@ -73,6 +74,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
+import java.util.concurrent.TimeUnit;
 import java.util.stream.Stream;
 
 import static com.google.common.base.Preconditions.checkState;
@@ -145,6 +147,37 @@ protected boolean isLatestVersion()
         return getPinotImageName().equals(PINOT_LATEST_IMAGE_NAME);
     }
 
+    @Override
+    @BeforeClass
+    public void init()
+            throws Exception
+    {
+        super.init();
+        // Ensure test tables are available in Pinot with expected number of rows.
+        validateTableRows(ALL_TYPES_TABLE, MAX_ROWS_PER_SPLIT_FOR_SEGMENT_QUERIES);
+        validateTableRows(MIXED_CASE_COLUMN_NAMES_TABLE, 4);
+        validateTableRows(MIXED_CASE_DISTINCT_TABLE, 4);
+        validateTableRows(TOO_MANY_ROWS_TABLE, MAX_ROWS_PER_SPLIT_FOR_SEGMENT_QUERIES + 1);
+        validateTableRows(TOO_MANY_BROKER_ROWS_TABLE, MAX_ROWS_PER_SPLIT_FOR_BROKER_QUERIES + 1);
+        validateTableRows(MIXED_CASE_TABLE_NAME, 4);
+        validateTableRows(JSON_TABLE, 7);
+        validateTableRows(JSON_TYPE_TABLE, 3);
+        validateTableRows(RESERVED_KEYWORD_TABLE, 2);
+        validateTableRows(QUOTES_IN_COLUMN_NAME_TABLE, 2);
+        validateTableRows(DUPLICATE_VALUES_IN_COLUMNS_TABLE, 5);
+        validateTableRows("region", getQueryRunner().execute("SELECT * FROM tpch.tiny.region").getRowCount());
+        validateTableRows("nation", getQueryRunner().execute("SELECT * FROM tpch.tiny.nation").getRowCount());
+    }
+
+    private void validateTableRows(String tableName, int expectedRows)
+    {
+        assertQueryEventually(
+                getQueryRunner().getDefaultSession(),
+                "SELECT COUNT(*) FROM " + tableName,
+                "VALUES '" + expectedRows + "'",
+                new io.airlift.units.Duration(10, TimeUnit.SECONDS));
+    }
+
     @Override
     protected QueryRunner createQueryRunner()
             throws Exception
diff --git a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestingPinotCluster.java b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestingPinotCluster.java
index 8a6ffd9047f2..1e51e252e8d7 100644
--- a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestingPinotCluster.java
+++ b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestingPinotCluster.java
@@ -48,6 +48,7 @@
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.time.Duration;
 import java.util.List;
 import java.util.function.Supplier;
 
@@ -85,51 +86,50 @@ public class TestingPinotCluster
     private final GenericContainer<?> server;
     private final GenericContainer<?> zookeeper;
     private final HttpClient httpClient;
-    private final Closer closer = Closer.create();
     private final boolean secured;
 
     public TestingPinotCluster(Network network, boolean secured, String pinotImageName)
     {
-        httpClient = closer.register(new JettyHttpClient());
+        httpClient = new JettyHttpClient();
         zookeeper = new GenericContainer<>(parse("zookeeper:3.5.6"))
                 .withStartupAttempts(3)
+                .withStartupTimeout(Duration.ofMinutes(2))
                 .withNetwork(network)
                 .withNetworkAliases(ZOOKEEPER_INTERNAL_HOST)
                 .withEnv("ZOOKEEPER_CLIENT_PORT", String.valueOf(ZOOKEEPER_PORT))
                 .withExposedPorts(ZOOKEEPER_PORT);
-        closer.register(zookeeper::stop);
 
"/var/pinot/controller/config/pinot-controller-secured.conf" : "/var/pinot/controller/config/pinot-controller.conf"; controller = new GenericContainer<>(parse(pinotImageName)) .withStartupAttempts(3) + .withStartupTimeout(Duration.ofMinutes(2)) .withNetwork(network) .withClasspathResourceMapping("/pinot-controller", "/var/pinot/controller/config", BindMode.READ_ONLY) .withEnv("JAVA_OPTS", "-Xmx512m -Dlog4j2.configurationFile=/opt/pinot/conf/pinot-controller-log4j2.xml -Dplugins.dir=/opt/pinot/plugins") .withCommand("StartController", "-configFileName", controllerConfig) .withNetworkAliases("pinot-controller", "localhost") .withExposedPorts(CONTROLLER_PORT); - closer.register(controller::stop); String brokerConfig = secured ? "/var/pinot/broker/config/pinot-broker-secured.conf" : "/var/pinot/broker/config/pinot-broker.conf"; broker = new GenericContainer<>(parse(pinotImageName)) .withStartupAttempts(3) + .withStartupTimeout(Duration.ofMinutes(2)) .withNetwork(network) .withClasspathResourceMapping("/pinot-broker", "/var/pinot/broker/config", BindMode.READ_ONLY) .withEnv("JAVA_OPTS", "-Xmx512m -Dlog4j2.configurationFile=/opt/pinot/conf/pinot-broker-log4j2.xml -Dplugins.dir=/opt/pinot/plugins") .withCommand("StartBroker", "-clusterName", "pinot", "-zkAddress", getZookeeperInternalHostPort(), "-configFileName", brokerConfig) .withNetworkAliases("pinot-broker", "localhost") .withExposedPorts(BROKER_PORT); - closer.register(broker::stop); server = new GenericContainer<>(parse(pinotImageName)) .withStartupAttempts(3) + .withStartupTimeout(Duration.ofMinutes(2)) .withNetwork(network) .withClasspathResourceMapping("/pinot-server", "/var/pinot/server/config", BindMode.READ_ONLY) .withEnv("JAVA_OPTS", "-Xmx512m -Dlog4j2.configurationFile=/opt/pinot/conf/pinot-server-log4j2.xml -Dplugins.dir=/opt/pinot/plugins") .withCommand("StartServer", "-clusterName", "pinot", "-zkAddress", getZookeeperInternalHostPort(), "-configFileName", "/var/pinot/server/config/pinot-server.conf") .withNetworkAliases("pinot-server", "localhost") .withExposedPorts(SERVER_PORT, SERVER_ADMIN_PORT, GRPC_PORT); - closer.register(server::stop); this.secured = secured; } @@ -146,7 +146,13 @@ public void start() public void close() throws IOException { - closer.close(); + try (Closer closer = Closer.create()) { + closer.register(zookeeper::stop); + closer.register(controller::stop); + closer.register(broker::stop); + closer.register(server::stop); + closer.register(httpClient); + } } private static String getZookeeperInternalHostPort() From 4c856783ba746885bbe4ea28841d8d92ed8f0ed0 Mon Sep 17 00:00:00 2001 From: Xiang Fu Date: Mon, 17 Oct 2022 22:26:19 -0700 Subject: [PATCH 2/2] stress test --- .github/workflows/ci.yml | 790 +++++---------------------------------- 1 file changed, 102 insertions(+), 688 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ce72e0074c85..062832e7a9cf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -39,358 +39,6 @@ concurrency: cancel-in-progress: true jobs: - maven-checks: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - java-version: - - 17 - timeout-minutes: 45 - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # checkout all commits, as the build result depends on `git describe` equivalent - - uses: actions/setup-java@v3 - with: - distribution: 'zulu' - java-version: ${{ matrix.java-version }} - cache: 'maven' - - name: Configure Problem Matchers - run: echo "::add-matcher::.github/problem-matcher.json" - - name: 
Maven Checks - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN clean install -B --strict-checksums -V -T C1 -DskipTests -P ci -pl '!:trino-server-rpm' - - name: Test Server RPM - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN verify -B --strict-checksums -P ci -pl :trino-server-rpm - - name: Clean Maven Output - run: $MAVEN clean -pl '!:trino-server,!:trino-cli' - - uses: docker/setup-qemu-action@v1 - with: - platforms: arm64,ppc64le - - name: Test Docker Image - run: core/docker/build.sh - - name: Clean local Maven repo - # Avoid caching artifacts built in this job, cache should only include dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: rm -rf ~/.m2/repository/io/trino/trino-* - - check-commits: - runs-on: ubuntu-latest - if: github.event_name == 'pull_request' - steps: - - uses: actions/checkout@v3 - - name: Check Commits - uses: trinodb/github-actions/block-commits@c2991972560c5219d9ae5fb68c0c9d687ffcdd10 - with: - action-merge: fail - action-fixup: none - - error-prone-checks: - runs-on: ubuntu-latest - timeout-minutes: 45 - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB - - name: Fetch base ref to find merge-base for GIB - run: .github/bin/git-fetch-base-ref.sh - - uses: actions/setup-java@v3 - with: - distribution: 'zulu' - java-version: 17 - cache: 'maven' - - name: Configure Problem Matchers - run: echo "::add-matcher::.github/problem-matcher.json" - - name: Maven Package - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN clean package ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -pl '!:trino-docs,!:trino-server,!:trino-server-rpm' - - name: Error Prone Checks - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - # Run Error Prone on one module with a retry to ensure all runtime dependencies are fetched - $RETRY $MAVEN ${MAVEN_TEST} -T C1 clean verify -DskipTests -P gib,errorprone-compiler -am -pl ':trino-spi' - # The main Error Prone run - $MAVEN ${MAVEN_TEST} -T C1 clean verify -DskipTests -P gib,errorprone-compiler \ - -pl '!:trino-docs,!:trino-server,!:trino-server-rpm' - - web-ui-checks: - runs-on: ubuntu-latest - timeout-minutes: 30 - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # checkout all commits: it's not needed here, but it's needed almost always, so let's do this for completeness - - name: Web UI Checks - run: core/trino-main/bin/check_webui.sh - - test-jdbc-compatibility: - runs-on: ubuntu-latest - timeout-minutes: 30 - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # checkout tags so version in Manifest is set properly - - name: Fetch base ref to find merge-base for GIB - run: .github/bin/git-fetch-base-ref.sh - - uses: actions/setup-java@v3 - with: - distribution: 'zulu' - java-version: 17 - cache: 'maven' - - name: Configure Problem Matchers - run: echo "::add-matcher::.github/problem-matcher.json" - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -pl '!:trino-docs,!:trino-server,!:trino-server-rpm' - - name: Test old JDBC vs current server - run: | - if [ ! -f gib-impacted.log ] || grep -q testing/trino-test-jdbc-compatibility-old-driver gib-impacted.log; then - testing/trino-test-jdbc-compatibility-old-driver/bin/run_tests.sh - fi - - name: Test current JDBC vs old server - if: always() - run: | - if [ ! 
-f gib-impacted.log ] || grep -q testing/trino-test-jdbc-compatibility-old-server gib-impacted.log; then - $MAVEN test ${MAVEN_TEST} -pl :trino-test-jdbc-compatibility-old-server - fi - - name: Upload test results - uses: actions/upload-artifact@v3 - # Upload all test reports only on failure, because the artifacts are large - if: failure() - with: - name: result ${{ github.job }} - path: | - **/target/surefire-reports - **/target/checkstyle-* - - name: Upload test report - uses: actions/upload-artifact@v3 - # Always upload the test report for the annotate.yml workflow, - # but only the single XML file to keep the artifact small - if: always() - with: - # Name prefix is checked in the `Annotate checks` workflow - name: test report ${{ github.job }} - path: | - **/surefire-reports/TEST-*.xml - retention-days: ${{ env.TEST_REPORT_RETENTION_DAYS }} - - name: Clean local Maven repo - # Avoid creating a cache entry because this job doesn't download all dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: rm -rf ~/.m2/repository - - hive-tests: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - config: - - config-hdp3 - # TODO: config-apache-hive3 - timeout-minutes: 60 - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB - - name: Fetch base ref to find merge-base for GIB - run: .github/bin/git-fetch-base-ref.sh - - uses: actions/setup-java@v3 - with: - distribution: 'zulu' - java-version: 17 - cache: 'maven' - - name: Configure Problem Matchers - run: echo "::add-matcher::.github/problem-matcher.json" - - name: Install Hive Module - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -am -pl :trino-hive-hadoop2 - - name: Run Hive Tests - run: | - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_tests.sh - - name: Run Hive S3 Tests - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESSKEY }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRETKEY }} - S3_BUCKET: "presto-ci-test" - S3_BUCKET_ENDPOINT: "https://s3.us-east-2.amazonaws.com" - run: | - if [ "${AWS_ACCESS_KEY_ID}" != "" ]; then - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_s3_tests.sh - if [ matrix.config == 'config-hdp3' ]; then - # JsonSerde class needed for the S3 Select JSON tests is only available on hdp3. 
- plugin/trino-hive-hadoop2/bin/run_hive_s3_select_json_tests.sh - fi - fi - - name: Run Hive Glue Tests - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESSKEY }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRETKEY }} - AWS_REGION: us-east-2 - run: | - if [ "${AWS_ACCESS_KEY_ID}" != "" ]; then - $MAVEN test ${MAVEN_TEST} -pl :trino-hive -P test-hive-glue - fi - - name: Run Hive Azure ABFS Access Key Tests - if: matrix.config != 'config-empty' # Hive 1.x does not support Azure storage - env: - ABFS_CONTAINER: ${{ secrets.AZURE_ABFS_CONTAINER }} - ABFS_ACCOUNT: ${{ secrets.AZURE_ABFS_ACCOUNT }} - ABFS_ACCESS_KEY: ${{ secrets.AZURE_ABFS_ACCESSKEY }} - run: | - if [ "${ABFS_CONTAINER}" != "" ]; then - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_abfs_access_key_tests.sh - fi - - name: Run Hive Azure ABFS OAuth Tests - if: matrix.config != 'config-empty' # Hive 1.x does not support Azure storage - env: - ABFS_CONTAINER: ${{ secrets.AZURE_ABFS_CONTAINER }} - ABFS_ACCOUNT: ${{ secrets.AZURE_ABFS_ACCOUNT }} - ABFS_OAUTH_ENDPOINT: ${{ secrets.AZURE_ABFS_OAUTH_ENDPOINT }} - ABFS_OAUTH_CLIENTID: ${{ secrets.AZURE_ABFS_OAUTH_CLIENTID }} - ABFS_OAUTH_SECRET: ${{ secrets.AZURE_ABFS_OAUTH_SECRET }} - run: | - if [ -n "$ABFS_CONTAINER" ]; then - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_abfs_oauth_tests.sh - fi - - name: Run Hive Azure WASB Tests - if: matrix.config != 'config-empty' # Hive 1.x does not support Azure storage - env: - WASB_CONTAINER: ${{ secrets.AZURE_WASB_CONTAINER }} - WASB_ACCOUNT: ${{ secrets.AZURE_WASB_ACCOUNT }} - WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESSKEY }} - run: | - if [ "${WASB_CONTAINER}" != "" ]; then - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_wasb_tests.sh - fi - - name: Run Hive Azure ADL Tests - if: matrix.config != 'config-empty' # Hive 1.x does not support Azure storage - env: - ADL_NAME: ${{ secrets.AZURE_ADL_NAME }} - ADL_CLIENT_ID: ${{ secrets.AZURE_ADL_CLIENTID }} - ADL_CREDENTIAL: ${{ secrets.AZURE_ADL_CREDENTIAL }} - ADL_REFRESH_URL: ${{ secrets.AZURE_ADL_REFRESHURL }} - run: | - if [ "${ADL_NAME}" != "" ]; then - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_adl_tests.sh - fi - - name: Run Hive Alluxio Tests - run: | - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_alluxio_tests.sh - - name: Upload test results - uses: actions/upload-artifact@v3 - # Upload all test reports only on failure, because the artifacts are large - if: failure() - with: - name: result ${{ github.job }} - path: | - **/target/surefire-reports - **/target/checkstyle-* - - name: Upload test report - uses: actions/upload-artifact@v3 - # Always upload the test report for the annotate.yml workflow, - # but only the single XML file to keep the artifact small - if: always() - with: - # Name prefix is checked in the `Annotate checks` workflow - name: test report ${{ github.job }} (${{ matrix.config }}) - path: | - **/surefire-reports/TEST-*.xml - retention-days: ${{ env.TEST_REPORT_RETENTION_DAYS }} - - name: Clean local Maven repo - # Avoid creating a cache entry because this job doesn't download all dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: rm -rf ~/.m2/repository - - test-other-modules: - runs-on: ubuntu-latest 
- timeout-minutes: 60 - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB - - name: Fetch base ref to find merge-base for GIB - run: .github/bin/git-fetch-base-ref.sh - - uses: actions/setup-java@v3 - with: - distribution: 'zulu' - java-version: 17 - cache: 'maven' - - name: Configure Problem Matchers - run: echo "::add-matcher::.github/problem-matcher.json" - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -pl '!:trino-docs,!:trino-server,!:trino-server-rpm' - - name: Maven Tests - run: | - $MAVEN test ${MAVEN_TEST} -pl ' - !:trino-accumulo, - !:trino-bigquery, - !:trino-cassandra, - !:trino-clickhouse, - !:trino-delta-lake, - !:trino-docs,!:trino-server,!:trino-server-rpm, - !:trino-druid, - !:trino-elasticsearch, - !:trino-faulttolerant-tests, - !:trino-hive, - !:trino-hudi, - !:trino-iceberg, - !:trino-jdbc,!:trino-base-jdbc,!:trino-thrift,!:trino-memory, - !:trino-kafka, - !:trino-kudu, - !:trino-main, - !:trino-mariadb, - !:trino-mongodb, - !:trino-mysql, - !:trino-oracle, - !:trino-phoenix5, - !:trino-pinot, - !:trino-postgresql, - !:trino-raptor-legacy, - !:trino-redis, - !:trino-singlestore, - !:trino-sqlserver, - !:trino-test-jdbc-compatibility-old-server, - !:trino-tests' - - name: Upload test results - uses: actions/upload-artifact@v3 - # Upload all test reports only on failure, because the artifacts are large - if: failure() - with: - name: result ${{ github.job }} - path: | - **/target/surefire-reports - **/target/checkstyle-* - - name: Upload test report - uses: actions/upload-artifact@v3 - # Always upload the test report for the annotate.yml workflow, - # but only the single XML file to keep the artifact small - if: always() - with: - # Name prefix is checked in the `Annotate checks` workflow - name: test report ${{ github.job }} - path: | - **/surefire-reports/TEST-*.xml - retention-days: ${{ env.TEST_REPORT_RETENTION_DAYS }} - - name: Clean local Maven repo - # Avoid creating a cache entry because this job doesn't download all dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: rm -rf ~/.m2/repository - build-test-matrix: runs-on: ubuntu-latest outputs: @@ -416,39 +64,106 @@ jobs: touch gib-impacted.log cat < .github/test-matrix.yaml include: - - { modules: [ client/trino-jdbc, plugin/trino-base-jdbc, plugin/trino-thrift, plugin/trino-memory ] } - - { modules: core/trino-main } - - { modules: plugin/trino-accumulo } - - { modules: plugin/trino-bigquery } - - { modules: plugin/trino-cassandra } - - { modules: plugin/trino-clickhouse } - - { modules: plugin/trino-delta-lake } - - { modules: plugin/trino-delta-lake, profile: cloud-tests } - - { modules: plugin/trino-druid } - - { modules: plugin/trino-elasticsearch } - - { modules: plugin/trino-hive } - - { modules: plugin/trino-hive, profile: test-parquet } - - { modules: plugin/trino-hudi } - - { modules: plugin/trino-iceberg } - - { modules: plugin/trino-iceberg, profile: cloud-tests } - - { modules: plugin/trino-kafka } - - { modules: plugin/trino-kudu } - - { modules: plugin/trino-mariadb } - - { modules: plugin/trino-mongodb } - - { modules: plugin/trino-mysql } - - { modules: plugin/trino-oracle } - - { modules: plugin/trino-phoenix5 } - - { modules: plugin/trino-pinot } - - { modules: plugin/trino-postgresql } - - { modules: plugin/trino-raptor-legacy } - - { modules: plugin/trino-redis } - - { modules: plugin/trino-singlestore } - - { 
modules: plugin/trino-sqlserver } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-delta } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-hive-1 } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-hive-2 } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-iceberg } - - { modules: testing/trino-tests } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } 
+ - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-pinot } EOF ./.github/bin/build-matrix-from-impacted.py -v -i gib-impacted.log -m .github/test-matrix.yaml -o matrix.json echo "Matrix: $(jq '.' matrix.json)" @@ -490,7 +205,7 @@ jobs: run: $MAVEN test ${MAVEN_TEST} -pl ${{ matrix.modules }} ${{ matrix.profile != '' && format('-P {0}', matrix.profile) || '' }} # Additional tests for selected modules - name: Cloud Delta Lake Tests - # Cloud tests are separate because they are time intensive, requiring cross-cloud network communication + # Cloud tests are separate because they are time intensive, requiring cross-cloud network communication env: ABFS_CONTAINER: ${{ secrets.AZURE_ABFS_CONTAINER }} ABFS_ACCOUNT: ${{ secrets.AZURE_ABFS_ACCOUNT }} @@ -508,7 +223,7 @@ jobs: -Dhive.hadoop2.azure-abfs-account="${ABFS_ACCOUNT}" \ -Dhive.hadoop2.azure-abfs-access-key="${ABFS_ACCESSKEY}" - name: GCS Delta Lake Tests - # Cloud tests are separate because they are time intensive, requiring cross-cloud network communication + # Cloud tests are separate because they are time intensive, requiring cross-cloud network communication env: GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }} # Run tests if any of the secrets is present. Do not skip tests when one secret renamed, or secret name has a typo. @@ -583,304 +298,3 @@ jobs: # Avoid creating a cache entry because this job doesn't download all dependencies if: steps.cache.outputs.cache-hit != 'true' run: rm -rf ~/.m2/repository - - build-pt: - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.set-matrix.outputs.matrix }} - product-tests-changed: ${{ steps.filter.outputs.product-tests }} - have_azure_secrets: ${{ steps.check-secrets.outputs.have_azure_secrets }} - have_databricks_secrets: ${{ steps.check-databricks-secrets.outputs.have_databricks_secrets }} - have_gcp_secrets: ${{ steps.check-gcp-secrets.outputs.have_gcp_secrets }} - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB - - name: Fetch base ref to find merge-base for GIB - run: .github/bin/git-fetch-base-ref.sh - - uses: dorny/paths-filter@v2 - id: filter - with: - filters: | - product-tests: - - 'testing/trino-product-tests*/**' - - 'testing/trino-testing-services/**' - # run all tests when there are any changes in the trino-server Maven module - # because it doesn't define it's Trino dependencies and - # it relies on the Provisio plugin to find the right artifacts - - 'core/trino-server/**' - - '.github/**' - - uses: actions/setup-java@v3 - with: - distribution: 'zulu' - java-version: 17 - cache: 'maven' - - name: Check secrets - run: | - if [[ "${{ secrets.AZURE_ABFS_CONTAINER }}" != "" && \ - "${{ secrets.AZURE_ABFS_ACCOUNT }}" != "" && \ - "${{ secrets.AZURE_ABFS_ACCESSKEY }}" != "" - ]]; \ - then - echo "Secrets to run product tests were configured in the repo" - echo "::set-output name=have_azure_secrets::true" - else - echo "Secrets to run product tests were not configured in the repo" - echo "::set-output name=have_azure_secrets::false" - fi - id: check-secrets - - name: Check Delta Databricks secrets 
- id: check-databricks-secrets - run: | - if [[ "${{ secrets.DATABRICKS_TOKEN }}" != "" ]]; \ - then - echo "Secrets to run Delta Databricks product tests were configured in the repo" - echo "::set-output name=have_databricks_secrets::true" - else - echo "Secrets to run Delta Databricks product tests were not configured in the repo" - echo "::set-output name=have_databricks_secrets::false" - fi - - name: Check GCP secrets - id: check-gcp-secrets - run: | - if [[ "${{ secrets.GCP_CREDENTIALS_KEY }}" != "" ]]; \ - then - echo "Secrets to run GCP product tests were configured in the repo" - echo "::set-output name=have_gcp_secrets::true" - else - echo "Secrets to run GCP product tests were not configured in the repo" - echo "::set-output name=have_gcp_secrets::false" - fi - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN clean install ${MAVEN_FAST_INSTALL} -pl '!:trino-docs,!:trino-server-rpm' - - name: Map impacted plugins to features - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $MAVEN validate ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -pl '!:trino-docs,!:trino-server-rpm' - # GIB doesn't run on master, so make sure the file always exist - touch gib-impacted.log - testing/trino-plugin-reader/target/trino-plugin-reader-*-executable.jar -i gib-impacted.log -p core/trino-server/target/trino-server-*-hardlinks/plugin > impacted-features.log - echo "Impacted plugin features:" - cat impacted-features.log - - name: Product tests artifact - uses: actions/upload-artifact@v3 - with: - name: product tests and server tarball - path: | - core/trino-server/target/*.tar.gz - impacted-features.log - testing/trino-product-tests-launcher/target/*-executable.jar - testing/trino-product-tests/target/*-executable.jar - client/trino-cli/target/*-executable.jar - retention-days: 1 - - id: prepare-matrix-template - run: | - cat < .github/test-pt-matrix.yaml - config: - - default - - hdp3 - # TODO: config-apache-hive3 - suite: - - suite-1 - - suite-2 - - suite-3 - # suite-4 does not exist - - suite-5 - - suite-azure - - suite-delta-lake-databricks - - suite-gcs - exclude: - - config: default - ignore exclusion if: >- - ${{ github.event_name != 'pull_request' - || github.event.pull_request.head.repo.full_name == github.repository - || contains(github.event.pull_request.labels.*.name, 'tests:all') - || contains(github.event.pull_request.labels.*.name, 'tests:hive') - }} - - - suite: suite-azure - config: default - - suite: suite-azure - ignore exclusion if: >- - ${{ needs.build-pt.outputs.have_azure_secrets == 'true' }} - - - suite: suite-gcs - config: default - - suite: suite-gcs - ignore exclusion if: >- - ${{ needs.build-pt.outputs.have_gcp_secrets == 'true' }} - - - suite: suite-delta-lake-databricks - config: hdp3 - - suite: suite-delta-lake-databricks - ignore exclusion if: >- - ${{ needs.build-pt.outputs.have_databricks_secrets == 'true' }} - - ignore exclusion if: - # Do not use this property outside of the matrix configuration. - # - # This is added to all matrix entries so they may be conditionally - # excluded by adding them to the excludes list with a GHA expression - # for this property. - # - If the expression evaluates to true, it will never match the a - # actual value of the property, and will therefore not be excluded. - # - If the expression evaluates to false, it will match the actual - # value of the property, and the exclusion will apply normally. 
- - "false" - include: - # this suite is not meant to be run with different configs - - config: default - suite: suite-6-non-generic - # this suite is not meant to be run with different configs - - config: default - suite: suite-7-non-generic - # this suite is not meant to be run with different configs - - config: default - suite: suite-8-non-generic - # this suite is not meant to be run with different configs - - config: default - suite: suite-tpcds - # this suite is not meant to be run with different configs - - config: default - suite: suite-parquet - # this suite is not meant to be run with different configs - - config: default - suite: suite-oauth2 - # this suite is not meant to be run with different configs - - config: default - suite: suite-ldap - # this suite is not meant to be run with different configs - - config: default - suite: suite-compatibility - # this suite is designed specifically for apache-hive3. TODO remove the suite once we can run all regular tests on apache-hive3. - - config: apache-hive3 - suite: suite-hms-only - # this suite is not meant to be run with different configs - - config: default - suite: suite-all - # this suite is not meant to be run with different configs - - config: default - suite: suite-delta-lake-oss - EOF - - name: Build PT matrix (all) - if: | - github.event_name != 'pull_request' || - steps.filter.outputs.product-tests == 'true' || - contains(github.event.pull_request.labels.*.name, 'tests:all') || - contains(github.event.pull_request.labels.*.name, 'tests:all-product') - run: | - # converts entire YAML file into JSON - no filtering since we want all PTs to run - ./.github/bin/build-pt-matrix-from-impacted-connectors.py -v -m .github/test-pt-matrix.yaml -o matrix.json - - name: Build PT matrix (impacted-features) - if: | - github.event_name == 'pull_request' && - steps.filter.outputs.product-tests == 'false' && - !contains(github.event.pull_request.labels.*.name, 'tests:all') && - !contains(github.event.pull_request.labels.*.name, 'product-tests:all') - # all these envs are required to be set by some product test environments - env: - ABFS_CONTAINER: - ABFS_ACCOUNT: - ABFS_ACCESS_KEY: - S3_BUCKET: - AWS_REGION: - DATABRICKS_AWS_ACCESS_KEY_ID: - DATABRICKS_AWS_SECRET_ACCESS_KEY: - DATABRICKS_73_JDBC_URL: - DATABRICKS_91_JDBC_URL: - DATABRICKS_104_JDBC_URL: - DATABRICKS_LOGIN: - DATABRICKS_TOKEN: - GCP_CREDENTIALS_KEY: - GCP_STORAGE_BUCKET: - run: | - # converts filtered YAML file into JSON - ./.github/bin/build-pt-matrix-from-impacted-connectors.py -v -m .github/test-pt-matrix.yaml -i impacted-features.log -o matrix.json - - id: set-matrix - run: | - echo "Matrix: $(jq '.' 
matrix.json)" - echo "::set-output name=matrix::$(cat matrix.json)" - - name: Clean local Maven repo - # Avoid creating a cache entry because this job doesn't download all dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: rm -rf ~/.m2/repository - - pt: - runs-on: ubuntu-latest - # explicitly define the name to avoid adding the value of the `ignore exclusion if` matrix item - name: pt (${{ matrix.config }}, ${{ matrix.suite }}, ${{ matrix.jdk }}) - if: needs.build-pt.outputs.matrix != '{}' - strategy: - fail-fast: false - matrix: ${{ fromJson(needs.build-pt.outputs.matrix) }} - # PT Launcher's timeout defaults to 2h, add some margin - timeout-minutes: 130 - needs: build-pt - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # checkout all commits, as the build result depends on `git describe` equivalent - - uses: actions/setup-java@v3 - with: - distribution: 'zulu' - java-version: 17 - - name: Product tests artifact - uses: actions/download-artifact@v3 - with: - name: product tests and server tarball - - name: Fix artifact permissions - run: | - find . -type f -name \*-executable.jar -exec chmod 0777 {} \; - - name: Enable impact analysis - # don't enable this on pushes to master and in PRs in the main repository (not from forks) - # because these are most often used to run all tests with additional secrets - if: | - needs.build-pt.outputs.product-tests-changed == 'false' && - github.event_name == 'pull_request' && - github.event.pull_request.head.repo.full_name != github.repository && - !contains(github.event.pull_request.labels.*.name, 'tests:all') && - !contains(github.event.pull_request.labels.*.name, 'tests:all-product') - run: echo "PTL_OPTS=--impacted-features impacted-features.log" >> $GITHUB_ENV - - name: Product Tests - env: - ABFS_CONTAINER: ${{ secrets.AZURE_ABFS_CONTAINER }} - ABFS_ACCOUNT: ${{ secrets.AZURE_ABFS_ACCOUNT }} - ABFS_ACCESS_KEY: ${{ secrets.AZURE_ABFS_ACCESSKEY }} - S3_BUCKET: trino-ci-test - AWS_REGION: us-east-2 - DATABRICKS_AWS_ACCESS_KEY_ID: ${{ secrets.DATABRICKS_AWS_ACCESS_KEY_ID }} - DATABRICKS_AWS_SECRET_ACCESS_KEY: ${{ secrets.DATABRICKS_AWS_SECRET_ACCESS_KEY }} - DATABRICKS_73_JDBC_URL: ${{ secrets.DATABRICKS_73_JDBC_URL }} - DATABRICKS_91_JDBC_URL: ${{ secrets.DATABRICKS_91_JDBC_URL }} - DATABRICKS_104_JDBC_URL: ${{ secrets.DATABRICKS_104_JDBC_URL }} - DATABRICKS_LOGIN: token - DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} - GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }} - GCP_STORAGE_BUCKET: trino-ci-test-us-east - run: | - testing/bin/ptl suite run \ - --suite ${{ matrix.suite }} \ - --config config-${{ matrix.config }} \ - ${PTL_OPTS:-} \ - --bind=off --logs-dir logs/ --timeout 2h - - name: Upload test logs and results - uses: actions/upload-artifact@v3 - # Upload all test reports only on failure, because the artifacts are large - if: failure() - with: - name: result pt (${{ matrix.config }}, ${{ matrix.suite }}, ${{ matrix.jdk }}) - path: | - testing/trino-product-tests/target/* - logs/* - - name: Upload test report - uses: actions/upload-artifact@v3 - # Always upload the test report for the annotate.yml workflow, - # but only the single XML file to keep the artifact small - if: always() - with: - # Name prefix is checked in the `Annotate checks` workflow - name: test report pt (${{ matrix.config }}, ${{ matrix.suite }}, ${{ matrix.jdk }}) - path: testing/trino-product-tests/target/reports/**/testng-results.xml - retention-days: ${{ env.TEST_REPORT_RETENTION_DAYS }}
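
Note on the retry behavior introduced in PATCH 1/2: doWithRetries invokes the request up to the given number of attempts, rethrows a non-retryable PinotException immediately, records the first failure (wrapping unexpected exceptions as PINOT_UNCLASSIFIED_ERROR), and sleeps DEFAULT_RETRY_INTERVAL (1000 ms) between attempts before the first recorded error is ultimately surfaced. Below is a minimal, self-contained sketch of that loop for illustration only; it is not part of either patch, RuntimeException stands in for PinotException, and the RetrySketch class and its main method are hypothetical.

    import java.util.function.Function;

    public class RetrySketch
    {
        // Matches the default retry interval added to PinotClient in the patch (milliseconds).
        private static final int DEFAULT_RETRY_INTERVAL = 1000;

        // Simplified retry loop: remember the first failure, sleep between attempts,
        // and rethrow the first failure once the attempts are exhausted.
        public static <T> T doWithRetries(int retries, Function<Integer, T> caller, int retryInterval)
        {
            RuntimeException firstError = null;
            for (int i = 0; i < retries; i++) {
                try {
                    return caller.apply(i);
                }
                catch (RuntimeException e) {
                    if (firstError == null) {
                        firstError = e;
                    }
                    try {
                        Thread.sleep(retryInterval);
                    }
                    catch (InterruptedException ignored) {
                        // Sleep interrupted, keep retrying (as in the patch)
                    }
                }
            }
            throw firstError;
        }

        public static void main(String[] args)
        {
            // Hypothetical usage: the first two attempts fail, the third succeeds.
            String result = doWithRetries(10, attempt -> {
                if (attempt < 2) {
                    throw new RuntimeException("transient failure on attempt " + attempt);
                }
                return "ok on attempt " + attempt;
            }, DEFAULT_RETRY_INTERVAL);
            System.out.println(result);
        }
    }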