diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/HiveMinioDataLake.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/HiveMinioDataLake.java
index b58855bd7da1..28502f371d7f 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/HiveMinioDataLake.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/HiveMinioDataLake.java
@@ -64,7 +64,7 @@ public HiveMinioDataLake(String bucketName, Map<String, String> hiveHadoopFilesT
         this.hiveHadoop = closer.register(
                 HiveHadoop.builder()
                         .withFilesToMount(ImmutableMap.<String, String>builder()
-                                .put("hive_s3_insert_overwrite/hive-core-site.xml", "/etc/hadoop/conf/core-site.xml")
+                                .put("hive_minio_datalake/hive-core-site.xml", "/etc/hadoop/conf/core-site.xml")
                                 .putAll(hiveHadoopFilesToMount)
                                 .buildOrThrow())
                         .withImage(hiveHadoopImage)
diff --git a/plugin/trino-hive/src/test/resources/hive_s3_insert_overwrite/hive-core-site.xml b/plugin/trino-hive/src/test/resources/hive_minio_datalake/hive-core-site.xml
similarity index 82%
rename from plugin/trino-hive/src/test/resources/hive_s3_insert_overwrite/hive-core-site.xml
rename to plugin/trino-hive/src/test/resources/hive_minio_datalake/hive-core-site.xml
index 0679865ea4be..38083c633ed9 100644
--- a/plugin/trino-hive/src/test/resources/hive_s3_insert_overwrite/hive-core-site.xml
+++ b/plugin/trino-hive/src/test/resources/hive_minio_datalake/hive-core-site.xml
@@ -20,4 +20,8 @@
         <name>fs.s3a.path.style.access</name>
         <value>true</value>
     </property>
+    <property>
+        <name>fs.s3.impl</name>
+        <value>org.apache.hadoop.fs.s3a.S3AFileSystem</value>
+    </property>
 </configuration>
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorSmokeTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorSmokeTest.java
index 6feb58668ff3..f59c8bcd4edc 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorSmokeTest.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorSmokeTest.java
@@ -15,17 +15,24 @@
 
 import io.trino.testing.BaseConnectorSmokeTest;
 import io.trino.testing.TestingConnectorBehavior;
+import org.apache.iceberg.FileFormat;
 import org.testng.annotations.Test;
 
-import java.io.File;
-
 import static java.lang.String.format;
+import static java.util.Objects.requireNonNull;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThatThrownBy;
 
 public abstract class BaseIcebergConnectorSmokeTest
         extends BaseConnectorSmokeTest
 {
+    protected final FileFormat format;
+
+    public BaseIcebergConnectorSmokeTest(FileFormat format)
+    {
+        this.format = requireNonNull(format, "format is null");
+    }
+
     @Override
     protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
     {
@@ -59,17 +66,17 @@ public void testRowLevelDelete()
     @Override
     public void testShowCreateTable()
     {
-        File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile();
+        String schemaName = getSession().getSchema().orElseThrow();
         assertThat((String) computeScalar("SHOW CREATE TABLE region"))
-                .isEqualTo("" +
-                        "CREATE TABLE iceberg.tpch.region (\n" +
+                .matches("" +
+                        "CREATE TABLE iceberg." + schemaName + ".region \\(\n" +
                         "   regionkey bigint,\n" +
                         "   name varchar,\n" +
                         "   comment varchar\n" +
-                        ")\n" +
-                        "WITH (\n" +
-                        "   format = 'ORC',\n" +
-                        format("   location = '%s/iceberg_data/tpch/region'\n", tempDir) +
-                        ")");
+                        "\\)\n" +
+                        "WITH \\(\n" +
+                        "   format = '" + format.name() + "',\n" +
+                        format("   location = '.*/" + schemaName + "/region'\n") +
+                        "\\)");
     }
 }
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioConnectorSmokeTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioConnectorSmokeTest.java
new file mode 100644
index 000000000000..747dd701c0b4
--- /dev/null
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioConnectorSmokeTest.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.iceberg;
+
+import com.google.common.collect.ImmutableMap;
+import io.trino.plugin.hive.containers.HiveMinioDataLake;
+import io.trino.testing.QueryRunner;
+import org.apache.iceberg.FileFormat;
+import org.testng.annotations.Test;
+
+import java.util.Locale;
+import java.util.Optional;
+
+import static io.trino.plugin.hive.containers.HiveMinioDataLake.ACCESS_KEY;
+import static io.trino.plugin.hive.containers.HiveMinioDataLake.SECRET_KEY;
+import static io.trino.plugin.iceberg.IcebergQueryRunner.createIcebergQueryRunner;
+import static io.trino.testing.sql.TestTable.randomTableSuffix;
+import static java.lang.String.format;
+
+public abstract class BaseIcebergMinioConnectorSmokeTest
+        extends BaseIcebergConnectorSmokeTest
+{
+    private final String schemaName;
+    private final String bucketName;
+
+    private HiveMinioDataLake hiveMinioDataLake;
+
+    public BaseIcebergMinioConnectorSmokeTest(FileFormat format)
+    {
+        super(format);
+        this.schemaName = "tpch_" + format.name().toLowerCase(Locale.ENGLISH);
+        this.bucketName = "test-iceberg-minio-smoke-test-" + randomTableSuffix();
+    }
+
+    @Override
+    protected QueryRunner createQueryRunner()
+            throws Exception
+    {
+        this.hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName, ImmutableMap.of()));
+        this.hiveMinioDataLake.start();
+        return createIcebergQueryRunner(
+                ImmutableMap.of(),
+                ImmutableMap.<String, String>builder()
+                        .put("iceberg.file-format", format.name())
+                        .put("iceberg.catalog.type", "HIVE_METASTORE")
+                        .put("hive.metastore.uri", "thrift://" + hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint())
+                        .put("hive.s3.aws-access-key", ACCESS_KEY)
+                        .put("hive.s3.aws-secret-key", SECRET_KEY)
+                        .put("hive.s3.endpoint", "http://" + hiveMinioDataLake.getMinio().getMinioApiEndpoint())
+                        .put("hive.s3.path-style-access", "true")
+                        .put("hive.s3.streaming.part-size", "5MB")
+                        .buildOrThrow(),
+                SchemaInitializer.builder()
+                        .withSchemaName(schemaName)
+                        .withClonedTpchTables(REQUIRED_TPCH_TABLES)
+                        .withSchemaProperties(ImmutableMap.of(
+                                "location", "'s3://" + bucketName + "/" + schemaName + "'"))
+                        .build(),
+                Optional.empty());
+    }
+
+    @Override
+    protected String createSchemaSql(String schemaName)
+    {
+        return "CREATE SCHEMA IF NOT EXISTS " + schemaName + " WITH (location = 's3://" + bucketName + "/" + schemaName + "')";
+    }
+
+    @Test
+    @Override
+    public void testRenameSchema()
+    {
+        assertQueryFails(
+                format("ALTER SCHEMA %s RENAME TO %s", schemaName, schemaName + randomTableSuffix()),
+                "Hive metastore does not support renaming schemas");
+    }
+}
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergConnectorSmokeTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergConnectorSmokeTest.java
index 61848158ee80..c816a6c753f4 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergConnectorSmokeTest.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergConnectorSmokeTest.java
@@ -16,12 +16,18 @@
 import io.trino.testing.QueryRunner;
 
 import static io.trino.plugin.iceberg.IcebergQueryRunner.createIcebergQueryRunner;
+import static org.apache.iceberg.FileFormat.ORC;
 
 // Redundant over TestIcebergOrcConnectorTest, but exists to exercise BaseConnectorSmokeTest
 // Some features like materialized views may be supported by Iceberg only.
 public class TestIcebergConnectorSmokeTest
         extends BaseIcebergConnectorSmokeTest
 {
+    public TestIcebergConnectorSmokeTest()
+    {
+        super(ORC);
+    }
+
     @Override
     protected QueryRunner createQueryRunner()
             throws Exception
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMinioOrcConnectorSmokeTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMinioOrcConnectorSmokeTest.java
new file mode 100644
index 000000000000..7fccf3732dfd
--- /dev/null
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMinioOrcConnectorSmokeTest.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.iceberg;
+
+import static org.apache.iceberg.FileFormat.ORC;
+
+public class TestIcebergMinioOrcConnectorSmokeTest
+        extends BaseIcebergMinioConnectorSmokeTest
+{
+    public TestIcebergMinioOrcConnectorSmokeTest()
+    {
+        super(ORC);
+    }
+}
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMinioParquetConnectorSmokeTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMinioParquetConnectorSmokeTest.java
new file mode 100644
index 000000000000..bdd7f2a25837
--- /dev/null
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMinioParquetConnectorSmokeTest.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.iceberg;
+
+import static org.apache.iceberg.FileFormat.PARQUET;
+
+public class TestIcebergMinioParquetConnectorSmokeTest
+        extends BaseIcebergMinioConnectorSmokeTest
+{
+    public TestIcebergMinioParquetConnectorSmokeTest()
+    {
+        super(PARQUET);
+    }
+}
diff --git a/testing/trino-testing/src/main/java/io/trino/testing/BaseConnectorSmokeTest.java b/testing/trino-testing/src/main/java/io/trino/testing/BaseConnectorSmokeTest.java
index ff22f1181243..08a1470296a5 100644
--- a/testing/trino-testing/src/main/java/io/trino/testing/BaseConnectorSmokeTest.java
+++ b/testing/trino-testing/src/main/java/io/trino/testing/BaseConnectorSmokeTest.java
@@ -54,6 +54,11 @@ protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
         return connectorBehavior.hasBehaviorByDefault(this::hasBehavior);
     }
 
+    protected String createSchemaSql(String schemaName)
+    {
+        return "CREATE SCHEMA " + schemaName;
+    }
+
     /**
      * Ensure the tests are run with {@link DistributedQueryRunner}. E.g. {@link LocalQueryRunner} takes some
      * shortcuts, not exercising certain aspects.
@@ -245,11 +250,11 @@ public void testCreateSchema()
     {
         String schemaName = "test_schema_create_" + randomTableSuffix();
         if (!hasBehavior(SUPPORTS_CREATE_SCHEMA)) {
-            assertQueryFails("CREATE SCHEMA " + schemaName, "This connector does not support creating schemas");
+            assertQueryFails(createSchemaSql(schemaName), "This connector does not support creating schemas");
             return;
         }
 
-        assertUpdate("CREATE SCHEMA " + schemaName);
+        assertUpdate(createSchemaSql(schemaName));
         assertThat(query("SHOW SCHEMAS"))
                 .skippingTypesCheck()
                 .containsAll(format("VALUES '%s', '%s'", getSession().getSchema().orElseThrow(), schemaName));
@@ -339,7 +344,7 @@ public void testRenameTableAcrossSchemas()
         assertUpdate("CREATE TABLE " + oldTable + " (a bigint, b double)");
 
         String schemaName = "test_schema_" + randomTableSuffix();
-        assertUpdate("CREATE SCHEMA " + schemaName);
+        assertUpdate(createSchemaSql(schemaName));
 
         String newTable = schemaName + ".test_rename_new_" + randomTableSuffix();
         assertUpdate("ALTER TABLE " + oldTable + " RENAME TO " + newTable);