diff --git a/core/trino-main/src/test/java/io/trino/sql/query/QueryAssertions.java b/core/trino-main/src/test/java/io/trino/sql/query/QueryAssertions.java
index a39fdf8123fa..e50954ddbba7 100644
--- a/core/trino-main/src/test/java/io/trino/sql/query/QueryAssertions.java
+++ b/core/trino-main/src/test/java/io/trino/sql/query/QueryAssertions.java
@@ -33,6 +33,7 @@
import io.trino.sql.planner.plan.JoinNode;
import io.trino.sql.planner.plan.PlanNode;
import io.trino.sql.planner.plan.TableScanNode;
+import io.trino.sql.planner.plan.ValuesNode;
import io.trino.testing.LocalQueryRunner;
import io.trino.testing.MaterializedResult;
import io.trino.testing.MaterializedRow;
@@ -501,6 +502,34 @@ public QueryAssert isFullyPushedDown()
return this;
}
+ /**
+ * Verifies query is fully pushed down and that the table scan is replaced with an empty Values node.
+ * Verifies that results are the same as when pushdown is fully disabled.
+ */
+ @CanIgnoreReturnValue
+ public QueryAssert isReplacedWithEmptyValues()
+ {
+ checkState(!(runner instanceof LocalQueryRunner), "isReplacedWithEmptyValues() currently does not work with LocalQueryRunner");
+
+ transaction(runner.getTransactionManager(), runner.getMetadata(), runner.getAccessControl())
+ .execute(session, session -> {
+ Plan plan = runner.createPlan(session, query);
+ assertPlan(
+ session,
+ runner.getMetadata(),
+ runner.getFunctionManager(),
+ noopStatsCalculator(),
+ plan,
+ PlanMatchPattern.output(PlanMatchPattern.node(ValuesNode.class).with(ValuesNode.class, valuesNode -> valuesNode.getRowCount() == 0)));
+ });
+
+ if (!skipResultsCorrectnessCheckForPushdown) {
+ // Compare the results with pushdown disabled, so that an explicit matches() call is not needed
+ hasCorrectResultsRegardlessOfPushdown();
+ }
+ return this;
+ }
+
/**
* Verifies query is not fully pushed down and that results are the same as when pushdown is fully disabled.
*
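For context, a minimal usage sketch of the new assertion. The query and table are the ones created by the Delta Lake test added later in this patch; the point is that the connector-derived domain proves the scan empty, so the plan collapses to an Output over an empty Values node:

    // Hypothetical usage inside a connector test extending AbstractTestQueryFramework;
    // mirrors testTimestampWithTimeZoneOptimization in TestDeltaLakeConnectorTest below.
    assertThat(query("SELECT * FROM " + tableName + " WHERE date_trunc('day', part) = TIMESTAMP '2005-09-10 00:00:00.000 +07:00'"))
            .isReplacedWithEmptyValues();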
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/ConstraintExtractor.java b/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/filter/UtcConstraintExtractor.java
similarity index 75%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/ConstraintExtractor.java
rename to lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/filter/UtcConstraintExtractor.java
index 7e84a774db6f..5b8f19c39472 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/ConstraintExtractor.java
+++ b/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/filter/UtcConstraintExtractor.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg;
+package io.trino.plugin.base.filter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
@@ -52,9 +52,12 @@
import static io.trino.spi.expression.StandardFunctions.LESS_THAN_OPERATOR_FUNCTION_NAME;
import static io.trino.spi.expression.StandardFunctions.LESS_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME;
import static io.trino.spi.expression.StandardFunctions.NOT_EQUAL_OPERATOR_FUNCTION_NAME;
+import static io.trino.spi.type.DateTimeEncoding.packDateTimeWithZone;
+import static io.trino.spi.type.DateTimeEncoding.unpackMillisUtc;
import static io.trino.spi.type.TimeZoneKey.UTC_KEY;
-import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.MAX_SHORT_PRECISION;
import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_DAY;
+import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_SECOND;
import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_NANOSECOND;
import static java.lang.Math.toIntExact;
import static java.math.RoundingMode.UNNECESSARY;
@@ -62,17 +65,22 @@
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
-public final class ConstraintExtractor
+/**
+ * Some expressions involving the TIMESTAMP WITH TIME ZONE type can be optimized when the time zone is known.
+ * This is not possible in the engine, but it is possible in a connector that follows a convention
+ * regarding time zones. In some connectors, such as the Delta Lake and Iceberg connectors,
+ * all values of the TIMESTAMP WITH TIME ZONE type are represented in the UTC time zone.
+ */
+public final class UtcConstraintExtractor
{
- private ConstraintExtractor() {}
+ private UtcConstraintExtractor() {}
public static ExtractionResult extractTupleDomain(Constraint constraint)
{
- TupleDomain<IcebergColumnHandle> result = constraint.getSummary()
- .transformKeys(IcebergColumnHandle.class::cast);
+ TupleDomain<ColumnHandle> result = constraint.getSummary();
ImmutableList.Builder<ConnectorExpression> remainingExpressions = ImmutableList.builder();
for (ConnectorExpression conjunct : extractConjuncts(constraint.getExpression())) {
- Optional<TupleDomain<IcebergColumnHandle>> converted = toTupleDomain(conjunct, constraint.getAssignments());
+ Optional<TupleDomain<ColumnHandle>> converted = toTupleDomain(conjunct, constraint.getAssignments());
if (converted.isEmpty()) {
remainingExpressions.add(conjunct);
}
@@ -86,7 +94,7 @@ public static ExtractionResult extractTupleDomain(Constraint constraint)
return new ExtractionResult(result, and(remainingExpressions.build()));
}
- private static Optional<TupleDomain<IcebergColumnHandle>> toTupleDomain(ConnectorExpression expression, Map<String, ColumnHandle> assignments)
+ private static Optional<TupleDomain<ColumnHandle>> toTupleDomain(ConnectorExpression expression, Map<String, ColumnHandle> assignments)
{
if (expression instanceof Call call) {
return toTupleDomain(call, assignments);
@@ -94,7 +102,7 @@ private static Optional> toTupleDomain(Connecto
return Optional.empty();
}
- private static Optional<TupleDomain<IcebergColumnHandle>> toTupleDomain(Call call, Map<String, ColumnHandle> assignments)
+ private static Optional<TupleDomain<ColumnHandle>> toTupleDomain(Call call, Map<String, ColumnHandle> assignments)
{
if (call.getArguments().size() == 2) {
ConnectorExpression firstArgument = call.getArguments().get(0);
@@ -145,7 +153,7 @@ private static Optional> toTupleDomain(Call cal
return Optional.empty();
}
- private static Optional<TupleDomain<IcebergColumnHandle>> unwrapCastInComparison(
+ private static Optional<TupleDomain<ColumnHandle>> unwrapCastInComparison(
// upon invocation, we don't know if this really is a comparison
FunctionName functionName,
ConnectorExpression castSource,
@@ -164,13 +172,10 @@ private static Optional> unwrapCastInComparison
return Optional.empty();
}
- IcebergColumnHandle column = resolve(sourceVariable, assignments);
- if (column.getType() instanceof TimestampWithTimeZoneType sourceType) {
- // Iceberg supports only timestamp(6) with time zone
- checkArgument(sourceType.getPrecision() == 6, "Unexpected type: %s", column.getType());
-
+ ColumnHandle column = resolve(sourceVariable, assignments);
+ if (sourceVariable.getType() instanceof TimestampWithTimeZoneType columnType) {
if (constant.getType() == DateType.DATE) {
- return unwrapTimestampTzToDateCast(column, functionName, (long) constant.getValue())
+ return unwrapTimestampTzToDateCast(column, columnType, functionName, (long) constant.getValue())
.map(domain -> TupleDomain.withColumnDomains(ImmutableMap.of(column, domain)));
}
// TODO support timestamp constant
@@ -179,38 +184,50 @@ private static Optional> unwrapCastInComparison
return Optional.empty();
}
- private static Optional<Domain> unwrapTimestampTzToDateCast(IcebergColumnHandle column, FunctionName functionName, long date)
+ private static Optional<Domain> unwrapTimestampTzToDateCast(ColumnHandle column, Type columnType, FunctionName functionName, long date)
{
- Type type = column.getType();
- checkArgument(type.equals(TIMESTAMP_TZ_MICROS), "Column of unexpected type %s: %s", type, column);
-
// Verify no overflow. Date values must be in integer range.
verify(date <= Integer.MAX_VALUE, "Date value out of range: %s", date);
- // In Iceberg, timestamp with time zone values are all in UTC
-
- LongTimestampWithTimeZone startOfDate = LongTimestampWithTimeZone.fromEpochMillisAndFraction(date * MILLISECONDS_PER_DAY, 0, UTC_KEY);
- LongTimestampWithTimeZone startOfNextDate = LongTimestampWithTimeZone.fromEpochMillisAndFraction((date + 1) * MILLISECONDS_PER_DAY, 0, UTC_KEY);
+ Object startOfDate;
+ Object startOfNextDate;
+ int precision = ((TimestampWithTimeZoneType) columnType).getPrecision();
+ if (precision <= MAX_SHORT_PRECISION) {
+ startOfDate = packDateTimeWithZone(date * MILLISECONDS_PER_DAY, UTC_KEY);
+ startOfNextDate = packDateTimeWithZone((date + 1) * MILLISECONDS_PER_DAY, UTC_KEY);
+ }
+ else {
+ startOfDate = LongTimestampWithTimeZone.fromEpochMillisAndFraction(date * MILLISECONDS_PER_DAY, 0, UTC_KEY);
+ startOfNextDate = LongTimestampWithTimeZone.fromEpochMillisAndFraction((date + 1) * MILLISECONDS_PER_DAY, 0, UTC_KEY);
+ }
- return createDomain(functionName, type, startOfDate, startOfNextDate);
+ return createDomain(functionName, columnType, startOfDate, startOfNextDate);
}
private static Optional<Domain> unwrapYearInTimestampTzComparison(FunctionName functionName, Type type, Constant constant)
{
checkArgument(constant.getValue() != null, "Unexpected constant: %s", constant);
- checkArgument(type.equals(TIMESTAMP_TZ_MICROS), "Unexpected type: %s", type);
int year = toIntExact((Long) constant.getValue());
ZonedDateTime periodStart = ZonedDateTime.of(year, 1, 1, 0, 0, 0, 0, UTC);
ZonedDateTime periodEnd = periodStart.plusYears(1);
- LongTimestampWithTimeZone start = LongTimestampWithTimeZone.fromEpochSecondsAndFraction(periodStart.toEpochSecond(), 0, UTC_KEY);
- LongTimestampWithTimeZone end = LongTimestampWithTimeZone.fromEpochSecondsAndFraction(periodEnd.toEpochSecond(), 0, UTC_KEY);
+ Object start;
+ Object end;
+ int precision = ((TimestampWithTimeZoneType) type).getPrecision();
+ if (precision <= MAX_SHORT_PRECISION) {
+ start = packDateTimeWithZone(periodStart.toEpochSecond() * MILLISECONDS_PER_SECOND, UTC_KEY);
+ end = packDateTimeWithZone(periodEnd.toEpochSecond() * MILLISECONDS_PER_SECOND, UTC_KEY);
+ }
+ else {
+ start = LongTimestampWithTimeZone.fromEpochSecondsAndFraction(periodStart.toEpochSecond(), 0, UTC_KEY);
+ end = LongTimestampWithTimeZone.fromEpochSecondsAndFraction(periodEnd.toEpochSecond(), 0, UTC_KEY);
+ }
return createDomain(functionName, type, start, end);
}
- private static Optional<Domain> createDomain(FunctionName functionName, Type type, LongTimestampWithTimeZone startOfDate, LongTimestampWithTimeZone startOfNextDate)
+ private static Optional<Domain> createDomain(FunctionName functionName, Type type, Object startOfDate, Object startOfNextDate)
{
if (functionName.equals(EQUAL_OPERATOR_FUNCTION_NAME)) {
return Optional.of(Domain.create(ValueSet.ofRanges(Range.range(type, startOfDate, true, startOfNextDate, false)), false));
@@ -237,7 +254,7 @@ private static Optional createDomain(FunctionName functionName, Type typ
return Optional.empty();
}
- private static Optional<TupleDomain<IcebergColumnHandle>> unwrapDateTruncInComparison(
+ private static Optional<TupleDomain<ColumnHandle>> unwrapDateTruncInComparison(
// upon invocation, we don't know if this really is a comparison
FunctionName functionName,
Constant unit,
@@ -261,10 +278,8 @@ private static Optional> unwrapDateTruncInCompa
return Optional.empty();
}
- IcebergColumnHandle column = resolve(sourceVariable, assignments);
- if (column.getType() instanceof TimestampWithTimeZoneType type) {
- // Iceberg supports only timestamp(6) with time zone
- checkArgument(type.getPrecision() == 6, "Unexpected type: %s", column.getType());
+ ColumnHandle column = resolve(sourceVariable, assignments);
+ if (sourceVariable.getType() instanceof TimestampWithTimeZoneType type) {
verify(constant.getType().equals(type), "This method should not be invoked when type mismatch (i.e. surely not a comparison)");
return unwrapDateTruncInComparison(((Slice) unit.getValue()).toStringUtf8(), functionName, constant)
@@ -278,12 +293,23 @@ private static Optional unwrapDateTruncInComparison(String unit, Functio
{
Type type = constant.getType();
checkArgument(constant.getValue() != null, "Unexpected constant: %s", constant);
- checkArgument(type.equals(TIMESTAMP_TZ_MICROS), "Unexpected type: %s", type);
- // Normalized to UTC because for comparisons the zone is irrelevant
- ZonedDateTime dateTime = Instant.ofEpochMilli(((LongTimestampWithTimeZone) constant.getValue()).getEpochMillis())
- .plusNanos(LongMath.divide(((LongTimestampWithTimeZone) constant.getValue()).getPicosOfMilli(), PICOSECONDS_PER_NANOSECOND, UNNECESSARY))
- .atZone(UTC);
+ ZonedDateTime dateTime;
+ int precision = ((TimestampWithTimeZoneType) type).getPrecision();
+ if (precision <= MAX_SHORT_PRECISION) {
+ // Normalized to UTC because for comparisons the zone is irrelevant
+ dateTime = Instant.ofEpochMilli(unpackMillisUtc((long) constant.getValue()))
+ .atZone(UTC);
+ }
+ else {
+ if (precision > 9) {
+ return Optional.empty();
+ }
+ // Normalized to UTC because for comparisons the zone is irrelevant
+ dateTime = Instant.ofEpochMilli(((LongTimestampWithTimeZone) constant.getValue()).getEpochMillis())
+ .plusNanos(LongMath.divide(((LongTimestampWithTimeZone) constant.getValue()).getPicosOfMilli(), PICOSECONDS_PER_NANOSECOND, UNNECESSARY))
+ .atZone(UTC);
+ }
ZonedDateTime periodStart;
ZonedDateTime nextPeriodStart;
@@ -310,8 +336,16 @@ private static Optional unwrapDateTruncInComparison(String unit, Functio
}
boolean constantAtPeriodStart = dateTime.equals(periodStart);
- LongTimestampWithTimeZone start = LongTimestampWithTimeZone.fromEpochSecondsAndFraction(periodStart.toEpochSecond(), 0, UTC_KEY);
- LongTimestampWithTimeZone end = LongTimestampWithTimeZone.fromEpochSecondsAndFraction(nextPeriodStart.toEpochSecond(), 0, UTC_KEY);
+ Object start;
+ Object end;
+ if (precision <= MAX_SHORT_PRECISION) {
+ start = packDateTimeWithZone(periodStart.toEpochSecond() * MILLISECONDS_PER_SECOND, UTC_KEY);
+ end = packDateTimeWithZone(nextPeriodStart.toEpochSecond() * MILLISECONDS_PER_SECOND, UTC_KEY);
+ }
+ else {
+ start = LongTimestampWithTimeZone.fromEpochSecondsAndFraction(periodStart.toEpochSecond(), 0, UTC_KEY);
+ end = LongTimestampWithTimeZone.fromEpochSecondsAndFraction(nextPeriodStart.toEpochSecond(), 0, UTC_KEY);
+ }
if (functionName.equals(EQUAL_OPERATOR_FUNCTION_NAME)) {
if (!constantAtPeriodStart) {
@@ -352,7 +386,7 @@ private static Optional unwrapDateTruncInComparison(String unit, Functio
return Optional.empty();
}
- private static Optional<TupleDomain<IcebergColumnHandle>> unwrapYearInTimestampTzComparison(
+ private static Optional<TupleDomain<ColumnHandle>> unwrapYearInTimestampTzComparison(
// upon invocation, we don't know if this really is a comparison
FunctionName functionName,
ConnectorExpression yearSource,
@@ -371,11 +405,8 @@ private static Optional> unwrapYearInTimestampT
return Optional.empty();
}
- IcebergColumnHandle column = resolve(sourceVariable, assignments);
- if (column.getType() instanceof TimestampWithTimeZoneType type) {
- // Iceberg supports only timestamp(6) with time zone
- checkArgument(type.getPrecision() == 6, "Unexpected type: %s", column.getType());
-
+ ColumnHandle column = resolve(sourceVariable, assignments);
+ if (sourceVariable.getType() instanceof TimestampWithTimeZoneType type) {
return unwrapYearInTimestampTzComparison(functionName, type, constant)
.map(domain -> TupleDomain.withColumnDomains(ImmutableMap.of(column, domain)));
}
@@ -383,14 +414,14 @@ private static Optional> unwrapYearInTimestampT
return Optional.empty();
}
- private static IcebergColumnHandle resolve(Variable variable, Map<String, ColumnHandle> assignments)
+ private static ColumnHandle resolve(Variable variable, Map<String, ColumnHandle> assignments)
{
ColumnHandle columnHandle = assignments.get(variable.getName());
checkArgument(columnHandle != null, "No assignment for %s", variable);
- return (IcebergColumnHandle) columnHandle;
+ return columnHandle;
}
- public record ExtractionResult(TupleDomain<IcebergColumnHandle> tupleDomain, ConnectorExpression remainingExpression)
+ public record ExtractionResult(TupleDomain<ColumnHandle> tupleDomain, ConnectorExpression remainingExpression)
{
public ExtractionResult
{
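As a usage illustration, here is a minimal sketch of how a connector that stores all TIMESTAMP WITH TIME ZONE values in UTC could wire the relocated extractor into ConnectorMetadata#applyFilter. ExampleMetadata, ExampleTableHandle, and withPredicate(...) are hypothetical placeholders invented for this sketch, not part of this patch; the SPI calls and UtcConstraintExtractor.extractTupleDomain are the ones the patch touches (see the DeltaLakeMetadata change below for the real integration):

    import io.trino.plugin.base.filter.UtcConstraintExtractor.ExtractionResult;
    import io.trino.spi.connector.ColumnHandle;
    import io.trino.spi.connector.ConnectorMetadata;
    import io.trino.spi.connector.ConnectorSession;
    import io.trino.spi.connector.ConnectorTableHandle;
    import io.trino.spi.connector.Constraint;
    import io.trino.spi.connector.ConstraintApplicationResult;
    import io.trino.spi.predicate.TupleDomain;

    import java.util.Optional;

    import static io.trino.plugin.base.filter.UtcConstraintExtractor.extractTupleDomain;

    // Abstract so the sketch only needs to show applyFilter; a real connector implements the rest.
    public abstract class ExampleMetadata
            implements ConnectorMetadata
    {
        // Hypothetical table handle that simply accumulates the pushed-down predicate
        record ExampleTableHandle(TupleDomain<ColumnHandle> predicate)
                implements ConnectorTableHandle
        {
            ExampleTableHandle withPredicate(TupleDomain<ColumnHandle> newPredicate)
            {
                return new ExampleTableHandle(predicate.intersect(newPredicate));
            }
        }

        @Override
        public Optional<ConstraintApplicationResult<ConnectorTableHandle>> applyFilter(ConnectorSession session, ConnectorTableHandle handle, Constraint constraint)
        {
            // Convert the constraint summary plus the supported expression conjuncts (CAST to date,
            // date_trunc, year over timestamp with time zone) into column domains; keep the rest.
            ExtractionResult extraction = extractTupleDomain(constraint);
            TupleDomain<ColumnHandle> predicate = extraction.tupleDomain();
            if (predicate.isAll()) {
                return Optional.empty();
            }
            ExampleTableHandle table = (ExampleTableHandle) handle;
            return Optional.of(new ConstraintApplicationResult<>(
                    table.withPredicate(predicate),
                    predicate, // this sketch enforces nothing, so the whole predicate remains a filter
                    extraction.remainingExpression(),
                    false));
        }
    }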
diff --git a/lib/trino-plugin-toolkit/src/test/java/io/trino/plugin/base/filter/TestUtcConstraintExtractor.java b/lib/trino-plugin-toolkit/src/test/java/io/trino/plugin/base/filter/TestUtcConstraintExtractor.java
new file mode 100644
index 000000000000..3bb4c0722e7e
--- /dev/null
+++ b/lib/trino-plugin-toolkit/src/test/java/io/trino/plugin/base/filter/TestUtcConstraintExtractor.java
@@ -0,0 +1,628 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.base.filter;
+
+import com.google.common.collect.ImmutableList;
+import io.trino.spi.connector.ColumnHandle;
+import io.trino.spi.connector.Constraint;
+import io.trino.spi.connector.TestingColumnHandle;
+import io.trino.spi.expression.Call;
+import io.trino.spi.expression.ConnectorExpression;
+import io.trino.spi.expression.Constant;
+import io.trino.spi.expression.FunctionName;
+import io.trino.spi.expression.Variable;
+import io.trino.spi.predicate.Domain;
+import io.trino.spi.predicate.Range;
+import io.trino.spi.predicate.TupleDomain;
+import io.trino.spi.predicate.ValueSet;
+import io.trino.spi.type.DateType;
+import io.trino.spi.type.LongTimestampWithTimeZone;
+import io.trino.spi.type.TimestampType;
+import io.trino.spi.type.TimestampWithTimeZoneType;
+import org.junit.jupiter.api.Test;
+
+import java.time.LocalDate;
+import java.util.Map;
+import java.util.Set;
+
+import static io.airlift.slice.Slices.utf8Slice;
+import static io.trino.plugin.base.filter.UtcConstraintExtractor.extractTupleDomain;
+import static io.trino.spi.expression.StandardFunctions.CAST_FUNCTION_NAME;
+import static io.trino.spi.expression.StandardFunctions.EQUAL_OPERATOR_FUNCTION_NAME;
+import static io.trino.spi.expression.StandardFunctions.GREATER_THAN_OPERATOR_FUNCTION_NAME;
+import static io.trino.spi.expression.StandardFunctions.GREATER_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME;
+import static io.trino.spi.expression.StandardFunctions.IS_DISTINCT_FROM_OPERATOR_FUNCTION_NAME;
+import static io.trino.spi.expression.StandardFunctions.LESS_THAN_OPERATOR_FUNCTION_NAME;
+import static io.trino.spi.expression.StandardFunctions.LESS_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME;
+import static io.trino.spi.expression.StandardFunctions.NOT_EQUAL_OPERATOR_FUNCTION_NAME;
+import static io.trino.spi.type.BigintType.BIGINT;
+import static io.trino.spi.type.BooleanType.BOOLEAN;
+import static io.trino.spi.type.DateTimeEncoding.packDateTimeWithZone;
+import static io.trino.spi.type.DateType.DATE;
+import static io.trino.spi.type.TimeZoneKey.UTC_KEY;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
+import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MILLIS;
+import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_DAY;
+import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_SECOND;
+import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_MICROSECOND;
+import static io.trino.spi.type.VarcharType.createVarcharType;
+import static java.time.ZoneOffset.UTC;
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class TestUtcConstraintExtractor
+{
+ private static final ColumnHandle A_BIGINT = new TestingColumnHandle("a_bigint");
+
+ @Test
+ public void testExtractSummary()
+ {
+ assertThat(extract(
+ new Constraint(
+ TupleDomain.withColumnDomains(Map.of(A_BIGINT, Domain.singleValue(BIGINT, 1L))),
+ Constant.TRUE,
+ Map.of(),
+ values -> {
+ throw new AssertionError("should not be called");
+ },
+ Set.of(A_BIGINT))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_BIGINT, Domain.singleValue(BIGINT, 1L))));
+ }
+
+ /**
+ * Test equivalent of {@code io.trino.sql.planner.iterative.rule.UnwrapCastInComparison} for {@link TimestampWithTimeZoneType}.
+ * {@code UnwrapCastInComparison} handles {@link DateType} and {@link TimestampType}, but cannot handle
+ * {@link TimestampWithTimeZoneType}. Such unwrap would not be monotonic. If we know
+ * that {@link TimestampWithTimeZoneType} is always in UTC zone (point in time, with no time zone information),
+ * we can unwrap.
+ */
+ @Test
+ public void testExtractTimestampTzMillisDateComparison()
+ {
+ String timestampTzColumnSymbol = "timestamp_tz_symbol";
+ TimestampWithTimeZoneType columnType = TIMESTAMP_TZ_MILLIS;
+ ColumnHandle columnHandle = new TestingColumnHandle(timestampTzColumnSymbol);
+
+ ConnectorExpression castOfColumn = new Call(DATE, CAST_FUNCTION_NAME, ImmutableList.of(new Variable(timestampTzColumnSymbol, columnType)));
+
+ LocalDate someDate = LocalDate.of(2005, 9, 10);
+ ConnectorExpression someDateExpression = new Constant(someDate.toEpochDay(), DATE);
+
+ long startOfDateUtcEpochMillis = someDate.atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND;
+ long startOfDateUtc = timestampTzMillisFromEpochMillis(startOfDateUtcEpochMillis);
+ long startOfNextDateUtc = timestampTzMillisFromEpochMillis(startOfDateUtcEpochMillis + MILLISECONDS_PER_DAY);
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.range(columnType, startOfDateUtc, true, startOfNextDateUtc, false)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, NOT_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(
+ Range.lessThan(columnType, startOfDateUtc),
+ Range.greaterThanOrEqual(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, LESS_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.lessThan(columnType, startOfDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, LESS_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.lessThan(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, GREATER_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, GREATER_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, IS_DISTINCT_FROM_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, Domain.create(
+ ValueSet.ofRanges(
+ Range.lessThan(columnType, startOfDateUtc),
+ Range.greaterThanOrEqual(columnType, startOfNextDateUtc)),
+ true))));
+ }
+
+ /**
+ * Test equivalent of {@code io.trino.sql.planner.iterative.rule.UnwrapCastInComparison} for {@link TimestampWithTimeZoneType}.
+ * {@code UnwrapCastInComparison} handles {@link DateType} and {@link TimestampType}, but cannot handle
+ * {@link TimestampWithTimeZoneType}. Such unwrap would not be monotonic. If we know
+ * that {@link TimestampWithTimeZoneType} is always in UTC zone (point in time, with no time zone information),
+ * we can unwrap.
+ */
+ @Test
+ public void testExtractTimestampTzMicrosDateComparison()
+ {
+ String timestampTzColumnSymbol = "timestamp_tz_symbol";
+ TimestampWithTimeZoneType columnType = TIMESTAMP_TZ_MICROS;
+ ColumnHandle columnHandle = new TestingColumnHandle(timestampTzColumnSymbol);
+
+ ConnectorExpression castOfColumn = new Call(DATE, CAST_FUNCTION_NAME, ImmutableList.of(new Variable(timestampTzColumnSymbol, columnType)));
+
+ LocalDate someDate = LocalDate.of(2005, 9, 10);
+ ConnectorExpression someDateExpression = new Constant(someDate.toEpochDay(), DATE);
+
+ long startOfDateUtcEpochMillis = someDate.atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND;
+ LongTimestampWithTimeZone startOfDateUtc = timestampTzMicrosFromEpochMillis(startOfDateUtcEpochMillis);
+ LongTimestampWithTimeZone startOfNextDateUtc = timestampTzMicrosFromEpochMillis(startOfDateUtcEpochMillis + MILLISECONDS_PER_DAY);
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.range(columnType, startOfDateUtc, true, startOfNextDateUtc, false)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, NOT_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(
+ Range.lessThan(columnType, startOfDateUtc),
+ Range.greaterThanOrEqual(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, LESS_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.lessThan(columnType, startOfDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, LESS_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.lessThan(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, GREATER_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, GREATER_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, IS_DISTINCT_FROM_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, Domain.create(
+ ValueSet.ofRanges(
+ Range.lessThan(columnType, startOfDateUtc),
+ Range.greaterThanOrEqual(columnType, startOfNextDateUtc)),
+ true))));
+ }
+
+ /**
+ * Test equivalent of {@code io.trino.sql.planner.iterative.rule.UnwrapDateTruncInComparison} for {@link TimestampWithTimeZoneType}.
+ * {@code UnwrapDateTruncInComparison} handles {@link DateType} and {@link TimestampType}, but cannot handle
+ * {@link TimestampWithTimeZoneType}. Such unwrap would not be monotonic. If we know
+ * that {@link TimestampWithTimeZoneType} is always in UTC zone (point in time, with no time zone information),
+ * we can unwrap.
+ */
+ @Test
+ public void testExtractDateTruncTimestampTzMillisComparison()
+ {
+ String timestampTzColumnSymbol = "timestamp_tz_symbol";
+ TimestampWithTimeZoneType columnType = TIMESTAMP_TZ_MILLIS;
+ ColumnHandle columnHandle = new TestingColumnHandle(timestampTzColumnSymbol);
+
+ ConnectorExpression truncateToDay = new Call(
+ columnType,
+ new FunctionName("date_trunc"),
+ ImmutableList.of(
+ new Constant(utf8Slice("day"), createVarcharType(17)),
+ new Variable(timestampTzColumnSymbol, columnType)));
+
+ LocalDate someDate = LocalDate.of(2005, 9, 10);
+ ConnectorExpression someMidnightExpression = new Constant(
+ timestampTzMillisFromEpochMillis(someDate.toEpochDay() * MILLISECONDS_PER_DAY),
+ columnType);
+ ConnectorExpression someMiddayExpression = new Constant(
+ timestampTzMillisFromEpochMillis(someDate.toEpochDay() * MILLISECONDS_PER_DAY + MILLISECONDS_PER_DAY / 2),
+ columnType);
+
+ long startOfDateUtcEpochMillis = someDate.atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND;
+ long startOfDateUtc = timestampTzMillisFromEpochMillis(startOfDateUtcEpochMillis);
+ long startOfNextDateUtc = timestampTzMillisFromEpochMillis(startOfDateUtcEpochMillis + MILLISECONDS_PER_DAY);
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.range(columnType, startOfDateUtc, true, startOfNextDateUtc, false)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMiddayExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.none());
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, NOT_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(
+ Range.lessThan(columnType, startOfDateUtc),
+ Range.greaterThanOrEqual(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, LESS_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.lessThan(columnType, startOfDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, LESS_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.lessThan(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, GREATER_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, GREATER_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, IS_DISTINCT_FROM_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, Domain.create(
+ ValueSet.ofRanges(
+ Range.lessThan(columnType, startOfDateUtc),
+ Range.greaterThanOrEqual(columnType, startOfNextDateUtc)),
+ true))));
+ }
+
+ /**
+ * Test equivalent of {@code io.trino.sql.planner.iterative.rule.UnwrapDateTruncInComparison} for {@link TimestampWithTimeZoneType}.
+ * {@code UnwrapDateTruncInComparison} handles {@link DateType} and {@link TimestampType}, but cannot handle
+ * {@link TimestampWithTimeZoneType}. Such unwrap would not be monotonic. If we know
+ * that {@link TimestampWithTimeZoneType} is always in UTC zone (point in time, with no time zone information),
+ * we can unwrap.
+ */
+ @Test
+ public void testExtractDateTruncTimestampTzMicrosComparison()
+ {
+ String timestampTzColumnSymbol = "timestamp_tz_symbol";
+ TimestampWithTimeZoneType columnType = TIMESTAMP_TZ_MICROS;
+ ColumnHandle columnHandle = new TestingColumnHandle(timestampTzColumnSymbol);
+
+ ConnectorExpression truncateToDay = new Call(
+ columnType,
+ new FunctionName("date_trunc"),
+ ImmutableList.of(
+ new Constant(utf8Slice("day"), createVarcharType(17)),
+ new Variable(timestampTzColumnSymbol, columnType)));
+
+ LocalDate someDate = LocalDate.of(2005, 9, 10);
+ ConnectorExpression someMidnightExpression = new Constant(
+ LongTimestampWithTimeZone.fromEpochMillisAndFraction(someDate.toEpochDay() * MILLISECONDS_PER_DAY, 0, UTC_KEY),
+ columnType);
+ ConnectorExpression someMiddayExpression = new Constant(
+ LongTimestampWithTimeZone.fromEpochMillisAndFraction(someDate.toEpochDay() * MILLISECONDS_PER_DAY, PICOSECONDS_PER_MICROSECOND, UTC_KEY),
+ columnType);
+
+ long startOfDateUtcEpochMillis = someDate.atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND;
+ LongTimestampWithTimeZone startOfDateUtc = timestampTzMicrosFromEpochMillis(startOfDateUtcEpochMillis);
+ LongTimestampWithTimeZone startOfNextDateUtc = timestampTzMicrosFromEpochMillis(startOfDateUtcEpochMillis + MILLISECONDS_PER_DAY);
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.range(columnType, startOfDateUtc, true, startOfNextDateUtc, false)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMiddayExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.none());
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, NOT_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(
+ Range.lessThan(columnType, startOfDateUtc),
+ Range.greaterThanOrEqual(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, LESS_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.lessThan(columnType, startOfDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, LESS_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.lessThan(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, GREATER_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, GREATER_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, IS_DISTINCT_FROM_OPERATOR_FUNCTION_NAME, ImmutableList.of(truncateToDay, someMidnightExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(columnHandle, Domain.create(
+ ValueSet.ofRanges(
+ Range.lessThan(columnType, startOfDateUtc),
+ Range.greaterThanOrEqual(columnType, startOfNextDateUtc)),
+ true))));
+ }
+
+ /**
+ * Test equivalent of {@code io.trino.sql.planner.iterative.rule.UnwrapYearInComparison} for {@link TimestampWithTimeZoneType}.
+ * {@code UnwrapYearInComparison} handles {@link DateType} and {@link TimestampType}, but cannot handle
+ * {@link TimestampWithTimeZoneType}. Such unwrap would not be monotonic. If we know
+ * that {@link TimestampWithTimeZoneType} is always in UTC zone (point in time, with no time zone information),
+ * we can unwrap.
+ */
+ @Test
+ public void testExtractYearTimestampTzMicrosComparison()
+ {
+ String timestampTzColumnSymbol = "timestamp_tz_symbol";
+ TimestampWithTimeZoneType columnType = TIMESTAMP_TZ_MICROS;
+ TestingColumnHandle columnHandle = new TestingColumnHandle(timestampTzColumnSymbol);
+
+ ConnectorExpression extractYear = new Call(
+ BIGINT,
+ new FunctionName("year"),
+ ImmutableList.of(new Variable(timestampTzColumnSymbol, columnType)));
+
+ LocalDate someDate = LocalDate.of(2005, 9, 10);
+ ConnectorExpression yearExpression = new Constant(2005L, BIGINT);
+
+ long startOfYearUtcEpochMillis = someDate.withDayOfYear(1).atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND;
+ LongTimestampWithTimeZone startOfYearUtc = timestampTzMicrosFromEpochMillis(startOfYearUtcEpochMillis);
+ LongTimestampWithTimeZone startOfNextDateUtc = timestampTzMicrosFromEpochMillis(someDate.plusYears(1).withDayOfYear(1).atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND);
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, domain(Range.range(columnType, startOfYearUtc, true, startOfNextDateUtc, false)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, NOT_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, domain(
+ Range.lessThan(columnType, startOfYearUtc),
+ Range.greaterThanOrEqual(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, LESS_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, domain(Range.lessThan(columnType, startOfYearUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, LESS_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, domain(Range.lessThan(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, GREATER_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, GREATER_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfYearUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, IS_DISTINCT_FROM_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, Domain.create(
+ ValueSet.ofRanges(
+ Range.lessThan(columnType, startOfYearUtc),
+ Range.greaterThanOrEqual(columnType, startOfNextDateUtc)),
+ true))));
+ }
+
+ /**
+ * Test equivalent of {@code io.trino.sql.planner.iterative.rule.UnwrapYearInComparison} for {@link TimestampWithTimeZoneType}.
+ * {@code UnwrapYearInComparison} handles {@link DateType} and {@link TimestampType}, but cannot handle
+ * {@link TimestampWithTimeZoneType}. Such unwrap would not be monotonic. If we know
+ * that {@link TimestampWithTimeZoneType} is always in UTC zone (point in time, with no time zone information),
+ * we can unwrap.
+ */
+ @Test
+ public void testExtractYearTimestampTzMillisComparison()
+ {
+ String timestampTzColumnSymbol = "timestamp_tz_symbol";
+ TimestampWithTimeZoneType columnType = TIMESTAMP_TZ_MILLIS;
+ TestingColumnHandle columnHandle = new TestingColumnHandle(timestampTzColumnSymbol);
+
+ ConnectorExpression extractYear = new Call(
+ BIGINT,
+ new FunctionName("year"),
+ ImmutableList.of(new Variable(timestampTzColumnSymbol, columnType)));
+
+ LocalDate someDate = LocalDate.of(2005, 9, 10);
+ ConnectorExpression yearExpression = new Constant(2005L, BIGINT);
+
+ long startOfYearUtcEpochMillis = someDate.withDayOfYear(1).atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND;
+ long startOfYearUtc = timestampTzMillisFromEpochMillis(startOfYearUtcEpochMillis);
+ long startOfNextDateUtc = timestampTzMillisFromEpochMillis(someDate.plusYears(1).withDayOfYear(1).atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND);
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, domain(Range.range(columnType, startOfYearUtc, true, startOfNextDateUtc, false)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, NOT_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, domain(
+ Range.lessThan(columnType, startOfYearUtc),
+ Range.greaterThanOrEqual(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, LESS_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, domain(Range.lessThan(columnType, startOfYearUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, LESS_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, domain(Range.lessThan(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, GREATER_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfNextDateUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, GREATER_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfYearUtc)))));
+
+ assertThat(extract(
+ constraint(
+ new Call(BOOLEAN, IS_DISTINCT_FROM_OPERATOR_FUNCTION_NAME, ImmutableList.of(extractYear, yearExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of((ColumnHandle) columnHandle, Domain.create(
+ ValueSet.ofRanges(
+ Range.lessThan(columnType, startOfYearUtc),
+ Range.greaterThanOrEqual(columnType, startOfNextDateUtc)),
+ true))));
+ }
+
+ @Test
+ public void testIntersectSummaryAndExpressionExtraction()
+ {
+ String timestampTzColumnSymbol = "timestamp_tz_symbol";
+ TimestampWithTimeZoneType columnType = TIMESTAMP_TZ_MICROS;
+ TestingColumnHandle columnHandle = new TestingColumnHandle(timestampTzColumnSymbol);
+
+ ConnectorExpression castOfColumn = new Call(DATE, CAST_FUNCTION_NAME, ImmutableList.of(new Variable(timestampTzColumnSymbol, columnType)));
+
+ LocalDate someDate = LocalDate.of(2005, 9, 10);
+ ConnectorExpression someDateExpression = new Constant(someDate.toEpochDay(), DATE);
+
+ long startOfDateUtcEpochMillis = someDate.atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND;
+ LongTimestampWithTimeZone startOfDateUtc = timestampTzMicrosFromEpochMillis(startOfDateUtcEpochMillis);
+ LongTimestampWithTimeZone startOfNextDateUtc = timestampTzMicrosFromEpochMillis(startOfDateUtcEpochMillis + MILLISECONDS_PER_DAY);
+ LongTimestampWithTimeZone startOfNextNextDateUtc = timestampTzMicrosFromEpochMillis(startOfDateUtcEpochMillis + MILLISECONDS_PER_DAY * 2);
+
+ assertThat(extract(
+ constraint(
+ TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.lessThan(columnType, startOfNextNextDateUtc)))),
+ new Call(BOOLEAN, NOT_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(
+ (ColumnHandle) columnHandle, domain(
+ Range.lessThan(columnType, startOfDateUtc),
+ Range.range(columnType, startOfNextDateUtc, true, startOfNextNextDateUtc, false)))));
+
+ assertThat(extract(
+ constraint(
+ TupleDomain.withColumnDomains(Map.of(columnHandle, domain(Range.lessThan(columnType, startOfNextDateUtc)))),
+ new Call(BOOLEAN, GREATER_THAN_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.none());
+
+ assertThat(extract(
+ constraint(
+ TupleDomain.withColumnDomains(Map.of(A_BIGINT, Domain.singleValue(BIGINT, 1L))),
+ new Call(BOOLEAN, GREATER_THAN_OR_EQUAL_OPERATOR_FUNCTION_NAME, ImmutableList.of(castOfColumn, someDateExpression)),
+ Map.of(timestampTzColumnSymbol, columnHandle))))
+ .isEqualTo(TupleDomain.withColumnDomains(Map.of(
+ A_BIGINT, Domain.singleValue(BIGINT, 1L),
+ columnHandle, domain(Range.greaterThanOrEqual(columnType, startOfDateUtc)))));
+ }
+
+ private static TupleDomain<ColumnHandle> extract(Constraint constraint)
+ {
+ UtcConstraintExtractor.ExtractionResult result = extractTupleDomain(constraint);
+ assertThat(result.remainingExpression())
+ .isEqualTo(Constant.TRUE);
+ return result.tupleDomain();
+ }
+
+ private static Constraint constraint(ConnectorExpression expression, Map<String, ColumnHandle> assignments)
+ {
+ return constraint(TupleDomain.all(), expression, assignments);
+ }
+
+ private static Constraint constraint(TupleDomain<ColumnHandle> summary, ConnectorExpression expression, Map<String, ColumnHandle> assignments)
+ {
+ return new Constraint(summary, expression, assignments);
+ }
+
+ private static long timestampTzMillisFromEpochMillis(long epochMillis)
+ {
+ return packDateTimeWithZone(epochMillis, UTC_KEY);
+ }
+
+ private static LongTimestampWithTimeZone timestampTzMicrosFromEpochMillis(long epochMillis)
+ {
+ return LongTimestampWithTimeZone.fromEpochMillisAndFraction(epochMillis, 0, UTC_KEY);
+ }
+
+ private static Domain domain(Range first, Range... rest)
+ {
+ return Domain.create(ValueSet.ofRanges(first, rest), false);
+ }
+}
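The tests above exercise both the short (millis) and the long (micros) runtime representation of TIMESTAMP WITH TIME ZONE, which is why the extractor now branches on precision. A small standalone illustration, assuming only Trino SPI types, of producing a range bound in the representation the engine expects for a given precision:

    import io.trino.spi.type.LongTimestampWithTimeZone;
    import io.trino.spi.type.TimestampWithTimeZoneType;

    import static io.trino.spi.type.DateTimeEncoding.packDateTimeWithZone;
    import static io.trino.spi.type.TimeZoneKey.UTC_KEY;
    import static io.trino.spi.type.TimestampWithTimeZoneType.MAX_SHORT_PRECISION;

    public final class TimestampTzBoundExample
    {
        private TimestampTzBoundExample() {}

        // Returns the UTC instant in the native representation for the given type, matching what
        // UtcConstraintExtractor feeds into Range.range(...): precisions up to MAX_SHORT_PRECISION (3)
        // pack epoch millis and the zone key into a single long, while higher precisions use
        // LongTimestampWithTimeZone with a picosecond-of-milli fraction.
        public static Object boundForType(TimestampWithTimeZoneType type, long epochMillis)
        {
            if (type.getPrecision() <= MAX_SHORT_PRECISION) {
                return packDateTimeWithZone(epochMillis, UTC_KEY);
            }
            return LongTimestampWithTimeZone.fromEpochMillisAndFraction(epochMillis, 0, UTC_KEY);
        }
    }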
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadata.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadata.java
index 42f790c003eb..6d663016204c 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadata.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadata.java
@@ -32,6 +32,7 @@
import io.trino.filesystem.TrinoFileSystem;
import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.base.classloader.ClassLoaderSafeSystemTable;
+import io.trino.plugin.base.filter.UtcConstraintExtractor;
import io.trino.plugin.base.projection.ApplyProjectionUtil;
import io.trino.plugin.deltalake.DeltaLakeAnalyzeProperties.AnalyzeMode;
import io.trino.plugin.deltalake.expression.ParsingException;
@@ -175,6 +176,7 @@
import static com.google.common.collect.Sets.difference;
import static com.google.common.primitives.Ints.max;
import static io.trino.filesystem.Locations.appendPath;
+import static io.trino.plugin.base.filter.UtcConstraintExtractor.extractTupleDomain;
import static io.trino.plugin.base.projection.ApplyProjectionUtil.ProjectedColumnRepresentation;
import static io.trino.plugin.base.projection.ApplyProjectionUtil.extractSupportedProjectedColumns;
import static io.trino.plugin.base.projection.ApplyProjectionUtil.replaceWithNewVariables;
@@ -2694,31 +2696,56 @@ public Optional> applyFilter(C
DeltaLakeTableHandle tableHandle = (DeltaLakeTableHandle) handle;
SchemaTableName tableName = tableHandle.getSchemaTableName();
- Set<DeltaLakeColumnHandle> partitionColumns = ImmutableSet.copyOf(extractPartitionColumns(tableHandle.getMetadataEntry(), tableHandle.getProtocolEntry(), typeManager));
- Map<ColumnHandle, Domain> constraintDomains = constraint.getSummary().getDomains().orElseThrow(() -> new IllegalArgumentException("constraint summary is NONE"));
+ checkArgument(constraint.getSummary().getDomains().isPresent(), "constraint summary is NONE");
- ImmutableMap.Builder<DeltaLakeColumnHandle, Domain> enforceableDomains = ImmutableMap.builder();
- ImmutableMap.Builder<DeltaLakeColumnHandle, Domain> unenforceableDomains = ImmutableMap.builder();
- ImmutableSet.Builder<DeltaLakeColumnHandle> constraintColumns = ImmutableSet.builder();
- // We need an additional field to track partition columns used in queries, because enforceableDomains does not seem to catch
- // cases where a partition column is used within a complex filter such as 'partitionColumn % 2 = 0'
- constraint.getPredicateColumns().stream()
- .flatMap(Collection::stream)
- .map(DeltaLakeColumnHandle.class::cast)
- .forEach(constraintColumns::add);
- for (Entry<ColumnHandle, Domain> domainEntry : constraintDomains.entrySet()) {
- DeltaLakeColumnHandle column = (DeltaLakeColumnHandle) domainEntry.getKey();
- if (!partitionColumns.contains(column)) {
- unenforceableDomains.put(column, domainEntry.getValue());
- }
- else {
- enforceableDomains.put(column, domainEntry.getValue());
+ UtcConstraintExtractor.ExtractionResult extractionResult = extractTupleDomain(constraint);
+ TupleDomain<ColumnHandle> predicate = extractionResult.tupleDomain();
+
+ if (predicate.isAll() && constraint.getPredicateColumns().isEmpty()) {
+ return Optional.empty();
+ }
+
+ TupleDomain<DeltaLakeColumnHandle> newEnforcedConstraint;
+ TupleDomain<DeltaLakeColumnHandle> newUnenforcedConstraint;
+ Set<DeltaLakeColumnHandle> newConstraintColumns;
+ if (predicate.isNone()) {
+ // The engine does not pass a none Constraint.summary, but the extracted predicate can become none when the summary is combined with the expression and the connector's domain knowledge.
+ newEnforcedConstraint = TupleDomain.none();
+ newUnenforcedConstraint = TupleDomain.all();
+ newConstraintColumns = constraint.getPredicateColumns().stream()
+ .flatMap(Collection::stream)
+ .map(DeltaLakeColumnHandle.class::cast)
+ .collect(toImmutableSet());
+ }
+ else {
+ Set<DeltaLakeColumnHandle> partitionColumns = ImmutableSet.copyOf(extractPartitionColumns(tableHandle.getMetadataEntry(), tableHandle.getProtocolEntry(), typeManager));
+ Map<ColumnHandle, Domain> constraintDomains = predicate.getDomains().orElseThrow();
+
+ ImmutableMap.Builder<DeltaLakeColumnHandle, Domain> enforceableDomains = ImmutableMap.builder();
+ ImmutableMap.Builder<DeltaLakeColumnHandle, Domain> unenforceableDomains = ImmutableMap.builder();
+ ImmutableSet.Builder<DeltaLakeColumnHandle> constraintColumns = ImmutableSet.builder();
+ // We need an additional field to track partition columns used in queries, because enforceableDomains does not seem to catch
+ // cases where a partition column is used within a complex filter such as 'partitionColumn % 2 = 0'
+ constraint.getPredicateColumns().stream()
+ .flatMap(Collection::stream)
+ .map(DeltaLakeColumnHandle.class::cast)
+ .forEach(constraintColumns::add);
+ for (Entry<ColumnHandle, Domain> domainEntry : constraintDomains.entrySet()) {
+ DeltaLakeColumnHandle column = (DeltaLakeColumnHandle) domainEntry.getKey();
+ if (!partitionColumns.contains(column)) {
+ unenforceableDomains.put(column, domainEntry.getValue());
+ }
+ else {
+ enforceableDomains.put(column, domainEntry.getValue());
+ }
+ constraintColumns.add(column);
}
- constraintColumns.add(column);
+
+ newEnforcedConstraint = TupleDomain.withColumnDomains(enforceableDomains.buildOrThrow());
+ newUnenforcedConstraint = TupleDomain.withColumnDomains(unenforceableDomains.buildOrThrow());
+ newConstraintColumns = constraintColumns.build();
}
- TupleDomain<DeltaLakeColumnHandle> newEnforcedConstraint = TupleDomain.withColumnDomains(enforceableDomains.buildOrThrow());
- TupleDomain<DeltaLakeColumnHandle> newUnenforcedConstraint = TupleDomain.withColumnDomains(unenforceableDomains.buildOrThrow());
DeltaLakeTableHandle newHandle = new DeltaLakeTableHandle(
tableName.getSchemaName(),
tableName.getTableName(),
@@ -2733,7 +2760,7 @@ public Optional> applyFilter(C
tableHandle.getNonPartitionConstraint()
.intersect(newUnenforcedConstraint)
.simplify(domainCompactionThreshold),
- Sets.union(tableHandle.getConstraintColumns(), constraintColumns.build()),
+ Sets.union(tableHandle.getConstraintColumns(), newConstraintColumns),
tableHandle.getWriteType(),
tableHandle.getProjectedColumns(),
tableHandle.getUpdatedColumns(),
@@ -2753,7 +2780,7 @@ public Optional> applyFilter(C
return Optional.of(new ConstraintApplicationResult<>(
newHandle,
newUnenforcedConstraint.transformKeys(ColumnHandle.class::cast),
- constraint.getExpression(),
+ extractionResult.remainingExpression(),
false));
}
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeConnectorTest.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeConnectorTest.java
index 5a18909846b4..52b6e166372c 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeConnectorTest.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeConnectorTest.java
@@ -23,6 +23,7 @@
import io.trino.plugin.deltalake.transactionlog.DeltaLakeSchemaSupport.ColumnMappingMode;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.tpch.TpchPlugin;
+import io.trino.sql.planner.plan.FilterNode;
import io.trino.sql.planner.plan.TableDeleteNode;
import io.trino.sql.planner.plan.TableFinishNode;
import io.trino.sql.planner.plan.TableWriterNode;
@@ -61,6 +62,7 @@
import static io.trino.plugin.deltalake.transactionlog.TransactionLogUtil.TRANSACTION_LOG_DIRECTORY;
import static io.trino.plugin.hive.metastore.file.TestingFileHiveMetastore.createTestingFileHiveMetastore;
import static io.trino.plugin.tpch.TpchMetadata.TINY_SCHEMA_NAME;
+import static io.trino.spi.type.TimeZoneKey.getTimeZoneKey;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static io.trino.sql.planner.optimizations.PlanNodeSearcher.searchFrom;
import static io.trino.testing.MaterializedResult.resultBuilder;
@@ -629,6 +631,72 @@ public void testTimestampWithTimeZonePartition()
assertUpdate("DROP TABLE " + tableName);
}
+ @Test
+ public void testTimestampWithTimeZoneOptimization()
+ {
+ String tableName = "test_timestamp_tz_optimization_" + randomNameSuffix();
+
+ assertUpdate("CREATE TABLE " + tableName + "(id INT, part TIMESTAMP WITH TIME ZONE) WITH (partitioned_by = ARRAY['part'])");
+ assertUpdate(
+ "INSERT INTO " + tableName + " VALUES " +
+ "(1, NULL)," +
+ "(2, TIMESTAMP '0001-01-01 00:00:00.000 UTC')," +
+ "(3, TIMESTAMP '2023-11-21 09:19:00.000 +02:00')," +
+ "(4, TIMESTAMP '2005-09-10 13:00:00.000 UTC')",
+ 4);
+
+ // date_trunc optimization
+ assertThat(query("SELECT * FROM " + tableName + " WHERE date_trunc('day', part) >= TIMESTAMP '2005-09-10 07:00:00.000 +07:00'"))
+ .isFullyPushedDown()
+ .matches("VALUES " +
+ "(3, TIMESTAMP '2023-11-21 07:19:00.000 UTC')," +
+ "(4, TIMESTAMP '2005-09-10 13:00:00.000 UTC')");
+
+ assertThat(query("SELECT * FROM " + tableName + " WHERE date_trunc('day', part) = TIMESTAMP '2005-09-10 00:00:00.000 +07:00'"))
+ .isReplacedWithEmptyValues();
+
+ assertThat(query("SELECT * FROM " + tableName + " WHERE date_trunc('hour', part) >= TIMESTAMP '2005-09-10 13:00:00.001 +00:00'"))
+ .isFullyPushedDown()
+ .matches("VALUES " +
+ "(3, TIMESTAMP '2023-11-21 07:19:00.000 UTC')");
+
+ // the DATE is upcast to timestamp_tz using the session time zone (Asia/Kathmandu).
+ // part is in UTC, so there is no match for date_trunc.
+ assertThat(query(
+ Session.builder(getSession())
+ .setTimeZoneKey(getTimeZoneKey("Asia/Kathmandu"))
+ .build(),
+ "SELECT * FROM " + tableName + " WHERE date_trunc('day', part) = DATE '2005-09-10'"))
+ .isReplacedWithEmptyValues();
+
+ assertThat(query("SELECT * FROM " + tableName + " WHERE date_trunc('week', part) >= TIMESTAMP '2005-09-10 00:00:00.000 +00:00'"))
+ .isNotFullyPushedDown(FilterNode.class);
+
+ // cast timestamp_tz as DATE optimization
+ assertThat(query("SELECT * FROM " + tableName + " WHERE cast(part AS date) >= DATE '2005-09-10'"))
+ .isFullyPushedDown()
+ .matches("VALUES " +
+ "(3, TIMESTAMP '2023-11-21 07:19:00.000 UTC')," +
+ "(4, TIMESTAMP '2005-09-10 13:00:00.000 UTC')");
+
+ assertThat(query("SELECT * FROM " + tableName + " WHERE cast(part AS date) = DATE '2005-10-10'"))
+ .isFullyPushedDown()
+ .returnsEmptyResult();
+
+ // year function optimization
+ assertThat(query("SELECT * FROM " + tableName + " WHERE year(part) >= 2005"))
+ .isFullyPushedDown()
+ .matches("VALUES " +
+ "(3, TIMESTAMP '2023-11-21 07:19:00.000 UTC')," +
+ "(4, TIMESTAMP '2005-09-10 13:00:00.000 UTC')");
+
+ assertThat(query("SELECT * FROM " + tableName + " WHERE year(part) = 2006"))
+ .isFullyPushedDown()
+ .returnsEmptyResult();
+
+ assertUpdate("DROP TABLE " + tableName);
+ }
+
@Test
public void testAddColumnToPartitionedTable()
{
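
A standalone java.time check (illustrative only, not part of the patch) of why the date_trunc('day', part) = TIMESTAMP '2005-09-10 00:00:00.000 +07:00' query in the new test above is replaced with empty Values: the constant is not aligned to a UTC day boundary, so the unwrapped domain is empty.

    import java.time.Instant;
    import java.time.ZonedDateTime;
    import java.time.temporal.ChronoUnit;

    class UtcDayBoundaryCheck
    {
        public static void main(String[] args)
        {
            // TIMESTAMP '2005-09-10 00:00:00.000 +07:00' as an instant
            Instant utc = ZonedDateTime.parse("2005-09-10T00:00+07:00").toInstant();
            System.out.println(utc); // 2005-09-09T17:00:00Z, not a UTC midnight
            // date_trunc('day', part) always yields a UTC midnight, so the equality can never hold
            System.out.println(utc.equals(utc.truncatedTo(ChronoUnit.DAYS))); // false
        }
    }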
@@ -2851,11 +2919,11 @@ public void testRequiredPartitionFilterOnJoin()
"test_partition_left_",
"(x varchar, part varchar)",
ImmutableList.of("('a', 'part_a')"));
- TestTable rightTable = new TestTable(
- new TrinoSqlExecutor(getQueryRunner(), session),
- "test_partition_right_",
- "(x varchar, part varchar) WITH (partitioned_by = ARRAY['part'])",
- ImmutableList.of("('a', 'part_a')"))) {
+ TestTable rightTable = new TestTable(
+ new TrinoSqlExecutor(getQueryRunner(), session),
+ "test_partition_right_",
+ "(x varchar, part varchar) WITH (partitioned_by = ARRAY['part'])",
+ ImmutableList.of("('a', 'part_a')"))) {
assertQueryFails(
session,
"SELECT a.x, b.x from %s a JOIN %s b on (a.x = b.x) where a.x = 'a'".formatted(leftTable.getName(), rightTable.getName()),
@@ -2877,11 +2945,11 @@ public void testRequiredPartitionFilterOnJoinBothTablePartitioned()
"test_partition_inferred_left_",
"(x varchar, part varchar) WITH (partitioned_by = ARRAY['part'])",
ImmutableList.of("('a', 'part_a')"));
- TestTable rightTable = new TestTable(
- new TrinoSqlExecutor(getQueryRunner(), session),
- "test_partition_inferred_right_",
- "(x varchar, part varchar) WITH (partitioned_by = ARRAY['part'])",
- ImmutableList.of("('a', 'part_a')"))) {
+ TestTable rightTable = new TestTable(
+ new TrinoSqlExecutor(getQueryRunner(), session),
+ "test_partition_inferred_right_",
+ "(x varchar, part varchar) WITH (partitioned_by = ARRAY['part'])",
+ ImmutableList.of("('a', 'part_a')"))) {
assertQueryFails(
session,
"SELECT a.x, b.x from %s a JOIN %s b on (a.x = b.x) where a.x = 'a'".formatted(leftTable.getName(), rightTable.getName()),
@@ -2942,7 +3010,7 @@ public void testPartitionPredicateFilterAndAnalyzeOnPartitionedTable()
"(x integer, part integer) WITH (partitioned_by = ARRAY['part'])",
ImmutableList.of("(1, 11)", "(2, 22)"))) {
String expectedMessageRegExp = "ANALYZE statement can not be performed on partitioned tables because filtering is required on at least one partition." +
- " However, the partition filtering check can be disabled with the catalog session property 'query_partition_filter_required'.";
+ " However, the partition filtering check can be disabled with the catalog session property 'query_partition_filter_required'.";
assertQueryFails(session, "ANALYZE " + table.getName(), expectedMessageRegExp);
assertQueryFails(session, "EXPLAIN ANALYZE " + table.getName(), expectedMessageRegExp);
}
@@ -3068,23 +3136,23 @@ public void testPartitionFilterRequiredAndWriteOperation()
assertUpdate(session, "UPDATE " + table.getName() + " SET x = 20 WHERE part = 22", 1);
assertQueryFails(session, "MERGE INTO " + table.getName() + " t " +
- "USING (SELECT * FROM (VALUES (3, 99), (4,44))) AS s(x, part) " +
- "ON t.x = s.x " +
- "WHEN MATCHED THEN DELETE ", expectedMessageRegExp);
+ "USING (SELECT * FROM (VALUES (3, 99), (4,44))) AS s(x, part) " +
+ "ON t.x = s.x " +
+ "WHEN MATCHED THEN DELETE ", expectedMessageRegExp);
assertUpdate(session, "MERGE INTO " + table.getName() + " t " +
- "USING (SELECT * FROM (VALUES (2, 22), (4 , 44))) AS s(x, part) " +
- "ON (t.part = s.part) " +
- "WHEN MATCHED THEN UPDATE " +
- " SET x = t.x + s.x, part = t.part ", 1);
+ "USING (SELECT * FROM (VALUES (2, 22), (4 , 44))) AS s(x, part) " +
+ "ON (t.part = s.part) " +
+ "WHEN MATCHED THEN UPDATE " +
+ " SET x = t.x + s.x, part = t.part ", 1);
assertQueryFails(session, "MERGE INTO " + table.getName() + " t " +
- "USING (SELECT * FROM (VALUES (4,44))) AS s(x, part) " +
- "ON t.x = s.x " +
- "WHEN NOT MATCHED THEN INSERT (x, part) VALUES(s.x, s.part) ", expectedMessageRegExp);
+ "USING (SELECT * FROM (VALUES (4,44))) AS s(x, part) " +
+ "ON t.x = s.x " +
+ "WHEN NOT MATCHED THEN INSERT (x, part) VALUES(s.x, s.part) ", expectedMessageRegExp);
assertUpdate(session, "MERGE INTO " + table.getName() + " t " +
- "USING (SELECT * FROM (VALUES (4, 44))) AS s(x, part) " +
- "ON (t.part = s.part) " +
- "WHEN NOT MATCHED THEN INSERT (x, part) VALUES(s.x, s.part) ", 1);
+ "USING (SELECT * FROM (VALUES (4, 44))) AS s(x, part) " +
+ "ON (t.part = s.part) " +
+ "WHEN NOT MATCHED THEN INSERT (x, part) VALUES(s.x, s.part) ", 1);
assertQueryFails(session, "DELETE FROM " + table.getName() + " WHERE x = 3", expectedMessageRegExp);
assertUpdate(session, "DELETE FROM " + table.getName() + " WHERE part = 33 and x = 3", 1);
@@ -3201,11 +3269,11 @@ public void testTrinoCacheInvalidatedOnCreateTable()
String tableLocation = "s3://%s/%s/%s".formatted(bucketName, SCHEMA, tableName);
String initialValues = "VALUES" +
- " (1, BOOLEAN 'false', TINYINT '-128')" +
- ",(2, BOOLEAN 'true', TINYINT '127')" +
- ",(3, BOOLEAN 'false', TINYINT '0')" +
- ",(4, BOOLEAN 'false', TINYINT '1')" +
- ",(5, BOOLEAN 'true', TINYINT '37')";
+ " (1, BOOLEAN 'false', TINYINT '-128')" +
+ ",(2, BOOLEAN 'true', TINYINT '127')" +
+ ",(3, BOOLEAN 'false', TINYINT '0')" +
+ ",(4, BOOLEAN 'false', TINYINT '1')" +
+ ",(5, BOOLEAN 'true', TINYINT '37')";
assertUpdate("CREATE TABLE " + tableName + "(id, boolean, tinyint) WITH (location = '" + tableLocation + "') AS " + initialValues, 5);
assertThat(query("SELECT * FROM " + tableName)).matches(initialValues);
@@ -3215,13 +3283,13 @@ public void testTrinoCacheInvalidatedOnCreateTable()
}
String newValues = "VALUES" +
- " (1, BOOLEAN 'true', TINYINT '1')" +
- ",(2, BOOLEAN 'true', TINYINT '1')" +
- ",(3, BOOLEAN 'false', TINYINT '2')" +
- ",(4, BOOLEAN 'true', TINYINT '3')" +
- ",(5, BOOLEAN 'true', TINYINT '5')" +
- ",(6, BOOLEAN 'false', TINYINT '8')" +
- ",(7, BOOLEAN 'true', TINYINT '13')";
+ " (1, BOOLEAN 'true', TINYINT '1')" +
+ ",(2, BOOLEAN 'true', TINYINT '1')" +
+ ",(3, BOOLEAN 'false', TINYINT '2')" +
+ ",(4, BOOLEAN 'true', TINYINT '3')" +
+ ",(5, BOOLEAN 'true', TINYINT '5')" +
+ ",(6, BOOLEAN 'false', TINYINT '8')" +
+ ",(7, BOOLEAN 'true', TINYINT '13')";
assertUpdate("CREATE TABLE " + tableName + "(id, boolean, tinyint) WITH (location = '" + tableLocation + "') AS " + newValues, 7);
assertThat(query("SELECT * FROM " + tableName)).matches(newValues);
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java
index 41cd8c7bdd93..6e3f6e54b82f 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java
@@ -34,6 +34,7 @@
import io.trino.filesystem.TrinoFileSystem;
import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.base.classloader.ClassLoaderSafeSystemTable;
+import io.trino.plugin.base.filter.UtcConstraintExtractor;
import io.trino.plugin.base.projection.ApplyProjectionUtil;
import io.trino.plugin.base.projection.ApplyProjectionUtil.ProjectedColumnRepresentation;
import io.trino.plugin.hive.HiveWrittenPartitions;
@@ -195,10 +196,10 @@
import static com.google.common.collect.Iterables.getLast;
import static com.google.common.collect.Maps.transformValues;
import static com.google.common.collect.Sets.difference;
+import static io.trino.plugin.base.filter.UtcConstraintExtractor.extractTupleDomain;
import static io.trino.plugin.base.projection.ApplyProjectionUtil.extractSupportedProjectedColumns;
import static io.trino.plugin.base.projection.ApplyProjectionUtil.replaceWithNewVariables;
import static io.trino.plugin.base.util.Procedures.checkProcedureArgument;
-import static io.trino.plugin.iceberg.ConstraintExtractor.extractTupleDomain;
import static io.trino.plugin.iceberg.ExpressionConverter.isConvertableToIcebergExpression;
import static io.trino.plugin.iceberg.ExpressionConverter.toIcebergExpression;
import static io.trino.plugin.iceberg.IcebergAnalyzeProperties.getColumnNames;
@@ -2470,8 +2471,9 @@ public Optional<LimitApplicationResult<ConnectorTableHandle>> applyLimit(Connect
public Optional<ConstraintApplicationResult<ConnectorTableHandle>> applyFilter(ConnectorSession session, ConnectorTableHandle handle, Constraint constraint)
{
IcebergTableHandle table = (IcebergTableHandle) handle;
- ConstraintExtractor.ExtractionResult extractionResult = extractTupleDomain(constraint);
- TupleDomain<IcebergColumnHandle> predicate = extractionResult.tupleDomain();
+ UtcConstraintExtractor.ExtractionResult extractionResult = extractTupleDomain(constraint);
+ TupleDomain<IcebergColumnHandle> predicate = extractionResult.tupleDomain()
+ .transformKeys(IcebergColumnHandle.class::cast);
if (predicate.isAll() && constraint.getPredicateColumns().isEmpty()) {
return Optional.empty();
}
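
For orientation, the shape of the extraction result consumed above, reconstructed only from its usages in this patch (the authoritative definition lives in io.trino.plugin.base.filter.UtcConstraintExtractor; record-ness and field order here are assumptions):

    // Reconstructed from usage; treat as an assumption, not the actual source.
    public record ExtractionResult(
            TupleDomain<ColumnHandle> tupleDomain,        // the part converted into column domains
            ConnectorExpression remainingExpression)      // the part the engine must still evaluate
    {}

Because the shared extractor is keyed by the SPI ColumnHandle rather than IcebergColumnHandle, the connector narrows the keys back with transformKeys(IcebergColumnHandle.class::cast); that is the only change needed at this call site.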
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestConstraintExtractor.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestConstraintExtractor.java
deleted file mode 100644
index 0d2bc2cedef3..000000000000
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestConstraintExtractor.java
+++ /dev/null
@@ -1,425 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.iceberg;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import io.trino.security.AllowAllAccessControl;
-import io.trino.spi.connector.ColumnHandle;
-import io.trino.spi.connector.Constraint;
-import io.trino.spi.expression.ConnectorExpression;
-import io.trino.spi.expression.Constant;
-import io.trino.spi.predicate.Domain;
-import io.trino.spi.predicate.Range;
-import io.trino.spi.predicate.TupleDomain;
-import io.trino.spi.predicate.ValueSet;
-import io.trino.spi.type.DateType;
-import io.trino.spi.type.LongTimestampWithTimeZone;
-import io.trino.spi.type.TimestampType;
-import io.trino.spi.type.TimestampWithTimeZoneType;
-import io.trino.spi.type.Type;
-import io.trino.sql.planner.ConnectorExpressionTranslator;
-import io.trino.sql.planner.LiteralEncoder;
-import io.trino.sql.planner.Symbol;
-import io.trino.sql.planner.TypeProvider;
-import io.trino.sql.planner.iterative.rule.UnwrapCastInComparison;
-import io.trino.sql.planner.iterative.rule.UnwrapDateTruncInComparison;
-import io.trino.sql.planner.iterative.rule.UnwrapYearInComparison;
-import io.trino.sql.tree.Cast;
-import io.trino.sql.tree.ComparisonExpression;
-import io.trino.sql.tree.Expression;
-import io.trino.sql.tree.FunctionCall;
-import io.trino.sql.tree.SymbolReference;
-import io.trino.transaction.NoOpTransactionManager;
-import io.trino.transaction.TransactionId;
-import org.junit.jupiter.api.Test;
-
-import java.time.LocalDate;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import static com.google.common.collect.ImmutableMap.toImmutableMap;
-import static io.airlift.slice.Slices.utf8Slice;
-import static io.trino.SessionTestUtils.TEST_SESSION;
-import static io.trino.plugin.iceberg.ColumnIdentity.primitiveColumnIdentity;
-import static io.trino.plugin.iceberg.ConstraintExtractor.extractTupleDomain;
-import static io.trino.spi.type.BigintType.BIGINT;
-import static io.trino.spi.type.DateType.DATE;
-import static io.trino.spi.type.TimeZoneKey.UTC_KEY;
-import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_TZ_MICROS;
-import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_DAY;
-import static io.trino.spi.type.Timestamps.MILLISECONDS_PER_SECOND;
-import static io.trino.spi.type.Timestamps.PICOSECONDS_PER_MICROSECOND;
-import static io.trino.spi.type.VarcharType.VARCHAR;
-import static io.trino.spi.type.VarcharType.createVarcharType;
-import static io.trino.sql.analyzer.TypeSignatureProvider.fromTypes;
-import static io.trino.sql.analyzer.TypeSignatureTranslator.toSqlType;
-import static io.trino.sql.planner.TestingPlannerContext.PLANNER_CONTEXT;
-import static io.trino.sql.planner.TypeAnalyzer.createTestingTypeAnalyzer;
-import static io.trino.sql.tree.ComparisonExpression.Operator.EQUAL;
-import static io.trino.sql.tree.ComparisonExpression.Operator.GREATER_THAN;
-import static io.trino.sql.tree.ComparisonExpression.Operator.GREATER_THAN_OR_EQUAL;
-import static io.trino.sql.tree.ComparisonExpression.Operator.IS_DISTINCT_FROM;
-import static io.trino.sql.tree.ComparisonExpression.Operator.LESS_THAN;
-import static io.trino.sql.tree.ComparisonExpression.Operator.LESS_THAN_OR_EQUAL;
-import static io.trino.sql.tree.ComparisonExpression.Operator.NOT_EQUAL;
-import static java.time.ZoneOffset.UTC;
-import static org.assertj.core.api.Assertions.assertThat;
-
-public class TestConstraintExtractor
-{
- private static final LiteralEncoder LITERAL_ENCODER = new LiteralEncoder(PLANNER_CONTEXT);
-
- private static final AtomicInteger nextColumnId = new AtomicInteger(1);
-
- private static final IcebergColumnHandle A_BIGINT = newPrimitiveColumn(BIGINT);
- private static final IcebergColumnHandle A_TIMESTAMP_TZ = newPrimitiveColumn(TIMESTAMP_TZ_MICROS);
-
- @Test
- public void testExtractSummary()
- {
- assertThat(extract(
- new Constraint(
- TupleDomain.withColumnDomains(Map.of(A_BIGINT, Domain.singleValue(BIGINT, 1L))),
- Constant.TRUE,
- Map.of(),
- values -> {
- throw new AssertionError("should not be called");
- },
- Set.of(A_BIGINT))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_BIGINT, Domain.singleValue(BIGINT, 1L))));
- }
-
- /**
- * Test equivalent of {@link UnwrapCastInComparison} for {@link TimestampWithTimeZoneType}.
- * {@link UnwrapCastInComparison} handles {@link DateType} and {@link TimestampType}, but cannot handle
- * {@link TimestampWithTimeZoneType}. Such unwrap would not be monotonic. Within Iceberg, we know
- * that {@link TimestampWithTimeZoneType} is always in UTC zone (point in time, with no time zone information),
- * so we can unwrap.
- */
- @Test
- public void testExtractTimestampTzDateComparison()
- {
- String timestampTzColumnSymbol = "timestamp_tz_symbol";
- Cast castOfColumn = new Cast(new SymbolReference(timestampTzColumnSymbol), toSqlType(DATE));
-
- LocalDate someDate = LocalDate.of(2005, 9, 10);
- Expression someDateExpression = LITERAL_ENCODER.toExpression(someDate.toEpochDay(), DATE);
-
- long startOfDateUtcEpochMillis = someDate.atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND;
- LongTimestampWithTimeZone startOfDateUtc = timestampTzFromEpochMillis(startOfDateUtcEpochMillis);
- LongTimestampWithTimeZone startOfNextDateUtc = timestampTzFromEpochMillis(startOfDateUtcEpochMillis + MILLISECONDS_PER_DAY);
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(EQUAL, castOfColumn, someDateExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.range(TIMESTAMP_TZ_MICROS, startOfDateUtc, true, startOfNextDateUtc, false)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(NOT_EQUAL, castOfColumn, someDateExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(
- Range.lessThan(TIMESTAMP_TZ_MICROS, startOfDateUtc),
- Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(LESS_THAN, castOfColumn, someDateExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.lessThan(TIMESTAMP_TZ_MICROS, startOfDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(LESS_THAN_OR_EQUAL, castOfColumn, someDateExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.lessThan(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(GREATER_THAN, castOfColumn, someDateExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(GREATER_THAN_OR_EQUAL, castOfColumn, someDateExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(IS_DISTINCT_FROM, castOfColumn, someDateExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, Domain.create(
- ValueSet.ofRanges(
- Range.lessThan(TIMESTAMP_TZ_MICROS, startOfDateUtc),
- Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)),
- true))));
- }
-
- /**
- * Test equivalent of {@link UnwrapDateTruncInComparison} for {@link TimestampWithTimeZoneType}.
- * {@link UnwrapDateTruncInComparison} handles {@link DateType} and {@link TimestampType}, but cannot handle
- * {@link TimestampWithTimeZoneType}. Such unwrap would not be monotonic. Within Iceberg, we know
- * that {@link TimestampWithTimeZoneType} is always in UTC zone (point in time, with no time zone information),
- * so we can unwrap.
- */
- @Test
- public void testExtractDateTruncTimestampTzComparison()
- {
- String timestampTzColumnSymbol = "timestamp_tz_symbol";
- FunctionCall truncateToDay = new FunctionCall(
- PLANNER_CONTEXT.getMetadata().resolveBuiltinFunction("date_trunc", fromTypes(VARCHAR, TIMESTAMP_TZ_MICROS)).toQualifiedName(),
- List.of(
- LITERAL_ENCODER.toExpression(utf8Slice("day"), createVarcharType(17)),
- new SymbolReference(timestampTzColumnSymbol)));
-
- LocalDate someDate = LocalDate.of(2005, 9, 10);
- Expression someMidnightExpression = LITERAL_ENCODER.toExpression(
- LongTimestampWithTimeZone.fromEpochMillisAndFraction(someDate.toEpochDay() * MILLISECONDS_PER_DAY, 0, UTC_KEY),
- TIMESTAMP_TZ_MICROS);
- Expression someMiddayExpression = LITERAL_ENCODER.toExpression(
- LongTimestampWithTimeZone.fromEpochMillisAndFraction(someDate.toEpochDay() * MILLISECONDS_PER_DAY, PICOSECONDS_PER_MICROSECOND, UTC_KEY),
- TIMESTAMP_TZ_MICROS);
-
- long startOfDateUtcEpochMillis = someDate.atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND;
- LongTimestampWithTimeZone startOfDateUtc = timestampTzFromEpochMillis(startOfDateUtcEpochMillis);
- LongTimestampWithTimeZone startOfNextDateUtc = timestampTzFromEpochMillis(startOfDateUtcEpochMillis + MILLISECONDS_PER_DAY);
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(EQUAL, truncateToDay, someMidnightExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.range(TIMESTAMP_TZ_MICROS, startOfDateUtc, true, startOfNextDateUtc, false)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(EQUAL, truncateToDay, someMiddayExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.none());
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(NOT_EQUAL, truncateToDay, someMidnightExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(
- Range.lessThan(TIMESTAMP_TZ_MICROS, startOfDateUtc),
- Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(LESS_THAN, truncateToDay, someMidnightExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.lessThan(TIMESTAMP_TZ_MICROS, startOfDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(LESS_THAN_OR_EQUAL, truncateToDay, someMidnightExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.lessThan(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(GREATER_THAN, truncateToDay, someMidnightExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(GREATER_THAN_OR_EQUAL, truncateToDay, someMidnightExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(IS_DISTINCT_FROM, truncateToDay, someMidnightExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, Domain.create(
- ValueSet.ofRanges(
- Range.lessThan(TIMESTAMP_TZ_MICROS, startOfDateUtc),
- Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)),
- true))));
- }
-
- /**
- * Test equivalent of {@link UnwrapYearInComparison} for {@link TimestampWithTimeZoneType}.
- * {@link UnwrapYearInComparison} handles {@link DateType} and {@link TimestampType}, but cannot handle
- * {@link TimestampWithTimeZoneType}. Such unwrap would not be monotonic. Within Iceberg, we know
- * that {@link TimestampWithTimeZoneType} is always in UTC zone (point in time, with no time zone information),
- * so we can unwrap.
- */
- @Test
- public void testExtractYearTimestampTzComparison()
- {
- String timestampTzColumnSymbol = "timestamp_tz_symbol";
- FunctionCall extractYear = new FunctionCall(
- PLANNER_CONTEXT.getMetadata().resolveBuiltinFunction("year", fromTypes(TIMESTAMP_TZ_MICROS)).toQualifiedName(),
- List.of(new SymbolReference(timestampTzColumnSymbol)));
-
- LocalDate someDate = LocalDate.of(2005, 9, 10);
- Expression yearExpression = LITERAL_ENCODER.toExpression(2005L, BIGINT);
-
- long startOfYearUtcEpochMillis = someDate.withDayOfYear(1).atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND;
- LongTimestampWithTimeZone startOfYearUtc = timestampTzFromEpochMillis(startOfYearUtcEpochMillis);
- LongTimestampWithTimeZone startOfNextDateUtc = timestampTzFromEpochMillis(someDate.plusYears(1).withDayOfYear(1).atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND);
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(EQUAL, extractYear, yearExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.range(TIMESTAMP_TZ_MICROS, startOfYearUtc, true, startOfNextDateUtc, false)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(NOT_EQUAL, extractYear, yearExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(
- Range.lessThan(TIMESTAMP_TZ_MICROS, startOfYearUtc),
- Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(LESS_THAN, extractYear, yearExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.lessThan(TIMESTAMP_TZ_MICROS, startOfYearUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(LESS_THAN_OR_EQUAL, extractYear, yearExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.lessThan(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(GREATER_THAN, extractYear, yearExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(GREATER_THAN_OR_EQUAL, extractYear, yearExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfYearUtc)))));
-
- assertThat(extract(
- constraint(
- new ComparisonExpression(IS_DISTINCT_FROM, extractYear, yearExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, Domain.create(
- ValueSet.ofRanges(
- Range.lessThan(TIMESTAMP_TZ_MICROS, startOfYearUtc),
- Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)),
- true))));
- }
-
- @Test
- public void testIntersectSummaryAndExpressionExtraction()
- {
- String timestampTzColumnSymbol = "timestamp_tz_symbol";
- Cast castOfColumn = new Cast(new SymbolReference(timestampTzColumnSymbol), toSqlType(DATE));
-
- LocalDate someDate = LocalDate.of(2005, 9, 10);
- Expression someDateExpression = LITERAL_ENCODER.toExpression(someDate.toEpochDay(), DATE);
-
- long startOfDateUtcEpochMillis = someDate.atStartOfDay().toEpochSecond(UTC) * MILLISECONDS_PER_SECOND;
- LongTimestampWithTimeZone startOfDateUtc = timestampTzFromEpochMillis(startOfDateUtcEpochMillis);
- LongTimestampWithTimeZone startOfNextDateUtc = timestampTzFromEpochMillis(startOfDateUtcEpochMillis + MILLISECONDS_PER_DAY);
- LongTimestampWithTimeZone startOfNextNextDateUtc = timestampTzFromEpochMillis(startOfDateUtcEpochMillis + MILLISECONDS_PER_DAY * 2);
-
- assertThat(extract(
- constraint(
- TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.lessThan(TIMESTAMP_TZ_MICROS, startOfNextNextDateUtc)))),
- new ComparisonExpression(NOT_EQUAL, castOfColumn, someDateExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(
- A_TIMESTAMP_TZ, domain(
- Range.lessThan(TIMESTAMP_TZ_MICROS, startOfDateUtc),
- Range.range(TIMESTAMP_TZ_MICROS, startOfNextDateUtc, true, startOfNextNextDateUtc, false)))));
-
- assertThat(extract(
- constraint(
- TupleDomain.withColumnDomains(Map.of(A_TIMESTAMP_TZ, domain(Range.lessThan(TIMESTAMP_TZ_MICROS, startOfNextDateUtc)))),
- new ComparisonExpression(GREATER_THAN, castOfColumn, someDateExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.none());
-
- assertThat(extract(
- constraint(
- TupleDomain.withColumnDomains(Map.of(A_BIGINT, Domain.singleValue(BIGINT, 1L))),
- new ComparisonExpression(GREATER_THAN_OR_EQUAL, castOfColumn, someDateExpression),
- Map.of(timestampTzColumnSymbol, A_TIMESTAMP_TZ))))
- .isEqualTo(TupleDomain.withColumnDomains(Map.of(
- A_BIGINT, Domain.singleValue(BIGINT, 1L),
- A_TIMESTAMP_TZ, domain(Range.greaterThanOrEqual(TIMESTAMP_TZ_MICROS, startOfDateUtc)))));
- }
-
- private static IcebergColumnHandle newPrimitiveColumn(Type type)
- {
- int id = nextColumnId.getAndIncrement();
- return new IcebergColumnHandle(
- primitiveColumnIdentity(id, "column_" + id),
- type,
- ImmutableList.of(),
- type,
- Optional.empty());
- }
-
- private static TupleDomain<IcebergColumnHandle> extract(Constraint constraint)
- {
- ConstraintExtractor.ExtractionResult result = extractTupleDomain(constraint);
- assertThat(result.remainingExpression())
- .isEqualTo(Constant.TRUE);
- return result.tupleDomain();
- }
-
- private static Constraint constraint(Expression expression, Map<String, IcebergColumnHandle> assignments)
- {
- return constraint(TupleDomain.all(), expression, assignments);
- }
-
- private static Constraint constraint(TupleDomain<IcebergColumnHandle> summary, Expression expression, Map<String, IcebergColumnHandle> assignments)
- {
- Map<String, Type> symbolTypes = assignments.entrySet().stream()
- .collect(toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().getType()));
- ConnectorExpression connectorExpression = connectorExpression(expression, symbolTypes);
- return new Constraint(summary, connectorExpression, ImmutableMap.copyOf(assignments));
- }
-
- private static ConnectorExpression connectorExpression(Expression expression, Map<String, Type> symbolTypes)
- {
- return ConnectorExpressionTranslator.translate(
- TEST_SESSION.beginTransactionId(TransactionId.create(), new NoOpTransactionManager(), new AllowAllAccessControl()),
- expression,
- TypeProvider.viewOf(symbolTypes.entrySet().stream()
- .collect(toImmutableMap(entry -> new Symbol(entry.getKey()), Map.Entry::getValue))),
- PLANNER_CONTEXT,
- createTestingTypeAnalyzer(PLANNER_CONTEXT))
- .orElseThrow(() -> new RuntimeException("Translation to ConnectorExpression failed for: " + expression));
- }
-
- private static LongTimestampWithTimeZone timestampTzFromEpochMillis(long epochMillis)
- {
- return LongTimestampWithTimeZone.fromEpochMillisAndFraction(epochMillis, 0, UTC_KEY);
- }
-
- private static Domain domain(Range first, Range... rest)
- {
- return Domain.create(ValueSet.ofRanges(first, rest), false);
- }
-}
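
The deleted helpers above spell out the arithmetic that the UTC unwrap relies on. As a standalone java.time sketch (illustrative, not part of the patch), the same computation shows how CAST(ts_tz AS date) = DATE '2005-09-10' becomes the half-open range asserted in testExtractTimestampTzDateComparison; the date_trunc and year variants differ only in the width of the range (one day versus one calendar year).

    import java.time.LocalDate;
    import java.time.ZoneOffset;

    class CastToDateUnwrapSketch
    {
        private static final long MILLISECONDS_PER_SECOND = 1_000;
        private static final long MILLISECONDS_PER_DAY = 24 * 60 * 60 * MILLISECONDS_PER_SECOND;

        public static void main(String[] args)
        {
            LocalDate someDate = LocalDate.of(2005, 9, 10);
            long startOfDateUtc = someDate.atStartOfDay().toEpochSecond(ZoneOffset.UTC) * MILLISECONDS_PER_SECOND;
            long startOfNextDateUtc = startOfDateUtc + MILLISECONDS_PER_DAY;
            // CAST(ts_tz AS date) = DATE '2005-09-10'
            //   <=>  startOfDateUtc <= ts_tz (UTC epoch millis) < startOfNextDateUtc
            System.out.println("[" + startOfDateUtc + ", " + startOfNextDateUtc + ")");
        }
    }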
diff --git a/testing/trino-tests/src/test/java/io/trino/tests/BaseQueryAssertionsTest.java b/testing/trino-tests/src/test/java/io/trino/tests/BaseQueryAssertionsTest.java
index 68a95015e993..d9bd147f9cd8 100644
--- a/testing/trino-tests/src/test/java/io/trino/tests/BaseQueryAssertionsTest.java
+++ b/testing/trino-tests/src/test/java/io/trino/tests/BaseQueryAssertionsTest.java
@@ -333,6 +333,25 @@ public void testIsFullyPushedDownWithSession()
"Output[columnNames = [_col0]]\n");
}
+ @Test
+ public void testIsReplacedWithEmptyValues()
+ {
+ assertThat(query("SELECT 1 WHERE false")).isReplacedWithEmptyValues();
+
+ // Verify that, when the assertion fails, rendering the expected and actual plans does not itself fail
+ assertThatThrownBy(() -> assertThat(query("SELECT 1 WHERE true")).isReplacedWithEmptyValues())
+ .hasMessageContaining(
+ "Plan does not match, expected [\n" +
+ "\n" +
+ "- node(OutputNode)\n")
+ .hasMessageContaining(
+ "\n" +
+ "\n" +
+ "] but found [\n" +
+ "\n" +
+ "Output[columnNames = [_col0]]\n");
+ }
+
@Test
public void testIsNotFullyPushedDown()
{
diff --git a/testing/trino-tests/src/test/java/io/trino/tests/TestLocalQueryAssertions.java b/testing/trino-tests/src/test/java/io/trino/tests/TestLocalQueryAssertions.java
index 2a22eda30e60..f5854d1923ce 100644
--- a/testing/trino-tests/src/test/java/io/trino/tests/TestLocalQueryAssertions.java
+++ b/testing/trino-tests/src/test/java/io/trino/tests/TestLocalQueryAssertions.java
@@ -66,6 +66,15 @@ public void testIsFullyPushedDownWithSession()
.hasMessage("isFullyPushedDown() currently does not work with LocalQueryRunner");
}
+ @Test
+ @Override
+ public void testIsReplacedWithEmptyValues()
+ {
+ assertThatThrownBy(() -> assertThat(query("SELECT 1 WHERE false")).isReplacedWithEmptyValues())
+ .isInstanceOf(IllegalStateException.class)
+ .hasMessage("isReplacedWithEmptyValues() currently does not work with LocalQueryRunner");
+ }
+
@Test
public void testNullInErrorMessage()
{