Skip to content

Commit

Permalink
SPARK-45959: Fixed test failures after merge
Browse files Browse the repository at this point in the history
  • Loading branch information
ashahid committed Jan 15, 2025
1 parent d30375b commit 8d77a4b
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import scala.collection.mutable
import scala.util.{Failure, Success, Try}

import org.apache.spark.sql.Dataset
import org.apache.spark.sql.catalyst.analysis.{SQLFunctionExpression, SQLScalarFunction}
import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, AttributeMap, AttributeReference, Expression, NamedExpression, UserDefinedExpression}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
Expand Down Expand Up @@ -94,8 +95,16 @@ private[sql] object EarlyCollapseProject extends Rule[LogicalPlan] {
p.projectList.forall(_.collectFirst {
case ex if !ex.deterministic => ex
case ex: UserDefinedExpression => ex
}.isEmpty) &&
// Because SQL functions take the incoming attributes as parameters and the function body
// references those attributes, it is incorrect to collapse a function's attribute input
// with its replacement expression
newP.projectList.forall(_.collectFirst {
case ex: SQLFunctionExpression => ex
case ex: SQLScalarFunction => ex
}.isEmpty)



private def transferMetadata(from: Attribute, to: NamedExpression): NamedExpression =
if (from.metadata == Metadata.empty) {
to
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -484,13 +484,12 @@ Sort [spark_catalog.default.foo2_3(c1, c2)#x ASC NULLS FIRST, spark_catalog.defa
SELECT * FROM V1 WHERE foo2_3(c1, 0) = c1 AND foo2_3(c1, c2) < 8
-- !query analysis
Project [c1#x, c2#x]
+- Project [c1#x, c2#x]
+- Filter ((spark_catalog.default.foo2_3(a#x, b#x) = c1#x) AND (spark_catalog.default.foo2_3(a#x, b#x) < 8))
+- Project [c1#x, c2#x, cast(c1#x as int) AS a#x, cast(0 as int) AS b#x, cast(c1#x as int) AS a#x, cast(c2#x as int) AS b#x]
+- SubqueryAlias spark_catalog.default.v1
+- View (`spark_catalog`.`default`.`v1`, [c1#x, c2#x])
+- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]
+- LocalRelation [col1#x, col2#x]
+- Filter ((spark_catalog.default.foo2_3(a#x, b#x) = c1#x) AND (spark_catalog.default.foo2_3(a#x, b#x) < 8))
+- Project [c1#x, c2#x, cast(c1#x as int) AS a#x, cast(0 as int) AS b#x, cast(c1#x as int) AS a#x, cast(c2#x as int) AS b#x]
+- SubqueryAlias spark_catalog.default.v1
+- View (`spark_catalog`.`default`.`v1`, [c1#x, c2#x])
+- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]
+- LocalRelation [col1#x, col2#x]


-- !query
Expand Down

0 comments on commit 8d77a4b

Please sign in to comment.