diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index 3cd39a91919ab..4485c0d3d783a 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -444,7 +444,7 @@ resource
     ;
 
 dmlStatementNoWith
-    : insertInto queryTerm queryOrganization                               #singleInsertQuery
+    : insertInto query                                                     #singleInsertQuery
     | fromClause multiInsertQueryBody+                                     #multiInsertQuery
     | DELETE FROM multipartIdentifier tableAlias whereClause?              #deleteFromTable
     | UPDATE multipartIdentifier tableAlias setClause whereClause?         #updateTable
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 1968142a0409f..b813d939fdaae 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -239,9 +239,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
    */
   override def visitSingleInsertQuery(
       ctx: SingleInsertQueryContext): LogicalPlan = withOrigin(ctx) {
-    withInsertInto(
-      ctx.insertInto(),
-      plan(ctx.queryTerm).optionalMap(ctx.queryOrganization)(withQueryResultClauses))
+    withInsertInto(ctx.insertInto(), visitQuery(ctx.query))
   }
 
   /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
index 73fdf2804b2c6..2b83f9eb0de12 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
@@ -1055,6 +1055,14 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
       }
     }
   }
+
+  test("SPARK-36980: Insert support query with CTE") {
+    withTable("t") {
+      sql("CREATE TABLE t(i int, part1 int, part2 int) using parquet")
+      sql("INSERT INTO t WITH v1(c1) as (values (1)) select 1, 2,3 from v1")
+      checkAnswer(spark.table("t"), Row(1, 2, 3))
+    }
+  }
 }
 
 class FileExistingTestFileSystem extends RawLocalFileSystem {