Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

[SPARK-24923][SQL] Implement v2 CreateTableAsSelect #24570

Closed
wants to merge 8 commits into from
Closed
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -238,7 +238,7 @@ unsupportedHiveNativeCommands
;

createTableHeader
: CREATE TEMPORARY? EXTERNAL? TABLE (IF NOT EXISTS)? tableIdentifier
: CREATE TEMPORARY? EXTERNAL? TABLE (IF NOT EXISTS)? multipartIdentifier
;

bucketSpec
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,8 @@ import org.apache.spark.sql.types._
*/
trait CheckAnalysis extends PredicateHelper {

import org.apache.spark.sql.catalog.v2.CatalogV2Implicits._

/**
* Override to provide additional checks for correct analysis.
* These rules will be evaluated after our built-in check rules.
Expand Down Expand Up @@ -296,6 +298,21 @@ trait CheckAnalysis extends PredicateHelper {
}
}

case CreateTableAsSelect(_, _, partitioning, query, _, _, _) =>
val references = partitioning.flatMap(_.references).toSet
val badReferences = references.map(_.fieldNames).flatMap { column =>
query.schema.findNestedField(column) match {
case Some(_) =>
None
case _ =>
Some(s"${column.quoted} is missing or is in a map or array")
}
}

if (badReferences.nonEmpty) {
failAnalysis(s"Invalid partitioning: ${badReferences.mkString(", ")}")
}

case _ => // Fallbacks to the following checks
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2019,7 +2019,7 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
/**
* Type to keep track of a table header: (identifier, isTemporary, ifNotExists, isExternal).
*/
type TableHeader = (TableIdentifier, Boolean, Boolean, Boolean)
type TableHeader = (Seq[String], Boolean, Boolean, Boolean)

/**
* Validate a create table statement and return the [[TableIdentifier]].
Expand All @@ -2031,7 +2031,8 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
if (temporary && ifNotExists) {
operationNotAllowed("CREATE TEMPORARY TABLE ... IF NOT EXISTS", ctx)
}
(visitTableIdentifier(ctx.tableIdentifier), temporary, ifNotExists, ctx.EXTERNAL != null)
val multipartIdentifier: Seq[String] = ctx.multipartIdentifier.parts.asScala.map(_.getText)
rdblue marked this conversation as resolved.
Show resolved Hide resolved
(multipartIdentifier, temporary, ifNotExists, ctx.EXTERNAL != null)
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@

package org.apache.spark.sql.catalyst.plans.logical

import org.apache.spark.sql.catalog.v2.{Identifier, TableCatalog}
import org.apache.spark.sql.catalog.v2.expressions.Transform
import org.apache.spark.sql.catalyst.AliasIdentifier
import org.apache.spark.sql.catalyst.analysis.{MultiInstanceRelation, NamedRelation}
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable}
Expand Down Expand Up @@ -402,6 +404,35 @@ trait V2WriteCommand extends Command {
}
}

/**
 * Create a new table from a select query with a v2 catalog.
 *
 * This is a metadata-plus-data command: the table schema is derived from the query schema,
 * and the query result populates the new table.
 *
 * @param catalog the v2 catalog that creates the table
 * @param tableName identifier of the table to create within the catalog
 * @param partitioning transforms used to partition the new table; every column referenced by
 *                     a transform must exist in the query's schema
 * @param query the plan whose output defines the table schema and provides the data
 * @param properties table properties to pass to the catalog at creation time
 * @param writeOptions options for the write that populates the table
 * @param ignoreIfExists when true, do nothing if the table already exists (IF NOT EXISTS)
 */
case class CreateTableAsSelect(
    catalog: TableCatalog,
    tableName: Identifier,
    partitioning: Seq[Transform],
    query: LogicalPlan,
    properties: Map[String, String],
    writeOptions: Map[String, String],
    ignoreIfExists: Boolean) extends Command {

  override def children: Seq[LogicalPlan] = Seq(query)

  override lazy val resolved: Boolean = {
    // The table schema is created from the query schema, so the only resolution needed is to
    // check that the columns referenced by the table's partitioning exist in the query schema.
    // Require the child to be resolved first: reading query.schema from a plan that is still
    // unresolved is not valid and may throw.
    childrenResolved && {
      val references = partitioning.flatMap(_.references).toSet
      references.map(_.fieldNames).forall { column =>
        query.schema.findNestedField(column).isDefined
      }
    }
  }
}

/**
* Append data to an existing table.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
package org.apache.spark.sql.catalyst.plans.logical.sql

import org.apache.spark.sql.catalog.v2.expressions.Transform
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
Expand All @@ -30,7 +29,7 @@ import org.apache.spark.sql.types.StructType
* This is a metadata-only command and is not used to write data to the created table.
*/
case class CreateTableStatement(
table: TableIdentifier,
tableName: Seq[String],
tableSchema: StructType,
partitioning: Seq[Transform],
bucketSpec: Option[BucketSpec],
Expand All @@ -50,7 +49,7 @@ case class CreateTableStatement(
* A CREATE TABLE AS SELECT command, as parsed from SQL.
*/
case class CreateTableAsSelectStatement(
table: TableIdentifier,
tableName: Seq[String],
asSelect: LogicalPlan,
partitioning: Seq[Transform],
bucketSpec: Option[BucketSpec],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -307,6 +307,29 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[Stru
nameToIndex.get(name)
}

/**
 * Looks up a field by a path of names, descending through nested structs.
 *
 * Only struct nesting is traversed: fields nested inside maps or arrays are not found.
 * Returns None for an empty path, an unknown name, or a path segment that descends
 * through a non-struct field.
 */
private[sql] def findNestedField(fieldNames: Seq[String]): Option[StructField] = {
  fieldNames match {
    case Seq() =>
      // Empty path: nothing to look up.
      None
    case head +: rest =>
      nameToField.get(head).flatMap { field =>
        if (rest.isEmpty) {
          // Last path segment: this is the requested field.
          Some(field)
        } else {
          // More segments remain: only a struct can be descended into.
          field.dataType match {
            case struct: StructType => struct.findNestedField(rest)
            case _ => None
          }
        }
      }
  }
}

protected[sql] def toAttributes: Seq[AttributeReference] =
map(f => AttributeReference(f.name, f.dataType, f.nullable, f.metadata)())

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ class TestTableCatalog extends TableCatalog {
override def dropTable(ident: Identifier): Boolean = Option(tables.remove(ident)).isDefined
}

private object TestTableCatalog {
object TestTableCatalog {
/**
* Apply properties changes to a map and return the result.
*/
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,153 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.catalyst.analysis

import org.apache.spark.sql.catalog.v2.{Identifier, TableCatalog, TestTableCatalog}
import org.apache.spark.sql.catalog.v2.expressions.LogicalExpressions
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, LeafNode}
import org.apache.spark.sql.types.{DoubleType, LongType, StringType, StructType}
import org.apache.spark.sql.util.CaseInsensitiveStringMap

class CreateTablePartitioningValidationSuite extends AnalysisTest {
  import CreateTablePartitioningValidationSuite._

  /**
   * Builds a CTAS plan over TestRelation2 that is bucketed (4 buckets) by the given
   * column references. All other arguments are fixed so each test varies only the
   * partitioning columns.
   */
  private def ctasBucketedBy(columns: String*): CreateTableAsSelect = {
    CreateTableAsSelect(
      catalog,
      Identifier.of(Array(), "table_name"),
      LogicalExpressions.bucket(4, columns: _*) :: Nil,
      TestRelation2,
      Map.empty,
      Map.empty,
      ignoreIfExists = false)
  }

  test("CreateTableAsSelect: fail missing top-level column") {
    val plan = ctasBucketedBy("does_not_exist")

    assert(!plan.resolved)
    assertAnalysisError(plan, Seq(
      "Invalid partitioning",
      "does_not_exist is missing or is in a map or array"))
  }

  test("CreateTableAsSelect: fail missing top-level column nested reference") {
    val plan = ctasBucketedBy("does_not_exist.z")

    assert(!plan.resolved)
    assertAnalysisError(plan, Seq(
      "Invalid partitioning",
      "does_not_exist.z is missing or is in a map or array"))
  }

  test("CreateTableAsSelect: fail missing nested column") {
    val plan = ctasBucketedBy("point.z")

    assert(!plan.resolved)
    assertAnalysisError(plan, Seq(
      "Invalid partitioning",
      "point.z is missing or is in a map or array"))
  }

  test("CreateTableAsSelect: fail with multiple errors") {
    val plan = ctasBucketedBy("does_not_exist", "point.z")

    assert(!plan.resolved)
    assertAnalysisError(plan, Seq(
      "Invalid partitioning",
      "point.z is missing or is in a map or array",
      "does_not_exist is missing or is in a map or array"))
  }

  test("CreateTableAsSelect: success with top-level column") {
    assertAnalysisSuccess(ctasBucketedBy("id"))
  }

  test("CreateTableAsSelect: success using nested column") {
    assertAnalysisSuccess(ctasBucketedBy("point.x"))
  }

  test("CreateTableAsSelect: success using complex column") {
    assertAnalysisSuccess(ctasBucketedBy("point"))
  }
}

private object CreateTablePartitioningValidationSuite {
  // Catalog used by all tests, initialized once with an empty configuration.
  val catalog: TableCatalog = {
    val testCatalog = new TestTableCatalog()
    testCatalog.initialize("test", CaseInsensitiveStringMap.empty())
    testCatalog
  }

  // Source schema: two atomic top-level columns plus a nested struct column, so tests can
  // reference valid top-level, valid nested, and missing columns.
  val schema: StructType = {
    val point = new StructType()
      .add("x", DoubleType)
      .add("y", DoubleType)
    new StructType()
      .add("id", LongType)
      .add("data", StringType)
      .add("point", point)
  }
}

// Leaf relation whose output mirrors the suite's test schema; used as the CTAS source.
private case object TestRelation2 extends LeafNode with NamedRelation {
  override def name: String = "source_relation"

  override def output: Seq[AttributeReference] = {
    val sourceSchema = CreateTablePartitioningValidationSuite.schema
    sourceSchema.toAttributes
  }
}

Loading