From caaf93f4e21383702e68427b56b66c609e00394d Mon Sep 17 00:00:00 2001
From: Udbhav30
Date: Wed, 6 Mar 2019 09:06:10 -0800
Subject: [PATCH] [SPARK-24669][SQL] Invalidate tables in case of DROP DATABASE CASCADE

## What changes were proposed in this pull request?

Before dropping a database, refresh the tables of that database so that all cached entries associated with those tables are invalidated. We already follow the same approach when dropping a single table.

A unit test is added.

Closes #23905 from Udbhav30/SPARK-24669.

Authored-by: Udbhav30
Signed-off-by: Dongjoon Hyun
(cherry picked from commit 9bddf7180e9e76e1cabc580eee23962dd66f84c3)
Signed-off-by: Dongjoon Hyun
---
 .../sql/catalyst/catalog/SessionCatalog.scala |  5 +++
 .../sql/execution/command/DDLSuite.scala      | 38 ++++++++++++++++++-
 2 files changed, 42 insertions(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index afb0f009db05c..5987a2019ef41 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -218,6 +218,11 @@ class SessionCatalog(
     if (dbName == DEFAULT_DATABASE) {
       throw new AnalysisException(s"Can not drop default database")
     }
+    if (cascade && databaseExists(dbName)) {
+      listTables(dbName).foreach { t =>
+        invalidateCachedTable(QualifiedTableName(dbName, t.table))
+      }
+    }
     externalCatalog.dropDatabase(dbName, ignoreIfNotExists, cascade)
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index f8d98dead2d42..6ca4536164f9d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.execution.command
 
-import java.io.File
+import java.io.{File, PrintWriter}
 import java.net.URI
 import java.util.Locale
 
@@ -2715,4 +2715,40 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
       }
     }
   }
+
+  test("Refresh table before drop database cascade") {
+    withTempDir { tempDir =>
+      val file1 = new File(tempDir + "/first.csv")
+      val writer1 = new PrintWriter(file1)
+      writer1.write("first")
+      writer1.close()
+
+      val file2 = new File(tempDir + "/second.csv")
+      val writer2 = new PrintWriter(file2)
+      writer2.write("second")
+      writer2.close()
+
+      withDatabase("foo") {
+        withTable("foo.first") {
+          sql("CREATE DATABASE foo")
+          sql(
+            s"""CREATE TABLE foo.first (id STRING)
+               |USING csv OPTIONS (path='${file1.toURI}')
+             """.stripMargin)
+          sql("SELECT * FROM foo.first")
+          checkAnswer(spark.table("foo.first"), Row("first"))
+
+          // Drop the database and re-create the same table with a different path
+          sql("DROP DATABASE foo CASCADE")
+          sql("CREATE DATABASE foo")
+          sql(
+            s"""CREATE TABLE foo.first (id STRING)
+               |USING csv OPTIONS (path='${file2.toURI}')
+             """.stripMargin)
+          sql("SELECT * FROM foo.first")
+          checkAnswer(spark.table("foo.first"), Row("second"))
+        }
+      }
+    }
+  }
 }
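
The sketch below is not part of the patch; it is a minimal standalone reproduction of the behavior the new test covers, assuming a local `SparkSession` and illustrative names (the `foo` database, `first.csv`/`second.csv`, and the `DropDatabaseCascadeRepro` object are all hypothetical). It shows the scenario the fix addresses: a table relation cached before `DROP DATABASE ... CASCADE` could otherwise be served again after the table is re-created over a different path.

```scala
// Hypothetical reproduction of SPARK-24669 (not part of the patch above).
// Run with Spark on the classpath, e.g. via spark-shell or a small sbt project.
import java.io.{File, PrintWriter}

import org.apache.spark.sql.SparkSession

object DropDatabaseCascadeRepro {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("SPARK-24669 repro")
      .getOrCreate()

    // Two CSV files with different contents, standing in for the test's first.csv/second.csv.
    val dir = java.nio.file.Files.createTempDirectory("spark24669").toFile
    def writeCsv(name: String, content: String): File = {
      val f = new File(dir, name)
      val w = new PrintWriter(f)
      w.write(content)
      w.close()
      f
    }
    val first = writeCsv("first.csv", "first")
    val second = writeCsv("second.csv", "second")

    // Create a table over the first file and query it, which populates the relation cache.
    spark.sql("CREATE DATABASE foo")
    spark.sql(s"CREATE TABLE foo.first (id STRING) USING csv OPTIONS (path='${first.toURI}')")
    spark.table("foo.first").show()  // id = first

    // Drop the whole database with CASCADE, then re-create the same table over the second file.
    // Without the invalidation added in this patch, the stale cached relation for foo.first
    // could still serve rows from the old path.
    spark.sql("DROP DATABASE foo CASCADE")
    spark.sql("CREATE DATABASE foo")
    spark.sql(s"CREATE TABLE foo.first (id STRING) USING csv OPTIONS (path='${second.toURI}')")
    spark.table("foo.first").show()  // id = second once tables are invalidated on cascade drop

    spark.stop()
  }
}
```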