Skip to content

Commit

Permalink
Simplify APIs.
Browse files Browse the repository at this point in the history
  • Loading branch information
yhuai committed Feb 10, 2015
1 parent 3abc215 commit 4c76d78
Show file tree
Hide file tree
Showing 3 changed files with 39 additions and 102 deletions.
49 changes: 39 additions & 10 deletions sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

package org.apache.spark.sql

import scala.collection.JavaConversions._
import scala.reflect.ClassTag

import org.apache.spark.annotation.{DeveloperApi, Experimental}
Expand Down Expand Up @@ -576,7 +577,9 @@ trait DataFrame extends RDDApi[Row] {
* be the target of an `insertInto`.
*/
@Experimental
def saveAsTable(tableName: String): Unit
def saveAsTable(tableName: String): Unit =
  saveAsTable(tableName, SaveMode.ErrorIfExists)  // default: refuse to replace an existing table

/**
* :: Experimental ::
Expand All @@ -589,7 +592,16 @@ trait DataFrame extends RDDApi[Row] {
* be the target of an `insertInto`.
*/
@Experimental
def saveAsTable(tableName: String, mode: SaveMode): Unit
def saveAsTable(tableName: String, mode: SaveMode): Unit = {
  // Evaluate the catalog lookup first (same order as the short-circuit it replaces).
  val tableExists = sqlContext.catalog.tableExists(Seq(tableName))
  if (tableExists && mode == SaveMode.Append) {
    // Appending to a table that is already there: route through insertInto
    // rather than re-creating the table via a data source.
    insertInto(tableName, overwrite = false)
  } else {
    // Otherwise create the table through the default data source, letting
    // the given mode decide what happens on conflicts.
    saveAsTable(tableName, sqlContext.conf.defaultDataSourceName, mode)
  }
}

/**
* :: Experimental ::
Expand All @@ -605,7 +617,9 @@ trait DataFrame extends RDDApi[Row] {
@Experimental
def saveAsTable(tableName: String, dataSourceName: String): Unit =
  saveAsTable(tableName, dataSourceName, SaveMode.ErrorIfExists)  // default mode

/**
* :: Experimental ::
Expand All @@ -621,7 +635,9 @@ trait DataFrame extends RDDApi[Row] {
def saveAsTable(tableName: String, dataSourceName: String, mode: SaveMode): Unit =
  saveAsTable(tableName, dataSourceName, mode, Map.empty[String, String])  // no extra options

/**
* :: Experimental ::
Expand All @@ -638,7 +654,9 @@ trait DataFrame extends RDDApi[Row] {
tableName: String,
dataSourceName: String,
mode: SaveMode,
options: java.util.Map[String, String]): Unit
options: java.util.Map[String, String]): Unit = {
saveAsTable(tableName, dataSourceName, mode, options.toMap)
}

/**
* :: Experimental ::
Expand All @@ -665,31 +683,40 @@ trait DataFrame extends RDDApi[Row] {
* [[SaveMode.ErrorIfExists]] as the save mode.
*/
@Experimental
def save(path: String): Unit
def save(path: String): Unit =
  save(path, SaveMode.ErrorIfExists)  // default: error out if the path already exists

/**
* :: Experimental ::
* Saves the contents of this DataFrame to the given path and [[SaveMode]] specified by mode,
* using the default data source configured by spark.sql.sources.default.
*/
@Experimental
def save(path: String, mode: SaveMode): Unit
def save(path: String, mode: SaveMode): Unit =
  // Delegate using the data source configured by spark.sql.sources.default.
  save(path, sqlContext.conf.defaultDataSourceName, mode)

/**
* :: Experimental ::
* Saves the contents of this DataFrame to the given path based on the given data source,
* using [[SaveMode.ErrorIfExists]] as the save mode.
*/
@Experimental
def save(path: String, dataSourceName: String): Unit
def save(path: String, dataSourceName: String): Unit =
  // The path travels to the data source as the "path" option.
  save(dataSourceName, SaveMode.ErrorIfExists, Map("path" -> path))

/**
* :: Experimental ::
* Saves the contents of this DataFrame to the given path based on the given data source and
* [[SaveMode]] specified by mode.
*/
@Experimental
def save(path: String, dataSourceName: String, mode: SaveMode): Unit
def save(path: String, dataSourceName: String, mode: SaveMode): Unit =
  // The path travels to the data source as the "path" option.
  save(dataSourceName, mode, Map("path" -> path))

/**
* :: Experimental ::
Expand All @@ -700,7 +727,9 @@ trait DataFrame extends RDDApi[Row] {
def save(
    dataSourceName: String,
    mode: SaveMode,
    options: java.util.Map[String, String]): Unit =
  // Java-friendly overload: convert the options to a Scala Map and delegate.
  save(dataSourceName, mode, options.toMap)

/**
* :: Experimental ::
Expand Down
60 changes: 0 additions & 60 deletions sql/core/src/main/scala/org/apache/spark/sql/DataFrameImpl.scala
Original file line number Diff line number Diff line change
Expand Up @@ -350,42 +350,6 @@ private[sql] class DataFrameImpl protected[sql](
}
}

// Shorthand: persist with the default mode, which refuses to replace an existing table.
override def saveAsTable(tableName: String): Unit = {
saveAsTable(tableName, SaveMode.ErrorIfExists)
}

// Persist this DataFrame as a table, honoring the requested SaveMode.
override def saveAsTable(tableName: String, mode: SaveMode): Unit = {
if (sqlContext.catalog.tableExists(Seq(tableName)) && mode == SaveMode.Append) {
// If table already exists and the save mode is Append,
// we will just call insertInto to append the contents of this DataFrame.
insertInto(tableName, overwrite = false)
} else {
// Table absent (or mode is not Append): create it through the default data source.
val dataSourceName = sqlContext.conf.defaultDataSourceName
saveAsTable(tableName, dataSourceName, mode)
}
}

// Persist through an explicit data source; fails if the table already exists.
override def saveAsTable(
tableName: String,
dataSourceName: String): Unit = {
saveAsTable(tableName, dataSourceName, SaveMode.ErrorIfExists)
}

// Persist through an explicit data source and mode, with no extra options.
override def saveAsTable(
tableName: String,
dataSourceName: String,
mode: SaveMode): Unit = {
saveAsTable(tableName, dataSourceName, mode, Map.empty[String, String])
}

// Java-friendly overload: converts the options map to a Scala Map and delegates.
override def saveAsTable(
tableName: String,
dataSourceName: String,
mode: SaveMode,
options: java.util.Map[String, String]): Unit = {
saveAsTable(tableName, dataSourceName, mode, options.toMap)
}

override def saveAsTable(
tableName: String,
dataSourceName: String,
Expand All @@ -403,30 +367,6 @@ private[sql] class DataFrameImpl protected[sql](
sqlContext.executePlan(cmd).toRdd
}

// Save to the given path with the default mode (error if the destination exists).
override def save(path: String): Unit = {
save(path, SaveMode.ErrorIfExists)
}

// Save to the given path using the data source configured by
// spark.sql.sources.default.
override def save(path: String, mode: SaveMode): Unit = {
val dataSourceName = sqlContext.conf.defaultDataSourceName
save(path, dataSourceName, mode)
}

// Save through an explicit data source; the path travels as the "path" option.
override def save(path: String, dataSourceName: String): Unit = {
save(dataSourceName, SaveMode.ErrorIfExists, Map("path" -> path))
}

// Save through an explicit data source and mode; the path travels as the "path" option.
override def save(path: String, dataSourceName: String, mode: SaveMode): Unit = {
save(dataSourceName, mode, Map("path" -> path))
}

// Java-friendly overload: converts the options map to a Scala Map and delegates.
override def save(
dataSourceName: String,
mode: SaveMode,
options: java.util.Map[String, String]): Unit = {
save(dataSourceName, mode, options.toMap)
}

override def save(
dataSourceName: String,
mode: SaveMode,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -154,44 +154,12 @@ private[sql] class IncomputableColumn(protected[sql] val expr: Expression) exten

override def saveAsParquetFile(path: String): Unit = err()

// An IncomputableColumn wraps a bare Expression, so it cannot be evaluated as a
// DataFrame; every table-persistence overload fails fast via err()
// (presumably throws — confirm against err's definition).
override def saveAsTable(tableName: String): Unit = err()

override def saveAsTable(tableName: String, mode: SaveMode): Unit = err()

override def saveAsTable(
tableName: String,
dataSourceName: String): Unit = err()

override def saveAsTable(
tableName: String,
dataSourceName: String,
mode: SaveMode): Unit = err()

override def saveAsTable(
tableName: String,
dataSourceName: String,
mode: SaveMode,
options: java.util.Map[String, String]): Unit = err()

override def saveAsTable(
tableName: String,
dataSourceName: String,
mode: SaveMode,
options: Map[String, String]): Unit = err()

// The file-save overloads likewise fail fast via err(); a bare column
// expression has no contents to write out.
override def save(path: String): Unit = err()

override def save(path: String, mode: SaveMode): Unit = err()

override def save(path: String, dataSourceName: String): Unit = err()

override def save(path: String, dataSourceName: String, mode: SaveMode): Unit = err()

override def save(
dataSourceName: String,
mode: SaveMode,
options: java.util.Map[String, String]): Unit = err()

override def save(
dataSourceName: String,
mode: SaveMode,
Expand Down

0 comments on commit 4c76d78

Please sign in to comment.