diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
index b48a58691ce85..6680cdf91d959 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql
 
-import org.apache.spark.sql.sources.SaveMode
-
 import scala.reflect.ClassTag
 
 import org.apache.spark.annotation.{DeveloperApi, Experimental}
@@ -26,10 +24,10 @@ import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.sources.SaveMode
 import org.apache.spark.sql.types.StructType
 import org.apache.spark.util.Utils
 
-
 private[sql] object DataFrame {
   def apply(sqlContext: SQLContext, logicalPlan: LogicalPlan): DataFrame = {
     new DataFrameImpl(sqlContext, logicalPlan)
@@ -627,6 +625,7 @@ trait DataFrame extends RDDApi[Row] {
 
   /**
    * :: Experimental ::
+   * (Scala-specific)
    * Creates a table from the the contents of this DataFrame based on a given data source
    * and a set of options.
    * If appendIfExists is true and the table already exists,
@@ -688,6 +687,7 @@ trait DataFrame extends RDDApi[Row] {
 
   /**
    * :: Experimental ::
+   * (Scala-specific)
    * Saves the contents of this DataFrame based on the given data source and a set of options.
    */
   @Experimental
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index 6212d7f4f09bd..a0ee1102748e6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -412,7 +412,6 @@ class SQLContext(@transient val sparkContext: SparkContext)
   /**
    * :: Experimental ::
    * Loads a dataset from the given path as a DataFrame based on a given data source.
-   * It will use the default data source configured in spark.sql.sources.default.
    */
   @Experimental
   def load(
@@ -434,6 +433,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
 
   /**
    * :: Experimental ::
+   * (Scala-specific)
    * Loads a dataset based on a given data source and a set of options.
    */
   @Experimental
@@ -458,6 +458,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
 
   /**
    * :: Experimental ::
+   * (Scala-specific)
    * Loads a dataset based on a given data source, a schema and a set of options.
    */
   @Experimental
@@ -508,6 +509,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
 
   /**
    * :: Experimental ::
+   * (Scala-specific)
    * Creates an external table from the given path based on a data source and a set of options.
    * Then, returns the corresponding DataFrame.
    */
@@ -545,6 +547,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
 
   /**
    * :: Experimental ::
+   * (Scala-specific)
    * Create an external table from the given path based on a data source, a schema and
    * a set of options. Then, returns the corresponding DataFrame.
    */
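
For context on the new "(Scala-specific)" tags: these overloads take their options as a Scala `Map[String, String]`, which is awkward to construct from Java, so the tag marks them as the Scala-friendly variants. Below is a minimal usage sketch of one such overload, assuming the options-based `load(source, options)` signature as it shipped in Spark 1.3 (this intermediate snapshot may differ); the `"jdbc"` source name and option keys are illustrative only.

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object ScalaSpecificLoadSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setAppName("load-sketch").setMaster("local[*]"))
    val sqlContext = new SQLContext(sc)

    // (Scala-specific) overload: options are passed as a Scala Map[String, String];
    // Java callers would use the java.util.Map counterpart instead.
    // The "jdbc" source and the option keys below are illustrative assumptions.
    val df = sqlContext.load(
      "jdbc",
      Map(
        "url" -> "jdbc:postgresql:dbserver",
        "dbtable" -> "schema.tablename"))

    df.printSchema()
    sc.stop()
  }
}
```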