diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index 2753c913b6a0a..aaa380d9ad3d9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -781,6 +781,22 @@ class SQLContext(@transient val sparkContext: SparkContext)
     load(source, schema, options.toMap)
   }
 
+  /**
+   * :: Experimental ::
+   * (Java-specific) Returns the dataset specified by the given data source and
+   * a set of options as a DataFrame, using the given schema as the schema of the DataFrame.
+   *
+   * @group genericdata
+   */
+  @Experimental
+  def load(
+      source: String,
+      schema: StructType,
+      partitionColumns: Array[String],
+      options: java.util.Map[String, String]): DataFrame = {
+    load(source, schema, partitionColumns, options.toMap)
+  }
+
   /**
    * :: Experimental ::
    * (Scala-specific) Returns the dataset specified by the given data source and
@@ -796,6 +812,22 @@ class SQLContext(@transient val sparkContext: SparkContext)
     DataFrame(this, LogicalRelation(resolved.relation))
   }
 
+  /**
+   * :: Experimental ::
+   * (Scala-specific) Returns the dataset specified by the given data source and
+   * a set of options as a DataFrame, using the given schema as the schema of the DataFrame.
+   * @group genericdata
+   */
+  @Experimental
+  def load(
+      source: String,
+      schema: StructType,
+      partitionColumns: Array[String],
+      options: Map[String, String]): DataFrame = {
+    val resolved = ResolvedDataSource(this, Some(schema), partitionColumns, source, options)
+    DataFrame(this, LogicalRelation(resolved.relation))
+  }
+
   /**
    * :: Experimental ::
    * Creates an external table from the given path and returns the corresponding DataFrame.