
Commit

address comments
cloud-fan committed Jan 15, 2019
1 parent ec6129a commit 693fb98
Showing 3 changed files with 15 additions and 6 deletions.
SupportsSaveMode.java
@@ -20,7 +20,7 @@
import org.apache.spark.sql.SaveMode;

// A temporary mixin trait for `WriteBuilder` to support `SaveMode`. Will be removed before
-// Spark 3.0 when all the new write operators are finished.
+// Spark 3.0 when all the new write operators are finished. See SPARK-26356 for more details.
public interface SupportsSaveMode extends WriteBuilder {
WriteBuilder mode(SaveMode mode);
}
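
For context, a hedged sketch of how this mixin is intended to be used: a data source's WriteBuilder implements SupportsSaveMode and records the mode requested by the user. The class and field names below are illustrative only, and the org.apache.spark.sql.sources.v2.writer package path is assumed from the DSv2 code of this period; a real builder would also override the method that produces the actual BatchWrite, which is hidden below the fold of this diff.

import org.apache.spark.sql.SaveMode;
// Package path assumed from the DSv2 writer API of this period.
import org.apache.spark.sql.sources.v2.writer.SupportsSaveMode;
import org.apache.spark.sql.sources.v2.writer.WriteBuilder;

// Hypothetical builder for an illustrative in-memory table.
class MemoryTableWriteBuilder implements SupportsSaveMode {
  // Until told otherwise, behave like the default builder: append.
  private SaveMode mode = SaveMode.Append;

  @Override
  public WriteBuilder mode(SaveMode mode) {
    // Remember how existing data should be treated when the write is built.
    this.mode = mode;
    return this;
  }
}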
WriteBuilder.java
@@ -23,23 +23,32 @@
import org.apache.spark.sql.types.StructType;

/**
- * An interface for building the {@link BatchWrite}. Implementations can mix in interfaces like
- * {@link SupportsSaveMode} to support different ways to write data to data sources.
+ * An interface for building the {@link BatchWrite}. Implementations can mix in some interfaces to
+ * support different ways to write data to data sources.
+ *
+ * Unless modified by a mixin interface, the {@link BatchWrite} configured by this builder is to
+ * append data without affecting existing data.
*/
@Evolving
public interface WriteBuilder {

/**
- * Returns a new builder with the `queryId`. `queryId` is a unique string of the query. It's
+ * Passes the `queryId` from Spark to data source. `queryId` is a unique string of the query. It's
* possible that there are many queries running at the same time, or a query is restarted and
* resumed. {@link BatchWrite} can use this id to identify the query.
+ *
+ * @return a new builder with the `queryId`. By default it returns `this`, which means the given
+ *         `queryId` is ignored. Please override this method to take the `queryId`.
*/
default WriteBuilder withQueryId(String queryId) {
return this;
}

/**
- * Returns a new builder with the schema of the input data to write.
+ * Passes the schema of the input data from Spark to data source.
+ *
+ * @return a new builder with the `schema`. By default it returns `this`, which means the given
+ *         `schema` is ignored. Please override this method to take the `schema`.
*/
default WriteBuilder withInputDataSchema(StructType schema) {
return this;
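Because both methods have default no-op implementations, an implementation overrides only what it needs. A minimal sketch of a builder that captures the two values, under the same assumptions as above (illustrative names, assumed package path); again, a real builder would also override the method that actually builds the BatchWrite, which sits below the visible part of this diff.

import org.apache.spark.sql.sources.v2.writer.WriteBuilder;  // package path assumed
import org.apache.spark.sql.types.StructType;

// Hypothetical builder that keeps the query id and input schema Spark passes in.
class CapturingWriteBuilder implements WriteBuilder {
  private String queryId;
  private StructType schema;

  @Override
  public WriteBuilder withQueryId(String queryId) {
    // Keep the id so the resulting BatchWrite can identify its query, e.g. in commit metadata.
    this.queryId = queryId;
    return this;
  }

  @Override
  public WriteBuilder withInputDataSchema(StructType schema) {
    // Keep the schema so it can be validated against the target table before writing.
    this.schema = schema;
    return this;
  }
}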
DataFrameWriter.scala
@@ -273,7 +273,7 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
}

case _ => throw new AnalysisException(
s"data source ${table.name} does not support SaveMode")
s"data source ${table.name} does not support SaveMode $mode")
}
}

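Including $mode in the message makes the failure easier to diagnose from user code. A hedged sketch of how it would surface, assuming a placeholder v2 source (com.example.NoSaveModeSource) whose WriteBuilder does not mix in SupportsSaveMode:

import org.apache.spark.sql.AnalysisException;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public class SaveModeErrorExample {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .master("local[*]")
        .appName("save-mode-error")
        .getOrCreate();

    Dataset<Row> df = spark.range(10).toDF();

    try {
      df.write()
          .format("com.example.NoSaveModeSource")  // placeholder source without SupportsSaveMode
          .mode(SaveMode.Overwrite)
          .save();
    } catch (AnalysisException e) {
      // With this change the message names the rejected mode, e.g.
      // "data source <table name> does not support SaveMode Overwrite"
      System.out.println(e.getMessage());
    } finally {
      spark.stop();
    }
  }
}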
