revert incorrect changes to AdaptiveSparkPlanExec and HadoopTableReader
luluorta committed Nov 11, 2020
1 parent 7047924 · commit ce97a61
Showing 2 changed files with 6 additions and 0 deletions.
AdaptiveSparkPlanExec.scala

@@ -41,6 +41,7 @@ import org.apache.spark.sql.execution.command.DataWritingCommandExec
 import org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec
 import org.apache.spark.sql.execution.exchange._
 import org.apache.spark.sql.execution.ui.{SparkListenerSQLAdaptiveExecutionUpdate, SparkListenerSQLAdaptiveSQLMetricUpdates, SQLPlanMetric}
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.util.ThreadUtils
 
 /**
@@ -146,6 +147,8 @@ case class AdaptiveSparkPlanExec(
 
   def executedPlan: SparkPlan = currentPhysicalPlan
 
+  override def conf: SQLConf = context.session.sessionState.conf
+
   override def output: Seq[Attribute] = inputPlan.output
 
   override def doCanonicalize(): SparkPlan = inputPlan.canonicalized
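One way to read the restored line: `SQLConfHelper`-style defaults resolve `conf` through a thread-local lookup (`SQLConf.get`), while the override pins `conf` to the session carried in the plan's `context`. A minimal, self-contained sketch of that pattern with hypothetical stand-ins (`Conf`, `ConfHelper`, `Session`, `PinnedNode`), not Spark's real classes:

// Hypothetical stand-ins for SQLConf / SQLConfHelper / SparkSession -- not Spark APIs.
final case class Conf(settings: Map[String, String]) {
  def get(key: String, default: String): String = settings.getOrElse(key, default)
}

object Conf {
  // A thread-local "active" conf, analogous in spirit to SQLConf.get.
  private val active = new ThreadLocal[Conf] {
    override def initialValue(): Conf = Conf(Map.empty)
  }
  def get: Conf = active.get()
  def setActive(c: Conf): Unit = active.set(c)
}

// Default behaviour: read whatever conf happens to be active on the calling thread.
trait ConfHelper {
  def conf: Conf = Conf.get
}

final case class Session(sessionConf: Conf)

// Mirrors the restored override: the node captures its session at construction time
// and always answers with that session's conf, whatever the calling thread has set.
final class PinnedNode(session: Session) extends ConfHelper {
  override def conf: Conf = session.sessionConf
}

object Demo extends App {
  val session = Session(Conf(Map("spark.sql.adaptive.enabled" -> "true")))
  val node = new PinnedNode(session)

  // Simulate a caller whose thread-local conf disagrees with the session's conf.
  Conf.setActive(Conf(Map("spark.sql.adaptive.enabled" -> "false")))
  val unpinned = new ConfHelper {}

  println(unpinned.conf.get("spark.sql.adaptive.enabled", "unset")) // false (thread-local)
  println(node.conf.get("spark.sql.adaptive.enabled", "unset"))     // true (pinned to its session)
}

Here `PinnedNode` plays the role of `AdaptiveSparkPlanExec`: whichever thread calls into it, it reports the conf of the session it was created from.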
HadoopTableReader.scala

@@ -43,6 +43,7 @@ import org.apache.spark.sql.catalyst.{InternalRow, SQLConfHelper}
 import org.apache.spark.sql.catalyst.analysis.CastSupport
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.unsafe.types.UTF8String
 import org.apache.spark.util.{SerializableConfiguration, Utils}
 
@@ -88,6 +89,8 @@ class HadoopTableReader(
   private val _broadcastedHadoopConf =
     sparkSession.sparkContext.broadcast(new SerializableConfiguration(hadoopConf))
 
+  override def conf: SQLConf = sparkSession.sessionState.conf
+
   override def makeRDDForTable(hiveTable: HiveTable): RDD[InternalRow] =
     makeRDDForTable(
       hiveTable,
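The restored override here has the same motivation as in `AdaptiveSparkPlanExec`: the reader answers `conf` with the conf of the `sparkSession` it was built from rather than a thread-local default. The context lines in this hunk also show the reader broadcasting its Hadoop configuration through `SerializableConfiguration`; Hadoop's `Configuration` is not `java.io.Serializable`, so Spark wraps it in a small helper that performs the serialization itself. A rough, self-contained sketch of that wrapping idea, with a hypothetical `FakeHadoopConf` and `SerializableConf` standing in for the real classes:

import java.io._

// Hypothetical stand-in for Hadoop's Configuration: mutable, Writable-like, NOT Serializable.
final class FakeHadoopConf {
  private val props = scala.collection.mutable.Map[String, String]()
  def set(k: String, v: String): Unit = { props(k) = v }
  def get(k: String): Option[String] = props.get(k)
  // Writable-style persistence: the object knows how to write and re-read itself.
  def write(out: DataOutput): Unit = {
    out.writeInt(props.size)
    props.foreach { case (k, v) => out.writeUTF(k); out.writeUTF(v) }
  }
  def readFields(in: DataInput): Unit = {
    props.clear()
    (0 until in.readInt()).foreach { _ => props(in.readUTF()) = in.readUTF() }
  }
}

// Sketch of the wrapper idea: keep the field transient and delegate Java serialization
// to the wrapped object's own write/readFields methods.
class SerializableConf(@transient var value: FakeHadoopConf) extends Serializable {
  private def writeObject(out: ObjectOutputStream): Unit = {
    out.defaultWriteObject()
    value.write(out)
  }
  private def readObject(in: ObjectInputStream): Unit = {
    in.defaultReadObject()
    value = new FakeHadoopConf
    value.readFields(in)
  }
}

object RoundTripDemo extends App {
  val conf = new FakeHadoopConf
  conf.set("fs.defaultFS", "hdfs://example:9000")

  // Serialize the wrapper to bytes, roughly what shipping it in a broadcast involves...
  val bytes = new ByteArrayOutputStream()
  val oos = new ObjectOutputStream(bytes)
  oos.writeObject(new SerializableConf(conf))
  oos.close()

  // ...and read it back on the "other side".
  val back = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
    .readObject().asInstanceOf[SerializableConf]
  println(back.value.get("fs.defaultFS")) // Some(hdfs://example:9000)
}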
