
Cloned hiveconf for each TableScanOperator so that only required columns are added

gvramana committed Oct 8, 2014
1 parent 70e824f · commit 6a93459
Showing 2 changed files with 10 additions and 5 deletions.
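In short, what this changes: each HiveTableScan used to record its required columns by mutating the single shared context.hiveconf, and because Hive's column-projection helpers append to the conf rather than replace it, successive scans accumulated one another's columns and read more data than needed. Cloning the conf per scan operator keeps each scan's column list private. Below is a minimal sketch of the failure mode, assuming a simplified stand-in for Hive's ColumnProjectionUtils append behavior; the key name is Hive's real hive.io.file.readcolumn.names, but the helper and column values are illustrative only, not the commit's code.

    import org.apache.hadoop.hive.conf.HiveConf

    object SharedConfPitfall extends App {
      val READ_COLS = "hive.io.file.readcolumn.names"

      // Simplified stand-in for Hive's ColumnProjectionUtils: it appends newly
      // required column names onto whatever the conf already holds.
      def appendReadColumns(conf: HiveConf, cols: Seq[String]): Unit = {
        val old = conf.get(READ_COLS, "")
        conf.set(READ_COLS, if (old.isEmpty) cols.mkString(",") else old + "," + cols.mkString(","))
      }

      // Before this commit: every scan appends to the one shared conf.
      val shared = new HiveConf()
      appendReadColumns(shared, Seq("name", "age"))  // scan over table A
      appendReadColumns(shared, Seq("price"))        // scan over table B
      println(shared.get(READ_COLS))                 // name,age,price: B now reads A's columns too

      // After this commit: each scan appends to its own clone of a clean session conf.
      val session = new HiveConf()
      val scanAConf = new HiveConf(session)          // copy constructor, as in the diff
      val scanBConf = new HiveConf(session)
      appendReadColumns(scanAConf, Seq("name", "age"))
      appendReadColumns(scanBConf, Seq("price"))
      println(scanBConf.get(READ_COLS))              // price: only B's required columns
    }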
@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{Path, PathFilter}
+import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants._
 import org.apache.hadoop.hive.ql.exec.Utilities
 import org.apache.hadoop.hive.ql.metadata.{Partition => HivePartition, Table => HiveTable}
@@ -52,8 +53,9 @@ private[hive]
 class HadoopTableReader(
     @transient attributes: Seq[Attribute],
     @transient relation: MetastoreRelation,
-    @transient sc: HiveContext)
-  extends TableReader {
+    @transient sc: HiveContext,
+    @transient hiveExtraConf: HiveConf)
+  extends TableReader {
 
   // Choose the minimum number of splits. If mapred.map.tasks is set, then use that unless
   // it is smaller than what Spark suggests.
@@ -63,7 +65,7 @@ class HadoopTableReader(
   // TODO: set aws s3 credentials.
 
   private val _broadcastedHiveConf =
-    sc.sparkContext.broadcast(new SerializableWritable(sc.hiveconf))
+    sc.sparkContext.broadcast(new SerializableWritable(hiveExtraConf))
 
   def broadcastedHiveConf = _broadcastedHiveConf
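The hunks above make HadoopTableReader take the per-scan conf and broadcast it in place of the shared sc.hiveconf. Hadoop's Configuration is Writable but not java.io.Serializable, which is why the value goes through Spark's SerializableWritable wrapper (a public helper in the Spark 1.x line this commit targets) before the broadcast. A minimal sketch of that pattern, assuming local mode and a placeholder conf rather than the commit's actual fields:

    import org.apache.hadoop.hive.conf.HiveConf
    import org.apache.spark.{SerializableWritable, SparkConf, SparkContext}

    object BroadcastConfSketch extends App {
      val sc = new SparkContext(new SparkConf().setMaster("local[1]").setAppName("conf-broadcast"))

      val hiveExtraConf = new HiveConf()  // stands in for the per-scan clone
      // Configuration is not Serializable, so wrap it before broadcasting:
      val broadcasted = sc.broadcast(new SerializableWritable(hiveExtraConf))

      // On an executor the conf is recovered with two .value hops:
      sc.parallelize(1 to 2).foreach { _ =>
        val conf = broadcasted.value.value  // Broadcast -> SerializableWritable -> HiveConf
        require(conf != null)
      }
      sc.stop()
    }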
@@ -65,7 +65,10 @@ case class HiveTableScan(
   }
 
   @transient
-  private[this] val hadoopReader = new HadoopTableReader(attributes, relation, context)
+  private[this] val hiveExtraConf = new HiveConf(context.hiveconf)
+
+  @transient
+  private[this] val hadoopReader = new HadoopTableReader(attributes, relation, context, hiveExtraConf)
 
   private[this] def castFromString(value: String, dataType: DataType) = {
     Cast(Literal(value), dataType).eval(null)
@@ -97,7 +100,7 @@
     hiveConf.set(serdeConstants.LIST_COLUMNS, relation.attributes.map(_.name).mkString(","))
   }
 
-  addColumnMetadataToConf(context.hiveconf)
+  addColumnMetadataToConf(hiveExtraConf)
 
   /**
    * Prunes partitions not involve the query plan.
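On the HiveTableScan side, the scan now builds its own hiveExtraConf clone up front, addColumnMetadataToConf mutates that clone, and the clone is what the HadoopTableReader broadcasts. A condensed, hypothetical view of the wiring (names mirror the diff; the real addColumnMetadataToConf also records column IDs and types, not just serdeConstants.LIST_COLUMNS):

    import org.apache.hadoop.hive.conf.HiveConf

    // Bodies are heavily condensed; only the conf-handling shape is shown.
    class HiveTableScanSketch(sessionConf: HiveConf, columnNames: Seq[String]) {
      // Per-scan clone: mutations below never touch sessionConf.
      private[this] val hiveExtraConf = new HiveConf(sessionConf)

      private[this] def addColumnMetadataToConf(hiveConf: HiveConf): Unit = {
        // serdeConstants.LIST_COLUMNS resolves to the plain key "columns"
        hiveConf.set("columns", columnNames.mkString(","))
      }

      addColumnMetadataToConf(hiveExtraConf)
      // hiveExtraConf, not sessionConf, is then passed to the HadoopTableReader.
    }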
