From 1f26805e714049e83ed9399c8ca8361da459777c Mon Sep 17 00:00:00 2001
From: chirag
Date: Fri, 10 Oct 2014 10:28:35 +0530
Subject: [PATCH] SPARK-3807: SparkSql does not work for tables created using
 custom serde (Incorporated Review Comments)

---
 .../apache/spark/sql/hive/execution/HiveTableScan.scala | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
index ee6b8fd770346..1d13d35a35ad1 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
@@ -80,15 +80,14 @@ case class HiveTableScan(
     ColumnProjectionUtils.appendReadColumnIDs(hiveConf, neededColumnIDs)
     ColumnProjectionUtils.appendReadColumnNames(hiveConf, attributes.map(_.name))
 
-    val td = relation.tableDesc
-    val deClass = td.getDeserializerClass;
-    val de = deClass.newInstance();
-    de.initialize(hiveConf, td.getProperties);
+    val tableDesc = relation.tableDesc
+    val deserializer = tableDesc.getDeserializerClass.newInstance
+    deserializer.initialize(hiveConf, tableDesc.getProperties)
 
     // Specifies types and object inspectors of columns to be scanned.
     val structOI = ObjectInspectorUtils
       .getStandardObjectInspector(
-        de.getObjectInspector,
+        deserializer.getObjectInspector,
         ObjectInspectorCopyOption.JAVA)
       .asInstanceOf[StructObjectInspector]
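
Note (editorial, not part of the patch): the sketch below restates the refactored pattern above as a standalone Scala helper, assuming Hive's serde2 API (TableDesc, Deserializer, ObjectInspectorUtils). The object and method names are hypothetical and chosen only for illustration.

    import org.apache.hadoop.hive.conf.HiveConf
    import org.apache.hadoop.hive.ql.plan.TableDesc
    import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspectorUtils, StructObjectInspector}
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption

    // Hypothetical helper mirroring the pattern in the patch: instantiate the
    // table's configured deserializer (which may be a custom SerDe), initialize
    // it with the table properties, and derive a standard StructObjectInspector
    // describing the rows to be scanned.
    object DeserializerSketch {
      def standardStructOI(tableDesc: TableDesc, hiveConf: HiveConf): StructObjectInspector = {
        // Build the deserializer class declared for this table and let it read
        // the table-level properties (SerDe parameters, column names/types, etc.).
        val deserializer = tableDesc.getDeserializerClass.newInstance
        deserializer.initialize(hiveConf, tableDesc.getProperties)

        // Convert the SerDe-specific object inspector into a standard struct inspector.
        ObjectInspectorUtils
          .getStandardObjectInspector(
            deserializer.getObjectInspector,
            ObjectInspectorCopyOption.JAVA)
          .asInstanceOf[StructObjectInspector]
      }
    }

Using the table's own deserializer class here (rather than a hard-coded one) is what allows tables created with a custom SerDe to be scanned.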