
Commit 0d4642a

committed Sun Jul 17 15:12:24 PDT 2016
1 parent 98d6d74 commit 0d4642a

2 files changed: +3 -3 lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala

Lines changed: 1 addition & 1 deletion
@@ -219,7 +219,7 @@ private[sql] case class RowDataSourceScanExec(
   private[sql] override lazy val metrics =
     Map("numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))

-  val outputUnsafeRows = relation match {
+  private val outputUnsafeRows = relation match {
     case r: HadoopFsRelation if r.fileFormat.isInstanceOf[ParquetSource] =>
       !SparkSession.getActiveSession.get.sessionState.conf.getConf(
         SQLConf.PARQUET_VECTORIZED_READER_ENABLED)
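
The field being narrowed to private is, per the diff, the negation of the vectorized-Parquet-reader flag for Parquet-backed HadoopFsRelations; its name suggests it controls whether the scan produces UnsafeRows. A minimal sketch of toggling that flag from user code, assuming a local SparkSession and assuming (not taken from this diff) that SQLConf.PARQUET_VECTORIZED_READER_ENABLED corresponds to the key spark.sql.parquet.enableVectorizedReader:

import org.apache.spark.sql.SparkSession

object ToggleVectorizedReader {
  def main(args: Array[String]): Unit = {
    // Local session purely for illustration.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("toggle-vectorized-reader")
      .getOrCreate()

    // Assumed key for SQLConf.PARQUET_VECTORIZED_READER_ENABLED; disabling it
    // steers Parquet scans onto the row-based reader path the diff branches on.
    spark.conf.set("spark.sql.parquet.enableVectorizedReader", "false")
    println(spark.conf.get("spark.sql.parquet.enableVectorizedReader"))

    spark.stop()
  }
}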

sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala

Lines changed: 2 additions & 2 deletions
@@ -862,8 +862,8 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
         .load(path)

       val Some(fileScanRDD) = df2.queryExecution.executedPlan.collectFirst {
-        case scan: DataSourceScanExec if scan.rdd.isInstanceOf[FileScanRDD] =>
-          scan.rdd.asInstanceOf[FileScanRDD]
+        case scan: DataSourceScanExec if scan.inputRDDs().head.isInstanceOf[FileScanRDD] =>
+          scan.inputRDDs().head.asInstanceOf[FileScanRDD]
       }

       val partitions = fileScanRDD.partitions
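
The test now reaches the FileScanRDD through DataSourceScanExec.inputRDDs() instead of a direct rdd field. A minimal sketch of that lookup pattern, assuming Spark 2.0-era internals; DataSourceScanExec and FileScanRDD are internal APIs, so the sketch assumes it is compiled inside the org.apache.spark.sql package (as the test itself is), and df is a hypothetical DataFrame backed by a file-based source:

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.execution.DataSourceScanExec
import org.apache.spark.sql.execution.datasources.FileScanRDD

object FileScanRDDLookup {
  // Walks the executed physical plan and returns the first FileScanRDD feeding
  // a scan node, mirroring the pattern the test switches to in this commit.
  def collectFileScanRDD(df: DataFrame): Option[FileScanRDD] =
    df.queryExecution.executedPlan.collectFirst {
      case scan: DataSourceScanExec if scan.inputRDDs().head.isInstanceOf[FileScanRDD] =>
        scan.inputRDDs().head.asInstanceOf[FileScanRDD]
    }
}

Once found, fileScanRDD.partitions exposes the planned file partitions, which is what the surrounding test goes on to inspect.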
