Skip to content
Prev Previous commit
Next Next commit
revise
  • Loading branch information
gengliangwang committed Apr 12, 2018
commit 71d98ed9d6af9304fcaee6c9be966e4a2fdcb5b0
Original file line number Diff line number Diff line change
Expand Up @@ -127,10 +127,6 @@ case class DataSource(
* columns.
*/
private def getOrInferFileFormatSchema(format: FileFormat): (StructType, StructType) = {
// the operations below are expensive therefore try not to do them if we don't need to, e.g.,
// in streaming mode, we have already inferred and registered partition columns, we will
// never have to materialize the lazy val below

val partitionSchema = if (partitionColumns.isEmpty) {
// Try to infer partitioning, because no DataSource in the read path provides the partitioning
// columns properly unless it is a Hive DataSource
Expand Down Expand Up @@ -195,7 +191,12 @@ case class DataSource(
(dataSchema, partitionSchema)
}

/** An [[InMemoryFileIndex]] that can be used to get partition schema and file list. */
/**
 * An [[InMemoryFileIndex]] that can be used to get partition schema and file list.
 * The operations below are expensive; therefore, try not to do them if we don't need to.
 * E.g., in streaming mode we have already inferred and registered partition columns,
 * so we will never have to materialize this lazy val.
 */
private lazy val inMemoryFileIndex: InMemoryFileIndex = {
val globbedPaths =
checkAndGlobPathIfNecessary(checkEmptyGlobPath = false, checkFilesExist = false)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -99,16 +99,6 @@ class PartitionedTablePerfStatsSuite
}
}

/**
 * Verifies how many files have been discovered so far via the Hive catalog metrics.
 * For data source tables, all the files should be parsed once for creating the file
 * index, which accounts for a fixed number of extra discoveries.
 */
private def checkFilesDiscovered(isDatasourceTable: Boolean, count: Int): Unit = {
  // NOTE(review): the +5 offset presumably matches the fixture's file count parsed
  // during index creation — confirm against the test data setup.
  val expected = if (isDatasourceTable) count + 5 else count
  assert(HiveCatalogMetrics.METRIC_FILES_DISCOVERED.getCount() == expected)
}

genericTest("partitioned pruned table reports only selected files") { spec =>
assert(spark.sqlContext.getConf(HiveUtils.CONVERT_METASTORE_PARQUET.key) == "true")
withTable("test") {
Expand Down