[SPARK-18911] [SQL] Define CatalogStatistics to interact with metastore and convert it to Statistics in relations #16323
Changes from all commits: f9db620, 72a16e5, 5dbaade, bd5eacc, d3227dc, 573b560, 978bb11
@@ -21,8 +21,8 @@ import java.util.Date
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow, TableIdentifier}
-import org.apache.spark.sql.catalyst.expressions.{Attribute, Cast, Literal}
-import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Statistics}
+import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeMap, Cast, Literal}
+import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.util.quoteIdentifier
 import org.apache.spark.sql.types.{StructField, StructType}
@@ -161,7 +161,7 @@ case class CatalogTable(
     createTime: Long = System.currentTimeMillis,
     lastAccessTime: Long = -1,
     properties: Map[String, String] = Map.empty,
-    stats: Option[Statistics] = None,
+    stats: Option[CatalogStatistics] = None,
     viewOriginalText: Option[String] = None,
     viewText: Option[String] = None,
     comment: Option[String] = None,
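With this change, `CatalogTable` stores metastore-level statistics as `CatalogStatistics`, and relations convert them to plan-level `Statistics` on demand. As a rough sketch of that conversion path (the `relationStats` helper and the fallback size are made up for illustration, not the PR's actual relation code):

```scala
import org.apache.spark.sql.catalyst.catalog.CatalogTable
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.Statistics

// Hypothetical helper: derive plan-level Statistics for a relation that is
// backed by a CatalogTable, preferring the statistics stored in the metastore.
def relationStats(table: CatalogTable, output: Seq[Attribute]): Statistics =
  table.stats
    .map(_.toPlanStats(output))                                  // catalog stats, if collected
    .getOrElse(Statistics(sizeInBytes = BigInt(Long.MaxValue)))  // conservative fallback guess
```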
@@ -237,6 +237,34 @@
   }
 
+/**
+ * This class of statistics is used in [[CatalogTable]] to interact with metastore.
+ * We define this new class instead of directly using [[Statistics]] here because there are no
+ * concepts of attributes or broadcast hint in catalog.
+ */
+case class CatalogStatistics(
+    sizeInBytes: BigInt,
+    rowCount: Option[BigInt] = None,
+    colStats: Map[String, ColumnStat] = Map.empty) {
+
+  /**
+   * Convert [[CatalogStatistics]] to [[Statistics]], and match column stats to attributes based
+   * on column names.
+   */
+  def toPlanStats(planOutput: Seq[Attribute]): Statistics = {
+    val matched = planOutput.flatMap(a => colStats.get(a.name).map(a -> _))
+    Statistics(sizeInBytes = sizeInBytes, rowCount = rowCount,
+      attributeStats = AttributeMap(matched))
+  }
+
+  /** Readable string representation for the CatalogStatistics. */
+  def simpleString: String = {
Contributor: Why do you define a `simpleString`?

Contributor (Author): Because we don't print column stats in it, it's not a "complete" string representation. Column stats can be too much and make `CatalogTable` unreadable.
+    val rowCountString = if (rowCount.isDefined) s", ${rowCount.get} rows" else ""
+    s"$sizeInBytes bytes$rowCountString"
+  }
+}
+
 case class CatalogTableType private(name: String)
 object CatalogTableType {
   val EXTERNAL = new CatalogTableType("EXTERNAL")
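To make the name-based matching in `toPlanStats` concrete, here is a small, hypothetical sketch (the attribute name and the statistics values are invented, and column-level stats are left empty to keep it short):

```scala
import org.apache.spark.sql.catalyst.catalog.CatalogStatistics
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.types.IntegerType

// A made-up output attribute of some relation.
val key = AttributeReference("key", IntegerType)()

// Catalog-level stats as they would be stored in CatalogTable.stats: keyed by
// column name, with no notion of attributes or broadcast hint.
val catalogStats = CatalogStatistics(
  sizeInBytes = BigInt(1024),
  rowCount = Some(BigInt(10)),
  colStats = Map.empty)

// toPlanStats matches colStats entries to the relation's output attributes by
// name and wraps them in an AttributeMap for the plan-level Statistics.
val planStats = catalogStats.toPlanStats(Seq(key))
// planStats.sizeInBytes == 1024, planStats.rowCount == Some(10)
```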
@@ -41,13 +41,13 @@ import org.apache.spark.sql.types._
  * @param sizeInBytes Physical size in bytes. For leaf operators this defaults to 1, otherwise it
  *                    defaults to the product of children's `sizeInBytes`.
  * @param rowCount Estimated number of rows.
- * @param colStats Column-level statistics.
+ * @param attributeStats Statistics for Attributes.
  * @param isBroadcastable If true, output is small enough to be used in a broadcast join.
  */
 case class Statistics(
     sizeInBytes: BigInt,
     rowCount: Option[BigInt] = None,
-    colStats: Map[String, ColumnStat] = Map.empty,
+    attributeStats: AttributeMap[ColumnStat] = AttributeMap(Nil),
Member: Will we estimate statistics for all attributes in the logical plan? I mean, what if an attribute does not come from a leaf node but from a later plan? If not, I think we don't need to call this parameter `attributeStats`.

Contributor (Author): We will estimate attributes in logical plan from the bottom up.
     isBroadcastable: Boolean = false) {
 
   override def toString: String = "Statistics(" + simpleString + ")"
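A brief, hypothetical illustration of why an `AttributeMap` (keyed by the attribute's `exprId`) is preferable here to a name-keyed `Map[String, ColumnStat]`: two attributes can share a name (for example after a self-join) and still be kept apart. The string values below are placeholders standing in for `ColumnStat`:

```scala
import org.apache.spark.sql.catalyst.expressions.{AttributeMap, AttributeReference}
import org.apache.spark.sql.types.IntegerType

// Two distinct attributes that happen to share a name: each gets its own exprId.
val left  = AttributeReference("a", IntegerType)()
val right = AttributeReference("a", IntegerType)()

// An AttributeMap can hold separate entries for both, while a Map keyed by the
// column name "a" could only keep one of them.
val stats = AttributeMap(Seq(left -> "stats of left 'a'", right -> "stats of right 'a'"))
assert(stats(left)  == "stats of left 'a'")
assert(stats(right) == "stats of right 'a'")
```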
Reviewer: Can you add a few words explaining why we don't use `Statistics` for `CatalogTable`?

Author: ok