Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
37 commits
Select commit Hold shift + click to select a range
04959c2
refactor analyzer adding a new object
anchovYu Nov 23, 2022
6f44c85
lca code
anchovYu Nov 23, 2022
725e5ac
add tests, refine logic
anchovYu Nov 28, 2022
660e1d2
move lca rule to a new file
anchovYu Nov 28, 2022
fd06094
rename conf
anchovYu Nov 28, 2022
7d4f80f
test failure
anchovYu Nov 29, 2022
b9704d5
small fix
anchovYu Nov 29, 2022
777f13a
temp commit, still in implementation
anchovYu Nov 29, 2022
09480ea
a temporary solution, but it still fails certain cases
anchovYu Nov 30, 2022
c972738
working solution, needs some refinement
anchovYu Dec 1, 2022
97ee293
Merge remote-tracking branch 'apache/master' into SPARK-27561-refactor
anchovYu Dec 1, 2022
5785943
make changes to accommodate the recent refactor
anchovYu Dec 2, 2022
757cffb
introduce leaf exp in Project as well
anchovYu Dec 5, 2022
29de892
handle a corner case
anchovYu Dec 5, 2022
72991c6
add more tests; add check rule
anchovYu Dec 6, 2022
d45fe31
uplift the necessity to resolve expression in second phase; add more …
anchovYu Dec 8, 2022
1f55f73
address comments to add tests for LCA off
anchovYu Dec 8, 2022
f753529
revert the refactor, split LCA into two rules
anchovYu Dec 9, 2022
b9f706f
better refactor
anchovYu Dec 9, 2022
94d5c9e
address comments
anchovYu Dec 9, 2022
d2e75fd
Merge branch 'SPARK-27561-refactor' into SPARK-27561-agg
anchovYu Dec 9, 2022
edde37c
basic version passing all tests
anchovYu Dec 9, 2022
fb7b18c
update the logic, add and refactor tests
anchovYu Dec 12, 2022
3698cff
update comments
anchovYu Dec 13, 2022
e700d6a
add a corner case comment
anchovYu Dec 13, 2022
8d20986
address comments
anchovYu Dec 13, 2022
d952aa7
Merge branch 'SPARK-27561-refactor' into SPARK-27561-agg
anchovYu Dec 13, 2022
44d5a3d
Merge remote-tracking branch 'apache/master' into SPARK-27561-agg
anchovYu Dec 13, 2022
ccebc1c
revert some changes
anchovYu Dec 13, 2022
5540b70
fix few todos
anchovYu Dec 13, 2022
338ba11
Merge remote-tracking branch 'apache/master' into SPARK-27561-agg
anchovYu Dec 16, 2022
136a930
fix the failing test
anchovYu Dec 16, 2022
5076ad2
fix the missing_aggregate issue, turn on conf to see failed tests
anchovYu Dec 19, 2022
2f2dee5
remove few todos
anchovYu Dec 19, 2022
3a5509a
better fix to maintain aggregate error: only lift up in certain cases
anchovYu Dec 20, 2022
a23debb
Merge remote-tracking branch 'apache/master' into SPARK-27561-agg
anchovYu Dec 20, 2022
b200da0
typo
anchovYu Dec 20, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
revert some changes
  • Loading branch information
anchovYu committed Dec 13, 2022
commit ccebc1c46ac2e05074227a2a37b82ae33f1fe783
Original file line number Diff line number Diff line change
Expand Up @@ -469,6 +469,70 @@ class Analyzer(override val catalogManager: CatalogManager)
}
}

/**
 * Replaces [[UnresolvedAlias]]s with concrete aliases.
 */
object ResolveAliases extends Rule[LogicalPlan] {
  // Metadata flag marking an alias whose name was generated by the analyzer
  // rather than written explicitly by the user.
  private def autoGeneratedAliasMetadata = {
    new MetadataBuilder()
      .putString("__autoGeneratedAlias", "true")
      .build()
  }

  // Rewrites every UnresolvedAlias in `exprs` into a concrete Alias/MultiAlias,
  // leaving still-unresolved children untouched for a later analyzer pass.
  private def assignAliases(exprs: Seq[NamedExpression]) = {
    // True iff the tree consists solely of value extractions over
    // attributes and literals.
    def isExtractOnly(expr: Expression): Boolean = expr match {
      case _: ExtractValue => expr.children.forall(isExtractOnly)
      case _: Literal | _: Attribute => true
      case _ => false
    }
    val aliased = exprs.map(_.transformUpWithPruning(_.containsPattern(UNRESOLVED_ALIAS)) {
      case u @ UnresolvedAlias(child, optGenAliasFunc) =>
        // NOTE: the ordering of these cases is significant and must not change.
        child match {
          // Already carries a name: keep as-is.
          case ne: NamedExpression => ne
          case go @ GeneratorOuter(g: Generator) if g.resolved => MultiAlias(go, Nil)
          // Unresolved child: keep the UnresolvedAlias so a later pass retries.
          case e if !e.resolved => u
          case g: Generator => MultiAlias(g, Nil)
          // A cast over a named expression inherits the underlying name.
          case c @ Cast(ne: NamedExpression, _, _, _) => Alias(c, ne.name)()
          case e: ExtractValue =>
            // Pure extractions keep a plain alias; anything more complex is
            // tagged as auto-generated so downstream rules can tell.
            val metadata = if (isExtractOnly(e)) None else Some(autoGeneratedAliasMetadata)
            Alias(e, toPrettySQL(e))(explicitMetadata = metadata)
          // A caller-supplied naming function wins for the remaining cases.
          case e if optGenAliasFunc.isDefined =>
            Alias(child, optGenAliasFunc.get.apply(e))()
          case l: Literal => Alias(l, toPrettySQL(l))()
          case e =>
            Alias(e, toPrettySQL(e))(explicitMetadata = Some(autoGeneratedAliasMetadata))
        }
      }
    )
    aliased.asInstanceOf[Seq[NamedExpression]]
  }

  // Whether any expression subtree still contains an UnresolvedAlias.
  private def hasUnresolvedAlias(exprs: Seq[NamedExpression]) =
    exprs.exists(_.exists(_.isInstanceOf[UnresolvedAlias]))

  // Assigns aliases in operators whose expression lists may carry
  // UnresolvedAlias nodes, but only once their children are resolved.
  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUpWithPruning(
    _.containsPattern(UNRESOLVED_ALIAS), ruleId) {
    case Aggregate(groups, aggs, child) if child.resolved && hasUnresolvedAlias(aggs) =>
      Aggregate(groups, assignAliases(aggs), child)

    case Pivot(groupByOpt, pivotColumn, pivotValues, aggregates, child)
        if child.resolved && groupByOpt.isDefined && hasUnresolvedAlias(groupByOpt.get) =>
      Pivot(Some(assignAliases(groupByOpt.get)), pivotColumn, pivotValues, aggregates, child)

    case up: Unpivot if up.child.resolved &&
        (up.ids.exists(hasUnresolvedAlias) || up.values.exists(_.exists(hasUnresolvedAlias))) =>
      up.copy(ids = up.ids.map(assignAliases), values = up.values.map(_.map(assignAliases)))

    case Project(projectList, child) if child.resolved && hasUnresolvedAlias(projectList) =>
      Project(assignAliases(projectList), child)

    case c: CollectMetrics if c.child.resolved && hasUnresolvedAlias(c.metrics) =>
      c.copy(metrics = assignAliases(c.metrics))
  }
}

object ResolveGroupingAnalytics extends Rule[LogicalPlan] {
private[analysis] def hasGroupingFunction(e: Expression): Boolean = {
e.exists (g => g.isInstanceOf[Grouping] || g.isInstanceOf[GroupingID])
Expand Down Expand Up @@ -4209,67 +4273,3 @@ object RemoveTempResolvedColumn extends Rule[LogicalPlan] {
}
}

/**
 * Replaces [[UnresolvedAlias]]s with concrete aliases.
 */
object ResolveAliases extends Rule[LogicalPlan] {
  // Metadata flag marking an alias whose name was auto-generated by the
  // analyzer, as opposed to an alias the user wrote explicitly.
  def metaForAutoGeneratedAlias: Metadata = {
    new MetadataBuilder()
      .putString("__autoGeneratedAlias", "true")
      .build()
  }

  /**
   * Converts each [[UnresolvedAlias]] in `exprs` into a concrete
   * [[Alias]] or [[MultiAlias]]. Children that are not yet resolved are left
   * wrapped in their [[UnresolvedAlias]] so a later analyzer pass can retry.
   */
  def assignAliases(exprs: Seq[NamedExpression]): Seq[NamedExpression] = {
    // True iff the tree consists solely of value extractions over
    // attributes and literals.
    def extractOnly(e: Expression): Boolean = e match {
      case _: ExtractValue => e.children.forall(extractOnly)
      case _: Literal => true
      case _: Attribute => true
      case _ => false
    }
    exprs.map(_.transformUpWithPruning(_.containsPattern(UNRESOLVED_ALIAS)) {
      case u @ UnresolvedAlias(child, optGenAliasFunc) =>
        // NOTE: the ordering of these cases is significant.
        child match {
          // Already carries a name: keep as-is.
          case ne: NamedExpression => ne
          case go @ GeneratorOuter(g: Generator) if g.resolved => MultiAlias(go, Nil)
          // Unresolved child: keep the UnresolvedAlias for a later pass.
          case e if !e.resolved => u
          case g: Generator => MultiAlias(g, Nil)
          // A cast over a named expression inherits the underlying name.
          case c @ Cast(ne: NamedExpression, _, _, _) => Alias(c, ne.name)()
          case e: ExtractValue =>
            if (extractOnly(e)) {
              Alias(e, toPrettySQL(e))()
            } else {
              // Tag the generated name so downstream rules can distinguish it
              // from a user-specified alias.
              Alias(e, toPrettySQL(e))(explicitMetadata = Some(metaForAutoGeneratedAlias))
            }
          // A caller-supplied naming function wins for the remaining cases.
          case e if optGenAliasFunc.isDefined =>
            Alias(child, optGenAliasFunc.get.apply(e))()
          case l: Literal => Alias(l, toPrettySQL(l))()
          case e =>
            Alias(e, toPrettySQL(e))(explicitMetadata = Some(metaForAutoGeneratedAlias))
        }
      }
    ).asInstanceOf[Seq[NamedExpression]]
  }

  // Whether any expression subtree still contains an UnresolvedAlias.
  private def hasUnresolvedAlias(exprs: Seq[NamedExpression]) =
    exprs.exists(_.exists(_.isInstanceOf[UnresolvedAlias]))

  // Assigns aliases in operators whose expression lists may contain
  // UnresolvedAlias nodes, but only once their children are resolved.
  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUpWithPruning(
    _.containsPattern(UNRESOLVED_ALIAS), ruleId) {
    case Aggregate(groups, aggs, child) if child.resolved && hasUnresolvedAlias(aggs) =>
      Aggregate(groups, assignAliases(aggs), child)

    case Pivot(groupByOpt, pivotColumn, pivotValues, aggregates, child)
        if child.resolved && groupByOpt.isDefined && hasUnresolvedAlias(groupByOpt.get) =>
      Pivot(Some(assignAliases(groupByOpt.get)), pivotColumn, pivotValues, aggregates, child)

    case up: Unpivot if up.child.resolved &&
        (up.ids.exists(hasUnresolvedAlias) || up.values.exists(_.exists(hasUnresolvedAlias))) =>
      up.copy(ids = up.ids.map(assignAliases), values = up.values.map(_.map(assignAliases)))

    case Project(projectList, child) if child.resolved && hasUnresolvedAlias(projectList) =>
      Project(assignAliases(projectList), child)

    case c: CollectMetrics if c.child.resolved && hasUnresolvedAlias(c.metrics) =>
      c.copy(metrics = assignAliases(c.metrics))
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -446,7 +446,6 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB

groupingExprs.foreach(checkValidGroupingExprs)
aggregateExprs.foreach(checkValidAggregateExpression)
// TODO: if the Aggregate is resolved, it can't contain the LateralColumnAliasReference

case CollectMetrics(name, metrics, _) =>
if (name == null || name.isEmpty) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, LogicalPlan, Proj
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.catalyst.trees.TreeNodeTag
import org.apache.spark.sql.catalyst.trees.TreePattern.LATERAL_COLUMN_ALIAS_REFERENCE
import org.apache.spark.sql.catalyst.util.toPrettySQL
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.internal.SQLConf

Expand Down Expand Up @@ -101,6 +102,14 @@ object ResolveLateralColumnAliasReference extends Rule[LogicalPlan] {
*/
val NAME_PARTS_FROM_UNRESOLVED_ATTR = TreeNodeTag[Seq[String]]("name_parts_from_unresolved_attr")

private def assignAlias(expr: Expression): NamedExpression = {
expr match {
case ne: NamedExpression => ne
case e =>
Alias(e, toPrettySQL(e))()
}
}

override def apply(plan: LogicalPlan): LogicalPlan = {
if (!conf.getConf(SQLConf.LATERAL_COLUMN_ALIAS_IMPLICIT_ENABLED)) {
plan
Expand Down Expand Up @@ -172,24 +181,15 @@ object ResolveLateralColumnAliasReference extends Rule[LogicalPlan] {
lcaRef.nameParts, aggExpr)
}
}
val ne = expressionMap.getOrElseUpdate(
aggExpr.canonicalized,
ResolveAliases.assignAliases(Seq(UnresolvedAlias(aggExpr))).map {
// TODO temporarily clear the metadata for an issue found in test
case a: Alias => a.copy(a.child, a.name)(
a.exprId, a.qualifier, None, a.nonInheritableMetadataKeys)
case other => other
}.head)
val ne = expressionMap.getOrElseUpdate(aggExpr.canonicalized, assignAlias(aggExpr))
newAggExprs += ne
ne.toAttribute
case e if groupingExpressions.exists(_.semanticEquals(e)) =>
// TODO one concern here, is condition here be able to match all grouping
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I was surprised to find that this existing query fails to analyze:

select 1 + dept + 10 from $testTable group by dept + 10
-- error: [MISSING_AGGREGATION] The non-aggregating expression "dept" is based on columns which are not participating in the GROUP BY clause

It seems that in our checkAnalysis we don't canonicalize expressions before comparing them. The expression is structured as (1 + dept) + 10, so it can't match the grouping expression (dept + 10).

// expressions? For example, Agg [age + 10] [1 + age + 10], when transforming down,
// is it possible that (1 + age) + 10, so that it won't be able to match (age + 10)
// add a test.
val ne = expressionMap.getOrElseUpdate(
e.canonicalized,
ResolveAliases.assignAliases(Seq(UnresolvedAlias(e))).head)
val ne = expressionMap.getOrElseUpdate(e.canonicalized, assignAlias(e))
newAggExprs += ne
ne.toAttribute
}.asInstanceOf[NamedExpression]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ object RuleIdCollection {
"org.apache.spark.sql.catalyst.analysis.Analyzer$GlobalAggregates" ::
"org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveAggAliasInGroupBy" ::
"org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveAggregateFunctions" ::
"org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveAliases" ::
"org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveBinaryArithmetic" ::
"org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveDeserializer" ::
"org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveEncodersInUDF" ::
Expand Down Expand Up @@ -82,7 +83,6 @@ object RuleIdCollection {
"org.apache.spark.sql.catalyst.analysis.DeduplicateRelations" ::
"org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases" ::
"org.apache.spark.sql.catalyst.analysis.EliminateUnions" ::
"org.apache.spark.sql.catalyst.analysis.ResolveAliases" ::
"org.apache.spark.sql.catalyst.analysis.ResolveDefaultColumns" ::
"org.apache.spark.sql.catalyst.analysis.ResolveExpressionsWithNamePlaceholders" ::
"org.apache.spark.sql.catalyst.analysis.ResolveHints$ResolveCoalesceHints" ::
Expand Down