Commits (37)
04959c2  refactor analyzer adding a new object (anchovYu, Nov 23, 2022)
6f44c85  lca code (anchovYu, Nov 23, 2022)
725e5ac  add tests, refine logic (anchovYu, Nov 28, 2022)
660e1d2  move lca rule to a new file (anchovYu, Nov 28, 2022)
fd06094  rename conf (anchovYu, Nov 28, 2022)
7d4f80f  test failure (anchovYu, Nov 29, 2022)
b9704d5  small fix (anchovYu, Nov 29, 2022)
777f13a  temp commit, still in implementation (anchovYu, Nov 29, 2022)
09480ea  a temporary solution, but still fail certain cases (anchovYu, Nov 30, 2022)
c972738  working solution, needs some refinement (anchovYu, Dec 1, 2022)
97ee293  Merge remote-tracking branch 'apache/master' into SPARK-27561-refactor (anchovYu, Dec 1, 2022)
5785943  make changes to accomodate the recent refactor (anchovYu, Dec 2, 2022)
757cffb  introduce leaf exp in Project as well (anchovYu, Dec 5, 2022)
29de892  handle a corner case (anchovYu, Dec 5, 2022)
72991c6  add more tests; add check rule (anchovYu, Dec 6, 2022)
d45fe31  uplift the necessity to resolve expression in second phase; add more … (anchovYu, Dec 8, 2022)
1f55f73  address comments to add tests for LCA off (anchovYu, Dec 8, 2022)
f753529  revert the refactor, split LCA into two rules (anchovYu, Dec 9, 2022)
b9f706f  better refactor (anchovYu, Dec 9, 2022)
94d5c9e  address comments (anchovYu, Dec 9, 2022)
d2e75fd  Merge branch 'SPARK-27561-refactor' into SPARK-27561-agg (anchovYu, Dec 9, 2022)
edde37c  basic version passing all tests (anchovYu, Dec 9, 2022)
fb7b18c  update the logic, add and refactor tests (anchovYu, Dec 12, 2022)
3698cff  update comments (anchovYu, Dec 13, 2022)
e700d6a  add a corner case comment (anchovYu, Dec 13, 2022)
8d20986  address comments (anchovYu, Dec 13, 2022)
d952aa7  Merge branch 'SPARK-27561-refactor' into SPARK-27561-agg (anchovYu, Dec 13, 2022)
44d5a3d  Merge remote-tracking branch 'apache/master' into SPARK-27561-agg (anchovYu, Dec 13, 2022)
ccebc1c  revert some changes (anchovYu, Dec 13, 2022)
5540b70  fix few todos (anchovYu, Dec 13, 2022)
338ba11  Merge remote-tracking branch 'apache/master' into SPARK-27561-agg (anchovYu, Dec 16, 2022)
136a930  fix the failing test (anchovYu, Dec 16, 2022)
5076ad2  fix the missing_aggregate issue, turn on conf to see failed tests (anchovYu, Dec 19, 2022)
2f2dee5  remove few todos (anchovYu, Dec 19, 2022)
3a5509a  better fix to maintain aggregate error: only lift up in certain cases (anchovYu, Dec 20, 2022)
a23debb  Merge remote-tracking branch 'apache/master' into SPARK-27561-agg (anchovYu, Dec 20, 2022)
b200da0  typo (anchovYu, Dec 20, 2022)
@@ -1744,7 +1744,7 @@ class Analyzer(override val catalogManager: CatalogManager)
* the current plan is needed to first try resolving the attribute by its
* children
*/
-private def wrapLCARefHelper(
+private def wrapLCARef(
e: NamedExpression,
currentPlan: LogicalPlan,
aliasMap: CaseInsensitiveMap[Seq[AliasEntry]]): NamedExpression = {
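The scaladoc fragment above describes the resolution order behind this helper: an attribute is only treated as a lateral column alias after the current plan's children have had a chance to resolve it. A minimal sketch of that decision, using a hypothetical Entry stand-in for AliasEntry and a plain boolean in place of the real child-output check:

import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap

object WrapOrderSketch {
  // Hypothetical stand-in for the rule's AliasEntry.
  final case class Entry(aliasedName: String, index: Int)

  def shouldWrapAsLca(
      name: String,
      resolvableByChild: Boolean,
      aliasMap: CaseInsensitiveMap[Seq[Entry]]): Boolean = {
    // Mirrors the documented order: fall back to a lateral alias only when the
    // child plan cannot resolve the attribute itself.
    !resolvableByChild && aliasMap.contains(name)
  }

  def main(args: Array[String]): Unit = {
    val aliasMap = CaseInsensitiveMap(Map("one" -> Seq(Entry("one", 0))))
    println(shouldWrapAsLca("ONE", resolvableByChild = false, aliasMap)) // true (lookup is case-insensitive)
    println(shouldWrapAsLca("one", resolvableByChild = true, aliasMap))  // false (child wins)
  }
}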
@@ -1763,9 +1763,14 @@ class Analyzer(override val catalogManager: CatalogManager)
case _ => u
}
case o: OuterReference
-if aliasMap.contains(o.nameParts.map(_.head).getOrElse(o.name)) =>
+if aliasMap.contains(
+  o.getTagValue(ResolveLateralColumnAliasReference.NAME_PARTS_FROM_UNRESOLVED_ATTR)
+    .map(_.head)
+    .getOrElse(o.name)) =>
// handle OuterReference exactly same as UnresolvedAttribute
-val nameParts = o.nameParts.getOrElse(Seq(o.name))
+val nameParts = o
+  .getTagValue(ResolveLateralColumnAliasReference.NAME_PARTS_FROM_UNRESOLVED_ATTR)
+  .getOrElse(Seq(o.name))
val aliases = aliasMap.get(nameParts.head).get
aliases.size match {
case n if n > 1 =>
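The getTagValue call above is the read side of a set/get round trip: the name parts recorded when the OuterReference was created are recovered here, falling back to the expression's own name when the tag is absent. A small, self-contained sketch of that round trip, using a locally defined tag of the same shape (the attribute name below is illustrative):

import org.apache.spark.sql.catalyst.expressions.{AttributeReference, OuterReference}
import org.apache.spark.sql.catalyst.trees.TreeNodeTag
import org.apache.spark.sql.types.IntegerType

object NamePartsTagSketch {
  // Local stand-in with the same shape as NAME_PARTS_FROM_UNRESOLVED_ATTR.
  private val namePartsTag = TreeNodeTag[Seq[String]]("name_parts_from_unresolved_attr")

  def main(args: Array[String]): Unit = {
    val attr = AttributeReference("avg_salary", IntegerType)()
    val ref = OuterReference(attr)

    // The wrapping helper records the original (possibly multi-part) name on the node...
    ref.setTagValue(namePartsTag, Seq("avg_salary"))

    // ...and the rewrite side later reads it back, falling back to the plain name.
    val nameParts = ref.getTagValue(namePartsTag).getOrElse(Seq(ref.name))
    println(nameParts) // List(avg_salary)
  }
}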
@@ -1789,15 +1794,15 @@
var aliasMap = CaseInsensitiveMap(Map[String, Seq[AliasEntry]]())
val newProjectList = projectList.zipWithIndex.map {
case (a: Alias, idx) =>
-val lcaWrapped = wrapLCARefHelper(a, p, aliasMap).asInstanceOf[Alias]
+val lcaWrapped = wrapLCARef(a, p, aliasMap).asInstanceOf[Alias]
// Insert the LCA-resolved alias instead of the unresolved one into map. If it is
// resolved, it can be referenced as LCA by later expressions (chaining).
// Unresolved Alias is also added to the map to perform ambiguous name check, but
// only resolved alias can be LCA.
aliasMap = insertIntoAliasMap(lcaWrapped, idx, aliasMap)
lcaWrapped
case (e, _) =>
-wrapLCARefHelper(e, p, aliasMap)
+wrapLCARef(e, p, aliasMap)
}
p.copy(projectList = newProjectList)

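The comments in this block describe alias chaining: once an alias is LCA-resolved and inserted into the map, later expressions in the same SELECT list can reference it. A rough end-to-end usage sketch, assuming the flag behind SQLConf.LATERAL_COLUMN_ALIAS_IMPLICIT_ENABLED is exposed as spark.sql.lateralColumnAlias.enableImplicitResolution:

import org.apache.spark.sql.SparkSession

object LcaChainingDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").appName("lca-chaining").getOrCreate()
    // Assumed key for SQLConf.LATERAL_COLUMN_ALIAS_IMPLICIT_ENABLED.
    spark.conf.set("spark.sql.lateralColumnAlias.enableImplicitResolution", "true")
    spark.range(3).createOrReplaceTempView("t")

    // `two` references the lateral alias `one`, and `three` chains on `two`.
    spark.sql("SELECT id + 1 AS one, one + 1 AS two, two + 1 AS three FROM t").show()
    spark.stop()
  }
}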
@@ -644,8 +644,10 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB
projectList.foreach(_.transformDownWithPruning(
_.containsPattern(LATERAL_COLUMN_ALIAS_REFERENCE)) {
case lcaRef: LateralColumnAliasReference if p.resolved =>
-failUnresolvedAttribute(
-  p, UnresolvedAttribute(lcaRef.nameParts), "UNRESOLVED_COLUMN")
+throw SparkException.internalError("Resolved Project should not contain " +
+  s"any LateralColumnAliasReference.\nDebugging information: plan: $p",
+  context = lcaRef.origin.getQueryContext,
+  summary = lcaRef.origin.context.summary)
})

case j: Join if !j.duplicateResolved =>
@@ -730,8 +732,10 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB
aggList.foreach(_.transformDownWithPruning(
_.containsPattern(LATERAL_COLUMN_ALIAS_REFERENCE)) {
case lcaRef: LateralColumnAliasReference =>
-failUnresolvedAttribute(
-  agg, UnresolvedAttribute(lcaRef.nameParts), "UNRESOLVED_COLUMN")
+throw SparkException.internalError("Resolved Aggregate should not contain " +
+  s"any LateralColumnAliasReference.\nDebugging information: plan: $agg",
+  context = lcaRef.origin.getQueryContext,
+  summary = lcaRef.origin.context.summary)
})

case _ => // Analysis successful!
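Both blocks above encode the same invariant: once a Project or Aggregate is resolved, every LateralColumnAliasReference must already have been rewritten away, so a leftover reference indicates an analyzer bug (hence an internal error) rather than a user-facing unresolved-column error. A hypothetical standalone helper expressing that invariant; it uses only the single-argument SparkException.internalError overload and is not the actual CheckAnalysis code:

import org.apache.spark.SparkException
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.trees.TreePattern.LATERAL_COLUMN_ALIAS_REFERENCE

object LcaInvariant {
  // Fail fast if a supposedly fully-resolved expression still carries an LCA marker.
  def assertNoLeftoverLcaRef(e: Expression, planString: => String): Unit = {
    if (e.containsPattern(LATERAL_COLUMN_ALIAS_REFERENCE)) {
      throw SparkException.internalError(
        "Resolved plan should not contain any LateralColumnAliasReference.\n" +
          s"Debugging information: plan: $planString")
    }
  }
}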
@@ -21,6 +21,7 @@ import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeMap, Expressio
import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, LogicalPlan, Project}
import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.catalyst.trees.TreeNodeTag
import org.apache.spark.sql.catalyst.trees.TreePattern.LATERAL_COLUMN_ALIAS_REFERENCE
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.internal.SQLConf
@@ -93,6 +94,13 @@ import org.apache.spark.sql.internal.SQLConf
object ResolveLateralColumnAliasReference extends Rule[LogicalPlan] {
case class AliasEntry(alias: Alias, index: Int)

+/**
+ * A tag to store the nameParts from the original unresolved attribute.
+ * It is set for [[OuterReference]], used in the current rule to convert [[OuterReference]] back
+ * to [[LateralColumnAliasReference]].
+ */
+val NAME_PARTS_FROM_UNRESOLVED_ATTR = TreeNodeTag[Seq[String]]("name_parts_from_unresolved_attr")

override def apply(plan: LogicalPlan): LogicalPlan = {
if (!conf.getConf(SQLConf.LATERAL_COLUMN_ALIAS_IMPLICIT_ENABLED)) {
plan
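This rule, together with the analyzer changes above, is what lets a SELECT list laterally reference an alias of an aggregate expression. A rough end-to-end sketch under the same assumed configuration key as before; the table and column names are made up:

import org.apache.spark.sql.SparkSession

object LcaOverAggregateDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").appName("lca-aggregate").getOrCreate()
    import spark.implicits._
    // Assumed key for SQLConf.LATERAL_COLUMN_ALIAS_IMPLICIT_ENABLED.
    spark.conf.set("spark.sql.lateralColumnAlias.enableImplicitResolution", "true")

    Seq(("eng", 100), ("eng", 200), ("sales", 150))
      .toDF("dept", "salary")
      .createOrReplaceTempView("emp")

    // `rounded` refers to the lateral alias `avg_salary` defined in the same SELECT list.
    spark.sql(
      "SELECT dept, avg(salary) AS avg_salary, round(avg_salary, 1) AS rounded " +
        "FROM emp GROUP BY dept").show()
    spark.stop()
  }
}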
@@ -424,18 +424,8 @@ case class OuterReference(e: NamedExpression)
override def qualifier: Seq[String] = e.qualifier
override def exprId: ExprId = e.exprId
override def toAttribute: Attribute = e.toAttribute
-override def newInstance(): NamedExpression =
-  OuterReference(e.newInstance()).setNameParts(nameParts)
+override def newInstance(): NamedExpression = OuterReference(e.newInstance())
final override val nodePatterns: Seq[TreePattern] = Seq(OUTER_REFERENCE)
-
-// optional field, the original name parts of UnresolvedAttribute before it is resolved to
-// OuterReference. Used in rule ResolveLateralColumnAlias to convert OuterReference back to
-// LateralColumnAliasReference.
-var nameParts: Option[Seq[String]] = None
-def setNameParts(newNameParts: Option[Seq[String]]): OuterReference = {
-  nameParts = newNameParts
-  this
-}
}

/**
@@ -19,6 +19,7 @@ package org.apache.spark.sql.catalyst.expressions

import scala.collection.mutable.ArrayBuffer

+import org.apache.spark.sql.catalyst.analysis.ResolveLateralColumnAliasReference.NAME_PARTS_FROM_UNRESOLVED_ATTR
import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.logical.{Filter, HintInfo, LogicalPlan}
@@ -159,7 +160,11 @@ object SubExprUtils extends PredicateHelper {
* Wrap attributes in the expression with [[OuterReference]]s.
*/
def wrapOuterReference[E <: Expression](e: E, nameParts: Option[Seq[String]] = None): E = {
-e.transform { case a: Attribute => OuterReference(a).setNameParts(nameParts) }.asInstanceOf[E]
+e.transform { case a: Attribute =>
+  val o = OuterReference(a)
+  nameParts.map(o.setTagValue(NAME_PARTS_FROM_UNRESOLVED_ATTR, _))
+  o
+}.asInstanceOf[E]
}

/**
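With this change, wrapOuterReference attaches the original name parts as a tree node tag instead of mutating a field on OuterReference. A small sketch of the same wrap-and-tag pattern applied to a standalone expression, using a locally defined tag so it compiles against catalyst alone (the attribute and tag names are illustrative):

import org.apache.spark.sql.catalyst.expressions.{AttributeReference, GreaterThan, Literal, OuterReference}
import org.apache.spark.sql.catalyst.trees.TreeNodeTag
import org.apache.spark.sql.types.IntegerType

object WrapAndTagSketch {
  // Local stand-in for NAME_PARTS_FROM_UNRESOLVED_ATTR.
  private val namePartsTag = TreeNodeTag[Seq[String]]("name_parts_from_unresolved_attr")

  def main(args: Array[String]): Unit = {
    val salary = AttributeReference("salary", IntegerType)()
    val expr = GreaterThan(salary, Literal(1))

    // Wrap every attribute with OuterReference and record where it came from.
    val wrapped = expr.transform {
      case a: AttributeReference =>
        val o = OuterReference(a)
        o.setTagValue(namePartsTag, Seq("salary"))
        o
    }
    // The tag can later be read back with getTagValue, as the rewrite rule does.
    println(wrapped)
  }
}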