ResolveReferencesInAggregate.scala

@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.analysis
 
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.SQLConfHelper
-import org.apache.spark.sql.catalyst.expressions.{AliasHelper, Attribute, Expression, NamedExpression}
+import org.apache.spark.sql.catalyst.expressions.{AliasHelper, Attribute, Expression, IntegerLiteral, Literal, NamedExpression}
 import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
 import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, AppendColumns, LogicalPlan}
 import org.apache.spark.sql.catalyst.trees.TreePattern.{LATERAL_COLUMN_ALIAS_REFERENCE, UNRESOLVED_ATTRIBUTE}
@@ -136,7 +136,19 @@ class ResolveReferencesInAggregate(val catalogManager: CatalogManager) extends S
         groupExprs
       } else {
         // This is a valid GROUP BY ALL aggregate.
-        expandedGroupExprs.get
+        expandedGroupExprs.get.zipWithIndex.map { case (expr, index) =>
+          trimAliases(expr) match {
+            // HACK ALERT: If the expanded grouping expression is an integer literal, don't use it
+            // but use an integer literal of the index. The reason is we may repeatedly
+            // analyze the plan, and the original integer literal may cause failures
+            // with a later GROUP BY ordinal resolution. GROUP BY constant is
+            // meaningless so whatever value does not matter here.
+            case IntegerLiteral(_) =>
+              // GROUP BY ordinal uses 1-based index.
+              Literal(index + 1)
+            case _ => expr
+          }
+        }
       }
     } else {
       groupExprs
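For readers skimming the diff, here is a small standalone Scala sketch of the rewrite. The Expr ADT, IntLit, Column, and stabilize are hypothetical names invented for illustration (this is not Spark code); only the zipWithIndex/ordinal substitution mirrors the change above. The idea: after GROUP BY ALL expansion, a constant such as 100 left in the grouping list looks exactly like a user-written GROUP BY 100 on a second analysis pass, so it is replaced with the expression's own 1-based position, which a later ordinal-resolution rule can always resolve.

// Standalone sketch (not Spark code); all names here are hypothetical.
sealed trait Expr
case class Column(name: String) extends Expr
case class IntLit(value: Int) extends Expr

object GroupByAllSketch {
  // Replace any constant integer grouping expression with its 1-based ordinal,
  // so re-analysis cannot misread the constant as a select-list position.
  def stabilize(expanded: Seq[Expr]): Seq[Expr] =
    expanded.zipWithIndex.map {
      case (IntLit(_), i) => IntLit(i + 1) // ordinal of this grouping expression
      case (other, _)     => other
    }

  def main(args: Array[String]): Unit = {
    val expanded = Seq(Column("dept"), IntLit(100))
    println(stabilize(expanded)) // List(Column(dept), IntLit(2))
  }
}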
SubstituteUnresolvedOrdinalsSuite.scala

@@ -86,4 +86,22 @@ class SubstituteUnresolvedOrdinalsSuite extends AnalysisTest {
       testRelationWithData.groupBy(Literal(1))(Literal(100).as("a"))
     )
   }
+
+  test("SPARK-47895: group by all repeated analysis") {
+    val plan = testRelation.groupBy($"all")(Literal(100).as("a")).analyze
+    comparePlans(
+      plan,
+      testRelation.groupBy(Literal(1))(Literal(100).as("a"))
+    )
+
+    val testRelationWithData = testRelation.copy(data = Seq(new GenericInternalRow(Array(1: Any))))
+    // Copy the plan to reset its `analyzed` flag, so that analyzer rules will re-apply.
+    val copiedPlan = plan.transform {
+      case _: LocalRelation => testRelationWithData
+    }
+    comparePlans(
+      copiedPlan.analyze, // repeated analysis
+      testRelationWithData.groupBy(Literal(1))(Literal(100).as("a"))
+    )
+  }
 }
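The test above asserts the fix at the plan level: GROUP BY ALL over a constant-only select list resolves to GROUP BY 1, and re-analyzing the transformed plan produces the same result. As a rough end-to-end illustration only, the same shape can be exercised through the public SQL API; the query below is an assumed example, not the query from the original bug report, and the SparkSession setup is just what a local run would need.

// Assumed end-to-end illustration of a constant-only select list with GROUP BY ALL.
import org.apache.spark.sql.SparkSession

object GroupByAllExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").appName("group-by-all").getOrCreate()
    // GROUP BY ALL expands to the constant 100; with the fix it is stored as ordinal 1,
    // so re-resolving the plan does not hit GROUP BY ordinal resolution with the value 100.
    spark.sql("SELECT 100 AS a FROM VALUES (1), (2) AS t(x) GROUP BY ALL").show()
    spark.stop()
  }
}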