@@ -33,7 +33,7 @@ class SubstituteUnresolvedOrdinals(conf: SQLConf) extends Rule[LogicalPlan] {
     case _ => false
   }

-  def apply(plan: LogicalPlan): LogicalPlan = plan transform {
+  def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
     case s: Sort if conf.orderByOrdinal && s.order.exists(o => isIntLiteral(o.child)) =>
       val newOrders = s.order.map {
         case order @ SortOrder(ordinal @ Literal(index: Int, IntegerType), _, _, _) =>
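The one-line change above is the substance of the patch: `transform` re-visits every operator, including subtrees already marked as analyzed (for example, the plan behind an existing Dataset that is reused in a later query), so a resolved integer literal could be fed through ordinal substitution a second time, while `resolveOperators` skips those already-analyzed subtrees. As a rough illustration of the group-by-ordinal reading that the new tests below pin down, here is a spark-shell style sketch with made-up data, assuming `spark.implicits._` is in scope (not part of the patch):

import org.apache.spark.sql.functions.{lit, sum}

val df = Seq((1, 1), (1, 2), (2, 1)).toDF("a", "b")

// Output columns are (3, 4, 6, 7, sum(b)); the integer group-by keys 3 and 4 are
// read as ordinals into that output list, i.e. the constants 6 and 7, so all rows
// collapse into a single group and sum(b) is 4 for this data.
df.groupBy(lit(3), lit(4)).agg(lit(6), lit(7), sum("b")).show()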
@@ -52,7 +52,7 @@ select count(a), a from (select 1 as a) tmp group by 2 having a > 0;
 -- mixed cases: group-by ordinals and aliases
 select a, a AS k, count(b) from data group by k, 1;

--- turn of group by ordinal
+-- turn off group by ordinal
 set spark.sql.groupByOrdinal=false;

 -- can now group by negative literal
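Since the corrected comment above toggles it, a short hedged sketch of what `spark.sql.groupByOrdinal` controls, written in Scala shell form and assuming the same `data(a, b)` view this .sql test file uses:

// With the flag on (the default), an integer literal in GROUP BY is a 1-based
// position in the select list, so this groups by column a.
spark.conf.set("spark.sql.groupByOrdinal", true)
spark.sql("SELECT a, count(b) FROM data GROUP BY 1").show()

// With the flag off, integer literals are left as plain constants, which is why
// the "group by negative literal" case right after the SET becomes legal.
spark.conf.set("spark.sql.groupByOrdinal", false)
spark.sql("SELECT count(b) FROM data GROUP BY -1").show()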
@@ -557,4 +557,20 @@ class DataFrameAggregateSuite extends QueryTest with SharedSQLContext {
     }
     assert(e.message.contains("aggregate functions are not allowed in GROUP BY"))
   }
+
+  test("SPARK-21580 ints in aggregation expressions are taken as group-by ordinal.") {
Member: Please add an order-by test too. Maybe add it to DataFrameSuite.
Contributor Author: ok, thanks
+    checkAnswer(
+      testData2.groupBy(lit(3), lit(4)).agg(lit(6), lit(7), sum("b")),
+      Seq(Row(3, 4, 6, 7, 9)))
+    checkAnswer(
+      testData2.groupBy(lit(3), lit(4)).agg(lit(6), 'b, sum("b")),
+      Seq(Row(3, 4, 6, 1, 3), Row(3, 4, 6, 2, 6)))
+
+    checkAnswer(
+      spark.sql("SELECT 3, 4, SUM(b) FROM testData2 GROUP BY 1, 2"),
+      Seq(Row(3, 4, 9)))
+    checkAnswer(
+      spark.sql("SELECT 3 AS c, 4 AS d, SUM(b) FROM testData2 GROUP BY c, d"),
+      Seq(Row(3, 4, 9)))
+  }
 }
@@ -2023,4 +2023,10 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
       assert(df1.join(df2, $"t1.i" === $"t2.i").cache().count() == 1)
     }
   }
+
+  test("order-by ordinal.") {
+    checkAnswer(
+      testData2.select(lit(7), 'a, 'b).orderBy(lit(1), lit(2), lit(3)),
+      Seq(Row(7, 1, 1), Row(7, 1, 2), Row(7, 2, 1), Row(7, 2, 2), Row(7, 3, 1), Row(7, 3, 2)))
+  }
 }
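For completeness, the ORDER BY side guarded by `conf.orderByOrdinal` in the rule above works the same way. A hedged sketch against the `testData2` view the suites already query, not part of the patch:

// With spark.sql.orderByOrdinal=true (the default), 1 and 2 are 1-based positions
// in the select list, so this is effectively ORDER BY a, b.
spark.sql("SELECT a, b FROM testData2 ORDER BY 1, 2").show()

// With the flag off, the literals are plain constants and impose no real ordering.
spark.conf.set("spark.sql.orderByOrdinal", false)
spark.sql("SELECT a, b FROM testData2 ORDER BY 1, 2").show()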