-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-35207][SQL] Normalize hash function behavior with negative zero (floating point types) #32496
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[SPARK-35207][SQL] Normalize hash function behavior with negative zero (floating point types) #32496
Changes from 2 commits
f86fba6
641629d
a671ce7
1de9c3d
869ae7c
c123599
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -369,11 +369,25 @@ abstract class HashExpression[E] extends Expression { | |
| protected def genHashBoolean(input: String, result: String): String = | ||
| genHashInt(s"$input ? 1 : 0", result) | ||
|
|
||
| protected def genHashFloat(input: String, result: String): String = | ||
| genHashInt(s"Float.floatToIntBits($input)", result) | ||
| protected def genHashFloat(input: String, result: String): String = { | ||
| s""" | ||
| |if(Float.floatToIntBits($input) == Float.floatToIntBits(-0.0f)) { | ||
| | ${genHashInt(s"Float.floatToIntBits(0.0f)", result)} | ||
|
||
| |} else { | ||
| | ${genHashInt(s"Float.floatToIntBits($input)", result)} | ||
| |} | ||
| """.stripMargin | ||
| } | ||
|
|
||
| protected def genHashDouble(input: String, result: String): String = | ||
| genHashLong(s"Double.doubleToLongBits($input)", result) | ||
| protected def genHashDouble(input: String, result: String): String = { | ||
| s""" | ||
| |if(Double.doubleToLongBits($input) == Double.doubleToLongBits(-0.0d)) { | ||
|
||
| | ${genHashLong(s"Double.doubleToLongBits(0.0d)", result)} | ||
|
||
| |} else { | ||
| | ${genHashLong(s"Double.doubleToLongBits($input)", result)} | ||
| |} | ||
| """.stripMargin | ||
| } | ||
|
|
||
| protected def genHashDecimal( | ||
| ctx: CodegenContext, | ||
|
|
@@ -523,7 +537,9 @@ abstract class InterpretedHashFunction { | |
| case s: Short => hashInt(s, seed) | ||
| case i: Int => hashInt(i, seed) | ||
| case l: Long => hashLong(l, seed) | ||
| case f: Float if (f == -0.0f) => hashInt(java.lang.Float.floatToIntBits(0.0f), seed) | ||
| case f: Float => hashInt(java.lang.Float.floatToIntBits(f), seed) | ||
| case d: Double if (d == -0.0d) => hashLong(java.lang.Double.doubleToLongBits(0.0d), seed) | ||
| case d: Double => hashLong(java.lang.Double.doubleToLongBits(d), seed) | ||
| case d: Decimal => | ||
| val precision = dataType.asInstanceOf[DecimalType].precision | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -708,6 +708,16 @@ class HashExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { | |
| checkEvaluation(HiveHash(Seq(yearMonth)), 1234) | ||
| } | ||
|
|
||
| test("SPARK-35207: Compute hash consistent between -0.0 and 0.0") { | ||
| def checkResult(exprs1: Expression, exprs2: Expression): Unit = { | ||
| assert(Murmur3Hash(Seq(exprs1), 42).eval() == Murmur3Hash(Seq(exprs2), 42).eval()) | ||
| assert(XxHash64(Seq(exprs1), 42).eval() == XxHash64(Seq(exprs2), 42).eval()) | ||
| assert(HiveHash(Seq(exprs1)).eval() == HiveHash(Seq(exprs2)).eval()) | ||
| } | ||
| checkResult(Literal.create(0D, DoubleType), Literal.create(-0D, DoubleType)) | ||
|
||
| checkResult(Literal.create(0L, LongType), Literal.create(-0L, LongType)) | ||
| } | ||
|
|
||
| private def testHash(inputSchema: StructType): Unit = { | ||
| val inputGenerator = RandomDataGenerator.forType(inputSchema, nullable = false).get | ||
| val toRow = RowEncoder(inputSchema).createSerializer() | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -654,4 +654,30 @@ class WholeStageCodegenSuite extends QueryTest with SharedSparkSession | |
| } | ||
| } | ||
| } | ||
|
|
||
| test("SPARK-35207: Compute hash consistent between -0.0 and 0.0 doubles with Codegen") { | ||
|
||
| val data = Seq((0.0d, -1.0d, 1.0d)) | ||
| withTempPath { dir => | ||
| val path = dir.getCanonicalPath | ||
| data.toDF("col1", "col2", "col3").write.parquet(path) | ||
| sql(s"create table testHash(col1 double, col2 double, col3 double) " + | ||
| s"using parquet location '$path'") | ||
| sql("select hash(col1 / col2) == hash(col1 / col3) from testHash").collect() | ||
| .foreach(row => assert(row.getBoolean(0) == true)) | ||
| sql("drop table testHash") | ||
| } | ||
| } | ||
|
|
||
| test("SPARK-35207: Compute hash consistent between -0.0 and 0.0 floats with Codegen") { | ||
| val data = Seq((0.0f, -1.0f, 1.0f)) | ||
| withTempPath { dir => | ||
| val path = dir.getCanonicalPath | ||
| data.toDF("col1", "col2", "col3").write.parquet(path) | ||
| sql(s"create table testHash(col1 float, col2 float, col3 float) " + | ||
| s"using parquet location '$path'") | ||
| sql("select hash(col1 / col2) == hash(col1 / col3) from testHash").collect() | ||
| .foreach(row => assert(row.getBoolean(0) == true)) | ||
| sql("drop table testHash") | ||
| } | ||
| } | ||
| } | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Why do we need to use `floatToIntBits` here? Can we compare `$input == -0.0f` instead?
Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
+1,
`$input == 0.0f` should be good enough.