@@ -498,34 +498,35 @@ case class Pmod(left: Expression, right: Expression) extends BinaryArithmetic wi

   override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
     nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
+      val remainder = ctx.freshName("remainder")
       dataType match {
         case dt: DecimalType =>
           val decimalAdd = "$plus"
           s"""
-            ${ctx.javaType(dataType)} r = $eval1.remainder($eval2);
-            if (r.compare(new org.apache.spark.sql.types.Decimal().set(0)) < 0) {
-              ${ev.value} = (r.$decimalAdd($eval2)).remainder($eval2);
+            ${ctx.javaType(dataType)} $remainder = $eval1.remainder($eval2);
+            if ($remainder.compare(new org.apache.spark.sql.types.Decimal().set(0)) < 0) {
+              ${ev.value} = ($remainder.$decimalAdd($eval2)).remainder($eval2);
             } else {
-              ${ev.value} = r;
+              ${ev.value} = $remainder;
             }
           """
         // byte and short are casted into int when add, minus, times or divide
         case ByteType | ShortType =>
           s"""
-            ${ctx.javaType(dataType)} r = (${ctx.javaType(dataType)})($eval1 % $eval2);
-            if (r < 0) {
-              ${ev.value} = (${ctx.javaType(dataType)})((r + $eval2) % $eval2);
+            ${ctx.javaType(dataType)} $remainder = (${ctx.javaType(dataType)})($eval1 % $eval2);
+            if ($remainder < 0) {
+              ${ev.value} = (${ctx.javaType(dataType)})(($remainder + $eval2) % $eval2);
             } else {
-              ${ev.value} = r;
+              ${ev.value} = $remainder;
             }
           """
         case _ =>
           s"""
-            ${ctx.javaType(dataType)} r = $eval1 % $eval2;
-            if (r < 0) {
-              ${ev.value} = (r + $eval2) % $eval2;
+            ${ctx.javaType(dataType)} $remainder = $eval1 % $eval2;
+            if ($remainder < 0) {
+              ${ev.value} = ($remainder + $eval2) % $eval2;
             } else {
-              ${ev.value} = r;
+              ${ev.value} = $remainder;
             }
           """
       }
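
All three branches above implement the same positive-modulus rule; the only change in this hunk is that the generated local variable is named via ctx.freshName("remainder") instead of the hard-coded r, so it cannot clash with another local of the same name in the surrounding generated method. A minimal plain-Scala sketch of that rule (the pmod helper below is illustrative only, not Spark code):

    // Shift a Java-style remainder into the range [0, |n|) when it is negative.
    def pmod(a: Int, n: Int): Int = {
      val r = a % n
      if (r < 0) (r + n) % n else r
    }

    assert(pmod(-7, 3) == 2)  // -7 % 3 == -1, so n is added back
    assert(pmod(7, 3) == 1)   // a non-negative remainder passes through unchanged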
@@ -449,6 +449,20 @@ class DataFrameReaderWriterSuite extends QueryTest with SharedSQLContext with Be
     }
   }

+  test("pmod with partitionBy") {

    [Inline review comment from a Contributor on this line: you can remove this once #14146 makes it in.]

+    val spark = this.spark
+    import spark.implicits._
+
+    case class Test(a: Int, b: String)
+    val data = Seq((0, "a"), (1, "b"), (1, "a"))
+    spark.createDataset(data).createOrReplaceTempView("test")
+    sql("select * from test distribute by pmod(_1, 2)")
+      .write
+      .partitionBy("_2")
+      .mode("overwrite")
+      .parquet(dir)
+  }
+
   private def testRead(
       df: => DataFrame,
       expectedResult: Seq[String],
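
For context, a self-contained sketch of the pattern the new test exercises, assuming a local SparkSession and a throwaway output path (neither comes from the suite; the test itself uses the shared spark session and the suite's dir):

    import org.apache.spark.sql.SparkSession

    object PmodPartitionByDemo extends App {
      val spark = SparkSession.builder().master("local[2]").appName("pmod-demo").getOrCreate()
      import spark.implicits._

      // Tuples become a Dataset with columns _1 and _2, matching the test above.
      Seq((0, "a"), (1, "b"), (1, "a")).toDS().createOrReplaceTempView("test")

      // DISTRIBUTE BY pmod(_1, 2) shuffles rows by the non-negative modulus of _1;
      // partitionBy("_2") then lays the Parquet output out under _2=a/ and _2=b/.
      spark.sql("select * from test distribute by pmod(_1, 2)")
        .write
        .partitionBy("_2")
        .mode("overwrite")
        .parquet("/tmp/pmod_partitionby_demo")

      spark.stop()
    }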