
Commit 52234d6

Add secFrac
1 parent 561911a commit 52234d6

3 files changed: 13 additions & 4 deletions

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/IntervalExpressionsSuite.scala

Lines changed: 2 additions & 1 deletion
@@ -21,6 +21,7 @@ import scala.language.implicitConversions
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.util.DateTimeConstants._
+import org.apache.spark.sql.catalyst.util.DateTimeTestUtils
 import org.apache.spark.sql.catalyst.util.IntervalUtils.{safeStringToInterval, stringToInterval}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.Decimal
@@ -260,7 +261,7 @@ class IntervalExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
         seconds: Int = 0,
         millis: Int = 0,
         micros: Int = 0): Unit = {
-      val secFrac = seconds * MICROS_PER_SECOND + millis * MICROS_PER_MILLIS + micros
+      val secFrac = DateTimeTestUtils.secFrac(seconds, millis, micros)
       val intervalExpr = MakeInterval(Literal(years), Literal(months), Literal(weeks),
         Literal(days), Literal(hours), Literal(minutes), Literal(Decimal(secFrac, 8, 6)))
       val totalMonths = years * MONTHS_PER_YEAR + months

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala

Lines changed: 2 additions & 3 deletions
@@ -27,7 +27,6 @@ import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, _}
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.aggregate.{First, Last}
 import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, IntervalUtils}
-import org.apache.spark.sql.catalyst.util.DateTimeConstants._
 import org.apache.spark.sql.catalyst.util.IntervalUtils.IntervalUnit._
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
@@ -681,13 +680,13 @@ class ExpressionParserSuite extends AnalysisTest {
       Literal(new CalendarInterval(
         0,
         0,
-        -13 * MICROS_PER_SECOND - 123 * MICROS_PER_MILLIS - 456)))
+        DateTimeTestUtils.secFrac(-13, -123, -456))))
     checkIntervals(
       "13.123456 second",
       Literal(new CalendarInterval(
         0,
         0,
-        13 * MICROS_PER_SECOND + 123 * MICROS_PER_MILLIS + 456)))
+        DateTimeTestUtils.secFrac(13, 123, 456))))
     checkIntervals("1.001 second",
       Literal(IntervalUtils.stringToInterval("1 second 1 millisecond")))

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeTestUtils.scala

Lines changed: 9 additions & 0 deletions
@@ -21,6 +21,8 @@ import java.time.{LocalDate, LocalDateTime, LocalTime, ZoneId, ZoneOffset}
 import java.util.TimeZone
 import java.util.concurrent.TimeUnit
 
+import org.apache.spark.sql.catalyst.util.DateTimeConstants._
+
 /**
  * Helper functions for testing date and time functionality.
  */
@@ -95,4 +97,11 @@ object DateTimeTestUtils {
     val localDateTime = LocalDateTime.of(localDate, localTime)
     localDateTimeToMicros(localDateTime, zid)
   }
+
+  def secFrac(seconds: Int, milliseconds: Int, microseconds: Int): Long = {
+    var result: Long = microseconds
+    result = Math.addExact(result, Math.multiplyExact(milliseconds, MICROS_PER_MILLIS))
+    result = Math.addExact(result, Math.multiplyExact(seconds, MICROS_PER_SECOND))
+    result
+  }
 }
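
For reference, the new helper computes the same total-microseconds value as the inline arithmetic it replaces in the two suites, but routes it through Math.multiplyExact and Math.addExact, so an out-of-range seconds/millis/micros combination raises ArithmeticException instead of silently wrapping. Below is a minimal standalone sketch of the same approach; the object name MicrosSketch and its main method are illustrative only, and the constant values 1000 and 1000000 mirror Spark's DateTimeConstants.MICROS_PER_MILLIS and MICROS_PER_SECOND.

object MicrosSketch {
  private val MicrosPerMillis: Long = 1000L      // mirrors DateTimeConstants.MICROS_PER_MILLIS
  private val MicrosPerSecond: Long = 1000000L   // mirrors DateTimeConstants.MICROS_PER_SECOND

  // Combine a seconds/millis/micros triple into total microseconds,
  // failing fast on Long overflow rather than wrapping around.
  def secFrac(seconds: Int, milliseconds: Int, microseconds: Int): Long = {
    var result: Long = microseconds
    result = Math.addExact(result, Math.multiplyExact(milliseconds.toLong, MicrosPerMillis))
    result = Math.addExact(result, Math.multiplyExact(seconds.toLong, MicrosPerSecond))
    result
  }

  def main(args: Array[String]): Unit = {
    // Matches the literal used for "13.123456 second" in ExpressionParserSuite.
    println(secFrac(13, 123, 456))    // 13123456
    println(secFrac(-13, -123, -456)) // -13123456
  }
}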
