
Commit 82888da

MaxGekk authored and Seongjin Cho committed
[SPARK-30869][SQL] Convert dates to/from timestamps in microseconds precision
### What changes were proposed in this pull request?

In the PR, I propose to replace:

1. `millisToDays()` by `microsToDays()`, which accepts microseconds since the epoch and returns days since the epoch in the specified time zone. The latter is the internal representation of Catalyst's DateType.
2. `daysToMillis()` by `daysToMicros()`, which accepts days since the epoch in some time zone and returns the number of microseconds since the epoch. The latter is the internal representation of Catalyst's TimestampType.
3. `fromMillis()` by `millisToMicros()`.
4. `toMillis()` by `microsToMillis()`.

### Why are the changes needed?

Spark stores timestamps with microsecond precision, so there is no real need to convert dates to milliseconds first and then to microseconds. As examples, look at the DateTimeUtils functions `monthsBetween()` and `truncTimestamp()`.

### Does this PR introduce any user-facing change?

No.

### How was this patch tested?

By existing test suites: UnivocityParserSuite, DateExpressionsSuite, ComputeCurrentTimeSuite, DateTimeUtilsSuite, DateFunctionsSuite, JsonSuite and StreamSuite.

Closes apache#27618 from MaxGekk/replace-millis-by-micros.

Authored-by: Maxim Gekk <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
1 parent: e9cdfeb · commit: 82888da
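
To make the four renames concrete before diving into the diff, here is a minimal standalone sketch of the intended conversion semantics, written against plain `java.time`. The object and constant names mirror the new `DateTimeUtils` methods, but this is an illustration only, not Spark's code:

```scala
import java.time.{Instant, LocalDate, LocalDateTime, ZoneId}
import java.time.temporal.ChronoUnit

// Standalone sketch of the renamed conversions (illustration only).
object MicrosConversionsSketch {
  val MICROS_PER_MILLIS: Long = 1000L
  val MICROS_PER_SECOND: Long = 1000000L

  // fromMillis -> millisToMicros: multiplyExact makes overflow fail loudly.
  def millisToMicros(millis: Long): Long =
    Math.multiplyExact(millis, MICROS_PER_MILLIS)

  // toMillis -> microsToMillis: floor division keeps pre-1970 values correct.
  def microsToMillis(micros: Long): Long =
    Math.floorDiv(micros, MICROS_PER_MILLIS)

  // millisToDays -> microsToDays: micros since the epoch to days since the
  // epoch in a zone; days-since-1970 is the internal form of DateType.
  def microsToDays(micros: Long, zoneId: ZoneId): Int = {
    val instant = Instant.EPOCH.plus(micros, ChronoUnit.MICROS)
    LocalDateTime.ofInstant(instant, zoneId).toLocalDate.toEpochDay.toInt
  }

  // daysToMillis -> daysToMicros: local midnight of the given day back to
  // micros since the epoch; that is the internal form of TimestampType.
  def daysToMicros(days: Int, zoneId: ZoneId): Long = {
    val instant = LocalDate.ofEpochDay(days.toLong).atStartOfDay(zoneId).toInstant
    Math.addExact(
      Math.multiplyExact(instant.getEpochSecond, MICROS_PER_SECOND),
      instant.getNano / 1000L)
  }
}
```

Under this sketch, a round trip such as `microsToDays(daysToMicros(d, zone), zone) == d` holds for any day not skipped entirely by the zone's calendar, which is exactly what the updated DateTimeUtilsSuite below checks.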

24 files changed (+98, −99 lines)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala

Lines changed: 6 additions & 4 deletions
```diff
@@ -131,10 +131,12 @@ case class CurrentBatchTimestamp(
    */
   override protected def evalInternal(input: InternalRow): Any = toLiteral.value
 
-  def toLiteral: Literal = dataType match {
-    case _: TimestampType =>
-      Literal(DateTimeUtils.fromJavaTimestamp(new Timestamp(timestampMs)), TimestampType)
-    case _: DateType => Literal(DateTimeUtils.millisToDays(timestampMs, zoneId), DateType)
+  def toLiteral: Literal = {
+    val timestampUs = millisToMicros(timestampMs)
+    dataType match {
+      case _: TimestampType => Literal(timestampUs, TimestampType)
+      case _: DateType => Literal(microsToDays(timestampUs, zoneId), DateType)
+    }
   }
 }
 
```

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala

Lines changed: 4 additions & 3 deletions
```diff
@@ -23,7 +23,8 @@ import java.util.{Date, Locale}
 
 import org.apache.commons.lang3.time.FastDateFormat
 
-import org.apache.spark.sql.catalyst.util.DateTimeUtils.{convertSpecialDate, localDateToDays}
+import org.apache.spark.sql.catalyst.util.DateTimeConstants.MICROS_PER_MILLIS
+import org.apache.spark.sql.catalyst.util.DateTimeUtils._
 import org.apache.spark.sql.internal.SQLConf
 
 sealed trait DateFormatter extends Serializable {
@@ -57,8 +58,8 @@ trait LegacyDateFormatter extends DateFormatter {
   def formatDate(d: Date): String
 
   override def parse(s: String): Int = {
-    val milliseconds = parseToDate(s).getTime
-    DateTimeUtils.millisToDays(milliseconds)
+    val micros = DateTimeUtils.millisToMicros(parseToDate(s).getTime)
+    DateTimeUtils.microsToDays(micros)
   }
 
   override def format(days: Int): String = {
```

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala

Lines changed: 25 additions & 33 deletions
```diff
@@ -59,24 +59,22 @@ object DateTimeUtils {
     TimeZone.getTimeZone(getZoneId(timeZoneId))
   }
 
-  // we should use the exact day as Int, for example, (year, month, day) -> day
-  def millisToDays(millisUtc: Long): SQLDate = {
-    millisToDays(millisUtc, defaultTimeZone().toZoneId)
+  def microsToDays(timestamp: SQLTimestamp): SQLDate = {
+    microsToDays(timestamp, defaultTimeZone().toZoneId)
   }
 
-  def millisToDays(millisUtc: Long, zoneId: ZoneId): SQLDate = {
-    val instant = microsToInstant(fromMillis(millisUtc))
+  def microsToDays(timestamp: SQLTimestamp, zoneId: ZoneId): SQLDate = {
+    val instant = microsToInstant(timestamp)
     localDateToDays(LocalDateTime.ofInstant(instant, zoneId).toLocalDate)
   }
 
-  // reverse of millisToDays
-  def daysToMillis(days: SQLDate): Long = {
-    daysToMillis(days, defaultTimeZone().toZoneId)
+  def daysToMicros(days: SQLDate): SQLTimestamp = {
+    daysToMicros(days, defaultTimeZone().toZoneId)
   }
 
-  def daysToMillis(days: SQLDate, zoneId: ZoneId): Long = {
+  def daysToMicros(days: SQLDate, zoneId: ZoneId): SQLTimestamp = {
     val instant = daysToLocalDate(days).atStartOfDay(zoneId).toInstant
-    toMillis(instantToMicros(instant))
+    instantToMicros(instant)
   }
 
   // Converts Timestamp to string according to Hive TimestampWritable convention.
@@ -88,14 +86,14 @@
    * Returns the number of days since epoch from java.sql.Date.
    */
   def fromJavaDate(date: Date): SQLDate = {
-    millisToDays(date.getTime)
+    microsToDays(millisToMicros(date.getTime))
   }
 
   /**
    * Returns a java.sql.Date from number of days since epoch.
   */
   def toJavaDate(daysSinceEpoch: SQLDate): Date = {
-    new Date(daysToMillis(daysSinceEpoch))
+    new Date(microsToMillis(daysToMicros(daysSinceEpoch)))
   }
 
   /**
@@ -138,7 +136,7 @@
    * Converts the timestamp to milliseconds since epoch. In spark timestamp values have microseconds
    * precision, so this conversion is lossy.
    */
-  def toMillis(us: SQLTimestamp): Long = {
+  def microsToMillis(us: SQLTimestamp): Long = {
     // When the timestamp is negative i.e before 1970, we need to adjust the millseconds portion.
     // Example - 1965-01-01 10:11:12.123456 is represented as (-157700927876544) in micro precision.
     // In millis precision the above needs to be represented as (-157700927877).
@@ -148,7 +146,7 @@
   /*
    * Converts milliseconds since epoch to SQLTimestamp.
   */
-  def fromMillis(millis: Long): SQLTimestamp = {
+  def millisToMicros(millis: Long): SQLTimestamp = {
     Math.multiplyExact(millis, MICROS_PER_MILLIS)
   }
```
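The floor-division behaviour documented in the `microsToMillis` comment above can be checked directly. A small sketch over plain `java.lang.Math`, assuming `MICROS_PER_MILLIS = 1000`; not part of the patch:

```scala
// Sketch of the rounding and overflow behaviour behind microsToMillis /
// millisToMicros (illustration only).
object MicrosMillisRounding extends App {
  // Floor division rounds toward negative infinity, as the pre-1970 example
  // in the comment requires: ...876544 us -> ...877 ms, not ...876 ms.
  assert(Math.floorDiv(-157700927876544L, 1000L) == -157700927877L)
  // Plain truncating division would land one millisecond too late:
  assert(-157700927876544L / 1000L == -157700927876L)
  // millisToMicros relies on multiplyExact, so out-of-range inputs fail loudly:
  try Math.multiplyExact(Long.MaxValue / 100L, 1000L)
  catch { case _: ArithmeticException => println("overflow detected") }
}
```
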
```diff
@@ -574,10 +572,8 @@
       time2: SQLTimestamp,
       roundOff: Boolean,
       zoneId: ZoneId): Double = {
-    val millis1 = toMillis(time1)
-    val millis2 = toMillis(time2)
-    val date1 = millisToDays(millis1, zoneId)
-    val date2 = millisToDays(millis2, zoneId)
+    val date1 = microsToDays(time1, zoneId)
+    val date2 = microsToDays(time2, zoneId)
     val (year1, monthInYear1, dayInMonth1, daysToMonthEnd1) = splitDate(date1)
     val (year2, monthInYear2, dayInMonth2, daysToMonthEnd2) = splitDate(date2)
 
@@ -591,8 +587,8 @@
     }
     // using milliseconds can cause precision loss with more than 8 digits
     // we follow Hive's implementation which uses seconds
-    val secondsInDay1 = MILLISECONDS.toSeconds(millis1 - daysToMillis(date1, zoneId))
-    val secondsInDay2 = MILLISECONDS.toSeconds(millis2 - daysToMillis(date2, zoneId))
+    val secondsInDay1 = MICROSECONDS.toSeconds(time1 - daysToMicros(date1, zoneId))
+    val secondsInDay2 = MICROSECONDS.toSeconds(time2 - daysToMicros(date2, zoneId))
     val secondsDiff = (dayInMonth1 - dayInMonth2) * SECONDS_PER_DAY + secondsInDay1 - secondsInDay2
     val secondsInMonth = DAYS.toSeconds(31)
     val diff = monthDiff + secondsDiff / secondsInMonth.toDouble
```
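The seconds-within-day terms above are now derived straight from the microsecond values, skipping the old millis round trip. A minimal sketch of that arithmetic, where `daysToMicros` is a local stand-in for the DateTimeUtils method of the same name (assumed helper, illustration only):

```scala
import java.time.{LocalDate, ZoneId}
import java.util.concurrent.TimeUnit.MICROSECONDS

// Sketch of the monthsBetween seconds arithmetic (illustration only).
object SecondsInDaySketch {
  // Local stand-in for DateTimeUtils.daysToMicros.
  def daysToMicros(days: Int, zoneId: ZoneId): Long = {
    val instant = LocalDate.ofEpochDay(days.toLong).atStartOfDay(zoneId).toInstant
    Math.addExact(
      Math.multiplyExact(instant.getEpochSecond, 1000000L),
      instant.getNano / 1000L)
  }

  // Seconds elapsed since the local midnight of `days`, straight from micros.
  def secondsInDay(timeMicros: Long, days: Int, zoneId: ZoneId): Long =
    MICROSECONDS.toSeconds(timeMicros - daysToMicros(days, zoneId))
}
```
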
```diff
@@ -711,21 +707,17 @@
   def truncTimestamp(t: SQLTimestamp, level: Int, zoneId: ZoneId): SQLTimestamp = {
     level match {
       case TRUNC_TO_MICROSECOND => t
+      case TRUNC_TO_MILLISECOND =>
+        t - Math.floorMod(t, MICROS_PER_MILLIS)
+      case TRUNC_TO_SECOND =>
+        t - Math.floorMod(t, MICROS_PER_SECOND)
+      case TRUNC_TO_MINUTE =>
+        t - Math.floorMod(t, MICROS_PER_MINUTE)
       case TRUNC_TO_HOUR => truncToUnit(t, zoneId, ChronoUnit.HOURS)
       case TRUNC_TO_DAY => truncToUnit(t, zoneId, ChronoUnit.DAYS)
-      case _ =>
-        val millis = toMillis(t)
-        val truncated = level match {
-          case TRUNC_TO_MILLISECOND => millis
-          case TRUNC_TO_SECOND =>
-            millis - Math.floorMod(millis, MILLIS_PER_SECOND)
-          case TRUNC_TO_MINUTE =>
-            millis - Math.floorMod(millis, MILLIS_PER_MINUTE)
-          case _ => // Try to truncate date levels
-            val dDays = millisToDays(millis, zoneId)
-            daysToMillis(truncDate(dDays, level), zoneId)
-        }
-        fromMillis(truncated)
+      case _ => // Try to truncate date levels
+        val dDays = microsToDays(t, zoneId)
+        daysToMicros(truncDate(dDays, level), zoneId)
     }
   }
 
```
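Truncation to the fixed-width units (millisecond, second, minute) is now a single subtraction of the timestamp's remainder modulo the unit size, with no detour through milliseconds. A quick check of that identity, assuming the usual 10^6 microseconds per second; my sketch, not part of the patch:

```scala
// floorMod-based truncation, as in the new truncTimestamp branches above.
object TruncSketch extends App {
  val MICROS_PER_SECOND: Long = 1000000L

  def truncToSecond(t: Long): Long = t - Math.floorMod(t, MICROS_PER_SECOND)

  assert(truncToSecond(1234567L) == 1000000L)
  // floorMod (unlike %) keeps pre-1970 values in the right bucket:
  // -1 us belongs to the second that starts at -1000000 us.
  assert(truncToSecond(-1L) == -1000000L)
}
```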

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala

Lines changed: 2 additions & 2 deletions
```diff
@@ -23,7 +23,7 @@ import java.util.concurrent.TimeUnit
 import scala.util.control.NonFatal
 
 import org.apache.spark.sql.catalyst.util.DateTimeConstants._
-import org.apache.spark.sql.catalyst.util.DateTimeUtils.fromMillis
+import org.apache.spark.sql.catalyst.util.DateTimeUtils.millisToMicros
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.Decimal
 import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
@@ -736,7 +736,7 @@ object IntervalUtils {
       microseconds = Math.addExact(microseconds, minutesUs)
       i += minuteStr.numBytes()
     } else if (s.matchAt(millisStr, i)) {
-      val millisUs = fromMillis(currentValue)
+      val millisUs = millisToMicros(currentValue)
       microseconds = Math.addExact(microseconds, millisUs)
       i += millisStr.numBytes()
     } else if (s.matchAt(microsStr, i)) {
```

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala

Lines changed: 2 additions & 2 deletions
```diff
@@ -141,7 +141,7 @@ class LegacyFastTimestampFormatter(
     }
     val micros = cal.getMicros()
     cal.set(Calendar.MILLISECOND, 0)
-    Math.addExact(fromMillis(cal.getTimeInMillis), micros)
+    Math.addExact(millisToMicros(cal.getTimeInMillis), micros)
   }
 
   def format(timestamp: SQLTimestamp): String = {
@@ -164,7 +164,7 @@ class LegacySimpleTimestampFormatter(
   }
 
   override def parse(s: String): Long = {
-    fromMillis(sdf.parse(s).getTime)
+    millisToMicros(sdf.parse(s).getTime)
   }
 
   override def format(us: Long): String = {
```

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/csv/UnivocityParserSuite.scala

Lines changed: 2 additions & 2 deletions
```diff
@@ -135,10 +135,10 @@ class UnivocityParserSuite extends SparkFunSuite with SQLHelper {
       dateOptions.dateFormat,
       TimeZone.getTimeZone(dateOptions.zoneId),
       dateOptions.locale)
-    val expectedDate = format.parse(customDate).getTime
+    val expectedDate = DateTimeUtils.millisToMicros(format.parse(customDate).getTime)
     val castedDate = parser.makeConverter("_1", DateType, nullable = true)
       .apply(customDate)
-    assert(castedDate == DateTimeUtils.millisToDays(expectedDate, ZoneOffset.UTC))
+    assert(castedDate == DateTimeUtils.microsToDays(expectedDate, ZoneOffset.UTC))
 
     val timestamp = "2015-01-01 00:00:00"
     timestampsOptions = new CSVOptions(Map(
```

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala

Lines changed: 2 additions & 2 deletions
```diff
@@ -271,13 +271,13 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
     checkEvaluation(
       cast(cast(new Timestamp(c.getTimeInMillis), StringType, timeZoneId),
         TimestampType, timeZoneId),
-      fromMillis(c.getTimeInMillis))
+      millisToMicros(c.getTimeInMillis))
     c = Calendar.getInstance(TimeZoneGMT)
     c.set(2015, 10, 1, 2, 30, 0)
     checkEvaluation(
       cast(cast(new Timestamp(c.getTimeInMillis), StringType, timeZoneId),
         TimestampType, timeZoneId),
-      fromMillis(c.getTimeInMillis))
+      millisToMicros(c.getTimeInMillis))
   }
 
   val gmtId = Option("GMT")
```

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala

Lines changed: 13 additions & 13 deletions
```diff
@@ -47,17 +47,17 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
 
   def toMillis(timestamp: String): Long = {
     val tf = TimestampFormatter("yyyy-MM-dd HH:mm:ss", ZoneOffset.UTC)
-    DateTimeUtils.toMillis(tf.parse(timestamp))
+    DateTimeUtils.microsToMillis(tf.parse(timestamp))
   }
   val date = "2015-04-08 13:10:15"
   val d = new Date(toMillis(date))
   val time = "2013-11-08 13:10:15"
   val ts = new Timestamp(toMillis(time))
 
   test("datetime function current_date") {
-    val d0 = DateTimeUtils.millisToDays(System.currentTimeMillis(), ZoneOffset.UTC)
+    val d0 = DateTimeUtils.currentDate(ZoneOffset.UTC)
     val cd = CurrentDate(gmtId).eval(EmptyRow).asInstanceOf[Int]
-    val d1 = DateTimeUtils.millisToDays(System.currentTimeMillis(), ZoneOffset.UTC)
+    val d1 = DateTimeUtils.currentDate(ZoneOffset.UTC)
     assert(d0 <= cd && cd <= d1 && d1 - d0 <= 1)
 
     val cdjst = CurrentDate(jstId).eval(EmptyRow).asInstanceOf[Int]
@@ -787,15 +787,15 @@
       1000L)
     checkEvaluation(
       UnixTimestamp(Literal(date1), Literal("yyyy-MM-dd HH:mm:ss"), timeZoneId),
-      MILLISECONDS.toSeconds(
-        DateTimeUtils.daysToMillis(DateTimeUtils.fromJavaDate(date1), tz.toZoneId)))
+      MICROSECONDS.toSeconds(
+        DateTimeUtils.daysToMicros(DateTimeUtils.fromJavaDate(date1), tz.toZoneId)))
     checkEvaluation(
       UnixTimestamp(Literal(sdf2.format(new Timestamp(-1000000))),
        Literal(fmt2), timeZoneId),
       -1000L)
     checkEvaluation(UnixTimestamp(
       Literal(sdf3.format(Date.valueOf("2015-07-24"))), Literal(fmt3), timeZoneId),
-      MILLISECONDS.toSeconds(DateTimeUtils.daysToMillis(
+      MICROSECONDS.toSeconds(DateTimeUtils.daysToMicros(
        DateTimeUtils.fromJavaDate(Date.valueOf("2015-07-24")), tz.toZoneId)))
     val t1 = UnixTimestamp(
       CurrentTimestamp(), Literal("yyyy-MM-dd HH:mm:ss")).eval().asInstanceOf[Long]
@@ -813,8 +813,8 @@
       null)
     checkEvaluation(
       UnixTimestamp(Literal(date1), Literal.create(null, StringType), timeZoneId),
-      MILLISECONDS.toSeconds(
-        DateTimeUtils.daysToMillis(DateTimeUtils.fromJavaDate(date1), tz.toZoneId)))
+      MICROSECONDS.toSeconds(
+        DateTimeUtils.daysToMicros(DateTimeUtils.fromJavaDate(date1), tz.toZoneId)))
     checkEvaluation(
       UnixTimestamp(Literal("2015-07-24"), Literal("not a valid format"), timeZoneId), null)
   }
@@ -851,16 +851,16 @@
       1000L)
     checkEvaluation(
       ToUnixTimestamp(Literal(date1), Literal(fmt1), timeZoneId),
-      MILLISECONDS.toSeconds(
-        DateTimeUtils.daysToMillis(DateTimeUtils.fromJavaDate(date1), tz.toZoneId)))
+      MICROSECONDS.toSeconds(
+        DateTimeUtils.daysToMicros(DateTimeUtils.fromJavaDate(date1), tz.toZoneId)))
     checkEvaluation(
       ToUnixTimestamp(
         Literal(sdf2.format(new Timestamp(-1000000))),
         Literal(fmt2), timeZoneId),
       -1000L)
     checkEvaluation(ToUnixTimestamp(
       Literal(sdf3.format(Date.valueOf("2015-07-24"))), Literal(fmt3), timeZoneId),
-      MILLISECONDS.toSeconds(DateTimeUtils.daysToMillis(
+      MICROSECONDS.toSeconds(DateTimeUtils.daysToMicros(
        DateTimeUtils.fromJavaDate(Date.valueOf("2015-07-24")), tz.toZoneId)))
     val t1 = ToUnixTimestamp(
       CurrentTimestamp(), Literal(fmt1)).eval().asInstanceOf[Long]
@@ -875,8 +875,8 @@
       null)
     checkEvaluation(ToUnixTimestamp(
       Literal(date1), Literal.create(null, StringType), timeZoneId),
-      MILLISECONDS.toSeconds(
-        DateTimeUtils.daysToMillis(DateTimeUtils.fromJavaDate(date1), tz.toZoneId)))
+      MICROSECONDS.toSeconds(
+        DateTimeUtils.daysToMicros(DateTimeUtils.fromJavaDate(date1), tz.toZoneId)))
     checkEvaluation(
       ToUnixTimestamp(
         Literal("2015-07-24"),
```

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ComputeCurrentTimeSuite.scala

Lines changed: 4 additions & 2 deletions
```diff
@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql.catalyst.optimizer
 
+import java.time.ZoneId
+
 import org.apache.spark.sql.catalyst.dsl.plans._
 import org.apache.spark.sql.catalyst.expressions.{Alias, CurrentDate, CurrentTimestamp, Literal}
 import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -51,9 +53,9 @@ class ComputeCurrentTimeSuite extends PlanTest {
   test("analyzer should replace current_date with literals") {
     val in = Project(Seq(Alias(CurrentDate(), "a")(), Alias(CurrentDate(), "b")()), LocalRelation())
 
-    val min = DateTimeUtils.millisToDays(System.currentTimeMillis())
+    val min = DateTimeUtils.currentDate(ZoneId.systemDefault())
     val plan = Optimize.execute(in.analyze).asInstanceOf[Project]
-    val max = DateTimeUtils.millisToDays(System.currentTimeMillis())
+    val max = DateTimeUtils.currentDate(ZoneId.systemDefault())
 
     val lits = new scala.collection.mutable.ArrayBuffer[Int]
     plan.transformAllExpressions { case e: Literal =>
```

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala

Lines changed: 15 additions & 14 deletions
```diff
@@ -89,7 +89,8 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
 
   test("SPARK-6785: java date conversion before and after epoch") {
     def format(d: Date): String = {
-      TimestampFormatter("uuuu-MM-dd", defaultTimeZone().toZoneId).format(fromMillis(d.getTime))
+      TimestampFormatter("uuuu-MM-dd", defaultTimeZone().toZoneId)
+        .format(millisToMicros(d.getTime))
     }
     def checkFromToJavaDate(d1: Date): Unit = {
       val d2 = toJavaDate(fromJavaDate(d1))
@@ -582,17 +583,17 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
     }
   }
 
-  test("daysToMillis and millisToDays") {
-    val input = toMillis(date(2015, 12, 31, 16, zid = zonePST))
-    assert(millisToDays(input, zonePST) === 16800)
-    assert(millisToDays(input, ZoneOffset.UTC) === 16801)
-    assert(millisToDays(-1 * MILLIS_PER_DAY + 1, ZoneOffset.UTC) == -1)
+  test("daysToMicros and microsToDays") {
+    val input = date(2015, 12, 31, 16, zid = zonePST)
+    assert(microsToDays(input, zonePST) === 16800)
+    assert(microsToDays(input, ZoneOffset.UTC) === 16801)
+    assert(microsToDays(-1 * MILLIS_PER_DAY + 1, ZoneOffset.UTC) == -1)
 
-    var expected = toMillis(date(2015, 12, 31, zid = zonePST))
-    assert(daysToMillis(16800, zonePST) === expected)
+    var expected = date(2015, 12, 31, zid = zonePST)
+    assert(daysToMicros(16800, zonePST) === expected)
 
-    expected = toMillis(date(2015, 12, 31, zid = zoneGMT))
-    assert(daysToMillis(16800, ZoneOffset.UTC) === expected)
+    expected = date(2015, 12, 31, zid = zoneGMT)
+    assert(daysToMicros(16800, ZoneOffset.UTC) === expected)
 
     // There are some days are skipped entirely in some timezone, skip them here.
     val skipped_days = Map[String, Set[Int]](
@@ -607,16 +608,16 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
       val skipped = skipped_days.getOrElse(tz.getID, Set.empty)
       (-20000 to 20000).foreach { d =>
         if (!skipped.contains(d)) {
-          assert(millisToDays(daysToMillis(d, tz.toZoneId), tz.toZoneId) === d,
+          assert(microsToDays(daysToMicros(d, tz.toZoneId), tz.toZoneId) === d,
             s"Round trip of ${d} did not work in tz ${tz}")
         }
       }
     }
   }
 
-  test("toMillis") {
-    assert(DateTimeUtils.toMillis(-9223372036844776001L) === -9223372036844777L)
-    assert(DateTimeUtils.toMillis(-157700927876544L) === -157700927877L)
+  test("microsToMillis") {
+    assert(DateTimeUtils.microsToMillis(-9223372036844776001L) === -9223372036844777L)
+    assert(DateTimeUtils.microsToMillis(-157700927876544L) === -157700927877L)
   }
 
   test("special timestamp values") {
```
