Skip to content

Commit 1a2977e

Browse files
uros-db and MaxGekk committed
[SPARK-52881][SQL] Implement the make_time function in Scala
### What changes were proposed in this pull request?

Implement the `make_time` function in the Scala API.

### Why are the changes needed?

Expand API support for the `MakeTime` expression.

### Does this PR introduce _any_ user-facing change?

Yes, the new function is now available in the Scala API.

### How was this patch tested?

Added appropriate Scala function tests.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes apache#51573 from uros-db/scala-make_time.

Lead-authored-by: Uros Bojanic <uros.bojanic@databricks.com>
Co-authored-by: Maxim Gekk <max.gekk@gmail.com>
Signed-off-by: Max Gekk <max.gekk@gmail.com>
1 parent ea83a17 commit 1a2977e

File tree

3 files changed

+55
-1
lines changed

3 files changed

+55
-1
lines changed

python/pyspark/sql/tests/test_functions.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,10 @@ def test_function_parity(self):
8181
missing_in_py = jvm_fn_set.difference(py_fn_set)
8282

8383
# Functions that we expect to be missing in python until they are added to pyspark
84-
expected_missing_in_py = set()
84+
expected_missing_in_py = set(
85+
# TODO(SPARK-52888): Implement the make_time function in Python
86+
["make_time"]
87+
)
8588

8689
self.assertEqual(
8790
expected_missing_in_py, missing_in_py, "Missing functions in pyspark not as expected"

sql/api/src/main/scala/org/apache/spark/sql/functions.scala

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -871,6 +871,22 @@ object functions {
871871
def last_value(e: Column, ignoreNulls: Column): Column =
872872
Column.fn("last_value", e, ignoreNulls)
873873

874+
/**
875+
* Create time from hour, minute and second fields. For invalid inputs it will throw an error.
876+
*
877+
* @param hour
878+
* the hour to represent, from 0 to 23
879+
* @param minute
880+
* the minute to represent, from 0 to 59
881+
* @param second
882+
* the second to represent, from 0 to 59.999999
883+
* @group datetime_funcs
884+
* @since 4.1.0
885+
*/
886+
def make_time(hour: Column, minute: Column, second: Column): Column = {
887+
Column.fn("make_time", hour, minute, second)
888+
}
889+
874890
/**
875891
* Aggregate function: returns the most frequent value in a group.
876892
*

sql/core/src/test/scala/org/apache/spark/sql/TimeFunctionsSuiteBase.scala

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,41 @@ import org.apache.spark.sql.types._
2828
abstract class TimeFunctionsSuiteBase extends QueryTest with SharedSparkSession {
2929
import testImplicits._
3030

31+
test("SPARK-52881: make_time function") {
32+
// Input data for the function.
33+
val schema = StructType(Seq(
34+
StructField("hour", IntegerType, nullable = false),
35+
StructField("minute", IntegerType, nullable = false),
36+
StructField("second", DecimalType(16, 6), nullable = false)
37+
))
38+
val data = Seq(
39+
Row(0, 0, BigDecimal(0.0)),
40+
Row(1, 2, BigDecimal(3.4)),
41+
Row(23, 59, BigDecimal(59.999999))
42+
)
43+
val df = spark.createDataFrame(spark.sparkContext.parallelize(data), schema)
44+
45+
// Test the function using both `selectExpr` and `select`.
46+
val result1 = df.selectExpr(
47+
"make_time(hour, minute, second)"
48+
)
49+
val result2 = df.select(
50+
make_time(col("hour"), col("minute"), col("second"))
51+
)
52+
// Check that both methods produce the same result.
53+
checkAnswer(result1, result2)
54+
55+
// Expected output of the function.
56+
val expected = Seq(
57+
"00:00:00",
58+
"01:02:03.4",
59+
"23:59:59.999999"
60+
).toDF("timeString").select(col("timeString").cast("time"))
61+
// Check that the results match the expected output.
62+
checkAnswer(result1, expected)
63+
checkAnswer(result2, expected)
64+
}
65+
3166
test("SPARK-52885: hour function") {
3267
// Input data for the function.
3368
val schema = StructType(Seq(

0 commit comments

Comments
 (0)