
Commit e77a8f8

Fix
1 parent 16b9615 commit e77a8f8

File tree: 7 files changed (+59, −20 lines changed)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala

Lines changed: 1 addition & 1 deletion
@@ -404,7 +404,7 @@ object FunctionRegistry {
     expression[Month]("month"),
     expression[MonthsBetween]("months_between"),
     expression[NextDay]("next_day"),
-    expression[CurrentTimestamp]("now", true),
+    expression[Now]("now"),
     expression[Quarter]("quarter"),
     expression[Second]("second"),
     expression[ParseToTimestamp]("to_timestamp"),
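
For context, a quick usage sketch (not part of the commit; assumes an active SparkSession bound to `spark`, e.g. in spark-shell): with `now` registered as its own expression rather than an aliased CurrentTimestamp, both names still resolve, and within a single query both evaluate to the same instant.

  // REPL-style check; `spark` is assumed to be an existing SparkSession.
  val df = spark.sql("SELECT now() AS via_now, current_timestamp() AS via_current_timestamp")
  df.show(truncate = false)  // both columns show the same timestamp for this query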

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationChecker.scala

Lines changed: 2 additions & 4 deletions
@@ -19,13 +19,11 @@ package org.apache.spark.sql.catalyst.analysis
 
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, AttributeSet, CurrentDate, CurrentTimestamp, MonotonicallyIncreasingID}
+import org.apache.spark.sql.catalyst.expressions.{Attribute, CurrentDate, CurrentTimestamp, MonotonicallyIncreasingID, Now}
 import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
-import org.apache.spark.sql.catalyst.planning.ExtractEquiJoinKeys
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.streaming.InternalOutputModes
-import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.streaming.OutputMode
 
 /**
@@ -412,7 +410,7 @@ object UnsupportedOperationChecker extends Logging {
 
       subPlan.expressions.foreach { e =>
         if (e.collectLeaves().exists {
-          case (_: CurrentTimestamp | _: CurrentDate) => true
+          case (_: CurrentTimestamp | _: Now | _: CurrentDate) => true
           case _ => false
         }) {
           throwError(s"Continuous processing does not support current time operations.")
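
Because `now` is no longer an aliased CurrentTimestamp, a guard that matched only `_: CurrentTimestamp` would silently stop catching it, which is why Now is added to the pattern. A standalone sketch of that detection shape (simplified stand-in types, not the real Catalyst classes):

  // Stand-ins for the leaf expressions the checker cares about.
  sealed trait LeafSketch
  case class CurrentTimestampLeaf() extends LeafSketch
  case class NowLeaf() extends LeafSketch
  case class CurrentDateLeaf() extends LeafSketch
  case class LiteralLeaf(value: Any) extends LeafSketch

  // Mirrors the guard: reject the plan if any leaf is a current-time expression.
  def usesCurrentTime(leaves: Seq[LeafSketch]): Boolean = leaves.exists {
    case _: CurrentTimestampLeaf | _: NowLeaf | _: CurrentDateLeaf => true
    case _ => false
  }

  usesCurrentTime(Seq(LiteralLeaf(1), NowLeaf()))  // true only because NowLeaf is listed explicitly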

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala

Lines changed: 40 additions & 12 deletions
@@ -28,7 +28,6 @@ import org.apache.commons.text.StringEscapeUtils
 import org.apache.spark.SparkUpgradeException
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
 import org.apache.spark.sql.catalyst.util.{DateTimeUtils, LegacyDateFormats, TimestampFormatter}
@@ -62,7 +61,18 @@ trait TimeZoneAwareExpression extends Expression {
  * There is no code generation since this expression should get constant folded by the optimizer.
  */
 @ExpressionDescription(
-  usage = "_FUNC_() - Returns the current date at the start of query evaluation.",
+  usage = """
+    _FUNC_() - Returns the current date at the start of query evaluation.
+
+    _FUNC_ - Returns the current date at the start of query evaluation.
+  """,
+  examples = """
+    Examples:
+      > SELECT _FUNC_();
+       2020-04-25
+      > SELECT _FUNC_;
+       2020-04-25
+  """,
   group = "datetime_funcs",
   since = "1.5.0")
 case class CurrentDate(timeZoneId: Option[String] = None)
@@ -83,26 +93,44 @@ case class CurrentDate(timeZoneId: Option[String] = None)
   override def prettyName: String = "current_date"
 }
 
+abstract class CurrentTimestampLike() extends LeafExpression with CodegenFallback {
+  override def foldable: Boolean = true
+  override def nullable: Boolean = false
+  override def dataType: DataType = TimestampType
+  override def eval(input: InternalRow): Any = currentTimestamp()
+}
+
 /**
  * Returns the current timestamp at the start of query evaluation.
  * All calls of current_timestamp within the same query return the same value.
  *
  * There is no code generation since this expression should get constant folded by the optimizer.
  */
 @ExpressionDescription(
-  usage = "_FUNC_() - Returns the current timestamp at the start of query evaluation.",
+  usage = """
+    _FUNC_() - Returns the current timestamp at the start of query evaluation.
+
+    _FUNC_ - Returns the current timestamp at the start of query evaluation.
+  """,
+  examples = """
+    Examples:
+      > SELECT _FUNC_();
+       2020-04-25 15:49:11.914
+      > SELECT _FUNC_;
+       2020-04-25 15:49:11.914
+  """,
   group = "datetime_funcs",
   since = "1.5.0")
-case class CurrentTimestamp() extends LeafExpression with CodegenFallback {
-  override def foldable: Boolean = true
-  override def nullable: Boolean = false
-
-  override def dataType: DataType = TimestampType
-
-  override def eval(input: InternalRow): Any = currentTimestamp()
+case class CurrentTimestamp() extends CurrentTimestampLike {
+  override def prettyName: String = "current_timestamp"
+}
 
-  override def prettyName: String =
-    getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("current_timestamp")
+@ExpressionDescription(
+  usage = "_FUNC_() - Returns the current timestamp at the start of query evaluation.",
+  group = "datetime_funcs",
+  since = "1.5.0")
+case class Now() extends CurrentTimestampLike {
+  override def prettyName: String = "now"
 }
 
 /**
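
Restating the core refactor as a standalone sketch (simplified stand-ins, not the real Catalyst classes): the shared evaluation logic lives in one abstract base, and each concrete expression only supplies its SQL-facing name, so CurrentTimestamp no longer needs the FUNC_ALIAS tag lookup to print itself as `now`.

  // Minimal model; the epoch-micros expression below stands in for currentTimestamp().
  abstract class CurrentTimestampLikeSketch {
    final def eval(): Long = java.time.Instant.now().toEpochMilli * 1000L  // shared evaluation
    def prettyName: String                                                 // only the name differs
  }
  case class CurrentTimestampSketch() extends CurrentTimestampLikeSketch {
    override def prettyName: String = "current_timestamp"
  }
  case class NowSketch() extends CurrentTimestampLikeSketch {
    override def prettyName: String = "now"
  }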

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala

Lines changed: 11 additions & 1 deletion
@@ -1619,7 +1619,11 @@ case class StringSpace(child: Expression)
  */
 // scalastyle:off line.size.limit
 @ExpressionDescription(
-  usage = "_FUNC_(str, pos[, len]) - Returns the substring of `str` that starts at `pos` and is of length `len`, or the slice of byte array that starts at `pos` and is of length `len`.",
+  usage = """
+    _FUNC_(str, pos[, len]) - Returns the substring of `str` that starts at `pos` and is of length `len`, or the slice of byte array that starts at `pos` and is of length `len`.
+
+    _FUNC_(str FROM pos[ FOR len]]) - Returns the substring of `str` that starts at `pos` and is of length `len`, or the slice of byte array that starts at `pos` and is of length `len`.
+  """,
   examples = """
     Examples:
       > SELECT _FUNC_('Spark SQL', 5);
@@ -1628,6 +1632,12 @@ case class StringSpace(child: Expression)
       SQL
       > SELECT _FUNC_('Spark SQL', 5, 1);
       k
+      > SELECT _FUNC_('Spark SQL' FROM 5);
+       k SQL
+      > SELECT _FUNC_('Spark SQL' FROM -3);
+       SQL
+      > SELECT _FUNC_('Spark SQL' FROM 5 FOR 1);
+       k
   """,
   since = "1.5.0")
 // scalastyle:on line.size.limit

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/finishAnalysis.scala

Lines changed: 1 addition & 1 deletion
@@ -85,7 +85,7 @@ object ComputeCurrentTime extends Rule[LogicalPlan] {
             LocalDate.now(DateTimeUtils.getZoneId(timeZoneId)),
             DateType)
          })
-      case CurrentTimestamp() => currentTime
+      case CurrentTimestamp() | Now() => currentTime
     }
   }
 }
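
The widened match is what keeps the two functions consistent within a query: the rule substitutes one precomputed value for either expression. A standalone sketch of that substitution (simplified stand-ins, not the real ComputeCurrentTime rule):

  // `currentTime` plays the role of the literal the rule computes once per query.
  sealed trait TimeExpr
  case object CurrentTimestampExpr extends TimeExpr
  case object NowExpr extends TimeExpr
  case class OtherExpr(name: String) extends TimeExpr

  def foldCurrentTime(e: TimeExpr, currentTime: Long): Any = e match {
    case CurrentTimestampExpr | NowExpr => currentTime  // one shared value for both functions
    case other => other                                 // everything else is left untouched
  }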

sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala

Lines changed: 3 additions & 0 deletions
@@ -120,6 +120,9 @@ class ExpressionInfoSuite extends SparkFunSuite with SharedSparkSession {
     val ignoreSet = Set(
       // One of examples shows getting the current timestamp
       "org.apache.spark.sql.catalyst.expressions.UnixTimestamp",
+      "org.apache.spark.sql.catalyst.expressions.CurrentDate",
+      "org.apache.spark.sql.catalyst.expressions.CurrentTimestamp",
+      "org.apache.spark.sql.catalyst.expressions.Now",
       // Random output without a seed
       "org.apache.spark.sql.catalyst.expressions.Rand",
       "org.apache.spark.sql.catalyst.expressions.Randn",

sql/gen-sql-functions-docs.py

Lines changed: 1 addition & 1 deletion
@@ -106,7 +106,7 @@ def _make_pretty_usage(infos):
     for info in infos:
         # Extracts (signature, description) pairs from `info.usage`, e.g.,
         # the signature is `func(expr)` and the description is `...` in an usage `func(expr) - ...`.
-        usages = iter(re.split(r"(%s\(.*\)) - " % info.name, info.usage.strip())[1:])
+        usages = iter(re.split(r"(%s.*) - " % info.name, info.usage.strip())[1:])
         for (sig, description) in zip(usages, usages):
             result.append("  <tr>")
             result.append("    <td>%s</td>" % sig)
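
The generator previously assumed every documented signature looks like `name(...)`; the newly documented bare `_FUNC_` form (rendered as, say, `now`) has no parentheses, so the pattern is loosened to `name.*`. A rough standalone illustration of the difference in Scala (the real script uses Python's re.split; the value names here are made up for the sketch):

  // REPL-style sketch of why the old pattern misses the bare signature form.
  val name = "now"
  val oldSignature = (name + """\(.*\) - """).r  // requires `now(...)` before " - "
  val newSignature = (name + """.* - """).r      // also accepts a bare `now`
  val usageLine = "now - Returns the current timestamp at the start of query evaluation."
  println(oldSignature.findFirstIn(usageLine))   // None: bare signature not recognized
  println(newSignature.findFirstIn(usageLine))   // Some("now - "): bare signature recognized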
