Fix expected results
MaxGekk committed Sep 26, 2019
commit 1f52b756c572cb16d3b7900b22be2ac5ddfd478d
@@ -63,7 +63,7 @@ import org.apache.spark.sql.types._
       > SELECT _FUNC_(10.0, array(0.5, 0.4, 0.1), 100);
        [10.0,10.0,10.0]
       > SELECT _FUNC_(10.0, 0.5, 100);
-       10.0
+       10
   """,
   since = "2.1.0")
 case class ApproximatePercentile(
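
The point of the commit is to make the documented example outputs match what Spark actually returns. Below is a minimal sketch of how one might re-run such an example locally and compare it against the corrected expectation; the standalone object, local[1] master, and app name are illustrative assumptions, and percentile_approx is one of the SQL names bound to ApproximatePercentile.

import org.apache.spark.sql.SparkSession

// Minimal sketch (assumption: Spark is on the classpath and a local master is
// acceptable) for re-running a docstring example and comparing the result with
// the corrected expected output above.
object CheckExpressionExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("check-expression-examples")
      .getOrCreate()

    // percentile_approx is a SQL name registered for ApproximatePercentile.
    spark.sql("SELECT percentile_approx(10.0, 0.5, 100)").show(truncate = false)

    spark.stop()
  }
}

Note that show() formatting may differ slightly from what the spark-sql CLI prints, and the CLI output is the form the docstring examples follow.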
@@ -40,7 +40,7 @@ import org.apache.spark.unsafe.types.UTF8String
   examples = """
     Examples:
       > SELECT _FUNC_('1, 0.8', 'a INT, b DOUBLE');
-       {"a":1, "b":0.8}
+       {"a":1,"b":0.8}
       > SELECT _FUNC_('26/08/2015', 'time Timestamp', map('timestampFormat', 'dd/MM/yyyy'))
        {"time":2015-08-26 00:00:00.0}
   """,
@@ -199,7 +199,7 @@ case class SchemaOfCsv(
       > SELECT _FUNC_(named_struct('a', 1, 'b', 2));
        1,2
       > SELECT _FUNC_(named_struct('time', to_timestamp('2015-08-26', 'yyyy-MM-dd')), map('timestampFormat', 'dd/MM/yyyy'));
-       "26/08/2015"
+       26/08/2015
   """,
   since = "3.0.0")
 // scalastyle:on line.size.limit
@@ -842,7 +842,7 @@ abstract class UnixTime extends ToTimestamp {
   examples = """
     Examples:
       > SELECT _FUNC_(0, 'yyyy-MM-dd HH:mm:ss');
-       1970-01-01 00:00:00
+       1969-12-31 16:00:00

Member commented:

    Oh, surprising.

MaxGekk (Member Author) commented on Sep 26, 2019:

    The yyyy-MM-dd HH:mm:ss pattern does not contain a time zone sub-pattern. If you specify one, you will see something like:

    spark-sql> SELECT from_unixtime(0, 'yyyy-MM-dd HH:mm:ssXXX');
    1970-01-01 03:00:00+03:00

MaxGekk (Member Author) commented:

    And you can change your current time zone to UTC to see 1970-01-01 00:00:00:

    spark-sql> set spark.sql.session.timeZone=UTC;
    spark.sql.session.timeZone	UTC
    spark-sql> SELECT from_unixtime(0, 'yyyy-MM-dd HH:mm:ssXXX');
    1970-01-01 00:00:00Z

Member commented:

    Ya. The time zone issue will cause failures on machines in different time zones.

MaxGekk (Member Author) commented:

    But the time zone is forcibly set to "America/Los_Angeles" in tests:

    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))

    .createWithDefaultFunction(() => TimeZone.getDefault.getID)

""",
since = "1.5.0")
case class FromUnixTime(sec: Expression, format: Expression, timeZoneId: Option[String] = None)
Expand Down
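
To make the thread above concrete, here is a small sketch showing how the from_unixtime example output shifts with the session time zone. It relies only on spark.sql.session.timeZone, the config the thread itself uses; the local SparkSession setup and object name are assumptions for illustration.

import org.apache.spark.sql.SparkSession

// Sketch of the time-zone dependence discussed in the review thread above.
// Assumptions: a local SparkSession is acceptable; spark.sql.session.timeZone
// is the session time zone config referenced in the thread.
object FromUnixTimeTimeZone {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("from-unixtime-timezone")
      .getOrCreate()

    // With the time zone the tests pin (America/Los_Angeles, UTC-8 at epoch),
    // epoch second 0 renders as 1969-12-31 16:00:00, the corrected expectation.
    spark.conf.set("spark.sql.session.timeZone", "America/Los_Angeles")
    spark.sql("SELECT from_unixtime(0, 'yyyy-MM-dd HH:mm:ss')").show(truncate = false)

    // Under UTC the same expression renders as 1970-01-01 00:00:00, matching
    // the second spark-sql snippet in the thread.
    spark.conf.set("spark.sql.session.timeZone", "UTC")
    spark.sql("SELECT from_unixtime(0, 'yyyy-MM-dd HH:mm:ss')").show(truncate = false)

    spark.stop()
  }
}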
@@ -502,9 +502,9 @@ case class JsonTuple(children: Seq[Expression])
   examples = """
     Examples:
       > SELECT _FUNC_('{"a":1, "b":0.8}', 'a INT, b DOUBLE');
-       {"a":1, "b":0.8}
+       {"a":1,"b":0.8}
       > SELECT _FUNC_('{"time":"26/08/2015"}', 'time Timestamp', map('timestampFormat', 'dd/MM/yyyy'));
-       {"time":"2015-08-26 00:00:00.0"}
+       {"time":2015-08-26 00:00:00.0}
   """,
   since = "2.2.0")
 // scalastyle:on line.size.limit
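
The same from_json example can also be exercised from the Scala DataFrame API, where the options map plays the role of map('timestampFormat', 'dd/MM/yyyy') in the SQL example. This is an illustrative sketch only; the column name, schema, and session setup are assumptions, not part of the PR.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.from_json
import org.apache.spark.sql.types.{StructType, TimestampType}

// Illustrative sketch (not part of the PR): the SQL example above expressed
// through the Scala API. Column name, schema, and session setup are assumed.
object FromJsonOptions {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("from-json-options")
      .getOrCreate()
    import spark.implicits._

    val schema = new StructType().add("time", TimestampType)
    val df = Seq("""{"time":"26/08/2015"}""").toDF("json")

    // timestampFormat corresponds to map('timestampFormat', 'dd/MM/yyyy') in
    // the SQL example above.
    df.select(from_json($"json", schema, Map("timestampFormat" -> "dd/MM/yyyy")))
      .show(truncate = false)

    spark.stop()
  }
}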
@@ -949,7 +949,7 @@ object StringTrimRight {
   usage = """
     _FUNC_(str) - Removes the trailing space characters from `str`.

-    _FUNC_(trimStr, str) - Removes the trailing string which contains the characters from the trim string from the `str`
+    _FUNC_(str, trimStr) - Removes the trailing string which contains the characters from the trim string from the `str`
   """,
   arguments = """
     Arguments:
@@ -960,7 +960,7 @@
     Examples:
       > SELECT _FUNC_(' SparkSQL ');
        SparkSQL
-      > SELECT _FUNC_('LQSa', 'SSparkSQLS');
+      > SELECT _FUNC_('SSparkSQLS', 'SQLS');
        SSpark
   """,
   since = "1.5.0")
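
As the corrected usage line documents, the string to trim now comes first and the trim characters second, and the example result follows because trailing characters are stripped while they appear in the trim set. A tiny sketch of the corrected example; the SparkSession setup and object name are assumptions.

import org.apache.spark.sql.SparkSession

// Sketch of the corrected rtrim example above. Assumption: a local
// SparkSession is acceptable; rtrim is the SQL name for StringTrimRight.
object RtrimArgumentOrder {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("rtrim-argument-order")
      .getOrCreate()

    // 'SQLS' acts as a set of characters {S, Q, L}. The trailing 'S', 'L',
    // 'Q', 'S' are removed; 'k' is not in the set, so trimming stops and the
    // result is "SSpark".
    spark.sql("SELECT rtrim('SSparkSQLS', 'SQLS')").show(truncate = false)

    spark.stop()
  }
}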
@@ -194,7 +194,7 @@ case class XPathString(xml: Expression, path: Expression) extends XPathExtract {
   examples = """
     Examples:
       > SELECT _FUNC_('<a><b>b1</b><b>b2</b><b>b3</b><c>c1</c><c>c2</c></a>','a/b/text()');
-       ['b1','b2','b3']
+       ["b1","b2","b3"]
   """)
 // scalastyle:on line.size.limit
 case class XPathList(xml: Expression, path: Expression) extends XPathExtract {