Update code
chenzhx committed Jul 2, 2022
commit 0b13a5e28d0af2d24a55695c8360bb7ee03995e3
@@ -377,13 +377,15 @@ class V2ExpressionBuilder(e: Expression, isPredicate: Boolean = false) {
generateExpression(child).map(v => new V2Extract("QUARTER", v))
case Year(child) =>
generateExpression(child).map(v => new V2Extract("YEAR", v))
// Translate Spark's DayOfWeek function using the ISO day-of-week standard.
// DayOfWeek uses Sunday = 1, Monday = 2, ..., while the ISO standard uses Monday = 1, ...,
// so we apply the formula ((ISO_standard % 7) + 1) for the translation.
case DayOfWeek(child) =>
generateExpression(child).map(v => new GeneralScalarExpression("+",
Array[V2Expression](new GeneralScalarExpression("%",
Array[V2Expression](new V2Extract("DAY_OF_WEEK", v), LiteralValue(7, IntegerType))),
LiteralValue(1, IntegerType))))
// Translate Spark's WeekDay function using the ISO day-of-week standard.
// WeekDay uses Monday = 0, Tuesday = 1, ..., while the ISO standard uses Monday = 1, ...,
// so we apply the formula (ISO_standard - 1) for the translation.
case WeekDay(child) =>
generateExpression(child).map(v => new GeneralScalarExpression("-",
Array[V2Expression](new V2Extract("DAY_OF_WEEK", v), LiteralValue(1, IntegerType))))
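
As a sanity check on the two translations above, here is a minimal standalone Scala sketch (not part of the patch) that applies the same formulas to ISO day-of-week numbers (Monday = 1, ..., Sunday = 7):

    // Standalone sketch, not part of this patch.
    // ISO numbering: Monday = 1, ..., Sunday = 7.
    object DayOfWeekTranslation {
      // Spark's DayOfWeek: Sunday = 1, Monday = 2, ..., Saturday = 7
      def dayOfWeek(iso: Int): Int = (iso % 7) + 1
      // Spark's WeekDay: Monday = 0, Tuesday = 1, ..., Sunday = 6
      def weekDay(iso: Int): Int = iso - 1

      def main(args: Array[String]): Unit = {
        assert(dayOfWeek(7) == 1) // ISO Sunday (7) -> DayOfWeek 1
        assert(dayOfWeek(1) == 2) // ISO Monday (1) -> DayOfWeek 2
        assert(weekDay(1) == 0)   // ISO Monday (1) -> WeekDay 0
        assert(weekDay(7) == 6)   // ISO Sunday (7) -> WeekDay 6
      }
    }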
@@ -19,7 +19,9 @@ package org.apache.spark.sql.jdbc

import java.sql.{Connection, DriverManager}
import java.util.Properties

import scala.util.control.NonFatal

import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.sql.{AnalysisException, DataFrame, ExplainSuiteHelper, QueryTest, Row}
import org.apache.spark.sql.catalyst.InternalRow
@@ -29,10 +31,9 @@ import org.apache.spark.sql.connector.{IntegralAverage, StrLen}
import org.apache.spark.sql.connector.catalog.functions.{ScalarFunction, UnboundFunction}
import org.apache.spark.sql.connector.expressions.Expression
import org.apache.spark.sql.connector.expressions.aggregate.{AggregateFunc, UserDefinedAggregateFunc}
import org.apache.spark.sql.execution.SimpleMode
import org.apache.spark.sql.execution.datasources.v2.{DataSourceV2ScanRelation, V1ScanWrapper}
import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
import org.apache.spark.sql.functions.{abs, acos, asin, atan, atan2, avg, ceil, coalesce, cos, cosh, cot, count, count_distinct, degrees, exp, floor, lit, log10, not, pow, radians, round, signum, sin, sinh, sqrt, sum, tan, tanh, udf, when, log => logarithm}
import org.apache.spark.sql.functions.{abs, acos, asin, atan, atan2, avg, ceil, coalesce, cos, cosh, cot, count, count_distinct, degrees, exp, floor, lit, log => logarithm, log10, not, pow, radians, round, signum, sin, sinh, sqrt, sum, tan, tanh, udf, when}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{DataType, IntegerType, StringType}
@@ -209,7 +210,7 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHelper
private def checkPushedInfo(df: DataFrame, expectedPlanFragment: String*): Unit = {
df.queryExecution.optimizedPlan.collect {
case _: DataSourceV2ScanRelation =>
checkKeywordsExistsInExplain(df, SimpleMode, expectedPlanFragment: _*)
checkKeywordsExistsInExplain(df, expectedPlanFragment: _*)
}
}
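
For context on how this helper is exercised, here is a hedged sketch of a typical call site in this suite; the table name and expected pushed-filter fragment are illustrative assumptions, not taken from this diff:

    // Illustrative only: the table name and expected fragment are assumptions.
    val df = sql("SELECT * FROM h2.test.people WHERE id > 1")
    checkPushedInfo(df, "PushedFilters: [ID IS NOT NULL, ID > 1]")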
