
Commit d2a535f

[SPARK-34246][FOLLOWUP] Change the definition of findTightestCommonType for backward compatibility
### What changes were proposed in this pull request?

Change the definition of `findTightestCommonType` from

```
def findTightestCommonType(t1: DataType, t2: DataType): Option[DataType]
```

to

```
val findTightestCommonType: (DataType, DataType) => Option[DataType]
```

### Why are the changes needed?

For backward compatibility. When running a MongoDB connector (built with Spark 3.1.1) against the latest master, there is an error:

```
java.lang.NoSuchMethodError: org.apache.spark.sql.catalyst.analysis.TypeCoercion$.findTightestCommonType()Lscala/Function2
```

from https://github.com/mongodb/mongo-spark/blob/master/src/main/scala/com/mongodb/spark/sql/MongoInferSchema.scala#L150

In the previous release, the function was

```
static public scala.Function2<org.apache.spark.sql.types.DataType, org.apache.spark.sql.types.DataType, scala.Option<org.apache.spark.sql.types.DataType>> findTightestCommonType()
```

After #31349, the function becomes

```
static public scala.Option<org.apache.spark.sql.types.DataType> findTightestCommonType(org.apache.spark.sql.types.DataType t1, org.apache.spark.sql.types.DataType t2)
```

This PR is to reduce the unnecessary API change.

### Does this PR introduce _any_ user-facing change?

Yes, the definition of `TypeCoercion.findTightestCommonType` is consistent with the previous release again.

### How was this patch tested?

Existing unit tests.

Closes #32493 from gengliangwang/typecoercion.

Authored-by: Gengliang Wang <[email protected]>
Signed-off-by: Gengliang Wang <[email protected]>
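The root cause is how Scala compiles the two shapes: a `def` with parameters becomes a JVM method taking those parameters, while a `val` of function type becomes a zero-argument getter returning a `scala.Function2`. A minimal sketch, independent of Spark (the object and member names here are illustrative, not from the codebase):

```scala
object Coercions {
  // Compiles to a two-argument method:
  //   public scala.Option<String> asDef(String, String)
  def asDef(t1: String, t2: String): Option[String] =
    if (t1 == t2) Some(t1) else None

  // Compiles to a zero-argument getter returning a Function2:
  //   public scala.Function2<String, String, scala.Option<String>> asVal()
  val asVal: (String, String) => Option[String] = {
    case (t1, t2) if t1 == t2 => Some(t1)
    case _                    => None
  }
}

object Caller extends App {
  // Scala source call sites look identical for either shape...
  println(Coercions.asDef("IntegerType", "IntegerType")) // Some(IntegerType)
  println(Coercions.asVal("IntegerType", "LongType"))    // None
  // ...but a jar compiled against the val's getter throws NoSuchMethodError
  // at link time once the member is turned into a two-argument def.
}
```

Recompiled sources are therefore unaffected by such a change; only pre-built binaries like the MongoDB connector break, which is why restoring the `val` shape fixes the `NoSuchMethodError` without touching callers.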
1 parent: 7182f8c

File tree: 2 files changed, +27 −32 lines

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercion.scala

Lines changed: 25 additions & 28 deletions
```diff
@@ -93,36 +93,33 @@ object AnsiTypeCoercion extends TypeCoercionBase {
       WindowFrameCoercion ::
       StringLiteralCoercion :: Nil) :: Nil
 
-  override def findTightestCommonType(t1: DataType, t2: DataType): Option[DataType] = {
-    (t1, t2) match {
-      case (t1, t2) if t1 == t2 => Some(t1)
-      case (NullType, t1) => Some(t1)
-      case (t1, NullType) => Some(t1)
-
-      case (t1: IntegralType, t2: DecimalType) if t2.isWiderThan(t1) =>
-        Some(t2)
-      case (t1: DecimalType, t2: IntegralType) if t1.isWiderThan(t2) =>
-        Some(t1)
-
-      case (t1: NumericType, t2: NumericType)
-          if !t1.isInstanceOf[DecimalType] && !t2.isInstanceOf[DecimalType] =>
-        val index = numericPrecedence.lastIndexWhere(t => t == t1 || t == t2)
-        val widerType = numericPrecedence(index)
-        if (widerType == FloatType) {
-          // If the input type is an Integral type and a Float type, simply return Double type as
-          // the tightest common type to avoid potential precision loss on converting the Integral
-          // type as Float type.
-          Some(DoubleType)
-        } else {
-          Some(widerType)
-        }
-
-      case (_: TimestampType, _: DateType) | (_: DateType, _: TimestampType) =>
-        Some(TimestampType)
+  val findTightestCommonType: (DataType, DataType) => Option[DataType] = {
+    case (t1, t2) if t1 == t2 => Some(t1)
+    case (NullType, t1) => Some(t1)
+    case (t1, NullType) => Some(t1)
+
+    case (t1: IntegralType, t2: DecimalType) if t2.isWiderThan(t1) =>
+      Some(t2)
+    case (t1: DecimalType, t2: IntegralType) if t1.isWiderThan(t2) =>
+      Some(t1)
+
+    case (t1: NumericType, t2: NumericType)
+        if !t1.isInstanceOf[DecimalType] && !t2.isInstanceOf[DecimalType] =>
+      val index = numericPrecedence.lastIndexWhere(t => t == t1 || t == t2)
+      val widerType = numericPrecedence(index)
+      if (widerType == FloatType) {
+        // If the input type is an Integral type and a Float type, simply return Double type as
+        // the tightest common type to avoid potential precision loss on converting the Integral
+        // type as Float type.
+        Some(DoubleType)
+      } else {
+        Some(widerType)
+      }
 
-      case (t1, t2) => findTypeForComplex(t1, t2, findTightestCommonType)
-    }
+    case (_: TimestampType, _: DateType) | (_: DateType, _: TimestampType) =>
+      Some(TimestampType)
 
+    case (t1, t2) => findTypeForComplex(t1, t2, findTightestCommonType)
   }
 
   override def findWiderTypeForTwo(t1: DataType, t2: DataType): Option[DataType] = {
```
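The `FloatType` special case preserved in the diff above exists because `Float` has only a 24-bit significand, so widening an `Int`/`Float` pair to `Float` can silently corrupt large integers. A small standalone Scala illustration of the effect the code comment describes (not Spark code; the value is an arbitrary example):

```scala
// Demonstrates the precision loss the diff's comment refers to.
object FloatPrecision extends App {
  val i: Int = 123456789     // needs 27 significant bits
  println(i.toFloat.toInt)   // 123456792 -- rounded by Float's 24-bit significand
  println(i.toDouble.toInt)  // 123456789 -- Double represents every Int exactly
}
```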

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala

Lines changed: 2 additions & 4 deletions
```diff
@@ -44,7 +44,7 @@ abstract class TypeCoercionBase {
    * with primitive types, because in that case the precision and scale of the result depends on
    * the operation. Those rules are implemented in [[DecimalPrecision]].
    */
-  def findTightestCommonType(type1: DataType, type2: DataType): Option[DataType]
+  val findTightestCommonType: (DataType, DataType) => Option[DataType]
 
   /**
    * Looking for a widened data type of two given data types with some acceptable loss of precision.
@@ -845,8 +845,7 @@ object TypeCoercion extends TypeCoercionBase {
     FloatType,
     DoubleType)
 
-  override def findTightestCommonType(t1: DataType, t2: DataType): Option[DataType] = {
-    (t1, t2) match {
+  override val findTightestCommonType: (DataType, DataType) => Option[DataType] = {
     case (t1, t2) if t1 == t2 => Some(t1)
     case (NullType, t1) => Some(t1)
     case (t1, NullType) => Some(t1)
@@ -866,7 +865,6 @@ object TypeCoercion extends TypeCoercionBase {
       Some(TimestampType)
 
     case (t1, t2) => findTypeForComplex(t1, t2, findTightestCommonType)
-    }
   }
 
   /** Promotes all the way to StringType. */
```
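For context, the downstream call pattern this change keeps binary-compatible looks roughly like the following sketch (modeled loosely on the MongoInferSchema usage linked above; `SchemaWidening`, `compatibleType`, and the fallback to `StringType` are illustrative assumptions, not the connector's actual code):

```scala
import org.apache.spark.sql.catalyst.analysis.TypeCoercion
import org.apache.spark.sql.types.{DataType, StringType}

object SchemaWidening {
  // With findTightestCommonType exposed as a Function2 value again, callers
  // compiled against Spark 3.1.x resolve the same getter at link time.
  val tightest: (DataType, DataType) => Option[DataType] =
    TypeCoercion.findTightestCommonType

  // Illustrative helper: widen a non-empty list of observed field types
  // pairwise, falling back to StringType when no tight common type exists.
  def compatibleType(types: Seq[DataType]): DataType =
    types.reduce((a, b) => tightest(a, b).getOrElse(StringType))
}
```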
