diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index 05de21b9eafe..3b04f44d6802 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -30,6 +30,7 @@ import org.apache.spark.sql.catalyst.expressions.objects._
 import org.apache.spark.sql.catalyst.util.{ArrayData, MapData}
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
+import org.apache.spark.util.Utils
 
 /**
@@ -377,6 +378,23 @@ object ScalaReflection extends ScalaReflection {
           expressions.Literal.create(null, ObjectType(cls)),
           newInstance
         )
+
+      case t if isSubtype(t, localTypeOf[Enumeration#Value]) =>
+        // package example
+        // object Foo extends Enumeration {
+        //  type Foo = Value
+        //  val E1, E2 = Value
+        // }
+        // the fullName of tpe is example.Foo.Foo, but we need example.Foo so that
+        // we can call example.Foo.withName to deserialize string to enumeration.
+        val parent = t.asInstanceOf[TypeRef].pre.typeSymbol.asClass
+        val cls = mirror.runtimeClass(parent)
+        StaticInvoke(
+          cls,
+          ObjectType(getClassFromType(t)),
+          "withName",
+          createDeserializerForString(path, false) :: Nil,
+          returnNullable = false)
     }
   }
 
@@ -562,6 +580,14 @@ object ScalaReflection extends ScalaReflection {
         }
         createSerializerForObject(inputObject, fields)
 
+      case t if isSubtype(t, localTypeOf[Enumeration#Value]) =>
+        createSerializerForString(
+          Invoke(
+            inputObject,
+            "toString",
+            ObjectType(classOf[java.lang.String]),
+            returnNullable = false))
+
       case _ =>
         throw new UnsupportedOperationException(
           s"No Encoder found for $tpe\n" + walkedTypePath)
@@ -739,6 +765,8 @@ object ScalaReflection extends ScalaReflection {
            val Schema(dataType, nullable) = schemaFor(fieldType)
            StructField(fieldName, dataType, nullable)
          }), nullable = true)
+      case t if isSubtype(t, localTypeOf[Enumeration#Value]) =>
+        Schema(StringType, nullable = true)
       case other =>
         throw new UnsupportedOperationException(s"Schema for type $other is not supported")
     }
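
The new serializer and deserializer cases above reduce to a plain `toString` / `withName` round trip on the enumeration's parent object; the `TypeRef.pre` step exists only to get from the value type `example.Foo.Foo` to the enclosing object `example.Foo`, whose runtime class `StaticInvoke` targets. A minimal sketch of that contract in plain Scala (the `Color` enumeration is illustrative, not part of this patch):

```scala
object Color extends Enumeration {
  type Color = Value
  val Red, Green = Value
}

// Serialization side: Invoke(inputObject, "toString", ...) emits the value's name.
val name: String = Color.Red.toString // "Red"

// Deserialization side: StaticInvoke(cls, ..., "withName", ...) resolves the name
// back into a Value on the parent object -- which is why the case above walks from
// the TypeRef's prefix to the enclosing object's runtime class.
val back: Color.Color = Color.withName(name)
assert(back == Color.Red)

// An unknown name fails fast, per Enumeration.withName's contract:
// Color.withName("Blue") throws java.util.NoSuchElementException
```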
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
index b981a50499bf..e8c7aed6d72c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
@@ -22,6 +22,7 @@ import java.sql.{Date, Timestamp}
 import scala.reflect.runtime.universe.TypeTag
 
 import org.apache.spark.SparkFunSuite
+import org.apache.spark.sql.catalyst.FooEnum.FooEnum
 import org.apache.spark.sql.catalyst.analysis.UnresolvedExtractValue
 import org.apache.spark.sql.catalyst.expressions.{CreateNamedStruct, Expression, If, SpecificInternalRow, UpCast}
 import org.apache.spark.sql.catalyst.expressions.objects.{AssertNotNull, NewInstance}
@@ -90,6 +91,13 @@ case class FooWithAnnotation(f1: String @FooAnnotation, f2: Option[String] @FooA
 
 case class SpecialCharAsFieldData(`field.1`: String, `field 2`: String)
 
+object FooEnum extends Enumeration {
+  type FooEnum = Value
+  val E1, E2 = Value
+}
+
+case class FooClassWithEnum(i: Int, e: FooEnum)
+
 object TestingUDT {
   @SQLUserDefinedType(udt = classOf[NestedStructUDT])
   class NestedStruct(val a: Integer, val b: Long, val c: Double)
@@ -437,4 +445,11 @@ class ScalaReflectionSuite extends SparkFunSuite {
       StructField("f2", StringType))))
     assert(deserializerFor[FooWithAnnotation].dataType == ObjectType(classOf[FooWithAnnotation]))
   }
+
+  test("SPARK-32585: Support scala enumeration in ScalaReflection") {
+    assert(serializerFor[FooClassWithEnum].dataType == StructType(Seq(
+      StructField("i", IntegerType, false),
+      StructField("e", StringType, true))))
+    assert(deserializerFor[FooClassWithEnum].dataType == ObjectType(classOf[FooClassWithEnum]))
+  }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
index 6a094d4aadda..f2598a925e08 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
@@ -25,7 +25,7 @@ import scala.collection.mutable.ArrayBuffer
 import scala.reflect.runtime.universe.TypeTag
 
 import org.apache.spark.sql.{Encoder, Encoders}
-import org.apache.spark.sql.catalyst.{OptionalData, PrimitiveData}
+import org.apache.spark.sql.catalyst.{FooClassWithEnum, FooEnum, OptionalData, PrimitiveData}
 import org.apache.spark.sql.catalyst.analysis.AnalysisTest
 import org.apache.spark.sql.catalyst.dsl.plans._
 import org.apache.spark.sql.catalyst.expressions.AttributeReference
@@ -389,6 +389,14 @@ class ExpressionEncoderSuite extends CodegenInterpretedPlanTest with AnalysisTes
     assert(e.getMessage.contains("tuple with more than 22 elements are not supported"))
   }
 
+  encodeDecodeTest((1, FooEnum.E1), "Tuple with Int and scala Enum")
+  encodeDecodeTest((null, FooEnum.E1, FooEnum.E2), "Tuple with Null and scala Enum")
+  encodeDecodeTest(Seq(FooEnum.E1, null), "Seq with scala Enum")
+  encodeDecodeTest(Map("key" -> FooEnum.E1), "Map with String key and scala Enum")
+  encodeDecodeTest(Map(FooEnum.E1 -> "value"), "Map with scala Enum key and String value")
+  encodeDecodeTest(FooClassWithEnum(1, FooEnum.E1), "case class with Int and scala Enum")
+  encodeDecodeTest(FooEnum.E1, "scala Enum")
+
   // Scala / Java big decimals ----------------------------------------------------------
 
   encodeDecodeTest(BigDecimal(("9" * 20) + "." + "9" * 18),
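
The `encodeDecodeTest` cases above exercise the new `Enumeration#Value` handling nested inside tuples, `Seq`, `Map` keys and values, case classes, and at top level. At the schema level everything reduces to a nullable string, per the `schemaFor` case added above. A small sketch of what `schemaFor` reports with this patch applied (it reuses the `FooEnum` test fixture, so it assumes the catalyst test classes are on the classpath):

```scala
import org.apache.spark.sql.catalyst.{FooEnum, ScalaReflection}
import org.apache.spark.sql.types.StringType

// Any Enumeration#Value is reported as a nullable string column.
val schema = ScalaReflection.schemaFor[FooEnum.FooEnum]
assert(schema.dataType == StringType)
assert(schema.nullable)
```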
+ "9" * 18), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala index 4923e8b55690..3c914ae04367 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala @@ -25,7 +25,7 @@ import org.scalatest.exceptions.TestFailedException import org.scalatest.prop.TableDrivenPropertyChecks._ import org.apache.spark.{SparkException, TaskContext} -import org.apache.spark.sql.catalyst.ScroogeLikeExample +import org.apache.spark.sql.catalyst.{FooClassWithEnum, FooEnum, ScroogeLikeExample} import org.apache.spark.sql.catalyst.encoders.{OuterScopes, RowEncoder} import org.apache.spark.sql.catalyst.plans.{LeftAnti, LeftSemi} import org.apache.spark.sql.catalyst.util.sideBySide @@ -1926,6 +1926,19 @@ class DatasetSuite extends QueryTest } } } + + test("SPARK-32585: Support scala enumeration in ScalaReflection") { + checkDataset( + Seq(FooClassWithEnum(1, FooEnum.E1), FooClassWithEnum(2, FooEnum.E2)).toDS(), + Seq(FooClassWithEnum(1, FooEnum.E1), FooClassWithEnum(2, FooEnum.E2)): _* + ) + + // test null + checkDataset( + Seq(FooClassWithEnum(1, null), FooClassWithEnum(2, FooEnum.E2)).toDS(), + Seq(FooClassWithEnum(1, null), FooClassWithEnum(2, FooEnum.E2)): _* + ) + } } object AssertExecutionId {