Commit c7fc3e6

[SPARK-8349][SQL] Use expression constructors (rather than apply) in FunctionRegistry

1 parent d986fb9

5 files changed: +23 -32 lines

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala

Lines changed: 8 additions & 11 deletions
@@ -158,27 +158,24 @@ object FunctionRegistry {
   /** See usage above. */
   private def expression[T <: Expression](name: String)
       (implicit tag: ClassTag[T]): (String, FunctionBuilder) = {
-    // Use the companion class to find apply methods.
-    val objectClass = Class.forName(tag.runtimeClass.getName + "$")
-    val companionObj = objectClass.getDeclaredField("MODULE$").get(null)
-
-    // See if we can find an apply that accepts Seq[Expression]
-    val varargApply = Try(objectClass.getDeclaredMethod("apply", classOf[Seq[_]])).toOption
 
+    // See if we can find a constructor that accepts Seq[Expression]
+    val varargCtor = Try(tag.runtimeClass.getDeclaredConstructor(classOf[Seq[_]])).toOption
     val builder = (expressions: Seq[Expression]) => {
-      if (varargApply.isDefined) {
+      if (varargCtor.isDefined) {
         // If there is an apply method that accepts Seq[Expression], use that one.
-        varargApply.get.invoke(companionObj, expressions).asInstanceOf[Expression]
+        //varargApply.get.invoke(companionObj, expressions).asInstanceOf[Expression]
+        varargCtor.get.newInstance(expressions).asInstanceOf[Expression]
       } else {
-        // Otherwise, find an apply method that matches the number of arguments, and use that.
+        // Otherwise, find a constructor that matches the number of arguments, and use that.
         val params = Seq.fill(expressions.size)(classOf[Expression])
-        val f = Try(objectClass.getDeclaredMethod("apply", params : _*)) match {
+        val f = Try(tag.runtimeClass.getDeclaredConstructor(params : _*)) match {
           case Success(e) =>
             e
           case Failure(e) =>
             throw new AnalysisException(s"Invalid number of arguments for function $name")
         }
-        f.invoke(companionObj, expressions : _*).asInstanceOf[Expression]
+        f.newInstance(expressions : _*).asInstanceOf[Expression]
      }
     }
     (name, builder)
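For readers unfamiliar with the reflection involved, here is a minimal, self-contained sketch of the arity-matched constructor lookup this hunk switches to. Expr, One, Add2 and the simplified expression() helper are made-up stand-ins, not Spark classes; only the getDeclaredConstructor/newInstance pattern mirrors the new code, and the Seq[Expression] vararg branch is omitted.

    import scala.reflect.ClassTag
    import scala.util.Try

    trait Expr
    case object One extends Expr
    case class Add2(left: Expr, right: Expr) extends Expr

    // Look up a constructor whose arity matches the supplied arguments and invoke it,
    // instead of going through a companion object's apply methods.
    def expression[T <: Expr](name: String)
        (implicit tag: ClassTag[T]): (String, Seq[Expr] => Expr) = {
      val builder = (args: Seq[Expr]) => {
        val params = Seq.fill(args.size)(classOf[Expr])
        val ctor = Try(tag.runtimeClass.getDeclaredConstructor(params: _*))
          .getOrElse(sys.error(s"Invalid number of arguments for function $name"))
        ctor.newInstance(args: _*).asInstanceOf[Expr]
      }
      (name, builder)
    }

    // Usage: the builder reflectively calls new Add2(One, One).
    val (_, build) = expression[Add2]("add2")
    println(build(Seq(One, One)))  // Add2(One,One)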

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala

Lines changed: 1 addition & 2 deletions
@@ -27,8 +27,7 @@ import org.apache.spark.sql.types._
 /**
  * If an expression wants to be exposed in the function registry (so users can call it with
  * "name(arguments...)", the concrete implementation must be a case class whose constructor
- * arguments are all Expressions types. In addition, if it needs to support more than one
- * constructor, define those constructors explicitly as apply methods in the companion object.
+ * arguments are all Expression types.
 *
 * See [[Substring]] for an example.
 */
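As a rough illustration of the convention the revised comment now describes, written with hypothetical names rather than Spark's classes: alternate arities live on the case class itself as auxiliary constructors, not as apply methods on a companion object.

    trait Expr
    case class Lit(value: Any) extends Expr

    // Primary constructor takes only Expr arguments; the optional length argument
    // is supplied by an auxiliary constructor instead of a companion apply.
    case class Truncate(str: Expr, len: Expr) extends Expr {
      def this(str: Expr) = this(str, Lit(Int.MaxValue))
    }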

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/random.scala

Lines changed: 4 additions & 8 deletions
@@ -49,12 +49,10 @@ abstract class RDG(seed: Long) extends LeafExpression with Serializable {
 /** Generate a random column with i.i.d. uniformly distributed values in [0, 1). */
 case class Rand(seed: Long) extends RDG(seed) {
   override def eval(input: InternalRow): Double = rng.nextDouble()
-}
 
-object Rand {
-  def apply(): Rand = apply(Utils.random.nextLong())
+  def this() = this(Utils.random.nextLong())
 
-  def apply(seed: Expression): Rand = apply(seed match {
+  def this(seed: Expression) = this(seed match {
     case IntegerLiteral(s) => s
     case _ => throw new AnalysisException("Input argument to rand must be an integer literal.")
   })
@@ -63,12 +61,10 @@ object Rand {
 /** Generate a random column with i.i.d. gaussian random distribution. */
 case class Randn(seed: Long) extends RDG(seed) {
   override def eval(input: InternalRow): Double = rng.nextGaussian()
-}
 
-object Randn {
-  def apply(): Randn = apply(Utils.random.nextLong())
+  def this() = this(Utils.random.nextLong())
 
-  def apply(seed: Expression): Randn = apply(seed match {
+  def this(seed: Expression) = this(seed match {
     case IntegerLiteral(s) => s
     case _ => throw new AnalysisException("Input argument to rand must be an integer literal.")
   })
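One practical consequence of moving from companion apply methods to auxiliary constructors, shown with a simplified stand-in for Rand (not the actual catalyst class; scala.util.Random stands in for Utils.random):

    import scala.util.Random

    case class Rand(seed: Long) {
      // Auxiliary constructor supplies a default seed, replacing the old companion apply().
      def this() = this(Random.nextLong())
    }

    val r1 = new Rand()   // auxiliary constructors must be invoked with `new`
    val r2 = Rand(42L)    // the synthesized case-class apply still covers the primary constructor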

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala

Lines changed: 5 additions & 6 deletions
@@ -20,6 +20,7 @@ package org.apache.spark.sql.catalyst.expressions
 import java.util.regex.Pattern
 
 import org.apache.spark.sql.catalyst.analysis.UnresolvedException
+import org.apache.spark.sql.catalyst.expressions.Substring
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.UTF8String
@@ -225,6 +226,10 @@ case class EndsWith(left: Expression, right: Expression)
 case class Substring(str: Expression, pos: Expression, len: Expression)
   extends Expression with ExpectsInputTypes {
 
+  def this(str: Expression, pos: Expression) = {
+    this(str, pos, Literal(Integer.MAX_VALUE))
+  }
+
   override def foldable: Boolean = str.foldable && pos.foldable && len.foldable
 
   override def nullable: Boolean = str.nullable || pos.nullable || len.nullable
@@ -290,12 +295,6 @@ case class Substring(str: Expression, pos: Expression, len: Expression)
   }
 }
 
-object Substring {
-  def apply(str: Expression, pos: Expression): Substring = {
-    apply(str, pos, Literal(Integer.MAX_VALUE))
-  }
-}
-
 /**
  * A function that return the length of the given string expression.
 */
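To connect this change back to FunctionRegistry: the two-argument form is now a real JVM constructor, so the registry's arity-based getDeclaredConstructor lookup can resolve it directly. A small sketch with stand-in types (Expr, Lit, Substr are illustrative names, not the catalyst classes):

    trait Expr
    case class Lit(v: Any) extends Expr

    // Stand-in for Substring: the two-argument variant is an auxiliary constructor,
    // visible to reflection by its arity.
    case class Substr(str: Expr, pos: Expr, len: Expr) extends Expr {
      def this(str: Expr, pos: Expr) = this(str, pos, Lit(Int.MaxValue))
    }

    val twoArg = classOf[Substr].getDeclaredConstructor(classOf[Expr], classOf[Expr])
    println(twoArg.newInstance(Lit("abc"), Lit(1)))  // Substr(Lit(abc),Lit(1),Lit(2147483647))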

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala

Lines changed: 5 additions & 5 deletions
@@ -344,11 +344,11 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] {
    * @param newArgs the new product arguments.
    */
   def makeCopy(newArgs: Array[AnyRef]): this.type = attachTree(this, "makeCopy") {
-    val defaultCtor =
-      getClass.getConstructors
-        .find(_.getParameterTypes.size != 0)
-        .headOption
-        .getOrElse(sys.error(s"No valid constructor for $nodeName"))
+    val ctors = getClass.getConstructors.filter(_.getParameterTypes.size != 0)
+    if (ctors.isEmpty) {
+      sys.error(s"No valid constructor for $nodeName")
+    }
+    val defaultCtor = ctors.maxBy(_.getParameterTypes.size)
 
     try {
       CurrentOrigin.withOrigin(origin) {
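To see why the switch to maxBy matters: once expressions gain auxiliary constructors, a class can expose several public constructors, and makeCopy should copy through the widest one (the primary, whose arity matches the product arguments). A stand-alone illustration with a made-up Node class:

    class Node(val a: String, val b: String) {
      def this(a: String) = this(a, "default")   // auxiliary constructor
    }

    val ctors = classOf[Node].getConstructors.filter(_.getParameterTypes.nonEmpty)
    if (ctors.isEmpty) {
      sys.error("No valid constructor for Node")
    }
    // Prefer the constructor with the most parameters, i.e. the (String, String) primary.
    val widest = ctors.maxBy(_.getParameterTypes.length)
    val copy = widest.newInstance("x", "y").asInstanceOf[Node]
    println(copy.a + " " + copy.b)  // x y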
