-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-21499] [SQL] Support creating persistent function for Spark UDAF(UserDefinedAggregateFunction) #18700
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 6 commits
4028155
f634043
a65607c
12cefc2
bd5ae26
7251be9
d3fbdc5
57607b5
05e8168
aff8f9e
7d9aabd
8ea4ad1
50224a7
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -17,13 +17,15 @@ | |
|
|
||
| package org.apache.spark.sql.catalyst.catalog | ||
|
|
||
| import java.lang.reflect.InvocationTargetException | ||
| import java.net.URI | ||
| import java.util.Locale | ||
| import java.util.concurrent.Callable | ||
| import javax.annotation.concurrent.GuardedBy | ||
|
|
||
| import scala.collection.mutable | ||
| import scala.util.{Failure, Success, Try} | ||
| import scala.util.control.NonFatal | ||
|
|
||
| import com.google.common.cache.{Cache, CacheBuilder} | ||
| import org.apache.hadoop.conf.Configuration | ||
|
|
@@ -39,7 +41,9 @@ import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParserInterface} | |
| import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias, View} | ||
| import org.apache.spark.sql.catalyst.util.StringUtils | ||
| import org.apache.spark.sql.internal.SQLConf | ||
| import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION | ||
| import org.apache.spark.sql.types.StructType | ||
| import org.apache.spark.util.Utils | ||
|
|
||
| object SessionCatalog { | ||
| val DEFAULT_DATABASE = "default" | ||
|
|
@@ -126,6 +130,13 @@ class SessionCatalog( | |
| if (conf.caseSensitiveAnalysis) name else name.toLowerCase(Locale.ROOT) | ||
| } | ||
|
|
||
| /** | ||
| * Checks whether the Hive metastore is being used | ||
| */ | ||
| private def isUsingHiveMetastore: Boolean = { | ||
| conf.getConf(CATALOG_IMPLEMENTATION).toLowerCase(Locale.ROOT) == "hive" | ||
| } | ||
|
|
||
| private val tableRelationCache: Cache[QualifiedTableName, LogicalPlan] = { | ||
| val cacheSize = conf.tableRelationCacheSize | ||
| CacheBuilder.newBuilder().maximumSize(cacheSize).build[QualifiedTableName, LogicalPlan]() | ||
|
|
@@ -1090,14 +1101,50 @@ class SessionCatalog( | |
| // | Methods that interact with temporary and metastore functions | | ||
| // ---------------------------------------------------------------- | ||
|
|
||
| /** | ||
| * Constructs a [[FunctionBuilder]] based on the provided class that represents a function. | ||
| */ | ||
| protected def makeFunctionBuilder(name: String, functionClassName: String): FunctionBuilder = { | ||
|
||
| val clazz = Utils.classForName(functionClassName) | ||
| (children: Seq[Expression]) => { | ||
| try { | ||
| makeFunctionExpression(name, Utils.classForName(functionClassName), children).getOrElse { | ||
|
||
| val extraMsg = | ||
| if (!isUsingHiveMetastore) "Use sparkSession.udf.register(...) instead." else "" | ||
| throw new AnalysisException( | ||
| s"No handler for UDF/UDAF/UDTF '${clazz.getCanonicalName}'. $extraMsg") | ||
| } | ||
| } catch { | ||
| case ae: AnalysisException => | ||
| throw ae | ||
| case NonFatal(e) => | ||
| val analysisException = | ||
| new AnalysisException(s"No handler for UDF/UDAF/UDTF '${clazz.getCanonicalName}': $e") | ||
| analysisException.setStackTrace(e.getStackTrace) | ||
| throw analysisException | ||
| } | ||
| } | ||
| } | ||
|
|
||
| /** | ||
| * Construct a [[FunctionBuilder]] based on the provided class that represents a function. | ||
| * | ||
| * This performs reflection to decide what type of [[Expression]] to return in the builder. | ||
| */ | ||
| protected def makeFunctionBuilder(name: String, functionClassName: String): FunctionBuilder = { | ||
| // TODO: at least support UDAFs here | ||
| throw new UnsupportedOperationException("Use sqlContext.udf.register(...) instead.") | ||
| protected def makeFunctionExpression( | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. seems we need to catch exception for this method anyway, how about we just make this method return
||
| name: String, | ||
| clazz: Class[_], | ||
| children: Seq[Expression]): Option[Expression] = { | ||
| val clsForUDAF = | ||
| Utils.classForName("org.apache.spark.sql.expressions.UserDefinedAggregateFunction") | ||
| if (clsForUDAF.isAssignableFrom(clazz)) { | ||
| val cls = Utils.classForName("org.apache.spark.sql.execution.aggregate.ScalaUDAF") | ||
| Some(cls.getConstructor(classOf[Seq[Expression]], clsForUDAF, classOf[Int], classOf[Int]) | ||
| .newInstance(children, clazz.newInstance().asInstanceOf[Object], Int.box(1), Int.box(1)) | ||
| .asInstanceOf[Expression]) | ||
| } else { | ||
| None | ||
| } | ||
| } | ||
|
|
||
| /** | ||
|
|
@@ -1121,7 +1168,14 @@ class SessionCatalog( | |
| } | ||
| val info = new ExpressionInfo(funcDefinition.className, func.database.orNull, func.funcName) | ||
| val builder = | ||
| functionBuilder.getOrElse(makeFunctionBuilder(func.unquotedString, funcDefinition.className)) | ||
| functionBuilder.getOrElse { | ||
| val className = funcDefinition.className | ||
| if (!Utils.classIsLoadable(className)) { | ||
| throw new AnalysisException(s"Can not load class '$className' when registering " + | ||
| s"the function '$func', please make sure it is on the classpath") | ||
| } | ||
| makeFunctionBuilder(func.unquotedString, className) | ||
| } | ||
| functionRegistry.registerFunction(func, info, builder) | ||
| } | ||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,13 @@ | ||
| CREATE OR REPLACE TEMPORARY VIEW t1 AS SELECT * FROM VALUES | ||
| (1), (2), (3), (4) | ||
| as t1(int_col1); | ||
|
|
||
| CREATE FUNCTION myDoubleAvg AS 'test.org.apache.spark.sql.MyDoubleAvg'; | ||
|
|
||
| SELECT default.myDoubleAvg(int_col1) as my_avg from t1; | ||
|
|
||
| SELECT default.myDoubleAvg(int_col1, 3) as my_avg from t1; | ||
|
|
||
| CREATE FUNCTION udaf1 AS 'test.non.existent.udaf'; | ||
|
|
||
| SELECT default.udaf1(int_col1) as udaf1 from t1; |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,54 @@ | ||
| -- Automatically generated by SQLQueryTestSuite | ||
| -- Number of queries: 6 | ||
|
|
||
|
|
||
| -- !query 0 | ||
| CREATE OR REPLACE TEMPORARY VIEW t1 AS SELECT * FROM VALUES | ||
| (1), (2), (3), (4) | ||
| as t1(int_col1) | ||
| -- !query 0 schema | ||
| struct<> | ||
| -- !query 0 output | ||
|
|
||
|
|
||
|
|
||
| -- !query 1 | ||
| CREATE FUNCTION myDoubleAvg AS 'test.org.apache.spark.sql.MyDoubleAvg' | ||
| -- !query 1 schema | ||
| struct<> | ||
| -- !query 1 output | ||
|
|
||
|
|
||
|
|
||
| -- !query 2 | ||
| SELECT default.myDoubleAvg(int_col1) as my_avg from t1 | ||
| -- !query 2 schema | ||
| struct<my_avg:double> | ||
| -- !query 2 output | ||
| 102.5 | ||
|
|
||
|
|
||
| -- !query 3 | ||
| SELECT default.myDoubleAvg(int_col1, 3) as my_avg from t1 | ||
| -- !query 3 schema | ||
| struct<> | ||
| -- !query 3 output | ||
| java.lang.AssertionError | ||
| assertion failed: Incorrect number of children | ||
|
|
||
|
|
||
| -- !query 4 | ||
| CREATE FUNCTION udaf1 AS 'test.non.existent.udaf' | ||
| -- !query 4 schema | ||
| struct<> | ||
| -- !query 4 output | ||
|
|
||
|
|
||
|
|
||
| -- !query 5 | ||
| SELECT default.udaf1(int_col1) as udaf1 from t1 | ||
| -- !query 5 schema | ||
| struct<> | ||
| -- !query 5 output | ||
| org.apache.spark.sql.AnalysisException | ||
| Can not load class 'test.non.existent.udaf' when registering the function 'default.udaf1', please make sure it is on the classpath; line 1 pos 7 |
There was a problem hiding this comment.
Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
do we still need this?