diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkTBinaryFrontendService.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkTBinaryFrontendService.scala
index 854a28e85a1..781ac2d0779 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkTBinaryFrontendService.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkTBinaryFrontendService.scala
@@ -19,6 +19,7 @@ package org.apache.kyuubi.engine.spark
 
 import scala.collection.JavaConverters._
 
+import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.io.Text
 import org.apache.hadoop.security.{Credentials, UserGroupInformation}
 import org.apache.hadoop.security.token.{Token, TokenIdentifier}
@@ -30,6 +31,7 @@ import org.apache.kyuubi.{KyuubiSQLException, Logging}
 import org.apache.kyuubi.config.KyuubiConf
 import org.apache.kyuubi.config.KyuubiReservedKeys._
 import org.apache.kyuubi.ha.client.{EngineServiceDiscovery, ServiceDiscovery}
+import org.apache.kyuubi.reflection.DynConstructors
 import org.apache.kyuubi.service.{Serverable, Service, TBinaryFrontendService}
 import org.apache.kyuubi.service.TFrontendService._
 import org.apache.kyuubi.util.KyuubiHadoopUtils
@@ -110,6 +112,8 @@ class SparkTBinaryFrontendService(
 
 object SparkTBinaryFrontendService extends Logging {
   val HIVE_DELEGATION_TOKEN = new Text("HIVE_DELEGATION_TOKEN")
+  val HIVE_CONF_CLASSNAME = "org.apache.hadoop.hive.conf.HiveConf"
+  @volatile private var _hiveConf: Configuration = _
 
   private[spark] def renewDelegationToken(sc: SparkContext, delegationToken: String): Unit = {
     val newCreds = KyuubiHadoopUtils.decodeCredentials(delegationToken)
@@ -133,7 +137,7 @@ object SparkTBinaryFrontendService extends Logging {
       newTokens: Map[Text, Token[_ <: TokenIdentifier]],
       oldCreds: Credentials,
       updateCreds: Credentials): Unit = {
-    val metastoreUris = sc.hadoopConfiguration.getTrimmed("hive.metastore.uris", "")
+    val metastoreUris = hiveConf(sc.hadoopConfiguration).getTrimmed("hive.metastore.uris", "")
 
     // `HiveMetaStoreClient` selects the first token whose service is "" and kind is
     // "HIVE_DELEGATION_TOKEN" to authenticate.
@@ -204,4 +208,25 @@ object SparkTBinaryFrontendService extends Logging {
       1
     }
   }
+
+  private[kyuubi] def hiveConf(hadoopConf: Configuration): Configuration = {
+    if (_hiveConf == null) {
+      synchronized {
+        if (_hiveConf == null) {
+          _hiveConf =
+            try {
+              DynConstructors.builder()
+                .impl(HIVE_CONF_CLASSNAME, classOf[Configuration], classOf[Class[_]])
+                .build[Configuration]()
+                .newInstance(hadoopConf, Class.forName(HIVE_CONF_CLASSNAME))
+            } catch {
+              case e: Throwable =>
+                warn("Failed to create Hive Configuration", e)
+                hadoopConf
+            }
+        }
+      }
+    }
+    _hiveConf
+  }
 }
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/SparkTBinaryFrontendServiceSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/SparkTBinaryFrontendServiceSuite.scala
new file mode 100644
index 00000000000..5f81e51f825
--- /dev/null
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/SparkTBinaryFrontendServiceSuite.scala
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kyuubi.engine.spark
+
+import org.apache.hadoop.conf.Configuration
+
+import org.apache.kyuubi.KyuubiFunSuite
+
+class SparkTBinaryFrontendServiceSuite extends KyuubiFunSuite {
+  test("new hive conf") {
+    val hiveConf = SparkTBinaryFrontendService.hiveConf(new Configuration())
+    assert(hiveConf.getClass().getName == SparkTBinaryFrontendService.HIVE_CONF_CLASSNAME)
+  }
+}
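
Note on the patch: `hiveConf` lazily constructs a `HiveConf` through Kyuubi's `DynConstructors` reflection helper, so the Spark engine keeps no compile-time dependency on Hive, and caches the result with double-checked locking on the `@volatile` field. Building a `HiveConf` (rather than reading `sc.hadoopConfiguration` directly) means settings from hive-site.xml, such as `hive.metastore.uris`, are picked up even when they were never copied into the Hadoop configuration; if the Hive classes are absent, the helper falls back to the plain Hadoop `Configuration`. As a minimal sketch of what the reflective call resolves to, using plain JDK reflection (the helper name `newHiveConf` is illustrative and not part of the patch; the `HiveConf(Configuration, Class[_])` constructor signature is the one the patch itself targets):

    import org.apache.hadoop.conf.Configuration

    def newHiveConf(hadoopConf: Configuration): Configuration =
      try {
        // HiveConf(Configuration other, Class<?> cls) copies the given Hadoop
        // configuration and then applies hive-site.xml on top of it.
        val cls = Class.forName("org.apache.hadoop.hive.conf.HiveConf")
        cls.getConstructor(classOf[Configuration], classOf[Class[_]])
          .newInstance(hadoopConf, cls)
          .asInstanceOf[Configuration]
      } catch {
        // Hive is not on the classpath: keep using the Hadoop configuration.
        case _: Throwable => hadoopConf
      }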