diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
index 693be99d4749..1d1358487abc 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
@@ -33,7 +33,6 @@ import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.expressions.{ExprUtils, GenericRowWithSchema}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
 import org.apache.spark.sql.execution.arrow.ArrowConverters
-import org.apache.spark.sql.execution.command.ShowTablesCommand
 import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
 import org.apache.spark.sql.types._
 
@@ -216,15 +215,6 @@ private[sql] object SQLUtils extends Logging {
     }
   }
 
-  def getTables(sparkSession: SparkSession, databaseName: String): DataFrame = {
-    databaseName match {
-      case n: String if n != null && n.trim.nonEmpty =>
-        Dataset.ofRows(sparkSession, ShowTablesCommand(Some(n), None))
-      case _ =>
-        Dataset.ofRows(sparkSession, ShowTablesCommand(None, None))
-    }
-  }
-
   def getTableNames(sparkSession: SparkSession, databaseName: String): Array[String] = {
     val db = databaseName match {
       case _ if databaseName != null && databaseName.trim.nonEmpty =>