Skip to content
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 15 additions & 10 deletions sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@

package org.apache.spark.sql

import java.beans.BeanInfo
import java.util.Properties

import scala.collection.immutable
Expand Down Expand Up @@ -527,8 +526,9 @@ class SQLContext private[sql](val sparkSession: SparkSession)
* @group ddl_ops
* @since 1.3.0
*/
@deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
def createExternalTable(tableName: String, path: String): DataFrame = {
  // Delegate to the non-deprecated Catalog API. The old
  // catalog.createExternalTable call (left over from the diff) is removed:
  // keeping both lines would invoke the deprecated method and discard its result.
  sparkSession.catalog.createTable(tableName, path)
}

/**
Expand All @@ -538,11 +538,12 @@ class SQLContext private[sql](val sparkSession: SparkSession)
* @group ddl_ops
* @since 1.3.0
*/
@deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
def createExternalTable(
    tableName: String,
    path: String,
    source: String): DataFrame = {
  // Forward to the renamed Catalog API; createTable is the supported
  // replacement for this deprecated entry point. The stale duplicate call to
  // catalog.createExternalTable (diff residue) is dropped.
  sparkSession.catalog.createTable(tableName, path, source)
}

/**
Expand All @@ -552,11 +553,12 @@ class SQLContext private[sql](val sparkSession: SparkSession)
* @group ddl_ops
* @since 1.3.0
*/
@deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
def createExternalTable(
    tableName: String,
    source: String,
    options: java.util.Map[String, String]): DataFrame = {
  // Java-friendly overload (java.util.Map). Forward to the renamed Catalog
  // API; only the new createTable call is kept — the duplicated deprecated
  // call was diff residue and would have run for nothing.
  sparkSession.catalog.createTable(tableName, source, options)
}

/**
Expand All @@ -567,11 +569,12 @@ class SQLContext private[sql](val sparkSession: SparkSession)
* @group ddl_ops
* @since 1.3.0
*/
@deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
def createExternalTable(
    tableName: String,
    source: String,
    options: Map[String, String]): DataFrame = {
  // Scala-Map overload. Delegate to the non-deprecated createTable; the
  // leftover catalog.createExternalTable line from the diff is removed so the
  // deprecated method is not invoked redundantly.
  sparkSession.catalog.createTable(tableName, source, options)
}

/**
Expand All @@ -581,12 +584,13 @@ class SQLContext private[sql](val sparkSession: SparkSession)
* @group ddl_ops
* @since 1.3.0
*/
@deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
def createExternalTable(
    tableName: String,
    source: String,
    schema: StructType,
    options: java.util.Map[String, String]): DataFrame = {
  // Java-friendly overload taking an explicit schema. Forward to the renamed
  // Catalog API; the duplicated deprecated call (diff residue) is dropped.
  sparkSession.catalog.createTable(tableName, source, schema, options)
}

/**
Expand All @@ -597,12 +601,13 @@ class SQLContext private[sql](val sparkSession: SparkSession)
* @group ddl_ops
* @since 1.3.0
*/
@deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
def createExternalTable(
    tableName: String,
    source: String,
    schema: StructType,
    options: Map[String, String]): DataFrame = {
  // Scala-Map overload taking an explicit schema. Delegate to the
  // non-deprecated createTable; only the replacement call is kept — the old
  // createExternalTable line was unmerged diff residue.
  sparkSession.catalog.createTable(tableName, source, schema, options)
}

/**
Expand Down Expand Up @@ -1089,9 +1094,9 @@ object SQLContext {
* method for internal use.
*/
private[sql] def beansToRows(
data: Iterator[_],
beanClass: Class[_],
attrs: Seq[AttributeReference]): Iterator[InternalRow] = {
data: Iterator[_],
beanClass: Class[_],
attrs: Seq[AttributeReference]): Iterator[InternalRow] = {
val extractors =
JavaTypeInference.getJavaBeanReadableProperties(beanClass).map(_.getReadMethod)
val methodsToConverts = extractors.zip(attrs).map { case (e, attr) =>
Expand Down