Commit 89d6822

gatorsmile authored and cloud-fan committed
[SPARK-19148][SQL][FOLLOW-UP] do not expose the external table concept in Catalog
### What changes were proposed in this pull request?

After we renamed `Catalog.createExternalTable` to `createTable` in the PR apache#16528, we also need to deprecate the corresponding functions in `SQLContext`.

### How was this patch tested?

N/A

Author: Xiao Li <[email protected]>

Closes apache#17502 from gatorsmile/deprecateCreateExternalTable.
1 parent cf5963c commit 89d6822
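
As a quick illustration of the migration this deprecation nudges users toward, here is a minimal Scala sketch. The SparkSession setup, the table names, and the /tmp/users.parquet path are illustrative assumptions, not part of the commit; the only facts taken from the diff are that SQLContext.createExternalTable is now deprecated and that it forwards to Catalog.createTable.

import org.apache.spark.sql.SparkSession

object CreateTableMigration {
  def main(args: Array[String]): Unit = {
    // Local session purely for illustration.
    val spark = SparkSession.builder()
      .appName("create-table-migration")
      .master("local[*]")
      .getOrCreate()

    // Hypothetical Parquet directory; substitute any existing data source path.
    val dataPath = "/tmp/users.parquet"

    // Old style (still compiles, but emits a deprecation warning after this commit):
    // spark.sqlContext.createExternalTable("users_old", dataPath)

    // New style: go through the Catalog API, which only exposes createTable.
    val users = spark.catalog.createTable("users", dataPath)
    users.printSchema()

    spark.stop()
  }
}

The same renaming applies to the other overloads touched in the diff (the source, schema, and options variants).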

File tree

1 file changed: +15, -10 lines


sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 15 additions & 10 deletions
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql
 
-import java.beans.BeanInfo
 import java.util.Properties
 
 import scala.collection.immutable
@@ -527,8 +526,9 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @group ddl_ops
    * @since 1.3.0
    */
+  @deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
   def createExternalTable(tableName: String, path: String): DataFrame = {
-    sparkSession.catalog.createExternalTable(tableName, path)
+    sparkSession.catalog.createTable(tableName, path)
   }
 
   /**
@@ -538,11 +538,12 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @group ddl_ops
    * @since 1.3.0
    */
+  @deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
   def createExternalTable(
       tableName: String,
       path: String,
       source: String): DataFrame = {
-    sparkSession.catalog.createExternalTable(tableName, path, source)
+    sparkSession.catalog.createTable(tableName, path, source)
   }
 
   /**
@@ -552,11 +553,12 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @group ddl_ops
    * @since 1.3.0
    */
+  @deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
   def createExternalTable(
       tableName: String,
       source: String,
       options: java.util.Map[String, String]): DataFrame = {
-    sparkSession.catalog.createExternalTable(tableName, source, options)
+    sparkSession.catalog.createTable(tableName, source, options)
   }
 
   /**
@@ -567,11 +569,12 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @group ddl_ops
    * @since 1.3.0
    */
+  @deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
   def createExternalTable(
       tableName: String,
       source: String,
       options: Map[String, String]): DataFrame = {
-    sparkSession.catalog.createExternalTable(tableName, source, options)
+    sparkSession.catalog.createTable(tableName, source, options)
   }
 
   /**
@@ -581,12 +584,13 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @group ddl_ops
    * @since 1.3.0
    */
+  @deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
   def createExternalTable(
       tableName: String,
       source: String,
       schema: StructType,
       options: java.util.Map[String, String]): DataFrame = {
-    sparkSession.catalog.createExternalTable(tableName, source, schema, options)
+    sparkSession.catalog.createTable(tableName, source, schema, options)
   }
 
   /**
@@ -597,12 +601,13 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @group ddl_ops
    * @since 1.3.0
    */
+  @deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
   def createExternalTable(
       tableName: String,
       source: String,
       schema: StructType,
       options: Map[String, String]): DataFrame = {
-    sparkSession.catalog.createExternalTable(tableName, source, schema, options)
+    sparkSession.catalog.createTable(tableName, source, schema, options)
  }
 
   /**
@@ -1089,9 +1094,9 @@ object SQLContext {
    * method for internal use.
    */
   private[sql] def beansToRows(
-    data: Iterator[_],
-    beanClass: Class[_],
-    attrs: Seq[AttributeReference]): Iterator[InternalRow] = {
+      data: Iterator[_],
+      beanClass: Class[_],
+      attrs: Seq[AttributeReference]): Iterator[InternalRow] = {
     val extractors =
       JavaTypeInference.getJavaBeanReadableProperties(beanClass).map(_.getReadMethod)
     val methodsToConverts = extractors.zip(attrs).map { case (e, attr) =>
