From d7001867c06e53545f05bec05ef57ea71cf47275 Mon Sep 17 00:00:00 2001
From: zero323
Date: Mon, 9 Sep 2019 23:37:04 +0200
Subject: [PATCH] Reflect removals introduced in SPARK-28980

---
 third_party/3/pyspark/__init__.pyi         | 2 +-
 third_party/3/pyspark/mllib/clustering.pyi | 2 +-
 third_party/3/pyspark/sql/__init__.pyi     | 2 +-
 third_party/3/pyspark/sql/catalog.pyi      | 1 -
 third_party/3/pyspark/sql/context.pyi      | 5 -----
 5 files changed, 3 insertions(+), 9 deletions(-)

diff --git a/third_party/3/pyspark/__init__.pyi b/third_party/3/pyspark/__init__.pyi
index 25d6e4b9..4c0299e2 100644
--- a/third_party/3/pyspark/__init__.pyi
+++ b/third_party/3/pyspark/__init__.pyi
@@ -15,7 +15,7 @@ from pyspark.storagelevel import StorageLevel as StorageLevel
 from pyspark.taskcontext import BarrierTaskContext as BarrierTaskContext, BarrierTaskInfo as BarrierTaskInfo, TaskContext as TaskContext
 
 # Compatiblity imports
-from pyspark.sql import SQLContext, HiveContext, Row
+from pyspark.sql import SQLContext, Row
 
 # Names in __all__ with no definition:
 #   SparkJobInfo
diff --git a/third_party/3/pyspark/mllib/clustering.pyi b/third_party/3/pyspark/mllib/clustering.pyi
index a00c0426..701c7ac6 100644
--- a/third_party/3/pyspark/mllib/clustering.pyi
+++ b/third_party/3/pyspark/mllib/clustering.pyi
@@ -57,7 +57,7 @@ class KMeansModel(Saveable, Loader[KMeansModel]):
 
 class KMeans:
     @classmethod
-    def train(cls, rdd: RDD[VectorLike], k: int, maxIterations: int = ..., runs: int = ..., initializationMode: str = ..., seed: Optional[int] = ..., initializationSteps: int = ..., epsilon: float = ..., initialModel: Optional[KMeansModel] = ...) -> KMeansModel: ...
+    def train(cls, rdd: RDD[VectorLike], k: int, maxIterations: int = ..., initializationMode: str = ..., seed: Optional[int] = ..., initializationSteps: int = ..., epsilon: float = ..., initialModel: Optional[KMeansModel] = ...) -> KMeansModel: ...
 
 class GaussianMixtureModel(JavaModelWrapper, JavaSaveable, JavaLoader[GaussianMixtureModel]):
     @property
diff --git a/third_party/3/pyspark/sql/__init__.pyi b/third_party/3/pyspark/sql/__init__.pyi
index 687fb99c..50728780 100644
--- a/third_party/3/pyspark/sql/__init__.pyi
+++ b/third_party/3/pyspark/sql/__init__.pyi
@@ -3,7 +3,7 @@
 # NOTE: This dynamically typed stub was automatically generated by stubgen.
 
 from pyspark.sql.types import Row as Row
-from pyspark.sql.context import SQLContext as SQLContext, HiveContext as HiveContext, UDFRegistration as UDFRegistration
+from pyspark.sql.context import SQLContext as SQLContext, UDFRegistration as UDFRegistration
 from pyspark.sql.session import SparkSession as SparkSession
 from pyspark.sql.column import Column as Column
 from pyspark.sql.dataframe import DataFrame as DataFrame, DataFrameNaFunctions as DataFrameNaFunctions, DataFrameStatFunctions as DataFrameStatFunctions
diff --git a/third_party/3/pyspark/sql/catalog.pyi b/third_party/3/pyspark/sql/catalog.pyi
index 53818954..7a1c67d7 100644
--- a/third_party/3/pyspark/sql/catalog.pyi
+++ b/third_party/3/pyspark/sql/catalog.pyi
@@ -24,7 +24,6 @@ class Catalog:
     def listTables(self, dbName: Optional[str] = ...) -> List[Table]: ...
     def listFunctions(self, dbName: Optional[str] = ...) -> List[Function]: ...
     def listColumns(self, tableName: str, dbName: Optional[str] = ...) -> List[Column]: ...
-    def createExternalTable(self, tableName: str, path: Optional[str] = ..., source: Optional[str] = ..., schema: Optional[StructType] = ..., **options: str) -> DataFrame: ...
     def createTable(self, tableName: str, path: Optional[str] = ..., source: Optional[str] = ..., schema: Optional[StructType] = ..., **options: str) -> DataFrame: ...
     def dropTempView(self, viewName: str) -> None: ...
     def dropGlobalTempView(self, viewName: str) -> None: ...
diff --git a/third_party/3/pyspark/sql/context.pyi b/third_party/3/pyspark/sql/context.pyi
index 5a6a4469..13f32af2 100644
--- a/third_party/3/pyspark/sql/context.pyi
+++ b/third_party/3/pyspark/sql/context.pyi
@@ -39,7 +39,6 @@ class SQLContext:
     def createDataFrame(self, data: Union[RDD[Union[DateTimeLiteral, LiteralType, DecimalLiteral]], List[Union[DateTimeLiteral, LiteralType, DecimalLiteral]]], schema: DataType, verifySchema: bool = ...) -> DataFrame: ...
     def registerDataFrameAsTable(self, df: DataFrame, tableName: str) -> None: ...
     def dropTempTable(self, tableName: str) -> None: ...
-    def createExternalTable(self, tableName: str, path: Optional[str] = ..., source: Optional[str] = ..., schema: Optional[StructType] = ..., **options) -> DataFrame: ...
     def sql(self, sqlQuery: str) -> DataFrame: ...
     def table(self, tableName: str) -> DataFrame: ...
     def tables(self, dbName: Optional[str] = ...) -> DataFrame: ...
@@ -54,10 +53,6 @@ class SQLContext:
     @property
     def streams(self) -> StreamingQueryManager: ...
 
-class HiveContext(SQLContext):
-    def __init__(self, sparkContext: SparkContext, jhiveContext: Optional[JavaObject] = ...) -> None: ...
-    def refreshTable(self, tableName: str) -> None: ...
-
 class UDFRegistration:
     sqlContext = ...  # SQLContext
     def __init__(self, sqlContext: SQLContext) -> None: ...
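-- 
Migration note (a minimal sketch, not part of the patch): SPARK-28980 removed
APIs deprecated since Spark 2.x, so code relying on the stubbed names above has
direct replacements. The snippet assumes a Spark 3.x installation with Hive
support on the classpath; the app name, table name, and path are hypothetical.

from pyspark.mllib.clustering import KMeans
from pyspark.sql import SparkSession

# HiveContext is gone; a Hive-enabled SparkSession provides the same catalog.
spark = (
    SparkSession.builder
    .appName("spark-28980-migration")  # hypothetical app name
    .enableHiveSupport()
    .getOrCreate()
)

# Catalog.createExternalTable / SQLContext.createExternalTable are gone;
# createTable with a path option creates the same unmanaged (external) table.
df = spark.catalog.createTable(
    "events",             # hypothetical table name
    path="/data/events",  # hypothetical location
    source="parquet",
)

# KMeans.train no longer accepts `runs` (it has been a no-op since Spark 2.0);
# simply drop the argument.
rdd = spark.sparkContext.parallelize([[0.0, 0.0], [8.0, 8.0]])
model = KMeans.train(rdd, k=2, maxIterations=10)

As with the old createExternalTable, dropping a table created this way removes
only the catalog entry, not the files under the given path.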