
Commit 39db1bf (parent: 6754570)

[SQL] Update SQL Programming Guide

Author: Michael Armbrust <[email protected]>
Author: Yin Huai <[email protected]>

Closes apache#2258 from marmbrus/sqlDocUpdate and squashes the following commits:

f3d450b [Michael Armbrust] fix brackets
bea3bfa [Michael Armbrust] Davies suggestions
3a29fe2 [Michael Armbrust] tighten visibility
a71aa36 [Michael Armbrust] Draft of doc updates
52932c0 [Michael Armbrust] Merge remote-tracking branch 'origin/master' into sqlDocUpdate
1e8c849 [Yin Huai] Update the example used for applySchema.
9457c39 [Yin Huai] Update doc.
31ba240 [Yin Huai] Merge remote-tracking branch 'upstream/master' into dataTypeDoc
29bc668 [Yin Huai] Draft doc for data type and schema APIs.

7 files changed: +865 additions, -101 deletions

docs/sql-programming-guide.md — 857 additions, 95 deletions
(Large diff not rendered by default.)
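The bulk of the commit is this guide rewrite, including the new material on the data type and schema APIs and the updated applySchema example mentioned in Yin Huai's commits. A sketch of that programmatic-schema flow in the style of the 1.1-era API (the file path and column layout are illustrative, not taken from this diff):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql._

val sc = new SparkContext(new SparkConf().setAppName("schema-sketch").setMaster("local"))
val sqlContext = new SQLContext(sc)

// An RDD of Rows built from a text file of "name, age" lines.
val people = sc.textFile("examples/src/main/resources/people.txt")
val rowRDD = people.map(_.split(",")).map(p => Row(p(0), p(1).trim))

// Describe the schema explicitly with the data type API the new docs cover.
val schema = StructType(Seq(
  StructField("name", StringType, nullable = true),
  StructField("age", StringType, nullable = true)))

// Apply the schema to the RDD, register the result as a table, and query it with SQL.
val peopleSchemaRDD = sqlContext.applySchema(rowRDD, schema)
peopleSchemaRDD.registerTempTable("people")
sqlContext.sql("SELECT name FROM people").map(t => "Name: " + t(0)).collect().foreach(println)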

sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala — 1 addition, 1 deletion

@@ -53,7 +53,7 @@ private[spark] object SQLConf {
  *
  * SQLConf is thread-safe (internally synchronized, so safe to be used in multiple threads).
  */
-trait SQLConf {
+private[sql] trait SQLConf {
   import SQLConf._
 
   /** Only low degree of contention is expected for conf, thus NOT using ConcurrentHashMap. */
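This and the following hunks implement the "tighten visibility" commit: traits and objects that were accidentally public become package-private. A minimal sketch of Scala's qualified access modifiers, with hypothetical package and member names:

package org.example.sql

// private[sql] makes the trait visible throughout the enclosing `sql`
// package (and its subpackages) but hides it from external callers,
// keeping it out of the library's public API.
private[sql] trait Conf {
  def get(key: String): Option[String]
}

// Code in the same package can still implement and use the trait.
private[sql] class MapConf extends Conf {
  private val settings = scala.collection.mutable.Map.empty[String, String]
  override def get(key: String): Option[String] = settings.get(key)
  def set(key: String, value: String): Unit = settings(key) = value
}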

sql/core/src/main/scala/org/apache/spark/sql/UdfRegistration.scala — 1 addition, 1 deletion

@@ -30,7 +30,7 @@ import scala.reflect.runtime.universe.{TypeTag, typeTag}
 /**
  * Functions for registering scala lambda functions as UDFs in a SQLContext.
  */
-protected[sql] trait UDFRegistration {
+private[sql] trait UDFRegistration {
   self: SQLContext =>
 
   private[spark] def registerPython(
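UDFRegistration is a mixin on SQLContext (note the self-type), so users never touch the trait directly; they call registration methods on the context itself. A hedged usage sketch in the style of the 1.1-era registerFunction API (the UDF name and table are assumptions):

// Reuses the sqlContext built in the applySchema sketch above.
// Bind a Scala lambda to a name callable from SQL text.
sqlContext.registerFunction("strLen", (s: String) => s.length)

// Assuming a registered temporary table "people" with a string column "name":
sqlContext.sql("SELECT strLen(name) FROM people").collect()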

sql/core/src/main/scala/org/apache/spark/sql/columnar/InMemoryColumnarTableScan.scala — 1 addition, 1 deletion

@@ -28,7 +28,7 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.{LeafNode, SparkPlan}
 
-object InMemoryRelation {
+private[sql] object InMemoryRelation {
   def apply(useCompression: Boolean, batchSize: Int, child: SparkPlan): InMemoryRelation =
     new InMemoryRelation(child.output, useCompression, batchSize, child)()
 }
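With InMemoryRelation now internal, the supported route to the columnar cache is the public table-caching API. A usage sketch (the config key and table name are assumptions, not part of this diff):

// Reuses the sqlContext from the sketches above.
sqlContext.setConf("spark.sql.inMemoryColumnarStorage.compressed", "true")
sqlContext.cacheTable("people")                        // materialized lazily, on first scan
sqlContext.sql("SELECT COUNT(*) FROM people").collect()
sqlContext.uncacheTable("people")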

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetConverter.scala — 1 addition, 1 deletion

@@ -382,7 +382,7 @@ private[parquet] class CatalystPrimitiveConverter(
     parent.updateLong(fieldIndex, value)
   }
 
-object CatalystArrayConverter {
+private[parquet] object CatalystArrayConverter {
   val INITIAL_ARRAY_SIZE = 20
 }
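CatalystArrayConverter only holds a starting capacity for the growable buffers used when converting Parquet arrays. An illustrative sketch of that pattern (not the actual converter code):

// Illustrative only: start with a small array and double on overflow,
// the pattern a constant like INITIAL_ARRAY_SIZE = 20 supports.
class GrowableBuffer(initialCapacity: Int = 20) {
  private var elements = new Array[Any](initialCapacity)
  private var count = 0

  def append(value: Any): Unit = {
    if (count == elements.length) {
      val bigger = new Array[Any](elements.length * 2)
      Array.copy(elements, 0, bigger, 0, count)
      elements = bigger
    }
    elements(count) = value
    count += 1
  }

  def size: Int = count
}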

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetFilters.scala — 1 addition, 1 deletion

@@ -33,7 +33,7 @@ import org.apache.spark.sql.catalyst.expressions.{Predicate => CatalystPredicate}
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.execution.SparkSqlSerializer
 
-object ParquetFilters {
+private[sql] object ParquetFilters {
   val PARQUET_FILTER_DATA = "org.apache.spark.sql.parquet.row.filter"
   // set this to false if pushdown should be disabled
   val PARQUET_FILTER_PUSHDOWN_ENABLED = "spark.sql.hints.parquetFilterPushdown"
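The pushdown hint itself remains user-settable even though the object is now private[sql]. A sketch of toggling it with the key named in the diff (the Parquet path and table name are illustrative):

// Reuses the sqlContext from the sketches above.
// Enable predicate pushdown into the Parquet reader via the hint above.
sqlContext.setConf("spark.sql.hints.parquetFilterPushdown", "true")

val parquetPeople = sqlContext.parquetFile("people.parquet")
parquetPeople.registerTempTable("parquetPeople")

// With pushdown enabled, a filter like this can be evaluated during the scan.
sqlContext.sql("SELECT name FROM parquetPeople WHERE age > 21").collect()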

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/server/SparkSQLOperationManager.scala — 3 additions, 1 deletion

@@ -39,7 +39,9 @@ import org.apache.spark.sql.hive.thriftserver.ReflectionUtils
 /**
  * Executes queries using Spark SQL, and maintains a list of handles to active queries.
  */
-class SparkSQLOperationManager(hiveContext: HiveContext) extends OperationManager with Logging {
+private[thriftserver] class SparkSQLOperationManager(hiveContext: HiveContext)
+  extends OperationManager with Logging {
+
   val handleToOperation = ReflectionUtils
     .getSuperField[JMap[OperationHandle, Operation]](this, "handleToOperation")
