@@ -115,7 +115,7 @@ private[sql] object DataFrame {
 @Experimental
 class DataFrame private[sql](
     @transient val sqlContext: SQLContext,
-    @DeveloperApi @transient private var _queryExecution: SQLContext#QueryExecution)
+    @DeveloperApi @transient val queryExecution: SQLContext#QueryExecution)
   extends RDDApi[Row] with Serializable {
 
   /**
@@ -134,8 +134,6 @@ class DataFrame private[sql](
     })
   }
 
-  @DeveloperApi def queryExecution: SQLContext#QueryExecution = _queryExecution
-
   @transient protected[sql] val logicalPlan: LogicalPlan = queryExecution.logical match {
     // For various commands (like DDL) and queries with side effects, we force query optimization to
     // happen right away to let these side effects take place eagerly.
@@ -1304,7 +1302,8 @@ class DataFrame private[sql](
    * @since 1.3.0
    */
   override def persist(): this.type = {
-    persist(org.apache.spark.storage.StorageLevel.MEMORY_AND_DISK)
+    sqlContext.cacheManager.cacheQuery(this)
+    this
   }
 
   /**
@@ -1319,7 +1318,6 @@ class DataFrame private[sql](
    */
   override def persist(newLevel: StorageLevel): this.type = {
     sqlContext.cacheManager.cacheQuery(this, None, newLevel)
-    this._queryExecution = new sqlContext.QueryExecution(this.queryExecution.logical)
     this
   }
 
@@ -1329,7 +1327,6 @@ class DataFrame private[sql](
    */
   override def unpersist(blocking: Boolean): this.type = {
     sqlContext.cacheManager.tryUncacheQuery(this, blocking)
-    this._queryExecution = new sqlContext.QueryExecution(this.queryExecution.logical)
     this
   }
 
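Taken together, these hunks make `queryExecution` an immutable constructor `val` and stop `persist()`/`unpersist()` from rebuilding a private `_queryExecution`. A minimal, hypothetical sketch of what that means for callers (it assumes a `SQLContext` named `sqlContext` and a registered table `"people"`; those names are illustrative, not part of this patch):

```scala
// Sketch only: assumes sqlContext is an existing SQLContext and "people" is a registered table.
val df = sqlContext.table("people")

val planBefore = df.queryExecution   // now a stable @DeveloperApi val set at construction
df.persist()                         // default persist() routes through sqlContext.cacheManager.cacheQuery(df)
val planAfter = df.queryExecution

// With queryExecution as a val, caching no longer swaps in a fresh QueryExecution instance.
assert(planBefore eq planAfter)

df.unpersist(blocking = true)        // likewise leaves df.queryExecution untouched
```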