sql/core/src/main/scala/org/apache/spark/sql (1 file changed, +3, -2 lines)

@@ -518,6 +518,7 @@ class Dataset[T] private[sql](
    * the logical plan of this Dataset, which is especially useful in iterative algorithms where the
    * plan may grow exponentially. It will be saved to files inside the checkpoint
    * directory set with `SparkContext#setCheckpointDir`.
+   *
    * @group basic
    * @since 2.1.0
    */
@@ -536,7 +537,7 @@ class Dataset[T] private[sql](
    */
   @Experimental
   @InterfaceStability.Evolving
-  def checkpoint(eager: Boolean = true): Dataset[T] = _checkpoint(eager = eager)
+  def checkpoint(eager: Boolean): Dataset[T] = _checkpoint(eager = eager)

   /**
    * Eagerly locally checkpoints a Dataset and return the new Dataset. Checkpointing can be
@@ -562,7 +563,7 @@ class Dataset[T] private[sql](
    */
   @Experimental
   @InterfaceStability.Evolving
-  def localCheckpoint(eager: Boolean = true): Dataset[T] = _checkpoint(eager = eager, local = true)
+  def localCheckpoint(eager: Boolean): Dataset[T] = _checkpoint(eager = eager, local = true)

   /**
    * Returns a checkpointed version of this Dataset. Checkpointing can be used to truncate the
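For reference, a minimal usage sketch of the two overloads after this change: with the default argument removed, callers pass `eager` explicitly. The SparkSession setup, application name, and checkpoint directory path below are illustrative and not part of the patch.

import org.apache.spark.sql.SparkSession

object CheckpointExample {
  def main(args: Array[String]): Unit = {
    // Illustrative local session; any existing SparkSession works the same way.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("checkpoint-example")
      .getOrCreate()

    // Reliable (non-local) checkpoints require a checkpoint directory on the SparkContext.
    spark.sparkContext.setCheckpointDir("/tmp/spark-checkpoints")

    val ds = spark.range(1000)

    // Pass eager explicitly on this overload now that the default value is gone.
    val eagerCheckpointed = ds.checkpoint(eager = true)        // materialized immediately
    val lazyLocal = ds.localCheckpoint(eager = false)          // checkpointed to executor storage on first action

    println(eagerCheckpointed.count() + lazyLocal.count())
    spark.stop()
  }
}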