
Commit c5f1b2c

Default args removed in methods having equivalent signature
1 parent 59b5562 commit c5f1b2c

1 file changed: +3 -2 lines changed

sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala

Lines changed: 3 additions & 2 deletions

@@ -518,6 +518,7 @@ class Dataset[T] private[sql](
    * the logical plan of this Dataset, which is especially useful in iterative algorithms where the
    * plan may grow exponentially. It will be saved to files inside the checkpoint
    * directory set with `SparkContext#setCheckpointDir`.
+   *
    * @group basic
    * @since 2.1.0
    */
@@ -536,7 +537,7 @@ class Dataset[T] private[sql](
    */
   @Experimental
   @InterfaceStability.Evolving
-  def checkpoint(eager: Boolean = true): Dataset[T] = _checkpoint(eager = eager)
+  def checkpoint(eager: Boolean): Dataset[T] = _checkpoint(eager = eager)
 
   /**
    * Eagerly locally checkpoints a Dataset and return the new Dataset. Checkpointing can be
@@ -562,7 +563,7 @@ class Dataset[T] private[sql](
    */
   @Experimental
   @InterfaceStability.Evolving
-  def localCheckpoint(eager: Boolean = true): Dataset[T] = _checkpoint(eager = eager, local = true)
+  def localCheckpoint(eager: Boolean): Dataset[T] = _checkpoint(eager = eager, local = true)
 
   /**
    * Returns a checkpointed version of this Dataset. Checkpointing can be used to truncate the
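For context, here is a minimal sketch of the "equivalent signature" situation the commit message refers to, assuming (as the message implies) that Dataset also exposes zero-argument checkpoint()/localCheckpoint() overloads that default to eager behavior. The Example class and its String return values are hypothetical, purely for illustration, not the Spark source: with such a pair of overloads, a bare call resolves to the zero-argument alternative, so a default value on the Boolean parameter can never take effect.

// Minimal sketch (hypothetical Example class, not the Spark source) of why a
// default argument is dropped when an equivalent zero-argument overload exists.
class Example {
  // Zero-argument overload: checkpoints eagerly, mirroring the old default.
  def checkpoint(): String = checkpoint(eager = true)

  // Boolean overload without a default. If it kept `eager: Boolean = true`,
  // a bare call `checkpoint()` would still resolve to the zero-argument
  // overload above, so the default would be unreachable and only misleading.
  def checkpoint(eager: Boolean): String =
    if (eager) "eager checkpoint" else "lazy checkpoint"
}

object ExampleApp extends App {
  val ds = new Example
  println(ds.checkpoint())              // zero-argument overload, eager
  println(ds.checkpoint(eager = false)) // explicit Boolean overload, lazy
}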
