Commit a1cba65

Merge remote-tracking branch 'origin/master' into SPARK-4180
Conflicts:
	streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
2 parents 7ba6db8 + bd86cb1 commit a1cba65

File tree

126 files changed: 3043 additions (+), 617 deletions (−)


README.md

Lines changed: 2 additions & 1 deletion
@@ -13,7 +13,8 @@ and Spark Streaming for stream processing.
 ## Online Documentation
 
 You can find the latest Spark documentation, including a programming
-guide, on the [project web page](http://spark.apache.org/documentation.html).
+guide, on the [project web page](http://spark.apache.org/documentation.html)
+and [project wiki](https://cwiki.apache.org/confluence/display/SPARK).
 This README file only contains basic setup instructions.
 
 ## Building Spark

core/pom.xml

Lines changed: 7 additions & 0 deletions
@@ -204,6 +204,13 @@
       <artifactId>derby</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.tachyonproject</groupId>
+      <artifactId>tachyon</artifactId>
+      <version>0.5.0</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.tachyonproject</groupId>
       <artifactId>tachyon-client</artifactId>

core/src/main/resources/org/apache/spark/ui/static/additional-metrics.js

Lines changed: 2 additions & 0 deletions
@@ -39,6 +39,8 @@ $(function() {
         var column = "table ." + $(this).attr("name");
         $(column).hide();
     });
+    // Stripe table rows after rows have been hidden to ensure correct striping.
+    stripeTables();
 
     $("input:checkbox").click(function() {
         var column = "table ." + $(this).attr("name");

core/src/main/resources/org/apache/spark/ui/static/table.js

Lines changed: 0 additions & 5 deletions
@@ -28,8 +28,3 @@ function stripeTables() {
         });
     });
 }
-
-/* Stripe all tables after pages finish loading. */
-$(function() {
-    stripeTables();
-});

core/src/main/resources/org/apache/spark/ui/static/webui.css

Lines changed: 14 additions & 0 deletions
@@ -120,6 +120,20 @@ pre {
   border: none;
 }
 
+.stacktrace-details {
+  max-height: 300px;
+  overflow-y: auto;
+  margin: 0;
+  transition: max-height 0.5s ease-out, padding 0.5s ease-out;
+}
+
+.stacktrace-details.collapsed {
+  max-height: 0;
+  padding-top: 0;
+  padding-bottom: 0;
+  border: none;
+}
+
 span.expand-additional-metrics {
   cursor: pointer;
 }

core/src/main/scala/org/apache/spark/SecurityManager.scala

Lines changed: 3 additions & 11 deletions
@@ -343,15 +343,7 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging with
    */
   def getSecretKey(): String = secretKey
 
-  override def getSaslUser(appId: String): String = {
-    val myAppId = sparkConf.getAppId
-    require(appId == myAppId, s"SASL appId $appId did not match my appId ${myAppId}")
-    getSaslUser()
-  }
-
-  override def getSecretKey(appId: String): String = {
-    val myAppId = sparkConf.getAppId
-    require(appId == myAppId, s"SASL appId $appId did not match my appId ${myAppId}")
-    getSecretKey()
-  }
+  // Default SecurityManager only has a single secret key, so ignore appId.
+  override def getSaslUser(appId: String): String = getSaslUser()
+  override def getSecretKey(appId: String): String = getSecretKey()
 }
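
The appId-qualified overrides now simply delegate to the single-key accessors. A minimal sketch of that behaviour, assuming the era's SparkConf-based setup (SecurityManager is private[spark], so this would live in Spark's own tests rather than user code; the secret value is a placeholder):

```scala
import org.apache.spark.{SecurityManager, SparkConf}

// Illustrative, Spark-internal usage only.
val conf = new SparkConf()
  .set("spark.authenticate", "true")
  .set("spark.authenticate.secret", "placeholder-secret")
val sm = new SecurityManager(conf)

// The default SecurityManager keeps one secret, so the appId argument is ignored.
assert(sm.getSecretKey("app-A") == sm.getSecretKey("app-B"))
assert(sm.getSaslUser("app-A") == sm.getSaslUser())
```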

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 4 additions & 0 deletions
@@ -568,6 +568,8 @@ class SparkContext(config: SparkConf) extends SparkStatusAPI with Logging {
 
 
   /**
+   * :: Experimental ::
+   *
    * Get an RDD for a Hadoop-readable dataset as PortableDataStream for each file
    * (useful for binary data)
    *
@@ -610,6 +612,8 @@
   }
 
   /**
+   * :: Experimental ::
+   *
    * Load data from a flat binary file, assuming the length of each record is constant.
    *
    * @param path Directory to the input data files
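
Both binary-input methods are being tagged `:: Experimental ::` here. A rough usage sketch, assuming the Spark 1.2-era signatures (`binaryFiles(path)` returning an RDD of (path, PortableDataStream) pairs and `binaryRecords(path, recordLength)` returning an RDD of byte arrays); the paths are placeholders:

```scala
import org.apache.spark.{SparkConf, SparkContext}

val sc = new SparkContext(new SparkConf().setAppName("binary-input-sketch").setMaster("local[*]"))

// One entry per file: the path plus a PortableDataStream that reads the
// file's bytes lazily on the executor side.
val perFile = sc.binaryFiles("/data/blobs")  // placeholder path
val fileSizes = perFile.map { case (path, stream) => (path, stream.toArray().length) }

// Fixed-length records: each element is an Array[Byte] of exactly recordLength bytes.
val records = sc.binaryRecords("/data/fixed.bin", recordLength = 1024)  // placeholder path

println(s"files: ${fileSizes.count()}, records: ${records.count()}")
sc.stop()
```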

core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala

Lines changed: 1 addition & 0 deletions
@@ -26,6 +26,7 @@ import org.apache.hadoop.mapred._
 import org.apache.hadoop.fs.FileSystem
 import org.apache.hadoop.fs.Path
 
+import org.apache.spark.mapred.SparkHadoopMapRedUtil
 import org.apache.spark.rdd.HadoopRDD
 
 /**

core/src/main/scala/org/apache/spark/TaskEndReason.scala

Lines changed: 34 additions & 1 deletion
@@ -83,15 +83,48 @@ case class FetchFailed(
  * :: DeveloperApi ::
  * Task failed due to a runtime exception. This is the most common failure case and also captures
  * user program exceptions.
+ *
+ * `stackTrace` contains the stack trace of the exception itself. It still exists for backward
+ * compatibility. It's better to use `this(e: Throwable, metrics: Option[TaskMetrics])` to
+ * create `ExceptionFailure` as it will handle the backward compatibility properly.
+ *
+ * `fullStackTrace` is a better representation of the stack trace because it contains the whole
+ * stack trace including the exception and its causes
  */
 @DeveloperApi
 case class ExceptionFailure(
     className: String,
     description: String,
     stackTrace: Array[StackTraceElement],
+    fullStackTrace: String,
     metrics: Option[TaskMetrics])
   extends TaskFailedReason {
-  override def toErrorString: String = Utils.exceptionString(className, description, stackTrace)
+
+  private[spark] def this(e: Throwable, metrics: Option[TaskMetrics]) {
+    this(e.getClass.getName, e.getMessage, e.getStackTrace, Utils.exceptionString(e), metrics)
+  }
+
+  override def toErrorString: String =
+    if (fullStackTrace == null) {
+      // fullStackTrace is added in 1.2.0
+      // If fullStackTrace is null, use the old error string for backward compatibility
+      exceptionString(className, description, stackTrace)
+    } else {
+      fullStackTrace
+    }
+
+  /**
+   * Return a nice string representation of the exception, including the stack trace.
+   * Note: It does not include the exception's causes, and is only used for backward compatibility.
+   */
+  private def exceptionString(
+      className: String,
+      description: String,
+      stackTrace: Array[StackTraceElement]): String = {
+    val desc = if (description == null) "" else description
+    val st = if (stackTrace == null) "" else stackTrace.map("  " + _).mkString("\n")
+    s"$className: $desc\n$st"
+  }
 }
 
 /**
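
A short sketch of how the new auxiliary constructor and toErrorString fit together. Illustrative only: the constructor is private[spark], so only Spark-internal code such as the executor can call it.

```scala
// Spark-internal usage of the new ExceptionFailure(e, metrics) constructor shown above.
val failure =
  try {
    throw new RuntimeException("task blew up", new IllegalStateException("root cause"))
  } catch {
    case e: Throwable => new ExceptionFailure(e, metrics = None)
  }

// toErrorString prefers fullStackTrace, which includes the exception and its causes;
// it falls back to the old className/description/stackTrace form only when
// fullStackTrace is null (e.g. events recorded by pre-1.2 versions).
println(failure.toErrorString)
```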

core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala

Lines changed: 8 additions & 8 deletions
@@ -493,9 +493,9 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
   }
 
   /**
-   * Returns the top K elements from this RDD as defined by
+   * Returns the top k (largest) elements from this RDD as defined by
    * the specified Comparator[T].
-   * @param num the number of top elements to return
+   * @param num k, the number of top elements to return
    * @param comp the comparator that defines the order
    * @return an array of top elements
    */
@@ -507,9 +507,9 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
   }
 
   /**
-   * Returns the top K elements from this RDD using the
+   * Returns the top k (largest) elements from this RDD using the
    * natural ordering for T.
-   * @param num the number of top elements to return
+   * @param num k, the number of top elements to return
    * @return an array of top elements
    */
   def top(num: Int): JList[T] = {
@@ -518,9 +518,9 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
   }
 
   /**
-   * Returns the first K elements from this RDD as defined by
+   * Returns the first k (smallest) elements from this RDD as defined by
    * the specified Comparator[T] and maintains the order.
-   * @param num the number of top elements to return
+   * @param num k, the number of elements to return
    * @param comp the comparator that defines the order
    * @return an array of top elements
    */
@@ -552,9 +552,9 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
   }
 
   /**
-   * Returns the first K elements from this RDD using the
+   * Returns the first k (smallest) elements from this RDD using the
    * natural ordering for T while maintain the order.
-   * @param num the number of top elements to return
+   * @param num k, the number of top elements to return
    * @return an array of top elements
    */
   def takeOrdered(num: Int): JList[T] = {