Commit bae2c0c

Merge branch 'master' of https://github.com/apache/spark into SPARK-31167-missing-test-deps
2 parents: 7df0040 + 17586f9

670 files changed (+22486 / -13299 lines)


R/pkg/DESCRIPTION

Lines changed: 1 addition & 1 deletion
@@ -23,7 +23,7 @@ Suggests:
     testthat,
     e1071,
     survival,
-    arrow
+    arrow (>= 0.15.1)
 Collate:
     'schema.R'
     'generics.R'

appveyor.yml

Lines changed: 2 additions & 1 deletion
@@ -48,7 +48,8 @@ install:
 build_script:
   # '-Djna.nosys=true' is required to avoid kernel32.dll load failure.
   # See SPARK-28759.
-  - cmd: mvn -DskipTests -Psparkr -Phive -Djna.nosys=true package
+  # Ideally we should check the tests related to Hive in SparkR as well (SPARK-31745).
+  - cmd: mvn -DskipTests -Psparkr -Djna.nosys=true package
 
 environment:
   NOT_CRAN: true

common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java

Lines changed: 0 additions & 3 deletions
@@ -229,8 +229,6 @@ public class ShuffleMetrics implements MetricSet {
     private final Meter blockTransferRateBytes = new Meter();
     // Number of active connections to the shuffle service
     private Counter activeConnections = new Counter();
-    // Number of registered connections to the shuffle service
-    private Counter registeredConnections = new Counter();
     // Number of exceptions caught in connections to the shuffle service
     private Counter caughtExceptions = new Counter();

@@ -242,7 +240,6 @@ public ShuffleMetrics() {
       allMetrics.put("registeredExecutorsSize",
         (Gauge<Integer>) () -> blockManager.getRegisteredExecutorsSize());
       allMetrics.put("numActiveConnections", activeConnections);
-      allMetrics.put("numRegisteredConnections", registeredConnections);
       allMetrics.put("numCaughtExceptions", caughtExceptions);
     }

common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleService.java

Lines changed: 1 addition & 0 deletions
@@ -198,6 +198,7 @@ protected void serviceInit(Configuration conf) throws Exception {
     // register metrics on the block handler into the Node Manager's metrics system.
     blockHandler.getAllMetrics().getMetrics().put("numRegisteredConnections",
         shuffleServer.getRegisteredConnections());
+    blockHandler.getAllMetrics().getMetrics().putAll(shuffleServer.getAllMetrics().getMetrics());
     YarnShuffleServiceMetrics serviceMetrics =
         new YarnShuffleServiceMetrics(blockHandler.getAllMetrics());
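
The added putAll call folds the shuffle server's own Dropwizard metrics into the block handler's metric set, so the YarnShuffleServiceMetrics wrapper handed to the NodeManager sees one combined map rather than only the single numRegisteredConnections entry that was already copied by hand. A minimal Scala sketch of the merge pattern, with illustrative metric names and a hypothetical MetricMergeSketch wrapper that is not part of this commit:

import java.util.{HashMap => JHashMap}

import com.codahale.metrics.{Counter, Metric, MetricSet}

object MetricMergeSketch {
  def main(args: Array[String]): Unit = {
    // Two independent metric maps, standing in for the block handler's and
    // the external shuffle server's metric sets.
    val handlerMetrics = new JHashMap[String, Metric]()
    handlerMetrics.put("numActiveConnections", new Counter)

    val serverMetrics = new JHashMap[String, Metric]()
    serverMetrics.put("numRegisteredConnections", new Counter)

    // Merging the server's entries into the handler's map is what the new
    // putAll line does; every metric is then visible through one MetricSet.
    handlerMetrics.putAll(serverMetrics)

    val combined: MetricSet = new MetricSet {
      override def getMetrics: java.util.Map[String, Metric] = handlerMetrics
    }

    // A Dropwizard MetricRegistry (or a YARN metrics adapter) can then pick
    // everything up in one pass, e.g. new MetricRegistry().registerAll(combined).
    println(combined.getMetrics.keySet())
  }
}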

sql/core/src/main/scala/org/apache/spark/sql/execution/joins/package.scala renamed to common/tags/src/test/java/org/apache/spark/tags/ChromeUITest.java

Lines changed: 7 additions & 11 deletions
@@ -15,17 +15,13 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.execution
+package org.apache.spark.tags;
 
-/**
- * Physical execution operators for join operations.
- */
-package object joins {
-
-  sealed abstract class BuildSide
-
-  case object BuildRight extends BuildSide
+import java.lang.annotation.*;
 
-  case object BuildLeft extends BuildSide
+import org.scalatest.TagAnnotation;
 
-}
+@TagAnnotation
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.METHOD, ElementType.TYPE})
+public @interface ChromeUITest { }
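
ChromeUITest is a ScalaTest tag annotation: placing it on a suite class tags every test in that suite, so Chrome-based UI tests can be included or excluded as a group (for example with ScalaTest's exclude-tag option, -l org.apache.spark.tags.ChromeUITest). A minimal sketch of a suite opting in; the suite name and test body are hypothetical, and it assumes a ScalaTest version that provides org.scalatest.funsuite.AnyFunSuite:

import org.scalatest.funsuite.AnyFunSuite

import org.apache.spark.tags.ChromeUITest

// Hypothetical suite: the class-level annotation tags every test it contains,
// so a build environment without Chrome installed can skip the whole suite.
@ChromeUITest
class ExampleChromeUISuite extends AnyFunSuite {

  test("landing page renders in headless Chrome") {
    // A real suite would drive Selenium's ChromeDriver here; this placeholder
    // only keeps the sketch self-contained.
    assert(1 + 1 == 2)
  }
}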

conf/spark-env.sh.template

Lines changed: 1 addition & 0 deletions
@@ -62,6 +62,7 @@
 # Generic options for the daemons used in the standalone deploy mode
 # - SPARK_CONF_DIR      Alternate conf dir. (Default: ${SPARK_HOME}/conf)
 # - SPARK_LOG_DIR       Where log files are stored.  (Default: ${SPARK_HOME}/logs)
+# - SPARK_LOG_MAX_FILES Max log files of Spark daemons can rotate to. Default is 5.
 # - SPARK_PID_DIR       Where the pid file is stored. (Default: /tmp)
 # - SPARK_IDENT_STRING  A string representing this instance of spark. (Default: $USER)
 # - SPARK_NICENESS      The scheduling priority for daemons. (Default: 0)

core/src/main/java/org/apache/spark/SparkFirehoseListener.java

Lines changed: 5 additions & 0 deletions
@@ -162,6 +162,11 @@ public void onSpeculativeTaskSubmitted(SparkListenerSpeculativeTaskSubmitted spe
     onEvent(speculativeTask);
   }
 
+  @Override
+  public void onResourceProfileAdded(SparkListenerResourceProfileAdded event) {
+    onEvent(event);
+  }
+
   @Override
   public void onOtherEvent(SparkListenerEvent event) {
     onEvent(event);
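
SparkFirehoseListener funnels every SparkListener callback into a single onEvent method, and the new override routes resource-profile registrations through that same path. A minimal Scala sketch of a listener built on top of it; the class name and the counting logic are illustrative only:

import org.apache.spark.SparkFirehoseListener
import org.apache.spark.scheduler.{SparkListenerEvent, SparkListenerResourceProfileAdded}

// Illustrative listener: counts resource-profile registrations seen on the bus.
class ResourceProfileCountingListener extends SparkFirehoseListener {

  @volatile private var resourceProfilesSeen = 0L

  override def onEvent(event: SparkListenerEvent): Unit = event match {
    case _: SparkListenerResourceProfileAdded =>
      resourceProfilesSeen += 1
    case _ =>
      // Every other listener callback also lands here; this sketch ignores it.
  }

  def count: Long = resourceProfilesSeen
}

Such a listener can be registered with SparkContext.addSparkListener or through the spark.extraListeners configuration.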
