
Commit 4cc52f7

Merge remote-tracking branch 'upstream/master' into spark-36447
2 parents 3746d76 + 48e333a

80 files changed: +2934, -1764 lines changed


README.md

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ Spark is a unified analytics engine for large-scale data processing. It provides
 high-level APIs in Scala, Java, Python, and R, and an optimized engine that
 supports general computation graphs for data analysis. It also supports a
 rich set of higher-level tools including Spark SQL for SQL and DataFrames,
-MLlib for machine learning, GraphX for graph processing,
+pandas API on Spark for pandas workloads, MLlib for machine learning, GraphX for graph processing,
 and Structured Streaming for stream processing.
 
 <https://spark.apache.org/>

common/network-common/pom.xml

Lines changed: 4 additions & 0 deletions
@@ -91,6 +91,10 @@
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-crypto</artifactId>
     </dependency>
+    <dependency>
+      <groupId>com.google.crypto.tink</groupId>
+      <artifactId>tink</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.roaringbitmap</groupId>
       <artifactId>RoaringBitmap</artifactId>
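
The new Tink dependency is presumably what backs the reworked AuthEngine handshake seen further down in this commit (key derivation and authenticated encryption). As a rough, hedged illustration of the kind of "subtle" primitives the library provides, and not Spark's actual code, the sketch below derives a key with HKDF-SHA256 and encrypts a payload with AES-GCM; the class name, inputs, and labels are all made up for the example.

import java.nio.charset.StandardCharsets;
import java.security.GeneralSecurityException;

import com.google.crypto.tink.subtle.AesGcmJce;
import com.google.crypto.tink.subtle.Hkdf;

// Illustrative only: two Tink primitives this dependency pulls in.
public class TinkKdfSketch {
  public static void main(String[] args) throws GeneralSecurityException {
    // Placeholder inputs; a real handshake would derive these from its key exchange.
    byte[] sharedSecret = "example-shared-secret".getBytes(StandardCharsets.UTF_8);
    byte[] transcript = "example-handshake-transcript".getBytes(StandardCharsets.UTF_8);

    // HKDF-SHA256: stretch the shared secret into a 16-byte AES key, bound to the transcript.
    byte[] key = Hkdf.computeHkdf("HMACSHA256", sharedSecret, new byte[0], transcript, 16);

    // AES-GCM AEAD: encrypt a payload, authenticating the transcript as associated data.
    AesGcmJce aead = new AesGcmJce(key);
    byte[] ciphertext = aead.encrypt("hello".getBytes(StandardCharsets.UTF_8), transcript);
    byte[] plaintext = aead.decrypt(ciphertext, transcript);
    System.out.println(new String(plaintext, StandardCharsets.UTF_8));
  }
}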

common/network-common/src/main/java/org/apache/spark/network/client/StreamCallbackWithID.java

Lines changed: 10 additions & 0 deletions
@@ -17,6 +17,16 @@
 
 package org.apache.spark.network.client;
 
+import java.nio.ByteBuffer;
+
 public interface StreamCallbackWithID extends StreamCallback {
   String getID();
+
+  /**
+   * Response to return to client upon the completion of a stream. Currently only invoked in
+   * {@link org.apache.spark.network.server.TransportRequestHandler#processStreamUpload}
+   */
+  default ByteBuffer getCompletionResponse() {
+    return ByteBuffer.allocate(0);
+  }
 }
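
The interface gains a default getCompletionResponse() hook, so a handler can send a payload back to the uploading client once the stream finishes; the default preserves the old behavior by returning an empty buffer. A minimal sketch of an implementer that overrides it follows; the class and the response format are invented for illustration and are not part of this commit.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import org.apache.spark.network.client.StreamCallbackWithID;

// Illustrative only: counts the bytes received and reports the count to the client
// as the stream's completion response.
public class CountingStreamCallback implements StreamCallbackWithID {
  private final String streamId;
  private long bytesReceived = 0;

  public CountingStreamCallback(String streamId) {
    this.streamId = streamId;
  }

  @Override
  public String getID() {
    return streamId;
  }

  @Override
  public void onData(String streamId, ByteBuffer buf) throws IOException {
    bytesReceived += buf.remaining();
  }

  @Override
  public void onComplete(String streamId) throws IOException {
    // Finalize the upload here (e.g. flush to disk).
  }

  @Override
  public void onFailure(String streamId, Throwable cause) throws IOException {
    // Clean up any partial state.
  }

  // New in this commit: the response returned to the client after the stream completes.
  @Override
  public ByteBuffer getCompletionResponse() {
    return ByteBuffer.wrap(Long.toString(bytesReceived).getBytes(StandardCharsets.UTF_8));
  }
}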

common/network-common/src/main/java/org/apache/spark/network/crypto/AuthClientBootstrap.java

Lines changed: 3 additions & 3 deletions
@@ -105,15 +105,15 @@ private void doSparkAuth(TransportClient client, Channel channel)
 
     String secretKey = secretKeyHolder.getSecretKey(appId);
     try (AuthEngine engine = new AuthEngine(appId, secretKey, conf)) {
-      ClientChallenge challenge = engine.challenge();
+      AuthMessage challenge = engine.challenge();
       ByteBuf challengeData = Unpooled.buffer(challenge.encodedLength());
       challenge.encode(challengeData);
 
       ByteBuffer responseData =
           client.sendRpcSync(challengeData.nioBuffer(), conf.authRTTimeoutMs());
-      ServerResponse response = ServerResponse.decodeMessage(responseData);
+      AuthMessage response = AuthMessage.decodeMessage(responseData);
 
-      engine.validate(response);
+      engine.deriveSessionCipher(challenge, response);
       engine.sessionCipher().addToChannel(channel);
     }
   }
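
Read as a sequence, the updated client handshake is: build the client's AuthMessage challenge, send it synchronously, decode the server's reply as the same AuthMessage type, then derive the session cipher from both messages (this replaces the old engine.validate(response) step) and install it on the channel. A hedged sketch of that flow, mirroring the hunk above with an assumed wrapper class and parameter list, is:

package org.apache.spark.network.crypto;

import java.nio.ByteBuffer;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;

import org.apache.spark.network.client.TransportClient;
import org.apache.spark.network.util.TransportConf;

// Sketch only: the wrapper class, method signature, and parameters are assumptions;
// the AuthEngine/AuthMessage calls are the ones visible in the diff above.
class ClientHandshakeSketch {
  void doSparkAuth(TransportClient client, Channel channel,
      String appId, String secretKey, TransportConf conf) throws Exception {
    try (AuthEngine engine = new AuthEngine(appId, secretKey, conf)) {
      // 1. Build the client's half of the handshake and encode it into a buffer.
      AuthMessage challenge = engine.challenge();
      ByteBuf challengeData = Unpooled.buffer(challenge.encodedLength());
      challenge.encode(challengeData);

      // 2. Send it and decode the server's half, which is now also an AuthMessage.
      ByteBuffer responseData =
          client.sendRpcSync(challengeData.nioBuffer(), conf.authRTTimeoutMs());
      AuthMessage response = AuthMessage.decodeMessage(responseData);

      // 3. Derive the session cipher from both handshake messages and attach it
      //    to the channel (replaces the old validate() call).
      engine.deriveSessionCipher(challenge, response);
      engine.sessionCipher().addToChannel(channel);
    }
  }
}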
