4 changes: 0 additions & 4 deletions core/pom.xml
@@ -239,10 +239,6 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
-<dependency>
-<groupId>com.fasterxml.jackson.module</groupId>
-<artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
-</dependency>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolMessage.scala
@@ -21,7 +21,6 @@ import com.fasterxml.jackson.annotation._
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility
import com.fasterxml.jackson.annotation.JsonInclude.Include
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper, SerializationFeature}
-import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.json4s.JsonAST._
import org.json4s.jackson.JsonMethods._

@@ -102,7 +101,6 @@ private[spark] object SubmitRestProtocolMessage {
private val mapper = new ObjectMapper()
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.enable(SerializationFeature.INDENT_OUTPUT)
-.registerModule(DefaultScalaModule)

/**
* Parse the value of the action field from the given JSON.
13 changes: 6 additions & 7 deletions core/src/main/scala/org/apache/spark/rdd/RDDOperationScope.scala
@@ -19,10 +19,9 @@ package org.apache.spark.rdd

import java.util.concurrent.atomic.AtomicInteger

-import com.fasterxml.jackson.annotation.{JsonIgnore, JsonInclude, JsonPropertyOrder}
+import com.fasterxml.jackson.annotation.{JsonCreator, JsonIgnore, JsonInclude, JsonProperty, JsonPropertyOrder}
import com.fasterxml.jackson.annotation.JsonInclude.Include
import com.fasterxml.jackson.databind.ObjectMapper
-import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.google.common.base.Objects

import org.apache.spark.SparkContext
@@ -43,10 +42,10 @@ import org.apache.spark.internal.Logging
*/
@JsonInclude(Include.NON_NULL)
@JsonPropertyOrder(Array("id", "name", "parent"))
-private[spark] class RDDOperationScope(
-val name: String,
-val parent: Option[RDDOperationScope] = None,
-val id: String = RDDOperationScope.nextScopeId().toString) {
+private[spark] class RDDOperationScope @JsonCreator() (
+@JsonProperty("name") val name: String,
+@JsonProperty("parent") val parent: Option[RDDOperationScope] = None,
+@JsonProperty("id") val id: String = RDDOperationScope.nextScopeId().toString) {

def toJson: String = {
RDDOperationScope.jsonMapper.writeValueAsString(this)
@@ -79,7 +78,7 @@ private[spark] class RDDOperationScope(
* An RDD scope tracks the series of operations that created a given RDD.
*/
private[spark] object RDDOperationScope extends Logging {
-private val jsonMapper = new ObjectMapper().registerModule(DefaultScalaModule)
+private val jsonMapper = new ObjectMapper()
private val scopeCounter = new AtomicInteger(0)
Contributor Author:
This was added by @andrewor14 in #5729 as part of the initial DAG visualization patch. Here, the only class that we serialize happens to be RDDOperationScope, so I think this removal is addressed by the explicit @JsonProperty annotations that I added above.

Let me quickly double-check that the handling of Option is correct, though.
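
A minimal way to check might be the sketch below (an assumption-laden example, not part of this PR): it lives under the org.apache.spark.rdd package, e.g. next to RDDOperationScopeSuite, so the private[spark] class is visible, and it compares fields directly rather than relying on equals.

```scala
package org.apache.spark.rdd

object RDDOperationScopeJsonCheck {
  def main(args: Array[String]): Unit = {
    // Round-trip a scope whose parent is Some(...) to see whether the plain
    // ObjectMapper (no DefaultScalaModule) still handles the Option field.
    val parentScope = new RDDOperationScope("outer")
    val childScope = new RDDOperationScope("inner", Some(parentScope))

    val json = childScope.toJson
    val restored = RDDOperationScope.fromJson(json)

    // Compare fields explicitly so the check does not depend on equals.
    assert(restored.name == childScope.name && restored.id == childScope.id,
      s"round-trip changed the scope: $json")
    assert(restored.parent.map(_.id) == Some(parentScope.id),
      "parent Option was not preserved")
    println(s"round-trip OK: $json")
  }
}
```

If the parent field comes back as None, null, or a mis-shaped object, we'd know the plain mapper needs extra help for Option.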


def fromJson(s: String): RDDOperationScope = {
core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala
@@ -47,7 +47,6 @@ private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object]{
super.writeValueAsString(t)
}
}
-mapper.registerModule(com.fasterxml.jackson.module.scala.DefaultScalaModule)
mapper.enable(SerializationFeature.INDENT_OUTPUT)
Contributor Author:
This was added by @squito in #5940 in order to allow SparkStatusAPI POJOs to be serialized using Jackson. These POJOs are defined in https://github.com/apache/spark/blob/a4ead6d3881f071a2ae53ff1c961c6ac388cac1d/core/src/main/scala/org/apache/spark/status/api/v1/api.scala

I wonder whether the default values in ApplicationAttemptInfo will be handled differently without jackson-module-scala. We might have to add explicit annotations to pin down those defaults.
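
If the defaults do turn out to be dropped, one option, sketched below with a deliberately simplified and hypothetical field list that does not match the real ApplicationAttemptInfo constructor, would be the same @JsonCreator/@JsonProperty treatment applied to RDDOperationScope above:

```scala
import com.fasterxml.jackson.annotation.{JsonCreator, JsonProperty}

// Hypothetical, trimmed-down stand-in for ApplicationAttemptInfo (illustration only).
// Note: without jackson-module-scala, a property that is missing from the JSON arrives
// at the constructor as null (or false/0 for primitives), not as the Scala default value,
// so deserialization of these defaults would still need an explicit test.
class ApplicationAttemptInfoSketch @JsonCreator() (
    @JsonProperty("attemptId") val attemptId: Option[String] = None,
    @JsonProperty("sparkUser") val sparkUser: String = "",
    @JsonProperty("completed") val completed: Boolean = false)
```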

mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL)
mapper.setDateFormat(JacksonMessageWriter.makeISODateFormat)
3 changes: 1 addition & 2 deletions core/src/main/scala/org/apache/spark/util/JsonProtocol.scala
@@ -23,7 +23,6 @@ import scala.collection.JavaConverters._
import scala.collection.Map

import com.fasterxml.jackson.databind.ObjectMapper
-import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.json4s.DefaultFormats
import org.json4s.JsonAST._
import org.json4s.JsonDSL._
@@ -56,7 +55,7 @@ private[spark] object JsonProtocol {

private implicit val format = DefaultFormats

-private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)
+private val mapper = new ObjectMapper()

Contributor Author:
This was added in #10061 in order to allow the new SparkListenerSQLExecutionStart and SparkListenerSQLExecutionEnd events to be written to the event log using Jackson. I'll see if there's an existing unit test for roundtrip serialization of these events.
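
If there isn't one, a round trip along these lines might do. This is only a sketch: it assumes it sits in the sql module's tests (so both JsonProtocol and the UI event classes are on the classpath), that these events go through JsonProtocol's generic Jackson fallback rather than a hand-written case, and that SparkListenerSQLExecutionEnd still takes (executionId, time).

```scala
package org.apache.spark.sql.execution.ui

import org.apache.spark.util.JsonProtocol

object SQLExecutionEventJsonCheck {
  def main(args: Array[String]): Unit = {
    // Serialize through JsonProtocol (which delegates to the plain Jackson mapper
    // for events it has no hand-written case for) and read the event back.
    val event = SparkListenerSQLExecutionEnd(executionId = 42L, time = 1000L)
    val json = JsonProtocol.sparkEventToJson(event)
    val restored = JsonProtocol.sparkEventFromJson(json)

    // Assuming the event is a case class, structural equality should hold.
    assert(restored == event, "SparkListenerSQLExecutionEnd did not survive the JSON round trip")
    println(s"round-trip OK: $json")
  }
}
```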

/** ------------------------------------------------- *
* JSON serialization methods for SparkListenerEvents |
3 changes: 1 addition & 2 deletions dev/deps/spark-deps-hadoop-2.2
@@ -87,7 +87,6 @@ jackson-core-asl-1.9.13.jar
jackson-databind-2.5.3.jar
jackson-jaxrs-1.9.13.jar
jackson-mapper-asl-1.9.13.jar
-jackson-module-scala_2.11-2.5.3.jar
jackson-xc-1.9.13.jar
janino-2.7.8.jar
javax.inject-1.jar
@@ -142,7 +141,7 @@ netty-all-4.0.29.Final.jar
objenesis-1.2.jar
opencsv-2.3.jar
oro-2.0.8.jar
-paranamer-2.6.jar
+paranamer-2.3.jar
parquet-column-1.7.0.jar
parquet-common-1.7.0.jar
parquet-encoding-1.7.0.jar
3 changes: 1 addition & 2 deletions dev/deps/spark-deps-hadoop-2.3
@@ -82,7 +82,6 @@ jackson-core-asl-1.9.13.jar
jackson-databind-2.5.3.jar
jackson-jaxrs-1.9.13.jar
jackson-mapper-asl-1.9.13.jar
-jackson-module-scala_2.11-2.5.3.jar
jackson-xc-1.9.13.jar
janino-2.7.8.jar
java-xmlbuilder-1.0.jar
@@ -133,7 +132,7 @@ netty-all-4.0.29.Final.jar
objenesis-1.2.jar
opencsv-2.3.jar
oro-2.0.8.jar
-paranamer-2.6.jar
+paranamer-2.3.jar
parquet-column-1.7.0.jar
parquet-common-1.7.0.jar
parquet-encoding-1.7.0.jar
3 changes: 1 addition & 2 deletions dev/deps/spark-deps-hadoop-2.4
@@ -82,7 +82,6 @@ jackson-core-asl-1.9.13.jar
jackson-databind-2.5.3.jar
jackson-jaxrs-1.9.13.jar
jackson-mapper-asl-1.9.13.jar
-jackson-module-scala_2.11-2.5.3.jar
jackson-xc-1.9.13.jar
janino-2.7.8.jar
java-xmlbuilder-1.0.jar
@@ -134,7 +133,7 @@ netty-all-4.0.29.Final.jar
objenesis-1.2.jar
opencsv-2.3.jar
oro-2.0.8.jar
-paranamer-2.6.jar
+paranamer-2.3.jar
parquet-column-1.7.0.jar
parquet-common-1.7.0.jar
parquet-encoding-1.7.0.jar
3 changes: 1 addition & 2 deletions dev/deps/spark-deps-hadoop-2.6
@@ -88,7 +88,6 @@ jackson-core-asl-1.9.13.jar
jackson-databind-2.5.3.jar
jackson-jaxrs-1.9.13.jar
jackson-mapper-asl-1.9.13.jar
-jackson-module-scala_2.11-2.5.3.jar
jackson-xc-1.9.13.jar
janino-2.7.8.jar
java-xmlbuilder-1.0.jar
@@ -140,7 +139,7 @@ netty-all-4.0.29.Final.jar
objenesis-1.2.jar
opencsv-2.3.jar
oro-2.0.8.jar
-paranamer-2.6.jar
+paranamer-2.3.jar
parquet-column-1.7.0.jar
parquet-common-1.7.0.jar
parquet-encoding-1.7.0.jar
3 changes: 1 addition & 2 deletions dev/deps/spark-deps-hadoop-2.7
@@ -88,7 +88,6 @@ jackson-core-asl-1.9.13.jar
jackson-databind-2.5.3.jar
jackson-jaxrs-1.9.13.jar
jackson-mapper-asl-1.9.13.jar
-jackson-module-scala_2.11-2.5.3.jar
jackson-xc-1.9.13.jar
janino-2.7.8.jar
java-xmlbuilder-1.0.jar
@@ -141,7 +140,7 @@ netty-all-4.0.29.Final.jar
objenesis-1.2.jar
opencsv-2.3.jar
oro-2.0.8.jar
-paranamer-2.6.jar
+paranamer-2.3.jar
parquet-column-1.7.0.jar
parquet-common-1.7.0.jar
parquet-encoding-1.7.0.jar
13 changes: 0 additions & 13 deletions pom.xml
@@ -574,19 +574,6 @@
<artifactId>jackson-annotations</artifactId>
<version>${fasterxml.jackson.version}</version>
</dependency>
-<!-- Guava is excluded because of SPARK-6149. The Guava version referenced in this module is
-15.0, which causes runtime incompatibility issues. -->
-<dependency>
-<groupId>com.fasterxml.jackson.module</groupId>
-<artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
-<version>${fasterxml.jackson.version}</version>
-<exclusions>
-<exclusion>
-<groupId>com.google.guava</groupId>
-<artifactId>guava</artifactId>
-</exclusion>
-</exclusions>
-</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>