From 4533dd075d38752eae7ca03c3c1216817b993c6b Mon Sep 17 00:00:00 2001
From: CodingCat
Date: Mon, 7 Mar 2016 09:37:37 -0500
Subject: [PATCH 1/4] improve the doc for "spark.memory.offHeap.size"

---
 docs/configuration.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/configuration.md b/docs/configuration.md
index d0c76aaad0b35..a4f395ded1d2d 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -871,7 +871,7 @@ Apart from these, the following properties are also available, and may be useful
   spark.memory.offHeap.size
   0
-    The absolute amount of memory in bytes which can be used for off-heap allocation.
+    The absolute amount of memory (in terms by bytes) which can be used for off-heap allocation.
     This setting has no impact on heap memory usage, so if your executors' total memory consumption
     must fit within some hard limit then be sure to shrink your JVM heap size accordingly.
     This must be set to a positive value when spark.memory.offHeap.enabled=true.

From 59f6edf95d35a108252c2d96477fc1697355c712 Mon Sep 17 00:00:00 2001
From: CodingCat
Date: Mon, 7 Mar 2016 14:00:16 -0500
Subject: [PATCH 2/4] fix

---
 docs/configuration.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/configuration.md b/docs/configuration.md
index a4f395ded1d2d..d0c76aaad0b35 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -871,7 +871,7 @@ Apart from these, the following properties are also available, and may be useful
   spark.memory.offHeap.size
   0
-    The absolute amount of memory (in terms by bytes) which can be used for off-heap allocation.
+    The absolute amount of memory in bytes which can be used for off-heap allocation.
     This setting has no impact on heap memory usage, so if your executors' total memory consumption
     must fit within some hard limit then be sure to shrink your JVM heap size accordingly.
     This must be set to a positive value when spark.memory.offHeap.enabled=true.
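For readers following these two patches, the setting under discussion only takes effect together with spark.memory.offHeap.enabled. Below is a minimal sketch, not part of the patch series, of how the pair is typically set from application code; the application name and the 1 GiB value (1073741824 bytes) are illustrative assumptions, not anything taken from the patches.

    import org.apache.spark.sql.SparkSession

    // Minimal sketch (assumed values): off-heap allocation needs both keys set.
    // The size is an absolute byte count, matching the doc wording above.
    val spark = SparkSession.builder()
      .appName("OffHeapConfigSketch")                    // hypothetical app name
      .config("spark.memory.offHeap.enabled", "true")    // must be enabled first
      .config("spark.memory.offHeap.size", "1073741824") // 1 GiB; must be positive when enabled
      .getOrCreate()

This budget sits outside the JVM heap, so an executor's hard memory limit has to cover both, which is exactly the caveat the doc text above spells out.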
From 31a0cd75b7330e52f709b6d174bf27a7ef57ea19 Mon Sep 17 00:00:00 2001
From: CodingCat
Date: Wed, 31 Aug 2016 16:01:15 -0400
Subject: [PATCH 3/4] the alternative Encoder type shall be Encoder[Map[String, Any]]

---
 .../scala/org/apache/spark/examples/sql/SparkSQLExample.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/src/main/scala/org/apache/spark/examples/sql/SparkSQLExample.scala b/examples/src/main/scala/org/apache/spark/examples/sql/SparkSQLExample.scala
index 5cd437d017f6f..ca6c0b4e24e5a 100644
--- a/examples/src/main/scala/org/apache/spark/examples/sql/SparkSQLExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/sql/SparkSQLExample.scala
@@ -203,7 +203,7 @@ object SparkSQLExample {
     // No pre-defined encoders for Dataset[Map[K,V]], define explicitly
     implicit val mapEncoder = org.apache.spark.sql.Encoders.kryo[Map[String, Any]]
     // Primitive types and case classes can be also defined as
-    implicit val stringIntMapEncoder: Encoder[Map[String, Int]] = ExpressionEncoder()
+    implicit val stringIntMapEncoder: Encoder[Map[String, Any]] = ExpressionEncoder()

     // row.getValuesMap[T] retrieves multiple columns at once into a Map[String, T]
     teenagersDF.map(teenager => teenager.getValuesMap[Any](List("name", "age"))).collect()

From 8ef776d07808623b0cb1bbaafcddad8bd0ddc070 Mon Sep 17 00:00:00 2001
From: CodingCat
Date: Wed, 31 Aug 2016 16:22:00 -0400
Subject: [PATCH 4/4] fix the compilation error

---
 .../scala/org/apache/spark/examples/sql/SparkSQLExample.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/src/main/scala/org/apache/spark/examples/sql/SparkSQLExample.scala b/examples/src/main/scala/org/apache/spark/examples/sql/SparkSQLExample.scala
index ca6c0b4e24e5a..129b81d5fbbf3 100644
--- a/examples/src/main/scala/org/apache/spark/examples/sql/SparkSQLExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/sql/SparkSQLExample.scala
@@ -203,7 +203,7 @@ object SparkSQLExample {
     // No pre-defined encoders for Dataset[Map[K,V]], define explicitly
     implicit val mapEncoder = org.apache.spark.sql.Encoders.kryo[Map[String, Any]]
     // Primitive types and case classes can be also defined as
-    implicit val stringIntMapEncoder: Encoder[Map[String, Any]] = ExpressionEncoder()
+    // implicit val stringIntMapEncoder: Encoder[Map[String, Any]] = ExpressionEncoder()

     // row.getValuesMap[T] retrieves multiple columns at once into a Map[String, T]
     teenagersDF.map(teenager => teenager.getValuesMap[Any](List("name", "age"))).collect()
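Taken together, PATCH 3/4 and PATCH 4/4 leave the Kryo-based encoder as the working path for a Dataset[Map[String, Any]]. The following self-contained sketch is assembled from the lines visible in the hunks above; the object name, the master("local[*]") setting, and the sample Person row are illustrative assumptions.

    import org.apache.spark.sql.{Encoder, Encoders, SparkSession}

    // Self-contained sketch of the encoder usage shown in the hunks above.
    // Object name, master setting, and sample data are illustrative assumptions.
    object MapEncoderSketch {
      case class Person(name: String, age: Long)

      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .appName("MapEncoderSketch") // hypothetical name
          .master("local[*]")          // assumption: local run
          .getOrCreate()
        import spark.implicits._

        val teenagersDF = Seq(Person("Justin", 19)).toDF()

        // No pre-defined encoder exists for Dataset[Map[K, V]], so one is supplied
        // explicitly via Kryo, as in the example file touched by these patches.
        implicit val mapEncoder: Encoder[Map[String, Any]] =
          Encoders.kryo[Map[String, Any]]

        // row.getValuesMap[T] retrieves multiple columns at once into a Map[String, T]
        teenagersDF
          .map(teenager => teenager.getValuesMap[Any](List("name", "age")))
          .collect()
          .foreach(println)

        spark.stop()
      }
    }

The ExpressionEncoder() alternative that PATCH 4/4 comments out is omitted here; per that commit message it caused a compilation error, which is why the example keeps the Kryo-based encoder instead.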