From 72b19a96606fa83be32948dc5848ac99dc9dedb9 Mon Sep 17 00:00:00 2001
From: scwf
Date: Sat, 27 Dec 2014 21:51:11 +0800
Subject: [PATCH 1/2] fix HiveInspectorSuite test error

Rename the single-value overload of checkValues to checkValue so it no
longer shares a name with the Seq[Any] overload, and pin the JVM default
TimeZone (America/Los_Angeles) and Locale (US) so the timestamp-sensitive
cases produce the same results on any host.

---
 .../spark/sql/hive/HiveInspectorSuite.scala |   28 +-
 test.log                                    | 3897 +++++++++++++++++
 2 files changed, 3914 insertions(+), 11 deletions(-)
 create mode 100644 test.log

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
index bfe608a51a30b..f90d3607915ae 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive
 
 import java.sql.Date
 import java.util
+import java.util.{Locale, TimeZone}
 
 import org.apache.hadoop.hive.serde2.io.DoubleWritable
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
@@ -63,6 +64,11 @@ class HiveInspectorSuite extends FunSuite with HiveInspectors {
       .get())
   }
 
+  // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
+  TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
+  // Add Locale setting
+  Locale.setDefault(Locale.US)
+
   val data =
     Literal(true) ::
     Literal(0.asInstanceOf[Byte]) ::
@@ -121,11 +127,11 @@ class HiveInspectorSuite extends FunSuite with HiveInspectors {
 
   def checkValues(row1: Seq[Any], row2: Seq[Any]): Unit = {
     row1.zip(row2).map {
-      case (r1, r2) => checkValues(r1, r2)
+      case (r1, r2) => checkValue(r1, r2)
     }
   }
 
-  def checkValues(v1: Any, v2: Any): Unit = {
+  def checkValue(v1: Any, v2: Any): Unit = {
     (v1, v2) match {
       case (r1: Decimal, r2: Decimal) =>
         // Ignore the Decimal precision
@@ -195,26 +201,26 @@ class HiveInspectorSuite extends FunSuite with HiveInspectors {
     })
 
     checkValues(row, unwrap(wrap(row, toInspector(dt)), toInspector(dt)).asInstanceOf[Row])
-    checkValues(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
+    checkValue(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
   }
 
   test("wrap / unwrap Array Type") {
     val dt = ArrayType(dataTypes(0))
 
     val d = row(0) :: row(0) :: Nil
-    checkValues(d, unwrap(wrap(d, toInspector(dt)), toInspector(dt)))
-    checkValues(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
-    checkValues(d, unwrap(wrap(d, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
-    checkValues(d, unwrap(wrap(null, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
+    checkValue(d, unwrap(wrap(d, toInspector(dt)), toInspector(dt)))
+    checkValue(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
+    checkValue(d, unwrap(wrap(d, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
+    checkValue(d, unwrap(wrap(null, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
   }
 
   test("wrap / unwrap Map Type") {
     val dt = MapType(dataTypes(0), dataTypes(1))
 
     val d = Map(row(0) -> row(1))
-    checkValues(d, unwrap(wrap(d, toInspector(dt)), toInspector(dt)))
-    checkValues(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
-    checkValues(d, unwrap(wrap(d, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
-    checkValues(d, unwrap(wrap(null, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
+    checkValue(d, unwrap(wrap(d, toInspector(dt)), toInspector(dt)))
+    checkValue(null, unwrap(wrap(null, toInspector(dt)), toInspector(dt)))
+    checkValue(d, unwrap(wrap(d, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
+    checkValue(d, unwrap(wrap(null, toInspector(Literal(d, dt))), toInspector(Literal(d, dt))))
   }
 }
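A note on why the TimeZone/Locale pin above matters: java.sql.Timestamp#toString
and locale-sensitive formatting both read JVM-wide defaults, so the suite's
expected strings only match when every host uses the same settings. A minimal
sketch, not part of the patch; the 123123L value is inferred from the
"1969-12-31 16:02:03.123" rendering in test.log below:

    import java.sql.Timestamp
    import java.util.{Locale, TimeZone}

    object DefaultsDemo {
      def show(zoneId: String): Unit = {
        TimeZone.setDefault(TimeZone.getTimeZone(zoneId))
        // Timestamp#toString renders the instant in the current default zone.
        println(s"$zoneId -> ${new Timestamp(123123L)}")
      }

      def main(args: Array[String]): Unit = {
        show("America/Los_Angeles") // 1969-12-31 16:02:03.123, as in test.log
        show("UTC")                 // 1970-01-01 00:02:03.123, same instant
        // The default Locale likewise changes formatted text:
        Locale.setDefault(Locale.US)
        println("%,.2f".format(1234.5)) // 1,234.50
        Locale.setDefault(Locale.GERMANY)
        println("%,.2f".format(1234.5)) // 1.234,50
      }
    }

Pinning both defaults once, before the test data is built, keeps every
timestamp_* comparison deterministic across differently-configured machines.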
diff --git a/test.log b/test.log
new file mode 100644
index 0000000000000..4511efe682782
--- /dev/null
+++ b/test.log
@@ -0,0 +1,3897 @@
+Using /home/wf/tools/jdk1.7.0_67 as default JAVA_HOME.
+Note, this will be overridden by -java-home if it is set.
+[... sbt project loading, resolver/eviction warnings, compilation of the one
+changed Scala source, and results for SQLQuerySuite, ParquetMetastoreSuite,
+ParquetSourceSuite, HiveTypeCoercionSuite, HiveQuerySuite (including a
+TestSerDe ClassNotFoundException raised under scalatest's intercept, and the
+verbatim-repeated HIVE FAILURE OUTPUT data-loading blocks),
+HiveMetastoreCatalogSuite, JavaHiveQLSuite, and CachedTableSuite elided ...]
+[info] HiveInspectorSuite:
+[info] - Test wrap SettableStructObjectInspector (3 milliseconds)
+[info] - oi => datatype => oi (4 milliseconds)
+[... interleaved debug dumps of the suite's sample values (dates printed as
+3914-10-23, the timestamp as 1969-12-31 16:02:03.123) and their writable
+object inspectors elided ...]
+[info] - wrap / unwrap null, constant null and writables (6 milliseconds)
+[info] - wrap / unwrap primitive writable object inspector (1 millisecond)
+[info] - wrap / unwrap primitive java object inspector (1 millisecond)
+[info] - wrap / unwrap Struct Type (1 millisecond)
+[info] - wrap / unwrap Array Type (0 milliseconds)
+[info] - wrap / unwrap Map Type (2 milliseconds)
file:///tmp/sparkHiveWarehouse7773807525406879524/hivetablewithmapvalue +[info] - Insert MapType.valueContainsNull == false (408 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/hivetablewithstructvalue +[info] - Insert StructType.fields.exists(_.nullable == false) (466 milliseconds) +[info] HiveCompatibilitySuite: +[info] - add_part_exist (2 seconds, 69 milliseconds) +[info] - add_part_multiple (3 seconds, 864 milliseconds) +[info] - add_partition_no_whitelist (1 second, 804 milliseconds) +[info] - add_partition_with_whitelist (1 second, 24 milliseconds) +[info] - alias_casted_column (899 milliseconds) +[info] - alter2 (2 seconds, 749 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/alter3_src +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/alter3_db.db/alter3_src +[info] - alter3 (3 seconds, 396 milliseconds) +[info] - alter4 (1 second, 555 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/alter5_src +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/alter5/parta +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/alter5_db.db/alter5_src +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/alter5_db.db/alter5/parta +[info] - alter5 (2 seconds, 199 milliseconds) +[info] - alter_char1 !!! IGNORED !!! +[info] - alter_char2 !!! IGNORED !!! +[info] - alter_db_owner !!! IGNORED !!! +[info] - alter_index (1 second, 287 milliseconds) +21:30:17.528 WARN org.apache.hadoop.mapreduce.JobSubmitter: Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this. +21:30:17.646 WARN org.apache.hadoop.conf.Configuration: file:/tmp/hadoop-kf/mapred/staging/kf1954455609/.staging/job_local1954455609_0001/job.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.retry.interval; Ignoring. +21:30:17.648 WARN org.apache.hadoop.conf.Configuration: file:/tmp/hadoop-kf/mapred/staging/kf1954455609/.staging/job_local1954455609_0001/job.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.attempts; Ignoring. +21:30:17.709 WARN org.apache.hadoop.conf.Configuration: file:/tmp/hadoop-kf/mapred/local/localRunner/kf/job_local1954455609_0001/job_local1954455609_0001.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.retry.interval; Ignoring. +21:30:17.710 WARN org.apache.hadoop.conf.Configuration: file:/tmp/hadoop-kf/mapred/local/localRunner/kf/job_local1954455609_0001/job_local1954455609_0001.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.attempts; Ignoring. +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_rc_merge_test_part/ds=2012-01-03/ts=2012-01-03+14%3A46%3A31 +[info] - alter_merge_2 (4 seconds, 211 milliseconds) +[info] - alter_partition_coltype !!! IGNORED !!! 
+[info] - alter_partition_format_loc (1 second, 724 milliseconds) +[info] - alter_partition_protect_mode (2 seconds, 674 milliseconds) +[info] - alter_partition_with_whitelist (1 second, 1 millisecond) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/alter_rename_partition_src +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/alter_rename_partition_db.db/alter_rename_partition_src +[info] - alter_rename_partition (2 seconds, 826 milliseconds) +[info] - alter_table_serde (1 second, 965 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/alter_varchar_1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/alter_varchar_1 +[info] - alter_varchar1 (1 second, 900 milliseconds) +[info] - alter_varchar2 (1 second, 451 milliseconds) +[info] - alter_view_as_select (1 second, 275 milliseconds) +[info] - alter_view_rename !!! IGNORED !!! +[info] - ambiguous_col (1 second, 265 milliseconds) +[info] - ansi_sql_arithmetic !!! IGNORED !!! +[info] - archive !!! IGNORED !!! +[info] - archive_excludeHadoop20 !!! IGNORED !!! +[info] - archive_multi !!! IGNORED !!! +[info] - auto_join0 (1 second, 369 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - auto_join1 (1 second, 287 milliseconds) +[info] - auto_join10 (982 milliseconds) +[info] - auto_join11 (837 milliseconds) +[info] - auto_join12 (909 milliseconds) +[info] - auto_join13 (1 second, 890 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - auto_join14 (1 second, 389 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - auto_join14_hadoop20 (1 second, 141 milliseconds) +[info] - auto_join15 (1 second, 13 milliseconds) +[info] - auto_join16 !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - auto_join17 (1 second, 242 milliseconds) +[info] - auto_join18 (1 second, 168 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - auto_join19 (1 second, 186 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j2 +[info] - auto_join2 (1 second, 137 milliseconds) +[info] - auto_join20 (1 second, 368 milliseconds) +[info] - auto_join21 (901 milliseconds) +[info] - auto_join22 (1 second, 449 milliseconds) +[info] - auto_join23 (829 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tst1 +[info] - auto_join24 (1 second, 162 milliseconds) +21:30:57.083 WARN org.apache.spark.sql.hive.execution.HiveCompatibilitySuite: Simplifications made on unsupported operations for test auto_join25 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - auto_join25 (2 seconds, 120 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - auto_join26 (1 second, 414 milliseconds) +[info] - auto_join27 (1 second, 135 milliseconds) +[info] - auto_join28 (888 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - auto_join3 (1 second, 38 milliseconds) +[info] - auto_join30 (2 seconds, 201 milliseconds) +[info] - auto_join31 (935 milliseconds) +[info] - auto_join32 (2 seconds, 864 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - auto_join4 (1 second, 251 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - auto_join5 (1 
second, 263 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - auto_join6 (1 second, 640 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - auto_join7 (1 second, 365 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - auto_join8 (1 second, 84 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - auto_join9 (1 second, 73 milliseconds) +21:31:17.371 WARN org.apache.spark.sql.hive.execution.HiveCompatibilitySuite: Simplifications made on unsupported operations for test auto_join_filters +[info] - auto_join_filters (8 seconds, 330 milliseconds) +[info] - auto_join_nulls (3 seconds, 807 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/testsrc +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/orderpayment_small +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/user_small +[info] - auto_join_reordering_values (1 second, 622 milliseconds) +[info] - auto_join_without_localtask !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - auto_smb_mapjoin_14 (5 seconds, 48 milliseconds) +[info] - auto_sortmerge_join_1 (2 seconds, 794 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl2 +[info] - auto_sortmerge_join_10 (2 seconds, 263 milliseconds) +[info] - auto_sortmerge_join_11 (2 seconds, 702 milliseconds) +[info] - auto_sortmerge_join_12 (2 seconds, 946 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - auto_sortmerge_join_13 (3 seconds, 239 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl2 +[info] - auto_sortmerge_join_14 (1 second, 740 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl2 +[info] - auto_sortmerge_join_15 (1 second, 254 milliseconds) +[info] - auto_sortmerge_join_16 (2 seconds, 189 milliseconds) +[info] - auto_sortmerge_join_2 (4 seconds, 958 milliseconds) +[info] - auto_sortmerge_join_3 (4 seconds, 848 milliseconds) +[info] - auto_sortmerge_join_4 (2 seconds, 684 milliseconds) +[info] - auto_sortmerge_join_5 (2 seconds, 109 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl3 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl4 +[info] - auto_sortmerge_join_6 (3 seconds, 927 
milliseconds)
+[info] - auto_sortmerge_join_7 (3 seconds, 97 milliseconds)
+[info] - auto_sortmerge_join_8 (3 seconds, 81 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl2
+[info] - auto_sortmerge_join_9 (6 seconds, 209 milliseconds)
+[info] - ba_table1 !!! IGNORED !!!
+[info] - ba_table2 !!! IGNORED !!!
+[info] - ba_table3 !!! IGNORED !!!
+[info] - ba_table_udfs !!! IGNORED !!!
+[info] - ba_table_union !!! IGNORED !!!
+[info] - binary_constant (764 milliseconds)
+[info] - binary_output_format !!! IGNORED !!!
+[info] - binary_table_bincolserde !!! IGNORED !!!
+[info] - binary_table_colserde !!! IGNORED !!!
+[info] - binarysortable_1 (957 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - cast1 (1 second, 55 milliseconds)
+[info] - cast_to_int !!! IGNORED !!!
+[info] - char_1 !!! IGNORED !!!
+[info] - char_2 !!! IGNORED !!!
+[info] - char_cast !!! IGNORED !!!
+[info] - char_comparison !!! IGNORED !!!
+[info] - char_join1 !!! IGNORED !!!
+[info] - char_nested_types !!! IGNORED !!!
+[info] - char_serde !!! IGNORED !!!
+[info] - char_udf1 !!! IGNORED !!!
+[info] - char_union1 !!! IGNORED !!!
+[info] - char_varchar_udf !!! IGNORED !!!
+[info] - cluster (2 seconds, 658 milliseconds)
+[info] - columnarserde_create_shortcut !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/combine1_1
+[info] - combine1 (1 second, 260 milliseconds)
+[info] - combine2 !!! IGNORED !!!
+[info] - combine2_hadoop20 !!! IGNORED !!!
+[info] - combine2_win !!! IGNORED !!!
+[info] - combine3 !!! IGNORED !!!
+[info] - compile_processor !!! IGNORED !!!
+[info] - compute_stats_binary (1 second, 145 milliseconds)
+[info] - compute_stats_boolean (940 milliseconds)
+[info] - compute_stats_decimal !!! IGNORED !!!
+[info] - compute_stats_double (905 milliseconds)
+[info] - compute_stats_empty_table (1 second, 297 milliseconds)
+[info] - compute_stats_long (924 milliseconds)
+[info] - compute_stats_string (1 second, 720 milliseconds)
+[info] - constant_prop !!! IGNORED !!!
+[info] - convert_enum_to_string (727 milliseconds)
+[info] - correlationoptimizer1 (6 seconds, 230 milliseconds)
+[info] - correlationoptimizer10 (3 seconds, 61 milliseconds)
+[info] - correlationoptimizer11 (2 seconds, 330 milliseconds)
+[info] - correlationoptimizer12 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp
+[info] - correlationoptimizer13 (1 second, 630 milliseconds)
+[info] - correlationoptimizer14 (2 seconds, 607 milliseconds)
+[info] - correlationoptimizer15 (1 second, 512 milliseconds)
+[info] - correlationoptimizer2 (3 seconds, 907 milliseconds)
+[info] - correlationoptimizer3 (2 seconds, 536 milliseconds)
+[info] - correlationoptimizer4 (4 seconds, 283 milliseconds)
+[info] - correlationoptimizer5 !!! IGNORED !!!
+[info] - correlationoptimizer6 (7 seconds, 831 milliseconds)
+[info] - correlationoptimizer7 (2 seconds, 2 milliseconds)
+[info] - correlationoptimizer8 (3 seconds, 552 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp
+[info] - correlationoptimizer9 (2 seconds, 728 milliseconds)
+[info] - count (1 second, 362 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_six_columns
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_two_columns
+[info] - cp_mj_rc (1 second, 589 milliseconds)
+[info] - create_1 !!! IGNORED !!!
+[info] - create_big_view !!! IGNORED !!!
+[info] - create_escape !!! IGNORED !!!
+[info] - create_func1 !!! IGNORED !!!
+[info] - create_genericudaf !!! IGNORED !!!
+[info] - create_genericudf !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_test_output_format
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_test_output_format_sequencefile
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_test_output_format_hivesequencefile
+[info] - create_insert_outputformat (1 second, 363 milliseconds)
+[info] - create_like !!! IGNORED !!!
+[info] - create_like_tbl_props (1 second, 154 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table2
+[info] - create_like_view (2 seconds, 348 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table1
+[info] - create_nested_type (854 milliseconds)
+[info] - create_or_replace_view !!! IGNORED !!!
+[info] - create_skewed_table1 (3 seconds, 447 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/abc
+[info] - create_struct_table (852 milliseconds)
+[info] - create_udaf !!! IGNORED !!!
+[info] - create_union_table !!! IGNORED !!!
+[info] - create_view !!! IGNORED !!!
+[info] - cross_join (931 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/a
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/b
+[info] - cross_product_check_1 (1 second, 281 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/a
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/b
+[info] - cross_product_check_2 (1 second, 443 milliseconds)
+[info] - ct_case_insensitive (1 second, 190 milliseconds)
+[info] - ctas_char !!! IGNORED !!!
+[info] - ctas_colname !!! IGNORED !!!
+[info] - ctas_date !!! IGNORED !!!
+[info] - ctas_uses_database_location !!! IGNORED !!!
+[info] - ctas_varchar !!! IGNORED !!!
+[info] - cte_1 !!! IGNORED !!!
+[info] - cte_2 !!! IGNORED !!!
+[info] - custom_input_output_format !!! IGNORED !!!
+[info] - date_1 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_2
+[info] - date_2 (1 second, 148 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_3
+[info] - date_3 (982 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_4
+[info] - date_4 (1 second, 59 milliseconds)
+[info] - date_comparison (1 second, 85 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_join1
+[info] - date_join1 (964 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_serde_regex
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_serde_lb
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_serde_ls
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_serde_c
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_serde_lbc
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_serde_orc
+[info] - date_serde (3 seconds, 909 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_udf
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_udf_string
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/date_udf_flight
+[info] - date_udf (2 seconds, 690 milliseconds)
+[info] - dbtxnmgr_compact1 !!! IGNORED !!!
+[info] - dbtxnmgr_compact2 !!! IGNORED !!!
+[info] - dbtxnmgr_compact3 !!! IGNORED !!!
+[info] - dbtxnmgr_ddl1 !!! IGNORED !!!
+[info] - dbtxnmgr_query1 !!! IGNORED !!!
+[info] - dbtxnmgr_query2 !!! IGNORED !!!
+[info] - dbtxnmgr_query3 !!! IGNORED !!!
+[info] - dbtxnmgr_query4 !!! IGNORED !!!
+[info] - dbtxnmgr_query5 !!! IGNORED !!!
+[info] - dbtxnmgr_showlocks !!! IGNORED !!!
+[info] - ddltime !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/decimal_1
+[info] - decimal_1 (1 second, 441 milliseconds)
+[info] - decimal_2 !!! IGNORED !!!
+[info] - decimal_3 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/decimal_4_2
+[info] - decimal_4 (1 second, 325 milliseconds)
+[info] - decimal_5 !!! IGNORED !!!
+[info] - decimal_6 !!! IGNORED !!!
+[info] - decimal_join (1 second, 216 milliseconds)
+[info] - decimal_precision !!! IGNORED !!!
+[info] - decimal_serde !!! IGNORED !!!
+[info] - decimal_udf !!! IGNORED !!!
+[info] - default_partition_name (976 milliseconds)
+[info] - delimiter (1 second, 272 milliseconds)
+[info] - desc_non_existent_tbl (627 milliseconds)
+[info] - desc_tbl_part_cols !!! IGNORED !!!
+[info] - describe_formatted_view_partitioned (907 milliseconds)
+[info] - describe_table !!! IGNORED !!!
+[info] - describe_xpath !!! IGNORED !!!
+[info] - diff_part_input_formats (1 second, 105 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/kv_fileformat_check_txt
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/kv_fileformat_check_seq
+[info] - disable_file_format_check (1 second, 20 milliseconds)
+[info] - disallow_incompatible_type_change_off (1 second, 341 milliseconds)
+[info] - distinct_stats (1 second, 126 milliseconds)
+[info] - driverhook !!! IGNORED !!!
+21:34:07.620 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/tmp/testTempFiles6167202715430906972spark.hive.tmp/drop_database_removes_partition_dirs_table specified for non-external table:test_table
+Deleted file:///tmp/testTempFiles6167202715430906972spark.hive.tmp/drop_database_removes_partition_dirs_table2/part=1
+[info] - drop_database_removes_partition_dirs (1 second, 220 milliseconds)
+[info] - drop_function (747 milliseconds)
+[info] - drop_index (1 second, 28 milliseconds)
+21:34:10.588 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/tmp/testTempFiles6167202715430906972spark.hive.tmp/drop_database_removes_partition_dirs_table specified for non-external table:test_table
+[info] - drop_index_removes_partition_dirs (918 milliseconds)
+[info] - drop_multi_partitions (1 second, 190 milliseconds)
+[info] - drop_partitions_filter (2 seconds, 174 milliseconds)
+21:34:15.457 ERROR org.apache.hadoop.hive.metastore.ObjectStore: Direct SQL failed, falling back to ORM
+javax.jdo.JDODataStoreException: Error executing SQL query "select "PARTITIONS"."PART_ID" from "PARTITIONS" inner join "TBLS" on "PARTITIONS"."TBL_ID" = "TBLS"."TBL_ID" and "TBLS"."TBL_NAME" = ? inner join "DBS" on "TBLS"."DB_ID" = "DBS"."DB_ID" and "DBS"."NAME" = ? inner join "PARTITION_KEY_VALS" "FILTER0" on "FILTER0"."PART_ID" = "PARTITIONS"."PART_ID" and "FILTER0"."INTEGER_IDX" = 0 inner join "PARTITION_KEY_VALS" "FILTER1" on "FILTER1"."PART_ID" = "PARTITIONS"."PART_ID" and "FILTER1"."INTEGER_IDX" = 1 where ( (((case when "TBLS"."TBL_NAME" = ? and "DBS"."NAME" = ? then cast("FILTER0"."PART_KEY_VAL" as decimal(21,0)) else null end) = ?) and ((case when "TBLS"."TBL_NAME" = ? and "DBS"."NAME" = ? then cast("FILTER1"."PART_KEY_VAL" as decimal(21,0)) else null end) = ?)) )".
+ at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:451)
+ at org.datanucleus.api.jdo.JDOQuery.executeWithArray(JDOQuery.java:321)
+ at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilterInternal(MetaStoreDirectSql.java:300)
+ at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilter(MetaStoreDirectSql.java:211)
+ at org.apache.hadoop.hive.metastore.ObjectStore$3.getSqlResult(ObjectStore.java:1915)
+ at org.apache.hadoop.hive.metastore.ObjectStore$3.getSqlResult(ObjectStore.java:1909)
+ at org.apache.hadoop.hive.metastore.ObjectStore$GetHelper.run(ObjectStore.java:2208)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByExprInternal(ObjectStore.java:1909)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByExpr(ObjectStore.java:1882)
+ at sun.reflect.GeneratedMethodAccessor212.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:606)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108)
+ at com.sun.proxy.$Proxy13.getPartitionsByExpr(Unknown Source)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_partitions_by_expr(HiveMetaStore.java:3779)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:606)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105)
+ at com.sun.proxy.$Proxy14.get_partitions_by_expr(Unknown Source)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.listPartitionsByExpr(HiveMetaStoreClient.java:922)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:606)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
+ at com.sun.proxy.$Proxy15.listPartitionsByExpr(Unknown Source)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getPartitionsByExpr(Hive.java:1979)
+ at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.addTableDropPartsOutputs(DDLSemanticAnalyzer.java:3084)
+ at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.analyzeAlterTableDropParts(DDLSemanticAnalyzer.java:2579)
+ at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.analyzeInternal(DDLSemanticAnalyzer.java:396)
+ at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:327)
+ at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:422)
+ at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:322)
+ at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:975)
+ at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1040)
+ at org.apache.hadoop.hive.ql.Driver.run(Driver.java:911)
+ at org.apache.hadoop.hive.ql.Driver.run(Driver.java:901)
+ at org.apache.spark.sql.hive.HiveContext.runHive(HiveContext.scala:305)
+ at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:276)
+ at org.apache.spark.sql.hive.test.TestHiveContext.runSqlHive(TestHive.scala:98)
+ at org.apache.spark.sql.hive.execution.HiveNativeCommand.run(HiveNativeCommand.scala:37)
+ at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:53)
+ at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:53)
+ at org.apache.spark.sql.execution.ExecutedCommand.executeCollect(commands.scala:59)
+ at org.apache.spark.sql.hive.HiveContext$QueryExecution.stringResult(HiveContext.scala:383)
+ at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$31.apply(HiveComparisonTest.scala:341)
+ at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$31.apply(HiveComparisonTest.scala:339)
+ at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
+ at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
+ at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
+ at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
+ at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
+ at scala.collection.AbstractTraversable.map(Traversable.scala:105)
+ at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:339)
+ at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236)
+ at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236)
+ at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
+ at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
+ at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
+ at org.scalatest.Transformer.apply(Transformer.scala:22)
+ at org.scalatest.Transformer.apply(Transformer.scala:20)
+ at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
+ at org.scalatest.Suite$class.withFixture(Suite.scala:1122)
+ at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555)
+ at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
+ at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
+ at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
+ at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
+ at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
+ at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:32)
+ at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
+ at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:32)
+ at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
+ at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
+ at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
+ at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
+ at scala.collection.immutable.List.foreach(List.scala:318)
+ at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
+ at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
+ at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
+ at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
+ at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
+ at org.scalatest.Suite$class.run(Suite.scala:1424)
+ at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
+ at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
+ at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
+ at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
+ at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
+ at org.apache.spark.sql.hive.execution.HiveComparisonTest.org$scalatest$BeforeAndAfterAll$$super$run(HiveComparisonTest.scala:40)
+ at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
+ at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
+ at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:32)
+ at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
+ at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:32)
+ at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462)
+ at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671)
+ at sbt.ForkMain$Run$2.call(ForkMain.java:294)
+ at sbt.ForkMain$Run$2.call(ForkMain.java:284)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+ at java.lang.Thread.run(Thread.java:745)
+NestedThrowablesStackTrace:
+java.sql.SQLDataException: Invalid character string format for type DECIMAL.
+ at org.apache.derby.impl.jdbc.SQLExceptionFactory40.getSQLException(Unknown Source)
+ at org.apache.derby.impl.jdbc.Util.generateCsSQLException(Unknown Source)
+ at org.apache.derby.impl.jdbc.TransactionResourceImpl.wrapInSQLException(Unknown Source)
+ at org.apache.derby.impl.jdbc.TransactionResourceImpl.handleException(Unknown Source)
+ at org.apache.derby.impl.jdbc.EmbedConnection.handleException(Unknown Source)
+ at org.apache.derby.impl.jdbc.ConnectionChild.handleException(Unknown Source)
+ at org.apache.derby.impl.jdbc.EmbedStatement.executeStatement(Unknown Source)
+ at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeStatement(Unknown Source)
+ at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeQuery(Unknown Source)
+ at com.jolbox.bonecp.PreparedStatementHandle.executeQuery(PreparedStatementHandle.java:174)
+ at org.datanucleus.store.rdbms.ParamLoggingPreparedStatement.executeQuery(ParamLoggingPreparedStatement.java:381)
+ at org.datanucleus.store.rdbms.SQLController.executeStatementQuery(SQLController.java:504)
+ at org.datanucleus.store.rdbms.query.SQLQuery.performExecute(SQLQuery.java:280)
+ at org.datanucleus.store.query.Query.executeQuery(Query.java:1786)
+ at org.datanucleus.store.query.AbstractSQLQuery.executeWithArray(AbstractSQLQuery.java:339)
+ at org.datanucleus.api.jdo.JDOQuery.executeWithArray(JDOQuery.java:312)
+ at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilterInternal(MetaStoreDirectSql.java:300)
+ at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilter(MetaStoreDirectSql.java:211)
+ at org.apache.hadoop.hive.metastore.ObjectStore$3.getSqlResult(ObjectStore.java:1915)
+ at org.apache.hadoop.hive.metastore.ObjectStore$3.getSqlResult(ObjectStore.java:1909)
+ at org.apache.hadoop.hive.metastore.ObjectStore$GetHelper.run(ObjectStore.java:2208)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByExprInternal(ObjectStore.java:1909)
+ at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByExpr(ObjectStore.java:1882)
+ at sun.reflect.GeneratedMethodAccessor212.invoke(Unknown Source)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:606)
+ at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108)
+ at com.sun.proxy.$Proxy13.getPartitionsByExpr(Unknown Source)
+ at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_partitions_by_expr(HiveMetaStore.java:3779)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:606)
+ at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105)
+ at com.sun.proxy.$Proxy14.get_partitions_by_expr(Unknown Source)
+ at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.listPartitionsByExpr(HiveMetaStoreClient.java:922)
+ at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+ at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
+ at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+ at java.lang.reflect.Method.invoke(Method.java:606)
+ at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
+ at com.sun.proxy.$Proxy15.listPartitionsByExpr(Unknown Source)
+ at org.apache.hadoop.hive.ql.metadata.Hive.getPartitionsByExpr(Hive.java:1979)
+ at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.addTableDropPartsOutputs(DDLSemanticAnalyzer.java:3084)
+ at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.analyzeAlterTableDropParts(DDLSemanticAnalyzer.java:2579)
+ at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.analyzeInternal(DDLSemanticAnalyzer.java:396)
+ at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:327)
+ at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:422)
+ at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:322)
+ at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:975)
+ at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1040)
+ at org.apache.hadoop.hive.ql.Driver.run(Driver.java:911)
+ at org.apache.hadoop.hive.ql.Driver.run(Driver.java:901)
+ at org.apache.spark.sql.hive.HiveContext.runHive(HiveContext.scala:305)
+ at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:276)
+ at org.apache.spark.sql.hive.test.TestHiveContext.runSqlHive(TestHive.scala:98)
+ at org.apache.spark.sql.hive.execution.HiveNativeCommand.run(HiveNativeCommand.scala:37)
+ at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:53)
+ at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:53)
+ at org.apache.spark.sql.execution.ExecutedCommand.executeCollect(commands.scala:59)
+ at org.apache.spark.sql.hive.HiveContext$QueryExecution.stringResult(HiveContext.scala:383)
+ at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$31.apply(HiveComparisonTest.scala:341)
+ at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$31.apply(HiveComparisonTest.scala:339)
+ at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
+ at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
+ at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
+ at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
+ at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
+ at scala.collection.AbstractTraversable.map(Traversable.scala:105)
+ at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:339)
+ at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236)
+ at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236)
+ at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
+ at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
+ at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
+ at org.scalatest.Transformer.apply(Transformer.scala:22)
+ at org.scalatest.Transformer.apply(Transformer.scala:20)
+ at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
+ at org.scalatest.Suite$class.withFixture(Suite.scala:1122)
+ at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555)
+ at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
+ at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
+ at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
+ at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
+ at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
+ at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:32)
+ at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
+ at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:32)
+ at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
+ at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
+ at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
+ at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
+ at scala.collection.immutable.List.foreach(List.scala:318)
+ at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
+ at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
+ at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
+ at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
+ at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
+ at org.scalatest.Suite$class.run(Suite.scala:1424)
+ at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
+ at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
+ at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
+ at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
+ at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
+ at org.apache.spark.sql.hive.execution.HiveComparisonTest.org$scalatest$BeforeAndAfterAll$$super$run(HiveComparisonTest.scala:40)
+ at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
+ at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
+ at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:32)
+ at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
+ at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:32)
+ at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462)
+ at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671)
+ at sbt.ForkMain$Run$2.call(ForkMain.java:294)
+ at sbt.ForkMain$Run$2.call(ForkMain.java:284)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+ at java.lang.Thread.run(Thread.java:745)
+Caused by: java.sql.SQLException: Invalid character string format for type DECIMAL.
+ at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source)
+ at org.apache.derby.impl.jdbc.SQLExceptionFactory40.wrapArgsForTransportAcrossDRDA(Unknown Source)
+ ... 118 more
+Caused by: ERROR 22018: Invalid character string format for type DECIMAL.
+ at org.apache.derby.iapi.error.StandardException.newException(Unknown Source)
+ at org.apache.derby.iapi.types.DataType.invalidFormat(Unknown Source)
+ at org.apache.derby.iapi.types.DataType.setValue(Unknown Source)
+ at org.apache.derby.exe.ac5e52817cx014ax8becx98c1x00000b5602b0ec8.e6(Unknown Source)
+ at org.apache.derby.impl.services.reflect.DirectCall.invoke(Unknown Source)
+ at org.apache.derby.impl.sql.execute.ProjectRestrictResultSet.getNextRowCore(Unknown Source)
+ at org.apache.derby.impl.sql.execute.NestedLoopJoinResultSet.getNextRowCore(Unknown Source)
+ at org.apache.derby.impl.sql.execute.JoinResultSet.openCore(Unknown Source)
+ at org.apache.derby.impl.sql.execute.JoinResultSet.openCore(Unknown Source)
+ at org.apache.derby.impl.sql.execute.ProjectRestrictResultSet.openCore(Unknown Source)
+ at org.apache.derby.impl.sql.execute.BasicNoPutResultSetImpl.open(Unknown Source)
+ at org.apache.derby.impl.sql.GenericPreparedStatement.executeStmt(Unknown Source)
+ at org.apache.derby.impl.sql.GenericPreparedStatement.execute(Unknown Source)
+ ... 112 more
+21:34:15.635 ERROR org.apache.hadoop.hive.metastore.ObjectStore: Direct SQL failed, falling back to ORM
+javax.jdo.JDODataStoreException: Error executing SQL query "select "PARTITIONS"."PART_ID" from "PARTITIONS" inner join "TBLS" on "PARTITIONS"."TBL_ID" = "TBLS"."TBL_ID" and "TBLS"."TBL_NAME" = ? inner join "DBS" on "TBLS"."DB_ID" = "DBS"."DB_ID" and "DBS"."NAME" = ? inner join "PARTITION_KEY_VALS" "FILTER0" on "FILTER0"."PART_ID" = "PARTITIONS"."PART_ID" and "FILTER0"."INTEGER_IDX" = 0 where (((case when "TBLS"."TBL_NAME" = ? and "DBS"."NAME" = ? then cast("FILTER0"."PART_KEY_VAL" as decimal(21,0)) else null end) = ?))".
+ at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:451) + at org.datanucleus.api.jdo.JDOQuery.executeWithArray(JDOQuery.java:321) + at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilterInternal(MetaStoreDirectSql.java:300) + at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilter(MetaStoreDirectSql.java:211) + at org.apache.hadoop.hive.metastore.ObjectStore$3.getSqlResult(ObjectStore.java:1915) + at org.apache.hadoop.hive.metastore.ObjectStore$3.getSqlResult(ObjectStore.java:1909) + at org.apache.hadoop.hive.metastore.ObjectStore$GetHelper.run(ObjectStore.java:2208) + at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByExprInternal(ObjectStore.java:1909) + at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByExpr(ObjectStore.java:1882) + at sun.reflect.GeneratedMethodAccessor212.invoke(Unknown Source) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108) + at com.sun.proxy.$Proxy13.getPartitionsByExpr(Unknown Source) + at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.drop_partitions_req(HiveMetaStore.java:2318) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105) + at com.sun.proxy.$Proxy14.drop_partitions_req(Unknown Source) + at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropPartitions(HiveMetaStoreClient.java:709) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89) + at com.sun.proxy.$Proxy15.dropPartitions(Unknown Source) + at org.apache.hadoop.hive.ql.metadata.Hive.dropPartitions(Hive.java:1696) + at org.apache.hadoop.hive.ql.metadata.Hive.dropPartitions(Hive.java:1681) + at org.apache.hadoop.hive.ql.exec.DDLTask.dropPartitions(DDLTask.java:3860) + at org.apache.hadoop.hive.ql.exec.DDLTask.dropTableOrPartitions(DDLTask.java:3854) + at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:306) + at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:153) + at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:85) + at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1503) + at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1270) + at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1088) + at org.apache.hadoop.hive.ql.Driver.run(Driver.java:911) + at org.apache.hadoop.hive.ql.Driver.run(Driver.java:901) + at org.apache.spark.sql.hive.HiveContext.runHive(HiveContext.scala:305) + at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:276) + at org.apache.spark.sql.hive.test.TestHiveContext.runSqlHive(TestHive.scala:98) + at org.apache.spark.sql.hive.execution.HiveNativeCommand.run(HiveNativeCommand.scala:37) + at 
org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:53) + at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:53) + at org.apache.spark.sql.execution.ExecutedCommand.executeCollect(commands.scala:59) + at org.apache.spark.sql.hive.HiveContext$QueryExecution.stringResult(HiveContext.scala:383) + at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$31.apply(HiveComparisonTest.scala:341) + at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$31.apply(HiveComparisonTest.scala:339) + at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244) + at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244) + at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) + at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47) + at scala.collection.TraversableLike$class.map(TraversableLike.scala:244) + at scala.collection.AbstractTraversable.map(Traversable.scala:105) + at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:339) + at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236) + at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236) + at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22) + at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85) + at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) + at org.scalatest.Transformer.apply(Transformer.scala:22) + at org.scalatest.Transformer.apply(Transformer.scala:20) + at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166) + at org.scalatest.Suite$class.withFixture(Suite.scala:1122) + at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555) + at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163) + at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175) + at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175) + at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306) + at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175) + at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:32) + at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200) + at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:32) + at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208) + at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208) + at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413) + at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401) + at scala.collection.immutable.List.foreach(List.scala:318) + at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401) + at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396) + at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483) + at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208) + at org.scalatest.FunSuite.runTests(FunSuite.scala:1555) + at org.scalatest.Suite$class.run(Suite.scala:1424) + at 
org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555) + at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212) + at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212) + at org.scalatest.SuperEngine.runImpl(Engine.scala:545) + at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212) + at org.apache.spark.sql.hive.execution.HiveComparisonTest.org$scalatest$BeforeAndAfterAll$$super$run(HiveComparisonTest.scala:40) + at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257) + at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256) + at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:32) + at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241) + at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:32) + at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462) + at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671) + at sbt.ForkMain$Run$2.call(ForkMain.java:294) + at sbt.ForkMain$Run$2.call(ForkMain.java:284) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +NestedThrowablesStackTrace: +java.sql.SQLDataException: Invalid character string format for type DECIMAL. + at org.apache.derby.impl.jdbc.SQLExceptionFactory40.getSQLException(Unknown Source) + at org.apache.derby.impl.jdbc.Util.generateCsSQLException(Unknown Source) + at org.apache.derby.impl.jdbc.TransactionResourceImpl.wrapInSQLException(Unknown Source) + at org.apache.derby.impl.jdbc.TransactionResourceImpl.handleException(Unknown Source) + at org.apache.derby.impl.jdbc.EmbedConnection.handleException(Unknown Source) + at org.apache.derby.impl.jdbc.ConnectionChild.handleException(Unknown Source) + at org.apache.derby.impl.jdbc.EmbedStatement.executeStatement(Unknown Source) + at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeStatement(Unknown Source) + at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeQuery(Unknown Source) + at com.jolbox.bonecp.PreparedStatementHandle.executeQuery(PreparedStatementHandle.java:174) + at org.datanucleus.store.rdbms.ParamLoggingPreparedStatement.executeQuery(ParamLoggingPreparedStatement.java:381) + at org.datanucleus.store.rdbms.SQLController.executeStatementQuery(SQLController.java:504) + at org.datanucleus.store.rdbms.query.SQLQuery.performExecute(SQLQuery.java:280) + at org.datanucleus.store.query.Query.executeQuery(Query.java:1786) + at org.datanucleus.store.query.AbstractSQLQuery.executeWithArray(AbstractSQLQuery.java:339) + at org.datanucleus.api.jdo.JDOQuery.executeWithArray(JDOQuery.java:312) + at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilterInternal(MetaStoreDirectSql.java:300) + at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilter(MetaStoreDirectSql.java:211) + at org.apache.hadoop.hive.metastore.ObjectStore$3.getSqlResult(ObjectStore.java:1915) + at org.apache.hadoop.hive.metastore.ObjectStore$3.getSqlResult(ObjectStore.java:1909) + at org.apache.hadoop.hive.metastore.ObjectStore$GetHelper.run(ObjectStore.java:2208) + at 
org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByExprInternal(ObjectStore.java:1909) + at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByExpr(ObjectStore.java:1882) + at sun.reflect.GeneratedMethodAccessor212.invoke(Unknown Source) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108) + at com.sun.proxy.$Proxy13.getPartitionsByExpr(Unknown Source) + at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.drop_partitions_req(HiveMetaStore.java:2318) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105) + at com.sun.proxy.$Proxy14.drop_partitions_req(Unknown Source) + at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropPartitions(HiveMetaStoreClient.java:709) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89) + at com.sun.proxy.$Proxy15.dropPartitions(Unknown Source) + at org.apache.hadoop.hive.ql.metadata.Hive.dropPartitions(Hive.java:1696) + at org.apache.hadoop.hive.ql.metadata.Hive.dropPartitions(Hive.java:1681) + at org.apache.hadoop.hive.ql.exec.DDLTask.dropPartitions(DDLTask.java:3860) + at org.apache.hadoop.hive.ql.exec.DDLTask.dropTableOrPartitions(DDLTask.java:3854) + at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:306) + at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:153) + at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:85) + at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1503) + at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1270) + at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1088) + at org.apache.hadoop.hive.ql.Driver.run(Driver.java:911) + at org.apache.hadoop.hive.ql.Driver.run(Driver.java:901) + at org.apache.spark.sql.hive.HiveContext.runHive(HiveContext.scala:305) + at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:276) + at org.apache.spark.sql.hive.test.TestHiveContext.runSqlHive(TestHive.scala:98) + at org.apache.spark.sql.hive.execution.HiveNativeCommand.run(HiveNativeCommand.scala:37) + at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:53) + at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:53) + at org.apache.spark.sql.execution.ExecutedCommand.executeCollect(commands.scala:59) + at org.apache.spark.sql.hive.HiveContext$QueryExecution.stringResult(HiveContext.scala:383) + at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$31.apply(HiveComparisonTest.scala:341) + at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$31.apply(HiveComparisonTest.scala:339) + at 
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244) + at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244) + at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) + at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47) + at scala.collection.TraversableLike$class.map(TraversableLike.scala:244) + at scala.collection.AbstractTraversable.map(Traversable.scala:105) + at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:339) + at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236) + at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236) + at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22) + at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85) + at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) + at org.scalatest.Transformer.apply(Transformer.scala:22) + at org.scalatest.Transformer.apply(Transformer.scala:20) + at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166) + at org.scalatest.Suite$class.withFixture(Suite.scala:1122) + at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555) + at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163) + at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175) + at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175) + at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306) + at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175) + at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:32) + at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200) + at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:32) + at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208) + at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208) + at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413) + at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401) + at scala.collection.immutable.List.foreach(List.scala:318) + at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401) + at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396) + at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483) + at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208) + at org.scalatest.FunSuite.runTests(FunSuite.scala:1555) + at org.scalatest.Suite$class.run(Suite.scala:1424) + at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555) + at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212) + at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212) + at org.scalatest.SuperEngine.runImpl(Engine.scala:545) + at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212) + at org.apache.spark.sql.hive.execution.HiveComparisonTest.org$scalatest$BeforeAndAfterAll$$super$run(HiveComparisonTest.scala:40) + at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257) + at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256) + at 
org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:32) + at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241) + at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:32) + at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462) + at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671) + at sbt.ForkMain$Run$2.call(ForkMain.java:294) + at sbt.ForkMain$Run$2.call(ForkMain.java:284) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +Caused by: java.sql.SQLException: Invalid character string format for type DECIMAL. + at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source) + at org.apache.derby.impl.jdbc.SQLExceptionFactory40.wrapArgsForTransportAcrossDRDA(Unknown Source) + ... 119 more +Caused by: ERROR 22018: Invalid character string format for type DECIMAL. + at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) + at org.apache.derby.iapi.types.DataType.invalidFormat(Unknown Source) + at org.apache.derby.iapi.types.DataType.setValue(Unknown Source) + at org.apache.derby.exe.ac5e52817cx014ax8becx98c1x00000b5602b0ece.e6(Unknown Source) + at org.apache.derby.impl.services.reflect.DirectCall.invoke(Unknown Source) + at org.apache.derby.impl.sql.execute.ProjectRestrictResultSet.getNextRowCore(Unknown Source) + at org.apache.derby.impl.sql.execute.NestedLoopJoinResultSet.getNextRowCore(Unknown Source) + at org.apache.derby.impl.sql.execute.JoinResultSet.openCore(Unknown Source) + at org.apache.derby.impl.sql.execute.ProjectRestrictResultSet.openCore(Unknown Source) + at org.apache.derby.impl.sql.execute.BasicNoPutResultSetImpl.open(Unknown Source) + at org.apache.derby.impl.sql.GenericPreparedStatement.executeStmt(Unknown Source) + at org.apache.derby.impl.sql.GenericPreparedStatement.execute(Unknown Source) + ... 113 more +[info] - drop_partitions_filter2 (1 second, 725 milliseconds) +[info] - drop_partitions_filter3 (1 second, 459 milliseconds) +[info] - drop_partitions_ignore_protection (1 second, 250 milliseconds) +[info] - drop_table (904 milliseconds) +[info] - drop_table2 (1 second, 138 milliseconds) +21:34:21.586 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/tmp/testTempFiles6167202715430906972spark.hive.tmp/drop_table_removes_partition_dirs_table specified for non-external table:test_table +Deleted file:///tmp/testTempFiles6167202715430906972spark.hive.tmp/drop_table_removes_partition_dirs_table2/part=1 +[info] - drop_table_removes_partition_dirs (1 second, 228 milliseconds) +[info] - drop_udf !!! IGNORED !!! +[info] - drop_view (1 second, 4 milliseconds) +[info] - drop_with_concurrency !!! IGNORED !!! +[info] - dynamic_partition_skip_default (1 second, 361 milliseconds) +[info] - dynpart_sort_opt_vectorization !!! IGNORED !!! +[info] - dynpart_sort_optimization !!! IGNORED !!! +[info] - enforce_order !!! IGNORED !!! +[info] - escape1 !!! IGNORED !!! +[info] - escape2 !!! IGNORED !!! +[info] - escape_clusterby1 (845 milliseconds) +[info] - escape_distributeby1 (769 milliseconds) +[info] - escape_orderby1 (623 milliseconds) +[info] - escape_sortby1 (671 milliseconds) +[info] - exchange_partition !!! 
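The ERROR above is benign for these runs: the metastore first tries a direct-SQL fast path for partition pruning, and the generated Derby query casts the string-typed "PART_KEY_VAL" column to decimal(21,0); for a non-numeric partition key value Derby raises ERROR 22018, the metastore falls back to the ORM path, and the drop_partitions_filter* tests still pass. A minimal standalone sketch of the Derby behavior (not part of this patch; assumes only the embedded Derby driver on the classpath, and the object name and cast value are made up for illustration):

    import java.sql.{DriverManager, SQLException}

    // Reproduces ERROR 22018: Derby rejects a CAST of a non-numeric character
    // string to DECIMAL at execution time, which is what the metastore's
    // generated cast("FILTER0"."PART_KEY_VAL" as decimal(21,0)) hits.
    object DerbyDecimalCastRepro {
      def main(args: Array[String]): Unit = {
        // In-memory embedded Derby database; the JDBC 4 driver auto-registers.
        val conn = DriverManager.getConnection("jdbc:derby:memory:repro;create=true")
        val stmt = conn.createStatement()
        try {
          val rs = stmt.executeQuery("VALUES CAST('2008-04-08' AS DECIMAL(21,0))")
          rs.next() // evaluation can happen lazily; force it here
        } catch {
          case e: SQLException =>
            // Prints: 22018 / Invalid character string format for type DECIMAL.
            println(s"${e.getSQLState} / ${e.getMessage}")
        } finally {
          stmt.close()
          conn.close()
        }
      }
    }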
+[info] - exchange_partition2 !!! IGNORED !!!
+[info] - exchange_partition3 !!! IGNORED !!!
+[info] - exim_00_nonpart_empty !!! IGNORED !!!
+[info] - exim_01_nonpart !!! IGNORED !!!
+[info] - exim_02_00_part_empty !!! IGNORED !!!
+[info] - exim_02_part !!! IGNORED !!!
+[info] - exim_03_nonpart_over_compat !!! IGNORED !!!
+[info] - exim_04_all_part !!! IGNORED !!!
+[info] - exim_04_evolved_parts !!! IGNORED !!!
+[info] - exim_05_some_part !!! IGNORED !!!
+[info] - exim_06_one_part !!! IGNORED !!!
+[info] - exim_07_all_part_over_nonoverlap !!! IGNORED !!!
+[info] - exim_08_nonpart_rename !!! IGNORED !!!
+[info] - exim_09_part_spec_nonoverlap !!! IGNORED !!!
+[info] - exim_10_external_managed !!! IGNORED !!!
+[info] - exim_11_managed_external !!! IGNORED !!!
+[info] - exim_12_external_location !!! IGNORED !!!
+[info] - exim_13_managed_location !!! IGNORED !!!
+[info] - exim_14_managed_location_over_existing !!! IGNORED !!!
+[info] - exim_15_external_part !!! IGNORED !!!
+[info] - exim_16_part_external !!! IGNORED !!!
+[info] - exim_17_part_managed !!! IGNORED !!!
+[info] - exim_18_part_external !!! IGNORED !!!
+[info] - exim_19_00_part_external_location !!! IGNORED !!!
+[info] - exim_19_part_external_location !!! IGNORED !!!
+[info] - exim_20_part_managed_location !!! IGNORED !!!
+[info] - exim_21_export_authsuccess !!! IGNORED !!!
+[info] - exim_22_import_exist_authsuccess !!! IGNORED !!!
+[info] - exim_23_import_part_authsuccess !!! IGNORED !!!
+[info] - exim_24_import_nonexist_authsuccess !!! IGNORED !!!
+[info] - exim_hidden_files !!! IGNORED !!!
+[info] - explain_dependency !!! IGNORED !!!
+[info] - explain_dependency2 !!! IGNORED !!!
+[info] - explain_logical !!! IGNORED !!!
+[info] - explain_rearrange (1 second, 59 milliseconds)
+[info] - explode_null !!! IGNORED !!!
+[info] - external_table_with_space_in_location_path !!! IGNORED !!!
+[info] - fetch_aggregation (1 second, 576 milliseconds)
+[info] - file_with_header_footer !!! IGNORED !!!
+[info] - fileformat_mix (1 second, 182 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - fileformat_sequencefile (955 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - fileformat_text (973 milliseconds)
+[info] - filter_join_breaktask (1 second, 631 milliseconds)
+[info] - filter_join_breaktask2 (2 seconds, 113 milliseconds)
+[info] - filter_numeric !!! IGNORED !!!
+[info] - global_limit !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_g1
+[info] - groupby1 (3 seconds, 928 milliseconds)
+[info] - groupby10 !!! IGNORED !!!
+[info] - groupby11 (2 seconds, 439 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby12 (1 second, 543 milliseconds)
+21:34:45.512 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby1_limit (1 second, 413 milliseconds)
+21:34:47.247 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby1_map (2 seconds, 230 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby1_map_nomap (1 second, 527 milliseconds)
+21:34:50.493 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby1_map_skew (1 second, 631 milliseconds)
+21:34:52.334 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_g1
+[info] - groupby1_noskew (1 second, 878 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_g2
+[info] - groupby2 (1 second, 848 milliseconds)
+21:34:55.908 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+[info] - groupby2_limit (878 milliseconds)
+21:34:57.662 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby2_map (2 seconds, 357 milliseconds)
+[info] - groupby2_map_multi_distinct !!! IGNORED !!!
+21:35:01.128 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby2_map_skew (3 seconds, 437 milliseconds)
+21:35:03.863 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_g2
+[info] - groupby2_noskew (3 seconds, 610 milliseconds)
+[info] - groupby2_noskew_multi_distinct !!! IGNORED !!!
+[info] - groupby3 !!! IGNORED !!!
+[info] - groupby3_map !!! IGNORED !!!
+[info] - groupby3_map_multi_distinct !!! IGNORED !!!
+[info] - groupby3_map_skew !!! IGNORED !!!
+[info] - groupby3_noskew !!! IGNORED !!!
+[info] - groupby3_noskew_multi_distinct !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby4 (1 second, 252 milliseconds)
+21:35:07.354 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby4_map (865 milliseconds)
+21:35:08.320 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby4_map_skew (979 milliseconds)
+21:35:09.382 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
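The recurring SetCommand warning above is expected: the Hive test scripts issue "SET mapred.reduce.tasks=N", and Spark SQL, which has no MapReduce reduce phase, rewrites the property onto its own shuffle parallelism, exactly as the message says. A two-line sketch of the equivalence (not part of this patch; assumes a HiveContext in scope named hiveContext, analogous to the TestHive context these suites use):

    // Legacy Hive-style setting: logs the deprecation warning, then converts.
    hiveContext.sql("SET mapred.reduce.tasks=31")
    // Native equivalent: same shuffle parallelism, no warning.
    hiveContext.sql("SET spark.sql.shuffle.partitions=31")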
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby4_noskew (1 second, 583 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby5 (1 second, 278 milliseconds)
+21:35:12.071 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby5_map (901 milliseconds)
+21:35:13.035 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby5_map_skew (976 milliseconds)
+21:35:14.324 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby5_noskew (2 seconds, 24 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby6 (1 second, 504 milliseconds)
+21:35:17.464 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby6_map (1 second, 406 milliseconds)
+21:35:18.945 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby6_map_skew (1 second, 527 milliseconds)
+21:35:20.449 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - groupby6_noskew (1 second, 366 milliseconds)
+21:35:22.074 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
+ ... (identical DefaultCodec warnings from 21:35:22.074 through 21:35:22.370 elided)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+21:35:22.489 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
+ ... (identical DefaultCodec warnings from 21:35:22.489 through 21:35:22.801 elided)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
+[info] - groupby7 (1 second, 995 milliseconds)
+21:35:23.834 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+21:35:24.100 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
+ ... (identical DefaultCodec warnings from 21:35:24.100 through 21:35:24.406 elided)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+21:35:24.535 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
+ ... (identical DefaultCodec warnings from 21:35:24.535 through 21:35:24.840 elided)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
+[info] - groupby7_map (2 seconds, 688 milliseconds)
+21:35:26.540 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+21:35:26.786 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
+ ... (identical DefaultCodec warnings from 21:35:26.786 through 21:35:27.081 elided)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+21:35:27.200 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
+ ... (identical DefaultCodec warnings from 21:35:27.200 through 21:35:27.509 elided)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
+[info] - groupby7_map_multi_single_reducer (2 seconds, 620 milliseconds)
+21:35:29.171 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
+21:35:29.540 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
+ ... (identical DefaultCodec warnings from 21:35:29.540 through 21:35:29.689 elided)
+21:35:29.689 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.707 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.707 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.722 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.722 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.741 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.741 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.755 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.755 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.783 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.783 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.805 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.806 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.826 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.830 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:29.868 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +21:35:30.020 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.020 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.035 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.042 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.052 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.067 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.071 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.089 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. 
+21:35:30.089 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.106 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.107 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.125 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.126 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.146 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.146 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.171 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.171 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.191 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.192 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.218 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.218 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.261 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.262 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.282 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.283 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.301 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.304 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.322 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.322 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.341 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:30.342 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - groupby7_map_skew (2 seconds, 847 milliseconds) +21:35:32.020 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead. 
+21:35:32.318 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.318 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.335 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.335 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.355 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.356 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.371 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.371 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.389 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.393 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.407 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.410 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.428 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.428 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.441 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.442 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.461 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.463 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.498 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.499 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.518 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.519 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.544 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.544 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.563 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. 
+21:35:32.563 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.584 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.584 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.602 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.603 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.620 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +21:35:32.760 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.760 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.774 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.774 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.796 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.797 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.815 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.816 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.835 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.835 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.872 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.876 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.889 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.893 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.913 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.916 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.938 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.938 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. 
+21:35:32.952 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.957 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.973 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.982 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:32.997 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:33.006 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:33.021 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:33.030 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:33.037 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:33.050 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:33.055 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:33.071 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:35:33.074 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - groupby7_noskew (2 seconds, 680 milliseconds) +21:35:35.241 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead. +21:35:35.600 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +21:35:35.797 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - groupby7_noskew_multi_single_reducer (2 seconds, 32 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - groupby8 (3 seconds, 512 milliseconds) +21:35:40.215 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead. +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - groupby8_map (2 seconds, 436 milliseconds) +21:35:42.695 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead. 
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - groupby8_map_skew (2 seconds, 550 milliseconds) +21:35:45.254 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead. +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - groupby8_noskew (2 seconds, 621 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - groupby9 (6 seconds, 710 milliseconds) +[info] - groupby_bigdata !!! IGNORED !!! +[info] - groupby_complex_types !!! IGNORED !!! +[info] - groupby_complex_types_multi_single_reducer !!! IGNORED !!! +[info] - groupby_cube1 !!! IGNORED !!! +[info] - groupby_distinct_samekey (2 seconds, 112 milliseconds) +[info] - groupby_grouping_id1 (1 second, 25 milliseconds) +[info] - groupby_grouping_id2 (1 second, 767 milliseconds) +[info] - groupby_grouping_sets1 (1 second, 498 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t2 +[info] - groupby_grouping_sets2 (1 second, 431 milliseconds) +[info] - groupby_grouping_sets3 (1 second, 638 milliseconds) +[info] - groupby_grouping_sets4 (1 second, 549 milliseconds) +[info] - groupby_grouping_sets5 (1 second, 354 milliseconds) +21:36:06.957 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead. +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - groupby_map_ppr (1 second, 393 milliseconds) +[info] - groupby_map_ppr_multi_distinct !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - groupby_multi_insert_common_distinct (3 seconds, 337 milliseconds) +[info] - groupby_multi_single_reducer !!! IGNORED !!! 
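The DefaultCodec warnings condensed above come from Hadoop's compression layer: calling createOutputStream(out) without handing the codec a Compressor makes it allocate its own, whose (possibly native) buffers are never returned to the pool. A minimal sketch of the pattern the warning text asks for ("Create a compressor first"), assuming Hadoop's CodecPool and DefaultCodec APIs; the output path and payload are illustrative only and not part of this patch:

    import java.io.FileOutputStream
    import org.apache.hadoop.conf.Configuration
    import org.apache.hadoop.io.compress.{CodecPool, Compressor, DefaultCodec}
    import org.apache.hadoop.util.ReflectionUtils

    object CompressorFirst {
      def main(args: Array[String]): Unit = {
        // Instantiate the codec via ReflectionUtils so it picks up the Configuration.
        val conf = new Configuration()
        val codec = ReflectionUtils.newInstance(classOf[DefaultCodec], conf)
        // "Create a compressor first": borrow one from the pool instead of
        // letting createOutputStream(out) allocate an unpooled one per stream.
        val compressor: Compressor = CodecPool.getCompressor(codec)
        val out = codec.createOutputStream(new FileOutputStream("/tmp/demo.deflate"), compressor)
        try {
          out.write("hello".getBytes("UTF-8"))
        } finally {
          out.close()
          // Return the compressor so its buffers are reused rather than leaked.
          CodecPool.returnCompressor(compressor)
        }
      }
    }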
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_g2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_g3 +[info] - groupby_multi_single_reducer2 (2 seconds, 167 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2 +[info] - groupby_multi_single_reducer3 (5 seconds, 180 milliseconds) +[info] - groupby_mutli_insert_common_distinct (740 milliseconds) +[info] - groupby_neg_float (1 second, 511 milliseconds) +[info] - groupby_position !!! IGNORED !!! +[info] - groupby_ppd (818 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - groupby_ppr (1 second, 334 milliseconds) +[info] - groupby_ppr_multi_distinct !!! IGNORED !!! +[info] - groupby_resolution !!! IGNORED !!! +[info] - groupby_rollup1 !!! IGNORED !!! +[info] - groupby_sort_1 !!! IGNORED !!! +[info] - groupby_sort_10 (1 second, 722 milliseconds) +[info] - groupby_sort_11 !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1 +[info] - groupby_sort_2 (1 second, 594 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl2 +[info] - groupby_sort_3 (2 seconds, 597 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl2 +[info] - groupby_sort_4 (2 seconds, 361 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl2 +[info] - groupby_sort_5 (3 seconds, 705 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1 +[info] - groupby_sort_6 (2 seconds, 882 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1/ds=1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1 +[info] - groupby_sort_7 (2 seconds, 551 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1/ds=1 +[info] - groupby_sort_8 (1 second, 266 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1/ds=1 +[info] - groupby_sort_9 (1 second, 728 milliseconds) +[info] - groupby_sort_skew_1 !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1 +[info] - groupby_sort_test_1 (1 second, 59 milliseconds) +[info] - having (1 second, 303 milliseconds) +[info] - implicit_cast1 (895 milliseconds) +[info] - import_exported_table !!! IGNORED !!! +[info] - index_auto !!! 
IGNORED !!! +[info] - index_bitmap !!! IGNORED !!! +[info] - index_bitmap1 !!! IGNORED !!! +[info] - index_bitmap2 !!! IGNORED !!! +[info] - index_bitmap3 !!! IGNORED !!! +[info] - index_bitmap_auto !!! IGNORED !!! +[info] - index_bitmap_rc !!! IGNORED !!! +[info] - index_compact !!! IGNORED !!! +[info] - index_compact_1 !!! IGNORED !!! +[info] - index_compact_2 !!! IGNORED !!! +[info] - index_compact_3 !!! IGNORED !!! +[info] - index_creation !!! IGNORED !!! +[info] - infer_const_type !!! IGNORED !!! +[info] - init_file !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - innerjoin (1 second, 521 milliseconds) +[info] - inoutdriver (835 milliseconds) +[info] - input (801 milliseconds) +[info] - input0 (686 milliseconds) +[info] - input1 (801 milliseconds) +[info] - input10 (678 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - input11 (883 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - input11_limit (1 second, 339 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - input12 (1 second, 382 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - input12_hadoop20 (1 second, 545 milliseconds) +[info] - input13 !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - input14 (1 second, 581 milliseconds) +[info] - input14_limit !!! IGNORED !!! +[info] - input15 (864 milliseconds) +[info] - input16_cc !!! IGNORED !!! +[info] - input17 !!! IGNORED !!! +[info] - input18 !!! IGNORED !!! +[info] - input19 (877 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - input1_limit (1 second, 220 milliseconds) +[info] - input2 (1 second, 227 milliseconds) +[info] - input20 !!! IGNORED !!! +[info] - input21 (1 second, 228 milliseconds) +[info] - input22 (864 milliseconds) +[info] - input23 (781 milliseconds) +[info] - input24 (1 second, 169 milliseconds) +[info] - input25 (997 milliseconds) +[info] - input26 (886 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tst/d=2009-01-01 +[info] - input28 (1 second, 528 milliseconds) +[info] - input2_limit (793 milliseconds) +[info] - input3 (1 second, 204 milliseconds) +[info] - input30 !!! IGNORED !!! +[info] - input31 !!! IGNORED !!! +[info] - input32 !!! IGNORED !!! +[info] - input33 !!! IGNORED !!! +[info] - input34 !!! IGNORED !!! +[info] - input35 !!! IGNORED !!! +[info] - input36 !!! IGNORED !!! +[info] - input37 !!! IGNORED !!! +[info] - input38 !!! IGNORED !!! +[info] - input39 !!! IGNORED !!! +[info] - input39_hadoop20 !!! IGNORED !!! +[info] - input3_limit !!! IGNORED !!! +[info] - input4 (1 second, 52 milliseconds) +[info] - input40 (2 seconds, 286 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_sp +[info] - input41 (1 second, 71 milliseconds) +[info] - input43 !!! IGNORED !!! +[info] - input45 !!! IGNORED !!! +[info] - input46 !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/intable +[info] - input49 (1 second, 2 milliseconds) +[info] - input4_cb_delim (771 milliseconds) +[info] - input4_limit !!! IGNORED !!! +[info] - input5 !!! IGNORED !!! 
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - input6 (1 second, 472 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - input7 (1 second, 62 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - input8 (1 second, 156 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - input9 (1 second, 246 milliseconds) +[info] - input_columnarserde !!! IGNORED !!! +[info] - input_dynamicserde !!! IGNORED !!! +[info] - input_lazyserde !!! IGNORED !!! +[info] - input_limit (1 second, 106 milliseconds) +[info] - input_part0 (880 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - input_part1 (1 second, 571 milliseconds) +[info] - input_part10 (1 second, 10 milliseconds) +[info] - input_part10_win (1 second, 96 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 +[info] - input_part2 (1 second, 414 milliseconds) +[info] - input_part3 (1 second, 316 milliseconds) +[info] - input_part4 (1 second, 728 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable +[info] - input_part5 (943 milliseconds) +[info] - input_part6 (950 milliseconds) +[info] - input_part7 (732 milliseconds) +[info] - input_part8 (702 milliseconds) +[info] - input_part9 (831 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest4_sequencefile +[info] - input_testsequencefile (902 milliseconds) +[info] - input_testxpath !!! IGNORED !!! +[info] - input_testxpath2 !!! IGNORED !!! +[info] - input_testxpath3 !!! IGNORED !!! +[info] - input_testxpath4 !!! IGNORED !!! +[info] - inputddl1 (915 milliseconds) +[info] - inputddl2 (2 seconds, 409 milliseconds) +[info] - inputddl3 (660 milliseconds) +[info] - inputddl4 (819 milliseconds) +[info] - inputddl5 !!! IGNORED !!! +[info] - inputddl6 (1 second, 310 milliseconds) +[info] - inputddl7 (3 seconds, 334 milliseconds) +[info] - inputddl8 (3 seconds, 508 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/insert1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/db2.db/result +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/db1.db/result +[info] - insert1 (2 seconds, 286 milliseconds) +[info] - insert1_overwrite_partitions !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/db2.db/destintable/ds=2011-11-11 +[info] - insert2_overwrite_partitions (4 seconds, 370 milliseconds) +21:37:58.947 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/insert_compressed +21:37:59.113 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:37:59.290 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +[info] - insert_compressed (1 second, 400 milliseconds) +[info] - insert_into1 !!! IGNORED !!! +[info] - insert_into2 !!! IGNORED !!! +[info] - insert_into3 !!! IGNORED !!! +[info] - insert_into4 !!! IGNORED !!! +[info] - insert_into5 !!! IGNORED !!! +[info] - insert_into6 !!! IGNORED !!! +[info] - insert_overwrite_local_directory_1 !!! IGNORED !!! +[info] - insertexternal1 !!! IGNORED !!! 
+[info] - join0 (728 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join1 (1 second, 72 milliseconds) +[info] - join10 (903 milliseconds) +[info] - join11 (801 milliseconds) +[info] - join12 (1 second, 161 milliseconds) +[info] - join13 (1 second, 21 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - join14 (1 second, 406 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - join14_hadoop20 (1 second, 414 milliseconds) +[info] - join15 (909 milliseconds) +[info] - join16 (847 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - join17 (957 milliseconds) +[info] - join18 (1 second, 338 milliseconds) +[info] - join19 (863 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j2 +[info] - join2 (1 second, 63 milliseconds) +[info] - join20 (1 second, 190 milliseconds) +[info] - join21 (1 second, 213 milliseconds) +[info] - join22 (948 milliseconds) +[info] - join23 (1 second, 65 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tst1 +[info] - join24 (2 seconds, 46 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join25 (1 second, 581 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join26 (1 second, 498 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join27 (1 second, 769 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join28 (1 second, 865 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join29 (1 second, 834 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - join3 (1 second, 355 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join30 (1 second, 572 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join31 (1 second, 645 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join32 (1 second, 715 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j2 +[info] - join32_lessSize (3 seconds, 47 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join33 (1 second, 434 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join34 (1 second, 328 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join35 (2 seconds, 6 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join36 (2 seconds, 773 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join37 (1 second, 293 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp +[info] - join38 (1 second, 203 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join39 (1 
second, 602 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - join4 (1 second, 468 milliseconds) +[info] - join40 (2 seconds, 323 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/s1 +[info] - join41 (1 second, 666 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - join5 (1 second, 370 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - join6 (1 second, 571 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - join7 (1 second, 581 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - join8 (1 second, 464 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - join9 (1 second, 38 milliseconds) +21:39:02.456 WARN org.apache.spark.sql.hive.execution.HiveCompatibilitySuite: Simplifications made on unsupported operations for test join_1to1 +[info] - join_1to1 (9 seconds, 893 milliseconds) +[info] - join_alt_syntax !!! IGNORED !!! +[info] - join_array (1 second, 315 milliseconds) +[info] - join_casesensitive (1 second, 306 milliseconds) +[info] - join_cond_pushdown_1 !!! IGNORED !!! +[info] - join_cond_pushdown_2 !!! IGNORED !!! +[info] - join_cond_pushdown_3 !!! IGNORED !!! +[info] - join_cond_pushdown_4 !!! IGNORED !!! +[info] - join_cond_pushdown_unqual1 !!! IGNORED !!! +[info] - join_cond_pushdown_unqual2 !!! IGNORED !!! +[info] - join_cond_pushdown_unqual3 !!! IGNORED !!! +[info] - join_cond_pushdown_unqual4 !!! IGNORED !!! +[info] - join_empty (1 second, 102 milliseconds) +21:39:16.078 WARN org.apache.spark.sql.hive.execution.HiveCompatibilitySuite: Simplifications made on unsupported operations for test join_filters +[info] - join_filters (20 seconds, 426 milliseconds) +[info] - join_filters_overlap !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/hive_foo +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/hive_bar +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/hive_count +[info] - join_hive_626 (1 second, 283 milliseconds) +[info] - join_literals !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_copy +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src1_copy +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1 +[info] - join_map_ppr (2 seconds, 141 milliseconds) +[info] - join_merging !!! IGNORED !!! +[info] - join_nulls (8 seconds, 917 milliseconds) +[info] - join_nullsafe (5 seconds, 728 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/join_rc1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/join_rc2 +[info] - join_rc (1 second, 256 milliseconds) +[info] - join_reorder !!! IGNORED !!! +[info] - join_reorder2 (1 second, 652 milliseconds) +[info] - join_reorder3 (2 seconds, 44 milliseconds) +[info] - join_reorder4 (1 second, 634 milliseconds) +[info] - join_star (3 seconds, 366 milliseconds) +[info] - join_thrift !!! IGNORED !!! +[info] - join_vc !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp_pyang_lv +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp_pyang_src_rcfile +[info] - lateral_view (2 seconds, 688 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/array_valued_src +[info] - lateral_view_cp (1 second, 37 milliseconds) +[info] - lateral_view_noalias !!! IGNORED !!! 
+[info] - lateral_view_ppd (1 second, 345 milliseconds) +[info] - lb_fs_stats !!! IGNORED !!! +[info] - leadlag !!! IGNORED !!! +[info] - leadlag_queries !!! IGNORED !!! +[info] - leftsemijoin (1 second, 950 milliseconds) +21:40:12.617 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead. +[info] - leftsemijoin_mr (1 second, 222 milliseconds) +[info] - limit_partition_metadataonly !!! IGNORED !!! +[info] - limit_pushdown !!! IGNORED !!! +[info] - limit_pushdown_negative (1 second, 127 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_l1 +[info] - lineage1 (1 second, 420 milliseconds) +[info] - literal_decimal !!! IGNORED !!! +[info] - literal_double (784 milliseconds) +[info] - literal_ints (732 milliseconds) +[info] - literal_string (737 milliseconds) +[info] - load_binary_data !!! IGNORED !!! +[info] - load_dyn_part1 (1 second, 484 milliseconds) +[info] - load_dyn_part10 (1 second, 576 milliseconds) +21:40:21.635 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:40:21.635 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:40:21.665 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +21:40:21.665 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. +[info] - load_dyn_part11 (1 second, 225 milliseconds) +[info] - load_dyn_part12 (1 second, 229 milliseconds) +[info] - load_dyn_part13 (1 second, 184 milliseconds) +[info] - load_dyn_part14 (1 second, 390 milliseconds) +[info] - load_dyn_part14_win (1 second, 551 milliseconds) +[info] - load_dyn_part15 !!! IGNORED !!! +[info] - load_dyn_part2 (1 second, 292 milliseconds) +[info] - load_dyn_part3 (1 second, 404 milliseconds) +[info] - load_dyn_part4 (1 second, 585 milliseconds) +[info] - load_dyn_part5 (14 seconds, 550 milliseconds) +[info] - load_dyn_part6 (7 seconds, 923 milliseconds) +[info] - load_dyn_part7 (2 seconds, 413 milliseconds) +[info] - load_dyn_part8 (2 seconds, 30 milliseconds) +[info] - load_dyn_part9 (1 second, 176 milliseconds) +[info] - load_exist_part_authsuccess !!! IGNORED !!! +[info] - load_file_with_space_in_the_name (1 second, 575 milliseconds) +[info] - load_fs !!! IGNORED !!! +[info] - load_fs2 !!! IGNORED !!! +[info] - load_fs_overwrite !!! IGNORED !!! +[info] - load_hdfs_file_with_space_in_the_name !!! IGNORED !!! +[info] - load_nonpart_authsuccess !!! IGNORED !!! +[info] - load_part_authsuccess !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/hive_test_src +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/hive_test_dst/pcol1=test_part/pcol2=test_Part +[info] - loadpart1 (1 second, 826 milliseconds) +[info] - loadpart2 !!! IGNORED !!! +[info] - loadpart_err !!! IGNORED !!! +[info] - lock1 !!! IGNORED !!! +[info] - lock2 !!! IGNORED !!! +[info] - lock3 !!! IGNORED !!! +[info] - lock4 !!! IGNORED !!! +[info] - louter_join_ppr (1 second, 398 milliseconds) +[info] - macro !!! IGNORED !!! +[info] - mapjoin1 !!! IGNORED !!! +[info] - mapjoin_addjar !!! IGNORED !!! +[info] - mapjoin_decimal !!! IGNORED !!! 
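The recurring SetCommand notice (e.g. before leftsemijoin_mr above) records a compatibility shim: the Hive-style mapred.reduce.tasks setting issued by the test scripts is rewritten onto Spark SQL's spark.sql.shuffle.partitions. A hedged illustration against a HiveContext of this vintage; the app name, master, and value 10 are illustrative:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.hive.HiveContext

    object ShufflePartitionsDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("demo").setMaster("local[2]"))
        val hc = new HiveContext(sc)
        // The Hive-style form the test scripts issue; Spark SQL converts it
        // and logs the deprecation warning seen throughout this log.
        hc.sql("SET mapred.reduce.tasks=10")
        // The native equivalent, which produces no warning.
        hc.setConf("spark.sql.shuffle.partitions", "10")
        sc.stop()
      }
    }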
+[info] - mapjoin_distinct (1 second, 301 milliseconds) +[info] - mapjoin_filter_on_outerjoin (1 second, 293 milliseconds) +[info] - mapjoin_mapjoin (1 second, 455 milliseconds) +[info] - mapjoin_memcheck !!! IGNORED !!! +[info] - mapjoin_subquery (1 second, 264 milliseconds) +[info] - mapjoin_subquery2 (1 second, 478 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_1 +[info] - mapjoin_test_outer (2 seconds, 507 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - mapreduce1 (1 second, 53 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - mapreduce2 (1 second, 689 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - mapreduce3 (899 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - mapreduce4 (1 second, 217 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - mapreduce5 (867 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - mapreduce6 (959 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - mapreduce7 (872 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - mapreduce8 (937 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - merge1 (1 second, 645 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test1 +[info] - merge2 (1 second, 772 milliseconds) +[info] - merge3 !!! IGNORED !!! +[info] - merge4 !!! IGNORED !!! +[info] - merge_dynamic_partition !!! IGNORED !!! +[info] - merge_dynamic_partition2 !!! IGNORED !!! +[info] - merge_dynamic_partition3 !!! IGNORED !!! +[info] - merge_dynamic_partition4 !!! IGNORED !!! +[info] - merge_dynamic_partition5 !!! IGNORED !!! +[info] - mergejoins (1 second, 122 milliseconds) +[info] - metadata_only_queries !!! IGNORED !!! +[info] - metadata_only_queries_with_filters !!! IGNORED !!! +[info] - metadataonly1 !!! IGNORED !!! +[info] - mi !!! IGNORED !!! +[info] - mrr !!! IGNORED !!! +21:41:26.777 WARN org.apache.spark.sql.hive.execution.HiveCompatibilitySuite: Simplifications made on unsupported operations for test multiMapJoin1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smalltbl1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smalltbl2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smalltbl3 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smalltbl4 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/bigtbl +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/bigtbl +[info] - multiMapJoin1 (4 seconds, 161 milliseconds) +21:41:30.940 WARN org.apache.spark.sql.hive.execution.HiveCompatibilitySuite: Simplifications made on unsupported operations for test multiMapJoin2 +[info] - multiMapJoin2 (3 seconds, 492 milliseconds) +[info] - multi_insert !!! IGNORED !!! 
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2 +[info] - multi_insert_gby (3 seconds, 321 milliseconds) +[info] - multi_insert_gby2 !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1 +[info] - multi_insert_gby3 (1 second, 800 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_10 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv3 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv3 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv1 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv2 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv3 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv4 +[info] - multi_insert_lateral_view (4 seconds, 316 milliseconds) +[info] - multi_insert_move_tasks_share_dependencies !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src11 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src12 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src13 +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src14 +[info] - multi_join_union (1 second, 515 milliseconds) +[info] - multigroupby_singlemr (1 second, 238 milliseconds) +[info] - nested_complex !!! IGNORED !!! +[info] - nestedvirtual !!! IGNORED !!! +[info] - newline !!! IGNORED !!! +[info] - no_hooks !!! IGNORED !!! +[info] - noalias_subq1 (935 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ambiguous +[info] - nomore_ambiguous_table_col (922 milliseconds) +[info] - nonblock_op_deduplicate (1 second, 17 milliseconds) +[info] - nonmr_fetch !!! IGNORED !!! +[info] - nonmr_fetch_threshold !!! IGNORED !!! +[info] - nonreserved_keywords_input37 !!! IGNORED !!! +[info] - nonreserved_keywords_insert_into1 !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - notable_alias1 (1 second, 628 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 +[info] - notable_alias2 (1 second, 291 milliseconds) +[info] - notable_alias3 !!! IGNORED !!! +[info] - null_cast !!! IGNORED !!! +[info] - null_column !!! IGNORED !!! +[info] - nullformat !!! IGNORED !!! +[info] - nullformatCTAS !!! IGNORED !!! +[info] - nullformatdir !!! IGNORED !!! 
+[info] - nullgroup (985 milliseconds) +[info] - nullgroup2 (986 milliseconds) +[info] - nullgroup3 (2 seconds, 147 milliseconds) +[info] - nullgroup4 (1 second, 141 milliseconds) +[info] - nullgroup4_multi_distinct (893 milliseconds) +[info] - nullgroup5 (1 second, 149 milliseconds) +[info] - nullinput (1 second, 76 milliseconds) +[info] - nullinput2 (702 milliseconds) +[info] - nullscript (898 milliseconds) +[info] - num_op_type_conv !!! IGNORED !!! +[info] - optional_outer (1 second, 674 milliseconds) +[info] - orc_analyze !!! IGNORED !!! +[info] - orc_create !!! IGNORED !!! +[info] - orc_createas1 !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_orc +21:42:05.492 WARN org.apache.spark.scheduler.TaskSetManager: Stage 5620 contains a task of very large size (249 KB). The maximum recommended task size is 100 KB. +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_orc +[info] - orc_dictionary_threshold (1 second, 599 milliseconds) +[info] - orc_diff_part_cols !!! IGNORED !!! +[info] - orc_diff_part_cols2 !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_orc +[info] - orc_empty_files (917 milliseconds) +[info] - orc_empty_strings !!! IGNORED !!! +[info] - orc_min_max !!! IGNORED !!! +[info] - orc_ppd_char !!! IGNORED !!! +[info] - orc_ppd_date !!! IGNORED !!! +[info] - orc_ppd_decimal !!! IGNORED !!! +[info] - orc_ppd_varchar !!! IGNORED !!! +[info] - orc_split_elimination !!! IGNORED !!! +[info] - orc_vectorization_ppd !!! IGNORED !!! +[info] - order (804 milliseconds) +[info] - order2 (634 milliseconds) +[info] - order_within_subquery !!! IGNORED !!! +[info] - outer_join_ppr (1 second, 52 milliseconds) +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_a +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_b +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_a +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_b +[info] - parallel (1 second, 558 milliseconds) +[info] - parallel_orderby !!! IGNORED !!! +[info] - parenthesis_star_by (1 second, 75 milliseconds) +[info] - parquet_create !!! IGNORED !!! +[info] - parquet_ctas !!! IGNORED !!! +[info] - parquet_partitioned !!! IGNORED !!! +[info] - parquet_types !!! IGNORED !!! +[info] - partInit !!! IGNORED !!! +[info] - part_inherit_tbl_props (824 milliseconds) +[info] - part_inherit_tbl_props_empty (918 milliseconds) +[info] - part_inherit_tbl_props_with_star (934 milliseconds) +[info] - partcols1 (1 second, 552 milliseconds) +[info] - partition_date (2 seconds, 933 milliseconds) +[info] - partition_date2 !!! IGNORED !!! +[info] - partition_decode_name !!! IGNORED !!! +[info] - partition_schema1 (1 second, 96 milliseconds) +[info] - partition_serde_format (1 second, 139 milliseconds) +[info] - partition_special_char !!! IGNORED !!! +Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tab1/month=June/day=2008-01-01 +[info] - partition_type_check (1 second, 638 milliseconds) +[info] - partition_varchar1 (2 seconds, 756 milliseconds) +[info] - partition_varchar2 !!! IGNORED !!! +[info] - partition_vs_table_metadata !!! IGNORED !!! +[info] - partition_wise_fileformat !!! IGNORED !!! +[info] - partition_wise_fileformat10 !!! IGNORED !!! +[info] - partition_wise_fileformat11 !!! IGNORED !!! +[info] - partition_wise_fileformat12 !!! IGNORED !!! +[info] - partition_wise_fileformat13 !!! IGNORED !!! +[info] - partition_wise_fileformat14 !!! IGNORED !!! +[info] - partition_wise_fileformat15 !!! IGNORED !!! 
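The TaskSetManager warning above (a 249 KB task against the 100 KB recommendation) typically means a sizable object was captured in a task closure and is being serialized with every task; the usual remedy is a broadcast variable, which ships the object to each executor once. A minimal sketch, with the lookup table and sizes purely illustrative and unrelated to the orc_dictionary_threshold test that triggered the warning:

    import org.apache.spark.{SparkConf, SparkContext}

    object BroadcastDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("demo").setMaster("local[2]"))
        // Captured directly, this map would be serialized into every task.
        val lookup = (1 to 100000).map(i => i -> s"value$i").toMap
        val bcLookup = sc.broadcast(lookup) // sent to each executor once instead
        val result = sc.parallelize(1 to 10).map(i => bcLookup.value.getOrElse(i, "?")).collect()
        result.foreach(println)
        sc.stop()
      }
    }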
+[info] - partition_wise_fileformat16 !!! IGNORED !!!
+[info] - partition_wise_fileformat17 !!! IGNORED !!!
+[info] - partition_wise_fileformat18 !!! IGNORED !!!
+[info] - partition_wise_fileformat2 !!! IGNORED !!!
+[info] - partition_wise_fileformat3 !!! IGNORED !!!
+[info] - partition_wise_fileformat4 (1 second, 311 milliseconds)
+[info] - partition_wise_fileformat5 (1 second, 722 milliseconds)
+[info] - partition_wise_fileformat6 (1 second, 532 milliseconds)
+[info] - partition_wise_fileformat7 (1 second, 763 milliseconds)
+[info] - partition_wise_fileformat8 !!! IGNORED !!!
+[info] - partition_wise_fileformat9 (1 second, 335 milliseconds)
+[info] - pcr !!! IGNORED !!!
+[info] - plan_json (758 milliseconds)
+[info] - ppd1 (727 milliseconds)
+[info] - ppd2 (1 second, 7 milliseconds)
+[info] - ppd_clusterby (1 second, 91 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppd_constant_expr
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppd_constant_expr
+[info] - ppd_constant_expr (1 second, 101 milliseconds)
+[info] - ppd_constant_where (915 milliseconds)
+[info] - ppd_gby (773 milliseconds)
+[info] - ppd_gby2 (875 milliseconds)
+[info] - ppd_gby_join (717 milliseconds)
+[info] - ppd_join (1 second, 151 milliseconds)
+[info] - ppd_join2 (1 second, 152 milliseconds)
+[info] - ppd_join3 (1 second, 47 milliseconds)
+[info] - ppd_join4 !!! IGNORED !!!
+[info] - ppd_join_filter (1 second, 989 milliseconds)
+[info] - ppd_multi_insert !!! IGNORED !!!
+[info] - ppd_outer_join1 (973 milliseconds)
+[info] - ppd_outer_join2 (797 milliseconds)
+[info] - ppd_outer_join3 (1 second, 33 milliseconds)
+[info] - ppd_outer_join4 (1 second, 26 milliseconds)
+[info] - ppd_outer_join5 (1 second, 202 milliseconds)
+[info] - ppd_random (726 milliseconds)
+[info] - ppd_repeated_alias (1 second, 108 milliseconds)
+[info] - ppd_transform !!! IGNORED !!!
+[info] - ppd_udf_case !!! IGNORED !!!
+[info] - ppd_udf_col (868 milliseconds)
+[info] - ppd_udtf !!! IGNORED !!!
+[info] - ppd_union (1 second, 711 milliseconds)
+[info] - ppd_union_view !!! IGNORED !!!
+[info] - ppd_vc !!! IGNORED !!!
+[info] - ppr_allchildsarenull (815 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=1234
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=1224
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=1214
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=12+4
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=12.4
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=12%3A4
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=12%254
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=12%2A4
+[info] - ppr_pushdown (3 seconds, 280 milliseconds)
+[info] - ppr_pushdown2 (3 seconds, 781 milliseconds)
+[info] - ppr_pushdown3 (1 second, 177 milliseconds)
+[info] - print_header !!! IGNORED !!!
+[info] - progress_1 (810 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1
+[info] - protectmode (2 seconds, 557 milliseconds)
+[info] - ptf !!! IGNORED !!!
+[info] - ptf_decimal !!! IGNORED !!!
+[info] - ptf_general_queries !!! IGNORED !!!
+[info] - ptf_matchpath !!! IGNORED !!!
+[info] - ptf_rcfile !!! IGNORED !!!
+[info] - ptf_register_tblfn !!! IGNORED !!!
+[info] - ptf_seqfile !!! IGNORED !!!
+[info] - push_or (1 second, 210 milliseconds)
+[info] - query_result_fileformat !!! IGNORED !!!
+[info] - query_with_semi (1 second, 498 milliseconds)
+[info] - quote1 (4 seconds, 635 milliseconds)
+[info] - quote2 (1 second, 180 milliseconds)
+[info] - quotedid_alter !!! IGNORED !!!
+[info] - quotedid_basic !!! IGNORED !!!
+[info] - quotedid_partition !!! IGNORED !!!
+[info] - quotedid_skew !!! IGNORED !!!
+[info] - quotedid_smb !!! IGNORED !!!
+[info] - quotedid_tblproperty !!! IGNORED !!!
+[info] - rand_partitionpruner1 !!! IGNORED !!!
+[info] - rand_partitionpruner2 !!! IGNORED !!!
+[info] - rand_partitionpruner3 !!! IGNORED !!!
+[info] - rcfile_bigdata !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/columntable
+[info] - rcfile_columnar (870 milliseconds)
+[info] - rcfile_createas1 !!! IGNORED !!!
+[info] - rcfile_default_format !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/rcfiletablelazydecompress
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/rcfiletablelazydecompress
+[info] - rcfile_lazydecompress (1 second, 433 milliseconds)
+[info] - rcfile_merge1 !!! IGNORED !!!
+[info] - rcfile_merge2 !!! IGNORED !!!
+[info] - rcfile_merge3 !!! IGNORED !!!
+[info] - rcfile_merge4 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src1_rc
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1_rc
+[info] - rcfile_null_value (1 second, 526 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_src
+[info] - rcfile_toleratecorruptions (1 second, 66 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/rcfile_uniontable
+[info] - rcfile_union (913 milliseconds)
+[info] - recursive_dir !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/bucket5_1
+[info] - reduce_deduplicate (1 second, 70 milliseconds)
+[info] - reduce_deduplicate_exclude_gby (940 milliseconds)
+[info] - reduce_deduplicate_exclude_join (633 milliseconds)
+[info] - reduce_deduplicate_extended (2 seconds, 387 milliseconds)
+[info] - reducesink_dedup (931 milliseconds)
+[info] - regex_col !!! IGNORED !!!
+[info] - regexp_extract !!! IGNORED !!!
+[info] - remote_script !!! IGNORED !!!
+[info] - rename_column (2 seconds, 318 milliseconds)
+[info] - rename_external_partition_location !!! IGNORED !!!
+[info] - rename_partition_location !!! IGNORED !!!
+[info] - rename_table_location !!! IGNORED !!!
+[info] - reset_conf !!! IGNORED !!!
+[info] - root_dir_external_table !!! IGNORED !!!
+[info] - router_join_ppr (1 second, 710 milliseconds)
+[info] - sample1 !!! IGNORED !!!
+[info] - sample2 !!! IGNORED !!!
+[info] - sample3 !!! IGNORED !!!
+[info] - sample4 !!! IGNORED !!!
+[info] - sample5 !!! IGNORED !!!
+[info] - sample6 !!! IGNORED !!!
+[info] - sample7 !!! IGNORED !!!
+[info] - sample8 !!! IGNORED !!!
+[info] - sample9 !!! IGNORED !!!
+[info] - sample_islocalmode_hook !!! IGNORED !!!
+[info] - sample_islocalmode_hook_hadoop20 !!! IGNORED !!!
+[info] - schemeAuthority !!! IGNORED !!!
+[info] - schemeAuthority2 !!! IGNORED !!!
+[info] - script_env_var1 !!! IGNORED !!!
+[info] - script_env_var2 !!! IGNORED !!!
+[info] - script_pipe !!! IGNORED !!!
+[info] - scriptfile1 !!! IGNORED !!!
+[info] - scriptfile1_win !!! IGNORED !!!
+[info] - select_as_omitted (825 milliseconds)
+[info] - select_dummy_source !!! IGNORED !!!
+[info] - select_transform_hint !!! IGNORED !!!
+[info] - select_unquote_and (1 second, 105 milliseconds)
+[info] - select_unquote_not (1 second, 892 milliseconds)
+[info] - select_unquote_or (1 second, 333 milliseconds)
+[info] - semicolon !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t3
+[info] - semijoin (4 seconds, 528 milliseconds)
+[info] - serde_regex (1 second, 421 milliseconds)
+[info] - serde_reported_schema (701 milliseconds)
+[info] - serde_user_properties !!! IGNORED !!!
+[info] - set_processor_namespaces !!! IGNORED !!!
+[info] - set_variable_sub (980 milliseconds)
+[info] - show_columns (1 second, 474 milliseconds)
+[info] - show_create_table_alter (1 second, 219 milliseconds)
+[info] - show_create_table_db_table (902 milliseconds)
+21:43:49.858 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/tmp/testTempFiles6167202715430906972spark.hive.tmp/tmp_showcrt1 specified for non-external table:tmp_showcrt1
+[info] - show_create_table_delimited (783 milliseconds)
+[info] - show_create_table_partitioned (808 milliseconds)
+[info] - show_create_table_serde (941 milliseconds)
+[info] - show_create_table_view (800 milliseconds)
+[info] - show_describe_func_quotes (789 milliseconds)
+[info] - show_functions (815 milliseconds)
+[info] - show_indexes_edge_cases !!! IGNORED !!!
+[info] - show_indexes_syntax !!! IGNORED !!!
+[info] - show_partitions (778 milliseconds)
+[info] - show_roles !!! IGNORED !!!
+[info] - show_tables !!! IGNORED !!!
+[info] - show_tablestatus !!! IGNORED !!!
+[info] - show_tblproperties (959 milliseconds)
+[info] - showparts !!! IGNORED !!!
+[info] - skewjoin_noskew !!! IGNORED !!!
+[info] - skewjoin_union_remove_1 !!! IGNORED !!!
+[info] - skewjoin_union_remove_2 !!! IGNORED !!!
+[info] - skewjoinopt1 !!! IGNORED !!!
+[info] - skewjoinopt10 !!! IGNORED !!!
+[info] - skewjoinopt11 !!! IGNORED !!!
+[info] - skewjoinopt12 !!! IGNORED !!!
+[info] - skewjoinopt13 (2 seconds, 420 milliseconds)
+[info] - skewjoinopt14 !!! IGNORED !!!
+[info] - skewjoinopt15 !!! IGNORED !!!
+[info] - skewjoinopt16 !!! IGNORED !!!
+[info] - skewjoinopt17 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
+[info] - skewjoinopt18 (1 second, 802 milliseconds)
+[info] - skewjoinopt19 !!! IGNORED !!!
+[info] - skewjoinopt2 !!! IGNORED !!!
+[info] - skewjoinopt20 !!! IGNORED !!!
+[info] - skewjoinopt3 !!! IGNORED !!!
+[info] - skewjoinopt4 !!! IGNORED !!!
+[info] - skewjoinopt5 !!! IGNORED !!!
+[info] - skewjoinopt6 !!! IGNORED !!!
+[info] - skewjoinopt7 !!! IGNORED !!!
+[info] - skewjoinopt8 !!! IGNORED !!!
+[info] - skewjoinopt9 (1 second, 356 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_mapjoin9_results
+[info] - smb_mapjoin9 (2 seconds, 170 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_3
+[info] - smb_mapjoin_1 (2 seconds, 97 milliseconds)
+[info] - smb_mapjoin_10 (1 second, 339 milliseconds)
+[info] - smb_mapjoin_11 !!! IGNORED !!!
+[info] - smb_mapjoin_12 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table3
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table4
+[info] - smb_mapjoin_13 (1 second, 622 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl2
+[info] - smb_mapjoin_14 (4 seconds, 50 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table2
+[info] - smb_mapjoin_15 (2 seconds, 544 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table2
+[info] - smb_mapjoin_16 (1 second, 141 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table3
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table4
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table5
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table6
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table7
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table8
+[info] - smb_mapjoin_17 (3 seconds, 495 milliseconds)
+[info] - smb_mapjoin_18 !!! IGNORED !!!
+[info] - smb_mapjoin_19 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_3
+[info] - smb_mapjoin_2 (2 seconds, 512 milliseconds)
+[info] - smb_mapjoin_20 !!! IGNORED !!!
+[info] - smb_mapjoin_21 (1 second, 786 milliseconds)
+[info] - smb_mapjoin_22 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_3
+[info] - smb_mapjoin_25 (1 second, 489 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_3
+[info] - smb_mapjoin_3 (2 seconds, 92 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_3
+[info] - smb_mapjoin_4 (3 seconds, 601 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_3
+[info] - smb_mapjoin_5 (3 seconds, 274 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/normal_join_results
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/normal_join_results
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results
+[info] - smb_mapjoin_6 (3 seconds, 355 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results_empty_bigtable
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results_empty_bigtable
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/normal_join_results
+[info] - smb_mapjoin_7 (2 seconds, 473 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_3
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_3
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_3
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_3
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_3
+[info] - smb_mapjoin_8 (5 seconds, 989 milliseconds)
+[info] - sort (824 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc2
+[info] - sort_merge_join_desc_1 (1 second, 183 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc2
+[info] - sort_merge_join_desc_2 (1 second, 623 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc2
+[info] - sort_merge_join_desc_3 (1 second, 295 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc2
+[info] - sort_merge_join_desc_4 (1 second, 166 milliseconds)
+[info] - sort_merge_join_desc_5 (1 second, 247 milliseconds)
+[info] - sort_merge_join_desc_6 (1 second, 443 milliseconds)
+[info] - sort_merge_join_desc_7 (1 second, 741 milliseconds)
+[info] - source !!! IGNORED !!!
+[info] - split !!! IGNORED !!!
+[info] - split_sample !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/stats_non_partitioned
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/stats_non_partitioned
+[info] - stats0 (2 seconds, 205 milliseconds)
+[info] - stats2 !!! IGNORED !!!
+[info] - stats3 !!! IGNORED !!!
+[info] - stats4 !!! IGNORED !!!
+[info] - stats5 !!! IGNORED !!!
+[info] - stats6 !!! IGNORED !!!
+[info] - stats7 !!! IGNORED !!!
+[info] - stats8 !!! IGNORED !!!
+[info] - stats9 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
+[info] - stats_aggregator_error_1 (1 second, 520 milliseconds)
+[info] - stats_counter !!! IGNORED !!!
+[info] - stats_counter_partitioned !!! IGNORED !!!
+[info] - stats_empty_dyn_part !!! IGNORED !!!
+[info] - stats_empty_partition (982 milliseconds)
+[info] - stats_invalidation !!! IGNORED !!!
+[info] - stats_noscan_1 !!! IGNORED !!!
+[info] - stats_noscan_2 !!! IGNORED !!!
+[info] - stats_only_null !!! IGNORED !!!
+[info] - stats_partscan_1 !!! IGNORED !!!
+[info] - stats_partscan_1_23 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
+[info] - stats_publisher_error_1 (1 second, 796 milliseconds)
+[info] - statsfs !!! IGNORED !!!
+[info] - str_to_map !!! IGNORED !!!
+[info] - subq !!! IGNORED !!!
+[info] - subq2 (782 milliseconds)
+[info] - subq_where_serialization !!! IGNORED !!!
+[info] - subquery_alias !!! IGNORED !!!
+[info] - subquery_exists !!! IGNORED !!!
+[info] - subquery_exists_having !!! IGNORED !!!
+[info] - subquery_in !!! IGNORED !!!
+[info] - subquery_in_having !!! IGNORED !!!
+[info] - subquery_multiinsert !!! IGNORED !!!
+[info] - subquery_notexists !!! IGNORED !!!
+[info] - subquery_notexists_having !!! IGNORED !!!
+[info] - subquery_notin !!! IGNORED !!!
+[info] - subquery_notin_having !!! IGNORED !!!
+[info] - subquery_unqualcolumnrefs !!! IGNORED !!!
+[info] - subquery_views !!! IGNORED !!!
+[info] - table_access_keys_stats !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp_select
+[info] - tablename_with_select (822 milliseconds)
+[info] - test_boolean_whereclause !!! IGNORED !!!
+[info] - tez_dml !!! IGNORED !!!
+[info] - tez_fsstat !!! IGNORED !!!
+[info] - tez_insert_overwrite_local_directory_1 !!! IGNORED !!!
+[info] - tez_join_tests !!! IGNORED !!!
+[info] - tez_joins_explain !!! IGNORED !!!
+[info] - tez_schema_evolution !!! IGNORED !!!
+[info] - tez_union !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_1
+[info] - timestamp_1 (3 seconds, 957 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_2
+[info] - timestamp_2 (3 seconds, 966 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_3
+[info] - timestamp_3 (1 second, 309 milliseconds)
+[info] - timestamp_comparison (921 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_lazy
+[info] - timestamp_lazy (1 second, 423 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_null
+[info] - timestamp_null (921 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_udf
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_udf_string
+[info] - timestamp_udf (1 second, 985 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tstsrc
+[info] - touch (1 second, 810 milliseconds)
+[info] - transform1 !!! IGNORED !!!
+[info] - transform2 !!! IGNORED !!!
+[info] - transform_ppr1 (750 milliseconds)
+[info] - transform_ppr2 (694 milliseconds)
+[info] - truncate_column !!! IGNORED !!!
+[info] - truncate_column_merge !!! IGNORED !!!
+[info] - truncate_table (1 second, 787 milliseconds)
+[info] - type_cast_1 (879 milliseconds)
+[info] - type_conversions_1 !!! IGNORED !!!
+[info] - type_widening (813 milliseconds)
+[info] - udaf_collect_set (993 milliseconds)
+[info] - udaf_context_ngrams !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/covar_tab
+[info] - udaf_corr (1 second, 276 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/covar_tab
+[info] - udaf_covar_pop (1 second, 193 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/covar_tab
+[info] - udaf_covar_samp (1 second, 115 milliseconds)
+[info] - udaf_histogram_numeric (827 milliseconds)
+[info] - udaf_ngrams !!! IGNORED !!!
+[info] - udaf_number_format !!! IGNORED !!!
+[info] - udaf_percentile !!! IGNORED !!!
+[info] - udaf_percentile_approx_20 !!! IGNORED !!!
+[info] - udaf_percentile_approx_23 !!! IGNORED !!!
+[info] - udaf_sum_list !!! IGNORED !!!
+[info] - udf1 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - udf2 (1 second, 211 milliseconds)
+[info] - udf3 !!! IGNORED !!!
+[info] - udf4 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - udf5 (944 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - udf6 (1 second, 87 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - udf7 (945 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - udf8 (1 second, 92 milliseconds)
+[info] - udf9 (756 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - udf_10_trims (810 milliseconds)
+[info] - udf_E (973 milliseconds)
+[info] - udf_PI (947 milliseconds)
+[info] - udf_abs (821 milliseconds)
+[info] - udf_acos (3 seconds, 443 milliseconds)
+[info] - udf_add (2 seconds, 407 milliseconds)
+[info] - udf_array (2 seconds, 65 milliseconds)
+[info] - udf_array_contains (747 milliseconds)
+[info] - udf_ascii (1 second, 256 milliseconds)
+[info] - udf_asin (877 milliseconds)
+[info] - udf_atan (944 milliseconds)
+[info] - udf_avg (602 milliseconds)
+[info] - udf_between !!! IGNORED !!!
+[info] - udf_bigint (750 milliseconds)
+[info] - udf_bin (752 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/bitmap_test
+[info] - udf_bitmap_and (997 milliseconds)
+[info] - udf_bitmap_empty (725 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/bitmap_test
+[info] - udf_bitmap_or (1 second, 699 milliseconds)
+[info] - udf_bitwise_and (814 milliseconds)
+[info] - udf_bitwise_not (1 second, 100 milliseconds)
+[info] - udf_bitwise_or (652 milliseconds)
+[info] - udf_bitwise_xor (725 milliseconds)
+[info] - udf_boolean (662 milliseconds)
+[info] - udf_case_column_pruning !!! IGNORED !!!
+[info] - udf_case_thrift !!! IGNORED !!!
+[info] - udf_ceil (695 milliseconds)
+[info] - udf_ceiling (648 milliseconds)
+[info] - udf_coalesce !!! IGNORED !!!
+[info] - udf_compare_java_string !!! IGNORED !!!
+[info] - udf_concat (887 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - udf_concat_insert1 (871 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - udf_concat_insert2 (915 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - udf_concat_ws (1 second, 96 milliseconds)
+[info] - udf_context_aware !!! IGNORED !!!
+[info] - udf_conv (1 second, 217 milliseconds)
+[info] - udf_cos (804 milliseconds)
+[info] - udf_count (1 second, 269 milliseconds)
+[info] - udf_current_database !!! IGNORED !!!
+[info] - udf_date_add (1 second, 52 milliseconds)
+[info] - udf_date_sub (699 milliseconds)
+[info] - udf_datediff (680 milliseconds)
+[info] - udf_day (639 milliseconds)
+[info] - udf_dayofmonth (651 milliseconds)
+[info] - udf_degrees (1 second, 37 milliseconds)
+[info] - udf_div (668 milliseconds)
+[info] - udf_divide !!! IGNORED !!!
+[info] - udf_double (706 milliseconds)
+[info] - udf_elt (893 milliseconds)
+[info] - udf_equal (1 second, 400 milliseconds)
+[info] - udf_exp (705 milliseconds)
+[info] - udf_explode !!! IGNORED !!!
+[info] - udf_field (1 second, 240 milliseconds)
+[info] - udf_find_in_set (2 seconds, 609 milliseconds)
+[info] - udf_float (743 milliseconds)
+[info] - udf_floor (687 milliseconds)
+[info] - udf_format_number (1 second, 488 milliseconds)
+[info] - udf_from_unixtime (622 milliseconds)
+[info] - udf_get_json_object !!! IGNORED !!!
+[info] - udf_greaterthan (798 milliseconds)
+[info] - udf_greaterthanorequal (775 milliseconds)
+[info] - udf_hash (764 milliseconds)
+[info] - udf_hex (869 milliseconds)
+[info] - udf_hour !!! IGNORED !!!
+[info] - udf_if (830 milliseconds)
+[info] - udf_in !!! IGNORED !!!
+[info] - udf_in_file !!! IGNORED !!!
+[info] - udf_index (628 milliseconds)
+[info] - udf_inline !!! IGNORED !!!
+[info] - udf_instr (838 milliseconds)
+[info] - udf_int (1 second, 41 milliseconds)
+[info] - udf_isnotnull (646 milliseconds)
+[info] - udf_isnull (605 milliseconds)
+[info] - udf_isnull_isnotnull !!! IGNORED !!!
+[info] - udf_java_method (792 milliseconds)
+[info] - udf_lcase (714 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+[info] - udf_length (1 second, 267 milliseconds)
+[info] - udf_lessthan (864 milliseconds)
+[info] - udf_lessthanorequal (728 milliseconds)
+[info] - udf_like (793 milliseconds)
+[info] - udf_ln (730 milliseconds)
+[info] - udf_locate (747 milliseconds)
+[info] - udf_log (721 milliseconds)
+[info] - udf_log10 (1 second, 11 milliseconds)
+[info] - udf_log2 (696 milliseconds)
+[info] - udf_logic_java_boolean !!! IGNORED !!!
+[info] - udf_lower (689 milliseconds)
+[info] - udf_lpad (935 milliseconds)
+[info] - udf_ltrim (743 milliseconds)
+[info] - udf_map (796 milliseconds)
+[info] - udf_map_keys !!! IGNORED !!!
+[info] - udf_map_values !!! IGNORED !!!
+[info] - udf_max !!! IGNORED !!!
+[info] - udf_min !!! IGNORED !!!
+[info] - udf_minute (721 milliseconds)
+[info] - udf_modulo (692 milliseconds)
+[info] - udf_month (578 milliseconds)
+[info] - udf_named_struct (788 milliseconds)
+[info] - udf_negative (974 milliseconds)
+[info] - udf_not (823 milliseconds)
+[info] - udf_notequal (823 milliseconds)
+[info] - udf_notop (1 second, 95 milliseconds)
+[info] - udf_nvl (746 milliseconds)
+[info] - udf_or (654 milliseconds)
+[info] - udf_parse_url (735 milliseconds)
+[info] - udf_percentile !!! IGNORED !!!
+[info] - udf_pmod (1 second, 210 milliseconds)
+[info] - udf_positive (710 milliseconds)
+[info] - udf_pow (742 milliseconds)
+[info] - udf_power (591 milliseconds)
+[info] - udf_printf !!! IGNORED !!!
+[info] - udf_radians (1 second, 5 milliseconds)
+[info] - udf_rand (981 milliseconds)
+[info] - udf_reflect !!! IGNORED !!!
+[info] - udf_reflect2 !!! IGNORED !!!
+[info] - udf_regexp (1 second, 84 milliseconds)
+[info] - udf_regexp_extract (1 second, 367 milliseconds)
+[info] - udf_regexp_replace (1 second, 529 milliseconds)
+[info] - udf_repeat (958 milliseconds)
+[info] - udf_reverse !!! IGNORED !!!
+[info] - udf_rlike (734 milliseconds)
+[info] - udf_round_2 !!! IGNORED !!!
+[info] - udf_round_3 (881 milliseconds)
+[info] - udf_rpad (831 milliseconds)
+[info] - udf_rtrim (608 milliseconds)
+[info] - udf_second (813 milliseconds)
+[info] - udf_sentences !!! IGNORED !!!
+[info] - udf_sign (1 second, 45 milliseconds)
+[info] - udf_sin (792 milliseconds)
+[info] - udf_size !!! IGNORED !!!
+[info] - udf_smallint (652 milliseconds)
+[info] - udf_space (810 milliseconds)
+[info] - udf_split !!! IGNORED !!!
+[info] - udf_sqrt (730 milliseconds)
+[info] - udf_std (983 milliseconds)
+[info] - udf_stddev (789 milliseconds)
+[info] - udf_stddev_pop (737 milliseconds)
+[info] - udf_stddev_samp (656 milliseconds)
+[info] - udf_string (672 milliseconds)
+[info] - udf_struct (725 milliseconds)
+[info] - udf_substr !!! IGNORED !!!
+[info] - udf_substring (677 milliseconds)
+[info] - udf_subtract (658 milliseconds)
+[info] - udf_sum (715 milliseconds)
+[info] - udf_tan (908 milliseconds)
+[info] - udf_testlength !!! IGNORED !!!
+[info] - udf_testlength2 !!! IGNORED !!!
+[info] - udf_tinyint (703 milliseconds)
+[info] - udf_to_boolean !!! IGNORED !!!
+[info] - udf_to_byte (1 second, 48 milliseconds)
+[info] - udf_to_date (781 milliseconds)
+[info] - udf_to_double (1 second, 597 milliseconds)
+[info] - udf_to_float (1 second, 110 milliseconds)
+[info] - udf_to_long (1 second, 34 milliseconds)
+[info] - udf_to_short (1 second, 157 milliseconds)
+[info] - udf_to_string !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_input
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_translate
+[info] - udf_translate (1 second, 441 milliseconds)
+[info] - udf_trim (804 milliseconds)
+[info] - udf_ucase (603 milliseconds)
+[info] - udf_unhex !!! IGNORED !!!
+[info] - udf_union !!! IGNORED !!!
+[info] - udf_unix_timestamp (1 second, 6 milliseconds)
+[info] - udf_upper (797 milliseconds)
+[info] - udf_using !!! IGNORED !!!
+[info] - udf_var_pop (3 seconds, 398 milliseconds)
+[info] - udf_var_samp (1 second, 435 milliseconds)
+[info] - udf_variance (894 milliseconds)
+[info] - udf_weekofyear (1 second, 33 milliseconds)
+[info] - udf_xpath (991 milliseconds)
+[info] - udf_xpath_boolean (974 milliseconds)
+[info] - udf_xpath_double (1 second, 224 milliseconds)
+[info] - udf_xpath_float (1 second, 163 milliseconds)
+[info] - udf_xpath_int (1 second, 424 milliseconds)
+[info] - udf_xpath_long (1 second, 36 milliseconds)
+[info] - udf_xpath_short (1 second, 197 milliseconds)
+[info] - udf_xpath_string (1 second, 90 milliseconds)
+[info] - udtf_explode !!! IGNORED !!!
+[info] - udtf_json_tuple !!! IGNORED !!!
+[info] - udtf_parse_url_tuple !!! IGNORED !!!
+[info] - udtf_posexplode !!! IGNORED !!!
+[info] - udtf_stack !!! IGNORED !!!
+[info] - unicode_notation (1 second, 148 milliseconds)
+[info] - union !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
+[info] - union10 (1 second, 8 milliseconds)
+[info] - union11 (938 milliseconds)
+[info] - union12 !!! IGNORED !!!
+[info] - union13 (714 milliseconds)
+[info] - union14 (1 second, 342 milliseconds)
+[info] - union15 (1 second, 48 milliseconds)
+[info] - union16 (1 second, 90 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
+[info] - union17 (1 second, 243 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
+[info] - union18 (1 second, 352 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
+[info] - union19 (1 second, 312 milliseconds)
+[info] - union2 (848 milliseconds)
+[info] - union20 (830 milliseconds)
+[info] - union21 !!! IGNORED !!!
+[info] - union22 (1 second, 558 milliseconds)
+[info] - union23 (1 second, 86 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src3
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src4
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src5
+[info] - union24 (2 seconds, 962 milliseconds)
+[info] - union25 (1 second, 19 milliseconds)
+[info] - union26 (1 second, 426 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/jackson_sev_same
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dim_pho
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/jackson_sev_add
+[info] - union27 (1 second, 299 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/union_subq_union
+[info] - union28 (1 second, 90 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/union_subq_union
+[info] - union29 (909 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/union_out
+[info] - union3 (1 second, 87 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/union_subq_union
+[info] - union30 (1 second, 188 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t3
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t4
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t5
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t6
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t7
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t8
+[info] - union31 (3 seconds, 557 milliseconds)
+[info] - union32 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_src
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_src
+[info] - union33 (1 second, 496 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src10_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src10_2
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src10_3
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src10_4
+[info] - union34 (2 seconds, 174 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
+[info] - union4 (1 second, 133 milliseconds)
+[info] - union5 (809 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
+[info] - union6 (1 second, 308 milliseconds)
+[info] - union7 (1 second, 371 milliseconds)
+[info] - union8 (853 milliseconds)
+[info] - union9 (860 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/union_date_1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/union_date_2
+[info] - union_date (1 second, 100 milliseconds)
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_union_lateral_view
+[info] - union_lateralview (1 second, 96 milliseconds)
+[info] - union_null !!! IGNORED !!!
+[info] - union_ppr (778 milliseconds)
+[info] - union_remove_1 !!! IGNORED !!!
+[info] - union_remove_10 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
+[info] - union_remove_11 (1 second, 575 milliseconds)
+[info] - union_remove_12 !!! IGNORED !!!
+[info] - union_remove_13 !!! IGNORED !!!
+[info] - union_remove_14 !!! IGNORED !!!
+[info] - union_remove_15 !!! IGNORED !!!
+[info] - union_remove_16 !!! IGNORED !!!
+[info] - union_remove_17 !!! IGNORED !!!
+[info] - union_remove_18 !!! IGNORED !!!
+[info] - union_remove_19 !!! IGNORED !!!
+[info] - union_remove_2 !!! IGNORED !!!
+[info] - union_remove_20 !!! IGNORED !!!
+[info] - union_remove_21 !!! IGNORED !!!
+[info] - union_remove_22 !!! IGNORED !!!
+[info] - union_remove_23 !!! IGNORED !!!
+[info] - union_remove_24 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
+[info] - union_remove_3 (1 second, 168 milliseconds)
+[info] - union_remove_4 !!! IGNORED !!!
+[info] - union_remove_5 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl2
+[info] - union_remove_6 (1 second, 599 milliseconds)
+[info] - union_remove_7 !!! IGNORED !!!
+[info] - union_remove_8 !!! IGNORED !!!
+[info] - union_remove_9 !!! IGNORED !!!
+[info] - union_script (921 milliseconds)
+[info] - union_top_level !!! IGNORED !!!
+[info] - union_view !!! IGNORED !!!
+[info] - unset_table_view_property !!! IGNORED !!!
+[info] - varchar_1 !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/varchar_2
+[info] - varchar_2 (1 second, 567 milliseconds)
+[info] - varchar_cast !!! IGNORED !!!
+[info] - varchar_comparison !!! IGNORED !!!
+[info] - varchar_join1 (2 seconds, 558 milliseconds)
+[info] - varchar_nested_types !!! IGNORED !!!
+[info] - varchar_serde !!! IGNORED !!!
+[info] - varchar_udf1 !!! IGNORED !!!
+[info] - varchar_union1 (1 second, 549 milliseconds)
+[info] - vector_between_in !!! IGNORED !!!
+[info] - vector_coalesce !!! IGNORED !!!
+[info] - vector_decimal_aggregate !!! IGNORED !!!
+[info] - vector_decimal_cast !!! IGNORED !!!
+[info] - vector_decimal_expressions !!! IGNORED !!!
+[info] - vector_decimal_mapjoin !!! IGNORED !!!
+[info] - vector_decimal_math_funcs !!! IGNORED !!!
+[info] - vector_left_outer_join !!! IGNORED !!!
+[info] - vector_non_string_partition !!! IGNORED !!!
+[info] - vectorization_0 !!! IGNORED !!!
+[info] - vectorization_1 !!! IGNORED !!!
+[info] - vectorization_10 !!! IGNORED !!!
+[info] - vectorization_11 !!! IGNORED !!!
+[info] - vectorization_12 !!! IGNORED !!!
+[info] - vectorization_13 !!! IGNORED !!!
+[info] - vectorization_14 !!! IGNORED !!!
+[info] - vectorization_15 !!! IGNORED !!!
+[info] - vectorization_16 !!! IGNORED !!!
+[info] - vectorization_2 !!! IGNORED !!!
+[info] - vectorization_3 !!! IGNORED !!!
+[info] - vectorization_4 !!! IGNORED !!!
+[info] - vectorization_5 !!! IGNORED !!!
+[info] - vectorization_6 !!! IGNORED !!!
+[info] - vectorization_7 !!! IGNORED !!!
+[info] - vectorization_8 !!! IGNORED !!!
+[info] - vectorization_9 !!! IGNORED !!!
+[info] - vectorization_decimal_date !!! IGNORED !!!
+[info] - vectorization_div0 !!! IGNORED !!!
+[info] - vectorization_limit !!! IGNORED !!!
+[info] - vectorization_nested_udf !!! IGNORED !!!
+[info] - vectorization_not !!! IGNORED !!!
+[info] - vectorization_part !!! IGNORED !!!
+[info] - vectorization_part_project !!! IGNORED !!!
+[info] - vectorization_pushdown !!! IGNORED !!!
+[info] - vectorization_short_regress !!! IGNORED !!!
+[info] - vectorized_case !!! IGNORED !!!
+[info] - vectorized_casts !!! IGNORED !!!
+[info] - vectorized_context !!! IGNORED !!!
+[info] - vectorized_date_funcs !!! IGNORED !!!
+[info] - vectorized_distinct_gby !!! IGNORED !!!
+[info] - vectorized_mapjoin !!! IGNORED !!!
+[info] - vectorized_math_funcs !!! IGNORED !!!
+[info] - vectorized_nested_mapjoin !!! IGNORED !!!
+[info] - vectorized_rcfile_columnar !!! IGNORED !!!
+[info] - vectorized_shufflejoin !!! IGNORED !!!
+[info] - vectorized_string_funcs !!! IGNORED !!!
+[info] - vectorized_timestamp_funcs !!! IGNORED !!!
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/db1.db/table1
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/db1.db/table2
+[info] - view (2 seconds, 160 milliseconds)
+[info] - view_cast (1 second, 945 milliseconds)
+[info] - view_inputs (1 second, 492 milliseconds)
+[info] - virtual_column !!! IGNORED !!!
+[info] PlanTest:
+[info] PruningSuite:
+[info] - Column pruning - with partitioned table - pruning test (63 milliseconds)
+[info] - Column pruning - with partitioned table - query test (637 milliseconds)
+[info] - Column pruning - with non-partitioned table - pruning test (45 milliseconds)
+[info] - Column pruning - with non-partitioned table - query test (558 milliseconds)
+[info] - Column pruning - with multiple projects - pruning test (34 milliseconds)
+[info] - Column pruning - with multiple projects - query test (601 milliseconds)
+[info] - Column pruning - projects alias substituting - pruning test (34 milliseconds)
+[info] - Column pruning - projects alias substituting - query test (631 milliseconds)
+[info] - Column pruning - filter alias in-lining - pruning test (40 milliseconds)
+[info] - Column pruning - filter alias in-lining - query test (644 milliseconds)
+[info] - Column pruning - without filters - pruning test (36 milliseconds)
+[info] - Column pruning - without filters - query test (657 milliseconds)
+[info] - Column pruning - simple top project without aliases - pruning test (45 milliseconds)
+[info] - Column pruning - simple top project without aliases - query test (625 milliseconds)
+[info] - Column pruning - non-trivial top project with aliases - pruning test (34 milliseconds)
+[info] - Column pruning - non-trivial top project with aliases - query test (606 milliseconds)
+[info] - Partition pruning - non-partitioned, non-trivial project - pruning test (38 milliseconds)
+[info] - Partition pruning - non-partitioned, non-trivial project - query test (651 milliseconds)
+[info] - Partition pruning - non-partitioned table - pruning test (41 milliseconds)
+[info] - Partition pruning - non-partitioned table - query test (569 milliseconds)
+[info] - Partition pruning - with filter on string partition key - pruning test (390 milliseconds)
+[info] - Partition pruning - with filter on string partition key - query test (1 second, 443 milliseconds)
+[info] - Partition pruning - with filter on int partition key - pruning test (51 milliseconds)
+[info] - Partition pruning - with filter on int partition key - query test (1 second, 66 milliseconds)
+[info] - Partition pruning - left only 1 partition - pruning test (42 milliseconds)
+[info] - Partition pruning - left only 1 partition - query test (1 second, 35 milliseconds)
+[info] - Partition pruning - all partitions pruned - pruning test (40 milliseconds)
+[info] - Partition pruning - all partitions pruned - query test (1 second, 119 milliseconds)
+[info] - Partition pruning - pruning with both column key and partition key - pruning test (36 milliseconds)
+[info] - Partition pruning - pruning with both column key and partition key - query test (1 second, 172 milliseconds)
+[info] HiveSerDeSuite:
+Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/serdeins
+[info] - Read and write with LazySimpleSerDe (tab separated) (904 milliseconds)
+[info] - Read with RegexSerDe (762 milliseconds)
+[info] - Read with AvroSerDe (1 second, 809 milliseconds)
+[info] - Read Partitioned with AvroSerDe (1 second, 767 milliseconds)
+[info] HiveResolutionSuite:
+[info] - SPARK-3698: case insensitive test for nested data (50 milliseconds)
+[info] - table.attr (927 milliseconds)
+[info] - database.table (574 milliseconds)
+[info] - database.table table.attr (1 second, 2 milliseconds)
+[info] - database.table table.attr case insensitive (862 milliseconds)
+[info] - alias.attr (592 milliseconds)
+[info] - subquery-alias.attr (664 milliseconds)
+[info] - quoted alias.attr (612 milliseconds)
+[info] - attr (658 milliseconds)
+[info] - alias.star (621 milliseconds)
+[info] - case insensitivity with scala reflection (72 milliseconds)
+[info] - case insensitivity with scala reflection joins !!! IGNORED !!!
+[info] - nested repeated resolution (41 milliseconds)
+[info] HivePlanTest:
+[info] - udf constant folding (66 milliseconds)
+[info] HiveUdfSuite:
+[info] - spark sql udf test that returns a struct (80 milliseconds)
+[info] - SPARK-4785 When called with arguments referring column fields, PMOD throws NPE (61 milliseconds)
+[info] - hive struct udf (217 milliseconds)
+[info] - SPARK-2693 udaf aggregates test (238 milliseconds)
+[info] - Generic UDAF aggregates (242 milliseconds)
+[info] - UDFIntegerToString (1 second, 431 milliseconds)
+[info] - UDFListListInt (1 second, 26 milliseconds)
+[info] - UDFListString (676 milliseconds)
+[info] - UDFStringString (683 milliseconds)
+[info] - UDFTwoListList (686 milliseconds)
+[info] HiveExplainSuite:
+[info] - explain extended command (116 milliseconds)
+[info] - explain create table command (89 milliseconds)
+[info] HiveTableScanSuite:
+[info] - partition_based_table_scan_with_different_serde (1 second, 169 milliseconds)
+[info] - file_split_for_small_table (755 milliseconds)
+[info] - Spark-4041: lowercase issue (341 milliseconds)
+[info] - Spark-4077: timestamp query for null value (283 milliseconds)
+[info] HiveParquetSuite:
+[info] - Case insensitive attribute names (213 milliseconds)
+[info] - SELECT on Parquet table (115 milliseconds)
+[info] - Simple column projection + filter on Parquet table (165 milliseconds)
+[info] - Converting Hive to Parquet Table via saveAsParquetFile (790 milliseconds)
+[info] - INSERT OVERWRITE TABLE Parquet table (511 milliseconds)
+[info] ScalaTest
+[info] Run completed in 23 minutes, 42 seconds.
+[info] Total number of tests run: 1021
+[info] Suites: completed 24, aborted 0
+[info] Tests: succeeded 1021, failed 0, canceled 0, ignored 602, pending 0
+[info] All tests passed.
+[info] Passed: Total 1021, Failed 0, Errors 0, Passed 1021, Ignored 602
+[success] Total time: 1430 s, completed Dec 27, 2014 9:49:25 PM

From d8531efc34fbb831ef04fed640d0624301cfa591 Mon Sep 17 00:00:00 2001
From: Fei Wang
Date: Sat, 27 Dec 2014 22:23:15 +0800
Subject: [PATCH 2/2] Delete test.log

---
 test.log | 3897 ------------------------------------------------------
 1 file changed, 3897 deletions(-)
 delete mode 100644 test.log

diff --git a/test.log b/test.log
deleted file mode 100644
index 4511efe682782..0000000000000
--- a/test.log
+++ /dev/null
@@ -1,3897 +0,0 @@
-Using /home/wf/tools/jdk1.7.0_67 as default JAVA_HOME.
-Note, this will be overridden by -java-home if it is set.
-[info] Loading project definition from /home/wf/code/spark1/project/project
-[info] Loading project definition from /home/kf/.sbt/0.13/staging/ad8e8574a5bcb2d22d23/sbt-pom-reader/project
-[warn] Multiple resolvers having different access mechanism configured with same name 'sbt-plugin-releases'. To avoid conflict, Remove duplicate project resolvers (`resolvers`) or rename publishing resolver (`publishTo`).
-[warn] There may be incompatibilities among your library dependencies.
-[warn] Here are some of the libraries that were evicted:
-[warn]  * com.typesafe.sbt:sbt-git:0.6.1 -> 0.6.2
-[warn]  * com.typesafe.sbt:sbt-site:0.7.0 -> 0.7.1
-[warn] Run 'evicted' to see detailed eviction warnings
-[info] Loading project definition from /home/wf/code/spark1/project
-[warn] There may be incompatibilities among your library dependencies.
-[warn] Here are some of the libraries that were evicted:
-[warn]  * org.apache.maven.wagon:wagon-provider-api:1.0-beta-6 -> 2.2
-[warn] Run 'evicted' to see detailed eviction warnings
-Note: We ignore environment variables, when use of profile is detected in conjunction with environment variable.
-[info] Set current project to spark-parent (in build file:/home/wf/code/spark1/)
-[warn] There may be incompatibilities among your library dependencies.
-[warn] Here are some of the libraries that were evicted:
-[warn]  * commons-net:commons-net:2.2 -> 3.1
-[warn]  * com.google.guava:guava:11.0.2 -> 14.0.1
-[warn] Run 'evicted' to see detailed eviction warnings
-[info] Compiling 1 Scala source to /home/wf/code/spark1/sql/hive/target/scala-2.10/test-classes...
-[warn] there were 1 deprecation warning(s); re-run with -deprecation for details
-[warn] one warning found
-21:25:44.266 WARN org.apache.spark.util.Utils: Your hostname, kf resolves to a loopback address: 127.0.1.1; using 192.168.1.100 instead (on interface eth0)
-21:25:44.267 WARN org.apache.spark.util.Utils: Set SPARK_LOCAL_IP if you need to bind to another address
-21:25:44.938 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-21:26:21.442 WARN org.apache.hadoop.hive.metastore.ObjectStore: Version information not found in metastore. hive.metastore.schema.verification is not enabled so recording the schema version 0.13.1aa
-[info] BigDataBenchmarkSuite:
-[info] - No data files found for BigDataBenchmark tests. !!! IGNORED !!!
-[info] ConcurrentHiveSuite:
-[info] - multiple instances not supported !!! IGNORED !!!
-[info] SQLQuerySuite:
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ctas1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ctas2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ctas3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ctas4
-[info] - CTAS with serde (7 seconds, 85 milliseconds)
-[info] - ordering not in select (320 milliseconds)
-[info] - ordering not in agg (503 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_ctas_1234
-[info] - double nested data (885 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_ctas_123
-[info] - test CTAS (533 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test
-[info] - SPARK-4825 save join to table (1 second, 183 milliseconds)
-[info] - SPARK-3708 Backticks aren't handled correctly is aliases (194 milliseconds)
-[info] - SPARK-3834 Backticks not correctly handled in subquery aliases (181 milliseconds)
-[info] - SPARK-3814 Support Bitwise & operator (170 milliseconds)
-[info] - SPARK-3814 Support Bitwise | operator (184 milliseconds)
-[info] - SPARK-3814 Support Bitwise ^ operator (164 milliseconds)
-[info] - SPARK-3814 Support Bitwise ~ operator (162 milliseconds)
-[info] - SPARK-4154 Query does not work if it has 'not between' in Spark SQL and HQL (301 milliseconds)
-[info] - SPARK-2554 SumDistinct partial aggregation (463 milliseconds)
-[info] ParquetMetastoreSuite:
-[info] - project the partitioning column partitioned_parquet (483 milliseconds)
-[info] - project partitioning and non-partitioning columns partitioned_parquet (419 milliseconds)
-[info] - simple count partitioned_parquet (295 milliseconds)
-[info] - pruned count partitioned_parquet (136 milliseconds)
-[info] - non-existant partition partitioned_parquet (96 milliseconds)
-[info] - multi-partition pruned count partitioned_parquet (177 milliseconds)
-[info] - non-partition predicates partitioned_parquet (342 milliseconds)
-[info] - sum partitioned_parquet (148 milliseconds)
-[info] - hive udfs partitioned_parquet (536 milliseconds)
-[info] - project the partitioning column partitioned_parquet_with_key (397 milliseconds)
-[info] - project partitioning and non-partitioning columns partitioned_parquet_with_key (336 milliseconds)
-[info] - simple count partitioned_parquet_with_key (331 milliseconds)
-[info] - pruned count partitioned_parquet_with_key (153 milliseconds)
-[info] - non-existant partition partitioned_parquet_with_key (87 milliseconds)
-[info] - multi-partition pruned count partitioned_parquet_with_key (216 milliseconds)
-[info] - non-partition predicates partitioned_parquet_with_key (385 milliseconds)
-[info] - sum partitioned_parquet_with_key (175 milliseconds)
-[info] - hive udfs partitioned_parquet_with_key (505 milliseconds)
-[info] - non-part select(*) (114 milliseconds)
-[info] - conversion is working (136 milliseconds)
-[info] ParquetSourceSuite:
-[info] - project the partitioning column partitioned_parquet (153 milliseconds)
-[info] - project partitioning and non-partitioning columns partitioned_parquet (190 milliseconds)
-[info] - simple count partitioned_parquet (121 milliseconds)
-[info] - pruned count partitioned_parquet (82 milliseconds)
-[info] - non-existant partition partitioned_parquet (62 milliseconds)
-[info] - multi-partition pruned count partitioned_parquet (91 milliseconds)
-[info] - non-partition predicates partitioned_parquet (125 milliseconds)
-[info] - sum partitioned_parquet (82 milliseconds)
-[info] - hive udfs partitioned_parquet (163 milliseconds)
-[info] - project the partitioning column partitioned_parquet_with_key (140 milliseconds)
-[info] - project partitioning and non-partitioning columns partitioned_parquet_with_key (178 milliseconds)
-[info] - simple count partitioned_parquet_with_key (116 milliseconds)
-[info] - pruned count partitioned_parquet_with_key (72 milliseconds)
-[info] - non-existant partition partitioned_parquet_with_key (53 milliseconds)
-[info] - multi-partition pruned count partitioned_parquet_with_key (85 milliseconds)
-[info] - non-partition predicates partitioned_parquet_with_key (125 milliseconds)
-[info] - sum partitioned_parquet_with_key (74 milliseconds)
-[info] - hive udfs partitioned_parquet_with_key (167 milliseconds)
-[info] - non-part select(*) (59 milliseconds)
-[info] HiveTypeCoercionSuite:
-[info] - 1 + 1 (14 seconds, 36 milliseconds)
-[info] - 1 + 1.0 (2 seconds, 196 milliseconds)
-[info] - 1 + 1L (988 milliseconds)
-[info] - 1 + 1S (3 seconds, 427 milliseconds)
-[info] - 1 + 1Y (742 milliseconds)
-[info] - 1 + '1' (839 milliseconds)
-[info] - 1.0 + 1 (751 milliseconds)
-[info] - 1.0 + 1.0 (996 milliseconds)
-[info] - 1.0 + 1L (734 milliseconds)
-[info] - 1.0 + 1S (680 milliseconds)
-[info] - 1.0 + 1Y (681 milliseconds)
-[info] - 1.0 + '1' (747 milliseconds)
-[info] - 1L + 1 (727 milliseconds)
-[info] - 1L + 1.0 (1 second, 23 milliseconds)
-[info] - 1L + 1L (692 milliseconds)
-[info] - 1L + 1S (658 milliseconds)
-[info] - 1L + 1Y (713 milliseconds)
-[info] - 1L + '1' (1 second, 73 milliseconds)
-[info] - 1S + 1 (757 milliseconds)
-[info] - 1S + 1.0 (696 milliseconds)
-[info] - 1S + 1L (1 second, 721 milliseconds)
-[info] - 1S + 1S (702 milliseconds)
-[info] - 1S + 1Y (671 milliseconds)
-[info] - 1S + '1' (1 second, 610 milliseconds)
-[info] - 1Y + 1 (751 milliseconds)
-[info] - 1Y + 1.0 (696 milliseconds)
-[info] - 1Y + 1L (685 milliseconds)
-[info] - 1Y + 1S (701 milliseconds)
-[info] - 1Y + 1Y (684 milliseconds)
-[info] - 1Y + '1' (618 milliseconds)
-[info] - '1' + 1 (710 milliseconds)
-[info] - '1' + 1.0 (1 second, 48 milliseconds)
-[info] - '1' + 1L (770 milliseconds)
-[info] - '1' + 1S (682 milliseconds)
-[info] - '1' + 1Y (676 milliseconds)
-[info] - '1' + '1' (638 milliseconds)
-[info] - case when then 1 else null end (660 milliseconds)
-[info] - case when then null else 1 end (654 milliseconds)
-[info] - case when then 1.0 else null end (757 milliseconds)
-[info] - case when then null else 1.0 end (597 milliseconds)
-[info] - case when then 1L else null end (692 milliseconds)
-[info] - case when then null else 1L end (701 milliseconds)
-[info] - case when then 1S else null end (671 milliseconds)
-[info] - case when then null else 1S end (706 milliseconds)
-[info] - case when then 1Y else null end (610 milliseconds)
-[info] - case when then null else 1Y end (958 milliseconds)
-[info] - [SPARK-2210] boolean cast on boolean value should be removed (44 milliseconds)
-[info] QueryTest:
-[info] HiveQuerySuite:
-[info] - constant object inspector for generic udf (822 milliseconds)
-[info] - NaN to Decimal (717 milliseconds)
-[info] - constant null testing (828 milliseconds)
-[info] - constant array (657 milliseconds)
-[info] - count distinct 0 values (704 milliseconds)
-[info] - count distinct 1 value strings (828 milliseconds)
-[info] - count distinct 1 value (894 milliseconds)
-[info] - count distinct 2 values (814 milliseconds)
-[info] - count distinct 2 values including null (916 milliseconds)
-[info] - count distinct 1 value + null (700 milliseconds)
-[info] - count distinct 1 value long (834 milliseconds) -[info] - count distinct 2 values long (725 milliseconds) -[info] - count distinct 1 value + null long (844 milliseconds) -[info] - null case (958 milliseconds) -[info] - single case (744 milliseconds) -[info] - double case (778 milliseconds) -[info] - case else null (868 milliseconds) -[info] - having no references (2 seconds, 779 milliseconds) -[info] - boolean = number (1 second, 353 milliseconds) -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/foo -[info] - CREATE TABLE AS runs once (374 milliseconds) -[info] - between (1 second, 501 milliseconds) -[info] - div (1 second, 505 milliseconds) -[info] - division (73 milliseconds) -[info] - modulus (1 second, 826 milliseconds) -[info] - Query expressed in SQL (79 milliseconds) -[info] - Query expressed in HiveQL (78 milliseconds) -[info] - Query with constant folding the CAST (73 milliseconds) -[info] - Constant Folding Optimization for AVG_SUM_COUNT (2 seconds, 474 milliseconds) -[info] - Cast Timestamp to Timestamp in UDF (1 second, 605 milliseconds) -[info] - Simple Average (2 seconds, 322 milliseconds) -[info] - Simple Average + 1 (1 second, 360 milliseconds) -[info] - Simple Average + 1 with group (1 second, 406 milliseconds) -[info] - string literal (878 milliseconds) -[info] - Escape sequences (987 milliseconds) -[info] - IgnoreExplain (946 milliseconds) -[info] - trivial join where clause (1 second, 138 milliseconds) -[info] - trivial join ON clause (853 milliseconds) -[info] - small.cartesian (1 second, 328 milliseconds) -[info] - length.udf (793 milliseconds) -[info] - partitioned table scan (894 milliseconds) -[info] - hash (765 milliseconds) -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/createdtable -[info] - create table as (1 second, 177 milliseconds) -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/testdb.db/createdtable -[info] - create table as with db name (4 seconds, 52 milliseconds) -[info] - insert table with db name (1 second, 778 milliseconds) -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/createdtable -[info] - insert into and insert overwrite (3 seconds, 444 milliseconds) -[info] - transform (2 seconds, 84 milliseconds) -[info] - LIKE (863 milliseconds) -[info] - DISTINCT (1 second, 79 milliseconds) -[info] - empty aggregate input (720 milliseconds) -[info] - lateral view1 (1 second, 143 milliseconds) -[info] - lateral view2 (739 milliseconds) -[info] - lateral view3 (1 second, 108 milliseconds) -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv1 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv2 -[info] - lateral view4 (1 second, 295 milliseconds) -[info] - lateral view5 (914 milliseconds) -[info] - lateral view6 (665 milliseconds) -[info] - sampling (30 milliseconds) -[info] - SchemaRDD toString (77 milliseconds) -[info] - case statements with key #1 (1 second, 273 milliseconds) -[info] - case statements with key #2 (907 milliseconds) -[info] - case statements with key #3 (869 milliseconds) -[info] - case statements with key #4 (1 second, 118 milliseconds) -[info] - case statements WITHOUT key #1 (795 milliseconds) -[info] - case statements WITHOUT key #2 (704 milliseconds) -[info] - case statements WITHOUT key #3 (812 milliseconds) -[info] - case statements WITHOUT key #4 (712 milliseconds) -[info] - timestamp cast #1 (44 milliseconds) -[info] - timestamp cast #2 (878 milliseconds) -[info] - timestamp cast #3 (774 milliseconds) -[info] - timestamp cast #4 (767 milliseconds) -[info] 
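The HiveTypeCoercionSuite entries summarized above compare Spark SQL's answers for mixed-type arithmetic against Hive's golden output. As a rough illustration of what the "1 + '1'" family asserts (a minimal sketch against a Spark 1.2-era HiveContext; the session below is an assumption, not taken from this log): a string operand in arithmetic is promoted to double, as Hive does, so the expression analyzes and returns 2.0 instead of failing.

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.hive.HiveContext

    // Minimal sketch (assumed session, not from this log). "src" is the
    // kv1.txt-backed test table these suites load.
    val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("coercion-sketch"))
    val hiveContext = new HiveContext(sc)

    // The string operand is coerced to double, mirroring Hive's rule,
    // so this yields 2.0 rather than an analysis error.
    hiveContext.sql("SELECT 1 + '1' FROM src LIMIT 1").collect()
    // => Array([2.0])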
-[info] - timestamp cast #5 (717 milliseconds)
-[info] - timestamp cast #6 (773 milliseconds)
-[info] - timestamp cast #7 (731 milliseconds)
-[info] - timestamp cast #8 (752 milliseconds)
-[info] - select null from table (816 milliseconds)
-[info] - predicates contains an empty AttributeSet() references (44 milliseconds)
-[info] - implement identity function using case statement (87 milliseconds)
-[info] - non-boolean conditions in a CaseWhen are illegal !!! IGNORED !!!
[... remaining HiveQuerySuite results through "SPARK-2263: Insert Map values", all passing ...]
-21:29:04.471 ERROR hive.ql.exec.DDLTask: java.lang.RuntimeException: MetaException(message:java.lang.ClassNotFoundException Class org.apache.hadoop.hive.serde2.TestSerDe not found)
[... DDLTask stack trace; the exception surfaces inside org.scalatest.Assertions.intercept at HiveQuerySuite.scala:635, so this failure is the expected outcome of a negative test ...]
-21:29:04.471 ERROR org.apache.hadoop.hive.ql.Driver: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:java.lang.ClassNotFoundException Class org.apache.hadoop.hive.serde2.TestSerDe not found)
-21:29:04.483 ERROR org.apache.spark.sql.hive.test.TestHive:
-======================
-HIVE FAILURE OUTPUT
-======================
[... several near-verbatim TestHive reset cycles (RESET, the metastore/warehouse SET commands, and kv1.txt loads into default.src and the four default.srcpart partitions), ending with the same DDLTask MetaException ...]
-======================
-END HIVE FAILURE OUTPUT
-======================
-
-[info] - ADD JAR command (176 milliseconds)
-[info] - ADD FILE command (86 milliseconds)
[... remaining HiveQuerySuite results, then HiveMetastoreCatalogSuite, JavaHiveQLSuite, and CachedTableSuite, all passing ...]
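The CachedTableSuite names just above exercise Spark SQL's HiveQL cache statements. A minimal sketch of those statements (reusing the hiveContext from the earlier sketch; the derived table name is an assumption):

    // CACHE TABLE ... AS SELECT registers and caches a query result;
    // UNCACHE TABLE drops it from the in-memory columnar cache;
    // CACHE LAZY TABLE defers materialization until the first read.
    hiveContext.sql("CACHE TABLE src_cached AS SELECT * FROM src")
    hiveContext.sql("SELECT count(*) FROM src_cached").collect() // served from cache once built
    hiveContext.sql("UNCACHE TABLE src_cached")
    hiveContext.sql("CACHE LAZY TABLE src")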
-[info] HiveInspectorSuite:
-[info] - Test wrap SettableStructObjectInspector (3 milliseconds)
-[info] - oi => datatype => oi (4 milliseconds)
[... debug dump ("########1" / "########" / "########2"): the test literals, their wrap/unwrap round-trip values (identical, including the Date printed as 3914-10-23), and the corresponding Hive ObjectInspectors ...]
-3914-10-23 ::: 3914-10-23
-[info] - wrap / unwrap null, constant null and writables (6 milliseconds)
-3914-10-23 ::: 3914-10-23
-[info] - wrap / unwrap primitive writable object inspector (1 millisecond)
-3914-10-23 ::: 3914-10-23
-[info] - wrap / unwrap primitive java object inspector (1 millisecond)
-3914-10-23 ::: 3914-10-23
-[info] - wrap / unwrap Struct Type (1 millisecond)
-[info] - wrap / unwrap Array Type (0 milliseconds)
-[info] - wrap / unwrap Map Type (2 milliseconds)
-[info] StatisticsSuite:
-[info] - parse analyze commands (141 milliseconds)
-[info] - analyze MetastoreRelations (1 second, 723 milliseconds)
-[info] - estimates the size of a test MetastoreRelation (62 milliseconds)
-[info] - auto converts to broadcast hash join, by size estimate of a relation (286 milliseconds)
-[info] InsertIntoHiveTableSuite:
[... InsertIntoHiveTableSuite results, all passing, and the start of HiveCompatibilitySuite through "alter_partition_coltype !!! IGNORED !!!" ...]
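The repeated "3914-10-23 ::: 3914-10-23" lines in the HiveInspectorSuite output above are not a wrap/unwrap bug: the suite's Date literal appears to be built with the deprecated java.sql.Date(int, int, int) constructor, whose year argument is an offset from 1900 and whose month is zero-based. A minimal sketch of that behavior (the exact constructor arguments are an inference from the printed value, not taken from the suite):

    import java.sql.Date

    // Deprecated constructor: year is (calendar year - 1900), month is 0-based.
    // Passing the calendar year 2014 directly therefore lands in 3914:
    val d = new Date(2014, 9, 23)
    println(d)                               // "3914-10-23", matching the log
    println(new Date(2014 - 1900, 9, 23))    // "2014-10-23", the intended date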
-[info] - alter_partition_format_loc (1 second, 724 milliseconds)
[... HiveCompatibilitySuite continues: the remaining alter_*, auto_join*, auto_sortmerge_join*, compute_stats_*, correlationoptimizer*, create_*, date_*, decimal_*, and drop_* tests pass, with scattered HiveMetaStore and Hadoop Configuration WARN lines, while the archive*, ba_table*, char_*, ctas_*, cte_*, dbtxnmgr_*, and several other groups are ignored ...]
-[info] - drop_partitions_filter (2 seconds, 174 milliseconds)
-21:34:15.457 ERROR org.apache.hadoop.hive.metastore.ObjectStore: Direct SQL failed, falling back to ORM
-javax.jdo.JDODataStoreException: Error executing SQL query "select "PARTITIONS"."PART_ID" from "PARTITIONS" inner join "TBLS" on "PARTITIONS"."TBL_ID" = "TBLS"."TBL_ID" and "TBLS"."TBL_NAME" = ? inner join "DBS" on "TBLS"."DB_ID" = "DBS"."DB_ID" and "DBS"."NAME" = ? inner join "PARTITION_KEY_VALS" "FILTER0" on "FILTER0"."PART_ID" = "PARTITIONS"."PART_ID" and "FILTER0"."INTEGER_IDX" = 0 inner join "PARTITION_KEY_VALS" "FILTER1" on "FILTER1"."PART_ID" = "PARTITIONS"."PART_ID" and "FILTER1"."INTEGER_IDX" = 1 where ( (((case when "TBLS"."TBL_NAME" = ? and "DBS"."NAME" = ? then cast("FILTER0"."PART_KEY_VAL" as decimal(21,0)) else null end) = ?) and ((case when "TBLS"."TBL_NAME" = ? and "DBS"."NAME" = ? then cast("FILTER1"."PART_KEY_VAL" as decimal(21,0)) else null end) = ?)) )".
[... DataNucleus/metastore/ScalaTest stack trace: the metastore's direct-SQL fast path fails on Derby and, as the message says, falls back to the ORM path, so the query still succeeds; the NestedThrowablesStackTrace (java.sql.SQLDataException: Invalid character string format for type DECIMAL, from Derby) repeats the same frames and continues past this point ...]
- at
org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:32) - at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241) - at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:32) - at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462) - at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671) - at sbt.ForkMain$Run$2.call(ForkMain.java:294) - at sbt.ForkMain$Run$2.call(ForkMain.java:284) - at java.util.concurrent.FutureTask.run(FutureTask.java:262) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) - at java.lang.Thread.run(Thread.java:745) -Caused by: java.sql.SQLException: Invalid character string format for type DECIMAL. - at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source) - at org.apache.derby.impl.jdbc.SQLExceptionFactory40.wrapArgsForTransportAcrossDRDA(Unknown Source) - ... 118 more -Caused by: ERROR 22018: Invalid character string format for type DECIMAL. - at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) - at org.apache.derby.iapi.types.DataType.invalidFormat(Unknown Source) - at org.apache.derby.iapi.types.DataType.setValue(Unknown Source) - at org.apache.derby.exe.ac5e52817cx014ax8becx98c1x00000b5602b0ec8.e6(Unknown Source) - at org.apache.derby.impl.services.reflect.DirectCall.invoke(Unknown Source) - at org.apache.derby.impl.sql.execute.ProjectRestrictResultSet.getNextRowCore(Unknown Source) - at org.apache.derby.impl.sql.execute.NestedLoopJoinResultSet.getNextRowCore(Unknown Source) - at org.apache.derby.impl.sql.execute.JoinResultSet.openCore(Unknown Source) - at org.apache.derby.impl.sql.execute.JoinResultSet.openCore(Unknown Source) - at org.apache.derby.impl.sql.execute.ProjectRestrictResultSet.openCore(Unknown Source) - at org.apache.derby.impl.sql.execute.BasicNoPutResultSetImpl.open(Unknown Source) - at org.apache.derby.impl.sql.GenericPreparedStatement.executeStmt(Unknown Source) - at org.apache.derby.impl.sql.GenericPreparedStatement.execute(Unknown Source) - ... 112 more -21:34:15.484 ERROR org.apache.hadoop.hive.metastore.ObjectStore: Direct SQL failed, falling back to ORM -javax.jdo.JDODataStoreException: Error executing SQL query "select "PARTITIONS"."PART_ID" from "PARTITIONS" inner join "TBLS" on "PARTITIONS"."TBL_ID" = "TBLS"."TBL_ID" and "TBLS"."TBL_NAME" = ? inner join "DBS" on "TBLS"."DB_ID" = "DBS"."DB_ID" and "DBS"."NAME" = ? inner join "PARTITION_KEY_VALS" "FILTER0" on "FILTER0"."PART_ID" = "PARTITIONS"."PART_ID" and "FILTER0"."INTEGER_IDX" = 0 inner join "PARTITION_KEY_VALS" "FILTER1" on "FILTER1"."PART_ID" = "PARTITIONS"."PART_ID" and "FILTER1"."INTEGER_IDX" = 1 where ( (((case when "TBLS"."TBL_NAME" = ? and "DBS"."NAME" = ? then cast("FILTER0"."PART_KEY_VAL" as decimal(21,0)) else null end) = ?) and ((case when "TBLS"."TBL_NAME" = ? and "DBS"."NAME" = ? then cast("FILTER1"."PART_KEY_VAL" as decimal(21,0)) else null end) = ?)) )". 
+	at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:451)
+	at org.datanucleus.api.jdo.JDOQuery.executeWithArray(JDOQuery.java:321)
+	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilterInternal(MetaStoreDirectSql.java:300)
+	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilter(MetaStoreDirectSql.java:211)
+	at org.apache.hadoop.hive.metastore.ObjectStore$3.getSqlResult(ObjectStore.java:1915)
+	at org.apache.hadoop.hive.metastore.ObjectStore$3.getSqlResult(ObjectStore.java:1909)
+	at org.apache.hadoop.hive.metastore.ObjectStore$GetHelper.run(ObjectStore.java:2208)
+	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByExprInternal(ObjectStore.java:1909)
+	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByExpr(ObjectStore.java:1882)
+	at sun.reflect.GeneratedMethodAccessor212.invoke(Unknown Source)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:606)
+	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108)
+	at com.sun.proxy.$Proxy13.getPartitionsByExpr(Unknown Source)
+	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.drop_partitions_req(HiveMetaStore.java:2318)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:606)
+	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105)
+	at com.sun.proxy.$Proxy14.drop_partitions_req(Unknown Source)
+	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropPartitions(HiveMetaStoreClient.java:709)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:606)
+	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
+	at com.sun.proxy.$Proxy15.dropPartitions(Unknown Source)
+	at org.apache.hadoop.hive.ql.metadata.Hive.dropPartitions(Hive.java:1696)
+	at org.apache.hadoop.hive.ql.metadata.Hive.dropPartitions(Hive.java:1681)
+	at org.apache.hadoop.hive.ql.exec.DDLTask.dropPartitions(DDLTask.java:3860)
+	at org.apache.hadoop.hive.ql.exec.DDLTask.dropTableOrPartitions(DDLTask.java:3854)
+	at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:306)
+	at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:153)
+	at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:85)
+	at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1503)
+	at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1270)
+	at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1088)
+	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:911)
+	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:901)
+	at org.apache.spark.sql.hive.HiveContext.runHive(HiveContext.scala:305)
+	at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:276)
+	at org.apache.spark.sql.hive.test.TestHiveContext.runSqlHive(TestHive.scala:98)
+	at org.apache.spark.sql.hive.execution.HiveNativeCommand.run(HiveNativeCommand.scala:37)
+	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:53)
+	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:53)
+	at org.apache.spark.sql.execution.ExecutedCommand.executeCollect(commands.scala:59)
+	at org.apache.spark.sql.hive.HiveContext$QueryExecution.stringResult(HiveContext.scala:383)
+	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$31.apply(HiveComparisonTest.scala:341)
+	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$31.apply(HiveComparisonTest.scala:339)
+	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
+	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
+	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
+	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
+	at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
+	at scala.collection.AbstractTraversable.map(Traversable.scala:105)
+	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:339)
+	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236)
+	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236)
+	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
+	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
+	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
+	at org.scalatest.Transformer.apply(Transformer.scala:22)
+	at org.scalatest.Transformer.apply(Transformer.scala:20)
+	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
+	at org.scalatest.Suite$class.withFixture(Suite.scala:1122)
+	at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555)
+	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
+	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
+	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
+	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
+	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
+	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:32)
+	at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
+	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:32)
+	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
+	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
+	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
+	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
+	at scala.collection.immutable.List.foreach(List.scala:318)
+	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
+	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
+	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
+	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
+	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
+	at org.scalatest.Suite$class.run(Suite.scala:1424)
+	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
+	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
+	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
+	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
+	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
+	at org.apache.spark.sql.hive.execution.HiveComparisonTest.org$scalatest$BeforeAndAfterAll$$super$run(HiveComparisonTest.scala:40)
+	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
+	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
+	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:32)
+	at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
+	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:32)
+	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462)
+	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671)
+	at sbt.ForkMain$Run$2.call(ForkMain.java:294)
+	at sbt.ForkMain$Run$2.call(ForkMain.java:284)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+NestedThrowablesStackTrace:
+java.sql.SQLDataException: Invalid character string format for type DECIMAL.
+	at org.apache.derby.impl.jdbc.SQLExceptionFactory40.getSQLException(Unknown Source)
+	at org.apache.derby.impl.jdbc.Util.generateCsSQLException(Unknown Source)
+	at org.apache.derby.impl.jdbc.TransactionResourceImpl.wrapInSQLException(Unknown Source)
+	at org.apache.derby.impl.jdbc.TransactionResourceImpl.handleException(Unknown Source)
+	at org.apache.derby.impl.jdbc.EmbedConnection.handleException(Unknown Source)
+	at org.apache.derby.impl.jdbc.ConnectionChild.handleException(Unknown Source)
+	at org.apache.derby.impl.jdbc.EmbedStatement.executeStatement(Unknown Source)
+	at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeStatement(Unknown Source)
+	at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeQuery(Unknown Source)
+	at com.jolbox.bonecp.PreparedStatementHandle.executeQuery(PreparedStatementHandle.java:174)
+	at org.datanucleus.store.rdbms.ParamLoggingPreparedStatement.executeQuery(ParamLoggingPreparedStatement.java:381)
+	at org.datanucleus.store.rdbms.SQLController.executeStatementQuery(SQLController.java:504)
+	at org.datanucleus.store.rdbms.query.SQLQuery.performExecute(SQLQuery.java:280)
+	at org.datanucleus.store.query.Query.executeQuery(Query.java:1786)
+	at org.datanucleus.store.query.AbstractSQLQuery.executeWithArray(AbstractSQLQuery.java:339)
+	at org.datanucleus.api.jdo.JDOQuery.executeWithArray(JDOQuery.java:312)
+	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilterInternal(MetaStoreDirectSql.java:300)
+	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.getPartitionsViaSqlFilter(MetaStoreDirectSql.java:211)
+	at org.apache.hadoop.hive.metastore.ObjectStore$3.getSqlResult(ObjectStore.java:1915)
+	at org.apache.hadoop.hive.metastore.ObjectStore$3.getSqlResult(ObjectStore.java:1909)
+	at org.apache.hadoop.hive.metastore.ObjectStore$GetHelper.run(ObjectStore.java:2208)
+	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByExprInternal(ObjectStore.java:1909)
+	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByExpr(ObjectStore.java:1882)
+	at sun.reflect.GeneratedMethodAccessor212.invoke(Unknown Source)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:606)
+	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108)
+	at com.sun.proxy.$Proxy13.getPartitionsByExpr(Unknown Source)
+	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.drop_partitions_req(HiveMetaStore.java:2318)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:606)
+	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:105)
+	at com.sun.proxy.$Proxy14.drop_partitions_req(Unknown Source)
+	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropPartitions(HiveMetaStoreClient.java:709)
+	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
+	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
+	at java.lang.reflect.Method.invoke(Method.java:606)
+	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
+	at com.sun.proxy.$Proxy15.dropPartitions(Unknown Source)
+	at org.apache.hadoop.hive.ql.metadata.Hive.dropPartitions(Hive.java:1696)
+	at org.apache.hadoop.hive.ql.metadata.Hive.dropPartitions(Hive.java:1681)
+	at org.apache.hadoop.hive.ql.exec.DDLTask.dropPartitions(DDLTask.java:3860)
+	at org.apache.hadoop.hive.ql.exec.DDLTask.dropTableOrPartitions(DDLTask.java:3854)
+	at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:306)
+	at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:153)
+	at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:85)
+	at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1503)
+	at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1270)
+	at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1088)
+	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:911)
+	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:901)
+	at org.apache.spark.sql.hive.HiveContext.runHive(HiveContext.scala:305)
+	at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:276)
+	at org.apache.spark.sql.hive.test.TestHiveContext.runSqlHive(TestHive.scala:98)
+	at org.apache.spark.sql.hive.execution.HiveNativeCommand.run(HiveNativeCommand.scala:37)
+	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:53)
+	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:53)
+	at org.apache.spark.sql.execution.ExecutedCommand.executeCollect(commands.scala:59)
+	at org.apache.spark.sql.hive.HiveContext$QueryExecution.stringResult(HiveContext.scala:383)
+	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$31.apply(HiveComparisonTest.scala:341)
+	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1$$anonfun$31.apply(HiveComparisonTest.scala:339)
+	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
+	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
+	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
+	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
+	at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
+	at scala.collection.AbstractTraversable.map(Traversable.scala:105)
+	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:339)
+	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236)
+	at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236)
+	at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
+	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
+	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
+	at org.scalatest.Transformer.apply(Transformer.scala:22)
+	at org.scalatest.Transformer.apply(Transformer.scala:20)
+	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
+	at org.scalatest.Suite$class.withFixture(Suite.scala:1122)
+	at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555)
+	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
+	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
+	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
+	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
+	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
+	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:32)
+	at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
+	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:32)
+	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
+	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
+	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
+	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
+	at scala.collection.immutable.List.foreach(List.scala:318)
+	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
+	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
+	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
+	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
+	at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
+	at org.scalatest.Suite$class.run(Suite.scala:1424)
+	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
+	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
+	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
+	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
+	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
+	at org.apache.spark.sql.hive.execution.HiveComparisonTest.org$scalatest$BeforeAndAfterAll$$super$run(HiveComparisonTest.scala:40)
+	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
+	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
+	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:32)
+	at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
+	at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:32)
+	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462)
+	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671)
+	at sbt.ForkMain$Run$2.call(ForkMain.java:294)
+	at sbt.ForkMain$Run$2.call(ForkMain.java:284)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+Caused by: java.sql.SQLException: Invalid character string format for type DECIMAL.
+	at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source)
+	at org.apache.derby.impl.jdbc.SQLExceptionFactory40.wrapArgsForTransportAcrossDRDA(Unknown Source)
+	... 119 more
+Caused by: ERROR 22018: Invalid character string format for type DECIMAL.
+	at org.apache.derby.iapi.error.StandardException.newException(Unknown Source)
+	at org.apache.derby.iapi.types.DataType.invalidFormat(Unknown Source)
+	at org.apache.derby.iapi.types.DataType.setValue(Unknown Source)
+	at org.apache.derby.exe.ac5e52817cx014ax8becx98c1x00000b5602b0ec8.e6(Unknown Source)
+	at org.apache.derby.impl.services.reflect.DirectCall.invoke(Unknown Source)
+	at org.apache.derby.impl.sql.execute.ProjectRestrictResultSet.getNextRowCore(Unknown Source)
+	at org.apache.derby.impl.sql.execute.NestedLoopJoinResultSet.getNextRowCore(Unknown Source)
+	at org.apache.derby.impl.sql.execute.JoinResultSet.openCore(Unknown Source)
+	at org.apache.derby.impl.sql.execute.JoinResultSet.openCore(Unknown Source)
+	at org.apache.derby.impl.sql.execute.ProjectRestrictResultSet.openCore(Unknown Source)
+	at org.apache.derby.impl.sql.execute.BasicNoPutResultSetImpl.open(Unknown Source)
+	at org.apache.derby.impl.sql.GenericPreparedStatement.executeStmt(Unknown Source)
+	at org.apache.derby.impl.sql.GenericPreparedStatement.execute(Unknown Source)
+	... 113 more
+21:34:15.618 ERROR org.apache.hadoop.hive.metastore.ObjectStore: Direct SQL failed, falling back to ORM
+javax.jdo.JDODataStoreException: Error executing SQL query "select "PARTITIONS"."PART_ID" from "PARTITIONS" inner join "TBLS" on "PARTITIONS"."TBL_ID" = "TBLS"."TBL_ID" and "TBLS"."TBL_NAME" = ? inner join "DBS" on "TBLS"."DB_ID" = "DBS"."DB_ID" and "DBS"."NAME" = ? inner join "PARTITION_KEY_VALS" "FILTER0" on "FILTER0"."PART_ID" = "PARTITIONS"."PART_ID" and "FILTER0"."INTEGER_IDX" = 0 where (((case when "TBLS"."TBL_NAME" = ? and "DBS"."NAME" = ? then cast("FILTER0"."PART_KEY_VAL" as decimal(21,0)) else null end) = ?))".
+21:34:15.635 ERROR org.apache.hadoop.hive.metastore.ObjectStore: Direct SQL failed, falling back to ORM
+javax.jdo.JDODataStoreException: Error executing SQL query "select "PARTITIONS"."PART_ID" from "PARTITIONS" inner join "TBLS" on "PARTITIONS"."TBL_ID" = "TBLS"."TBL_ID" and "TBLS"."TBL_NAME" = ? inner join "DBS" on "TBLS"."DB_ID" = "DBS"."DB_ID" and "DBS"."NAME" = ? inner join "PARTITION_KEY_VALS" "FILTER0" on "FILTER0"."PART_ID" = "PARTITIONS"."PART_ID" and "FILTER0"."INTEGER_IDX" = 0 where (((case when "TBLS"."TBL_NAME" = ? and "DBS"."NAME" = ? then cast("FILTER0"."PART_KEY_VAL" as decimal(21,0)) else null end) = ?))".
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244) - at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244) - at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) - at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47) - at scala.collection.TraversableLike$class.map(TraversableLike.scala:244) - at scala.collection.AbstractTraversable.map(Traversable.scala:105) - at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply$mcV$sp(HiveComparisonTest.scala:339) - at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236) - at org.apache.spark.sql.hive.execution.HiveComparisonTest$$anonfun$createQueryTest$1.apply(HiveComparisonTest.scala:236) - at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22) - at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85) - at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) - at org.scalatest.Transformer.apply(Transformer.scala:22) - at org.scalatest.Transformer.apply(Transformer.scala:20) - at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166) - at org.scalatest.Suite$class.withFixture(Suite.scala:1122) - at org.scalatest.FunSuite.withFixture(FunSuite.scala:1555) - at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163) - at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175) - at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175) - at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306) - at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175) - at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$runTest(HiveCompatibilitySuite.scala:32) - at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200) - at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.runTest(HiveCompatibilitySuite.scala:32) - at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208) - at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208) - at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413) - at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401) - at scala.collection.immutable.List.foreach(List.scala:318) - at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401) - at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396) - at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483) - at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208) - at org.scalatest.FunSuite.runTests(FunSuite.scala:1555) - at org.scalatest.Suite$class.run(Suite.scala:1424) - at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555) - at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212) - at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212) - at org.scalatest.SuperEngine.runImpl(Engine.scala:545) - at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212) - at org.apache.spark.sql.hive.execution.HiveComparisonTest.org$scalatest$BeforeAndAfterAll$$super$run(HiveComparisonTest.scala:40) - at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257) - at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256) - at 
org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.org$scalatest$BeforeAndAfter$$super$run(HiveCompatibilitySuite.scala:32) - at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241) - at org.apache.spark.sql.hive.execution.HiveCompatibilitySuite.run(HiveCompatibilitySuite.scala:32) - at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462) - at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671) - at sbt.ForkMain$Run$2.call(ForkMain.java:294) - at sbt.ForkMain$Run$2.call(ForkMain.java:284) - at java.util.concurrent.FutureTask.run(FutureTask.java:262) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) - at java.lang.Thread.run(Thread.java:745) -Caused by: java.sql.SQLException: Invalid character string format for type DECIMAL. - at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source) - at org.apache.derby.impl.jdbc.SQLExceptionFactory40.wrapArgsForTransportAcrossDRDA(Unknown Source) - ... 119 more -Caused by: ERROR 22018: Invalid character string format for type DECIMAL. - at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) - at org.apache.derby.iapi.types.DataType.invalidFormat(Unknown Source) - at org.apache.derby.iapi.types.DataType.setValue(Unknown Source) - at org.apache.derby.exe.ac5e52817cx014ax8becx98c1x00000b5602b0ece.e6(Unknown Source) - at org.apache.derby.impl.services.reflect.DirectCall.invoke(Unknown Source) - at org.apache.derby.impl.sql.execute.ProjectRestrictResultSet.getNextRowCore(Unknown Source) - at org.apache.derby.impl.sql.execute.NestedLoopJoinResultSet.getNextRowCore(Unknown Source) - at org.apache.derby.impl.sql.execute.JoinResultSet.openCore(Unknown Source) - at org.apache.derby.impl.sql.execute.ProjectRestrictResultSet.openCore(Unknown Source) - at org.apache.derby.impl.sql.execute.BasicNoPutResultSetImpl.open(Unknown Source) - at org.apache.derby.impl.sql.GenericPreparedStatement.executeStmt(Unknown Source) - at org.apache.derby.impl.sql.GenericPreparedStatement.execute(Unknown Source) - ... 113 more -[info] - drop_partitions_filter2 (1 second, 725 milliseconds) -[info] - drop_partitions_filter3 (1 second, 459 milliseconds) -[info] - drop_partitions_ignore_protection (1 second, 250 milliseconds) -[info] - drop_table (904 milliseconds) -[info] - drop_table2 (1 second, 138 milliseconds) -21:34:21.586 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/tmp/testTempFiles6167202715430906972spark.hive.tmp/drop_table_removes_partition_dirs_table specified for non-external table:test_table -Deleted file:///tmp/testTempFiles6167202715430906972spark.hive.tmp/drop_table_removes_partition_dirs_table2/part=1 -[info] - drop_table_removes_partition_dirs (1 second, 228 milliseconds) -[info] - drop_udf !!! IGNORED !!! -[info] - drop_view (1 second, 4 milliseconds) -[info] - drop_with_concurrency !!! IGNORED !!! -[info] - dynamic_partition_skip_default (1 second, 361 milliseconds) -[info] - dynpart_sort_opt_vectorization !!! IGNORED !!! -[info] - dynpart_sort_optimization !!! IGNORED !!! -[info] - enforce_order !!! IGNORED !!! -[info] - escape1 !!! IGNORED !!! -[info] - escape2 !!! IGNORED !!! -[info] - escape_clusterby1 (845 milliseconds) -[info] - escape_distributeby1 (769 milliseconds) -[info] - escape_orderby1 (623 milliseconds) -[info] - escape_sortby1 (671 milliseconds) -[info] - exchange_partition !!! 
-[info] - exchange_partition2 !!! IGNORED !!!
-[info] - exchange_partition3 !!! IGNORED !!!
-[info] - exim_00_nonpart_empty !!! IGNORED !!!
-[info] - exim_01_nonpart !!! IGNORED !!!
-[info] - exim_02_00_part_empty !!! IGNORED !!!
-[info] - exim_02_part !!! IGNORED !!!
-[info] - exim_03_nonpart_over_compat !!! IGNORED !!!
-[info] - exim_04_all_part !!! IGNORED !!!
-[info] - exim_04_evolved_parts !!! IGNORED !!!
-[info] - exim_05_some_part !!! IGNORED !!!
-[info] - exim_06_one_part !!! IGNORED !!!
-[info] - exim_07_all_part_over_nonoverlap !!! IGNORED !!!
-[info] - exim_08_nonpart_rename !!! IGNORED !!!
-[info] - exim_09_part_spec_nonoverlap !!! IGNORED !!!
-[info] - exim_10_external_managed !!! IGNORED !!!
-[info] - exim_11_managed_external !!! IGNORED !!!
-[info] - exim_12_external_location !!! IGNORED !!!
-[info] - exim_13_managed_location !!! IGNORED !!!
-[info] - exim_14_managed_location_over_existing !!! IGNORED !!!
-[info] - exim_15_external_part !!! IGNORED !!!
-[info] - exim_16_part_external !!! IGNORED !!!
-[info] - exim_17_part_managed !!! IGNORED !!!
-[info] - exim_18_part_external !!! IGNORED !!!
-[info] - exim_19_00_part_external_location !!! IGNORED !!!
-[info] - exim_19_part_external_location !!! IGNORED !!!
-[info] - exim_20_part_managed_location !!! IGNORED !!!
-[info] - exim_21_export_authsuccess !!! IGNORED !!!
-[info] - exim_22_import_exist_authsuccess !!! IGNORED !!!
-[info] - exim_23_import_part_authsuccess !!! IGNORED !!!
-[info] - exim_24_import_nonexist_authsuccess !!! IGNORED !!!
-[info] - exim_hidden_files !!! IGNORED !!!
-[info] - explain_dependency !!! IGNORED !!!
-[info] - explain_dependency2 !!! IGNORED !!!
-[info] - explain_logical !!! IGNORED !!!
-[info] - explain_rearrange (1 second, 59 milliseconds)
-[info] - explode_null !!! IGNORED !!!
-[info] - external_table_with_space_in_location_path !!! IGNORED !!!
-[info] - fetch_aggregation (1 second, 576 milliseconds)
-[info] - file_with_header_footer !!! IGNORED !!!
-[info] - fileformat_mix (1 second, 182 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - fileformat_sequencefile (955 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - fileformat_text (973 milliseconds)
-[info] - filter_join_breaktask (1 second, 631 milliseconds)
-[info] - filter_join_breaktask2 (2 seconds, 113 milliseconds)
-[info] - filter_numeric !!! IGNORED !!!
-[info] - global_limit !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_g1
-[info] - groupby1 (3 seconds, 928 milliseconds)
-[info] - groupby10 !!! IGNORED !!!
-[info] - groupby11 (2 seconds, 439 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby12 (1 second, 543 milliseconds)
-21:34:45.512 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby1_limit (1 second, 413 milliseconds)
-21:34:47.247 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby1_map (2 seconds, 230 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby1_map_nomap (1 second, 527 milliseconds)
-21:34:50.493 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby1_map_skew (1 second, 631 milliseconds)
-21:34:52.334 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_g1
-[info] - groupby1_noskew (1 second, 878 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_g2
-[info] - groupby2 (1 second, 848 milliseconds)
-21:34:55.908 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-[info] - groupby2_limit (878 milliseconds)
-21:34:57.662 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby2_map (2 seconds, 357 milliseconds)
-[info] - groupby2_map_multi_distinct !!! IGNORED !!!
-21:35:01.128 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby2_map_skew (3 seconds, 437 milliseconds)
-21:35:03.863 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_g2
-[info] - groupby2_noskew (3 seconds, 610 milliseconds)
-[info] - groupby2_noskew_multi_distinct !!! IGNORED !!!
-[info] - groupby3 !!! IGNORED !!!
-[info] - groupby3_map !!! IGNORED !!!
-[info] - groupby3_map_multi_distinct !!! IGNORED !!!
-[info] - groupby3_map_skew !!! IGNORED !!!
-[info] - groupby3_noskew !!! IGNORED !!!
-[info] - groupby3_noskew_multi_distinct !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby4 (1 second, 252 milliseconds)
-21:35:07.354 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby4_map (865 milliseconds)
-21:35:08.320 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby4_map_skew (979 milliseconds)
-21:35:09.382 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby4_noskew (1 second, 583 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby5 (1 second, 278 milliseconds)
-21:35:12.071 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby5_map (901 milliseconds)
-21:35:13.035 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby5_map_skew (976 milliseconds)
-21:35:14.324 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby5_noskew (2 seconds, 24 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby6 (1 second, 504 milliseconds)
-21:35:17.464 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby6_map (1 second, 406 milliseconds)
-21:35:18.945 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby6_map_skew (1 second, 527 milliseconds)
-21:35:20.449 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby6_noskew (1 second, 366 milliseconds)
-21:35:22.074 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-21:35:22.489 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - groupby7 (1 second, 995 milliseconds)
-21:35:23.834 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-21:35:24.100 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-21:35:24.535 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - groupby7_map (2 seconds, 688 milliseconds)
-21:35:26.540 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-21:35:26.786 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-21:35:27.200 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - groupby7_map_multi_single_reducer (2 seconds, 620 milliseconds)
-21:35:29.171 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-21:35:29.540 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-21:35:30.020 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - groupby7_map_skew (2 seconds, 847 milliseconds)
-21:35:32.020 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-21:35:32.318 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-21:35:32.760 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-21:35:32.952 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -21:35:32.957 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -21:35:32.973 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -21:35:32.982 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -21:35:32.997 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -21:35:33.006 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -21:35:33.021 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -21:35:33.030 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -21:35:33.037 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -21:35:33.050 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -21:35:33.055 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -21:35:33.071 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -21:35:33.074 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 -[info] - groupby7_noskew (2 seconds, 680 milliseconds) -21:35:35.241 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead. -21:35:35.600 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 -21:35:35.797 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first. -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 -[info] - groupby7_noskew_multi_single_reducer (2 seconds, 32 milliseconds) -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 -[info] - groupby8 (3 seconds, 512 milliseconds) -21:35:40.215 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead. -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2 -[info] - groupby8_map (2 seconds, 436 milliseconds) -21:35:42.695 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead. 
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - groupby8_map_skew (2 seconds, 550 milliseconds)
-21:35:45.254 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - groupby8_noskew (2 seconds, 621 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - groupby9 (6 seconds, 710 milliseconds)
-[info] - groupby_bigdata !!! IGNORED !!!
-[info] - groupby_complex_types !!! IGNORED !!!
-[info] - groupby_complex_types_multi_single_reducer !!! IGNORED !!!
-[info] - groupby_cube1 !!! IGNORED !!!
-[info] - groupby_distinct_samekey (2 seconds, 112 milliseconds)
-[info] - groupby_grouping_id1 (1 second, 25 milliseconds)
-[info] - groupby_grouping_id2 (1 second, 767 milliseconds)
-[info] - groupby_grouping_sets1 (1 second, 498 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t2
-[info] - groupby_grouping_sets2 (1 second, 431 milliseconds)
-[info] - groupby_grouping_sets3 (1 second, 638 milliseconds)
-[info] - groupby_grouping_sets4 (1 second, 549 milliseconds)
-[info] - groupby_grouping_sets5 (1 second, 354 milliseconds)
-21:36:06.957 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby_map_ppr (1 second, 393 milliseconds)
-[info] - groupby_map_ppr_multi_distinct !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - groupby_multi_insert_common_distinct (3 seconds, 337 milliseconds)
-[info] - groupby_multi_single_reducer !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_g2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_g3
-[info] - groupby_multi_single_reducer2 (2 seconds, 167 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2
-[info] - groupby_multi_single_reducer3 (5 seconds, 180 milliseconds)
-[info] - groupby_mutli_insert_common_distinct (740 milliseconds)
-[info] - groupby_neg_float (1 second, 511 milliseconds)
-[info] - groupby_position !!! IGNORED !!!
-[info] - groupby_ppd (818 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - groupby_ppr (1 second, 334 milliseconds)
-[info] - groupby_ppr_multi_distinct !!! IGNORED !!!
-[info] - groupby_resolution !!! IGNORED !!!
-[info] - groupby_rollup1 !!! IGNORED !!!
-[info] - groupby_sort_1 !!! IGNORED !!!
-[info] - groupby_sort_10 (1 second, 722 milliseconds)
-[info] - groupby_sort_11 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
-[info] - groupby_sort_2 (1 second, 594 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl2
-[info] - groupby_sort_3 (2 seconds, 597 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl2
-[info] - groupby_sort_4 (2 seconds, 361 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl2
-[info] - groupby_sort_5 (3 seconds, 705 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
-[info] - groupby_sort_6 (2 seconds, 882 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1/ds=1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
-[info] - groupby_sort_7 (2 seconds, 551 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1/ds=1
-[info] - groupby_sort_8 (1 second, 266 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1/ds=1
-[info] - groupby_sort_9 (1 second, 728 milliseconds)
-[info] - groupby_sort_skew_1 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
-[info] - groupby_sort_test_1 (1 second, 59 milliseconds)
-[info] - having (1 second, 303 milliseconds)
-[info] - implicit_cast1 (895 milliseconds)
-[info] - import_exported_table !!! IGNORED !!!
-[info] - index_auto !!! IGNORED !!!
-[info] - index_bitmap !!! IGNORED !!!
-[info] - index_bitmap1 !!! IGNORED !!!
-[info] - index_bitmap2 !!! IGNORED !!!
-[info] - index_bitmap3 !!! IGNORED !!!
-[info] - index_bitmap_auto !!! IGNORED !!!
-[info] - index_bitmap_rc !!! IGNORED !!!
-[info] - index_compact !!! IGNORED !!!
-[info] - index_compact_1 !!! IGNORED !!!
-[info] - index_compact_2 !!! IGNORED !!!
-[info] - index_compact_3 !!! IGNORED !!!
-[info] - index_creation !!! IGNORED !!!
-[info] - infer_const_type !!! IGNORED !!!
-[info] - init_file !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - innerjoin (1 second, 521 milliseconds)
-[info] - inoutdriver (835 milliseconds)
-[info] - input (801 milliseconds)
-[info] - input0 (686 milliseconds)
-[info] - input1 (801 milliseconds)
-[info] - input10 (678 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - input11 (883 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - input11_limit (1 second, 339 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - input12 (1 second, 382 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - input12_hadoop20 (1 second, 545 milliseconds)
-[info] - input13 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - input14 (1 second, 581 milliseconds)
-[info] - input14_limit !!! IGNORED !!!
-[info] - input15 (864 milliseconds)
-[info] - input16_cc !!! IGNORED !!!
-[info] - input17 !!! IGNORED !!!
-[info] - input18 !!! IGNORED !!!
-[info] - input19 (877 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - input1_limit (1 second, 220 milliseconds)
-[info] - input2 (1 second, 227 milliseconds)
-[info] - input20 !!! IGNORED !!!
-[info] - input21 (1 second, 228 milliseconds)
-[info] - input22 (864 milliseconds)
-[info] - input23 (781 milliseconds)
-[info] - input24 (1 second, 169 milliseconds)
-[info] - input25 (997 milliseconds)
-[info] - input26 (886 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tst/d=2009-01-01
-[info] - input28 (1 second, 528 milliseconds)
-[info] - input2_limit (793 milliseconds)
-[info] - input3 (1 second, 204 milliseconds)
-[info] - input30 !!! IGNORED !!!
-[info] - input31 !!! IGNORED !!!
-[info] - input32 !!! IGNORED !!!
-[info] - input33 !!! IGNORED !!!
-[info] - input34 !!! IGNORED !!!
-[info] - input35 !!! IGNORED !!!
-[info] - input36 !!! IGNORED !!!
-[info] - input37 !!! IGNORED !!!
-[info] - input38 !!! IGNORED !!!
-[info] - input39 !!! IGNORED !!!
-[info] - input39_hadoop20 !!! IGNORED !!!
-[info] - input3_limit !!! IGNORED !!!
-[info] - input4 (1 second, 52 milliseconds)
-[info] - input40 (2 seconds, 286 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_sp
-[info] - input41 (1 second, 71 milliseconds)
-[info] - input43 !!! IGNORED !!!
-[info] - input45 !!! IGNORED !!!
-[info] - input46 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/intable
-[info] - input49 (1 second, 2 milliseconds)
-[info] - input4_cb_delim (771 milliseconds)
-[info] - input4_limit !!! IGNORED !!!
-[info] - input5 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - input6 (1 second, 472 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - input7 (1 second, 62 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - input8 (1 second, 156 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - input9 (1 second, 246 milliseconds)
-[info] - input_columnarserde !!! IGNORED !!!
-[info] - input_dynamicserde !!! IGNORED !!!
-[info] - input_lazyserde !!! IGNORED !!!
-[info] - input_limit (1 second, 106 milliseconds)
-[info] - input_part0 (880 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - input_part1 (1 second, 571 milliseconds)
-[info] - input_part10 (1 second, 10 milliseconds)
-[info] - input_part10_win (1 second, 96 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - input_part2 (1 second, 414 milliseconds)
-[info] - input_part3 (1 second, 316 milliseconds)
-[info] - input_part4 (1 second, 728 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
-[info] - input_part5 (943 milliseconds)
-[info] - input_part6 (950 milliseconds)
-[info] - input_part7 (732 milliseconds)
-[info] - input_part8 (702 milliseconds)
-[info] - input_part9 (831 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest4_sequencefile
-[info] - input_testsequencefile (902 milliseconds)
-[info] - input_testxpath !!! IGNORED !!!
-[info] - input_testxpath2 !!! IGNORED !!!
-[info] - input_testxpath3 !!! IGNORED !!!
-[info] - input_testxpath4 !!! IGNORED !!!
-[info] - inputddl1 (915 milliseconds)
-[info] - inputddl2 (2 seconds, 409 milliseconds)
-[info] - inputddl3 (660 milliseconds)
-[info] - inputddl4 (819 milliseconds)
-[info] - inputddl5 !!! IGNORED !!!
-[info] - inputddl6 (1 second, 310 milliseconds)
-[info] - inputddl7 (3 seconds, 334 milliseconds)
-[info] - inputddl8 (3 seconds, 508 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/insert1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/db2.db/result
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/db1.db/result
-[info] - insert1 (2 seconds, 286 milliseconds)
-[info] - insert1_overwrite_partitions !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/db2.db/destintable/ds=2011-11-11
-[info] - insert2_overwrite_partitions (4 seconds, 370 milliseconds)
-21:37:58.947 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/insert_compressed
-21:37:59.113 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-21:37:59.290 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-[info] - insert_compressed (1 second, 400 milliseconds)
-[info] - insert_into1 !!! IGNORED !!!
-[info] - insert_into2 !!! IGNORED !!!
-[info] - insert_into3 !!! IGNORED !!!
-[info] - insert_into4 !!! IGNORED !!!
-[info] - insert_into5 !!! IGNORED !!!
-[info] - insert_into6 !!! IGNORED !!!
-[info] - insert_overwrite_local_directory_1 !!! IGNORED !!!
-[info] - insertexternal1 !!! IGNORED !!!
-[info] - join0 (728 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join1 (1 second, 72 milliseconds)
-[info] - join10 (903 milliseconds)
-[info] - join11 (801 milliseconds)
-[info] - join12 (1 second, 161 milliseconds)
-[info] - join13 (1 second, 21 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - join14 (1 second, 406 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - join14_hadoop20 (1 second, 414 milliseconds)
-[info] - join15 (909 milliseconds)
-[info] - join16 (847 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - join17 (957 milliseconds)
-[info] - join18 (1 second, 338 milliseconds)
-[info] - join19 (863 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j2
-[info] - join2 (1 second, 63 milliseconds)
-[info] - join20 (1 second, 190 milliseconds)
-[info] - join21 (1 second, 213 milliseconds)
-[info] - join22 (948 milliseconds)
-[info] - join23 (1 second, 65 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tst1
-[info] - join24 (2 seconds, 46 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join25 (1 second, 581 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join26 (1 second, 498 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join27 (1 second, 769 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join28 (1 second, 865 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join29 (1 second, 834 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - join3 (1 second, 355 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join30 (1 second, 572 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join31 (1 second, 645 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join32 (1 second, 715 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j2
-[info] - join32_lessSize (3 seconds, 47 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join33 (1 second, 434 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join34 (1 second, 328 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join35 (2 seconds, 6 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join36 (2 seconds, 773 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join37 (1 second, 293 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp
-[info] - join38 (1 second, 203 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join39 (1 second, 602 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - join4 (1 second, 468 milliseconds)
-[info] - join40 (2 seconds, 323 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/s1
-[info] - join41 (1 second, 666 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - join5 (1 second, 370 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - join6 (1 second, 571 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - join7 (1 second, 581 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - join8 (1 second, 464 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - join9 (1 second, 38 milliseconds)
-21:39:02.456 WARN org.apache.spark.sql.hive.execution.HiveCompatibilitySuite: Simplifications made on unsupported operations for test join_1to1
-[info] - join_1to1 (9 seconds, 893 milliseconds)
-[info] - join_alt_syntax !!! IGNORED !!!
-[info] - join_array (1 second, 315 milliseconds)
-[info] - join_casesensitive (1 second, 306 milliseconds)
-[info] - join_cond_pushdown_1 !!! IGNORED !!!
-[info] - join_cond_pushdown_2 !!! IGNORED !!!
-[info] - join_cond_pushdown_3 !!! IGNORED !!!
-[info] - join_cond_pushdown_4 !!! IGNORED !!!
-[info] - join_cond_pushdown_unqual1 !!! IGNORED !!!
-[info] - join_cond_pushdown_unqual2 !!! IGNORED !!!
-[info] - join_cond_pushdown_unqual3 !!! IGNORED !!!
-[info] - join_cond_pushdown_unqual4 !!! IGNORED !!!
-[info] - join_empty (1 second, 102 milliseconds)
-21:39:16.078 WARN org.apache.spark.sql.hive.execution.HiveCompatibilitySuite: Simplifications made on unsupported operations for test join_filters
-[info] - join_filters (20 seconds, 426 milliseconds)
-[info] - join_filters_overlap !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/hive_foo
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/hive_bar
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/hive_count
-[info] - join_hive_626 (1 second, 283 milliseconds)
-[info] - join_literals !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_copy
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src1_copy
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_j1
-[info] - join_map_ppr (2 seconds, 141 milliseconds)
-[info] - join_merging !!! IGNORED !!!
-[info] - join_nulls (8 seconds, 917 milliseconds)
-[info] - join_nullsafe (5 seconds, 728 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/join_rc1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/join_rc2
-[info] - join_rc (1 second, 256 milliseconds)
-[info] - join_reorder !!! IGNORED !!!
-[info] - join_reorder2 (1 second, 652 milliseconds)
-[info] - join_reorder3 (2 seconds, 44 milliseconds)
-[info] - join_reorder4 (1 second, 634 milliseconds)
-[info] - join_star (3 seconds, 366 milliseconds)
-[info] - join_thrift !!! IGNORED !!!
-[info] - join_vc !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp_pyang_lv
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp_pyang_src_rcfile
-[info] - lateral_view (2 seconds, 688 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/array_valued_src
-[info] - lateral_view_cp (1 second, 37 milliseconds)
-[info] - lateral_view_noalias !!! IGNORED !!!
-[info] - lateral_view_ppd (1 second, 345 milliseconds)
-[info] - lb_fs_stats !!! IGNORED !!!
-[info] - leadlag !!! IGNORED !!!
-[info] - leadlag_queries !!! IGNORED !!!
-[info] - leftsemijoin (1 second, 950 milliseconds)
-21:40:12.617 WARN org.apache.spark.sql.execution.SetCommand: Property mapred.reduce.tasks is deprecated, automatically converted to spark.sql.shuffle.partitions instead.
-[info] - leftsemijoin_mr (1 second, 222 milliseconds)
-[info] - limit_partition_metadataonly !!! IGNORED !!!
-[info] - limit_pushdown !!! IGNORED !!!
-[info] - limit_pushdown_negative (1 second, 127 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_l1
-[info] - lineage1 (1 second, 420 milliseconds)
-[info] - literal_decimal !!! IGNORED !!!
-[info] - literal_double (784 milliseconds)
-[info] - literal_ints (732 milliseconds)
-[info] - literal_string (737 milliseconds)
-[info] - load_binary_data !!! IGNORED !!!
-[info] - load_dyn_part1 (1 second, 484 milliseconds)
-[info] - load_dyn_part10 (1 second, 576 milliseconds)
-21:40:21.635 WARN org.apache.hadoop.io.compress.DefaultCodec: DefaultCodec.createOutputStream() may leak memory. Create a compressor first.
-[info] - load_dyn_part11 (1 second, 225 milliseconds)
-[info] - load_dyn_part12 (1 second, 229 milliseconds)
-[info] - load_dyn_part13 (1 second, 184 milliseconds)
-[info] - load_dyn_part14 (1 second, 390 milliseconds)
-[info] - load_dyn_part14_win (1 second, 551 milliseconds)
-[info] - load_dyn_part15 !!! IGNORED !!!
-[info] - load_dyn_part2 (1 second, 292 milliseconds)
-[info] - load_dyn_part3 (1 second, 404 milliseconds)
-[info] - load_dyn_part4 (1 second, 585 milliseconds)
-[info] - load_dyn_part5 (14 seconds, 550 milliseconds)
-[info] - load_dyn_part6 (7 seconds, 923 milliseconds)
-[info] - load_dyn_part7 (2 seconds, 413 milliseconds)
-[info] - load_dyn_part8 (2 seconds, 30 milliseconds)
-[info] - load_dyn_part9 (1 second, 176 milliseconds)
-[info] - load_exist_part_authsuccess !!! IGNORED !!!
-[info] - load_file_with_space_in_the_name (1 second, 575 milliseconds)
-[info] - load_fs !!! IGNORED !!!
-[info] - load_fs2 !!! IGNORED !!!
-[info] - load_fs_overwrite !!! IGNORED !!!
-[info] - load_hdfs_file_with_space_in_the_name !!! IGNORED !!!
-[info] - load_nonpart_authsuccess !!! IGNORED !!!
-[info] - load_part_authsuccess !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/hive_test_src
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/hive_test_dst/pcol1=test_part/pcol2=test_Part
-[info] - loadpart1 (1 second, 826 milliseconds)
-[info] - loadpart2 !!! IGNORED !!!
-[info] - loadpart_err !!! IGNORED !!!
-[info] - lock1 !!! IGNORED !!!
-[info] - lock2 !!! IGNORED !!!
-[info] - lock3 !!! IGNORED !!!
-[info] - lock4 !!! IGNORED !!!
-[info] - louter_join_ppr (1 second, 398 milliseconds)
-[info] - macro !!! IGNORED !!!
-[info] - mapjoin1 !!! IGNORED !!!
-[info] - mapjoin_addjar !!! IGNORED !!!
-[info] - mapjoin_decimal !!! IGNORED !!!
-[info] - mapjoin_distinct (1 second, 301 milliseconds)
-[info] - mapjoin_filter_on_outerjoin (1 second, 293 milliseconds)
-[info] - mapjoin_mapjoin (1 second, 455 milliseconds)
-[info] - mapjoin_memcheck !!! IGNORED !!!
-[info] - mapjoin_subquery (1 second, 264 milliseconds)
-[info] - mapjoin_subquery2 (1 second, 478 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest_1
-[info] - mapjoin_test_outer (2 seconds, 507 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - mapreduce1 (1 second, 53 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - mapreduce2 (1 second, 689 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - mapreduce3 (899 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - mapreduce4 (1 second, 217 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - mapreduce5 (867 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - mapreduce6 (959 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - mapreduce7 (872 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - mapreduce8 (937 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - merge1 (1 second, 645 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test1
-[info] - merge2 (1 second, 772 milliseconds)
-[info] - merge3 !!! IGNORED !!!
-[info] - merge4 !!! IGNORED !!!
-[info] - merge_dynamic_partition !!! IGNORED !!!
-[info] - merge_dynamic_partition2 !!! IGNORED !!!
-[info] - merge_dynamic_partition3 !!! IGNORED !!!
-[info] - merge_dynamic_partition4 !!! IGNORED !!!
-[info] - merge_dynamic_partition5 !!! IGNORED !!!
-[info] - mergejoins (1 second, 122 milliseconds)
-[info] - metadata_only_queries !!! IGNORED !!!
-[info] - metadata_only_queries_with_filters !!! IGNORED !!!
-[info] - metadataonly1 !!! IGNORED !!!
-[info] - mi !!! IGNORED !!!
-[info] - mrr !!! IGNORED !!!
-21:41:26.777 WARN org.apache.spark.sql.hive.execution.HiveCompatibilitySuite: Simplifications made on unsupported operations for test multiMapJoin1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smalltbl1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smalltbl2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smalltbl3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smalltbl4
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/bigtbl
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/bigtbl
-[info] - multiMapJoin1 (4 seconds, 161 milliseconds)
-21:41:30.940 WARN org.apache.spark.sql.hive.execution.HiveCompatibilitySuite: Simplifications made on unsupported operations for test multiMapJoin2
-[info] - multiMapJoin2 (3 seconds, 492 milliseconds)
-[info] - multi_insert !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2
-[info] - multi_insert_gby (3 seconds, 321 milliseconds)
-[info] - multi_insert_gby2 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/e1
-[info] - multi_insert_gby3 (1 second, 800 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_10
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_lv4
-[info] - multi_insert_lateral_view (4 seconds, 316 milliseconds)
-[info] - multi_insert_move_tasks_share_dependencies !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src11
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src12
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src13
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src14
-[info] - multi_join_union (1 second, 515 milliseconds)
-[info] - multigroupby_singlemr (1 second, 238 milliseconds)
-[info] - nested_complex !!! IGNORED !!!
-[info] - nestedvirtual !!! IGNORED !!!
-[info] - newline !!! IGNORED !!!
-[info] - no_hooks !!! IGNORED !!!
-[info] - noalias_subq1 (935 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ambiguous
-[info] - nomore_ambiguous_table_col (922 milliseconds)
-[info] - nonblock_op_deduplicate (1 second, 17 milliseconds)
-[info] - nonmr_fetch !!! IGNORED !!!
-[info] - nonmr_fetch_threshold !!! IGNORED !!!
-[info] - nonreserved_keywords_input37 !!! IGNORED !!!
-[info] - nonreserved_keywords_insert_into1 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - notable_alias1 (1 second, 628 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - notable_alias2 (1 second, 291 milliseconds)
-[info] - notable_alias3 !!! IGNORED !!!
-[info] - null_cast !!! IGNORED !!!
-[info] - null_column !!! IGNORED !!!
-[info] - nullformat !!! IGNORED !!!
-[info] - nullformatCTAS !!! IGNORED !!!
-[info] - nullformatdir !!! IGNORED !!!
-[info] - nullgroup (985 milliseconds)
-[info] - nullgroup2 (986 milliseconds)
-[info] - nullgroup3 (2 seconds, 147 milliseconds)
-[info] - nullgroup4 (1 second, 141 milliseconds)
-[info] - nullgroup4_multi_distinct (893 milliseconds)
-[info] - nullgroup5 (1 second, 149 milliseconds)
-[info] - nullinput (1 second, 76 milliseconds)
-[info] - nullinput2 (702 milliseconds)
-[info] - nullscript (898 milliseconds)
-[info] - num_op_type_conv !!! IGNORED !!!
-[info] - optional_outer (1 second, 674 milliseconds)
-[info] - orc_analyze !!! IGNORED !!!
-[info] - orc_create !!! IGNORED !!!
-[info] - orc_createas1 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_orc
-21:42:05.492 WARN org.apache.spark.scheduler.TaskSetManager: Stage 5620 contains a task of very large size (249 KB). The maximum recommended task size is 100 KB.
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_orc
-[info] - orc_dictionary_threshold (1 second, 599 milliseconds)
-[info] - orc_diff_part_cols !!! IGNORED !!!
-[info] - orc_diff_part_cols2 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_orc
-[info] - orc_empty_files (917 milliseconds)
-[info] - orc_empty_strings !!! IGNORED !!!
-[info] - orc_min_max !!! IGNORED !!!
-[info] - orc_ppd_char !!! IGNORED !!!
-[info] - orc_ppd_date !!! IGNORED !!!
-[info] - orc_ppd_decimal !!! IGNORED !!!
-[info] - orc_ppd_varchar !!! IGNORED !!!
-[info] - orc_split_elimination !!! IGNORED !!!
-[info] - orc_vectorization_ppd !!! IGNORED !!!
-[info] - order (804 milliseconds)
-[info] - order2 (634 milliseconds)
-[info] - order_within_subquery !!! IGNORED !!!
-[info] - outer_join_ppr (1 second, 52 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_a
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_b
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_a
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src_b
-[info] - parallel (1 second, 558 milliseconds)
-[info] - parallel_orderby !!! IGNORED !!!
-[info] - parenthesis_star_by (1 second, 75 milliseconds)
-[info] - parquet_create !!! IGNORED !!!
-[info] - parquet_ctas !!! IGNORED !!!
-[info] - parquet_partitioned !!! IGNORED !!!
-[info] - parquet_types !!! IGNORED !!!
-[info] - partInit !!! IGNORED !!!
-[info] - part_inherit_tbl_props (824 milliseconds)
-[info] - part_inherit_tbl_props_empty (918 milliseconds)
-[info] - part_inherit_tbl_props_with_star (934 milliseconds)
-[info] - partcols1 (1 second, 552 milliseconds)
-[info] - partition_date (2 seconds, 933 milliseconds)
-[info] - partition_date2 !!! IGNORED !!!
-[info] - partition_decode_name !!! IGNORED !!!
-[info] - partition_schema1 (1 second, 96 milliseconds)
-[info] - partition_serde_format (1 second, 139 milliseconds)
-[info] - partition_special_char !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tab1/month=June/day=2008-01-01
-[info] - partition_type_check (1 second, 638 milliseconds)
-[info] - partition_varchar1 (2 seconds, 756 milliseconds)
-[info] - partition_varchar2 !!! IGNORED !!!
-[info] - partition_vs_table_metadata !!! IGNORED !!!
-[info] - partition_wise_fileformat !!! IGNORED !!!
-[info] - partition_wise_fileformat10 !!! IGNORED !!!
-[info] - partition_wise_fileformat11 !!! IGNORED !!!
-[info] - partition_wise_fileformat12 !!! IGNORED !!!
-[info] - partition_wise_fileformat13 !!! IGNORED !!!
-[info] - partition_wise_fileformat14 !!! IGNORED !!!
-[info] - partition_wise_fileformat15 !!! IGNORED !!!
-[info] - partition_wise_fileformat16 !!! IGNORED !!!
-[info] - partition_wise_fileformat17 !!! IGNORED !!!
-[info] - partition_wise_fileformat18 !!! IGNORED !!!
-[info] - partition_wise_fileformat2 !!! IGNORED !!!
-[info] - partition_wise_fileformat3 !!! IGNORED !!!
-[info] - partition_wise_fileformat4 (1 second, 311 milliseconds)
-[info] - partition_wise_fileformat5 (1 second, 722 milliseconds)
-[info] - partition_wise_fileformat6 (1 second, 532 milliseconds)
-[info] - partition_wise_fileformat7 (1 second, 763 milliseconds)
-[info] - partition_wise_fileformat8 !!! IGNORED !!!
-[info] - partition_wise_fileformat9 (1 second, 335 milliseconds)
-[info] - pcr !!! IGNORED !!!
-[info] - plan_json (758 milliseconds)
-[info] - ppd1 (727 milliseconds)
-[info] - ppd2 (1 second, 7 milliseconds)
-[info] - ppd_clusterby (1 second, 91 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppd_constant_expr
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppd_constant_expr
-[info] - ppd_constant_expr (1 second, 101 milliseconds)
-[info] - ppd_constant_where (915 milliseconds)
-[info] - ppd_gby (773 milliseconds)
-[info] - ppd_gby2 (875 milliseconds)
-[info] - ppd_gby_join (717 milliseconds)
-[info] - ppd_join (1 second, 151 milliseconds)
-[info] - ppd_join2 (1 second, 152 milliseconds)
-[info] - ppd_join3 (1 second, 47 milliseconds)
-[info] - ppd_join4 !!! IGNORED !!!
-[info] - ppd_join_filter (1 second, 989 milliseconds)
-[info] - ppd_multi_insert !!! IGNORED !!!
-[info] - ppd_outer_join1 (973 milliseconds)
-[info] - ppd_outer_join2 (797 milliseconds)
-[info] - ppd_outer_join3 (1 second, 33 milliseconds)
-[info] - ppd_outer_join4 (1 second, 26 milliseconds)
-[info] - ppd_outer_join5 (1 second, 202 milliseconds)
-[info] - ppd_random (726 milliseconds)
-[info] - ppd_repeated_alias (1 second, 108 milliseconds)
-[info] - ppd_transform !!! IGNORED !!!
-[info] - ppd_udf_case !!! IGNORED !!!
-[info] - ppd_udf_col (868 milliseconds)
-[info] - ppd_udtf !!! IGNORED !!!
-[info] - ppd_union (1 second, 711 milliseconds)
-[info] - ppd_union_view !!! IGNORED !!!
-[info] - ppd_vc !!! IGNORED !!!
-[info] - ppr_allchildsarenull (815 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=1234
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=1224
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=1214
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=12+4
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=12.4
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=12%3A4
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=12%254
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/ppr_test/ds=12%2A4
-[info] - ppr_pushdown (3 seconds, 280 milliseconds)
-[info] - ppr_pushdown2 (3 seconds, 781 milliseconds)
-[info] - ppr_pushdown3 (1 second, 177 milliseconds)
-[info] - print_header !!! IGNORED !!!
-[info] - progress_1 (810 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1
-[info] - protectmode (2 seconds, 557 milliseconds)
-[info] - ptf !!! IGNORED !!!
-[info] - ptf_decimal !!! IGNORED !!!
-[info] - ptf_general_queries !!! IGNORED !!!
-[info] - ptf_matchpath !!! IGNORED !!!
-[info] - ptf_rcfile !!! IGNORED !!!
-[info] - ptf_register_tblfn !!! IGNORED !!!
-[info] - ptf_seqfile !!! IGNORED !!!
-[info] - push_or (1 second, 210 milliseconds)
-[info] - query_result_fileformat !!! IGNORED !!!
-[info] - query_with_semi (1 second, 498 milliseconds)
-[info] - quote1 (4 seconds, 635 milliseconds)
-[info] - quote2 (1 second, 180 milliseconds)
-[info] - quotedid_alter !!! IGNORED !!!
-[info] - quotedid_basic !!! IGNORED !!!
-[info] - quotedid_partition !!! IGNORED !!!
-[info] - quotedid_skew !!! IGNORED !!!
-[info] - quotedid_smb !!! IGNORED !!!
-[info] - quotedid_tblproperty !!! IGNORED !!!
-[info] - rand_partitionpruner1 !!! IGNORED !!!
-[info] - rand_partitionpruner2 !!! IGNORED !!!
-[info] - rand_partitionpruner3 !!! IGNORED !!!
-[info] - rcfile_bigdata !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/columntable
-[info] - rcfile_columnar (870 milliseconds)
-[info] - rcfile_createas1 !!! IGNORED !!!
-[info] - rcfile_default_format !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/rcfiletablelazydecompress
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/rcfiletablelazydecompress
-[info] - rcfile_lazydecompress (1 second, 433 milliseconds)
-[info] - rcfile_merge1 !!! IGNORED !!!
-[info] - rcfile_merge2 !!! IGNORED !!!
-[info] - rcfile_merge3 !!! IGNORED !!!
-[info] - rcfile_merge4 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src1_rc
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1_rc
-[info] - rcfile_null_value (1 second, 526 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_src
-[info] - rcfile_toleratecorruptions (1 second, 66 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/rcfile_uniontable
-[info] - rcfile_union (913 milliseconds)
-[info] - recursive_dir !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/bucket5_1
-[info] - reduce_deduplicate (1 second, 70 milliseconds)
-[info] - reduce_deduplicate_exclude_gby (940 milliseconds)
-[info] - reduce_deduplicate_exclude_join (633 milliseconds)
-[info] - reduce_deduplicate_extended (2 seconds, 387 milliseconds)
-[info] - reducesink_dedup (931 milliseconds)
-[info] - regex_col !!! IGNORED !!!
-[info] - regexp_extract !!! IGNORED !!!
-[info] - remote_script !!! IGNORED !!!
-[info] - rename_column (2 seconds, 318 milliseconds)
-[info] - rename_external_partition_location !!! IGNORED !!!
-[info] - rename_partition_location !!! IGNORED !!!
-[info] - rename_table_location !!! IGNORED !!!
-[info] - reset_conf !!! IGNORED !!!
-[info] - root_dir_external_table !!! IGNORED !!!
-[info] - router_join_ppr (1 second, 710 milliseconds)
-[info] - sample1 !!! IGNORED !!!
-[info] - sample2 !!! IGNORED !!!
-[info] - sample3 !!! IGNORED !!!
-[info] - sample4 !!! IGNORED !!!
-[info] - sample5 !!! IGNORED !!!
-[info] - sample6 !!! IGNORED !!!
-[info] - sample7 !!! IGNORED !!!
-[info] - sample8 !!! IGNORED !!!
-[info] - sample9 !!! IGNORED !!!
-[info] - sample_islocalmode_hook !!! IGNORED !!!
-[info] - sample_islocalmode_hook_hadoop20 !!! IGNORED !!!
-[info] - schemeAuthority !!! IGNORED !!!
-[info] - schemeAuthority2 !!! IGNORED !!!
-[info] - script_env_var1 !!! IGNORED !!!
-[info] - script_env_var2 !!! IGNORED !!!
-[info] - script_pipe !!! IGNORED !!!
-[info] - scriptfile1 !!! IGNORED !!!
-[info] - scriptfile1_win !!! IGNORED !!!
-[info] - select_as_omitted (825 milliseconds)
-[info] - select_dummy_source !!! IGNORED !!!
-[info] - select_transform_hint !!! IGNORED !!!
-[info] - select_unquote_and (1 second, 105 milliseconds)
-[info] - select_unquote_not (1 second, 892 milliseconds)
-[info] - select_unquote_or (1 second, 333 milliseconds)
-[info] - semicolon !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t3
-[info] - semijoin (4 seconds, 528 milliseconds)
-[info] - serde_regex (1 second, 421 milliseconds)
-[info] - serde_reported_schema (701 milliseconds)
-[info] - serde_user_properties !!! IGNORED !!!
-[info] - set_processor_namespaces !!! IGNORED !!!
-[info] - set_variable_sub (980 milliseconds)
-[info] - show_columns (1 second, 474 milliseconds)
-[info] - show_create_table_alter (1 second, 219 milliseconds)
-[info] - show_create_table_db_table (902 milliseconds)
-21:43:49.858 WARN org.apache.hadoop.hive.metastore.HiveMetaStore: Location: file:/tmp/testTempFiles6167202715430906972spark.hive.tmp/tmp_showcrt1 specified for non-external table:tmp_showcrt1
-[info] - show_create_table_delimited (783 milliseconds)
-[info] - show_create_table_partitioned (808 milliseconds)
-[info] - show_create_table_serde (941 milliseconds)
-[info] - show_create_table_view (800 milliseconds)
-[info] - show_describe_func_quotes (789 milliseconds)
-[info] - show_functions (815 milliseconds)
-[info] - show_indexes_edge_cases !!! IGNORED !!!
-[info] - show_indexes_syntax !!! IGNORED !!!
-[info] - show_partitions (778 milliseconds)
-[info] - show_roles !!! IGNORED !!!
-[info] - show_tables !!! IGNORED !!!
-[info] - show_tablestatus !!! IGNORED !!!
-[info] - show_tblproperties (959 milliseconds)
-[info] - showparts !!! IGNORED !!!
-[info] - skewjoin_noskew !!! IGNORED !!!
-[info] - skewjoin_union_remove_1 !!! IGNORED !!!
-[info] - skewjoin_union_remove_2 !!! IGNORED !!!
-[info] - skewjoinopt1 !!! IGNORED !!!
-[info] - skewjoinopt10 !!! IGNORED !!!
-[info] - skewjoinopt11 !!! IGNORED !!!
-[info] - skewjoinopt12 !!! IGNORED !!!
-[info] - skewjoinopt13 (2 seconds, 420 milliseconds)
-[info] - skewjoinopt14 !!! IGNORED !!!
-[info] - skewjoinopt15 !!! IGNORED !!!
-[info] - skewjoinopt16 !!! IGNORED !!!
-[info] - skewjoinopt17 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
-[info] - skewjoinopt18 (1 second, 802 milliseconds)
-[info] - skewjoinopt19 !!! IGNORED !!!
-[info] - skewjoinopt2 !!! IGNORED !!!
-[info] - skewjoinopt20 !!! IGNORED !!!
-[info] - skewjoinopt3 !!! IGNORED !!!
-[info] - skewjoinopt4 !!! IGNORED !!!
-[info] - skewjoinopt5 !!! IGNORED !!!
-[info] - skewjoinopt6 !!! IGNORED !!!
-[info] - skewjoinopt7 !!! IGNORED !!!
-[info] - skewjoinopt8 !!! IGNORED !!!
-[info] - skewjoinopt9 (1 second, 356 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_mapjoin9_results
-[info] - smb_mapjoin9 (2 seconds, 170 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_3
-[info] - smb_mapjoin_1 (2 seconds, 97 milliseconds)
-[info] - smb_mapjoin_10 (1 second, 339 milliseconds)
-[info] - smb_mapjoin_11 !!! IGNORED !!!
-[info] - smb_mapjoin_12 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table4
-[info] - smb_mapjoin_13 (1 second, 622 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tbl2
-[info] - smb_mapjoin_14 (4 seconds, 50 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table2
-[info] - smb_mapjoin_15 (2 seconds, 544 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table2
-[info] - smb_mapjoin_16 (1 second, 141 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table4
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table5
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table6
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table7
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_table8
-[info] - smb_mapjoin_17 (3 seconds, 495 milliseconds)
-[info] - smb_mapjoin_18 !!! IGNORED !!!
-[info] - smb_mapjoin_19 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_3
-[info] - smb_mapjoin_2 (2 seconds, 512 milliseconds)
-[info] - smb_mapjoin_20 !!! IGNORED !!!
-[info] - smb_mapjoin_21 (1 second, 786 milliseconds)
-[info] - smb_mapjoin_22 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_3
-[info] - smb_mapjoin_25 (1 second, 489 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_3
-[info] - smb_mapjoin_3 (2 seconds, 92 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_3
-[info] - smb_mapjoin_4 (3 seconds, 601 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket_3
-[info] - smb_mapjoin_5 (3 seconds, 274 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/normal_join_results
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/normal_join_results
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results
-[info] - smb_mapjoin_6 (3 seconds, 355 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results_empty_bigtable
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results_empty_bigtable
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_join_results
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/normal_join_results
-[info] - smb_mapjoin_7 (2 seconds, 473 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1
file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_3 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_1 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_2 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/smb_bucket4_3 -[info] - smb_mapjoin_8 (5 seconds, 989 milliseconds) -[info] - sort (824 milliseconds) -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc1 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc2 -[info] - sort_merge_join_desc_1 (1 second, 183 milliseconds) -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc1 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc2 -[info] - sort_merge_join_desc_2 (1 second, 623 milliseconds) -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc1 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc2 -[info] - sort_merge_join_desc_3 (1 second, 295 milliseconds) -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc1 -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_desc2 -[info] - sort_merge_join_desc_4 (1 second, 166 milliseconds) -[info] - sort_merge_join_desc_5 (1 second, 247 milliseconds) -[info] - sort_merge_join_desc_6 (1 second, 443 milliseconds) -[info] - sort_merge_join_desc_7 (1 second, 741 milliseconds) -[info] - source !!! IGNORED !!! -[info] - split !!! IGNORED !!! -[info] - split_sample !!! IGNORED !!! -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/stats_non_partitioned -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/stats_non_partitioned -[info] - stats0 (2 seconds, 205 milliseconds) -[info] - stats2 !!! IGNORED !!! -[info] - stats3 !!! IGNORED !!! -[info] - stats4 !!! IGNORED !!! -[info] - stats5 !!! IGNORED !!! -[info] - stats6 !!! IGNORED !!! -[info] - stats7 !!! IGNORED !!! -[info] - stats8 !!! IGNORED !!! -[info] - stats9 !!! IGNORED !!! -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable -[info] - stats_aggregator_error_1 (1 second, 520 milliseconds) -[info] - stats_counter !!! IGNORED !!! -[info] - stats_counter_partitioned !!! IGNORED !!! -[info] - stats_empty_dyn_part !!! IGNORED !!! -[info] - stats_empty_partition (982 milliseconds) -[info] - stats_invalidation !!! IGNORED !!! -[info] - stats_noscan_1 !!! IGNORED !!! -[info] - stats_noscan_2 !!! IGNORED !!! -[info] - stats_only_null !!! IGNORED !!! -[info] - stats_partscan_1 !!! IGNORED !!! -[info] - stats_partscan_1_23 !!! IGNORED !!! -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable -Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable -[info] - stats_publisher_error_1 (1 second, 796 milliseconds) -[info] - statsfs !!! IGNORED !!! -[info] - str_to_map !!! IGNORED !!! -[info] - subq !!! IGNORED !!! -[info] - subq2 (782 milliseconds) -[info] - subq_where_serialization !!! IGNORED !!! -[info] - subquery_alias !!! IGNORED !!! -[info] - subquery_exists !!! IGNORED !!! -[info] - subquery_exists_having !!! IGNORED !!! 
-[info] - subquery_in !!! IGNORED !!!
-[info] - subquery_in_having !!! IGNORED !!!
-[info] - subquery_multiinsert !!! IGNORED !!!
-[info] - subquery_notexists !!! IGNORED !!!
-[info] - subquery_notexists_having !!! IGNORED !!!
-[info] - subquery_notin !!! IGNORED !!!
-[info] - subquery_notin_having !!! IGNORED !!!
-[info] - subquery_unqualcolumnrefs !!! IGNORED !!!
-[info] - subquery_views !!! IGNORED !!!
-[info] - table_access_keys_stats !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmp_select
-[info] - tablename_with_select (822 milliseconds)
-[info] - test_boolean_whereclause !!! IGNORED !!!
-[info] - tez_dml !!! IGNORED !!!
-[info] - tez_fsstat !!! IGNORED !!!
-[info] - tez_insert_overwrite_local_directory_1 !!! IGNORED !!!
-[info] - tez_join_tests !!! IGNORED !!!
-[info] - tez_joins_explain !!! IGNORED !!!
-[info] - tez_schema_evolution !!! IGNORED !!!
-[info] - tez_union !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_1
-[info] - timestamp_1 (3 seconds, 957 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_2
-[info] - timestamp_2 (3 seconds, 966 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_3
-[info] - timestamp_3 (1 second, 309 milliseconds)
-[info] - timestamp_comparison (921 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_lazy
-[info] - timestamp_lazy (1 second, 423 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_null
-[info] - timestamp_null (921 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_udf
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/timestamp_udf_string
-[info] - timestamp_udf (1 second, 985 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tstsrc
-[info] - touch (1 second, 810 milliseconds)
-[info] - transform1 !!! IGNORED !!!
-[info] - transform2 !!! IGNORED !!!
-[info] - transform_ppr1 (750 milliseconds)
-[info] - transform_ppr2 (694 milliseconds)
-[info] - truncate_column !!! IGNORED !!!
-[info] - truncate_column_merge !!! IGNORED !!!
-[info] - truncate_table (1 second, 787 milliseconds)
-[info] - type_cast_1 (879 milliseconds)
-[info] - type_conversions_1 !!! IGNORED !!!
-[info] - type_widening (813 milliseconds)
-[info] - udaf_collect_set (993 milliseconds)
-[info] - udaf_context_ngrams !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/covar_tab
-[info] - udaf_corr (1 second, 276 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/covar_tab
-[info] - udaf_covar_pop (1 second, 193 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/covar_tab
-[info] - udaf_covar_samp (1 second, 115 milliseconds)
-[info] - udaf_histogram_numeric (827 milliseconds)
-[info] - udaf_ngrams !!! IGNORED !!!
-[info] - udaf_number_format !!! IGNORED !!!
-[info] - udaf_percentile !!! IGNORED !!!
-[info] - udaf_percentile_approx_20 !!! IGNORED !!!
-[info] - udaf_percentile_approx_23 !!! IGNORED !!!
-[info] - udaf_sum_list !!! IGNORED !!!
-[info] - udf1 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - udf2 (1 second, 211 milliseconds)
-[info] - udf3 !!! IGNORED !!!
-[info] - udf4 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - udf5 (944 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - udf6 (1 second, 87 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - udf7 (945 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - udf8 (1 second, 92 milliseconds)
-[info] - udf9 (756 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - udf_10_trims (810 milliseconds)
-[info] - udf_E (973 milliseconds)
-[info] - udf_PI (947 milliseconds)
-[info] - udf_abs (821 milliseconds)
-[info] - udf_acos (3 seconds, 443 milliseconds)
-[info] - udf_add (2 seconds, 407 milliseconds)
-[info] - udf_array (2 seconds, 65 milliseconds)
-[info] - udf_array_contains (747 milliseconds)
-[info] - udf_ascii (1 second, 256 milliseconds)
-[info] - udf_asin (877 milliseconds)
-[info] - udf_atan (944 milliseconds)
-[info] - udf_avg (602 milliseconds)
-[info] - udf_between !!! IGNORED !!!
-[info] - udf_bigint (750 milliseconds)
-[info] - udf_bin (752 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/bitmap_test
-[info] - udf_bitmap_and (997 milliseconds)
-[info] - udf_bitmap_empty (725 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/bitmap_test
-[info] - udf_bitmap_or (1 second, 699 milliseconds)
-[info] - udf_bitwise_and (814 milliseconds)
-[info] - udf_bitwise_not (1 second, 100 milliseconds)
-[info] - udf_bitwise_or (652 milliseconds)
-[info] - udf_bitwise_xor (725 milliseconds)
-[info] - udf_boolean (662 milliseconds)
-[info] - udf_case_column_pruning !!! IGNORED !!!
-[info] - udf_case_thrift !!! IGNORED !!!
-[info] - udf_ceil (695 milliseconds)
-[info] - udf_ceiling (648 milliseconds)
-[info] - udf_coalesce !!! IGNORED !!!
-[info] - udf_compare_java_string !!! IGNORED !!!
-[info] - udf_concat (887 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - udf_concat_insert1 (871 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - udf_concat_insert2 (915 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - udf_concat_ws (1 second, 96 milliseconds)
-[info] - udf_context_aware !!! IGNORED !!!
-[info] - udf_conv (1 second, 217 milliseconds)
-[info] - udf_cos (804 milliseconds)
-[info] - udf_count (1 second, 269 milliseconds)
-[info] - udf_current_database !!! IGNORED !!!
-[info] - udf_date_add (1 second, 52 milliseconds)
-[info] - udf_date_sub (699 milliseconds)
-[info] - udf_datediff (680 milliseconds)
-[info] - udf_day (639 milliseconds)
-[info] - udf_dayofmonth (651 milliseconds)
-[info] - udf_degrees (1 second, 37 milliseconds)
-[info] - udf_div (668 milliseconds)
-[info] - udf_divide !!! IGNORED !!!
-[info] - udf_double (706 milliseconds)
-[info] - udf_elt (893 milliseconds)
-[info] - udf_equal (1 second, 400 milliseconds)
-[info] - udf_exp (705 milliseconds)
-[info] - udf_explode !!! IGNORED !!!
-[info] - udf_field (1 second, 240 milliseconds)
-[info] - udf_find_in_set (2 seconds, 609 milliseconds)
-[info] - udf_float (743 milliseconds)
-[info] - udf_floor (687 milliseconds)
-[info] - udf_format_number (1 second, 488 milliseconds)
-[info] - udf_from_unixtime (622 milliseconds)
-[info] - udf_get_json_object !!! IGNORED !!!
-[info] - udf_greaterthan (798 milliseconds)
-[info] - udf_greaterthanorequal (775 milliseconds)
-[info] - udf_hash (764 milliseconds)
-[info] - udf_hex (869 milliseconds)
-[info] - udf_hour !!! IGNORED !!!
-[info] - udf_if (830 milliseconds)
-[info] - udf_in !!! IGNORED !!!
-[info] - udf_in_file !!! IGNORED !!!
-[info] - udf_index (628 milliseconds)
-[info] - udf_inline !!! IGNORED !!!
-[info] - udf_instr (838 milliseconds)
-[info] - udf_int (1 second, 41 milliseconds)
-[info] - udf_isnotnull (646 milliseconds)
-[info] - udf_isnull (605 milliseconds)
-[info] - udf_isnull_isnotnull !!! IGNORED !!!
-[info] - udf_java_method (792 milliseconds)
-[info] - udf_lcase (714 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-[info] - udf_length (1 second, 267 milliseconds)
-[info] - udf_lessthan (864 milliseconds)
-[info] - udf_lessthanorequal (728 milliseconds)
-[info] - udf_like (793 milliseconds)
-[info] - udf_ln (730 milliseconds)
-[info] - udf_locate (747 milliseconds)
-[info] - udf_log (721 milliseconds)
-[info] - udf_log10 (1 second, 11 milliseconds)
-[info] - udf_log2 (696 milliseconds)
-[info] - udf_logic_java_boolean !!! IGNORED !!!
-[info] - udf_lower (689 milliseconds)
-[info] - udf_lpad (935 milliseconds)
-[info] - udf_ltrim (743 milliseconds)
-[info] - udf_map (796 milliseconds)
-[info] - udf_map_keys !!! IGNORED !!!
-[info] - udf_map_values !!! IGNORED !!!
-[info] - udf_max !!! IGNORED !!!
-[info] - udf_min !!! IGNORED !!!
-[info] - udf_minute (721 milliseconds)
-[info] - udf_modulo (692 milliseconds)
-[info] - udf_month (578 milliseconds)
-[info] - udf_named_struct (788 milliseconds)
-[info] - udf_negative (974 milliseconds)
-[info] - udf_not (823 milliseconds)
-[info] - udf_notequal (823 milliseconds)
-[info] - udf_notop (1 second, 95 milliseconds)
-[info] - udf_nvl (746 milliseconds)
-[info] - udf_or (654 milliseconds)
-[info] - udf_parse_url (735 milliseconds)
-[info] - udf_percentile !!! IGNORED !!!
-[info] - udf_pmod (1 second, 210 milliseconds)
-[info] - udf_positive (710 milliseconds)
-[info] - udf_pow (742 milliseconds)
-[info] - udf_power (591 milliseconds)
-[info] - udf_printf !!! IGNORED !!!
-[info] - udf_radians (1 second, 5 milliseconds)
-[info] - udf_rand (981 milliseconds)
-[info] - udf_reflect !!! IGNORED !!!
-[info] - udf_reflect2 !!! IGNORED !!!
-[info] - udf_regexp (1 second, 84 milliseconds)
-[info] - udf_regexp_extract (1 second, 367 milliseconds)
-[info] - udf_regexp_replace (1 second, 529 milliseconds)
-[info] - udf_repeat (958 milliseconds)
-[info] - udf_reverse !!! IGNORED !!!
-[info] - udf_rlike (734 milliseconds)
-[info] - udf_round_2 !!! IGNORED !!!
-[info] - udf_round_3 (881 milliseconds)
-[info] - udf_rpad (831 milliseconds)
-[info] - udf_rtrim (608 milliseconds)
-[info] - udf_second (813 milliseconds)
-[info] - udf_sentences !!! IGNORED !!!
-[info] - udf_sign (1 second, 45 milliseconds)
-[info] - udf_sin (792 milliseconds)
-[info] - udf_size !!! IGNORED !!!
-[info] - udf_smallint (652 milliseconds)
-[info] - udf_space (810 milliseconds)
-[info] - udf_split !!! IGNORED !!!
-[info] - udf_sqrt (730 milliseconds)
-[info] - udf_std (983 milliseconds)
-[info] - udf_stddev (789 milliseconds)
-[info] - udf_stddev_pop (737 milliseconds)
-[info] - udf_stddev_samp (656 milliseconds)
-[info] - udf_string (672 milliseconds)
-[info] - udf_struct (725 milliseconds)
-[info] - udf_substr !!! IGNORED !!!
-[info] - udf_substring (677 milliseconds)
-[info] - udf_subtract (658 milliseconds)
-[info] - udf_sum (715 milliseconds)
-[info] - udf_tan (908 milliseconds)
-[info] - udf_testlength !!! IGNORED !!!
-[info] - udf_testlength2 !!! IGNORED !!!
-[info] - udf_tinyint (703 milliseconds)
-[info] - udf_to_boolean !!! IGNORED !!!
-[info] - udf_to_byte (1 second, 48 milliseconds)
-[info] - udf_to_date (781 milliseconds)
-[info] - udf_to_double (1 second, 597 milliseconds)
-[info] - udf_to_float (1 second, 110 milliseconds)
-[info] - udf_to_long (1 second, 34 milliseconds)
-[info] - udf_to_short (1 second, 157 milliseconds)
-[info] - udf_to_string !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_input
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/table_translate
-[info] - udf_translate (1 second, 441 milliseconds)
-[info] - udf_trim (804 milliseconds)
-[info] - udf_ucase (603 milliseconds)
-[info] - udf_unhex !!! IGNORED !!!
-[info] - udf_union !!! IGNORED !!!
-[info] - udf_unix_timestamp (1 second, 6 milliseconds)
-[info] - udf_upper (797 milliseconds)
-[info] - udf_using !!! IGNORED !!!
-[info] - udf_var_pop (3 seconds, 398 milliseconds)
-[info] - udf_var_samp (1 second, 435 milliseconds)
-[info] - udf_variance (894 milliseconds)
-[info] - udf_weekofyear (1 second, 33 milliseconds)
-[info] - udf_xpath (991 milliseconds)
-[info] - udf_xpath_boolean (974 milliseconds)
-[info] - udf_xpath_double (1 second, 224 milliseconds)
-[info] - udf_xpath_float (1 second, 163 milliseconds)
-[info] - udf_xpath_int (1 second, 424 milliseconds)
-[info] - udf_xpath_long (1 second, 36 milliseconds)
-[info] - udf_xpath_short (1 second, 197 milliseconds)
-[info] - udf_xpath_string (1 second, 90 milliseconds)
-[info] - udtf_explode !!! IGNORED !!!
-[info] - udtf_json_tuple !!! IGNORED !!!
-[info] - udtf_parse_url_tuple !!! IGNORED !!!
-[info] - udtf_posexplode !!! IGNORED !!!
-[info] - udtf_stack !!! IGNORED !!!
-[info] - unicode_notation (1 second, 148 milliseconds)
-[info] - union !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
-[info] - union10 (1 second, 8 milliseconds)
-[info] - union11 (938 milliseconds)
-[info] - union12 !!! IGNORED !!!
-[info] - union13 (714 milliseconds)
-[info] - union14 (1 second, 342 milliseconds)
-[info] - union15 (1 second, 48 milliseconds)
-[info] - union16 (1 second, 90 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - union17 (1 second, 243 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - union18 (1 second, 352 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dest2
-[info] - union19 (1 second, 312 milliseconds)
-[info] - union2 (848 milliseconds)
-[info] - union20 (830 milliseconds)
-[info] - union21 !!! IGNORED !!!
-[info] - union22 (1 second, 558 milliseconds)
-[info] - union23 (1 second, 86 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src4
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src5
-[info] - union24 (2 seconds, 962 milliseconds)
-[info] - union25 (1 second, 19 milliseconds)
-[info] - union26 (1 second, 426 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/jackson_sev_same
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/dim_pho
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/jackson_sev_add
-[info] - union27 (1 second, 299 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/union_subq_union
-[info] - union28 (1 second, 90 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/union_subq_union
-[info] - union29 (909 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/union_out
-[info] - union3 (1 second, 87 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/union_subq_union
-[info] - union30 (1 second, 188 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t4
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t5
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t6
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t7
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/t8
-[info] - union31 (3 seconds, 557 milliseconds)
-[info] - union32 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_src
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_src
-[info] - union33 (1 second, 496 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src10_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src10_2
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src10_3
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/src10_4
-[info] - union34 (2 seconds, 174 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
-[info] - union4 (1 second, 133 milliseconds)
-[info] - union5 (809 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/tmptable
-[info] - union6 (1 second, 308 milliseconds)
-[info] - union7 (1 second, 371 milliseconds)
-[info] - union8 (853 milliseconds)
-[info] - union9 (860 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/union_date_1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/union_date_2
-[info] - union_date (1 second, 100 milliseconds)
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/test_union_lateral_view
-[info] - union_lateralview (1 second, 96 milliseconds)
-[info] - union_null !!! IGNORED !!!
-[info] - union_ppr (778 milliseconds)
-[info] - union_remove_1 !!! IGNORED !!!
-[info] - union_remove_10 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
-[info] - union_remove_11 (1 second, 575 milliseconds)
-[info] - union_remove_12 !!! IGNORED !!!
-[info] - union_remove_13 !!! IGNORED !!!
-[info] - union_remove_14 !!! IGNORED !!!
-[info] - union_remove_15 !!! IGNORED !!!
-[info] - union_remove_16 !!! IGNORED !!!
-[info] - union_remove_17 !!! IGNORED !!!
-[info] - union_remove_18 !!! IGNORED !!!
-[info] - union_remove_19 !!! IGNORED !!!
-[info] - union_remove_2 !!! IGNORED !!!
-[info] - union_remove_20 !!! IGNORED !!!
-[info] - union_remove_21 !!! IGNORED !!!
-[info] - union_remove_22 !!! IGNORED !!!
-[info] - union_remove_23 !!! IGNORED !!!
-[info] - union_remove_24 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
-[info] - union_remove_3 (1 second, 168 milliseconds)
-[info] - union_remove_4 !!! IGNORED !!!
-[info] - union_remove_5 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/outputtbl2
-[info] - union_remove_6 (1 second, 599 milliseconds)
-[info] - union_remove_7 !!! IGNORED !!!
-[info] - union_remove_8 !!! IGNORED !!!
-[info] - union_remove_9 !!! IGNORED !!!
-[info] - union_script (921 milliseconds)
-[info] - union_top_level !!! IGNORED !!!
-[info] - union_view !!! IGNORED !!!
-[info] - unset_table_view_property !!! IGNORED !!!
-[info] - varchar_1 !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/varchar_2
-[info] - varchar_2 (1 second, 567 milliseconds)
-[info] - varchar_cast !!! IGNORED !!!
-[info] - varchar_comparison !!! IGNORED !!!
-[info] - varchar_join1 (2 seconds, 558 milliseconds)
-[info] - varchar_nested_types !!! IGNORED !!!
-[info] - varchar_serde !!! IGNORED !!!
-[info] - varchar_udf1 !!! IGNORED !!!
-[info] - varchar_union1 (1 second, 549 milliseconds)
-[info] - vector_between_in !!! IGNORED !!!
-[info] - vector_coalesce !!! IGNORED !!!
-[info] - vector_decimal_aggregate !!! IGNORED !!!
-[info] - vector_decimal_cast !!! IGNORED !!!
-[info] - vector_decimal_expressions !!! IGNORED !!!
-[info] - vector_decimal_mapjoin !!! IGNORED !!!
-[info] - vector_decimal_math_funcs !!! IGNORED !!!
-[info] - vector_left_outer_join !!! IGNORED !!!
-[info] - vector_non_string_partition !!! IGNORED !!!
-[info] - vectorization_0 !!! IGNORED !!!
-[info] - vectorization_1 !!! IGNORED !!!
-[info] - vectorization_10 !!! IGNORED !!!
-[info] - vectorization_11 !!! IGNORED !!!
-[info] - vectorization_12 !!! IGNORED !!!
-[info] - vectorization_13 !!! IGNORED !!!
-[info] - vectorization_14 !!! IGNORED !!!
-[info] - vectorization_15 !!! IGNORED !!!
-[info] - vectorization_16 !!! IGNORED !!!
-[info] - vectorization_2 !!! IGNORED !!!
-[info] - vectorization_3 !!! IGNORED !!!
-[info] - vectorization_4 !!! IGNORED !!!
-[info] - vectorization_5 !!! IGNORED !!!
-[info] - vectorization_6 !!! IGNORED !!!
-[info] - vectorization_7 !!! IGNORED !!!
-[info] - vectorization_8 !!! IGNORED !!!
-[info] - vectorization_9 !!! IGNORED !!!
-[info] - vectorization_decimal_date !!! IGNORED !!!
-[info] - vectorization_div0 !!! IGNORED !!!
-[info] - vectorization_limit !!! IGNORED !!!
-[info] - vectorization_nested_udf !!! IGNORED !!!
-[info] - vectorization_not !!! IGNORED !!!
-[info] - vectorization_part !!! IGNORED !!!
-[info] - vectorization_part_project !!! IGNORED !!!
-[info] - vectorization_pushdown !!! IGNORED !!!
-[info] - vectorization_short_regress !!! IGNORED !!!
-[info] - vectorized_case !!! IGNORED !!!
-[info] - vectorized_casts !!! IGNORED !!!
-[info] - vectorized_context !!! IGNORED !!!
-[info] - vectorized_date_funcs !!! IGNORED !!!
-[info] - vectorized_distinct_gby !!! IGNORED !!!
-[info] - vectorized_mapjoin !!! IGNORED !!!
-[info] - vectorized_math_funcs !!! IGNORED !!!
-[info] - vectorized_nested_mapjoin !!! IGNORED !!!
-[info] - vectorized_rcfile_columnar !!! IGNORED !!!
-[info] - vectorized_shufflejoin !!! IGNORED !!!
-[info] - vectorized_string_funcs !!! IGNORED !!!
-[info] - vectorized_timestamp_funcs !!! IGNORED !!!
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/db1.db/table1
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/db1.db/table2
-[info] - view (2 seconds, 160 milliseconds)
-[info] - view_cast (1 second, 945 milliseconds)
-[info] - view_inputs (1 second, 492 milliseconds)
-[info] - virtual_column !!! IGNORED !!!
-[info] PlanTest:
-[info] PruningSuite:
-[info] - Column pruning - with partitioned table - pruning test (63 milliseconds)
-[info] - Column pruning - with partitioned table - query test (637 milliseconds)
-[info] - Column pruning - with non-partitioned table - pruning test (45 milliseconds)
-[info] - Column pruning - with non-partitioned table - query test (558 milliseconds)
-[info] - Column pruning - with multiple projects - pruning test (34 milliseconds)
-[info] - Column pruning - with multiple projects - query test (601 milliseconds)
-[info] - Column pruning - projects alias substituting - pruning test (34 milliseconds)
-[info] - Column pruning - projects alias substituting - query test (631 milliseconds)
-[info] - Column pruning - filter alias in-lining - pruning test (40 milliseconds)
-[info] - Column pruning - filter alias in-lining - query test (644 milliseconds)
-[info] - Column pruning - without filters - pruning test (36 milliseconds)
-[info] - Column pruning - without filters - query test (657 milliseconds)
-[info] - Column pruning - simple top project without aliases - pruning test (45 milliseconds)
-[info] - Column pruning - simple top project without aliases - query test (625 milliseconds)
-[info] - Column pruning - non-trivial top project with aliases - pruning test (34 milliseconds)
-[info] - Column pruning - non-trivial top project with aliases - query test (606 milliseconds)
-[info] - Partition pruning - non-partitioned, non-trivial project - pruning test (38 milliseconds)
-[info] - Partition pruning - non-partitioned, non-trivial project - query test (651 milliseconds)
-[info] - Partition pruning - non-partitioned table - pruning test (41 milliseconds)
-[info] - Partition pruning - non-partitioned table - query test (569 milliseconds)
-[info] - Partition pruning - with filter on string partition key - pruning test (390 milliseconds)
-[info] - Partition pruning - with filter on string partition key - query test (1 second, 443 milliseconds)
-[info] - Partition pruning - with filter on int partition key - pruning test (51 milliseconds)
-[info] - Partition pruning - with filter on int partition key - query test (1 second, 66 milliseconds)
-[info] - Partition pruning - left only 1 partition - pruning test (42 milliseconds)
-[info] - Partition pruning - left only 1 partition - query test (1 second, 35 milliseconds)
-[info] - Partition pruning - all partitions pruned - pruning test (40 milliseconds)
-[info] - Partition pruning - all partitions pruned - query test (1 second, 119 milliseconds)
-[info] - Partition pruning - pruning with both column key and partition key - pruning test (36 milliseconds)
-[info] - Partition pruning - pruning with both column key and partition key - query test (1 second, 172 milliseconds)
-[info] HiveSerDeSuite:
-Deleted file:///tmp/sparkHiveWarehouse7773807525406879524/serdeins
-[info] - Read and write with LazySimpleSerDe (tab separated) (904 milliseconds)
-[info] - Read with RegexSerDe (762 milliseconds)
-[info] - Read with AvroSerDe (1 second, 809 milliseconds)
-[info] - Read Partitioned with AvroSerDe (1 second, 767 milliseconds)
-[info] HiveResolutionSuite:
-[info] - SPARK-3698: case insensitive test for nested data (50 milliseconds)
-[info] - table.attr (927 milliseconds)
-[info] - database.table (574 milliseconds)
-[info] - database.table table.attr (1 second, 2 milliseconds)
-[info] - database.table table.attr case insensitive (862 milliseconds)
-[info] - alias.attr (592 milliseconds)
-[info] - subquery-alias.attr (664 milliseconds)
-[info] - quoted alias.attr (612 milliseconds)
-[info] - attr (658 milliseconds)
-[info] - alias.star (621 milliseconds)
-[info] - case insensitivity with scala reflection (72 milliseconds)
-[info] - case insensitivity with scala reflection joins !!! IGNORED !!!
-[info] - nested repeated resolution (41 milliseconds)
-[info] HivePlanTest:
-[info] - udf constant folding (66 milliseconds)
-[info] HiveUdfSuite:
-[info] - spark sql udf test that returns a struct (80 milliseconds)
-[info] - SPARK-4785 When called with arguments referring column fields, PMOD throws NPE (61 milliseconds)
-[info] - hive struct udf (217 milliseconds)
-[info] - SPARK-2693 udaf aggregates test (238 milliseconds)
-[info] - Generic UDAF aggregates (242 milliseconds)
-[info] - UDFIntegerToString (1 second, 431 milliseconds)
-[info] - UDFListListInt (1 second, 26 milliseconds)
-[info] - UDFListString (676 milliseconds)
-[info] - UDFStringString (683 milliseconds)
-[info] - UDFTwoListList (686 milliseconds)
-[info] HiveExplainSuite:
-[info] - explain extended command (116 milliseconds)
-[info] - explain create table command (89 milliseconds)
-[info] HiveTableScanSuite:
-[info] - partition_based_table_scan_with_different_serde (1 second, 169 milliseconds)
-[info] - file_split_for_small_table (755 milliseconds)
-[info] - Spark-4041: lowercase issue (341 milliseconds)
-[info] - Spark-4077: timestamp query for null value (283 milliseconds)
-[info] HiveParquetSuite:
-[info] - Case insensitive attribute names (213 milliseconds)
-[info] - SELECT on Parquet table (115 milliseconds)
-[info] - Simple column projection + filter on Parquet table (165 milliseconds)
-[info] - Converting Hive to Parquet Table via saveAsParquetFile (790 milliseconds)
-[info] - INSERT OVERWRITE TABLE Parquet table (511 milliseconds)
-[info] ScalaTest
-[info] Run completed in 23 minutes, 42 seconds.
-[info] Total number of tests run: 1021
-[info] Suites: completed 24, aborted 0
-[info] Tests: succeeded 1021, failed 0, canceled 0, ignored 602, pending 0
-[info] All tests passed.
-[info] Passed: Total 1021, Failed 0, Errors 0, Passed 1021, Ignored 602
-[success] Total time: 1430 s, completed Dec 27, 2014 9:49:25 PM