@@ -46,5 +46,4 @@ class ClientSuite extends FunSuite with Matchers {
// Invalid syntax.
ClientArguments.isValidJarUrl("hdfs:") should be (false)
}

}
5 changes: 3 additions & 2 deletions dev/scalastyle
@@ -18,9 +18,10 @@
#

echo -e "q\n" | build/sbt -Phive -Phive-thriftserver scalastyle > scalastyle.txt
echo -e "q\n" | build/sbt -Phive -Phive-thriftserver test:scalastyle >> scalastyle.txt
# Check style with YARN built too
echo -e "q\n" | build/sbt -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 scalastyle \
>> scalastyle.txt
echo -e "q\n" | build/sbt -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 scalastyle >> scalastyle.txt
echo -e "q\n" | build/sbt -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 test:scalastyle >> scalastyle.txt

ERRORS=$(cat scalastyle.txt | awk '{if($1~/error/)print}')
rm scalastyle.txt
@@ -150,7 +150,8 @@ class VectorIndexerSuite extends FunSuite with MLlibTestSparkContext {
val vectorIndexer = getIndexer.setMaxCategories(maxCategories)
val model = vectorIndexer.fit(data)
val categoryMaps = model.categoryMaps
assert(categoryMaps.keys.toSet === categoricalFeatures) // Chose correct categorical features
// Chose correct categorical features
assert(categoryMaps.keys.toSet === categoricalFeatures)
val transformed = model.transform(data).select("indexed")
val indexedRDD: RDD[Vector] = transformed.map(_.getAs[Vector](0))
val featureAttrs = AttributeGroup.fromStructField(transformed.schema("indexed"))
@@ -1,13 +1,12 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -18,6 +18,7 @@
package org.apache.spark.sql.hive.thriftserver

import java.io.File
import java.net.URL
import java.sql.{Date, DriverManager, Statement}

import scala.collection.mutable.ArrayBuffer
@@ -41,7 +42,7 @@ import org.apache.spark.sql.hive.HiveShim
import org.apache.spark.util.Utils

object TestData {
def getTestDataFilePath(name: String) = {
def getTestDataFilePath(name: String): URL = {
Thread.currentThread().getContextClassLoader.getResource(s"data/files/$name")
}

@@ -50,7 +51,7 @@ object TestData {
}

class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
override def mode = ServerMode.binary
override def mode: ServerMode.Value = ServerMode.binary

private def withCLIServiceClient(f: ThriftCLIServiceClient => Unit): Unit = {
// Transport creation logics below mimics HiveConnection.createBinaryTransport
@@ -337,7 +338,7 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
}

class HiveThriftHttpServerSuite extends HiveThriftJdbcTest {
override def mode = ServerMode.http
override def mode: ServerMode.Value = ServerMode.http

test("JDBC query execution") {
withJdbcStatement { statement =>
@@ -17,15 +17,12 @@
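Aside (not part of the patch): the explicit result types added in this file (URL on getTestDataFilePath, ServerMode.Value on mode) are the kind of change a style check that requires declared result types on public members asks for. The sketch below only illustrates that rule; the object name is hypothetical and nothing in it comes from Spark itself.

// Illustrative sketch only; ReturnTypeSketch is a made-up name, not Spark code.
object ReturnTypeSketch {
  // A definition with an inferred result type is legal Scala, but a style rule that
  // requires explicit result types on public members would flag a form like this:
  // def inferred(name: String) = s"data/files/$name"

  // Explicit result type, the form this patch converges on:
  def explicit(name: String): String = s"data/files/$name"

  def main(args: Array[String]): Unit = {
    println(explicit("example.txt")) // prints data/files/example.txt
  }
}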

package org.apache.spark.sql.hive

import java.io.File

import com.google.common.io.Files

import org.apache.spark.sql.{QueryTest, _}
import org.apache.spark.sql.hive.test.TestHive
import org.apache.spark.util.Utils
/* Implicits */
import org.apache.spark.sql.hive.test.TestHive._

import org.apache.spark.util.Utils


class QueryPartitionSuite extends QueryTest {
@@ -37,23 +34,28 @@ class QueryPartitionSuite extends QueryTest {
testData.registerTempTable("testData")

val tmpDir = Files.createTempDir()
//create the table for test
sql(s"CREATE TABLE table_with_partition(key int,value string) PARTITIONED by (ds string) location '${tmpDir.toURI.toString}' ")
sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='1') SELECT key,value FROM testData")
sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='2') SELECT key,value FROM testData")
sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='3') SELECT key,value FROM testData")
sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='4') SELECT key,value FROM testData")
// create the table for test
sql(s"CREATE TABLE table_with_partition(key int,value string) " +
s"PARTITIONED by (ds string) location '${tmpDir.toURI.toString}' ")
sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='1') " +
"SELECT key,value FROM testData")
sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='2') " +
"SELECT key,value FROM testData")
sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='3') " +
"SELECT key,value FROM testData")
sql("INSERT OVERWRITE TABLE table_with_partition partition (ds='4') " +
"SELECT key,value FROM testData")

//test for the exist path
// test for the exist path
checkAnswer(sql("select key,value from table_with_partition"),
testData.toSchemaRDD.collect ++ testData.toSchemaRDD.collect
++ testData.toSchemaRDD.collect ++ testData.toSchemaRDD.collect)

//delect the path of one partition
// delete the path of one partition
val folders = tmpDir.listFiles.filter(_.isDirectory)
Utils.deleteRecursively(folders(0))

//test for affter delete the path
// test for after delete the path
checkAnswer(sql("select key,value from table_with_partition"),
testData.toSchemaRDD.collect ++ testData.toSchemaRDD.collect
++ testData.toSchemaRDD.collect)
@@ -21,14 +21,14 @@ import org.scalatest.Matchers
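Aside (not part of the patch): the long CREATE TABLE and INSERT statements above are split with string concatenation purely to keep each source line within the length limit. Below is a minimal, self-contained sketch of the same idea next to a stripMargin alternative; the object name and the temporary-directory handling are illustrative assumptions, not code from this suite.

import java.nio.file.Files

object LongSqlSketch {
  def main(args: Array[String]): Unit = {
    val tmpDir = Files.createTempDirectory("table_with_partition").toFile

    // Style used in the patch: concatenate short literals so no line exceeds the limit.
    val viaConcat =
      s"CREATE TABLE table_with_partition(key int,value string) " +
        s"PARTITIONED by (ds string) location '${tmpDir.toURI.toString}' "

    // Alternative: a stripMargin block keeps the statement readable as one unit.
    val viaStripMargin =
      s"""CREATE TABLE table_with_partition(key int,value string)
         |PARTITIONED by (ds string) location '${tmpDir.toURI.toString}'
         |""".stripMargin

    println(viaConcat)
    println(viaStripMargin)
  }
}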

import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.scheduler._
import org.apache.spark.streaming.{Time, Milliseconds, TestSuiteBase}
import org.apache.spark.streaming.{Duration, Time, Milliseconds, TestSuiteBase}

class StreamingJobProgressListenerSuite extends TestSuiteBase with Matchers {

val input = (1 to 4).map(Seq(_)).toSeq
val operation = (d: DStream[Int]) => d.map(x => x)

override def batchDuration = Milliseconds(100)
override def batchDuration: Duration = Milliseconds(100)

test("onBatchSubmitted, onBatchStarted, onBatchCompleted, " +
"onReceiverStarted, onReceiverError, onReceiverStopped") {
19 changes: 13 additions & 6 deletions yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
@@ -232,19 +232,26 @@ class ClientSuite extends FunSuite with Matchers with BeforeAndAfterAll {
testCode(conf)
}

def newEnv = MutableHashMap[String, String]()
def newEnv: MutableHashMap[String, String] = MutableHashMap[String, String]()

def classpath(env: MutableHashMap[String, String]) = env(Environment.CLASSPATH.name).split(":|;|<CPS>")
def classpath(env: MutableHashMap[String, String]): Array[String] =
env(Environment.CLASSPATH.name).split(":|;|<CPS>")

def flatten(a: Option[Seq[String]], b: Option[Seq[String]]) = (a ++ b).flatten.toArray
def flatten(a: Option[Seq[String]], b: Option[Seq[String]]): Array[String] =
(a ++ b).flatten.toArray

def getFieldValue[A, B](clazz: Class[_], field: String, defaults: => B)(mapTo: A => B): B =
Try(clazz.getField(field)).map(_.get(null).asInstanceOf[A]).toOption.map(mapTo).getOrElse(defaults)
def getFieldValue[A, B](clazz: Class[_], field: String, defaults: => B)(mapTo: A => B): B = {
Try(clazz.getField(field))
.map(_.get(null).asInstanceOf[A])
.toOption
.map(mapTo)
.getOrElse(defaults)
}

def getFieldValue2[A: ClassTag, A1: ClassTag, B](
clazz: Class[_],
field: String,
defaults: => B)(mapTo: A => B)(mapTo1: A1 => B) : B = {
defaults: => B)(mapTo: A => B)(mapTo1: A1 => B): B = {
Try(clazz.getField(field)).map(_.get(null)).map {
case v: A => mapTo(v)
case v1: A1 => mapTo1(v1)
@@ -79,7 +79,7 @@ class YarnAllocatorSuite extends FunSuite with Matchers with BeforeAndAfterEach
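Aside (not part of the patch): the getFieldValue helpers reformatted above rely on their ClassTag context bounds so that the type patterns in the match are real runtime checks rather than being erased away. The sketch below shows that mechanism end to end; the object name, the fallback handling at the end of the helper, and the java.lang.Math.PI example are illustrative assumptions, not taken from ClientSuite.

import scala.reflect.ClassTag
import scala.util.Try

object FieldValueSketch {
  // The ClassTag bounds on A and A1 let "case v: A" / "case v1: A1" test the
  // reflected value's runtime class instead of matching everything after erasure.
  def getFieldValue2[A: ClassTag, A1: ClassTag, B](
      clazz: Class[_],
      field: String,
      defaults: => B)(mapTo: A => B)(mapTo1: A1 => B): B = {
    Try(clazz.getField(field)).map(_.get(null)).map {
      case v: A => mapTo(v)
      case v1: A1 => mapTo1(v1)
      case _ => defaults
    }.getOrElse(defaults)
  }

  def main(args: Array[String]): Unit = {
    // java.lang.Math.PI is a public static double; it reflects back as a boxed
    // java.lang.Double and therefore takes the first branch.
    val rendered = getFieldValue2[java.lang.Double, String, String](
      classOf[java.lang.Math], "PI", "missing")(d => s"PI is about $d")(s => s)
    println(rendered)
  }
}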
}

class MockSplitInfo(host: String) extends SplitInfo(null, host, null, 1, null) {
override def equals(other: Any) = false
override def equals(other: Any): Boolean = false
}

def createAllocator(maxExecutors: Int = 5): YarnAllocator = {
@@ -118,7 +118,9 @@ class YarnAllocatorSuite extends FunSuite with Matchers with BeforeAndAfterEach
handler.getNumExecutorsRunning should be (1)
handler.allocatedContainerToHostMap.get(container.getId).get should be ("host1")
handler.allocatedHostToContainersMap.get("host1").get should contain (container.getId)
rmClient.getMatchingRequests(container.getPriority, "host1", containerResource).size should be (0)

val size = rmClient.getMatchingRequests(container.getPriority, "host1", containerResource).size
size should be (0)
}

test("some containers allocated") {
@@ -46,7 +46,7 @@ class YarnSparkHadoopUtilSuite extends FunSuite with Matchers with Logging {
logWarning("Cannot execute bash, skipping bash tests.")
}

def bashTest(name: String)(fn: => Unit) =
def bashTest(name: String)(fn: => Unit): Unit =
if (hasBash) test(name)(fn) else ignore(name)(fn)

bashTest("shell script escaping") {