@@ -23,7 +23,7 @@ import java.util.regex.Pattern
 
 import com.google.common.io.PatternFilenameFilter
 import io.fabric8.kubernetes.api.model.Pod
-import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
+import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, Tag}
 import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
 import org.scalatest.time.{Minutes, Seconds, Span}
 import scala.collection.JavaConverters._
@@ -47,6 +47,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite
   private var containerLocalSparkDistroExamplesJar: String = _
   private var appLocator: String = _
   private var driverPodName: String = _
+  private val k8sTestTag = Tag("k8s")
 
   override def beforeAll(): Unit = {
     // The scalatest-maven-plugin gives system properties that are referenced but not set null
@@ -102,22 +103,22 @@ private[spark] class KubernetesSuite extends SparkFunSuite
     deleteDriverPod()
   }
 
-  test("Run SparkPi with no resources") {
+  test("Run SparkPi with no resources", k8sTestTag) {
     runSparkPiAndVerifyCompletion()
   }
 
-  test("Run SparkPi with a very long application name.") {
+  test("Run SparkPi with a very long application name.", k8sTestTag) {
     sparkAppConf.set("spark.app.name", "long" * 40)
     runSparkPiAndVerifyCompletion()
   }
 
-  test("Use SparkLauncher.NO_RESOURCE") {
+  test("Use SparkLauncher.NO_RESOURCE", k8sTestTag) {
     sparkAppConf.setJars(Seq(containerLocalSparkDistroExamplesJar))
     runSparkPiAndVerifyCompletion(
       appResource = SparkLauncher.NO_RESOURCE)
   }
 
-  test("Run SparkPi with a master URL without a scheme.") {
+  test("Run SparkPi with a master URL without a scheme.", k8sTestTag) {
     val url = kubernetesTestComponents.kubernetesClient.getMasterUrl
     val k8sMasterUrl = if (url.getPort < 0) {
       s"k8s://${url.getHost}"
@@ -128,11 +129,11 @@ private[spark] class KubernetesSuite extends SparkFunSuite
     runSparkPiAndVerifyCompletion()
   }
 
-  test("Run SparkPi with an argument.") {
+  test("Run SparkPi with an argument.", k8sTestTag) {
     runSparkPiAndVerifyCompletion(appArgs = Array("5"))
   }
 
-  test("Run SparkPi with custom labels, annotations, and environment variables.") {
+  test("Run SparkPi with custom labels, annotations, and environment variables.", k8sTestTag) {
     sparkAppConf
       .set("spark.kubernetes.driver.label.label1", "label1-value")
       .set("spark.kubernetes.driver.label.label2", "label2-value")
@@ -158,21 +159,21 @@ private[spark] class KubernetesSuite extends SparkFunSuite
     })
   }
 
-  test("Run extraJVMOptions check on driver") {
+  test("Run extraJVMOptions check on driver", k8sTestTag) {
     sparkAppConf
       .set("spark.driver.extraJavaOptions", "-Dspark.test.foo=spark.test.bar")
     runSparkJVMCheckAndVerifyCompletion(
       expectedJVMValue = Seq("(spark.test.foo,spark.test.bar)"))
   }
 
-  test("Run SparkRemoteFileTest using a remote data file") {
+  test("Run SparkRemoteFileTest using a remote data file", k8sTestTag) {
    sparkAppConf
      .set("spark.files", REMOTE_PAGE_RANK_DATA_FILE)
    runSparkRemoteCheckAndVerifyCompletion(
      appArgs = Array(REMOTE_PAGE_RANK_FILE_NAME))
  }
 
-  test("Run PySpark on simple pi.py example") {
+  test("Run PySpark on simple pi.py example", k8sTestTag) {
     sparkAppConf
       .set("spark.kubernetes.container.image", s"${getTestImageRepo}/spark-py:${getTestImageTag}")
     runSparkApplicationAndVerifyCompletion(
@@ -186,7 +187,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite
       isJVM = false)
   }
 
-  test("Run PySpark with Python2 to test a pyfiles example") {
+  test("Run PySpark with Python2 to test a pyfiles example", k8sTestTag) {
     sparkAppConf
       .set("spark.kubernetes.container.image", s"${getTestImageRepo}/spark-py:${getTestImageTag}")
       .set("spark.kubernetes.pyspark.pythonversion", "2")
@@ -204,7 +205,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite
       pyFiles = Some(PYSPARK_CONTAINER_TESTS))
   }
 
-  test("Run PySpark with Python3 to test a pyfiles example") {
+  test("Run PySpark with Python3 to test a pyfiles example", k8sTestTag) {
     sparkAppConf
       .set("spark.kubernetes.container.image", s"${getTestImageRepo}/spark-py:${getTestImageTag}")
       .set("spark.kubernetes.pyspark.pythonversion", "3")
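For context on what the Tag("k8s") added above buys: ScalaTest lets a test carry one or more tags, and the runner can then include or exclude tests by tag name. The sketch below is a minimal, self-contained illustration only; the object K8sTag and the example suite are hypothetical and not part of this change, and it assumes the ScalaTest-3.0-style org.scalatest.FunSuite import consistent with the imports shown in the diff.

import org.scalatest.{FunSuite, Tag}

// Hypothetical tag object; "k8s" matches the name passed to Tag("k8s") in the diff above.
object K8sTag extends Tag("k8s")

// Hypothetical suite showing a tagged test next to an untagged one.
class TaggedExampleSuite extends FunSuite {

  // Carries the k8s tag: selected by the runner option `-n k8s`, skipped by `-l k8s`.
  test("needs a Kubernetes backend", K8sTag) {
    assert(Seq("driver", "executor").nonEmpty)
  }

  // No tag: eligible to run regardless of tag filtering.
  test("plain unit test") {
    assert("spark".startsWith("sp"))
  }
}

With tags in place, a run can be narrowed from the command line, for example in sbt with testOnly *TaggedExampleSuite -- -n k8s, or through the scalatest-maven-plugin's tag include/exclude configuration (e.g. tagsToInclude); the exact invocation depends on how the surrounding build wires ScalaTest, which this diff does not show.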