@@ -1133,20 +1133,20 @@ def get_output(_, rdd):
             kinesisTestUtils.deleteDynamoDBTable(kinesisAppName)
 
 
-# Search jar in the project dir using name_pattern for both sbt build and maven build because the
-# artifact jars are in different directories.
-def search_jar(dir, name_pattern):
+# Search jar in the project dir using the jar name_prefix for both sbt build and maven build because
+# the artifact jars are in different directories.
+def search_jar(dir, name_prefix):
     # We should ignore the following jars
     ignored_jar_suffixes = ("javadoc.jar", "sources.jar", "test-sources.jar", "tests.jar")
-    jars = (glob.glob(os.path.join(dir, "target/scala-*/" + name_pattern)) +  # sbt build
-            glob.glob(os.path.join(dir, "target/" + name_pattern)))  # maven build
+    jars = (glob.glob(os.path.join(dir, "target/scala-*/" + name_prefix + "-*.jar")) +  # sbt build
+            glob.glob(os.path.join(dir, "target/" + name_prefix + "_*.jar")))  # maven build
     return [jar for jar in jars if not jar.endswith(ignored_jar_suffixes)]
 
 
 def search_kafka_assembly_jar():
     SPARK_HOME = os.environ["SPARK_HOME"]
     kafka_assembly_dir = os.path.join(SPARK_HOME, "external/kafka-assembly")
-    jars = search_jar(kafka_assembly_dir, "spark-streaming-kafka-assembly*.jar")
+    jars = search_jar(kafka_assembly_dir, "spark-streaming-kafka-assembly")
     if not jars:
         raise Exception(
             ("Failed to find Spark Streaming kafka assembly jar in %s. " % kafka_assembly_dir) +
@@ -1163,7 +1163,7 @@ def search_kafka_assembly_jar():
 def search_flume_assembly_jar():
     SPARK_HOME = os.environ["SPARK_HOME"]
     flume_assembly_dir = os.path.join(SPARK_HOME, "external/flume-assembly")
-    jars = search_jar(flume_assembly_dir, "spark-streaming-flume-assembly*.jar")
+    jars = search_jar(flume_assembly_dir, "spark-streaming-flume-assembly")
     if not jars:
         raise Exception(
             ("Failed to find Spark Streaming Flume assembly jar in %s. " % flume_assembly_dir) +
@@ -1180,7 +1180,7 @@ def search_flume_assembly_jar():
 def search_mqtt_assembly_jar():
     SPARK_HOME = os.environ["SPARK_HOME"]
     mqtt_assembly_dir = os.path.join(SPARK_HOME, "external/mqtt-assembly")
-    jars = search_jar(mqtt_assembly_dir, "spark-streaming-mqtt-assembly*.jar")
+    jars = search_jar(mqtt_assembly_dir, "spark-streaming-mqtt-assembly")
     if not jars:
         raise Exception(
             ("Failed to find Spark Streaming MQTT assembly jar in %s. " % mqtt_assembly_dir) +
@@ -1214,7 +1214,7 @@ def search_mqtt_test_jar():
 def search_kinesis_asl_assembly_jar():
     SPARK_HOME = os.environ["SPARK_HOME"]
     kinesis_asl_assembly_dir = os.path.join(SPARK_HOME, "extras/kinesis-asl-assembly")
-    jars = search_jar(kinesis_asl_assembly_dir, "spark-streaming-kinesis-asl-assembly*.jar")
+    jars = search_jar(kinesis_asl_assembly_dir, "spark-streaming-kinesis-asl-assembly")
     if not jars:
         return None
     elif len(jars) > 1:
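
For reference (not part of the commit), the sketch below exercises the reworked search_jar() against a throwaway directory tree. The target/scala-2.10 layout and the jar file names are illustrative assumptions, not necessarily the exact paths the real sbt/maven builds produce; the maven glob uses name_prefix + "_*.jar", presumably because maven artifact names carry a Scala version suffix such as _2.10, while the sbt jar keeps the plain prefix under target/scala-*/.

# Hypothetical, self-contained exercise of the changed search_jar(); the
# directory layout and jar names below are made up for illustration.
import glob
import os
import tempfile


def search_jar(dir, name_prefix):
    # Same body as the function changed in this diff.
    ignored_jar_suffixes = ("javadoc.jar", "sources.jar", "test-sources.jar", "tests.jar")
    jars = (glob.glob(os.path.join(dir, "target/scala-*/" + name_prefix + "-*.jar")) +  # sbt build
            glob.glob(os.path.join(dir, "target/" + name_prefix + "_*.jar")))  # maven build
    return [jar for jar in jars if not jar.endswith(ignored_jar_suffixes)]


if __name__ == "__main__":
    base = tempfile.mkdtemp()
    sbt_dir = os.path.join(base, "target", "scala-2.10")
    os.makedirs(sbt_dir)
    # One assembly jar that should be found, one sources jar that should be ignored.
    for name in ("spark-streaming-kafka-assembly-1.5.0-SNAPSHOT.jar",
                 "spark-streaming-kafka-assembly-1.5.0-SNAPSHOT-sources.jar"):
        open(os.path.join(sbt_dir, name), "w").close()

    print(search_jar(base, "spark-streaming-kafka-assembly"))
    # Prints only the plain assembly jar; the -sources.jar is filtered out.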