2 files changed: +4 −5 lines changed

main/scala/org/apache/spark/sql/execution/datasources/jdbc
@@ -163,8 +163,8 @@ private[sql] object JDBCRDD extends Logging {
    * @return A Catalyst schema corresponding to columns in the given order.
    */
   private def pruneSchema(schema: StructType, columns: Array[String]): StructType = {
-    val fieldMap = Map(schema.fields map { x => x.metadata.getString("name") -> x }: _*)
-    new StructType(columns map { name => fieldMap(name) })
+    val fieldMap = Map(schema.fields.map(x => x.metadata.getString("name") -> x): _*)
+    new StructType(columns.map(name => fieldMap(name)))
   }

   /**
@@ -296,7 +296,7 @@ private[sql] class JDBCRDD(
    * `filters`, but as a WHERE clause suitable for injection into a SQL query.
    */
   private val filterWhereClause: String = {
-    val filterStrings = filters map JDBCRDD.compileFilter filter (_ != null)
+    val filterStrings = filters.map(JDBCRDD.compileFilter).filter(_ != null)
     if (filterStrings.size > 0) {
       val sb = new StringBuilder("WHERE ")
       filterStrings.foreach(x => sb.append(x).append(" AND "))
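Both hunks above are purely syntactic cleanups: Scala's infix method application (`xs map f filter (_ != null)`) and explicit dot notation (`xs.map(f).filter(_ != null)`) evaluate to the same thing, but the dotted form makes the left-to-right chaining explicit. The sketch below illustrates the equivalence with stand-ins rather than Spark code: `compile` and the literal predicate strings are hypothetical substitutes for `JDBCRDD.compileFilter` and its output, and the trimming of the trailing `" AND "` is assumed from context, since that step falls outside the hunk shown.

// Sketch only, not part of the patch: plain Scala 2, no Spark dependencies.
object DotNotationSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical stand-ins for `filters` and `JDBCRDD.compileFilter`; a null
    // entry models a filter that could not be compiled (hence `_ != null`).
    val filters: Array[String] = Array("a = 1", "b = 2", null)
    val compile: String => String = identity

    // Old spelling: infix application, parsed as (filters map compile) filter ...
    val infix = filters map compile filter (_ != null)

    // New spelling: explicit dot notation; same result, clearer chaining.
    val dotted = filters.map(compile).filter(_ != null)
    assert(infix sameElements dotted)

    // Assemble a WHERE clause the way filterWhereClause does: append every
    // predicate followed by " AND ", then drop the trailing separator
    // (the trimming step is assumed; it is outside the hunk above).
    val sb = new StringBuilder("WHERE ")
    dotted.foreach(x => sb.append(x).append(" AND "))
    println(sb.substring(0, sb.length - 5)) // prints: WHERE a = 1 AND b = 2
  }
}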
test/scala/org/apache/spark/sql/jdbc
@@ -33,8 +33,7 @@ import org.apache.spark.sql.sources._
 import org.apache.spark.util.Utils

 class JDBCSuite extends SparkFunSuite
-  with BeforeAndAfter with PrivateMethodTester with SharedSQLContext
-{
+  with BeforeAndAfter with PrivateMethodTester with SharedSQLContext {
   import testImplicits._

   val url = "jdbc:h2:mem:testdb0"