This repository was archived by the owner on Feb 27, 2025. It is now read-only.
File tree: 1 file changed, +2 −2 lines changed
src/main/scala/com/microsoft/sqlserver/jdbc/spark/utils: 1 file changed, +2 −2 lines changed
@@ -300,7 +300,7 @@ object BulkCopyUtils extends Logging {
300300 assertIfCheckEnabled(dfCols.length + autoCols.length == tableCols.length, strictSchemaCheck,
301301 s " ${prefix} numbers of columns " )
302302
303- if (columnsToWriteSet.isEmpty() ) {
303+ if (columnsToWriteSet.isEmpty) {
304304 val result = new Array [ColumnMetadata ](tableCols.length - autoCols.length)
305305 } else {
306306 val result = new Array [ColumnMetadata ](columnsToWriteSet.size)
@@ -311,7 +311,7 @@ object BulkCopyUtils extends Logging {
311311 for (i <- 0 to tableCols.length- 1 ) {
312312 val tableColName = tableCols(i).name
313313 var dfFieldIndex = - 1
314- if (! columnsToWriteSet.isEmpty() && ! columnsToWriteSet.contains(tableColName)) {
314+ if (! columnsToWriteSet.isEmpty && ! columnsToWriteSet.contains(tableColName)) {
315315 // if columnsToWrite provided, and column name not in it, skip column mapping and ColumnMetadata
316316 logDebug(s " skipping col index $i col name $tableColName, user not provided in columnsToWrite list " )
317317 } else if (autoCols.contains(tableColName)) {
You can’t perform that action at this time.
0 commit comments