Commit fa60a7e
[SPARK-46795][SQL] Replace UnsupportedOperationException by SparkUnsupportedOperationException in sql/core
### What changes were proposed in this pull request?

In the PR, I propose to replace all `UnsupportedOperationException` with `SparkUnsupportedOperationException` in the `sql/core` code base, and to introduce new legacy error classes with the `_LEGACY_ERROR_TEMP_` prefix.

### Why are the changes needed?

To unify Spark SQL exceptions, and to port Java exceptions onto Spark exceptions with error classes.

### Does this PR introduce _any_ user-facing change?

Yes, it can if a user's code assumes some particular format of `UnsupportedOperationException` messages.

### How was this patch tested?

By running the modified test suites:

```
$ build/sbt "core/testOnly *SparkThrowableSuite"
$ build/sbt "test:testOnly *FileBasedDataSourceSuite"
$ build/sbt "test:testOnly *ColumnarRulesSuite"
```

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #44772 from MaxGekk/migrate-UnsupportedOperationException-sql.

Authored-by: Max Gekk <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
1 parent d26e871 commit fa60a7e
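The migration applied across the 49 files is mechanical: a plain `UnsupportedOperationException` with a free-form message becomes a `SparkUnsupportedOperationException` keyed by an error class defined in `error-classes.json`. A minimal sketch of the pattern, using only the one-argument constructor visible in the diffs below (`MigrationSketch` is a hypothetical class, not part of this commit):

```java
import org.apache.spark.SparkUnsupportedOperationException;

public final class MigrationSketch {
  byte[] decodeToBinary(int id) {
    // Before: throw new UnsupportedOperationException(
    //     "Dictionary encoding does not support String");
    // After: the message text lives in error-classes.json under an
    // error class, and the exception carries that class name.
    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3191");
  }
}
```

Since `SparkUnsupportedOperationException` extends the JVM's `UnsupportedOperationException`, existing catch blocks continue to match; only code that inspects the exact message text can observe a difference, which is the user-facing change called out above.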

49 files changed (+451 additions, −216 deletions)

common/utils/src/main/resources/error/error-classes.json

Lines changed: 150 additions & 0 deletions
```diff
@@ -7297,6 +7297,156 @@
       ""
     ]
   },
+  "_LEGACY_ERROR_TEMP_3161" : {
+    "message" : [
+      "Uploading artifact file to local file system destination path is not supported."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3162" : {
+    "message" : [
+      "Unsupported physical type <type>."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3163" : {
+    "message" : [
+      "Unsupported number of children: <num>."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3165" : {
+    "message" : [
+      "Cannot merge <classA> with <classB>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3166" : {
+    "message" : [
+      "latestOffset(Offset, ReadLimit) should be called instead of this method"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3167" : {
+    "message" : [
+      "continuous mode is not supported!"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3168" : {
+    "message" : [
+      "hasTimedOut is true however there's no timeout configured"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3169" : {
+    "message" : [
+      "AcceptsLatestSeenOffset is not supported with DSv1 streaming source: <unsupportedSources>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3170" : {
+    "message" : [
+      "SortAggregate code-gen does not support grouping keys"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3171" : {
+    "message" : [
+      "Number of nulls not set for Parquet file <filePath>. Set SQLConf <config> to false and execute again."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3172" : {
+    "message" : [
+      "No min/max found for Parquet file <filePath>. Set SQLConf <config> to false and execute again."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3173" : {
+    "message" : [
+      "Cannot specify 'USING index_type' in 'CREATE INDEX'"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3175" : {
+    "message" : [
+      "Index Type <v> is not supported. The supported Index Types are: <supportedIndexTypeList>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3176" : {
+    "message" : [
+      "applyInPandasWithState is unsupported in batch query. Use applyInPandas instead."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3177" : {
+    "message" : [
+      "<class> does not support function: <funcName>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3178" : {
+    "message" : [
+      "<class> does not support inverse distribution function: <funcName>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3179" : {
+    "message" : [
+      "createIndex is not supported"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3180" : {
+    "message" : [
+      "indexExists is not supported"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3181" : {
+    "message" : [
+      "dropIndex is not supported"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3182" : {
+    "message" : [
+      "listIndexes is not supported"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3183" : {
+    "message" : [
+      "TableSample is not supported by this data source"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3184" : {
+    "message" : [
+      "<class> does not support aggregate function: <funcName> with DISTINCT"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3185" : {
+    "message" : [
+      "Schema evolution not supported."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3186" : {
+    "message" : [
+      "Boolean is not supported"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3187" : {
+    "message" : [
+      "only readInts is valid."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3188" : {
+    "message" : [
+      "only skipIntegers is valid"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3189" : {
+    "message" : [
+      "Unsupported encoding: <encoding>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3190" : {
+    "message" : [
+      "RLE encoding is not supported for values of type: <typeName>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3191" : {
+    "message" : [
+      "Dictionary encoding does not support String"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3192" : {
+    "message" : [
+      "Datatype not supported <dt>"
+    ]
+  },
   "_LEGACY_ERROR_USER_RAISED_EXCEPTION" : {
     "message" : [
       "<errorMessage>"
```

sql/core/src/main/java/org/apache/spark/sql/execution/columnar/ColumnDictionary.java

Lines changed: 2 additions & 1 deletion
```diff
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.execution.columnar;
 
+import org.apache.spark.SparkUnsupportedOperationException;
 import org.apache.spark.sql.execution.vectorized.Dictionary;
 
 public final class ColumnDictionary implements Dictionary {
@@ -59,6 +60,6 @@ public long decodeToLong(int id) {
 
   @Override
   public byte[] decodeToBinary(int id) {
-    throw new UnsupportedOperationException("Dictionary encoding does not support String");
+    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3191");
   }
 }
```
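One payoff of the new form: callers can branch on a stable error class instead of parsing message text. A hedged sketch (assumes the `getErrorClass()` accessor that `SparkThrowable` exposes; `tryDecode` and its fallback behavior are hypothetical):

```java
import org.apache.spark.SparkUnsupportedOperationException;
import org.apache.spark.sql.execution.columnar.ColumnDictionary;

public final class CallerSketch {
  // Hypothetical helper: returns null when binary decoding is unsupported.
  static byte[] tryDecode(ColumnDictionary dict, int id) {
    try {
      return dict.decodeToBinary(id);
    } catch (SparkUnsupportedOperationException e) {
      // Stable identifier instead of the old free-form message string.
      if ("_LEGACY_ERROR_TEMP_3191".equals(e.getErrorClass())) {
        return null; // caller falls back to a non-dictionary path
      }
      throw e;
    }
  }
}
```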

sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcArrayColumnVector.java

Lines changed: 13 additions & 12 deletions
```diff
@@ -20,6 +20,7 @@
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
 
+import org.apache.spark.SparkUnsupportedOperationException;
 import org.apache.spark.sql.types.ArrayType;
 import org.apache.spark.sql.types.DataType;
 import org.apache.spark.sql.types.Decimal;
@@ -52,61 +53,61 @@ public ColumnarArray getArray(int rowId) {
 
   @Override
   public boolean getBoolean(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public byte getByte(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public short getShort(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public int getInt(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public long getLong(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public float getFloat(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public double getDouble(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public Decimal getDecimal(int rowId, int precision, int scale) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public UTF8String getUTF8String(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public byte[] getBinary(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public ColumnarMap getMap(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public org.apache.spark.sql.vectorized.ColumnVector getChild(int ordinal) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 }
```
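Note the no-argument `SparkUnsupportedOperationException.apply()` used for these stub accessors: it appears to report a generic unsupported-call error identifying the calling class and method from the stack trace, so no per-call-site error class is needed. The same shape, sketched (the stack-trace behavior is an assumption, not verified here):

```java
import org.apache.spark.SparkUnsupportedOperationException;

public final class StubSketch {
  // Same shape as the ORC column-vector stubs in this commit.
  public boolean getBoolean(int rowId) {
    // apply() takes no error class; it appears to fill in this class and
    // method name itself (assumed from the companion object's behavior).
    throw SparkUnsupportedOperationException.apply();
  }
}
```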

sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcAtomicColumnVector.java

Lines changed: 5 additions & 4 deletions
```diff
@@ -21,6 +21,7 @@
 
 import org.apache.hadoop.hive.ql.exec.vector.*;
 
+import org.apache.spark.SparkUnsupportedOperationException;
 import org.apache.spark.sql.catalyst.util.DateTimeUtils;
 import org.apache.spark.sql.catalyst.util.RebaseDateTime;
 import org.apache.spark.sql.types.DataType;
@@ -71,7 +72,7 @@ public class OrcAtomicColumnVector extends OrcColumnVector {
     } else if (vector instanceof TimestampColumnVector timestampColumnVector) {
       timestampData = timestampColumnVector;
     } else {
-      throw new UnsupportedOperationException();
+      throw SparkUnsupportedOperationException.apply();
     }
   }
 
@@ -146,16 +147,16 @@ public byte[] getBinary(int rowId) {
 
   @Override
   public ColumnarArray getArray(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public ColumnarMap getMap(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public org.apache.spark.sql.vectorized.ColumnVector getChild(int ordinal) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 }
```

sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcMapColumnVector.java

Lines changed: 13 additions & 12 deletions
```diff
@@ -20,6 +20,7 @@
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector;
 
+import org.apache.spark.SparkUnsupportedOperationException;
 import org.apache.spark.sql.types.DataType;
 import org.apache.spark.sql.types.Decimal;
 import org.apache.spark.sql.types.MapType;
@@ -55,61 +56,61 @@ public ColumnarMap getMap(int ordinal) {
 
   @Override
   public boolean getBoolean(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public byte getByte(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public short getShort(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public int getInt(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public long getLong(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public float getFloat(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public double getDouble(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
  }
 
   @Override
   public Decimal getDecimal(int rowId, int precision, int scale) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public UTF8String getUTF8String(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public byte[] getBinary(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public ColumnarArray getArray(int rowId) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public org.apache.spark.sql.vectorized.ColumnVector getChild(int ordinal) {
-    throw new UnsupportedOperationException();
+    throw SparkUnsupportedOperationException.apply();
   }
 }
```
