Skip to content

Commit 74abe85

Browse files
committed
[KYUUBI #1950] Remove ambiguous SPARK_HADOOP_VERSION
<!-- Thanks for sending a pull request! Here are some tips for you: 1. If this is your first time, please read our contributor guidelines: https://kyuubi.readthedocs.io/en/latest/community/contributions.html 2. If the PR is related to an issue in https://github.com/apache/incubator-kyuubi/issues, add '[KYUUBI #XXXX]' in your PR title, e.g., '[KYUUBI #XXXX] Your PR title ...'. 3. If the PR is unfinished, add '[WIP]' in your PR title, e.g., '[WIP][KYUUBI #XXXX] Your PR title ...'. --> ### _Why are the changes needed?_ <!-- Please clarify why the changes are needed. For instance, 1. If you add a feature, you can talk about the use case of it. 2. If you fix a bug, you can clarify why it is a bug. --> The original purpose of SPARK_HADOOP_VERSION was only to concatenate Spark release names; now we need to remove it because: - SPARK_HADOOP_VERSION is misunderstood by developers and misused in some places, e.g. mistaken for the Hadoop version Kyuubi was compiled against - we now support multiple engines - the release names of Spark (or other engines) are very easy to obtain through code across different environments, prod/test/dev - a `mvn` job is bundled with `bin/load-kyuubi-env.sh`, which is truly worrisome - SPARK_HADOOP_VERSION on the Spark side has already broken for Spark 3.2, which is actually bundled with Hadoop 3.3, see apache/spark-website#361 (comment) ### _How was this patch tested?_ - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible - [ ] Add screenshots for manual tests if appropriate - [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request Closes #1950 from yaooqinn/hadoop. Closes #1950 b47be7c [Kent Yao] Remove ambiguous SPARK_HADOOP_VERSION 3b33ee5 [Kent Yao] Remove ambiguous SPARK_HADOOP_VERSION Authored-by: Kent Yao <[email protected]> Signed-off-by: Kent Yao <[email protected]>
1 parent f25e5c9 commit 74abe85

File tree

3 files changed

+10
-53
lines changed

3 files changed

+10
-53
lines changed

bin/load-kyuubi-env.sh

Lines changed: 4 additions & 26 deletions
Original file line number | Diff line number | Diff line change
@@ -71,32 +71,11 @@ fi
7171
export KYUUBI_SCALA_VERSION="${KYUUBI_SCALA_VERSION:-"2.12"}"
7272

7373
if [[ -f ${KYUUBI_HOME}/RELEASE ]]; then
74-
KYUUBI_VERSION="$(grep "Kyuubi " "$KYUUBI_HOME/RELEASE" | awk -F ' ' '{print $2}')"
75-
FLINK_VERSION_BUILD="$(grep "Flink " "$KYUUBI_HOME/RELEASE" | awk -F ' ' '{print $2}')"
76-
SPARK_VERSION_BUILD="$(grep "Spark " "$KYUUBI_HOME/RELEASE" | awk -F ' ' '{print $2}' | grep -v 'Hadoop')"
77-
HADOOP_VERSION_BUILD="$(grep "Spark Hadoop " "$KYUUBI_HOME/RELEASE" | awk -F ' ' '{print $3}')"
78-
FLINK_BUILTIN="${KYUUBI_HOME}/externals/flink-$FLINK_VERSION_BUILD"
79-
SPARK_BUILTIN="${KYUUBI_HOME}/externals/spark-$SPARK_VERSION_BUILD-bin-hadoop${HADOOP_VERSION_BUILD:0:3}"
74+
FLINK_BUILTIN="$(find "$KYUUBI_HOME/externals" -name 'flink-*' -type d | head -n 1)"
75+
SPARK_BUILTIN="$(find "$KYUUBI_HOME/externals" -name 'spark-*' -type d | head -n 1)"
8076
else
81-
MVN="${MVN:-"${KYUUBI_HOME}/build/mvn"}"
82-
KYUUBI_VERSION=$("$MVN" help:evaluate -Dexpression=project.version 2>/dev/null\
83-
| grep -v "INFO"\
84-
| grep -v "WARNING"\
85-
| tail -n 1)
86-
FLINK_VERSION_BUILD=$("$MVN" help:evaluate -Dexpression=flink.version 2>/dev/null\
87-
| grep -v "INFO"\
88-
| grep -v "WARNING"\
89-
| tail -n 1)
90-
SPARK_VERSION_BUILD=$("$MVN" help:evaluate -Dexpression=spark.version 2>/dev/null\
91-
| grep -v "INFO"\
92-
| grep -v "WARNING"\
93-
| tail -n 1)
94-
HADOOP_VERSION_BUILD=$("$MVN" help:evaluate -Dexpression=hadoop.binary.version 2>/dev/null\
95-
| grep -v "INFO"\
96-
| grep -v "WARNING"\
97-
| tail -n 1)
98-
FLINK_BUILTIN="${KYUUBI_HOME}/externals/kyuubi-download/target/flink-$FLINK_VERSION_BUILD"
99-
SPARK_BUILTIN="${KYUUBI_HOME}/externals/kyuubi-download/target/spark-$SPARK_VERSION_BUILD-bin-hadoop${HADOOP_VERSION_BUILD}"
77+
FLINK_BUILTIN="$(find "$KYUUBI_HOME/externals/kyuubi-download/target" -name 'flink-*' -type d | head -n 1)"
78+
SPARK_BUILTIN="$(find "$KYUUBI_HOME/externals/kyuubi-download/target" -name 'spark-*' -type d | head -n 1)"
10079
fi
10180

10281
export FLINK_HOME="${FLINK_HOME:-"${FLINK_BUILTIN}"}"
@@ -109,7 +88,6 @@ if [ $silent -eq 0 ]; then
10988
echo "JAVA_HOME: ${JAVA_HOME}"
11089

11190
echo "KYUUBI_HOME: ${KYUUBI_HOME}"
112-
echo "KYUUBI_VERSION: ${KYUUBI_VERSION}"
11391
echo "KYUUBI_CONF_DIR: ${KYUUBI_CONF_DIR}"
11492
echo "KYUUBI_LOG_DIR: ${KYUUBI_LOG_DIR}"
11593
echo "KYUUBI_PID_DIR: ${KYUUBI_PID_DIR}"

build/dist

Lines changed: 5 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -153,11 +153,6 @@ SPARK_VERSION=$("$MVN" help:evaluate -Dexpression=spark.version $@ 2>/dev/null\
153153
| grep -v "WARNING"\
154154
| tail -n 1)
155155

156-
SPARK_HADOOP_VERSION=$("$MVN" help:evaluate -Dexpression=hadoop.binary.version $@ 2>/dev/null\
157-
| grep -v "INFO"\
158-
| grep -v "WARNING"\
159-
| tail -n 1)
160-
161156
HADOOP_VERSION=$("$MVN" help:evaluate -Dexpression=hadoop.version $@ 2>/dev/null\
162157
| grep -v "INFO"\
163158
| grep -v "WARNING"\
@@ -175,7 +170,7 @@ if [[ "$NAME" == "none" ]]; then
175170
if [[ "$SPARK_PROVIDED" == "true" ]]; then
176171
SUFFIX=""
177172
else
178-
SUFFIX="-spark-${SPARK_VERSION:0:3}-hadoop${SPARK_HADOOP_VERSION}"
173+
SUFFIX="-spark-${SPARK_VERSION:0:3}"
179174
fi
180175
fi
181176

@@ -219,7 +214,6 @@ echo "Java $JAVA_VERSION" >> "$DISTDIR/RELEASE"
219214
echo "Scala $SCALA_VERSION" >> "$DISTDIR/RELEASE"
220215
echo "Flink $FLINK_VERSION" >> "$DISTDIR/RELEASE"
221216
echo "Spark $SPARK_VERSION" >> "$DISTDIR/RELEASE"
222-
echo "Spark Hadoop $SPARK_HADOOP_VERSION" >> "$DISTDIR/RELEASE"
223217
echo "Kyuubi Hadoop $HADOOP_VERSION" >> "$DISTDIR/RELEASE"
224218
echo "Hive $HIVE_VERSION" >> "$DISTDIR/RELEASE"
225219
echo "Build flags: $@" >> "$DISTDIR/RELEASE"
@@ -275,14 +269,14 @@ done
275269

276270
if [[ "$FLINK_PROVIDED" != "true" ]]; then
277271
# Copy flink binary dist
278-
cp -r "$KYUUBI_HOME/externals/kyuubi-download/target/flink-$FLINK_VERSION/" \
279-
"$DISTDIR/externals/flink-$FLINK_VERSION/"
272+
FLINK_BUILTIN="$(find "$KYUUBI_HOME/externals/kyuubi-download/target" -name 'flink-*' -type d)"
273+
cp -r "$FLINK_BUILTIN" "$DISTDIR/externals/"
280274
fi
281275

282276
if [[ "$SPARK_PROVIDED" != "true" ]]; then
283277
# Copy spark binary dist
284-
cp -r "$KYUUBI_HOME/externals/kyuubi-download/target/spark-$SPARK_VERSION-bin-hadoop${SPARK_HADOOP_VERSION}$HIVE_VERSION_SUFFIX/" \
285-
"$DISTDIR/externals/spark-$SPARK_VERSION-bin-hadoop${SPARK_HADOOP_VERSION}$HIVE_VERSION_SUFFIX/"
278+
SPARK_BUILTIN="$(find "$KYUUBI_HOME/externals/kyuubi-download/target" -name 'spark-*' -type d)"
279+
cp -r "$SPARK_BUILTIN" "$DISTDIR/externals/"
286280
fi
287281

288282
# Copy license files

pom.xml

Lines changed: 1 addition & 16 deletions
Original file line number | Diff line number | Diff line change
@@ -110,7 +110,6 @@
110110
<guava.version>30.1-jre</guava.version>
111111
<guava.failureaccess.version>1.0.1</guava.failureaccess.version>
112112
<hadoop.version>3.3.1</hadoop.version>
113-
<hadoop.binary.version>3.2</hadoop.binary.version>
114113
<hive.version>2.3.9</hive.version>
115114
<hudi.version>0.10.0</hudi.version>
116115
<iceberg.name>iceberg-spark-runtime-3.2_${scala.binary.version}</iceberg.name>
@@ -138,7 +137,7 @@
138137
`delta.version`, `iceberg.name`
139138
-->
140139
<spark.version>3.2.1</spark.version>
141-
<spark.archive.name>spark-${spark.version}-bin-hadoop${hadoop.binary.version}.tgz</spark.archive.name>
140+
<spark.archive.name>spark-${spark.version}-bin-hadoop3.2.tgz</spark.archive.name>
142141
<spark.archive.mirror>https://archive.apache.org/dist/spark/spark-${spark.version}</spark.archive.mirror>
143142
<spark.archive.download.skip>false</spark.archive.download.skip>
144143
<swagger.version>2.1.11</swagger.version>
@@ -1901,20 +1900,6 @@
19011900
</properties>
19021901
</profile>
19031902

1904-
<profile>
1905-
<id>spark-hadoop-2.7</id>
1906-
<properties>
1907-
<hadoop.binary.version>2.7</hadoop.binary.version>
1908-
</properties>
1909-
</profile>
1910-
1911-
<profile>
1912-
<id>spark-hadoop-3.2</id>
1913-
<properties>
1914-
<hadoop.binary.version>3.2</hadoop.binary.version>
1915-
</properties>
1916-
</profile>
1917-
19181903
<profile>
19191904
<id>spark-provided</id>
19201905
<properties>

0 commit comments

Comments (0)