Skip to content

Commit 9986df3

Browse files

Committed: HBASE-24049 Use hadoop-2.10.0 for the "packaging and integration" check

1 parent d318ca1 · commit 9986df3

File tree

3 files changed

+16
-8
lines changed

3 files changed

+16
-8
lines changed

dev-support/Jenkinsfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -121,7 +121,7 @@ pipeline {
121121
}
122122
stage ('hadoop 2 cache') {
123123
environment {
124-
HADOOP2_VERSION="2.8.5"
124+
HADOOP2_VERSION="2.10.0"
125125
}
126126
steps {
127127
// directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(

dev-support/hbase_nightly_pseudo-distributed-test.sh

Lines changed: 14 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818

1919
set -e
2020
function usage {
21-
echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar /path/to/mapred/executable"
21+
echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/share/hadoop/yarn/timelineservice /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar /path/to/mapred/executable"
2222
echo ""
2323
echo " --zookeeper-data /path/to/use Where the embedded zookeeper instance should write its data."
2424
echo " defaults to 'zk-data' in the working-dir."
@@ -67,9 +67,10 @@ if [ $# -lt 5 ]; then
6767
fi
6868
component_install="$(cd "$(dirname "$1")"; pwd)/$(basename "$1")"
6969
hadoop_exec="$(cd "$(dirname "$2")"; pwd)/$(basename "$2")"
70-
yarn_server_tests_test_jar="$(cd "$(dirname "$3")"; pwd)/$(basename "$3")"
71-
mapred_jobclient_test_jar="$(cd "$(dirname "$4")"; pwd)/$(basename "$4")"
72-
mapred_exec="$(cd "$(dirname "$5")"; pwd)/$(basename "$5")"
70+
timeline_service_dir="$(cd "$(dirname "$3")"; pwd)/$(basename "$3")"
71+
yarn_server_tests_test_jar="$(cd "$(dirname "$4")"; pwd)/$(basename "$4")"
72+
mapred_jobclient_test_jar="$(cd "$(dirname "$5")"; pwd)/$(basename "$5")"
73+
mapred_exec="$(cd "$(dirname "$6")"; pwd)/$(basename "$6")"
7374

7475
if [ ! -x "${hadoop_exec}" ]; then
7576
echo "hadoop cli does not appear to be executable." >&2
@@ -285,18 +286,25 @@ echo "Starting up Hadoop"
285286
if [ "${hadoop_version%.*.*}" -gt 2 ]; then
286287
"${mapred_exec}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
287288
else
288-
HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
289+
HADOOP_CLASSPATH="${timeline_service_dir}/*:${timeline_service_dir}/lib/*:${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
289290
fi
290291

291292
echo "$!" > "${working_dir}/hadoop.pid"
292293

294+
# 2 + 4 + 8 + .. + 256 ~= 8.5 minutes.
295+
max_sleep_time=512
293296
sleep_time=2
294-
until [ -s "${working_dir}/hbase-conf/core-site.xml" ]; do
297+
until [[ -s "${working_dir}/hbase-conf/core-site.xml" || "${sleep_time}" -ge "${max_sleep_time}" ]]; do
295298
printf '\twaiting for Hadoop to finish starting up.\n'
296299
sleep "${sleep_time}"
297300
sleep_time="$((sleep_time*2))"
298301
done
299302

303+
if [ "${sleep_time}" -ge "${max_sleep_time}" ] ; then
304+
echo "time out waiting for Hadoop to startup" >&2
305+
exit 1
306+
fi
307+
300308
if [ "${hadoop_version%.*.*}" -gt 2 ]; then
301309
echo "Verifying configs"
302310
"${hadoop_exec}" --config "${working_dir}/hbase-conf/" conftest

dev-support/hbase_nightly_source-artifact.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -182,7 +182,7 @@ if mvn -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" cl
182182
fi
183183
done
184184
fi
185-
echo "Building a binary tarball from the source tarball failed. see srctarball_install.log for details."
185+
echo "Building a binary tarball from the source tarball failed. see ${working_dir}/srctarball_install.log for details."
186186
# Copy up the rat.txt to the working dir so available in build archive in case rat complaints.
187187
# rat.txt can be under any module target dir... copy them all up renaming them to include parent dir as we go.
188188
find ${unpack_dir} -name rat.txt -type f | while IFS= read -r NAME; do cp -v "$NAME" "${working_dir}/${NAME//\//_}"; done

0 commit comments

Comments (0)