
Commit 4079647

HBASE-27293 Remove jenkins and personality scripts support for 1.x (#4690)
Signed-off-by: GeorryHuang <[email protected]>
(cherry picked from commit bffae99)
1 parent 2941b8e commit 4079647
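
Note on the guard pattern being removed: branch-1 support in this Jenkinsfile was expressed with declarative "when" conditions, either when { branch 'branch-1*' } on the jdk7 stage or when { not { branch 'branch-1*' } } on the Hadoop 3 stages. A minimal sketch of the before/after shape, with placeholder stage bodies rather than the real nightly steps:

    pipeline {
      agent any
      stages {
        stage ('yetus jdk8 hadoop2 checks') {
          // after this commit the stage is limited to 2.x branches
          when { branch 'branch-2*' }
          steps { echo 'run yetus against a Hadoop 2 profile' }
        }
        stage ('yetus jdk11 hadoop3 checks') {
          // before this commit: when { not { branch 'branch-1*' } }
          // the guard is simply dropped, since 1.x branches no longer use this file
          steps { echo 'run yetus against a Hadoop 3 profile' }
        }
      }
    }

The full diff below deletes the dedicated jdk7 stage, drops the branch-1 guards, and narrows the jdk8/hadoop2 stage to branch-2*.
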

File tree

2 files changed: +71, -265 lines

dev-support/Jenkinsfile

Lines changed: 59 additions & 173 deletions
@@ -34,7 +34,6 @@ pipeline {
     YETUS_RELEASE = '0.12.0'
     // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
     OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
-    OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
     OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
     OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
     OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
@@ -186,7 +185,6 @@ pipeline {
         // stash with given name for all tests we might run, so that we can unstash all of them even if
         // we skip some due to e.g. branch-specific JDK or Hadoop support
         stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
-        stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
         stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
         stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
         stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
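
The stash steps in the hunk above are deliberate placeholders. Each result stash is created up front with allowEmpty: true and an includes pattern that matches nothing, so the final reporting stage can unstash every result name even when a branch-specific stage never ran; a stage that does run overwrites its stash with the real comment file in its post/always block. A condensed sketch of the pattern, with one result name shown and the ${OUTPUT_DIR_RELATIVE_*} paths written out literally, taken out of its surrounding stages:

    // up-front placeholder: succeeds even though nothing matches the pattern
    stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "output-jdk8-hadoop2/doesn't-match"

    // inside the check stage's post { always { ... } }: replace the placeholder with the verdict
    stash name: 'jdk8-hadoop2-result', includes: "output-jdk8-hadoop2/commentfile"

    // in the final reporting stage: always safe, whether or not the check stage ran
    unstash 'jdk8-hadoop2-result'
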
@@ -296,121 +294,14 @@ pipeline {
             }
           }
         }
-        stage ('yetus jdk7 checks') {
+        stage ('yetus jdk8 hadoop2 checks') {
           agent {
             node {
               label 'hbase'
             }
           }
           when {
-            branch 'branch-1*'
-          }
-          environment {
-            BASEDIR = "${env.WORKSPACE}/component"
-            TESTS = "${env.DEEP_CHECKS}"
-            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
-            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
-            SET_JAVA_HOME = "/usr/lib/jvm/java-7"
-          }
-          steps {
-            // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
-              echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
-              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
-            '''
-            unstash 'yetus'
-            dir('component') {
-              checkout scm
-            }
-            sh '''#!/usr/bin/env bash
-              set -e
-              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
-              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
-              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
-              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
-            '''
-            script {
-              def ret = sh(
-                returnStatus: true,
-                script: '''#!/usr/bin/env bash
-                  set -e
-                  declare -i status=0
-                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
-                    echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
-                  else
-                    echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
-                    status=1
-                  fi
-                  echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_20Nightly_20Build_20Report/]" >> "${OUTPUT_DIR}/commentfile"
-                  exit "${status}"
-                '''
-              )
-              if (ret != 0) {
-                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
-                // test output. See HBASE-26339 for more details.
-                currentBuild.result = 'UNSTABLE'
-              }
-            }
-          }
-          post {
-            always {
-              stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
-              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
-              // zip surefire reports.
-              sh '''#!/bin/bash -e
-                if [ -d "${OUTPUT_DIR}/archiver" ]; then
-                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
-                  if [[ 0 -ne ${count} ]]; then
-                    echo "zipping ${count} archived files"
-                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
-                  else
-                    echo "No archived files, skipping compressing."
-                  fi
-                else
-                  echo "No archiver directory, skipping compressing."
-                fi
-              '''
-              sshPublisher(publishers: [
-                sshPublisherDesc(configName: 'Nightlies',
-                  transfers: [
-                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
-                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
-                    )
-                  ]
-                )
-              ])
-              // remove the big test logs zip file, store the nightlies url in test_logs.html
-              sh '''#!/bin/bash -e
-                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
-                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
-                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
-                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
-                else
-                  echo "No test_logs.zip, skipping"
-                fi
-              '''
-              // Has to be relative to WORKSPACE.
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
-              archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
-              publishHTML target: [
-                allowMissing : true,
-                keepAll : true,
-                alwaysLinkToLastBuild: true,
-                // Has to be relative to WORKSPACE.
-                reportDir : "${env.OUTPUT_DIR_RELATIVE}",
-                reportFiles : 'console-report.html',
-                reportName : 'JDK7 Nightly Build Report'
-              ]
-            }
-          }
-        }
-        stage ('yetus jdk8 hadoop2 checks') {
-          agent {
-            node {
-              label 'hbase'
-            }
+            branch 'branch-2*'
           }
           environment {
             BASEDIR = "${env.WORKSPACE}/component"
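
The roughly 110 deleted lines in the hunk above are one complete per-JDK check stage. Boiled down (illustrative excerpt only; the real stage also gathered machine stats, zipped surefire output, published logs to nightlies, and rendered an HTML report, all visible in the removed lines, and its environment variables came from the removed environment block), the shape of such a stage was:

    stage ('yetus jdk7 checks') {
      when { branch 'branch-1*' }
      steps {
        // write a failing comment first, in case the stage dies before the check finishes
        sh 'echo "(x) {color:red}-1 jdk7 checks{color}" > "${OUTPUT_DIR}/commentfile"'
        script {
          // returnStatus lets the pipeline downgrade a failure instead of aborting the build
          def ret = sh(returnStatus: true, script: '"${BASEDIR}/dev-support/hbase_nightly_yetus.sh"')
          if (ret != 0) {
            // UNSTABLE (not FAILURE) so the later publish steps still run; see HBASE-26339
            currentBuild.result = 'UNSTABLE'
          }
        }
      }
      post {
        always {
          // the comment file stashed here is what the final stage posts to JIRA
          stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
          junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
        }
      }
    }
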
@@ -519,11 +410,6 @@ pipeline {
               label 'hbase'
             }
           }
-          when {
-            not {
-              branch 'branch-1*'
-            }
-          }
           environment {
             BASEDIR = "${env.WORKSPACE}/component"
             TESTS = "${env.DEEP_CHECKS}"
@@ -633,11 +519,6 @@ pipeline {
               label 'hbase'
             }
           }
-          when {
-            not {
-              branch 'branch-1*'
-            }
-          }
           environment {
             BASEDIR = "${env.WORKSPACE}/component"
             TESTS = "${env.DEEP_CHECKS}"
@@ -814,7 +695,7 @@ pipeline {
           '''
           unstash 'hadoop-2'
           sh '''#!/bin/bash -xe
-            if [[ "${BRANCH}" = branch-2* ]] || [[ "${BRANCH}" = branch-1* ]]; then
+            if [[ "${BRANCH}" = branch-2* ]]; then
              echo "Attempting to use run an instance on top of Hadoop 2."
              artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
              tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
@@ -838,44 +719,40 @@ pipeline {
           '''
           unstash 'hadoop-3'
           sh '''#!/bin/bash -e
-            if [[ "${BRANCH}" = branch-1* ]]; then
-              echo "Skipping to run against Hadoop 3 for branch ${BRANCH}"
-            else
-              echo "Attempting to use run an instance on top of Hadoop 3."
-              artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
-              tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
-              if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
-                  --single-process \
-                  --working-dir output-integration/hadoop-3 \
-                  --hbase-client-install hbase-client \
-                  hbase-install \
-                  hadoop-3/bin/hadoop \
-                  hadoop-3/share/hadoop/yarn/timelineservice \
-                  hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
-                  hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
-                  hadoop-3/bin/mapred \
-                  >output-integration/hadoop-3.log 2>&1 ; then
-                echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
-                exit 2
-              fi
-              echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
-              if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
-                  --single-process \
-                  --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
-                  --working-dir output-integration/hadoop-3-shaded \
-                  --hbase-client-install hbase-client \
-                  hbase-install \
-                  hadoop-3/bin/hadoop \
-                  hadoop-3/share/hadoop/yarn/timelineservice \
-                  hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
-                  hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
-                  hadoop-3/bin/mapred \
-                  >output-integration/hadoop-3-shaded.log 2>&1 ; then
-                echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
-                exit 2
-              fi
-              echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
+            echo "Attempting to use run an instance on top of Hadoop 3."
+            artifact=$(ls -1 "${WORKSPACE}"/hadoop-3*.tar.gz | head -n 1)
+            tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
+            if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
+                --single-process \
+                --working-dir output-integration/hadoop-3 \
+                --hbase-client-install hbase-client \
+                hbase-install \
+                hadoop-3/bin/hadoop \
+                hadoop-3/share/hadoop/yarn/timelineservice \
+                hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+                hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                hadoop-3/bin/mapred \
+                >output-integration/hadoop-3.log 2>&1 ; then
+              echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
+              exit 2
+            fi
+            echo "Attempting to use run an instance on top of Hadoop 3, relying on the Hadoop client artifacts for the example client program."
+            if ! "${BASEDIR}/dev-support/hbase_nightly_pseudo-distributed-test.sh" \
+                --single-process \
+                --hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
+                --working-dir output-integration/hadoop-3-shaded \
+                --hbase-client-install hbase-client \
+                hbase-install \
+                hadoop-3/bin/hadoop \
+                hadoop-3/share/hadoop/yarn/timelineservice \
+                hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
+                hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                hadoop-3/bin/mapred \
+                >output-integration/hadoop-3-shaded.log 2>&1 ; then
+              echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
+              exit 2
             fi
+            echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
           '''
         }
         post {
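
Taken together with the previous hunk, the client integration step no longer special-cases 1.x: Hadoop 2 is exercised only on branch-2*, and the Hadoop 3 runs are now unconditional (the large block above is mostly a re-indent after dropping the branch-1 guard). A condensed sketch of the resulting control flow; note the real Jenkinsfile uses two separate sh steps and long argument lists, both elided here:

    sh '''#!/bin/bash -e
      if [[ "${BRANCH}" = branch-2* ]]; then
        echo "running the pseudo-distributed client test against the Hadoop 2 tarball"
        # ... hbase_nightly_pseudo-distributed-test.sh invocation elided ...
      fi
      echo "running the pseudo-distributed client test against Hadoop 3, then against Hadoop's shaded client"
      # ... two hbase_nightly_pseudo-distributed-test.sh invocations elided ...
      echo "(/) {color:green}+1 client integration test{color}" >output-integration/commentfile
    '''
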
@@ -916,14 +793,12 @@ pipeline {
         script {
           try {
             unstash 'general-result'
-            unstash 'jdk7-result'
             unstash 'jdk8-hadoop2-result'
             unstash 'jdk8-hadoop3-result'
             unstash 'jdk11-hadoop3-result'
             unstash 'srctarball-result'
             sh "printenv"
             def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
-                           "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
                            "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
                            "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
                            "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
@@ -948,8 +823,14 @@ pipeline {
             echo "[INFO] Comment:"
             echo comment
             echo ""
-            echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
-            getJirasToComment(currentBuild).each { currentIssue ->
+            echo "[DEBUG] checking to see if feature branch"
+            def jiras = getJirasToComment(env.BRANCH_NAME, [])
+            if (jiras.isEmpty()) {
+              echo "[DEBUG] non-feature branch, checking change messages for jira keys."
+              echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
+              jiras = getJirasToCommentFromChangesets(currentBuild)
+            }
+            jiras.each { currentIssue ->
               jiraComment issueKey: currentIssue, body: comment
             }
           } catch (Exception exception) {
@@ -962,7 +843,7 @@ pipeline {
 }
 import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
 @NonCPS
-List<String> getJirasToComment(RunWrapper thisBuild) {
+List<String> getJirasToCommentFromChangesets(RunWrapper thisBuild) {
   def seenJiras = []
   thisBuild.changeSets.each { cs ->
     cs.getItems().each { change ->
@@ -972,16 +853,21 @@ List<String> getJirasToComment(RunWrapper thisBuild) {
       echo " ${change.commitId}"
       echo " ${change.author}"
       echo ""
-      msg.eachMatch("HBASE-[0-9]+") { currentIssue ->
-        echo "[DEBUG] found jira key: ${currentIssue}"
-        if (currentIssue in seenJiras) {
-          echo "[DEBUG] already commented on ${currentIssue}."
-        } else {
-          echo "[INFO] commenting on ${currentIssue}."
-          seenJiras << currentIssue
-        }
-      }
+      seenJiras = getJirasToComment(msg, seenJiras)
     }
   }
   return seenJiras
 }
+@NonCPS
+List<String> getJirasToComment(CharSequence source, List<String> seen) {
+  source.eachMatch("HBASE-[0-9]+") { currentIssue ->
+    echo "[DEBUG] found jira key: ${currentIssue}"
+    if (currentIssue in seen) {
+      echo "[DEBUG] already commented on ${currentIssue}."
+    } else {
+      echo "[INFO] commenting on ${currentIssue}."
+      seen << currentIssue
+    }
+  }
+  return seen
+}
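
The helper refactor above is what lets the reporting stage comment on feature branches: getJirasToComment(CharSequence, List) now scans any string for HBASE-NNNN keys, and getJirasToCommentFromChangesets(RunWrapper) applies it to each commit message in the build's changesets. The reporting stage first tries the branch name and only falls back to changeset messages when that yields nothing. Illustrative calls follow; the branch names are made up for the example, the echo step only exists inside the pipeline, and the results shown are what the HBASE-[0-9]+ matcher would produce:

    // a feature branch named after its JIRA yields the key directly
    getJirasToComment('HBASE-27293-some-feature', [])   // -> ['HBASE-27293']

    // a release branch name carries no key, so the caller falls back to changesets
    getJirasToComment('branch-2.5', [])                  // -> []
    // ... in that case the stage calls getJirasToCommentFromChangesets(currentBuild)
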
