diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index b9e56fd205498..00d17ff077feb 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -26,6 +26,7 @@ pipeline {
         timeout (time: 24, unit: 'HOURS')
         timestamps()
         checkoutToSubdirectory('src')
+        parallelsAlwaysFailFast()
     }
 
     environment {
@@ -89,238 +90,242 @@ pipeline {
             }
         }
 
-        // This is an optional stage which runs only when there's a change in
-        // C++/C++ build/platform.
-        // This stage serves as a means of cross platform validation, which is
-        // really needed to ensure that any C++ related/platform change doesn't
-        // break the Hadoop build on Centos 7.
-        stage ('precommit-run Centos 7') {
-            environment {
-                SOURCEDIR = "${WORKSPACE}/centos-7/src"
-                PATCHDIR = "${WORKSPACE}/centos-7/out"
-                DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_centos_7"
-                IS_OPTIONAL = 1
-            }
+        stage ('platform validation') {
+            parallel {
+                // This is an optional stage which runs only when there's a change in
+                // C++/C++ build/platform.
+                // This stage serves as a means of cross platform validation, which is
+                // really needed to ensure that any C++ related/platform change doesn't
+                // break the Hadoop build on Centos 7.
+                stage ('precommit-run Centos 7') {
+                    environment {
+                        SOURCEDIR = "${WORKSPACE}/centos-7/src"
+                        PATCHDIR = "${WORKSPACE}/centos-7/out"
+                        DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_centos_7"
+                        IS_OPTIONAL = 1
+                    }
 
-            steps {
-                withCredentials(
-                    [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
-                        passwordVariable: 'GITHUB_TOKEN',
-                        usernameVariable: 'GITHUB_USER'),
-                    usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
-                        passwordVariable: 'JIRA_PASSWORD',
-                        usernameVariable: 'JIRA_USER')]) {
-                        sh '''#!/usr/bin/env bash
-
-                        chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
-                        "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
-                        '''
-                }
-            }
+                    steps {
+                        withCredentials(
+                            [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
+                                passwordVariable: 'GITHUB_TOKEN',
+                                usernameVariable: 'GITHUB_USER'),
+                            usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
+                                passwordVariable: 'JIRA_PASSWORD',
+                                usernameVariable: 'JIRA_USER')]) {
+                                sh '''#!/usr/bin/env bash
+
+                                chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
+                                "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
+                                '''
+                        }
+                    }
 
-            post {
-                // Since this is an optional platform, we want to copy the artifacts
-                // and archive it only if the build fails, to help with debugging.
-                failure {
-                    sh '''#!/usr/bin/env bash
+                    post {
+                        // Since this is an optional platform, we want to copy the artifacts
+                        // and archive it only if the build fails, to help with debugging.
+                        failure {
+                            sh '''#!/usr/bin/env bash
 
-                    cp -Rp "${WORKSPACE}/centos-7/out" "${WORKSPACE}"
-                    '''
-                    archiveArtifacts "out/**"
-                }
+                            cp -Rp "${WORKSPACE}/centos-7/out" "${WORKSPACE}"
+                            '''
+                            archiveArtifacts "out/**"
+                        }
 
-                cleanup() {
-                    script {
-                        sh '''#!/usr/bin/env bash
+                        cleanup() {
+                            script {
+                                sh '''#!/usr/bin/env bash
 
-                        chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
-                        "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
-                        '''
+                                chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
+                                "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
+                                '''
+                            }
+                        }
                     }
                 }
-            }
-        }
 
-        // This is an optional stage which runs only when there's a change in
-        // C++/C++ build/platform.
-        // This stage serves as a means of cross platform validation, which is
-        // really needed to ensure that any C++ related/platform change doesn't
-        // break the Hadoop build on Centos 8.
-        stage ('precommit-run Centos 8') {
-            environment {
-                SOURCEDIR = "${WORKSPACE}/centos-8/src"
-                PATCHDIR = "${WORKSPACE}/centos-8/out"
-                DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_centos_8"
-                IS_OPTIONAL = 1
-            }
+                // This is an optional stage which runs only when there's a change in
+                // C++/C++ build/platform.
+                // This stage serves as a means of cross platform validation, which is
+                // really needed to ensure that any C++ related/platform change doesn't
+                // break the Hadoop build on Centos 8.
+                stage ('precommit-run Centos 8') {
+                    environment {
+                        SOURCEDIR = "${WORKSPACE}/centos-8/src"
+                        PATCHDIR = "${WORKSPACE}/centos-8/out"
+                        DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_centos_8"
+                        IS_OPTIONAL = 1
+                    }
 
-            steps {
-                withCredentials(
-                    [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
-                        passwordVariable: 'GITHUB_TOKEN',
-                        usernameVariable: 'GITHUB_USER'),
-                    usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
-                        passwordVariable: 'JIRA_PASSWORD',
-                        usernameVariable: 'JIRA_USER')]) {
-                        sh '''#!/usr/bin/env bash
-
-                        chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
-                        "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
-                        '''
-                }
-            }
+                    steps {
+                        withCredentials(
+                            [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
+                                passwordVariable: 'GITHUB_TOKEN',
+                                usernameVariable: 'GITHUB_USER'),
+                            usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
+                                passwordVariable: 'JIRA_PASSWORD',
+                                usernameVariable: 'JIRA_USER')]) {
+                                sh '''#!/usr/bin/env bash
+
+                                chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
+                                "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
+                                '''
+                        }
+                    }
 
-            post {
-                // Since this is an optional platform, we want to copy the artifacts
-                // and archive it only if the build fails, to help with debugging.
-                failure {
-                    sh '''#!/usr/bin/env bash
+                    post {
+                        // Since this is an optional platform, we want to copy the artifacts
+                        // and archive it only if the build fails, to help with debugging.
+                        failure {
+                            sh '''#!/usr/bin/env bash
 
-                    cp -Rp "${WORKSPACE}/centos-8/out" "${WORKSPACE}"
-                    '''
-                    archiveArtifacts "out/**"
-                }
+                            cp -Rp "${WORKSPACE}/centos-8/out" "${WORKSPACE}"
+                            '''
+                            archiveArtifacts "out/**"
+                        }
 
-                cleanup() {
-                    script {
-                        sh '''#!/usr/bin/env bash
+                        cleanup() {
+                            script {
+                                sh '''#!/usr/bin/env bash
 
-                        chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
-                        "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
-                        '''
+                                chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
+                                "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
+                                '''
+                            }
+                        }
                    }
                 }
-            }
-        }
 
-        // This is an optional stage which runs only when there's a change in
-        // C++/C++ build/platform.
-        // This stage serves as a means of cross platform validation, which is
-        // really needed to ensure that any C++ related/platform change doesn't
-        // break the Hadoop build on Debian 10.
-        stage ('precommit-run Debian 10') {
-            environment {
-                SOURCEDIR = "${WORKSPACE}/debian-10/src"
-                PATCHDIR = "${WORKSPACE}/debian-10/out"
-                DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_debian_10"
-                IS_OPTIONAL = 1
-            }
+                // This is an optional stage which runs only when there's a change in
+                // C++/C++ build/platform.
+                // This stage serves as a means of cross platform validation, which is
+                // really needed to ensure that any C++ related/platform change doesn't
+                // break the Hadoop build on Debian 10.
+                stage ('precommit-run Debian 10') {
+                    environment {
+                        SOURCEDIR = "${WORKSPACE}/debian-10/src"
+                        PATCHDIR = "${WORKSPACE}/debian-10/out"
+                        DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_debian_10"
+                        IS_OPTIONAL = 1
+                    }
 
-            steps {
-                withCredentials(
-                    [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
-                        passwordVariable: 'GITHUB_TOKEN',
-                        usernameVariable: 'GITHUB_USER'),
-                    usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
-                        passwordVariable: 'JIRA_PASSWORD',
-                        usernameVariable: 'JIRA_USER')]) {
-                        sh '''#!/usr/bin/env bash
-
-                        chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
-                        "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
-                        '''
-                }
-            }
+                    steps {
+                        withCredentials(
+                            [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
+                                passwordVariable: 'GITHUB_TOKEN',
+                                usernameVariable: 'GITHUB_USER'),
+                            usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
+                                passwordVariable: 'JIRA_PASSWORD',
+                                usernameVariable: 'JIRA_USER')]) {
+                                sh '''#!/usr/bin/env bash
+
+                                chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
+                                "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
+                                '''
+                        }
+                    }
 
-            post {
-                // Since this is an optional platform, we want to copy the artifacts
-                // and archive it only if the build fails, to help with debugging.
-                failure {
-                    sh '''#!/usr/bin/env bash
+                    post {
+                        // Since this is an optional platform, we want to copy the artifacts
+                        // and archive it only if the build fails, to help with debugging.
+                        failure {
+                            sh '''#!/usr/bin/env bash
 
-                    cp -Rp "${WORKSPACE}/debian-10/out" "${WORKSPACE}"
-                    '''
-                    archiveArtifacts "out/**"
-                }
+                            cp -Rp "${WORKSPACE}/debian-10/out" "${WORKSPACE}"
+                            '''
+                            archiveArtifacts "out/**"
+                        }
 
-                cleanup() {
-                    script {
-                        sh '''#!/usr/bin/env bash
+                        cleanup() {
+                            script {
+                                sh '''#!/usr/bin/env bash
 
-                        chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
-                        "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
-                        '''
+                                chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
+                                "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
+                                '''
+                            }
+                        }
                     }
                 }
-            }
-        }
-
-        // We want to use Ubuntu Focal as our main CI and thus, this stage
-        // isn't optional (runs for all the PRs).
-        stage ('precommit-run Ubuntu focal') {
-            environment {
-                SOURCEDIR = "${WORKSPACE}/ubuntu-focal/src"
-                PATCHDIR = "${WORKSPACE}/ubuntu-focal/out"
-                DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile"
-                IS_OPTIONAL = 0
-            }
-
-            steps {
-                withCredentials(
-                    [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
-                        passwordVariable: 'GITHUB_TOKEN',
-                        usernameVariable: 'GITHUB_USER'),
-                    usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
-                        passwordVariable: 'JIRA_PASSWORD',
-                        usernameVariable: 'JIRA_USER')]) {
-                        sh '''#!/usr/bin/env bash
-
-                        chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
-                        "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
-                        '''
-                }
-            }
+                // We want to use Ubuntu Focal as our main CI and thus, this stage
+                // isn't optional (runs for all the PRs).
+                stage ('precommit-run Ubuntu focal') {
+                    environment {
+                        SOURCEDIR = "${WORKSPACE}/ubuntu-focal/src"
+                        PATCHDIR = "${WORKSPACE}/ubuntu-focal/out"
+                        DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile"
+                        IS_OPTIONAL = 0
+                    }
 
-            post {
-                always {
-                    script {
-                        // Publish status if it was missed (YETUS-1059)
+                    steps {
                         withCredentials(
-                            [usernamePassword(credentialsId: '683f5dcf-5552-4b28-9fb1-6a6b77cf53dd',
-                                passwordVariable: 'GITHUB_TOKEN',
-                                usernameVariable: 'GITHUB_USER')]) {
-                            sh '''#!/usr/bin/env bash
-
-                            # Copy the artifacts of Ubuntu focal build to workspace
-                            cp -Rp "${WORKSPACE}/ubuntu-focal/out" "${WORKSPACE}"
-
-                            # Send Github status
-                            chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
-                            "${SOURCEDIR}/dev-support/jenkins.sh" github_status_recovery
-                            '''
+                            [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
+                                passwordVariable: 'GITHUB_TOKEN',
+                                usernameVariable: 'GITHUB_USER'),
+                            usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
+                                passwordVariable: 'JIRA_PASSWORD',
+                                usernameVariable: 'JIRA_USER')]) {
+                                sh '''#!/usr/bin/env bash
+
+                                chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
+                                "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
+                                '''
                         }
+                    }
 
-                        // YETUS output
-                        archiveArtifacts "out/**"
-
-                        // Publish the HTML report so that it can be looked at
-                        // Has to be relative to WORKSPACE.
-                        publishHTML (target: [
-                            allowMissing: true,
-                            keepAll: true,
-                            alwaysLinkToLastBuild: true,
-                            // Has to be relative to WORKSPACE
-                            reportDir: "out",
-                            reportFiles: 'report.html',
-                            reportName: 'Yetus Report'
-                        ])
-
-                        // Publish JUnit results
-                        try {
-                            junit "${SOURCEDIR}/**/target/surefire-reports/*.xml"
-                        } catch(e) {
-                            echo 'junit processing: ' + e.toString()
+                    post {
+                        always {
+                            script {
+                                // Publish status if it was missed (YETUS-1059)
+                                withCredentials(
+                                    [usernamePassword(credentialsId: '683f5dcf-5552-4b28-9fb1-6a6b77cf53dd',
+                                        passwordVariable: 'GITHUB_TOKEN',
+                                        usernameVariable: 'GITHUB_USER')]) {
+                                    sh '''#!/usr/bin/env bash
+
+                                    # Copy the artifacts of Ubuntu focal build to workspace
+                                    cp -Rp "${WORKSPACE}/ubuntu-focal/out" "${WORKSPACE}"
+
+                                    # Send Github status
+                                    chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
+                                    "${SOURCEDIR}/dev-support/jenkins.sh" github_status_recovery
+                                    '''
+                                }
+
+                                // YETUS output
+                                archiveArtifacts "out/**"
+
+                                // Publish the HTML report so that it can be looked at
+                                // Has to be relative to WORKSPACE.
+                                publishHTML (target: [
+                                    allowMissing: true,
+                                    keepAll: true,
+                                    alwaysLinkToLastBuild: true,
+                                    // Has to be relative to WORKSPACE
+                                    reportDir: "out",
+                                    reportFiles: 'report.html',
+                                    reportName: 'Yetus Report'
+                                ])
+
+                                // Publish JUnit results
+                                try {
+                                    junit "${SOURCEDIR}/**/target/surefire-reports/*.xml"
+                                } catch(e) {
+                                    echo 'junit processing: ' + e.toString()
+                                }
+                            }
                         }
-                    }
-                }
 
-                cleanup() {
-                    script {
-                        sh '''#!/usr/bin/env bash
+                        cleanup() {
+                            script {
+                                sh '''#!/usr/bin/env bash
 
-                        chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
-                        "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
-                        '''
+                                chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
+                                "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
+                                '''
+                            }
+                        }
                     }
                 }
             }
@@ -340,4 +345,4 @@ pipeline {
             }
         }
     }
-}
+}
\ No newline at end of file
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/lib/x-platform/utils.h b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/lib/x-platform/utils.h
index 06c608ba969c8..3fe126373c36a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/lib/x-platform/utils.h
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/lib/x-platform/utils.h
@@ -16,6 +16,7 @@
  * limitations under the License.
  */
 
+
 #ifndef NATIVE_LIBHDFSPP_LIB_CROSS_PLATFORM_UTILS
 #define NATIVE_LIBHDFSPP_LIB_CROSS_PLATFORM_UTILS
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/bad_datanode_test.cc b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/bad_datanode_test.cc
index 973212647e764..d6dcf96f9f705 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/bad_datanode_test.cc
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/bad_datanode_test.cc
@@ -31,7 +31,6 @@
 #include
 #include
 
-
 using hadoop::common::TokenProto;
 using hadoop::hdfs::DatanodeInfoProto;
 using hadoop::hdfs::DatanodeIDProto;