sync #12 (Merged)

37 commits
db2c320
HDFS-16423. Balancer should not get blocks on stale storages (#3883)
liubingxing Jan 19, 2022
7c97c0f
HADOOP-18084. ABFS: Add testfilePath while verifying test contents ar…
anmolanmol1234 Jan 19, 2022
dae33cf
YARN-11065. Bump follow-redirects from 1.13.3 to 1.14.7 in hadoop-yar…
dependabot[bot] Jan 20, 2022
15b820c
HDFS-16402. Improve HeartbeatManager logic to avoid incorrect stats. …
tomscut Jan 24, 2022
5ef335d
HDFS-16430. Add validation to maximum blocks in EC group when adding …
cndaimin Jan 24, 2022
795a5ef
HADOOP-17593. hadoop-huaweicloud and hadoop-cloud-storage to remove l…
zhongjun2 Mar 29, 2021
b795f6f
HADOOP-18094. Disable S3A auditing by default.
steveloughran Jan 24, 2022
3ed3c74
YARN-11015. Decouple queue capacity with ability to run OPPORTUNISTIC…
afchung Jan 24, 2022
d699389
HDFS-16403. Improve FUSE IO performance by supporting FUSE parameter …
cndaimin Jan 25, 2022
43153e8
HDFS-16428. Source path with storagePolicy cause wrong typeConsumed w…
ThinkerLei Jan 25, 2022
94b884a
HDFS-16262. Async refresh of cached locations in DFSInputStream (#3527)
bbeaudreault Jan 25, 2022
8c7c49d
HDFS-16401.Remove the worthless DatasetVolumeChecker#numAsyncDatasetC…
jianghuazhu Jan 25, 2022
0d17b62
HADOOP-18093. Better exception handling for testFileStatusOnMountLink…
xinglin Jan 25, 2022
4b26635
YARN-11034. Add enhanced headroom in AllocateResponse (#3766)
minni31 Jan 25, 2022
4faac58
HADOOP-18089. Test coverage for Async profiler servlets (#3913)
virajjasani Jan 26, 2022
c2ff390
HDFS-16398. Reconfig block report parameters for datanode (#3831)
tomscut Jan 26, 2022
6136d63
HDFS-16427. Add debug log for BlockManager#chooseExcessRedundancyStri…
tomscut Jan 27, 2022
e17c96a
HDFS-16429. Add DataSetLockManager to manage fine-grain locks for FsD…
MingXiangLi Jan 27, 2022
1c01944
YARN-11068. Exclude transitive log4j2 dependency coming from solr 8. …
jojochuang Jan 27, 2022
02f6bad
Revert "YARN-11068. Exclude transitive log4j2 dependency coming from …
aajisaka Jan 27, 2022
9cb535c
YARN-10561. Upgrade node.js to 12.22.1 and yarn to 1.22.5 in YARN app…
aajisaka Jan 28, 2022
39cad5f
HDFS-16169. Fix TestBlockTokenWithDFSStriped#testEnd2End failure (#3850)
secfree Jan 28, 2022
bd50b91
HDFS-16444. Show start time of JournalNode on Web (#3943)
tomscut Jan 30, 2022
089e06d
HDFS-16443. Fix edge case where DatanodeAdminDefaultMonitor doubly en…
KevinWikant Jan 31, 2022
e8f767f
YARN-11026. Make default AppPlacementAllocator configurable in AppSch…
minni31 Feb 1, 2022
aeae571
Revert "HADOOP-18024. SocketChannel is not closed when IOException ha…
ayushtkn Feb 1, 2022
87abc43
YARN-10822. Containers going from New to Scheduled transition for kil…
minni31 Feb 1, 2022
ec2fd01
YARN-10459. containerLaunchedOnNode method not need to hold scheduler…
minni31 Feb 1, 2022
ed44662
HDFS-16445. Make HDFS count, mkdir, rm cross platform (#3945)
GauthamBanasandra Feb 2, 2022
b5b07af
HDFS-16435. Remove no need TODO comment for ObserverReadProxyProvider…
tomscut Feb 3, 2022
41c86b6
HADOOP-18101. Bump aliyun-sdk-oss to 3.13.2 and jdom2 to 2.0.6.1 (#3951)
aswinshakil Feb 3, 2022
3684c7f
HADOOP-18100: Change scope of inner classes in InodeTree to make them…
abhishekdas99 Jan 31, 2022
5e7ce26
HADOOP-18085. S3 SDK Upgrade causes AccessPoint ARN endpoint mistrans…
bogthe Feb 4, 2022
125e3b6
HDFS-16437 ReverseXML processor doesn't accept XML files without the …
singer-bin Feb 6, 2022
b39b334
HADOOP-18098. Basic verification for the release candidate vote (#3944)
virajjasani Feb 7, 2022
3e7a7c3
HDFS-16411 RBF: RouterId is NULL when disable RourterRpcServer (#3878)
yulongz Feb 8, 2022
bf0cefb
HDFS-16406. ReadsFromLocalClient counts short-circuit reads (#3847)
secfree Feb 9, 2022
4 changes: 2 additions & 2 deletions LICENSE-binary
@@ -214,7 +214,7 @@ com.aliyun:aliyun-java-sdk-core:3.4.0
com.aliyun:aliyun-java-sdk-ecs:4.2.0
com.aliyun:aliyun-java-sdk-ram:3.0.0
com.aliyun:aliyun-java-sdk-sts:3.0.0
com.aliyun.oss:aliyun-sdk-oss:3.13.0
com.aliyun.oss:aliyun-sdk-oss:3.13.2
com.amazonaws:aws-java-sdk-bundle:1.11.901
com.cedarsoftware:java-util:1.9.0
com.cedarsoftware:json-io:2.5.1
@@ -514,7 +514,7 @@ org.hsqldb:hsqldb:2.3.4
JDOM License
------------

org.jdom:jdom:1.1
org.jdom:jdom2:2.0.6.1


Public Domain
201 changes: 201 additions & 0 deletions dev-support/hadoop-vote.sh
@@ -0,0 +1,201 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# This script performs basic sanity tests for a given Hadoop RC.
# It checks the checksum, the signature, the Rat check, the build
# from source, and building the tarball from source.

set -e -o pipefail

usage() {
  SCRIPT=$(basename "${BASH_SOURCE[@]}")

  cat << __EOF
hadoop-vote. A script for the standard release vote which verifies the following items:
1. Checksum of sources and binaries
2. Signature of sources and binaries
3. Rat check
4. Built from source
5. Built tar from source

Usage: ${SCRIPT} -s | --source <url> [-k | --key <signature>] [-f | --keys-file-url <url>] [-o | --output-dir </path/to/use>] [-D property[=value]] [-P profiles]
${SCRIPT} -h | --help

-h | --help Show this screen.
-s | --source '<url>' A URL pointing to the release candidate sources and binaries
e.g. https://dist.apache.org/repos/dist/dev/hadoop/hadoop-<version>RC0/
-k | --key '<signature>' A signature of the public key, e.g. 9AD2AE49
-f | --keys-file-url '<url>' the URL of the key file, default is
https://downloads.apache.org/hadoop/common/KEYS
-o | --output-dir '</path>' directory which has the stdout and stderr of each verification target
-D | list of maven properties to set for the mvn invocations, e.g. <-D hbase.profile=2.0 -D skipTests> Defaults to unset
-P | list of maven profiles to set for the build from source, e.g. <-P native -P yarn-ui>
__EOF
}

MVN_PROPERTIES=()
MVN_PROFILES=()

while ((${#})); do
  case "${1}" in
    -h | --help )
      usage; exit 0 ;;
    -s | --source )
      SOURCE_URL="${2}"; shift 2 ;;
    -k | --key )
      SIGNING_KEY="${2}"; shift 2 ;;
    -f | --keys-file-url )
      KEY_FILE_URL="${2}"; shift 2 ;;
    -o | --output-dir )
      OUTPUT_DIR="${2}"; shift 2 ;;
    -D )
      MVN_PROPERTIES+=("-D ${2}"); shift 2 ;;
    -P )
      MVN_PROFILES+=("-P ${2}"); shift 2 ;;
    * )
      usage >&2; exit 1 ;;
  esac
done

# Source url must be provided
if [ -z "${SOURCE_URL}" ]; then
  usage;
  exit 1
fi

cat << __EOF
Although this tool helps verify the Hadoop RC build and unit tests,
operators may still want to verify the following manually:
1. Verify the API compatibility report
2. Integration/performance/benchmark tests
3. Object store specific Integration tests against an endpoint
4. Verify overall unit test stability from Jenkins builds or locally
5. Other concerns if any
__EOF

[[ "${SOURCE_URL}" != */ ]] && SOURCE_URL="${SOURCE_URL}/"
HADOOP_RC_VERSION=$(tr "/" "\n" <<< "${SOURCE_URL}" | tail -n2)
HADOOP_VERSION=$(echo "${HADOOP_RC_VERSION}" | sed -e 's/-RC[0-9]//g' | sed -e 's/hadoop-//g')
JAVA_VERSION=$(java -version 2>&1 | cut -f3 -d' ' | head -n1 | sed -e 's/"//g')
OUTPUT_DIR="${OUTPUT_DIR:-$(pwd)}"

if [ ! -d "${OUTPUT_DIR}" ]; then
  echo "Output directory ${OUTPUT_DIR} does not exist; please create it before running this script."
  exit 1
fi

OUTPUT_PATH_PREFIX="${OUTPUT_DIR}"/"${HADOOP_RC_VERSION}"

# default value for verification targets, 0 = failed
SIGNATURE_PASSED=0
CHECKSUM_PASSED=0
RAT_CHECK_PASSED=0
BUILD_FROM_SOURCE_PASSED=0
BUILD_TAR_FROM_SOURCE_PASSED=0

function download_and_import_keys() {
  KEY_FILE_URL="${KEY_FILE_URL:-https://downloads.apache.org/hadoop/common/KEYS}"
  echo "Obtain and import the publisher key(s) from ${KEY_FILE_URL}"
  # download the keys file into file KEYS
  wget -O KEYS "${KEY_FILE_URL}"
  gpg --import KEYS
  if [ -n "${SIGNING_KEY}" ]; then
    gpg --list-keys "${SIGNING_KEY}"
  fi
}

function download_release_candidate () {
  # get all files from release candidate repo
  wget -r -np -N -nH --cut-dirs 4 "${SOURCE_URL}"
}

function verify_signatures() {
  rm -f "${OUTPUT_PATH_PREFIX}"_verify_signatures
  for file in *.tar.gz; do
    gpg --verify "${file}".asc "${file}" 2>&1 | tee -a "${OUTPUT_PATH_PREFIX}"_verify_signatures && SIGNATURE_PASSED=1 || SIGNATURE_PASSED=0
  done
}

function verify_checksums() {
  rm -f "${OUTPUT_PATH_PREFIX}"_verify_checksums
  SHA_EXT=$(find . -name "*.sha*" | awk -F '.' '{ print $NF }' | head -n 1)
  for file in *.tar.gz; do
    sha512sum --tag "${file}" > "${file}"."${SHA_EXT}".tmp
    diff "${file}"."${SHA_EXT}".tmp "${file}"."${SHA_EXT}" 2>&1 | tee -a "${OUTPUT_PATH_PREFIX}"_verify_checksums && CHECKSUM_PASSED=1 || CHECKSUM_PASSED=0
    rm -f "${file}"."${SHA_EXT}".tmp
  done
}

function unzip_from_source() {
  tar -zxvf hadoop-"${HADOOP_VERSION}"-src.tar.gz
  cd hadoop-"${HADOOP_VERSION}"-src
}

function rat_test() {
  rm -f "${OUTPUT_PATH_PREFIX}"_rat_test
  mvn clean apache-rat:check "${MVN_PROPERTIES[@]}" 2>&1 | tee "${OUTPUT_PATH_PREFIX}"_rat_test && RAT_CHECK_PASSED=1
}

function build_from_source() {
  rm -f "${OUTPUT_PATH_PREFIX}"_build_from_source
  # No unit test run.
  mvn clean install "${MVN_PROPERTIES[@]}" -DskipTests "${MVN_PROFILES[@]}" 2>&1 | tee "${OUTPUT_PATH_PREFIX}"_build_from_source && BUILD_FROM_SOURCE_PASSED=1
}

function build_tar_from_source() {
  rm -f "${OUTPUT_PATH_PREFIX}"_build_tar_from_source
  # No unit test run.
  mvn clean package "${MVN_PROPERTIES[@]}" -Pdist -DskipTests -Dtar -Dmaven.javadoc.skip=true 2>&1 | tee "${OUTPUT_PATH_PREFIX}"_build_tar_from_source && BUILD_TAR_FROM_SOURCE_PASSED=1
}

function execute() {
  ${1} || print_when_exit
}

function print_when_exit() {
  cat << __EOF
* Signature: $( ((SIGNATURE_PASSED)) && echo "ok" || echo "failed" )
* Checksum : $( ((CHECKSUM_PASSED)) && echo "ok" || echo "failed" )
* Rat check (${JAVA_VERSION}): $( ((RAT_CHECK_PASSED)) && echo "ok" || echo "failed" )
 - mvn clean apache-rat:check ${MVN_PROPERTIES[@]}
* Built from source (${JAVA_VERSION}): $( ((BUILD_FROM_SOURCE_PASSED)) && echo "ok" || echo "failed" )
 - mvn clean install ${MVN_PROPERTIES[@]} -DskipTests ${MVN_PROFILES[@]}
* Built tar from source (${JAVA_VERSION}): $( ((BUILD_TAR_FROM_SOURCE_PASSED)) && echo "ok" || echo "failed" )
 - mvn clean package ${MVN_PROPERTIES[@]} -Pdist -DskipTests -Dtar -Dmaven.javadoc.skip=true
__EOF
  if ((CHECKSUM_PASSED)) && ((SIGNATURE_PASSED)) && ((RAT_CHECK_PASSED)) && ((BUILD_FROM_SOURCE_PASSED)) && ((BUILD_TAR_FROM_SOURCE_PASSED)) ; then
    exit 0
  fi
  exit 1
}

pushd "${OUTPUT_DIR}"

download_and_import_keys
download_release_candidate

execute verify_signatures
execute verify_checksums
execute unzip_from_source
execute rat_test
execute build_from_source
execute build_tar_from_source

popd

print_when_exit
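
For reference, a typical invocation of the new script might look like the following. The RC URL, output directory, and profile are illustrative placeholders rather than values taken from this change; the flags and the 9AD2AE49 key id come from the usage text above.

mkdir -p /tmp/hadoop-vote
dev-support/hadoop-vote.sh --source https://dist.apache.org/repos/dist/dev/hadoop/hadoop-<version>-RC0/ \
  --key 9AD2AE49 --output-dir /tmp/hadoop-vote -P native

The script downloads the RC artifacts into the output directory (which must already exist), runs the signature, checksum, Rat, and build checks in order, tees each target's output to a file prefixed with the RC name, and finishes with the summary printed by print_when_exit.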
10 changes: 9 additions & 1 deletion hadoop-cloud-storage-project/hadoop-huaweicloud/pom.xml
@@ -161,6 +161,14 @@
<artifactId>okio</artifactId>
<groupId>com.squareup.okio</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-core</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-api</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -176,4 +184,4 @@
<scope>test</scope>
</dependency>
</dependencies>
</project>
</project>
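
The two new exclusions above keep the transitive log4j2 artifacts out of the hadoop-huaweicloud dependency tree. One way to sanity-check that they take effect (a suggested check, not part of this change) is to inspect the module's dependency tree:

cd hadoop-cloud-storage-project/hadoop-huaweicloud
mvn dependency:tree -Dincludes=org.apache.logging.log4j

With the exclusions in place, no org.apache.logging.log4j:log4j-core or log4j-api entries should be reported for this module.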
org/apache/hadoop/fs/viewfs/FsGetter.java
@@ -20,15 +20,17 @@
import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

/**
* File system instance getter.
*/
@Private
class FsGetter {
@InterfaceAudience.LimitedPrivate({"Common"})
@InterfaceStability.Unstable
public class FsGetter {

/**
* Gets new file system instance of given uri.
org/apache/hadoop/fs/viewfs/InodeTree.java
@@ -81,14 +81,30 @@ enum ResultKind {
private List<RegexMountPoint<T>> regexMountPointList =
new ArrayList<RegexMountPoint<T>>();

static class MountPoint<T> {
public static class MountPoint<T> {
String src;
INodeLink<T> target;

MountPoint(String srcPath, INodeLink<T> mountLink) {
src = srcPath;
target = mountLink;
}

/**
* Returns the source of mount point.
* @return The source
*/
public String getSource() {
return this.src;
}

/**
* Returns the target link.
* @return The target INode link
*/
public INodeLink<T> getTarget() {
return this.target;
}
}

/**
@@ -256,7 +272,7 @@ enum LinkType {
* For a merge, each target is checked to be dir when created but if target
* is changed later it is then ignored (a dir with null entries)
*/
static class INodeLink<T> extends INode<T> {
public static class INodeLink<T> extends INode<T> {
final URI[] targetDirLinkList;
private T targetFileSystem; // file system object created from the link.
// Function to initialize file system. Only applicable for simple links
@@ -290,7 +306,7 @@ static class INodeLink<T> extends INode<T> {
* Get the target of the link. If a merge link then it returned
* as "," separated URI list.
*/
Path getTargetLink() {
public Path getTargetLink() {
StringBuilder result = new StringBuilder(targetDirLinkList[0].toString());
// If merge link, use "," as separator between the merged URIs
for (int i = 1; i < targetDirLinkList.length; ++i) {
@@ -932,7 +948,7 @@ protected ResolveResult<T> buildResolveResultForRegexMountPoint(
}
}

List<MountPoint<T>> getMountPoints() {
public List<MountPoint<T>> getMountPoints() {
return mountPoints;
}

org/apache/hadoop/http/ProfileServlet.java
@@ -112,6 +112,9 @@ public class ProfileServlet extends HttpServlet {

static final String OUTPUT_DIR = System.getProperty("java.io.tmpdir") + "/prof-output-hadoop";

// This flag is only allowed to be reset by tests.
private static boolean isTestRun = false;

private enum Event {

CPU("cpu"),
@@ -177,6 +180,10 @@ public ProfileServlet() {
LOG.info("Servlet process PID: {} asyncProfilerHome: {}", pid, asyncProfilerHome);
}

static void setIsTestRun(boolean isTestRun) {
ProfileServlet.isTestRun = isTestRun;
}

@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
throws IOException {
@@ -274,7 +281,9 @@ protected void doGet(final HttpServletRequest req, final HttpServletResponse res
cmd.add("--reverse");
}
cmd.add(pid.toString());
process = ProcessUtils.runCmdAsync(cmd);
if (!isTestRun) {
process = ProcessUtils.runCmdAsync(cmd);
}

// set response and set refresh header to output location
setResponseHeader(resp);
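
For context, ProfileServlet backs Hadoop's async-profiler HTTP endpoint; the isTestRun flag added above lets tests exercise the request handling without actually launching the external profiler process. Outside of tests, the servlet is typically driven roughly as follows; the /prof path and the event/duration parameters are the commonly documented ones for this servlet and are assumptions here, not taken from this diff:

# assumes async-profiler is installed and configured on the daemon host
curl "http://<daemon-host>:<http-port>/prof?event=cpu&duration=30"

The response sets a refresh header pointing at the generated profile under the prof-output-hadoop directory referenced by OUTPUT_DIR above.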
org/apache/hadoop/ipc/Server.java
@@ -1329,14 +1329,6 @@ public String toString() {
}
}

@VisibleForTesting
@InterfaceAudience.Private
protected void configureSocketChannel(SocketChannel channel) throws IOException {
channel.configureBlocking(false);
channel.socket().setTcpNoDelay(tcpNoDelay);
channel.socket().setKeepAlive(true);
}

/** Listens on the socket. Creates jobs for the handler threads*/
private class Listener extends Thread {

@@ -1543,24 +1535,15 @@ private void closeCurrentConnection(SelectionKey key, Throwable e) {
InetSocketAddress getAddress() {
return (InetSocketAddress)acceptChannel.socket().getLocalSocketAddress();
}

void doAccept(SelectionKey key) throws InterruptedException, IOException, OutOfMemoryError {
ServerSocketChannel server = (ServerSocketChannel) key.channel();
SocketChannel channel;
while ((channel = server.accept()) != null) {

try {
configureSocketChannel(channel);
} catch (IOException e) {
LOG.warn("Error in an accepted SocketChannel", e);
try {
channel.socket().close();
channel.close();
} catch (IOException ex) {
LOG.warn("Error in closing SocketChannel", ex);
}
continue;
}
channel.configureBlocking(false);
channel.socket().setTcpNoDelay(tcpNoDelay);
channel.socket().setKeepAlive(true);

Reader reader = getReader();
Connection c = connectionManager.register(channel,