Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
45 commits
Select commit Hold shift + click to select a range
3503461
HDFS-16847: RBF: Prevents StateStoreFileSystemImpl from committing tm…
simbadzina Nov 29, 2022
e263d73
HDFS-16845: Adds configuration flag to allow clients to use router ob…
simbadzina Nov 29, 2022
d53f699
HDFS-16846. EC: Only EC blocks should be effected by max-streams-hard…
lfxy Nov 29, 2022
12d42fd
HADOOP-18530. ChecksumFileSystem::readVectored might return byte buff…
HarshitGupta11 Nov 29, 2022
a1f28d5
HDFS-16851: RBF: Add a utility to dump the StateStore. (#5155)
omalley Nov 29, 2022
42a3586
HDFS-16839 It should consider EC reconstruction work when we determin…
RuinanGu Nov 30, 2022
513caff
HADOOP-18457. ABFS: Support account level throttling (#5034)
anmolanmol1234 Nov 30, 2022
d94d9ff
HDFS-16550. Allow JN edit cache size to be set as a fraction of heap …
tomscut Nov 30, 2022
f531c6b
YARN-10946. AbstractCSQueue: Create separate class for constructing Q…
szilard-nemeth Dec 1, 2022
e570e75
YARN-11158. Support (Create/Renew/Cancel) DelegationToken API's for F…
slfan1989 Dec 1, 2022
c47384f
YARN-11381. Fix hadoop-yarn-common module Java Doc Errors. (#5179)
slfan1989 Dec 2, 2022
2f146d3
HDFS-16809. EC striped block is not sufficient when doing in maintena…
dingshun3016 Dec 5, 2022
1cd582b
HDFS-16837. [RBF SBN] ClientGSIContext should merge RouterFederatedSt…
ZanderXu Dec 5, 2022
4144a8a
HADOOP-18470. index.md update for 3.3.5 release
steveloughran Dec 5, 2022
f1462e7
HADOOP-18560. AvroFSInput opens a stream twice and discards the secon…
steveloughran Dec 6, 2022
94db31c
YARN-11386. Fix issue with classpath resolution (#5183)
GauthamBanasandra Dec 6, 2022
a9376d9
YARN-10978. Fix ApplicationClassLoader to Correctly Expand Glob for W…
akshatb1 Dec 6, 2022
81c4863
YARN-11373. [Federation] Support refreshQueues refreshNodes API's for…
slfan1989 Dec 6, 2022
5d22042
HADOOP-18538. Upgrade kafka to 2.8.2 (#5164)
dmmkr Dec 6, 2022
f5e33a3
HADOOP-18546. ABFS. disable purging list of in progress reads in abfs…
saxenapranav Dec 7, 2022
1269eb9
HADOOP-18563. Misleading AWS SDK S3 timeout configuration comment (#5…
o-shevchenko Dec 8, 2022
7e7f1c6
YARN-11390. TestResourceTrackerService.testNodeRemovalNormally: Shutd…
K0K0V0K Dec 8, 2022
8024f54
HDFS-16858. Dynamically adjust max slow disks to exclude. (#5180)
dingshun3016 Dec 9, 2022
ec0c622
HDFS-16860 Upgrade moment.min.js to 2.29.4 (#5194)
theradtad Dec 9, 2022
cf89edf
HADOOP-18546. Followup: ITestReadBufferManager fix (#5198)
steveloughran Dec 9, 2022
c82a6f8
HADOOP-18329. Support for IBM Semeru JVM > 11.0.15.0 Vendor Name Chan…
JackBuggins Dec 10, 2022
8e24911
YARN-11385. Fix hadoop-yarn-server-common module Java Doc Errors. (#5…
slfan1989 Dec 10, 2022
07ccbf6
HDFS-16868. Fix audit log duplicate issue when an ACE occurs in FSNam…
curie71 Dec 13, 2022
d24a5c5
MAPREDUCE-7428. Fix failures related to Junit 4 to Junit 5 upgrade in…
hotcodemacha Dec 14, 2022
19cd2cf
HADOOP-18183. s3a audit logs to publish range start/end of GET reques…
steveloughran Dec 14, 2022
60b7a92
HADOOP-18569. NFS Gateway may release buffer too early (#5212)
adoroszlai Dec 14, 2022
d688a5c
YARN-11350. [Federation] Router Support DelegationToken With ZK. (#5131)
slfan1989 Dec 14, 2022
7343184
HADOOP-18526. Leak of S3AInstrumentation instances via hadoop Metrics…
steveloughran Dec 14, 2022
e8a6089
YARN-11358. [Federation] Add FederationInterceptor#allow-partial-resu…
slfan1989 Dec 14, 2022
060b080
HADOOP-18574. Changing log level of IOStatistics increment to make th…
mehakmeet Dec 15, 2022
3806b08
HADOOP-18573. Improve error reporting on non-standard kerberos names …
steveloughran Dec 15, 2022
3d25389
HADOOP-18561. Update commons-net to 3.9.0 (#5214)
steveloughran Dec 15, 2022
36f50c5
HADOOP-18577. ABFS: Add probes of readahead fix (#5205)
steveloughran Dec 15, 2022
69a98d8
HDFS-16866. Fix a typo in Dispatcher (#5202)
Happy-shi Dec 16, 2022
9092879
HDFS-16852. Skip KeyProviderCache shutdown hook registration if alrea…
xinglin Dec 16, 2022
67d64a2
HADOOP-18567. LogThrottlingHelper: properly trigger dependent recorde…
ChengbingLiu Dec 16, 2022
5d08fad
HADOOP-18577. Followup: javadoc fix (#5232)
steveloughran Dec 18, 2022
a023375
HADOOP-18575. Make XML transformer factory more lenient (#5224)
pjfanning Dec 18, 2022
998be80
HADOOP-18470. Update index md with section on ABFS prefetching
steveloughran Dec 19, 2022
8730458
YARN-11349. Fix CheckStyle.
Dec 20, 2022
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions LICENSE-binary
Original file line number Diff line number Diff line change
Expand Up @@ -251,7 +251,7 @@ commons-collections:commons-collections:3.2.2
commons-daemon:commons-daemon:1.0.13
commons-io:commons-io:2.8.0
commons-logging:commons-logging:1.1.3
commons-net:commons-net:3.8.0
commons-net:commons-net:3.9.0
de.ruedigermoeller:fst:2.50
io.grpc:grpc-api:1.26.0
io.grpc:grpc-context:1.26.0
Expand Down Expand Up @@ -324,7 +324,7 @@ org.apache.htrace:htrace-core:3.1.0-incubating
org.apache.htrace:htrace-core4:4.1.0-incubating
org.apache.httpcomponents:httpclient:4.5.6
org.apache.httpcomponents:httpcore:4.4.10
org.apache.kafka:kafka-clients:2.8.1
org.apache.kafka:kafka-clients:2.8.2
org.apache.kerby:kerb-admin:2.0.2
org.apache.kerby:kerb-client:2.0.2
org.apache.kerby:kerb-common:2.0.2
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,10 @@

package org.apache.hadoop.util;

import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Arrays;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

Expand All @@ -33,21 +37,71 @@ public class PlatformName {
* per the java-vm.
*/
public static final String PLATFORM_NAME =
(System.getProperty("os.name").startsWith("Windows")
? System.getenv("os") : System.getProperty("os.name"))
+ "-" + System.getProperty("os.arch")
+ "-" + System.getProperty("sun.arch.data.model");
(System.getProperty("os.name").startsWith("Windows") ?
System.getenv("os") : System.getProperty("os.name"))
+ "-" + System.getProperty("os.arch") + "-"
+ System.getProperty("sun.arch.data.model");

/**
* The java vendor name used in this platform.
*/
public static final String JAVA_VENDOR_NAME = System.getProperty("java.vendor");

/**
 * Define a system class accessor that is open to changes in underlying implementations
 * of the system class loader modules.
 */
private static final class SystemClassAccessor extends ClassLoader {
// Widens the protected ClassLoader.findSystemClass(String) to public so this
// class can be used to probe whether a class is resolvable via the system
// class loader (see isSystemClassAvailable below).
public Class<?> getSystemClass(String className) throws ClassNotFoundException {
return findSystemClass(className);
}
}

/**
* A public static variable to indicate the current java vendor is
* IBM java or not.
* IBM and the type is Java Technology Edition which provides its
* own implementations of many security packages and Cipher suites.
* Note that these are not provided in Semeru runtimes:
* See https://developer.ibm.com/languages/java/semeru-runtimes for details.
*/
public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM");
public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM") &&
hasIbmTechnologyEditionModules();

private static boolean hasIbmTechnologyEditionModules() {
  // Security login modules shipped only by the IBM "Java Technology Edition"
  // runtime; Semeru/OpenJ9 builds do not provide them, so finding any one of
  // these distinguishes the two families of IBM JVMs.
  final String[] ibmSecurityModules = {
      "com.ibm.security.auth.module.JAASLoginModule",
      "com.ibm.security.auth.module.Win64LoginModule",
      "com.ibm.security.auth.module.NTLoginModule",
      "com.ibm.security.auth.module.AIX64LoginModule",
      "com.ibm.security.auth.module.LinuxLoginModule",
      "com.ibm.security.auth.module.Krb5LoginModule"
  };
  for (String moduleClass : ibmSecurityModules) {
    if (isSystemClassAvailable(moduleClass)) {
      return true;
    }
  }
  return false;
}

/**
 * Test whether a named class can be loaded via the system class loader.
 * <p>
 * In the rare cases where behaviour must differ by JVM vendor, prefer
 * probing for a unique vendor-provided class with this method over
 * inspecting the vendor string: e.g. if one JVM supplies a different
 * Kerberos login module, check that the module is loadable before
 * configuring it, rather than trusting the vendor data.
 *
 * @param className the name of a class in the JVM to test for
 * @return true if the class is available, false otherwise.
 */
private static boolean isSystemClassAvailable(String className) {
  final PrivilegedAction<Boolean> probe = () -> {
    try {
      // ClassLoader.findSystemClass() is used instead of
      // Class.forName(className, false, null): on Java 9+ a null loader
      // consults only the boot ClassLoader, which cannot see all of the
      // modules that findSystemClass can.
      new SystemClassAccessor().getSystemClass(className);
      return true;
    } catch (Exception ignored) {
      // Any failure to resolve the class means "not available".
      return false;
    }
  };
  return AccessController.doPrivileged(probe);
}

public static void main(String[] args) {
System.out.println(PLATFORM_NAME);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,6 @@ public AvroFSInput(final FileContext fc, final Path p) throws IOException {
FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL)
.withFileStatus(status)
.build());
fc.open(p);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -307,9 +307,16 @@ public static ByteBuffer sliceTo(ByteBuffer readData, long readOffset,
FileRange request) {
int offsetChange = (int) (request.getOffset() - readOffset);
int requestLength = request.getLength();
// Create a new buffer that is backed by the original contents
// The buffer will have position 0 and the same limit as the original one
readData = readData.slice();
// Change the offset and the limit of the buffer as the reader wants to see
// only relevant data
readData.position(offsetChange);
readData.limit(offsetChange + requestLength);
// Create a new buffer after the limit change so that only that portion of the data is
// returned to the reader.
readData = readData.slice();
return readData;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,11 @@ private AuditConstants() {
*/
public static final String PARAM_PROCESS = "ps";

/**
* Header: Range for GET request data: {@value}.
*/
public static final String PARAM_RANGE = "rg";

/**
* Task Attempt ID query header: {@value}.
*/
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.fs.impl;

import java.lang.ref.WeakReference;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsSource;

import static java.util.Objects.requireNonNull;

/**
 * A {@link MetricsSource} that holds its delegate through a weak reference,
 * so that a source which somehow escapes being fully closed/cleaned up does
 * not pin large object graphs in memory.
 * The JVM may clear weakly reachable objects on any GC cycle,
 * <i>even if there is no memory pressure</i>; callers who need the
 * underlying source to stay alive must keep a strong reference to it
 * somewhere else.
 */
@InterfaceAudience.Private
public class WeakRefMetricsSource implements MetricsSource {

  /** Registration name, needed again when unregistering. */
  private final String name;

  /** Weak reference to the real metrics source. */
  private final WeakReference<MetricsSource> ref;

  /**
   * Create a weakly referencing wrapper around a metrics source.
   * @param name Name to know when unregistering.
   * @param source metrics source; must not be null
   */
  public WeakRefMetricsSource(final String name, final MetricsSource source) {
    this.name = name;
    this.ref = new WeakReference<>(requireNonNull(source));
  }

  /**
   * Forward to the underlying source if it has not yet been garbage
   * collected; otherwise do nothing.
   * @param collector to contain the resulting metrics snapshot
   * @param all if true, return all metrics even if unchanged.
   */
  @Override
  public void getMetrics(final MetricsCollector collector, final boolean all) {
    final MetricsSource delegate = ref.get();
    if (delegate != null) {
      delegate.getMetrics(collector, all);
    }
  }

  /**
   * Name to know when unregistering.
   * @return the name passed in during construction.
   */
  public String getName() {
    return name;
  }

  /**
   * Get the underlying source; null once the reference has been GC'd.
   * @return the source reference
   */
  public MetricsSource getSource() {
    return ref.get();
  }

  @Override
  public String toString() {
    // The literal "sourceWeakReference" is kept in the output for
    // compatibility with earlier log/diagnostic text.
    final String refState = ref.get() == null ? "unset" : "set";
    return "WeakRefMetricsSource{" + "name='" + name + '\''
        + ", sourceWeakReference is " + refState + '}';
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -190,7 +190,7 @@ public long incrementCounter(final String key, final long value) {
return counter.get();
} else {
long l = incAtomicLong(counter, value);
LOG.debug("Incrementing counter {} by {} with final value {}",
LOG.trace("Incrementing counter {} by {} with final value {}",
key, value, l);
return l;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -262,9 +262,15 @@ public LogAction record(String recorderName, long currentTimeMs,
if (primaryRecorderName.equals(recorderName) &&
currentTimeMs - minLogPeriodMs >= lastLogTimestampMs) {
lastLogTimestampMs = currentTimeMs;
for (LoggingAction log : currentLogs.values()) {
log.setShouldLog();
}
currentLogs.replaceAll((key, log) -> {
LoggingAction newLog = log;
if (log.hasLogged()) {
// create a fresh log since the old one has already been logged
newLog = new LoggingAction(log.getValueCount());
}
newLog.setShouldLog();
return newLog;
});
}
if (currentLog.shouldLog()) {
currentLog.setHasLogged();
Expand Down Expand Up @@ -357,6 +363,10 @@ private void setHasLogged() {
hasLogged = true;
}

private int getValueCount() {
return stats.length;
}

private void recordValues(double... values) {
if (values.length != stats.length) {
throw new IllegalArgumentException("received " + values.length +
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,8 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.hadoop.util.Shell.bashQuote;

/**
* A simple shell-based implementation of {@link IdMappingServiceProvider}
* Map id to user name or group name. It does update every 15 minutes. Only a
Expand Down Expand Up @@ -472,26 +474,27 @@ synchronized private void updateMapIncr(final String name,

boolean updated = false;
updateStaticMapping();
String name2 = bashQuote(name);

if (OS.startsWith("Linux") || OS.equals("SunOS") || OS.contains("BSD")) {
if (isGrp) {
updated = updateMapInternal(gidNameMap, "group",
getName2IdCmdNIX(name, true), ":",
getName2IdCmdNIX(name2, true), ":",
staticMapping.gidMapping);
} else {
updated = updateMapInternal(uidNameMap, "user",
getName2IdCmdNIX(name, false), ":",
getName2IdCmdNIX(name2, false), ":",
staticMapping.uidMapping);
}
} else {
// Mac
if (isGrp) {
updated = updateMapInternal(gidNameMap, "group",
getName2IdCmdMac(name, true), "\\s+",
getName2IdCmdMac(name2, true), "\\s+",
staticMapping.gidMapping);
} else {
updated = updateMapInternal(uidNameMap, "user",
getName2IdCmdMac(name, false), "\\s+",
getName2IdCmdMac(name2, false), "\\s+",
staticMapping.uidMapping);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.hadoop.util.PlatformName.JAVA_VENDOR_NAME;
import static org.apache.hadoop.util.PlatformName.IBM_JAVA;

import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
Expand Down Expand Up @@ -102,11 +102,11 @@ public enum Mode { CLIENT, SERVER }
"ssl.server.exclude.cipher.list";

public static final String KEY_MANAGER_SSLCERTIFICATE =
JAVA_VENDOR_NAME.contains("IBM") ? "ibmX509" :
IBM_JAVA ? "ibmX509" :
KeyManagerFactory.getDefaultAlgorithm();

public static final String TRUST_MANAGER_SSLCERTIFICATE =
JAVA_VENDOR_NAME.contains("IBM") ? "ibmX509" :
IBM_JAVA ? "ibmX509" :
TrustManagerFactory.getDefaultAlgorithm();

public static final String KEYSTORES_FACTORY_CLASS_KEY =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -866,9 +866,9 @@ private String getTokenRealOwner(TokenIdent id) {
/**
* Add token stats to the owner to token count mapping.
*
* @param id
* @param id token id.
*/
private void addTokenForOwnerStats(TokenIdent id) {
protected void addTokenForOwnerStats(TokenIdent id) {
String realOwner = getTokenRealOwner(id);
tokenOwnerStats.put(realOwner,
tokenOwnerStats.getOrDefault(realOwner, 0L)+1);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ static URL[] constructUrlsFromClasspath(String classpath)
throws MalformedURLException {
List<URL> urls = new ArrayList<URL>();
for (String element : classpath.split(File.pathSeparator)) {
if (element.endsWith("/*")) {
if (element.endsWith(File.separator + "*")) {
List<Path> jars = FileUtil.getJarsInDirectory(element);
if (!jars.isEmpty()) {
for (Path jar: jars) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,8 @@ public static void checkWindowsCommandLineLength(String...commands)
* @param arg the argument to quote
* @return the quoted string
*/
static String bashQuote(String arg) {
@InterfaceAudience.Private
public static String bashQuote(String arg) {
StringBuilder buffer = new StringBuilder(arg.length() + 2);
buffer.append('\'')
.append(arg.replace("'", "'\\''"))
Expand Down
Loading