Date: Tue, 10 May 2022 00:03:25 -0700
Subject: [PATCH 16/53] HADOOP-18229. Fix some java doc compilation errors
CryptoInputStream.java: no description for @throws.
CryptoOutputStream.java: no description for @throws.
CryptoStreamUtils.java: no @param for buffer, no @param for conf, no @return,
no @param for codec, no @throws for java.io.IOException.
HasFileDescriptor.java: no description for @throws.
KeyProvider.java: no description for @throws.
OpensslCipher.java: no description for @throws.
Seekable.java: no @param for pos, no @param for targetPos, no @throws for
java.io.IOException.
---
.../hadoop/crypto/CryptoInputStream.java | 2 +-
.../hadoop/crypto/CryptoOutputStream.java | 2 +-
.../hadoop/crypto/CryptoStreamUtils.java | 29 ++++++++++++++---
.../apache/hadoop/crypto/OpensslCipher.java | 20 ++++++------
.../apache/hadoop/crypto/key/KeyProvider.java | 32 +++++++++----------
.../apache/hadoop/fs/HasFileDescriptor.java | 2 +-
.../java/org/apache/hadoop/fs/Seekable.java | 14 ++++++--
7 files changed, 65 insertions(+), 36 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
index 5ab5d341fb826..067abde9dfbb8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
@@ -157,7 +157,7 @@ public InputStream getWrappedStream() {
* @param off the buffer offset.
* @param len the maximum number of decrypted data bytes to read.
* @return int the total number of decrypted data bytes read into the buffer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public int read(byte[] b, int off, int len) throws IOException {
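The contract documented above allows short reads: read(byte[], int, int) may return fewer decrypted bytes than requested. A minimal sketch of a loop that honors that contract, written against plain java.io so it stays self-contained (illustrative only, not part of this patch):

    import java.io.IOException;
    import java.io.InputStream;

    public class ReadFully {
      /** Fills b[off..off+len) by looping, since read() may return fewer bytes. */
      static void readFully(InputStream in, byte[] b, int off, int len)
          throws IOException {
        while (len > 0) {
          int n = in.read(b, off, len);
          if (n < 0) {
            throw new IOException("premature end of stream");
          }
          off += n;
          len -= n;
        }
      }
    }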
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
index 8e7522112551e..2a1335b6e745a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
@@ -146,7 +146,7 @@ public OutputStream getWrappedStream() {
* @param b the data.
* @param off the start offset in the data.
* @param len the number of bytes to write.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public synchronized void write(byte[] b, int off, int len) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
index 318975fd6cebd..9db5f9173af38 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
@@ -52,13 +52,22 @@ public static void freeDB(ByteBuffer buffer) {
}
}
- /** Read crypto buffer size */
+ /**
+ * Read crypto buffer size
+ *
+ * @param conf configuration
+ * @return hadoop.security.crypto.buffer.size
+ */
public static int getBufferSize(Configuration conf) {
return conf.getInt(HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY,
HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_DEFAULT);
}
-
- /** AES/CTR/NoPadding or SM4/CTR/NoPadding is required. */
+
+ /**
+ * AES/CTR/NoPadding or SM4/CTR/NoPadding is required.
+ *
+ * @param codec crypto codec
+ */
public static void checkCodec(CryptoCodec codec) {
if (codec.getCipherSuite() != CipherSuite.AES_CTR_NOPADDING &&
codec.getCipherSuite() != CipherSuite.SM4_CTR_NOPADDING) {
@@ -67,17 +76,27 @@ public static void checkCodec(CryptoCodec codec) {
}
}
- /** Check and floor buffer size */
+ /**
+ * Check and floor buffer size
+ *
+ * @param codec crypto codec
+ * @param bufferSize the size of the buffer to be used.
+ * @return the buffer size floored to a multiple of the cipher block size
+ */
public static int checkBufferSize(CryptoCodec codec, int bufferSize) {
Preconditions.checkArgument(bufferSize >= MIN_BUFFER_SIZE,
"Minimum value of buffer size is " + MIN_BUFFER_SIZE + ".");
return bufferSize - bufferSize % codec.getCipherSuite()
.getAlgorithmBlockSize();
}
-
+
/**
* If input stream is {@link org.apache.hadoop.fs.Seekable}, return it's
* current position, otherwise return 0;
+ *
+ * @param in the input stream
+ * @return the current position, or 0 if the stream is not Seekable
+ * @throws IOException raised on errors performing I/O.
*/
public static long getInputStreamOffset(InputStream in) throws IOException {
if (in instanceof Seekable) {
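For reference, the flooring performed by checkBufferSize above rounds the requested size down to a multiple of the cipher's block size. The same arithmetic as a standalone sketch, assuming the 16-byte AES block size used by AES/CTR/NoPadding:

    public class BufferFloor {
      public static void main(String[] args) {
        int blockSize = 16;    // assumed AES algorithm block size
        int requested = 8193;  // caller-supplied buffer size
        int floored = requested - requested % blockSize;
        System.out.println(floored);  // 8192, the largest multiple of 16 <= 8193
      }
    }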
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
index 0c65b74b2913b..1961a765b4a9a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
@@ -225,34 +225,34 @@ public int update(ByteBuffer input, ByteBuffer output)
output.position(output.position() + len);
return len;
}
-
+
/**
* Finishes a multiple-part operation. The data is encrypted or decrypted,
* depending on how this cipher was initialized.
*
- *
+ *
* The result is stored in the output buffer. Upon return, the output buffer's
* position will have advanced by n, where n is the value returned by this
* method; the output buffer's limit will not have changed.
*
- *
+ *
* If output.remaining() bytes are insufficient to hold the result,
* a ShortBufferException is thrown.
*
- *
+ *
* Upon finishing, this method resets this cipher object to the state it was
* in when previously initialized. That is, the object is available to encrypt
* or decrypt more data.
*
- *
- * If any exception is thrown, this cipher object need to be reset before it
+ *
+ * If any exception is thrown, this cipher object needs to be reset before it
* can be used again.
- *
+ *
* @param output the output ByteBuffer
* @return int number of bytes stored in output
- * @throws ShortBufferException
- * @throws IllegalBlockSizeException
- * @throws BadPaddingException
+ * @throws ShortBufferException if there is insufficient space in the output buffer.
+ * @throws IllegalBlockSizeException This exception is thrown when the length of data provided to a block cipher is incorrect.
+ * @throws BadPaddingException This exception is thrown when a particular padding mechanism is expected for the input data but the data is not padded properly.
*/
public int doFinal(ByteBuffer output) throws ShortBufferException,
IllegalBlockSizeException, BadPaddingException {
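The update()/doFinal() lifecycle documented above follows the javax.crypto.Cipher model: update() processes data incrementally, and doFinal() completes the operation and resets the cipher. A hedged sketch, assuming the getInstance/init signatures and ENCRYPT_MODE constant of this class, with placeholder key and IV values:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.crypto.OpensslCipher;

    public class CipherSketch {
      public static void main(String[] args) throws Exception {
        OpensslCipher cipher = OpensslCipher.getInstance("AES/CTR/NoPadding");
        byte[] key = new byte[16];  // placeholder 128-bit key
        byte[] iv = new byte[16];   // placeholder IV
        cipher.init(OpensslCipher.ENCRYPT_MODE, key, iv);

        // update() and doFinal() operate on direct ByteBuffers.
        ByteBuffer in = ByteBuffer.allocateDirect(1024);
        ByteBuffer out = ByteBuffer.allocateDirect(1024);
        in.put("some plaintext".getBytes(StandardCharsets.UTF_8)).flip();

        int n = cipher.update(in, out);
        n += cipher.doFinal(out);  // resets the cipher to its initialized state
        out.flip();                // out now holds n ciphertext bytes
      }
    }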
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
index dafdaf7e15b25..4210548f87720 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
@@ -242,7 +242,7 @@ protected int addVersion() {
/**
* Serialize the metadata to a set of bytes.
* @return the serialized bytes
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected byte[] serialize() throws IOException {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
@@ -281,7 +281,7 @@ protected byte[] serialize() throws IOException {
/**
* Deserialize a new metadata object from a set of bytes.
* @param bytes the serialized metadata
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected Metadata(byte[] bytes) throws IOException {
String cipher = null;
@@ -450,7 +450,7 @@ public boolean isTransient() {
* when decrypting data.
* @param versionName the name of a specific version of the key
* @return the key material
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract KeyVersion getKeyVersion(String versionName
) throws IOException;
@@ -458,14 +458,14 @@ public abstract KeyVersion getKeyVersion(String versionName
/**
* Get the key names for all keys.
* @return the list of key names
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract List<String> getKeys() throws IOException;
/**
* Get key metadata in bulk.
* @param names the names of the keys to get
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Metadata[] getKeysMetadata(String... names) throws IOException {
Metadata[] result = new Metadata[names.length];
@@ -478,7 +478,7 @@ public Metadata[] getKeysMetadata(String... names) throws IOException {
/**
* Get the key material for all versions of a specific key name.
* @return the list of key material
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract List<KeyVersion> getKeyVersions(String name) throws IOException;
@@ -488,7 +488,7 @@ public Metadata[] getKeysMetadata(String... names) throws IOException {
* @param name the base name of the key
* @return the version name of the current version of the key or null if the
* key version doesn't exist
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public KeyVersion getCurrentKey(String name) throws IOException {
Metadata meta = getMetadata(name);
@@ -502,7 +502,7 @@ public KeyVersion getCurrentKey(String name) throws IOException {
* Get metadata about the key.
* @param name the basename of the key
* @return the key's metadata or null if the key doesn't exist
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract Metadata getMetadata(String name) throws IOException;
@@ -512,7 +512,7 @@ public KeyVersion getCurrentKey(String name) throws IOException {
* @param material the key material for the first version of the key.
* @param options the options for the new key.
* @return the version name of the first version of the key.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract KeyVersion createKey(String name, byte[] material,
Options options) throws IOException;
@@ -558,7 +558,7 @@ protected byte[] generateKey(int size, String algorithm)
* @param name the base name of the key
* @param options the options for the new key.
* @return the version name of the first version of the key.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @throws NoSuchAlgorithmException
*/
public KeyVersion createKey(String name, Options options)
@@ -570,7 +570,7 @@ public KeyVersion createKey(String name, Options options)
/**
* Delete the given key.
* @param name the name of the key to delete
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void deleteKey(String name) throws IOException;
@@ -579,7 +579,7 @@ public KeyVersion createKey(String name, Options options)
* @param name the basename of the key
* @param material the new key material
* @return the name of the new version of the key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract KeyVersion rollNewVersion(String name,
byte[] material
@@ -601,7 +601,7 @@ public void close() throws IOException {
*
* @param name the basename of the key
* @return the name of the new version of the key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException,
IOException {
@@ -620,7 +620,7 @@ public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException,
* version of the given key.
*
* @param name the basename of the key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void invalidateCache(String name) throws IOException {
// NOP
@@ -628,7 +628,7 @@ public void invalidateCache(String name) throws IOException {
/**
* Ensures that any changes to the keys are written to persistent store.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void flush() throws IOException;
@@ -637,7 +637,7 @@ public void invalidateCache(String name) throws IOException {
* "/aaa/bbb".
* @param versionName the version name to split
* @return the base name of the key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static String getBaseName(String versionName) throws IOException {
int div = versionName.lastIndexOf('@');
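As getBaseName above implies, version names have the form <base>@<version>. A hedged usage sketch of the KeyProvider surface touched by this patch, assuming providers are configured via KeyProviderFactory (hadoop.security.key.provider.path):

    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.crypto.key.KeyProvider;
    import org.apache.hadoop.crypto.key.KeyProviderFactory;

    public class KeyProviderSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        List<KeyProvider> providers = KeyProviderFactory.getProviders(conf);
        for (KeyProvider p : providers) {
          for (String name : p.getKeys()) {
            KeyProvider.Metadata meta = p.getMetadata(name);
            System.out.println(name + ": " + meta.getVersions() + " version(s)");
          }
          p.flush();  // persist pending changes, per the flush() contract above
        }
        String base = KeyProvider.getBaseName("mykey@3");  // "mykey"
      }
    }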
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java
index bcf325ceca5df..a0e89d6aeac44 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java
@@ -33,7 +33,7 @@ public interface HasFileDescriptor {
/**
* @return the FileDescriptor
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public FileDescriptor getFileDescriptor() throws IOException;
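HasFileDescriptor is an optional interface, so callers probe for it with instanceof; a minimal illustrative helper:

    import java.io.FileDescriptor;
    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.hadoop.fs.HasFileDescriptor;

    public class FdProbe {
      /** Returns the stream's descriptor if it exposes one, otherwise null. */
      static FileDescriptor probe(InputStream in) throws IOException {
        return in instanceof HasFileDescriptor
            ? ((HasFileDescriptor) in).getFileDescriptor()
            : null;
      }
    }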
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java
index 919c857ffa628..59f0c66b2dc7f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java
@@ -32,17 +32,27 @@ public interface Seekable {
* Seek to the given offset from the start of the file.
* The next read() will be from that location. Can't
* seek past the end of the file.
+ *
+ * @param pos offset from the start of the file
+ * @throws IOException raised on errors performing I/O.
*/
void seek(long pos) throws IOException;
-
+
/**
* Return the current offset from the start of the file
+ *
+ * @return offset from the start of the file
+ * @throws IOException raised on errors performing I/O.
*/
long getPos() throws IOException;
/**
- * Seeks a different copy of the data. Returns true if
+ * Seeks a different copy of the data. Returns true if
* found a new source, false otherwise.
+ *
+ * @param targetPos target position
+ * @return true if found a new source, false otherwise.
+ * @throws IOException raised on errors performing I/O.
*/
@InterfaceAudience.Private
boolean seekToNewSource(long targetPos) throws IOException;
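A minimal in-memory Seekable showing the seek()/getPos()/seekToNewSource() contract documented above (illustrative only):

    import java.io.IOException;
    import org.apache.hadoop.fs.Seekable;

    public class ByteArraySeekable implements Seekable {
      private final byte[] data;
      private long pos;

      public ByteArraySeekable(byte[] data) {
        this.data = data;
      }

      @Override
      public void seek(long pos) throws IOException {
        if (pos < 0 || pos > data.length) {
          throw new IOException("cannot seek past end of data: " + pos);
        }
        this.pos = pos;  // the next read starts here
      }

      @Override
      public long getPos() {
        return pos;
      }

      @Override
      public boolean seekToNewSource(long targetPos) {
        return false;  // a single in-memory copy has no alternate source
      }
    }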
From b6c20efa4b680de569d36a1319633613fd8e8598 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Tue, 10 May 2022 02:11:24 -0700
Subject: [PATCH 17/53] HADOOP-18229. Fix some java doc compilation errors
KeyProvider.java: no @throws for java.security.NoSuchAlgorithmException, no
@throws for java.io.IOException.
KeyProviderCryptoExtension.java: warning: no description for @param, warning:
no @throws for java.io.IOException.
DelegationTokenIssuer.java: warning: no @return, warning: no @param for
renewer, no @param for issuer, etc.
KeyProviderDelegationTokenExtension.java: no description for @throws.
Tool.java: warning: no description for @throws.
CommandShell.java: warning: no @return, warning: no description for @param.
KeyShell.java: warning: no description for @throws.
TokenRenewer.java: warning: no description for @throws, warning: no @param
for token.
---
.../apache/hadoop/crypto/key/KeyProvider.java | 8 +++++--
.../key/KeyProviderCryptoExtension.java | 5 +++--
.../KeyProviderDelegationTokenExtension.java | 4 ++--
.../apache/hadoop/crypto/key/KeyShell.java | 4 ++--
.../security/token/DelegationTokenIssuer.java | 13 +++++++++++
.../hadoop/security/token/TokenRenewer.java | 22 ++++++++++++-------
.../org/apache/hadoop/tools/CommandShell.java | 4 +++-
.../java/org/apache/hadoop/util/Tool.java | 2 +-
8 files changed, 44 insertions(+), 18 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
index 4210548f87720..75355c464e21b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
@@ -601,7 +601,9 @@ public void close() throws IOException {
*
* @param name the basename of the key
* @return the name of the new version of the key
- * @throws IOException raised on errors performing I/O.
+ * @throws IOException raised on errors performing I/O.
+ * @throws NoSuchAlgorithmException This exception is thrown when a particular cryptographic algorithm is requested
+ * but is not available in the environment.
*/
public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException,
IOException {
@@ -660,9 +662,11 @@ protected static String buildVersionName(String name, int version) {
/**
* Find the provider with the given key.
+ *
* @param providerList the list of providers
- * @param keyName the key name we are looking for
+ * @param keyName the key name we are looking for
* @return the KeyProvider that has the key
+ * @throws IOException raised on errors performing I/O.
*/
public static KeyProvider findProvider(List<KeyProvider> providerList,
String keyName) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
index 3f3c367fc3933..cc767ab545488 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
@@ -474,8 +474,9 @@ public void drain(String keyName) {
/**
* This constructor is to be used by sub classes that provide
* delegating/proxying functionality to the {@link KeyProviderCryptoExtension}
- * @param keyProvider
- * @param extension
+ *
+ * @param keyProvider key provider
+ * @param extension crypto extension
*/
protected KeyProviderCryptoExtension(KeyProvider keyProvider,
CryptoExtension extension) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
index 1fdc2fe12455b..3c1af424eb7cd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
@@ -48,14 +48,14 @@ public interface DelegationTokenExtension
* Renews the given token.
* @param token The token to be renewed.
* @return The token's lifetime after renewal, or 0 if it can't be renewed.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
long renewDelegationToken(final Token<?> token) throws IOException;
/**
* Cancels the given token.
* @param token The token to be cancelled.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
Void cancelDelegationToken(final Token<?> token) throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
index a75f7d3aa63bd..c18d0d41bc08a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
@@ -75,7 +75,7 @@ public class KeyShell extends CommandShell {
*
* @param args Command line arguments.
* @return 0 on success, 1 on failure.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
protected int init(String[] args) throws IOException {
@@ -547,7 +547,7 @@ private String prettifyException(Exception e) {
* success and 1 for failure.
*
* @param args Command line arguments.
- * @throws Exception
+ * @throws Exception raised on errors performing I/O.
*/
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new KeyShell(), args);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java
index 7b0a78bcd3c0d..892a01f0f21fc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java
@@ -39,17 +39,24 @@ public interface DelegationTokenIssuer {
* The service name used as the alias for the token in the credential
* token map. addDelegationTokens will use this to determine if
* a token exists, and if not, add a new token with this alias.
+ * @return the canonical service name
*/
String getCanonicalServiceName();
/**
* Unconditionally get a new token with the optional renewer. Returning
* null indicates the service does not issue tokens.
+ * @param renewer
+ * @return the token
+ * @throws IOException raised on errors performing I/O.
*/
Token<?> getDelegationToken(String renewer) throws IOException;
/**
* Issuers may need tokens from additional services.
+ *
+ * @return delegation token issuer
+ * @throws IOException raised on errors performing I/O.
*/
default DelegationTokenIssuer[] getAdditionalTokenIssuers()
throws IOException {
@@ -81,6 +88,12 @@ default Token<?>[] addDelegationTokens(
/**
* NEVER call this method directly.
+ *
+ * @param issuer issuer
+ * @param renewer renewer
+ * @param credentials cache in which to add new delegation tokens
+ * @param tokens list of new delegation tokens
+ * @throws IOException raised on errors performing I/O.
*/
@InterfaceAudience.Private
static void collectDelegationTokens(
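Callers reach these methods through addDelegationTokens(), which walks the issuer plus any additional issuers it reports and skips services whose tokens are already cached in the credentials. A hedged usage sketch, assuming FileSystem implements DelegationTokenIssuer as in current Hadoop:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.token.Token;

    public class TokenCollector {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Credentials creds = new Credentials();
        // Fetches a token from fs and any additional issuers it reports.
        Token<?>[] added = fs.addDelegationTokens("yarn", creds);
        System.out.println("tokens added: " + added.length);
      }
    }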
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java
index 11e275f3213d2..2e27b3ca5b5fd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java
@@ -44,16 +44,22 @@ public abstract class TokenRenewer {
* cancelled.
* @param token the token being checked
* @return true if the token may be renewed or cancelled
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract boolean isManaged(Token<?> token) throws IOException;
-
- /**
- * Renew the given token.
- * @return the new expiration time
- * @throws IOException
- * @throws InterruptedException
- */
+
+ /**
+ * Renew the given token.
+ *
+ * @param token the token being renewed
+ * @param conf configuration
+ *
+ * @return the new expiration time
+ * @throws IOException raised on errors performing I/O.
+ * @throws InterruptedException thrown when a thread is waiting, sleeping,
+ * or otherwise occupied, and the thread is interrupted,
+ * either before or during the activity.
+ */
public abstract long renew(Token<?> token,
Configuration conf
) throws IOException, InterruptedException;
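For context, renew() is one of the methods a concrete renewer implements alongside handleKind(), isManaged(), and cancel(). A minimal, hypothetical subclass (the DEMO token kind and the 24-hour lifetime are made up):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.token.Token;
    import org.apache.hadoop.security.token.TokenRenewer;

    public class DemoTokenRenewer extends TokenRenewer {
      private static final Text KIND = new Text("DEMO");

      @Override
      public boolean handleKind(Text kind) {
        return KIND.equals(kind);
      }

      @Override
      public boolean isManaged(Token<?> token) throws IOException {
        return true;  // DEMO tokens may be renewed and cancelled
      }

      @Override
      public long renew(Token<?> token, Configuration conf)
          throws IOException, InterruptedException {
        // A real renewer contacts the issuing service here.
        return System.currentTimeMillis() + 24 * 60 * 60 * 1000L;
      }

      @Override
      public void cancel(Token<?> token, Configuration conf)
          throws IOException, InterruptedException {
        // A real renewer invalidates the token with the issuing service.
      }
    }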
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/CommandShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/CommandShell.java
index a53e2259e0e25..4e5f0fa4054b7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/CommandShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/CommandShell.java
@@ -36,6 +36,7 @@ public abstract class CommandShell extends Configured implements Tool {
/**
* Return usage string for the command including any summary of subcommands.
+ * @return command usage
*/
public abstract String getCommandUsage();
@@ -84,8 +85,9 @@ public int run(String[] args) throws Exception {
/**
* Parse the command line arguments and initialize subcommand instance.
- * @param args
+ * @param args arguments
* @return 0 if the argument(s) were recognized, 1 otherwise
+ * @throws Exception init exception
*/
protected abstract int init(String[] args) throws Exception;
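A hedged sketch of a CommandShell subclass implementing the two abstract members documented above; the argument handling is illustrative only:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.tools.CommandShell;
    import org.apache.hadoop.util.ToolRunner;

    public class DemoShell extends CommandShell {
      @Override
      protected int init(String[] args) throws Exception {
        // Return 0 once a subcommand is recognized, 1 otherwise.
        return args.length > 0 ? 0 : 1;
      }

      @Override
      public String getCommandUsage() {
        return "Usage: demoshell <subcommand> [options]";
      }

      public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Configuration(), new DemoShell(), args));
      }
    }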
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
index 2b803d5eefced..b526861f45741 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
@@ -82,7 +82,7 @@ public interface Tool extends Configurable {
*
* @param args command specific arguments.
* @return exit code.
- * @throws Exception
+ * @throws Exception command exception
*/
int run(String [] args) throws Exception;
}
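The run() contract above is the standard Tool pattern: ToolRunner strips the generic Hadoop options, passes the remainder to run(), and the return value becomes the process exit code. A minimal implementation:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;

    public class EchoTool extends Configured implements Tool {
      @Override
      public int run(String[] args) throws Exception {
        for (String arg : args) {
          System.out.println(arg);
        }
        return 0;  // becomes the process exit code
      }

      public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Configuration(), new EchoTool(), args));
      }
    }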
From 4ccb346f00550e243282e1b8da349c843183650b Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Tue, 10 May 2022 06:59:47 -0700
Subject: [PATCH 18/53] HADOOP-18229. Fix some java doc compilation errors
AbstractFileSystem.java: no @param for f, no @param for opts, etc.
Configuration.java: warning: no @throws for java.lang.Exception.
CryptoStreamUtils.java: warning: no @param for buffer.
KeyProvider.java: warning: no description for @throws.
DelegationTokenIssuer.java: no description for @param.
TokenRenewer.java: no @param for token.
ValueQueue.java: no description for @throws.
CryptoStreamUtils.java, KeyProvider.java, OpensslCipher.java: checkstyle fixes.
---
.../org/apache/hadoop/conf/Configuration.java | 1 +
.../hadoop/crypto/CryptoStreamUtils.java | 10 +-
.../apache/hadoop/crypto/OpensslCipher.java | 7 +-
.../apache/hadoop/crypto/key/KeyProvider.java | 9 +-
.../hadoop/crypto/key/kms/ValueQueue.java | 6 +-
.../apache/hadoop/fs/AbstractFileSystem.java | 272 ++++++++++++++++--
.../security/token/DelegationTokenIssuer.java | 2 +-
.../hadoop/security/token/TokenRenewer.java | 15 +-
8 files changed, 279 insertions(+), 43 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index acf4fd54239c5..a1ae4a7ab5f5d 100755
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -3907,6 +3907,7 @@ synchronized boolean getQuietMode() {
/** For debugging. List non-default properties to the terminal and exit.
* @param args the argument to be parsed
+ * @throws Exception exception
*/
public static void main(String[] args) throws Exception {
new Configuration().writeXml(System.out);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
index 9db5f9173af38..1235d3f55fb10 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
@@ -39,7 +39,11 @@ public class CryptoStreamUtils {
private static final Logger LOG =
LoggerFactory.getLogger(CryptoStreamUtils.class);
- /** Forcibly free the direct buffer. */
+ /**
+ * Forcibly free the direct buffer.
+ *
+ * @param buffer the direct buffer to free
+ */
public static void freeDB(ByteBuffer buffer) {
if (CleanerUtil.UNMAP_SUPPORTED) {
try {
@@ -53,7 +57,7 @@ public static void freeDB(ByteBuffer buffer) {
}
/**
- * Read crypto buffer size
+ * Read crypto buffer size.
*
* @param conf configuration
* @return hadoop.security.crypto.buffer.size
@@ -77,7 +81,7 @@ public static void checkCodec(CryptoCodec codec) {
}
/**
- * Check and floor buffer size
+ * Check and floor buffer size.
*
* @param codec crypto codec
* @param bufferSize the size of the buffer to be used.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
index 1961a765b4a9a..ac8652cae03ae 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
@@ -251,8 +251,11 @@ public int update(ByteBuffer input, ByteBuffer output)
* @param output the output ByteBuffer
* @return int number of bytes stored in output
* @throws ShortBufferException if there is insufficient space in the output buffer.
- * @throws IllegalBlockSizeException This exception is thrown when the length of data provided to a block cipher is incorrect.
- * @throws BadPaddingException This exception is thrown when a particular padding mechanism is expected for the input data but the data is not padded properly.
+ * @throws IllegalBlockSizeException This exception is thrown when the length
+ * of data provided to a block cipher is incorrect.
+ * @throws BadPaddingException This exception is thrown when a particular
+ * padding mechanism is expected for the input
+ * data but the data is not padded properly.
*/
public int doFinal(ByteBuffer output) throws ShortBufferException,
IllegalBlockSizeException, BadPaddingException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
index 75355c464e21b..e7727684e40ac 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
@@ -537,7 +537,7 @@ private String getAlgorithm(String cipher) {
* @param size length of the key.
* @param algorithm algorithm to use for generating the key.
* @return the generated key.
- * @throws NoSuchAlgorithmException
+ * @throws NoSuchAlgorithmException no such algorithm exception
*/
protected byte[] generateKey(int size, String algorithm)
throws NoSuchAlgorithmException {
@@ -559,7 +559,7 @@ protected byte[] generateKey(int size, String algorithm)
* @param options the options for the new key.
* @return the version name of the first version of the key.
* @throws IOException raised on errors performing I/O.
- * @throws NoSuchAlgorithmException
+ * @throws NoSuchAlgorithmException no such algorithm exception
*/
public KeyVersion createKey(String name, Options options)
throws NoSuchAlgorithmException, IOException {
@@ -602,7 +602,8 @@ public void close() throws IOException {
* @param name the basename of the key
* @return the name of the new version of the key
* @throws IOException raised on errors performing I/O.
- * @throws NoSuchAlgorithmException This exception is thrown when a particular cryptographic algorithm is requested
+ * @throws NoSuchAlgorithmException This exception is thrown when a particular
+ * cryptographic algorithm is requested
* but is not available in the environment.
*/
public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException,
@@ -684,7 +685,7 @@ public static KeyProvider findProvider(List providerList,
* means. If true, the password should be provided by the caller using
* setPassword().
* @return Whether or not the provider requires a password
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public boolean needsPassword() throws IOException {
return false;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
index be2db05842c8e..cc54ad2df1cee 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
@@ -63,7 +63,7 @@ public interface QueueRefiller {
* @param keyName Key name
* @param keyQueue Queue that needs to be filled
* @param numValues number of Values to be added to the queue.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void fillQueueForKey(String keyName,
Queue<E> keyQueue, int numValues) throws IOException;
@@ -344,8 +344,8 @@ public int getSize(String keyName) {
* @param keyName String key name
* @param num Minimum number of values to return.
* @return {@literal List<E>} values returned
- * @throws IOException
- * @throws ExecutionException
+ * @throws IOException raised on errors performing I/O.
+ * @throws ExecutionException execution exception
*/
public List<E> getAtMost(String keyName, int num) throws IOException,
ExecutionException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
index d9818b472f0e5..73b1e79efb010 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
@@ -272,7 +272,7 @@ public static AbstractFileSystem get(final URI uri, final Configuration conf)
* @param supportedScheme the scheme supported by the implementor
* @param authorityNeeded if true then theURI must have authority, if false
* then the URI must have null authority.
- *
+ * @param defaultPort default port to use if port is not specified in the URI.
* @throws URISyntaxException uri has syntax error
*/
public AbstractFileSystem(final URI uri, final String supportedScheme,
@@ -281,11 +281,12 @@ public AbstractFileSystem(final URI uri, final String supportedScheme,
myUri = getUri(uri, supportedScheme, authorityNeeded, defaultPort);
statistics = getStatistics(uri);
}
-
+
/**
* Check that the Uri's scheme matches
- * @param uri
- * @param supportedScheme
+ *
+ * @param uri name URI of the FS
+ * @param supportedScheme supported scheme
*/
public void checkScheme(URI uri, String supportedScheme) {
String scheme = uri.getScheme();
@@ -362,7 +363,7 @@ public URI getUri() {
* If the path is fully qualified URI, then its scheme and authority
* matches that of this file system. Otherwise the path must be
* slash-relative name.
- *
+ * @param path the path
* @throws InvalidPathException if the path is invalid
*/
public void checkPath(Path path) {
@@ -431,7 +432,7 @@ public String getUriPath(final Path p) {
/**
* Make the path fully qualified to this file system
- * @param path
+ * @param path the path
* @return the qualified path
*/
public Path makeQualified(Path path) {
@@ -496,9 +497,9 @@ public FsServerDefaults getServerDefaults(final Path f) throws IOException {
* through any internal symlinks or mount point
* @param p path to be resolved
* @return fully qualified path
- * @throws FileNotFoundException
- * @throws AccessControlException
- * @throws IOException
+ * @throws FileNotFoundException if the path does not exist
+ * @throws AccessControlException if access is denied
+ * @throws IOException raised on errors performing I/O.
* @throws UnresolvedLinkException if symbolic link on path cannot be
* resolved internally
*/
@@ -513,6 +514,18 @@ public Path resolvePath(final Path p) throws FileNotFoundException,
* {@link FileContext#create(Path, EnumSet, Options.CreateOpts...)} except
* that the Path f must be fully qualified and the permission is absolute
* (i.e. umask has been applied).
+ *
+ * @param f the path
+ * @param createFlag create flags
+ * @param opts create options
+ * @throws AccessControlException access control exception
+ * @throws FileAlreadyExistsException file already exists exception
+ * @throws FileNotFoundException file not found exception
+ * @throws ParentNotDirectoryException parent not directory exception
+ * @throws UnsupportedFileSystemException unsupported file system exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return output stream
*/
public final FSDataOutputStream create(final Path f,
final EnumSet<CreateFlag> createFlag, Options.CreateOpts... opts)
@@ -630,6 +643,24 @@ public final FSDataOutputStream create(final Path f,
* The specification of this method matches that of
* {@link #create(Path, EnumSet, Options.CreateOpts...)} except that the opts
* have been declared explicitly.
+ *
+ * @param f the path
+ * @param flag create flag
+ * @param absolutePermission absolute permission
+ * @param bufferSize buffer size
+ * @param replication replication factor
+ * @param blockSize block size
+ * @param progress progress
+ * @param checksumOpt checksum option
+ * @param createParent create parent
+ * @throws AccessControlException access control exception
+ * @throws FileAlreadyExistsException file already exists exception
+ * @throws FileNotFoundException file not found exception
+ * @throws ParentNotDirectoryException parent not directory exception
+ * @throws UnsupportedFileSystemException unsupported filesystem exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return output stream
*/
public abstract FSDataOutputStream createInternal(Path f,
EnumSet<CreateFlag> flag, FsPermission absolutePermission,
@@ -644,6 +675,14 @@ public abstract FSDataOutputStream createInternal(Path f,
* {@link FileContext#mkdir(Path, FsPermission, boolean)} except that the Path
* f must be fully qualified and the permission is absolute (i.e.
* umask has been applied).
+ * @param dir directory
+ * @param permission permission
+ * @param createParent create parent flag
+ * @throws AccessControlException access control exception
+ * @throws FileAlreadyExistsException file already exists exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void mkdir(final Path dir, final FsPermission permission,
final boolean createParent) throws AccessControlException,
@@ -654,6 +693,14 @@ public abstract void mkdir(final Path dir, final FsPermission permission,
* The specification of this method matches that of
* {@link FileContext#delete(Path, boolean)} except that Path f must be for
* this file system.
+ *
+ * @param f the path
+ * @param recursive recursive flag
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return true if the delete succeeded, false otherwise
*/
public abstract boolean delete(final Path f, final boolean recursive)
throws AccessControlException, FileNotFoundException,
@@ -663,6 +710,13 @@ public abstract boolean delete(final Path f, final boolean recursive)
* The specification of this method matches that of
* {@link FileContext#open(Path)} except that Path f must be for this
* file system.
+ *
+ * @param f the path
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return input stream
*/
public FSDataInputStream open(final Path f) throws AccessControlException,
FileNotFoundException, UnresolvedLinkException, IOException {
@@ -673,6 +727,14 @@ public FSDataInputStream open(final Path f) throws AccessControlException,
* The specification of this method matches that of
* {@link FileContext#open(Path, int)} except that Path f must be for this
* file system.
+ *
+ * @param f the path
+ * @param bufferSize buffer size
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return the opened input stream
*/
public abstract FSDataInputStream open(final Path f, int bufferSize)
throws AccessControlException, FileNotFoundException,
@@ -682,6 +744,14 @@ public abstract FSDataInputStream open(final Path f, int bufferSize)
* The specification of this method matches that of
* {@link FileContext#truncate(Path, long)} except that Path f must be for
* this file system.
+ *
+ * @param f the path
+ * @param newLength new length
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return true if the truncate completed, false if it is still in progress
*/
public boolean truncate(Path f, long newLength)
throws AccessControlException, FileNotFoundException,
@@ -694,6 +764,14 @@ public boolean truncate(Path f, long newLength)
* The specification of this method matches that of
* {@link FileContext#setReplication(Path, short)} except that Path f must be
* for this file system.
+ *
+ * @param f the path
+ * @param replication replication
+ * @return true if replication was successfully set, false otherwise
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
*/
public abstract boolean setReplication(final Path f,
final short replication) throws AccessControlException,
@@ -703,6 +781,16 @@ public abstract boolean setReplication(final Path f,
* The specification of this method matches that of
* {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
* f must be for this file system.
+ *
+ * @param src src
+ * @param dst dst
+ * @param options options
+ * @throws AccessControlException access control exception
+ * @throws FileAlreadyExistsException file already exists exception
+ * @throws FileNotFoundException file not found exception
+ * @throws ParentNotDirectoryException parent not directory exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
*/
public final void rename(final Path src, final Path dst,
final Options.Rename... options) throws AccessControlException,
@@ -727,6 +815,15 @@ public final void rename(final Path src, final Path dst,
* File systems that do not have a built in overwrite need implement only this
* method and can take advantage of the default impl of the other
* {@link #renameInternal(Path, Path, boolean)}
+ *
+ * @param src src
+ * @param dst dst
+ * @throws AccessControlException access control exception
+ * @throws FileAlreadyExistsException file already exists exception
+ * @throws FileNotFoundException file not found exception
+ * @throws ParentNotDirectoryException parent not directory exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void renameInternal(final Path src, final Path dst)
throws AccessControlException, FileAlreadyExistsException,
@@ -737,6 +834,16 @@ public abstract void renameInternal(final Path src, final Path dst)
* The specification of this method matches that of
* {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
* f must be for this file system.
+ *
+ * @param src src
+ * @param dst dst
+ * @param overwrite overwrite flag
+ * @throws AccessControlException access control exception
+ * @throws FileAlreadyExistsException file already exists exception
+ * @throws FileNotFoundException file not found exception
+ * @throws ParentNotDirectoryException parent not directory exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
*/
public void renameInternal(final Path src, final Path dst,
boolean overwrite) throws AccessControlException,
@@ -800,6 +907,12 @@ public boolean supportsSymlinks() {
/**
* The specification of this method matches that of
* {@link FileContext#createSymlink(Path, Path, boolean)};
+ *
+ * @param target target
+ * @param link link
+ * @param createParent create parent
+ * @throws IOException raised on errors performing I/O.
+ * @throws UnresolvedLinkException unresolved link exception
*/
public void createSymlink(final Path target, final Path link,
final boolean createParent) throws IOException, UnresolvedLinkException {
@@ -810,6 +923,8 @@ public void createSymlink(final Path target, final Path link,
* Partially resolves the path. This is used during symlink resolution in
* {@link FSLinkResolver}, and differs from the similarly named method
* {@link FileContext#getLinkTarget(Path)}.
+ * @param f the path
+ * @return target path
* @throws IOException subclass implementations may throw IOException
*/
public Path getLinkTarget(final Path f) throws IOException {
@@ -822,6 +937,13 @@ public Path getLinkTarget(final Path f) throws IOException {
* The specification of this method matches that of
* {@link FileContext#setPermission(Path, FsPermission)} except that Path f
* must be for this file system.
+ *
+ * @param f the path
+ * @param permission permission
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void setPermission(final Path f,
final FsPermission permission) throws AccessControlException,
@@ -831,6 +953,14 @@ public abstract void setPermission(final Path f,
* The specification of this method matches that of
* {@link FileContext#setOwner(Path, String, String)} except that Path f must
* be for this file system.
+ *
+ * @param f the path
+ * @param username user name
+ * @param groupname group name
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void setOwner(final Path f, final String username,
final String groupname) throws AccessControlException,
@@ -840,6 +970,14 @@ public abstract void setOwner(final Path f, final String username,
* The specification of this method matches that of
* {@link FileContext#setTimes(Path, long, long)} except that Path f must be
* for this file system.
+ *
+ * @param f the path
+ * @param mtime modify time
+ * @param atime access time
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void setTimes(final Path f, final long mtime,
final long atime) throws AccessControlException, FileNotFoundException,
@@ -849,6 +987,12 @@ public abstract void setTimes(final Path f, final long mtime,
* The specification of this method matches that of
* {@link FileContext#getFileChecksum(Path)} except that Path f must be for
* this file system.
+ *
+ * @param f the path
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
*/
public abstract FileChecksum getFileChecksum(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -859,6 +1003,12 @@ public abstract FileChecksum getFileChecksum(final Path f)
* {@link FileContext#getFileStatus(Path)}
* except that an UnresolvedLinkException may be thrown if a symlink is
* encountered in the path.
+ *
+ * @param f the path
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
*/
public abstract FileStatus getFileStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -870,8 +1020,8 @@ public abstract FileStatus getFileStatus(final Path f)
* In some FileSystem implementations such as HDFS metadata
* synchronization is essential to guarantee consistency of read requests
* particularly in HA setting.
- * @throws IOException
- * @throws UnsupportedOperationException
+ * @throws IOException raised on errors performing I/O.
+ * @throws UnsupportedOperationException if the operation is unsupported
*/
public void msync() throws IOException, UnsupportedOperationException {
throw new UnsupportedOperationException(getClass().getCanonicalName() +
@@ -883,6 +1033,13 @@ public void msync() throws IOException, UnsupportedOperationException {
* {@link FileContext#access(Path, FsAction)}
* except that an UnresolvedLinkException may be thrown if a symlink is
* encountered in the path.
+ *
+ * @param path the path
+ * @param mode fsaction mode
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "Hive"})
public void access(Path path, FsAction mode) throws AccessControlException,
@@ -897,6 +1054,13 @@ public void access(Path path, FsAction mode) throws AccessControlException,
* encountered in the path leading up to the final path component.
* If the file system does not support symlinks then the behavior is
* equivalent to {@link AbstractFileSystem#getFileStatus(Path)}.
+ *
+ * @param f the path
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnsupportedFileSystemException unsupported file system exception
+ * @throws IOException raised on errors performing I/O.
+ * @return file status
*/
public FileStatus getFileLinkStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -908,6 +1072,15 @@ public FileStatus getFileLinkStatus(final Path f)
* The specification of this method matches that of
* {@link FileContext#getFileBlockLocations(Path, long, long)} except that
* Path f must be for this file system.
+ *
+ * @param f the path
+ * @param start start
+ * @param len length
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return BlockLocation Array
*/
public abstract BlockLocation[] getFileBlockLocations(final Path f,
final long start, final long len) throws AccessControlException,
@@ -917,6 +1090,13 @@ public abstract BlockLocation[] getFileBlockLocations(final Path f,
* The specification of this method matches that of
* {@link FileContext#getFsStatus(Path)} except that Path f must be for this
* file system.
+ *
+ * @param f the path
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return Fs Status
*/
public FsStatus getFsStatus(final Path f) throws AccessControlException,
FileNotFoundException, UnresolvedLinkException, IOException {
@@ -927,6 +1107,11 @@ public FsStatus getFsStatus(final Path f) throws AccessControlException,
/**
* The specification of this method matches that of
* {@link FileContext#getFsStatus(Path)}.
+ *
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws IOException raised on errors performing I/O.
+ * @return Fs Status
*/
public abstract FsStatus getFsStatus() throws AccessControlException,
FileNotFoundException, IOException;
@@ -935,6 +1120,13 @@ public abstract FsStatus getFsStatus() throws AccessControlException,
* The specification of this method matches that of
* {@link FileContext#listStatus(Path)} except that Path f must be for this
* file system.
+ *
+ * @param f path
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return FileStatus Iterator
*/
public RemoteIterator<FileStatus> listStatusIterator(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -967,6 +1159,13 @@ public FileStatus next() {
* will have different formats for replicated and erasure coded file. Please
* refer to {@link FileSystem#getFileBlockLocations(FileStatus, long, long)}
* for more details.
+ *
+ * @param f the path
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return LocatedFileStatus iterator
*/
public RemoteIterator<LocatedFileStatus> listLocatedStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -999,6 +1198,12 @@ public LocatedFileStatus next() throws IOException {
* The specification of this method matches that of
* {@link FileContext.Util#listStatus(Path)} except that Path f must be
* for this file system.
+ * @param f the path
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return FileStatus array
*/
public abstract FileStatus[] listStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -1007,7 +1212,8 @@ public abstract FileStatus[] listStatus(final Path f)
/**
* @return an iterator over the corrupt files under the given path
* (may contain duplicates if a file has more than one corrupt block)
- * @throws IOException
+ * @param path the path
+ * @throws IOException raised on errors performing I/O.
*/
public RemoteIterator<Path> listCorruptFileBlocks(Path path)
throws IOException {
@@ -1020,6 +1226,10 @@ public RemoteIterator listCorruptFileBlocks(Path path)
* The specification of this method matches that of
* {@link FileContext#setVerifyChecksum(boolean, Path)} except that Path f
* must be for this file system.
+ *
+ * @param verifyChecksum verify check sum flag
+ * @throws AccessControlException access control exception
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void setVerifyChecksum(final boolean verifyChecksum)
throws AccessControlException, IOException;
@@ -1041,7 +1251,7 @@ public String getCanonicalServiceName() {
* @param renewer the account name that is allowed to renew the token.
* @return List of delegation tokens.
* If delegation tokens not supported then return a list of size zero.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
@@ -1141,7 +1351,7 @@ public AclStatus getAclStatus(Path path) throws IOException {
* @param path Path to modify
* @param name xattr name.
* @param value xattr value.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void setXAttr(Path path, String name, byte[] value)
throws IOException {
@@ -1160,7 +1370,7 @@ public void setXAttr(Path path, String name, byte[] value)
* @param name xattr name.
* @param value xattr value.
* @param flag xattr set flag
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void setXAttr(Path path, String name, byte[] value,
EnumSet<XAttrSetFlag> flag) throws IOException {
@@ -1178,7 +1388,7 @@ public void setXAttr(Path path, String name, byte[] value,
* @param path Path to get extended attribute
* @param name xattr name.
* @return byte[] xattr value.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public byte[] getXAttr(Path path, String name) throws IOException {
throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1196,7 +1406,7 @@ public byte[] getXAttr(Path path, String name) throws IOException {
*
* @return {@literal Map<String, byte[]>} describing the XAttrs of the file
* or directory
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Map<String, byte[]> getXAttrs(Path path) throws IOException {
throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1214,7 +1424,7 @@ public Map<String, byte[]> getXAttrs(Path path) throws IOException {
* @param names XAttr names.
* @return {@literal Map<String, byte[]>} describing the XAttrs of the file
* or directory
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Map<String, byte[]> getXAttrs(Path path, List<String> names)
throws IOException {
@@ -1232,7 +1442,7 @@ public Map<String, byte[]> getXAttrs(Path path, List<String> names)
* @param path Path to get extended attributes
* @return {@literal Map<String, byte[]>} describing the XAttrs of the file
* or directory
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public List<String> listXAttrs(Path path)
throws IOException {
@@ -1249,7 +1459,7 @@ public List<String> listXAttrs(Path path)
*
* @param path Path to remove extended attribute
* @param name xattr name
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void removeXAttr(Path path, String name) throws IOException {
throw new UnsupportedOperationException(getClass().getSimpleName()
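The xattr tags completed above all describe one small API surface. As a hedged illustration (the path and attribute name below are examples, not values from the patch), the same contract driven through the public FileSystem API looks like this:

    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class XAttrExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path file = new Path("/tmp/example.txt");   // example path
        // Attribute names are namespace-prefixed, e.g. "user.".
        fs.setXAttr(file, "user.origin",
            "ingest-job".getBytes(StandardCharsets.UTF_8));
        byte[] value = fs.getXAttr(file, "user.origin");
        System.out.println(new String(value, StandardCharsets.UTF_8));
        fs.removeXAttr(file, "user.origin");
      }
    }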
@@ -1259,6 +1469,10 @@ public void removeXAttr(Path path, String name) throws IOException {
/**
* The specification of this method matches that of
* {@link FileContext#createSnapshot(Path, String)}.
+ *
+ * @param path the path
+ * @param snapshotName snapshot name
+ * @throws IOException raised on errors performing I/O.
*/
public Path createSnapshot(final Path path, final String snapshotName)
throws IOException {
@@ -1269,6 +1483,11 @@ public Path createSnapshot(final Path path, final String snapshotName)
/**
* The specification of this method matches that of
* {@link FileContext#renameSnapshot(Path, String, String)}.
+ *
+ * @param path the path
+ * @param snapshotOldName snapshot old name
+ * @param snapshotNewName snapshot new name
+ * @throws IOException raised on errors performing I/O.
*/
public void renameSnapshot(final Path path, final String snapshotOldName,
final String snapshotNewName) throws IOException {
@@ -1279,6 +1498,10 @@ public void renameSnapshot(final Path path, final String snapshotOldName,
/**
* The specification of this method matches that of
* {@link FileContext#deleteSnapshot(Path, String)}.
+ *
+ * @param snapshotDir snapshot dir
+ * @param snapshotName snapshot name
+ * @throws IOException raised on errors performing I/O.
*/
public void deleteSnapshot(final Path snapshotDir, final String snapshotName)
throws IOException {
@@ -1289,7 +1512,7 @@ public void deleteSnapshot(final Path snapshotDir, final String snapshotName)
/**
* Set the source path to satisfy storage policy.
* @param path The source path referring to either a directory or a file.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void satisfyStoragePolicy(final Path path) throws IOException {
throw new UnsupportedOperationException(
@@ -1303,6 +1526,7 @@ public void satisfyStoragePolicy(final Path path) throws IOException {
* @param policyName the name of the target storage policy. The list
* of supported Storage policies can be retrieved
* via {@link #getAllStoragePolicies}.
+ * @throws IOException raised on errors performing I/O.
*/
public void setStoragePolicy(final Path path, final String policyName)
throws IOException {
@@ -1314,7 +1538,7 @@ public void setStoragePolicy(final Path path, final String policyName)
/**
* Unset the storage policy set for a given file or directory.
* @param src file or directory path.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void unsetStoragePolicy(final Path src) throws IOException {
throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1326,7 +1550,7 @@ public void unsetStoragePolicy(final Path src) throws IOException {
*
* @param src file or directory path.
* @return storage policy for give file.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public BlockStoragePolicySpi getStoragePolicy(final Path src)
throws IOException {
@@ -1338,7 +1562,7 @@ public BlockStoragePolicySpi getStoragePolicy(final Path src)
* Retrieve all the storage policies supported by this file system.
*
* @return all storage policies supported by this filesystem.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Collection<? extends BlockStoragePolicySpi> getAllStoragePolicies()
throws IOException {
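Since the snapshot and storage-policy methods documented above are among the less familiar parts of this interface, a minimal sketch may help; the policy name "COLD" is an HDFS example and support varies by filesystem:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.BlockStoragePolicySpi;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class StoragePolicyExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // Enumerate what the filesystem actually supports first.
        for (BlockStoragePolicySpi policy : fs.getAllStoragePolicies()) {
          System.out.println(policy.getName());
        }
        Path dir = new Path("/archive");   // example path
        fs.setStoragePolicy(dir, "COLD");  // pick a name printed above
        System.out.println(fs.getStoragePolicy(dir).getName());
      }
    }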
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java
index 892a01f0f21fc..ad41107e4adc7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java
@@ -46,7 +46,7 @@ public interface DelegationTokenIssuer {
/**
* Unconditionally get a new token with the optional renewer. Returning
* null indicates the service does not issue tokens.
- * @param renewer
+ * @param renewer the account name that is allowed to renew the token.
* @return the token
* @throws IOException raised on errors performing I/O.
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java
index 2e27b3ca5b5fd..f71385f76f8a4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java
@@ -63,12 +63,15 @@ public abstract class TokenRenewer {
public abstract long renew(Token<?> token,
Configuration conf
) throws IOException, InterruptedException;
-
- /**
- * Cancel the given token
- * @throws IOException
- * @throws InterruptedException
- */
+
+ /**
+ * Cancel the given token
+ *
+ * @throws IOException raised on errors performing I/O.
+ * @throws InterruptedException thrown when a thread is waiting, sleeping,
+ * or otherwise occupied, and the thread is interrupted,
+ * either before or during the activity.
+ */
public abstract void cancel(Token<?> token,
Configuration conf
) throws IOException, InterruptedException;
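Taken together, these hunks converge on one Javadoc shape: every @param, @return and @throws carries a description. As a condensed, non-normative illustration only (the wording here is hypothetical, not text the series commits), the neighbouring renew() method would end up documented like this:

    /**
     * Renew the given token.
     *
     * @param token the token to renew
     * @param conf configuration used to contact the token's service
     * @return the new expiration time
     * @throws IOException raised on errors performing I/O.
     * @throws InterruptedException if the renewing thread is interrupted.
     */
    public abstract long renew(Token<?> token, Configuration conf)
        throws IOException, InterruptedException;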
From 8ccd2b5fcd8208d630f0692c30e1f6897cdf0968 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Tue, 10 May 2022 15:53:35 -0700
Subject: [PATCH 19/53] HADOOP-18229. Fix some java doc compilation errors
OpensslCipher.java empty tag KeyProvider.java warning: no @return
KeyProviderCryptoExtension.java warning: no @throws for java.io.IOException
TokenRenewer.java warning: no @param for token, checkstyle ValueQueue.java
warning: no description for @throws AbstractFileSystem.java warning: no
@return
---
.../main/java/org/apache/hadoop/crypto/OpensslCipher.java | 8 ++------
.../java/org/apache/hadoop/crypto/key/KeyProvider.java | 2 ++
.../hadoop/crypto/key/KeyProviderCryptoExtension.java | 2 ++
.../java/org/apache/hadoop/crypto/key/kms/ValueQueue.java | 6 +++---
.../java/org/apache/hadoop/fs/AbstractFileSystem.java | 3 +++
.../org/apache/hadoop/security/token/TokenRenewer.java | 5 ++++-
6 files changed, 16 insertions(+), 10 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
index ac8652cae03ae..b166cfc8611b3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
@@ -230,21 +230,17 @@ public int update(ByteBuffer input, ByteBuffer output)
* Finishes a multiple-part operation. The data is encrypted or decrypted,
* depending on how this cipher was initialized.
*
- * <p/>
* The result is stored in the output buffer. Upon return, the output buffer's
* position will have advanced by n, where n is the value returned by this
* method; the output buffer's limit will not have changed.
- * <p/>
- *
+ * <p>
* If output.remaining() bytes are insufficient to hold the result,
* a ShortBufferException is thrown.
*
- * <p/>
* Upon finishing, this method resets this cipher object to the state it was
* in when previously initialized. That is, the object is available to encrypt
* or decrypt more data.
- * <p/>
- *
+ * <p>
* If any exception is thrown, this cipher object need to be reset before it
* can be used again.
*
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
index e7727684e40ac..5b3df7d3a8196 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
@@ -466,6 +466,7 @@ public abstract KeyVersion getKeyVersion(String versionName
* Get key metadata in bulk.
* @param names the names of the keys to get
* @throws IOException raised on errors performing I/O.
+ * @return an array of key Metadata
*/
public Metadata[] getKeysMetadata(String... names) throws IOException {
Metadata[] result = new Metadata[names.length];
@@ -479,6 +480,7 @@ public Metadata[] getKeysMetadata(String... names) throws IOException {
* Get the key material for all versions of a specific key name.
* @return the list of key material
* @throws IOException raised on errors performing I/O.
+ * @return KeyVersion List
*/
public abstract List<KeyVersion> getKeyVersions(String name) throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
index cc767ab545488..99cab35e351c6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
@@ -178,6 +178,7 @@ public interface CryptoExtension extends KeyProviderExtension.Extension {
* Calls to this method allows the underlying KeyProvider to warm-up any
* implementation specific caches used to store the Encrypted Keys.
* @param keyNames Array of Key Names
+ * @throws IOException thrown if the key material could not be encrypted
*/
public void warmUpEncryptedKeys(String... keyNames)
throws IOException;
@@ -487,6 +488,7 @@ protected KeyProviderCryptoExtension(KeyProvider keyProvider,
* Notifies the Underlying CryptoExtension implementation to warm up any
* implementation specific caches for the specified KeyVersions
* @param keyNames Arrays of key Names
+ * @throws IOException raised on errors performing I/O.
*/
public void warmUpEncryptedKeys(String... keyNames)
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
index cc54ad2df1cee..ebe41b71f9517 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
@@ -268,7 +268,7 @@ public ValueQueue(final int numValues, final float lowWaterMark, long expiry,
* Initializes the Value Queues for the provided keys by calling the
* fill Method with "numInitValues" values
* @param keyNames Array of key Names
- * @throws ExecutionException
+ * @throws ExecutionException if filling the queue for any key fails
*/
public void initializeQueuesForKeys(String... keyNames)
throws ExecutionException {
@@ -285,8 +285,8 @@ public void initializeQueuesForKeys(String... keyNames)
* function to add 1 value to Queue and then drain it.
* @param keyName String key name
* @return E the next value in the Queue
- * @throws IOException
- * @throws ExecutionException
+ * @throws IOException raised on errors performing I/O.
+ * @throws ExecutionException if retrieving the next value fails
*/
public E getNext(String keyName)
throws IOException, ExecutionException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
index 73b1e79efb010..0ef81b60329a4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
@@ -993,6 +993,7 @@ public abstract void setTimes(final Path f, final long mtime,
* @throws FileNotFoundException file not found exception
* @throws UnresolvedLinkException unresolved link exception
* @throws IOException raised on errors performing I/O.
+ * @return the file checksum
*/
public abstract FileChecksum getFileChecksum(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -1009,6 +1010,7 @@ public abstract FileChecksum getFileChecksum(final Path f)
* @throws FileNotFoundException file not found exception
* @throws UnresolvedLinkException unresolved link exception
* @throws IOException raised on errors performing I/O.
+ * @return the file status
*/
public abstract FileStatus getFileStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -1473,6 +1475,7 @@ public void removeXAttr(Path path, String name) throws IOException {
* @param path the path
* @param snapshotName snapshot name
* @throws IOException raised on errors performing I/O.
+ * @return the snapshot path
*/
public Path createSnapshot(final Path path, final String snapshotName)
throws IOException {
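With createSnapshot's @return in place, the snapshot trio reads naturally in use. A hedged sketch (the path and snapshot names are illustrative, and the directory must have snapshots enabled):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class SnapshotExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path dir = new Path("/data");              // must be snapshottable
        Path snap = fs.createSnapshot(dir, "s1");  // the returned snapshot path
        fs.renameSnapshot(dir, "s1", "s1-old");
        fs.deleteSnapshot(dir, "s1-old");
        System.out.println("snapshot lived at " + snap);
      }
    }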
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java
index f71385f76f8a4..032978aad3b6d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java
@@ -65,7 +65,10 @@ public abstract long renew(Token> token,
) throws IOException, InterruptedException;
/**
- * Cancel the given token
+ * Cancel the given token.
+ *
+ * @param token the token being cancelled
+ * @param conf configuration
*
* @throws IOException raised on errors performing I/O.
* @throws InterruptedException thrown when a thread is waiting, sleeping,
From 26c9f37174d20552fc6cab2e709da6bf5207f1f3 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Wed, 11 May 2022 00:44:16 -0700
Subject: [PATCH 20/53] HADOOP-18229. Fix some java doc compilation errors
FileContext.java no @return, no description for @param, no @throws for
java.io.FileNotFoundException etc, FileSystem.java no @throws for
java.io.IOException, no @param for uri etc KeyProvider.java warning: @return
has already been specified
---
.../apache/hadoop/crypto/key/KeyProvider.java | 3 +-
.../org/apache/hadoop/fs/FileContext.java | 57 +++++++++++++------
.../java/org/apache/hadoop/fs/FileSystem.java | 20 +++++--
3 files changed, 56 insertions(+), 24 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
index 5b3df7d3a8196..19e620b0e84b4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
@@ -478,9 +478,10 @@ public Metadata[] getKeysMetadata(String... names) throws IOException {
/**
* Get the key material for all versions of a specific key name.
+ *
+ * @param name the base name of the key
* @return the list of key material
* @throws IOException raised on errors performing I/O.
- * @return KeyVersion List
*/
public abstract List<KeyVersion> getKeyVersions(String name) throws IOException;
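For context on the getKeyVersions() contract documented above, a small sketch; the JCEKS provider URI and key name are examples, not values from the patch:

    import java.net.URI;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.crypto.key.KeyProvider;
    import org.apache.hadoop.crypto.key.KeyProviderFactory;

    public class KeyVersionsExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        KeyProvider provider = KeyProviderFactory.get(
            new URI("jceks://file/tmp/test.jceks"), conf);
        List<KeyProvider.KeyVersion> versions =
            provider.getKeyVersions("mykey");      // example key name
        for (KeyProvider.KeyVersion kv : versions) {
          System.out.println(kv.getVersionName());
        }
      }
    }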
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index f3004ce7e03a3..e2a96bc16880c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -411,6 +411,7 @@ protected static FileContext getFileContext(
*
* @throws UnsupportedFileSystemException If the file system from the default
* configuration is not supported
+ * @return file context
*/
public static FileContext getFileContext()
throws UnsupportedFileSystemException {
@@ -554,6 +555,7 @@ public void setWorkingDirectory(final Path newWDir) throws IOException {
/**
* Gets the working directory for wd-relative names (such a "foo/bar").
+ * @return the path
*/
public Path getWorkingDirectory() {
return workingDir;
@@ -600,13 +602,14 @@ public void setUMask(final FsPermission newUmask) {
* @throws FileNotFoundException If f does not exist
* @throws AccessControlException if access denied
* @throws IOException If an IO Error occurred
- *
+ * @throws UnresolvedLinkException If unresolved link occurred
+ *
* Exceptions applicable to file systems accessed over RPC:
* @throws RpcClientException If an exception occurred in the RPC client
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
- *
+ *
* RuntimeExceptions:
* @throws InvalidPathException If path f is not valid
*/
@@ -620,7 +623,7 @@ public Path resolvePath(final Path f) throws FileNotFoundException,
* A Fully-qualified path has scheme and authority specified and an absolute
* path.
* Use the default file system and working dir in this FileContext to qualify.
- * @param path
+ * @param path the path
* @return qualified path
*/
public Path makeQualified(final Path path) {
@@ -759,6 +762,7 @@ public FSDataOutputStream build() throws IOException {
*
* Client should expect {@link FSDataOutputStreamBuilder#build()} throw the
* same exceptions as create(Path, EnumSet, CreateOpts...).
+ * @throws IOException If an I/O error occurred
*/
public FSDataOutputStreamBuilder create(final Path f)
throws IOException {
@@ -832,6 +836,8 @@ public Void next(final AbstractFileSystem fs, final Path p)
*
* RuntimeExceptions:
* @throws InvalidPathException If path f is invalid
+ *
+ * @return true if delete succeeded, false otherwise
*/
public boolean delete(final Path f, final boolean recursive)
throws AccessControlException, FileNotFoundException,
@@ -862,6 +868,7 @@ public Boolean next(final AbstractFileSystem fs, final Path p)
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
+ * @return input stream
*/
public FSDataInputStream open(final Path f) throws AccessControlException,
FileNotFoundException, UnsupportedFileSystemException, IOException {
@@ -892,6 +899,7 @@ public FSDataInputStream next(final AbstractFileSystem fs, final Path p)
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
+ * @return input stream
*/
public FSDataInputStream open(final Path f, final int bufferSize)
throws AccessControlException, FileNotFoundException,
@@ -1001,6 +1009,7 @@ public Boolean next(final AbstractFileSystem fs, final Path p)
*
* @param src path to be renamed
* @param dst new path after rename
+ * @param options rename options
*
* @throws AccessControlException If access is denied
* @throws FileAlreadyExistsException If dst already exists and
@@ -1613,9 +1622,12 @@ public RemoteIterator<LocatedFileStatus> next(
}
/**
+ * List corrupt file blocks.
+ *
+ * @param path the path
* @return an iterator over the corrupt files under the given path
* (may contain duplicates if a file has more than one corrupt block)
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public RemoteIterator<Path> listCorruptFileBlocks(Path path)
throws IOException {
@@ -2276,7 +2288,7 @@ private static void checkDependencies(Path qualSrc, Path qualDst)
* Are qualSrc and qualDst of the same file system?
* @param qualPath1 - fully qualified path
* @param qualPath2 - fully qualified path
- * @return
+ * @return true if both paths are on the same file system, false otherwise
*/
private static boolean isSameFS(Path qualPath1, Path qualPath2) {
URI srcUri = qualPath1.toUri();
@@ -2299,6 +2311,13 @@ public synchronized void run() {
/**
* Resolves all symbolic links in the specified path.
* Returns the new path object.
+ *
+ * @param f the path
+ * @throws FileNotFoundException If f does not exist
+ * @throws UnresolvedLinkException If unresolved link occurred
+ * @throws AccessControlException If access is denied.
+ * @throws IOException If an I/O error occurred
+ * @return the resolved path
*/
protected Path resolve(final Path f) throws FileNotFoundException,
UnresolvedLinkException, AccessControlException, IOException {
@@ -2316,6 +2335,7 @@ public Path next(final AbstractFileSystem fs, final Path p)
* to, but not including the final path component.
* @param f path to resolve
* @return the new path object.
+ * @throws IOException If an I/O error occurred
*/
protected Path resolveIntermediate(final Path f) throws IOException {
return new FSLinkResolver<FileStatus>() {
@@ -2334,7 +2354,7 @@ public FileStatus next(final AbstractFileSystem fs, final Path p)
* @param f
* Path which needs to be resolved
* @return List of AbstractFileSystems accessed in the path
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
Set<AbstractFileSystem> resolveAbstractFileSystems(final Path f)
throws IOException {
@@ -2395,7 +2415,7 @@ public static Map<URI, Statistics> getAllStatistics() {
* @param p Path for which delegations tokens are requested.
* @param renewer the account name that is allowed to renew the token.
* @return List of delegation tokens.
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
@InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
public List<Token<?>> getDelegationTokens(
@@ -2547,7 +2567,7 @@ public AclStatus next(final AbstractFileSystem fs, final Path p)
* @param path Path to modify
* @param name xattr name.
* @param value xattr value.
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public void setXAttr(Path path, String name, byte[] value)
throws IOException {
@@ -2566,7 +2586,7 @@ public void setXAttr(Path path, String name, byte[] value)
* @param name xattr name.
* @param value xattr value.
* @param flag xattr set flag
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public void setXAttr(Path path, final String name, final byte[] value,
final EnumSet<XAttrSetFlag> flag) throws IOException {
@@ -2591,7 +2611,7 @@ public Void next(final AbstractFileSystem fs, final Path p)
* @param path Path to get extended attribute
* @param name xattr name.
* @return byte[] xattr value.
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public byte[] getXAttr(Path path, final String name) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2614,7 +2634,7 @@ public byte[] next(final AbstractFileSystem fs, final Path p)
* @param path Path to get extended attributes
* @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs
* of the file or directory
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public Map<String, byte[]> getXAttrs(Path path) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2638,7 +2658,7 @@ public Map<String, byte[]> next(final AbstractFileSystem fs, final Path p)
* @param names XAttr names.
* @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs
* of the file or directory
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public Map<String, byte[]> getXAttrs(Path path, final List<String> names)
throws IOException {
@@ -2661,7 +2681,7 @@ public Map<String, byte[]> next(final AbstractFileSystem fs, final Path p)
*
* @param path Path to remove extended attribute
* @param name xattr name
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public void removeXAttr(Path path, final String name) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2685,7 +2705,7 @@ public Void next(final AbstractFileSystem fs, final Path p)
* @param path Path to get extended attributes
* @return List{@literal <}String{@literal >} of the XAttr names of the
* file or directory
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public List<String> listXAttrs(Path path) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2802,7 +2822,7 @@ public Void next(final AbstractFileSystem fs, final Path p)
/**
* Set the source path to satisfy storage policy.
* @param path The source path referring to either a directory or a file.
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public void satisfyStoragePolicy(final Path path)
throws IOException {
@@ -2824,6 +2844,7 @@ public Void next(final AbstractFileSystem fs, final Path p)
* @param policyName the name of the target storage policy. The list
* of supported Storage policies can be retrieved
* via {@link #getAllStoragePolicies}.
+ * @throws IOException If an I/O error occurred
*/
public void setStoragePolicy(final Path path, final String policyName)
throws IOException {
@@ -2841,7 +2862,7 @@ public Void next(final AbstractFileSystem fs, final Path p)
/**
* Unset the storage policy set for a given file or directory.
* @param src file or directory path.
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public void unsetStoragePolicy(final Path src) throws IOException {
final Path absF = fixRelativePart(src);
@@ -2860,7 +2881,7 @@ public Void next(final AbstractFileSystem fs, final Path p)
*
* @param path file or directory path.
* @return storage policy for give file.
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public BlockStoragePolicySpi getStoragePolicy(Path path) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2878,7 +2899,7 @@ public BlockStoragePolicySpi next(final AbstractFileSystem fs,
* Retrieve all the storage policies supported by this file system.
*
* @return all storage policies supported by this filesystem.
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public Collection<? extends BlockStoragePolicySpi> getAllStoragePolicies()
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 180f5d1608164..04bb38a28ccda 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -281,6 +281,8 @@ public FileSystem run() throws IOException {
/**
* Returns the configured FileSystem implementation.
* @param conf the configuration to use
+ * @return FileSystem
+ * @throws IOException If an I/O error occurred
*/
public static FileSystem get(Configuration conf) throws IOException {
return get(getDefaultUri(conf), conf);
@@ -391,6 +393,7 @@ protected URI getCanonicalUri() {
* not specified and if {@link #getDefaultPort()} returns a
* default port.
*
+ * @param uri the URI to canonicalize
* @return URI
* @see NetUtils#getCanonicalUri(URI, int)
*/
@@ -458,7 +461,14 @@ public String getCanonicalServiceName() {
@Deprecated
public String getName() { return getUri().toString(); }
- /** @deprecated call {@link #get(URI, Configuration)} instead. */
+ /**
+ * @deprecated call {@link #get(URI, Configuration)} instead.
+ *
+ * @param name name
+ * @param conf configuration
+ * @return file system
+ * @throws IOException If an I/O error occurred
+ */
@Deprecated
public static FileSystem getNamed(String name, Configuration conf)
throws IOException {
@@ -1948,7 +1958,7 @@ public boolean hasMore() {
* if this is the first call.
* @return
* @throws FileNotFoundException
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
@InterfaceAudience.Private
protected DirectoryEntries listStatusBatch(Path f, byte[] token) throws
@@ -2685,7 +2695,7 @@ public short getDefaultReplication(Path path) {
* In some FileSystem implementations such as HDFS metadata
* synchronization is essential to guarantee consistency of read requests
* particularly in HA setting.
- * @throws IOException
+ * @throws IOException If an I/O error occurred
* @throws UnsupportedOperationException
*/
public void msync() throws IOException, UnsupportedOperationException {
@@ -3221,7 +3231,7 @@ public void removeXAttr(Path path, String name) throws IOException {
/**
* Set the source path to satisfy storage policy.
* @param path The source path referring to either a directory or a file.
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
public void satisfyStoragePolicy(final Path path) throws IOException {
throw new UnsupportedOperationException(
@@ -3529,7 +3539,7 @@ FileSystem getUnique(URI uri, Configuration conf) throws IOException{
* @param conf configuration
* @param key key to store/retrieve this FileSystem in the cache
* @return a cached or newly instantiated FileSystem.
- * @throws IOException
+ * @throws IOException If an I/O error occurred
*/
private FileSystem getInternal(URI uri, Configuration conf, Key key)
throws IOException{
From 430385b118bda79aa280e8462849d0031cd27a9d Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Wed, 11 May 2022 05:24:44 -0700
Subject: [PATCH 21/53] HADOOP-18229. Fix some java doc compilation errors.
FileContext.java no @param for src, empty tag etc, FileSystem.java
warning: no @param for f, warning: no @return etc
---
.../org/apache/hadoop/fs/FileContext.java | 30 ++++++++++-----
.../java/org/apache/hadoop/fs/FileSystem.java | 37 ++++++++++++++++++-
2 files changed, 55 insertions(+), 12 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index e2a96bc16880c..29b711e492138 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -1811,6 +1811,11 @@ public ContentSummary getContentSummary(Path f)
/**
* See {@link #listStatus(Path[], PathFilter)}
+ *
+ * @param files the array of paths to list
+ * @throws AccessControlException If access is denied
+ * @throws FileNotFoundException If files does not exist
+ * @throws IOException If an I/O error occurred
*/
public FileStatus[] listStatus(Path[] files) throws AccessControlException,
FileNotFoundException, IOException {
@@ -2066,36 +2071,29 @@ public LocatedFileStatus next() throws IOException {
*      <dt> <tt> ? </tt>
*      <dd> Matches any single character.
*
- *      <p>
*      <dt> <tt> * </tt>
*      <dd> Matches zero or more characters.
*
- *      <p>
*      <dt> <tt> [abc] </tt>
*      <dd> Matches a single character from character set
*       <tt>{a,b,c}</tt>.
*
- *      <p>
*      <dt> <tt> [a-b] </tt>
*      <dd> Matches a single character from the character range
*       <tt>{a...b}</tt>. Note: character <tt>a</tt> must be
*       lexicographically less than or equal to character <tt>b</tt>.
*
- *      <p>
*      <dt> <tt> [^a] </tt>
*      <dd> Matches a single char that is not from character set or range
*       <tt>{a}</tt>. Note that the <tt>^</tt> character must occur
*       immediately to the right of the opening bracket.
*
- *      <p>
*      <dt> <tt> \c </tt>
*      <dd> Removes (escapes) any special meaning of character c.
*
- *      <p>
*      <dt> <tt> {ab,cd} </tt>
*      <dd> Matches a string from the string set <tt>{ab, cd}</tt>
- *
- *      <p>
+ *
*      <dt> <tt> {ab,c{de,fh}} </tt>
*      <dd> Matches a string from string set <tt>{ab, cde, cfh}</tt>
*
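The glob table above is easiest to read next to a concrete call. A hedged sketch (paths are illustrative) using FileContext.Util#globStatus:

    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.Path;

    public class GlobExample {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getFileContext();
        // [01] matches a single character from the set {0,1}, so this
        // pattern matches app-2022-05-10.log and app-2022-05-11.log.
        FileStatus[] matches =
            fc.util().globStatus(new Path("/logs/app-2022-05-1[01].log"));
        if (matches != null) {
          for (FileStatus status : matches) {
            System.out.println(status.getPath());
          }
        }
      }
    }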
@@ -2156,6 +2154,18 @@ public FileStatus[] globStatus(final Path pathPattern,
/**
* Copy file from src to dest. See
* {@link #copy(Path, Path, boolean, boolean)}
+ *
+ * @param src the source path
+ * @param dst the destination path
+ * @throws AccessControlException If access is denied
+ * @throws FileAlreadyExistsException If dst already exists
+ * @throws FileNotFoundException If src does not exist
+ * @throws ParentNotDirectoryException If parent of src is not a
+ * directory.
+ * @throws UnsupportedFileSystemException If file system for
+ * src/dst is not supported
+ * @thorws IOException If an I/O error occurred
+ * @return true if the copy succeeded, false otherwise
*/
public boolean copy(final Path src, final Path dst)
throws AccessControlException, FileAlreadyExistsException,
@@ -2166,8 +2176,8 @@ public boolean copy(final Path src, final Path dst)
/**
* Copy from src to dst, optionally deleting src and overwriting dst.
- * @param src
- * @param dst
+ * @param src the source path
+ * @param dst the destination path
* @param deleteSource - delete src if true
* @param overwrite overwrite dst if true; throw IOException if dst exists
* and overwrite is false.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 04bb38a28ccda..0bd78498a1802 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -1084,6 +1084,7 @@ public FSDataOutputStream create(Path f, boolean overwrite)
* @param f the file to create
* @param progress to report progress
* @throws IOException IO failure
+ * @return output stream
*/
public FSDataOutputStream create(Path f, Progressable progress)
throws IOException {
@@ -1100,6 +1101,7 @@ public FSDataOutputStream create(Path f, Progressable progress)
* @param f the file to create
* @param replication the replication factor
* @throws IOException IO failure
+ * @return output stream
*/
public FSDataOutputStream create(Path f, short replication)
throws IOException {
@@ -1118,6 +1120,7 @@ public FSDataOutputStream create(Path f, short replication)
* @param replication the replication factor
* @param progress to report progress
* @throws IOException IO failure
+ * @return output stream
*/
public FSDataOutputStream create(Path f, short replication,
Progressable progress) throws IOException {
@@ -1135,6 +1138,7 @@ public FSDataOutputStream create(Path f, short replication,
* the file will be overwritten, and if false an error will be thrown.
* @param bufferSize the size of the buffer to be used.
* @throws IOException IO failure
+ * @return output stream
*/
public FSDataOutputStream create(Path f,
boolean overwrite,
@@ -1155,6 +1159,7 @@ public FSDataOutputStream create(Path f,
* the file will be overwritten, and if false an error will be thrown.
* @param bufferSize the size of the buffer to be used.
* @throws IOException IO failure
+ * @return output stream
*/
public FSDataOutputStream create(Path f,
boolean overwrite,
@@ -1175,6 +1180,7 @@ public FSDataOutputStream create(Path f,
* @param bufferSize the size of the buffer to be used.
* @param replication required block replication for the file.
* @throws IOException IO failure
+ * @return output stream
*/
public FSDataOutputStream create(Path f,
boolean overwrite,
@@ -1193,6 +1199,7 @@ public FSDataOutputStream create(Path f,
* @param bufferSize the size of the buffer to be used.
* @param replication required block replication for the file.
* @throws IOException IO failure
+ * @return output stream
*/
public FSDataOutputStream create(Path f,
boolean overwrite,
@@ -1219,6 +1226,7 @@ public FSDataOutputStream create(Path f,
* @param progress the progress reporter
* @throws IOException IO failure
* @see #setPermission(Path, FsPermission)
+ * @return output stream
*/
public abstract FSDataOutputStream create(Path f,
FsPermission permission,
@@ -1240,6 +1248,7 @@ public abstract FSDataOutputStream create(Path f,
* @param progress the progress reporter
* @throws IOException IO failure
* @see #setPermission(Path, FsPermission)
+ * @return output stream
*/
public FSDataOutputStream create(Path f,
FsPermission permission,
@@ -1266,6 +1275,7 @@ public FSDataOutputStream create(Path f,
* found in conf will be used.
* @throws IOException IO failure
* @see #setPermission(Path, FsPermission)
+ * @return output stream
*/
public FSDataOutputStream create(Path f,
FsPermission permission,
@@ -1287,6 +1297,16 @@ public FSDataOutputStream create(Path f,
* the permission with umask before calling this method.
* This a temporary method added to support the transition from FileSystem
* to FileContext for user applications.
+ *
+ * @param f path
+ * @param absolutePermission permission
+ * @param flag create flag
+ * @param bufferSize buffer size
+ * @param replication replication
+ * @param blockSize block size
+ * @param progress progress
+ * @param checksumOpt checksum option
+ * @return output stream
* @throws IOException IO failure
*/
@Deprecated
@@ -1341,6 +1361,11 @@ protected boolean primitiveMkdir(Path f, FsPermission absolutePermission)
* with umask before calling this method.
* This a temporary method added to support the transition from FileSystem
* to FileContext for user applications.
+ *
+ * @param f the path
+ * @param absolutePermission permission
+ * @param createParent create parent
+ * @throws IOException IO failure
*/
@Deprecated
protected void primitiveMkdir(Path f, FsPermission absolutePermission,
@@ -1380,6 +1405,7 @@ protected void primitiveMkdir(Path f, FsPermission absolutePermission,
* @param progress the progress reporter
* @throws IOException IO failure
* @see #setPermission(Path, FsPermission)
+ * @return output stream
*/
public FSDataOutputStream createNonRecursive(Path f,
boolean overwrite,
@@ -1403,6 +1429,7 @@ public FSDataOutputStream createNonRecursive(Path f,
* @param progress the progress reporter
* @throws IOException IO failure
* @see #setPermission(Path, FsPermission)
+ * @return output stream
*/
public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -1426,6 +1453,7 @@ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
* @param progress the progress reporter
* @throws IOException IO failure
* @see #setPermission(Path, FsPermission)
+ * @return output stream
*/
public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize,
@@ -1440,6 +1468,7 @@ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
* Important: the default implementation is not atomic
* @param f path to use for create
* @throws IOException IO failure
+ * @return true if the file was created, false otherwise
*/
public boolean createNewFile(Path f) throws IOException {
if (exists(f)) {
@@ -1474,6 +1503,7 @@ public FSDataOutputStream append(Path f) throws IOException {
* @throws IOException IO failure
* @throws UnsupportedOperationException if the operation is unsupported
* (default).
+ * @return output stream
*/
public FSDataOutputStream append(Path f, int bufferSize) throws IOException {
return append(f, bufferSize, null);
@@ -1487,6 +1517,7 @@ public FSDataOutputStream append(Path f, int bufferSize) throws IOException {
* @throws IOException IO failure
* @throws UnsupportedOperationException if the operation is unsupported
* (default).
+ * @return output stream
*/
public abstract FSDataOutputStream append(Path f, int bufferSize,
Progressable progress) throws IOException;
@@ -1525,7 +1556,7 @@ public short getReplication(Path src) throws IOException {
* This is the default behavior.
* @param src file name
* @param replication new replication
- * @throws IOException
+ * @throws IOException an IO failure
* @return true if successful, or the feature in unsupported;
* false if replication is supported but the file does not exist,
* or is a directory
@@ -1554,11 +1585,12 @@ public boolean setReplication(Path src, short replication)
*
* If OVERWRITE option is not passed as an argument, rename fails
* if the dst already exists.
+ * <p>
*
* If OVERWRITE option is passed as an argument, rename overwrites
* the dst if it is a file or an empty directory. Rename fails if dst is
* a non-empty directory.
- *
+ * <p>
* Note that atomicity of rename is dependent on the file system
* implementation. Please refer to the file system documentation for
* details. This default implementation is non atomic.
@@ -1566,6 +1598,7 @@ public boolean setReplication(Path src, short replication)
* This method is deprecated since it is a temporary method added to
* support the transition from FileSystem to FileContext for user
* applications.
+ * <p>
*
* @param src path to be renamed
* @param dst new path after rename
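The OVERWRITE semantics spelled out above are easiest to see through the public FileContext API, where rename with options is not deprecated. A sketch with illustrative paths:

    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Options.Rename;
    import org.apache.hadoop.fs.Path;

    public class RenameExample {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getFileContext();
        // Without OVERWRITE this fails if /data/out already exists.
        fc.rename(new Path("/data/tmp"), new Path("/data/out"));
        // With OVERWRITE the destination is replaced if it is a file
        // or an empty directory.
        fc.rename(new Path("/data/tmp2"), new Path("/data/out"),
            Rename.OVERWRITE);
      }
    }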
From e0188932cae0942a54a147254a92dced566dfe02 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Wed, 11 May 2022 16:05:16 -0700
Subject: [PATCH 22/53] HADOOP-18229. Fix some java doc compilation errors
FileContext.java no description for @param, unknown tag: thorws etc
FileSystem.java no @return, no @param for uri etc
KeyProviderCryptoExtension.java no description for @param
---
.../key/KeyProviderCryptoExtension.java | 2 +-
.../org/apache/hadoop/fs/FileContext.java | 27 ++++++++++---------
.../java/org/apache/hadoop/fs/FileSystem.java | 25 +++++++++++++++--
3 files changed, 39 insertions(+), 15 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
index 99cab35e351c6..7e85eef5cc741 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
@@ -560,7 +560,7 @@ public EncryptedKeyVersion reencryptEncryptedKey(EncryptedKeyVersion ekv)
* Calls {@link CryptoExtension#drain(String)} for the given key name on the
* underlying {@link CryptoExtension}.
*
- * @param keyName
+ * @param keyName key name
*/
public void drain(String keyName) {
getExtension().drain(keyName);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index 29b711e492138..d48918f280ee7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -366,8 +366,8 @@ public AbstractFileSystem run() throws UnsupportedFileSystemException {
* Create a FileContext with specified FS as default using the specified
* config.
*
- * @param defFS
- * @param aConf
+ * @param defFS default fs
+ * @param aConf configuration
* @return new FileContext with specified FS as default.
*/
public static FileContext getFileContext(final AbstractFileSystem defFS,
@@ -378,7 +378,7 @@ public static FileContext getFileContext(final AbstractFileSystem defFS,
/**
* Create a FileContext for specified file system using the default config.
*
- * @param defaultFS
+ * @param defaultFS default fs
* @return a FileContext with the specified AbstractFileSystem
* as the default FS.
*/
@@ -431,7 +431,7 @@ public static FileContext getLocalFSFileContext()
/**
* Create a FileContext for specified URI using the default config.
*
- * @param defaultFsUri
+ * @param defaultFsUri the default file system URI
* @return a FileContext with the specified URI as the default FS.
*
* @throws UnsupportedFileSystemException If the file system for
@@ -445,8 +445,8 @@ public static FileContext getFileContext(final URI defaultFsUri)
/**
* Create a FileContext for specified default URI using the specified config.
*
- * @param defaultFsUri
- * @param aConf
+ * @param defaultFsUri the default file system URI
+ * @param aConf configuration
* @return new FileContext for specified uri
* @throws UnsupportedFileSystemException If the file system with specified is
* not supported
@@ -477,7 +477,7 @@ public static FileContext getFileContext(final URI defaultFsUri,
* {@link #getFileContext(URI, Configuration)} instead of this one.
*
*
- * @param aConf
+ * @param aConf configuration
* @return new FileContext
* @throws UnsupportedFileSystemException If file system in the config
* is not supported
@@ -1061,7 +1061,7 @@ public Void next(final AbstractFileSystem fs, final Path p)
/**
* Set permission of a path.
- * @param f
+ * @param f the path
* @param permission - the new absolute permission (umask is not applied)
*
* @throws AccessControlException If access is denied
@@ -1205,7 +1205,7 @@ public FileChecksum next(final AbstractFileSystem fs, final Path p)
* Set the verify checksum flag for the file system denoted by the path.
* This is only applicable if the
* corresponding FileSystem supports checksum. By default doesn't do anything.
- * @param verifyChecksum
+ * @param verifyChecksum verify checksum flag
* @param f set the verifyChecksum for the Filesystem containing this path
*
* @throws AccessControlException If access is denied
@@ -1260,8 +1260,9 @@ public FileStatus next(final AbstractFileSystem fs, final Path p)
/**
* Synchronize client metadata state.
*
- * @throws IOException
- * @throws UnsupportedOperationException
+ * @throws IOException If an I/O error occurred
+ * @throws UnsupportedOperationException If file system for f is
+ * not supported
*/
public void msync() throws IOException, UnsupportedOperationException {
defaultFS.msync();
@@ -1751,6 +1752,7 @@ public class Util {
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
+ * @return true if f exists, false otherwise
*/
public boolean exists(final Path f) throws AccessControlException,
UnsupportedFileSystemException, IOException {
@@ -1816,6 +1818,7 @@ public ContentSummary getContentSummary(Path f)
* @throws AccessControlException If access is denied
* @throws FileNotFoundException If files does not exist
* @throws IOException If an I/O error occurred
+ * @return file status array
*/
public FileStatus[] listStatus(Path[] files) throws AccessControlException,
FileNotFoundException, IOException {
@@ -2164,7 +2167,7 @@ public FileStatus[] globStatus(final Path pathPattern,
* directory.
* @throws UnsupportedFileSystemException If file system for
* src/dst is not supported
- * @thorws IOException If an I/O error occurred
+ * @throws IOException If an I/O error occurred
* @return true if the copy succeeded, false otherwise
*/
public boolean copy(final Path src, final Path dst)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 0bd78498a1802..5ec78e943e9c0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -377,6 +377,7 @@ public String getScheme() {
* implement that method.
*
* @see #canonicalizeUri(URI)
+ * @return the URI of this filesystem.
*/
protected URI getCanonicalUri() {
return canonicalizeUri(getUri());
@@ -457,7 +458,10 @@ public String getCanonicalServiceName() {
: null;
}
- /** @deprecated call {@link #getUri()} instead.*/
+ /**
+ * @return the filesystem URI as a string
+ * @deprecated call {@link #getUri()} instead.
+ */
@Deprecated
public String getName() { return getUri().toString(); }
@@ -523,6 +527,9 @@ public static LocalFileSystem getLocal(Configuration conf)
* configuration and URI, cached and returned to the caller.
*
*
+ * @param uri uri of the filesystem
+ * @param conf configuration
+ * @return filesystem instance
* @throws IOException if the FileSystem cannot be instantiated.
*/
public static FileSystem get(URI uri, Configuration conf) throws IOException {
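A minimal sketch of the get(URI, Configuration) entry point whose parameters are filled in above; the URI is an example value, not one from the patch:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class GetFileSystemExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs =
            FileSystem.get(URI.create("hdfs://namenode:8020/"), conf);
        System.out.println("scheme: " + fs.getUri().getScheme());
      }
    }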
@@ -552,7 +559,7 @@ public static FileSystem get(URI uri, Configuration conf) throws IOException {
/**
* Returns the FileSystem for this URI's scheme and authority and the
* given user. Internally invokes {@link #newInstance(URI, Configuration)}
- * @param uri of the filesystem
+ * @param uri uri of the filesystem
* @param conf the configuration to use
* @param user to perform the get as
* @return filesystem instance
@@ -870,6 +877,7 @@ protected void checkPath(Path path) {
* @param start offset into the given file
* @param len length for which to get locations for
* @throws IOException IO failure
+ * @return block location array
*/
public BlockLocation[] getFileBlockLocations(FileStatus file,
long start, long len) throws IOException {
@@ -910,6 +918,7 @@ public BlockLocation[] getFileBlockLocations(FileStatus file,
* @param len length for which to get locations for
* @throws FileNotFoundException when the path does not exist
* @throws IOException IO failure
+ * @return block location array
*/
public BlockLocation[] getFileBlockLocations(Path p,
long start, long len) throws IOException {
@@ -972,6 +981,7 @@ public Path resolvePath(final Path p) throws IOException {
* @param f the file name to open
* @param bufferSize the size of the buffer to be used.
* @throws IOException IO failure
+ * @return input stream
*/
public abstract FSDataInputStream open(Path f, int bufferSize)
throws IOException;
@@ -980,6 +990,7 @@ public abstract FSDataInputStream open(Path f, int bufferSize)
* Opens an FSDataInputStream at the indicated Path.
* @param f the file to open
* @throws IOException IO failure
+ * @return input stream
*/
public FSDataInputStream open(Path f) throws IOException {
return open(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -997,6 +1008,7 @@ public FSDataInputStream open(Path f) throws IOException {
* @throws IOException IO failure
* @throws UnsupportedOperationException If {@link #open(PathHandle, int)}
* not overridden by subclass
+ * @return input stream
*/
public FSDataInputStream open(PathHandle fd) throws IOException {
return open(fd, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -1014,6 +1026,7 @@ public FSDataInputStream open(PathHandle fd) throws IOException {
* not satisfied
* @throws IOException IO failure
* @throws UnsupportedOperationException If not overridden by subclass
+ * @return input stream
*/
public FSDataInputStream open(PathHandle fd, int bufferSize)
throws IOException {
@@ -1031,6 +1044,7 @@ public FSDataInputStream open(PathHandle fd, int bufferSize)
* not overridden by subclass.
* @throws UnsupportedOperationException If this FileSystem cannot enforce
* the specified constraints.
+ * @return path handle
*/
public final PathHandle getPathHandle(FileStatus stat, HandleOpt... opt) {
// method is final with a default so clients calling getPathHandle(stat)
@@ -1046,6 +1060,7 @@ public final PathHandle getPathHandle(FileStatus stat, HandleOpt... opt) {
* @param stat Referent in the target FileSystem
* @param opt Constraints that determine the validity of the
* {@link PathHandle} reference.
+ * @return path handle
*/
protected PathHandle createPathHandle(FileStatus stat, HandleOpt... opt) {
throw new UnsupportedOperationException();
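Since PathHandle is one of the less familiar APIs touched here, a hedged sketch of the flow these tags describe; support is filesystem-specific (HDFS implements it) and the path is illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.PathHandle;

    public class PathHandleExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        FileStatus stat = fs.getFileStatus(new Path("/data/file.bin"));
        // With no options the default constraints apply, as noted above.
        PathHandle handle = fs.getPathHandle(stat);
        try (FSDataInputStream in = fs.open(handle)) {
          System.out.println("first byte: " + in.read());
        }
      }
    }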
@@ -1056,6 +1071,7 @@ protected PathHandle createPathHandle(FileStatus stat, HandleOpt... opt) {
* Files are overwritten by default.
* @param f the file to create
* @throws IOException IO failure
+ * @return output stream
*/
public FSDataOutputStream create(Path f) throws IOException {
return create(f, true);
@@ -1067,6 +1083,7 @@ public FSDataOutputStream create(Path f) throws IOException {
* @param overwrite if a file with this name already exists, then if true,
* the file will be overwritten, and if false an exception will be thrown.
* @throws IOException IO failure
+ * @return output stream
*/
public FSDataOutputStream create(Path f, boolean overwrite)
throws IOException {
@@ -1158,6 +1175,7 @@ public FSDataOutputStream create(Path f,
* @param overwrite if a file with this name already exists, then if true,
* the file will be overwritten, and if false an error will be thrown.
* @param bufferSize the size of the buffer to be used.
+ * @param progress to report progress
* @throws IOException IO failure
* @return output stream
*/
@@ -1179,6 +1197,7 @@ public FSDataOutputStream create(Path f,
* the file will be overwritten, and if false an error will be thrown.
* @param bufferSize the size of the buffer to be used.
* @param replication required block replication for the file.
+ * @param blockSize the block size to be used.
* @throws IOException IO failure
* @return output stream
*/
@@ -1198,6 +1217,8 @@ public FSDataOutputStream create(Path f,
* the file will be overwritten, and if false an error will be thrown.
* @param bufferSize the size of the buffer to be used.
* @param replication required block replication for the file.
+ * @param blockSize the block size to be used.
+ * @param progress to report progress
* @throws IOException IO failure
* @return output stream
*/
From 35ad959333ba416c1d54f46acc0188e2fcf974f6 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Thu, 12 May 2022 00:36:11 -0700
Subject: [PATCH 23/53] HADOOP-18229. Fix some java doc compilation errors.
FileSystem.java no @param for f, no @param for link, no @return
---
.../java/org/apache/hadoop/fs/FileSystem.java | 51 +++++++++++++++++--
1 file changed, 46 insertions(+), 5 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 5ec78e943e9c0..9dee8158dedd2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -1510,6 +1510,7 @@ public boolean createNewFile(Path f) throws IOException {
* @throws IOException IO failure
* @throws UnsupportedOperationException if the operation is unsupported
* (default).
+ * @return output stream
*/
public FSDataOutputStream append(Path f) throws IOException {
return append(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -1623,6 +1624,7 @@ public boolean setReplication(Path src, short replication)
*
* @param src path to be renamed
* @param dst new path after rename
+ * @param options rename options
* @throws FileNotFoundException src path does not exist, or the parent
* path of dst does not exist.
* @throws FileAlreadyExistsException dest path exists and is a file
@@ -1717,6 +1719,9 @@ public boolean truncate(Path f, long newLength) throws IOException {
/**
* Delete a file/directory.
+ * @param f the path
+ * @throws IOException IO failure
+ * @return true if delete succeeded, false otherwise
* @deprecated Use {@link #delete(Path, boolean)} instead.
*/
@Deprecated
@@ -1833,6 +1838,7 @@ public boolean exists(Path f) throws IOException {
* @param f path to check
* @throws IOException IO failure
* @deprecated Use {@link #getFileStatus(Path)} instead
+ * @return true if f is a directory, false otherwise
*/
@Deprecated
public boolean isDirectory(Path f) throws IOException {
@@ -1850,6 +1856,7 @@ public boolean isDirectory(Path f) throws IOException {
* @param f path to check
* @throws IOException IO failure
* @deprecated Use {@link #getFileStatus(Path)} instead
+ * @return true if f is a file, false otherwise
*/
@Deprecated
public boolean isFile(Path f) throws IOException {
@@ -1862,6 +1869,7 @@ public boolean isFile(Path f) throws IOException {
/**
* The number of bytes in a file.
+ * @param f the path
* @return the number of bytes; 0 for a directory
* @deprecated Use {@link #getFileStatus(Path)} instead.
* @throws FileNotFoundException if the path does not resolve
@@ -1876,6 +1884,7 @@ public long getLength(Path f) throws IOException {
* @param f path to use
* @throws FileNotFoundException if the path does not resolve
* @throws IOException IO failure
+ * @return content summary
*/
public ContentSummary getContentSummary(Path f) throws IOException {
FileStatus status = getFileStatus(f);
@@ -2010,8 +2019,8 @@ public boolean hasMore() {
* @param f Path to list
* @param token opaque iteration token returned by previous call, or null
* if this is the first call.
- * @return
- * @throws FileNotFoundException
+ * @return directory entries
+ * @throws FileNotFoundException when the path does not exist
* @throws IOException If an I/O error occurred
*/
@InterfaceAudience.Private
@@ -2043,6 +2052,8 @@ private void listStatus(ArrayList results, Path f,
/**
* List corrupted file blocks.
+ *
+ * @param path the path
* @return an iterator over the corrupt files under the given path
* (may contain duplicates if a file has more than one corrupt block)
* @throws UnsupportedOperationException if the operation is unsupported
@@ -2458,6 +2469,7 @@ public boolean mkdirs(Path f) throws IOException {
* @param f path to create
* @param permission to apply to f
* @throws IOException IO failure
+ * @return if mkdir success true, not false
*/
public abstract boolean mkdirs(Path f, FsPermission permission
) throws IOException;
@@ -2674,7 +2686,9 @@ public long getUsed() throws IOException {
/**
* Return the total size of all files from a specified path.
+ * @param path the path
* @throws IOException IO failure
+ * @return the number of path content summary
*/
public long getUsed(Path path) throws IOException {
return getContentSummary(path).getLength();
@@ -2750,7 +2764,7 @@ public short getDefaultReplication(Path path) {
* synchronization is essential to guarantee consistency of read requests
* particularly in HA setting.
* @throws IOException If an I/O error occurred
- * @throws UnsupportedOperationException
+ * @throws UnsupportedOperationException if the operation is unsupported
*/
public void msync() throws IOException, UnsupportedOperationException {
throw new UnsupportedOperationException(getClass().getCanonicalName() +
@@ -2826,6 +2840,8 @@ static void checkAccessPermissions(FileStatus stat, FsAction mode)
/**
* See {@link FileContext#fixRelativePart}.
+ * @param p the path
+ * @return relative part
*/
protected Path fixRelativePart(Path p) {
if (p.isUriPathAbsolute()) {
@@ -2837,6 +2853,18 @@ protected Path fixRelativePart(Path p) {
/**
* See {@link FileContext#createSymlink(Path, Path, boolean)}.
+ *
+ * @param target target path
+ * @param link link
+ * @param createParent create parent
+ * @throws AccessControlException if access is denied
+ * @throws FileAlreadyExistsException when the path does not exist
+ * @throws FileNotFoundException when the path does not exist
+ * @throws ParentNotDirectoryException if the parent path of dest is not
+ * a directory
+ * @throws UnsupportedFileSystemException if there was no known implementation
+ * for the scheme.
+ * @throws IOException see specific implementation
*/
public void createSymlink(final Path target, final Path link,
final boolean createParent) throws AccessControlException,
@@ -2850,8 +2878,14 @@ public void createSymlink(final Path target, final Path link,
/**
* See {@link FileContext#getFileLinkStatus(Path)}.
- * @throws FileNotFoundException when the path does not exist
- * @throws IOException see specific implementation
+ *
+ * @param f the path
+ * @throws AccessControlException if access is denied
+ * @throws FileNotFoundException when the path does not exist
+ * @throws IOException see specific implementation
+ * @throws UnsupportedFileSystemException if there was no known implementation
+ * for the scheme.
+ * @return file status
*/
public FileStatus getFileLinkStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -2869,8 +2903,11 @@ public boolean supportsSymlinks() {
/**
* See {@link FileContext#getLinkTarget(Path)}.
+ * @param f the path
* @throws UnsupportedOperationException if the operation is unsupported
* (default outcome).
+ * @throws IOException IO failure
+ * @return the path
*/
public Path getLinkTarget(Path f) throws IOException {
// Supporting filesystems should override this method
@@ -2880,8 +2917,11 @@ public Path getLinkTarget(Path f) throws IOException {
/**
* See {@link AbstractFileSystem#getLinkTarget(Path)}.
+ * @param f the path
* @throws UnsupportedOperationException if the operation is unsupported
* (default outcome).
+ * @throws IOException IO failure
+ * @return the path
*/
protected Path resolveLink(Path f) throws IOException {
// Supporting filesystems should override this method
@@ -4454,6 +4494,7 @@ public static synchronized List getAllStatistics() {
/**
* Get the statistics for a particular file system.
+ * @param scheme scheme
* @param cls the class to lookup
* @return a statistics object
* @deprecated use {@link #getGlobalStorageStatistics()}
From df3140a0bb81fcbddbf842c1262a69b17a52e26e Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Thu, 12 May 2022 00:36:11 -0700
Subject: [PATCH 24/53] HADOOP-18229. Fix some java doc compilation errors.
FileSystem.java no @param for f, no @param for link, no @return
---
.../java/org/apache/hadoop/fs/FileSystem.java | 51 +++++++++++++++++--
1 file changed, 46 insertions(+), 5 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 5ec78e943e9c0..9dee8158dedd2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -1510,6 +1510,7 @@ public boolean createNewFile(Path f) throws IOException {
* @throws IOException IO failure
* @throws UnsupportedOperationException if the operation is unsupported
* (default).
+ * @return output stream
*/
public FSDataOutputStream append(Path f) throws IOException {
return append(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -1623,6 +1624,7 @@ public boolean setReplication(Path src, short replication)
*
* @param src path to be renamed
* @param dst new path after rename
+ * @param option rename options
* @throws FileNotFoundException src path does not exist, or the parent
* path of dst does not exist.
* @throws FileAlreadyExistsException dest path exists and is a file
@@ -1717,6 +1719,9 @@ public boolean truncate(Path f, long newLength) throws IOException {
/**
* Delete a file/directory.
+ * @param f the path
+ * @throws IOException IO failure
+ * @return true if delete is successful, false otherwise
* @deprecated Use {@link #delete(Path, boolean)} instead.
*/
@Deprecated
@@ -1833,6 +1838,7 @@ public boolean exists(Path f) throws IOException {
* @param f path to check
* @throws IOException IO failure
* @deprecated Use {@link #getFileStatus(Path)} instead
+ * @return true if f is a directory, false otherwise
*/
@Deprecated
public boolean isDirectory(Path f) throws IOException {
@@ -1850,6 +1856,7 @@ public boolean isDirectory(Path f) throws IOException {
* @param f path to check
* @throws IOException IO failure
* @deprecated Use {@link #getFileStatus(Path)} instead
+ * @return true if f is a file, false otherwise
*/
@Deprecated
public boolean isFile(Path f) throws IOException {
@@ -1862,6 +1869,7 @@ public boolean isFile(Path f) throws IOException {
/**
* The number of bytes in a file.
+ * @param f the path
* @return the number of bytes; 0 for a directory
* @deprecated Use {@link #getFileStatus(Path)} instead.
* @throws FileNotFoundException if the path does not resolve
@@ -1876,6 +1884,7 @@ public long getLength(Path f) throws IOException {
* @param f path to use
* @throws FileNotFoundException if the path does not resolve
* @throws IOException IO failure
+ * @return content summary
*/
public ContentSummary getContentSummary(Path f) throws IOException {
FileStatus status = getFileStatus(f);
@@ -2010,8 +2019,8 @@ public boolean hasMore() {
* @param f Path to list
* @param token opaque iteration token returned by previous call, or null
* if this is the first call.
- * @return
- * @throws FileNotFoundException
+ * @return directory entries
+ * @throws FileNotFoundException when the path does not exist
* @throws IOException If an I/O error occurred
*/
@InterfaceAudience.Private
@@ -2043,6 +2052,8 @@ private void listStatus(ArrayList<FileStatus> results, Path f,
/**
* List corrupted file blocks.
+ *
+ * @param path the path
* @return an iterator over the corrupt files under the given path
* (may contain duplicates if a file has more than one corrupt block)
* @throws UnsupportedOperationException if the operation is unsupported
@@ -2458,6 +2469,7 @@ public boolean mkdirs(Path f) throws IOException {
* @param f path to create
* @param permission to apply to f
* @throws IOException IO failure
+ * @return true if the directory was created, false otherwise
*/
public abstract boolean mkdirs(Path f, FsPermission permission
) throws IOException;
@@ -2674,7 +2686,9 @@ public long getUsed() throws IOException {
/**
* Return the total size of all files from a specified path.
+ * @param path the path
* @throws IOException IO failure
+ * @return the total size of all files under the path
*/
public long getUsed(Path path) throws IOException {
return getContentSummary(path).getLength();
@@ -2750,7 +2764,7 @@ public short getDefaultReplication(Path path) {
* synchronization is essential to guarantee consistency of read requests
* particularly in HA setting.
* @throws IOException If an I/O error occurred
- * @throws UnsupportedOperationException
+ * @throws UnsupportedOperationException if the operation is unsupported
*/
public void msync() throws IOException, UnsupportedOperationException {
throw new UnsupportedOperationException(getClass().getCanonicalName() +
@@ -2826,6 +2840,8 @@ static void checkAccessPermissions(FileStatus stat, FsAction mode)
/**
* See {@link FileContext#fixRelativePart}.
+ * @param p the path to fix
+ * @return the path, with any relative part resolved against the working directory
*/
protected Path fixRelativePart(Path p) {
if (p.isUriPathAbsolute()) {
@@ -2837,6 +2853,18 @@ protected Path fixRelativePart(Path p) {
/**
* See {@link FileContext#createSymlink(Path, Path, boolean)}.
+ *
+ * @param target the target of the symlink
+ * @param link the path of the link to create
+ * @param createParent whether to create missing parent directories
+ * @throws AccessControlException if access is denied
+ * @throws FileAlreadyExistsException if the link already exists
+ * @throws FileNotFoundException when the path does not exist
+ * @throws ParentNotDirectoryException if the parent path of dest is not
+ * a directory
+ * @throws UnsupportedFileSystemException if there was no known implementation
+ * for the scheme.
+ * @throws IOException see specific implementation
*/
public void createSymlink(final Path target, final Path link,
final boolean createParent) throws AccessControlException,
@@ -2850,8 +2878,14 @@ public void createSymlink(final Path target, final Path link,
/**
* See {@link FileContext#getFileLinkStatus(Path)}.
- * @throws FileNotFoundException when the path does not exist
- * @throws IOException see specific implementation
+ *
+ * @param f the path
+ * @throws AccessControlException if access is denied
+ * @throws FileNotFoundException when the path does not exist
+ * @throws IOException see specific implementation
+ * @throws UnsupportedFileSystemException if there was no known implementation
+ * for the scheme.
+ * @return the status of the link (the link is not followed)
*/
public FileStatus getFileLinkStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -2869,8 +2903,11 @@ public boolean supportsSymlinks() {
/**
* See {@link FileContext#getLinkTarget(Path)}.
+ * @param f the path
* @throws UnsupportedOperationException if the operation is unsupported
* (default outcome).
+ * @throws IOException IO failure
+ * @return the target of the symbolic link
*/
public Path getLinkTarget(Path f) throws IOException {
// Supporting filesystems should override this method
@@ -2880,8 +2917,11 @@ public Path getLinkTarget(Path f) throws IOException {
/**
* See {@link AbstractFileSystem#getLinkTarget(Path)}.
+ * @param f the path
* @throws UnsupportedOperationException if the operation is unsupported
* (default outcome).
+ * @throws IOException IO failure
+ * @return the target of the symbolic link
*/
protected Path resolveLink(Path f) throws IOException {
// Supporting filesystems should override this method
@@ -4454,6 +4494,7 @@ public static synchronized List<Statistics> getAllStatistics() {
/**
* Get the statistics for a particular file system.
+ * @param scheme the filesystem scheme
* @param cls the class to lookup
* @return a statistics object
* @deprecated use {@link #getGlobalStorageStatistics()}
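
The convention this series converges on is uniform: every @param carries a
description, @return names the value handed back, and @throws explains the
failure mode. A minimal sketch of a method documented in that style (the
method itself is hypothetical, not part of FileSystem):

    /**
     * Copy a file inside the filesystem.
     *
     * @param src the file to copy
     * @param dst the destination path
     * @param overwrite whether an existing destination may be replaced
     * @return true if the copy succeeded, false otherwise
     * @throws FileNotFoundException when the src path does not exist
     * @throws IOException raised on errors performing I/O.
     */
    public boolean copyFile(Path src, Path dst, boolean overwrite)
        throws IOException {
      // implementation elided
      return true;
    }
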
From 5fb63e9c0abf808e463a51fd64abc95ddc6a7ace Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Thu, 12 May 2022 04:15:36 -0700
Subject: [PATCH 25/53] HADOOP-18229. Fix some java doc compilation errors.
AvroFSInput.java error BatchedRemoteIterator.java error BlockLocation.java
error FileSystem.java error
---
.../org/apache/hadoop/fs/AvroFSInput.java | 13 +++++-
.../hadoop/fs/BatchedRemoteIterator.java | 3 ++
.../org/apache/hadoop/fs/BlockLocation.java | 45 +++++++++++++++++++
.../java/org/apache/hadoop/fs/FileSystem.java | 18 ++++----
4 files changed, 69 insertions(+), 10 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
index 213fbc24c4db0..d8f87b07e5d16 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
@@ -36,13 +36,22 @@ public class AvroFSInput implements Closeable, SeekableInput {
private final FSDataInputStream stream;
private final long len;
- /** Construct given an {@link FSDataInputStream} and its length. */
+ /**
+ * Construct given an {@link FSDataInputStream} and its length.
+ *
+ * @param in the input stream
+ * @param len the length of the input
+ */
public AvroFSInput(final FSDataInputStream in, final long len) {
this.stream = in;
this.len = len;
}
- /** Construct given a {@link FileContext} and a {@link Path}. */
+ /** Construct given a {@link FileContext} and a {@link Path}.
+ * @param fc the file context
+ * @param p the path
+ * @throws IOException If an I/O error occurred
+ */
public AvroFSInput(final FileContext fc, final Path p) throws IOException {
FileStatus status = fc.getFileStatus(p);
this.len = status.getLen();
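
For reference, a short sketch of the second constructor in use; the path and
configuration are illustrative, and the try-with-resources relies on
AvroFSInput implementing Closeable (as its class declaration above shows):

    Configuration conf = new Configuration();
    FileContext fc = FileContext.getFileContext(conf);
    // The constructor reads the file length from FileStatus up front.
    try (AvroFSInput in = new AvroFSInput(fc, new Path("/data/example.avro"))) {
      long len = in.length();  // length() declares IOException
    }
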
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
index 607fffbcc701a..18f6b8137f85e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
@@ -68,6 +68,7 @@ public BatchedRemoteIterator(K prevKey) {
*
* @param prevKey The key to send.
* @return A list of replies.
+ * @throws IOException If an I/O error occurred
*/
public abstract BatchedEntries<E> makeRequest(K prevKey) throws IOException;
@@ -102,6 +103,8 @@ public boolean hasNext() throws IOException {
/**
* Return the next list key associated with an element.
+ * @param element the element
+ * @return the next list key (generic type K)
*/
public abstract K elementToPrevKey(E element);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
index 29358dd7d1086..657be6fc95a07 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
@@ -85,6 +85,7 @@ public BlockLocation() {
/**
* Copy constructor.
+ * @param that the BlockLocation to copy
*/
public BlockLocation(BlockLocation that) {
this.hosts = that.hosts;
@@ -100,6 +101,10 @@ public BlockLocation(BlockLocation that) {
/**
* Constructor with host, name, offset and length.
+ * @param names the names (IP:xferPort) hosting this block
+ * @param hosts the hosts (hostname) hosting this block
+ * @param offset the start offset of the block in the file
+ * @param length the length of the block
*/
public BlockLocation(String[] names, String[] hosts, long offset,
long length) {
@@ -108,6 +113,11 @@ public BlockLocation(String[] names, String[] hosts, long offset,
/**
* Constructor with host, name, offset, length and corrupt flag.
+ * @param names the names (IP:xferPort) hosting this block
+ * @param hosts the hosts (hostname) hosting this block
+ * @param offset the start offset of the block in the file
+ * @param length the length of the block
+ * @param corrupt whether the block is corrupt
*/
public BlockLocation(String[] names, String[] hosts, long offset,
long length, boolean corrupt) {
@@ -116,6 +126,11 @@ public BlockLocation(String[] names, String[] hosts, long offset,
/**
* Constructor with host, name, network topology, offset and length.
+ * @param names the names (IP:xferPort) hosting this block
+ * @param hosts the hosts (hostname) hosting this block
+ * @param topologyPaths the network topology path for each host
+ * @param offset the start offset of the block in the file
+ * @param length the length of the block
*/
public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
long offset, long length) {
@@ -125,6 +140,12 @@ public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
/**
* Constructor with host, name, network topology, offset, length
* and corrupt flag.
+ * @param names the names (IP:xferPort) hosting this block
+ * @param hosts the hosts (hostname) hosting this block
+ * @param topologyPaths the network topology path for each host
+ * @param offset the start offset of the block in the file
+ * @param length the length of the block
+ * @param corrupt whether the block is corrupt
*/
public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
long offset, long length, boolean corrupt) {
@@ -177,6 +198,8 @@ public BlockLocation(String[] names, String[] hosts, String[] cachedHosts,
/**
* Get the list of hosts (hostname) hosting this block.
+ * @return hosts array
+ * @throws IOException If an I/O error occurred
*/
public String[] getHosts() throws IOException {
return hosts;
@@ -184,6 +207,7 @@ public String[] getHosts() throws IOException {
/**
* Get the list of hosts (hostname) hosting a cached replica of the block.
+ * @return cached hosts
*/
public String[] getCachedHosts() {
return cachedHosts;
@@ -191,6 +215,8 @@ public String[] getCachedHosts() {
/**
* Get the list of names (IP:xferPort) hosting this block.
+ * @return names array
+ * @throws IOException If an I/O error occurred.
*/
public String[] getNames() throws IOException {
return names;
@@ -199,6 +225,8 @@ public String[] getNames() throws IOException {
/**
* Get the list of network topology paths for each of the hosts.
* The last component of the path is the "name" (IP:xferPort).
+ * @return topology paths
+ * @throws IOException If an I/O error occurred
*/
public String[] getTopologyPaths() throws IOException {
return topologyPaths;
@@ -206,6 +234,7 @@ public String[] getTopologyPaths() throws IOException {
/**
* Get the storageID of each replica of the block.
+ * @return storage ids
*/
public String[] getStorageIds() {
return storageIds;
@@ -213,6 +242,7 @@ public String[] getStorageIds() {
/**
* Get the storage type of each replica of the block.
+ * @return storage type of each replica of the block
*/
public StorageType[] getStorageTypes() {
return storageTypes;
@@ -220,6 +250,7 @@ public StorageType[] getStorageTypes() {
/**
* Get the start offset of file associated with this block.
+ * @return start offset of file associated with this block
*/
public long getOffset() {
return offset;
@@ -227,6 +258,7 @@ public long getOffset() {
/**
* Get the length of the block.
+ * @return length of the block
*/
public long getLength() {
return length;
@@ -234,6 +266,7 @@ public long getLength() {
/**
* Get the corrupt flag.
+ * @return corrupt flag
*/
public boolean isCorrupt() {
return corrupt;
@@ -241,6 +274,7 @@ public boolean isCorrupt() {
/**
* Return true if the block is striped (erasure coded).
+ * @return true if the block is striped (erasure coded), false otherwise
*/
public boolean isStriped() {
return false;
@@ -248,6 +282,7 @@ public boolean isStriped() {
/**
* Set the start offset of file associated with this block.
+ * @param offset start offset
*/
public void setOffset(long offset) {
this.offset = offset;
@@ -255,6 +290,7 @@ public void setOffset(long offset) {
/**
* Set the length of block.
+ * @param length length of block
*/
public void setLength(long length) {
this.length = length;
@@ -262,6 +298,7 @@ public void setLength(long length) {
/**
* Set the corrupt flag.
+ * @param corrupt corrupt flag
*/
public void setCorrupt(boolean corrupt) {
this.corrupt = corrupt;
@@ -269,6 +306,8 @@ public void setCorrupt(boolean corrupt) {
/**
* Set the hosts hosting this block.
+ * @param hosts hosts array
+ * @throws IOException If an I/O error occurred
*/
public void setHosts(String[] hosts) throws IOException {
if (hosts == null) {
@@ -280,6 +319,7 @@ public void setHosts(String[] hosts) throws IOException {
/**
* Set the hosts hosting a cached replica of this block.
+ * @param cachedHosts cached hosts
*/
public void setCachedHosts(String[] cachedHosts) {
if (cachedHosts == null) {
@@ -291,6 +331,8 @@ public void setCachedHosts(String[] cachedHosts) {
/**
* Set the names (host:port) hosting this block.
+ * @param names names
+ * @throws IOException If an I/O error occurred
*/
public void setNames(String[] names) throws IOException {
if (names == null) {
@@ -302,6 +344,9 @@ public void setNames(String[] names) throws IOException {
/**
* Set the network topology paths of the hosts.
+ *
+ * @param topologyPaths topology paths
+ * @throws IOException If an I/O error occurred
*/
public void setTopologyPaths(String[] topologyPaths) throws IOException {
if (topologyPaths == null) {
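
A sketch of how the constructors and accessors documented above fit together;
the host and name strings are made up, and a real cluster supplies them:

    // Per the javadoc above: names are IP:xferPort, hosts are hostnames.
    String[] names = { "192.168.0.10:9866" };
    String[] hosts = { "node1.example.com" };
    BlockLocation loc = new BlockLocation(names, hosts, 0L, 134217728L);
    // getHosts() declares IOException, so call it from code that throws
    // or handles it.
    System.out.println(loc.getOffset() + "+" + loc.getLength()
        + " on " + loc.getHosts()[0] + ", corrupt=" + loc.isCorrupt());
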
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 9dee8158dedd2..421a0a1c54874 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -1624,7 +1624,7 @@ public boolean setReplication(Path src, short replication)
*
* @param src path to be renamed
* @param dst new path after rename
- * @param option rename options
+ * @param options rename options
* @throws FileNotFoundException src path does not exist, or the parent
* path of dst does not exist.
* @throws FileAlreadyExistsException dest path exists and is a file
@@ -2147,36 +2147,29 @@ public FileStatus[] listStatus(Path[] files, PathFilter filter)
* ?
* Matches any single character.
- *
* *
* Matches zero or more characters.
- *
* [abc]
* Matches a single character from character set {a,b,c}.
- *
* [a-b]
* Matches a single character from the character range {a...b}. Note that
* character a must be lexicographically less than or equal to character b.
- *
* [^a]
* Matches a single character that is not from character set or range {a}.
* Note that the ^ character must occur immediately to the right of the
* opening bracket.
- *
* \c
* Removes (escapes) any special meaning of character c.
- *
* {ab,cd}
* Matches a string from the string set {ab, cd}
- *
* {ab,c{de,fh}}
* Matches a string from the string set {ab, cde, cfh}
*
@@ -2407,6 +2400,7 @@ public LocatedFileStatus next() throws IOException {
/** Return the current user's home directory in this FileSystem.
* The default implementation returns {@code "/user/$USER/"}.
+ * @return the path of the current user's home directory
*/
public Path getHomeDirectory() {
String username;
@@ -2517,6 +2511,7 @@ public void moveFromLocalFile(Path src, Path dst)
* @param delSrc whether to delete the src
* @param src path
* @param dst path
+ * @throws IOException IO failure
*/
public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
throws IOException {
@@ -2631,6 +2626,7 @@ public void copyToLocalFile(boolean delSrc, Path src, Path dst,
* @param fsOutputFile path of output file
* @param tmpLocalFile path of local tmp file
* @throws IOException IO failure
+ * @return a local working file to write output to
*/
public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
throws IOException {
@@ -2678,6 +2674,7 @@ public void close() throws IOException {
/**
* Return the total size of all files in the filesystem.
* @throws IOException IO failure
+ * @return the total size of all files in the filesystem
*/
public long getUsed() throws IOException {
Path path = new Path("/");
@@ -2711,6 +2708,7 @@ public long getBlockSize(Path f) throws IOException {
* Return the number of bytes that large input files should be optimally
* be split into to minimize I/O time.
* @deprecated use {@link #getDefaultBlockSize(Path)} instead
+ * @return default block size
*/
@Deprecated
public long getDefaultBlockSize() {
@@ -2896,6 +2894,7 @@ public FileStatus getFileLinkStatus(final Path f)
/**
* See {@link AbstractFileSystem#supportsSymlinks()}.
+ * @return true if the file system supports symlinks, false otherwise
*/
public boolean supportsSymlinks() {
return false;
@@ -4486,6 +4485,7 @@ public static synchronized Map<String, Statistics> getStatistics() {
/**
* Return the FileSystem classes that have Statistics.
* @deprecated use {@link #getGlobalStorageStatistics()}
+ * @return the list of statistics
*/
@Deprecated
public static synchronized List<Statistics> getAllStatistics() {
@@ -4529,6 +4529,7 @@ public static synchronized void clearStatistics() {
/**
* Print all statistics for all file systems to {@code System.out}
+ * @throws IOException If an I/O error occurred
*/
public static synchronized
void printStatistics() throws IOException {
@@ -4569,6 +4570,7 @@ public StorageStatistics getStorageStatistics() {
/**
* Get the global storage statistics.
+ * @return global storage statistics
*/
public static GlobalStorageStatistics getGlobalStorageStatistics() {
return GlobalStorageStatistics.INSTANCE;
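
Tying the FileSystem fixes together: getUsed(Path) is, per the snippet in the
previous patch, just the length of the path's content summary. A sketch (the
path is illustrative):

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path p = new Path("/user/example");
    ContentSummary cs = fs.getContentSummary(p);
    long used = fs.getUsed(p);  // equals cs.getLength()
    System.out.println(used + " bytes in " + cs.getFileCount()
        + " files and " + cs.getDirectoryCount() + " directories");
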
From ddc34fc0aa3f64d7c9e2b98d0332ad7ac9f7c743 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Thu, 12 May 2022 16:51:11 -0700
Subject: [PATCH 26/53] HADOOP-18229. Fix some java doc compilation errors.
ByteBufferUtil.java no @param for stream, no @param for bufferPool etc.
CachingGetSpaceUsed.java warning: no @param for builder, warning: no @return
etc. FilterFileSystem.java warning: no description for @throws.
---
.../main/java/org/apache/hadoop/fs/ByteBufferUtil.java | 6 ++++++
.../java/org/apache/hadoop/fs/CachingGetSpaceUsed.java | 9 +++++++++
.../main/java/org/apache/hadoop/fs/FilterFileSystem.java | 4 ++--
3 files changed, 17 insertions(+), 2 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
index 6576fe5827d94..a9790773b1707 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
@@ -47,6 +47,12 @@ private static boolean streamHasByteBufferRead(InputStream stream) {
/**
* Perform a fallback read.
+ *
+ * @param stream input stream
+ * @param bufferPool the buffer pool
+ * @param maxLength the maximum number of bytes to read
+ * @throws IOException raised on errors performing I/O.
+ * @return byte buffer
*/
public static ByteBuffer fallbackRead(
InputStream stream, ByteBufferPool bufferPool, int maxLength)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
index 362d125b09df5..e0af53bac6f7d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
@@ -53,6 +53,9 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
/**
* This is the constructor used by the builder.
* All overriding classes should implement this.
+ *
+ * @param builder builder
+ * @throws IOException raised on errors performing I/O.
*/
public CachingGetSpaceUsed(CachingGetSpaceUsed.Builder builder)
throws IOException {
@@ -140,6 +143,8 @@ public String getDirPath() {
/**
* Increment the cached value of used space.
+ *
+ * @param value dfs used value
*/
public void incDfsUsed(long value) {
used.addAndGet(value);
@@ -154,6 +159,8 @@ boolean running() {
/**
* How long in between runs of the background refresh.
+ *
+ * @return refresh interval
*/
@VisibleForTesting
public long getRefreshInterval() {
@@ -163,6 +170,8 @@ public long getRefreshInterval() {
/**
* Randomize the refresh interval timing by this amount, the actual interval will be chosen
* uniformly between {@code interval-jitter} and {@code interval+jitter}.
+ *
+ * @return the jitter applied to the refresh interval
*/
@VisibleForTesting
public long getJitter() {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
index 607aa263622f6..331e2ab9830c8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
@@ -233,7 +233,7 @@ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
*
* @param src file name
* @param replication new replication
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @return true if successful;
* false if file does not exist or is a directory
*/
@@ -304,7 +304,7 @@ public Path getHomeDirectory() {
* Set the current working directory for the given file system. All relative
* paths will be resolved relative to it.
*
- * @param newDir
+ * @param newDir the new working directory
*/
@Override
public void setWorkingDirectory(Path newDir) {
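
A sketch of the fallbackRead path documented above, assuming the
ElasticByteBufferPool implementation of ByteBufferPool from
org.apache.hadoop.io (pool choice and stream contents are illustrative):

    InputStream stream = new ByteArrayInputStream(new byte[]{1, 2, 3});
    ByteBufferPool pool = new ElasticByteBufferPool();
    // Reads up to maxLength bytes into a buffer taken from the pool;
    // may return null at end of stream, so check before use.
    ByteBuffer buf = ByteBufferUtil.fallbackRead(stream, pool, 1024);
    if (buf != null) {
      pool.putBuffer(buf);  // hand the buffer back when finished
    }
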
From a39012a0e27a66968043352a4ba06c06e6263c0f Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Thu, 12 May 2022 22:19:59 -0700
Subject: [PATCH 27/53] HADOOP-18229. Fix some java doc compilation errors.
ChecksumFileSystem.java warning: no @param for src, warning: no @param for
copyCrc etc ChecksumFs.java warning: no @return, warning: no @param for file
---
.../apache/hadoop/fs/ChecksumFileSystem.java | 40 +++++++++++++++----
.../java/org/apache/hadoop/fs/ChecksumFs.java | 37 +++++++++++++----
2 files changed, 62 insertions(+), 15 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
index 59ffe00bcb24d..578ef03956d9e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
@@ -102,25 +102,44 @@ public FileSystem getRawFileSystem() {
return fs;
}
- /** Return the name of the checksum file associated with a file.*/
+ /**
+ * Return the name of the checksum file associated with a file.
+ *
+ * @param file the file path
+ * @return name of the checksum file associated with a file
+ */
public Path getChecksumFile(Path file) {
return new Path(file.getParent(), "." + file.getName() + ".crc");
}
- /** Return true iff file is a checksum file name.*/
+ /**
+ * Return true if file is a checksum file name.
+ *
+ * @param file the file path
+ * @return true if file is a checksum file, false otherwise
+ */
public static boolean isChecksumFile(Path file) {
String name = file.getName();
return name.startsWith(".") && name.endsWith(".crc");
}
- /** Return the length of the checksum file given the size of the
+ /**
+ * Return the length of the checksum file given the size of the
* actual file.
- **/
+ *
+ * @param file the file path
+ * @param fileSize file size
+ * @return the length of the checksum file
+ */
public long getChecksumFileLength(Path file, long fileSize) {
return getChecksumLength(fileSize, getBytesPerSum());
}
- /** Return the bytes Per Checksum */
+ /**
+ * Return the bytes per checksum.
+ *
+ * @return bytes per checksum
+ */
public int getBytesPerSum() {
return bytesPerChecksum;
}
@@ -362,6 +381,7 @@ public synchronized void seek(long pos) throws IOException {
* Opens an FSDataInputStream at the indicated Path.
* @param f the file name to open
* @param bufferSize the size of the buffer to be used.
+ * @throws IOException if an I/O error occurs.
*/
@Override
public FSDataInputStream open(Path f, int bufferSize) throws IOException {
@@ -669,7 +689,7 @@ boolean apply(Path p) throws IOException {
* Implement the abstract setReplication of FileSystem
* @param src file name
* @param replication new replication
- * @throws IOException
+ * @throws IOException if an I/O error occurs.
* @return true if successful;
* false if file does not exist or is a directory
*/
@@ -754,7 +774,7 @@ public boolean accept(Path file) {
* @param f
* given path
* @return the statuses of the files/directories in the given path
- * @throws IOException
+ * @throws IOException if an I/O error occurs.
*/
@Override
public FileStatus[] listStatus(Path f) throws IOException {
@@ -775,7 +795,7 @@ public RemoteIterator<FileStatus> listStatusIterator(final Path p)
* @param f
* given path
* @return the statuses of the files/directories in the given patch
- * @throws IOException
+ * @throws IOException if an I/O error occurs.
*/
@Override
public RemoteIterator<LocatedFileStatus> listLocatedStatus(Path f)
@@ -811,6 +831,10 @@ public void copyToLocalFile(boolean delSrc, Path src, Path dst)
* Copy it from FS control to the local dst name.
* If src and dst are directories, the copyCrc parameter
* determines whether to copy CRC files.
+ * @param src src path
+ * @param dst dst path
+ * @param copyCrc whether to copy CRC files
+ * @throws IOException if an I/O error occurs.
*/
@SuppressWarnings("deprecation")
public void copyToLocalFile(Path src, Path dst, boolean copyCrc)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
index bc1122c56a2bd..a12b0e9b98613 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
@@ -70,30 +70,53 @@ public void setVerifyChecksum(boolean inVerifyChecksum) {
this.verifyChecksum = inVerifyChecksum;
}
- /** get the raw file system. */
+ /**
+ * Get the raw file system.
+ *
+ * @return the raw file system
+ */
public AbstractFileSystem getRawFs() {
return getMyFs();
}
- /** Return the name of the checksum file associated with a file.*/
+ /**
+ * Return the name of the checksum file associated with a file.
+ *
+ * @param file the file path
+ * @return the checksum file associated with a file
+ */
public Path getChecksumFile(Path file) {
return new Path(file.getParent(), "." + file.getName() + ".crc");
}
- /** Return true iff file is a checksum file name.*/
+ /**
+ * Return true iff file is a checksum file name.
+ *
+ * @param file the file path
+ * @return true if file is a checksum file, false otherwise
+ */
public static boolean isChecksumFile(Path file) {
String name = file.getName();
return name.startsWith(".") && name.endsWith(".crc");
}
- /** Return the length of the checksum file given the size of the
+ /**
+ * Return the length of the checksum file given the size of the
* actual file.
- **/
+ *
+ * @param file the file path
+ * @param fileSize file size
+ * @return the checksum file length
+ */
public long getChecksumFileLength(Path file, long fileSize) {
return getChecksumLength(fileSize, getBytesPerSum());
}
- /** Return the bytes Per Checksum. */
+ /**
+ * Return the bytes per checksum.
+ *
+ * @return bytes per checksum
+ */
public int getBytesPerSum() {
return defaultBytesPerChecksum;
}
@@ -433,7 +456,7 @@ private boolean isDirectory(Path f)
* Implement the abstract setReplication of FileSystem
* @param src file name
* @param replication new replication
- * @throws IOException
+ * @throws IOException if an I/O error occurs.
* @return true if successful;
* false if file does not exist or is a directory
*/
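
The checksum-file naming and sizing documented in both classes can be made
concrete. A rough worked example; the 4-byte CRC per chunk and the small
fixed header are assumptions about the on-disk format, not taken from these
hunks:

    // For /data/part-0, getChecksumFile() yields /data/.part-0.crc.
    long fileSize = 1L << 20;  // a 1 MiB data file
    int bytesPerSum = 512;     // one checksum per 512 data bytes
    int crcSize = 4;           // assumed CRC width in bytes
    long chunks = (fileSize + bytesPerSum - 1) / bytesPerSum;  // 2048 chunks
    long approxLen = chunks * crcSize;  // ~8 KiB of CRCs, plus a small header
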
From 618c934486bc36d17c800b98f2f194fb4574bcee Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Thu, 12 May 2022 22:51:16 -0700
Subject: [PATCH 28/53] HADOOP-18229. Fix some java doc compilation errors.
CommonConfigurationKeysPublic.java warning: empty tag
CompositeCrcFileChecksum.java warning: no @param for crc ContentSummary.java
warning: no @param for length, no @param for spaceQuota etc.
FileChecksum.java warning: no @return QuotaUsage.java warning: no @param for
builder,warning: no @return
---
.../fs/CommonConfigurationKeysPublic.java | 6 +-
.../hadoop/fs/CompositeCrcFileChecksum.java | 8 ++-
.../org/apache/hadoop/fs/ContentSummary.java | 24 ++++++-
.../org/apache/hadoop/fs/FileChecksum.java | 25 ++++++--
.../java/org/apache/hadoop/fs/QuotaUsage.java | 62 +++++++++++++++----
5 files changed, 103 insertions(+), 22 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index a799e883bcf2a..fdc5d3a40c106 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -169,11 +169,11 @@ public class CommonConfigurationKeysPublic {
/**
* Number of filesystems instances can be created in parallel.
- * <p/>
+ * <p>
* A higher number here does not necessarily improve performance, especially
* for object stores, where multiple threads may be attempting to create an FS
* instance for the same URI.
- * <p/>
+ * <p>
* Default value: {@value}.
*/
public static final String FS_CREATION_PARALLEL_COUNT =
@@ -181,7 +181,7 @@ public class CommonConfigurationKeysPublic {
/**
* Default value for {@link #FS_CREATION_PARALLEL_COUNT}.
- * <p/>
+ * <p>
* Default value: {@value}.
*/
public static final int FS_CREATION_PARALLEL_COUNT_DEFAULT =
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
index e1ed5cbcfcaa6..9c2ceb0526565 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
@@ -37,7 +37,13 @@ public class CompositeCrcFileChecksum extends FileChecksum {
private DataChecksum.Type crcType;
private int bytesPerCrc;
- /** Create a CompositeCrcFileChecksum. */
+ /**
+ * Create a CompositeCrcFileChecksum.
+ *
+ * @param crc the composite CRC value
+ * @param crcType the CRC algorithm type
+ * @param bytesPerCrc the number of data bytes per CRC
+ */
public CompositeCrcFileChecksum(
int crc, DataChecksum.Type crcType, int bytesPerCrc) {
this.crc = crc;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
index 79850e1a2f291..1050083cea78d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
@@ -149,17 +149,31 @@ public ContentSummary build() {
@Deprecated
public ContentSummary() {}
- /** Constructor, deprecated by ContentSummary.Builder
+ /**
+ * Constructor, deprecated by ContentSummary.Builder
* This constructor implicitly set spaceConsumed the same as length.
* spaceConsumed and length must be set explicitly with
* ContentSummary.Builder
+ *
+ * @param length the length of the content
+ * @param fileCount the number of files
+ * @param directoryCount the number of directories
* */
@Deprecated
public ContentSummary(long length, long fileCount, long directoryCount) {
this(length, fileCount, directoryCount, -1L, length, -1L);
}
- /** Constructor, deprecated by ContentSummary.Builder */
+ /**
+ * Constructor, deprecated by ContentSummary.Builder.
+ *
+ * @param length the length of the content
+ * @param fileCount the number of files
+ * @param directoryCount the number of directories
+ * @param quota the namespace quota
+ * @param spaceConsumed the space consumed
+ * @param spaceQuota the space quota
+ * */
@Deprecated
public ContentSummary(
long length, long fileCount, long directoryCount, long quota,
@@ -172,7 +186,11 @@ public ContentSummary(
setSpaceQuota(spaceQuota);
}
- /** Constructor for ContentSummary.Builder*/
+ /**
+ * Constructor for ContentSummary.Builder.
+ *
+ * @param builder the builder
+ */
private ContentSummary(Builder builder) {
super(builder);
this.length = builder.length;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
index 6822fa485622f..679c5811f19e4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
@@ -28,20 +28,37 @@
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class FileChecksum implements Writable {
- /** The checksum algorithm name */
+ /**
+ * The checksum algorithm name.
+ *
+ * @return algorithm name
+ */
public abstract String getAlgorithmName();
- /** The length of the checksum in bytes */
+ /**
+ * The length of the checksum in bytes.
+ *
+ * @return length
+ */
public abstract int getLength();
- /** The value of the checksum in bytes */
+ /**
+ * The value of the checksum in bytes.
+ *
+ * @return the checksum value as a byte array
+ */
public abstract byte[] getBytes();
public ChecksumOpt getChecksumOpt() {
return null;
}
- /** Return true if both the algorithms and the values are the same. */
+ /**
+ * Return true if both the algorithms and the values are the same.
+ *
+ * @param other the object to compare against
+ * @return true if equal, false otherwise
+ */
@Override
public boolean equals(Object other) {
if (other == this) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java
index b00a31891c867..215f9b233d3e8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java
@@ -105,7 +105,9 @@ public QuotaUsage build() {
// Make it protected for the deprecated ContentSummary constructor.
protected QuotaUsage() { }
- /** Build the instance based on the builder. */
+ /** Build the instance based on the builder.
+ * @param builder the builder
+ */
protected QuotaUsage(Builder builder) {
this.fileAndDirectoryCount = builder.fileAndDirectoryCount;
this.quota = builder.quota;
@@ -127,37 +129,67 @@ protected void setSpaceQuota(long spaceQuota) {
this.spaceQuota = spaceQuota;
}
- /** Return the directory count. */
+ /**
+ * Return the file and directory count.
+ *
+ * @return file and directory count
+ */
public long getFileAndDirectoryCount() {
return fileAndDirectoryCount;
}
- /** Return the directory quota. */
+ /**
+ * Return the directory quota.
+ *
+ * @return quota
+ */
public long getQuota() {
return quota;
}
- /** Return (disk) space consumed. */
+ /**
+ * Return (disk) space consumed.
+ *
+ * @return space consumed
+ */
public long getSpaceConsumed() {
return spaceConsumed;
}
- /** Return (disk) space quota. */
+ /**
+ * Return (disk) space quota.
+ *
+ * @return space quota
+ */
public long getSpaceQuota() {
return spaceQuota;
}
- /** Return storage type quota. */
+ /**
+ * Return storage type quota.
+ *
+ * @param type storage type
+ * @return type quota
+ */
public long getTypeQuota(StorageType type) {
return (typeQuota != null) ? typeQuota[type.ordinal()] : -1L;
}
- /** Return storage type consumed. */
+ /**
+ * Return storage type consumed.
+ *
+ * @param type storage type
+ * @return type consumed
+ */
public long getTypeConsumed(StorageType type) {
return (typeConsumed != null) ? typeConsumed[type.ordinal()] : 0L;
}
- /** Return true if any storage type quota has been set. */
+ /**
+ * Return true if any storage type quota has been set.
+ *
+ * @return true if any storage type quota has been set, false otherwise
+ */
public boolean isTypeQuotaSet() {
if (typeQuota != null) {
for (StorageType t : StorageType.getTypesSupportingQuota()) {
@@ -169,7 +201,12 @@ public boolean isTypeQuotaSet() {
return false;
}
- /** Return true if any storage type consumption information is available. */
+ /**
+ * Return true if any storage type consumption information is available.
+ *
+ * @return true if any storage type consumption information
+ * is available, false otherwise
+ */
public boolean isTypeConsumedAvailable() {
if (typeConsumed != null) {
for (StorageType t : StorageType.getTypesSupportingQuota()) {
@@ -271,11 +308,14 @@ public String toString(boolean hOption) {
return toString(hOption, false, null);
}
- /** Return the string representation of the object in the output format.
+ /**
+ * Return the string representation of the object in the output format.
* if hOption is false file sizes are returned in bytes
* if hOption is true file sizes are returned in human readable
*
* @param hOption a flag indicating if human readable output if to be used
+ * @param tOption type option
+ * @param types storage types
* @return the string representation of the object
*/
public String toString(boolean hOption,
@@ -328,7 +368,7 @@ protected String getTypesQuotaUsage(boolean hOption,
/**
* return the header with the StorageTypes.
*
- * @param storageTypes
+ * @param storageTypes storage types
* @return storage header string
*/
public static String getStorageTypeHeader(List<StorageType> storageTypes) {
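
A sketch of constructing a QuotaUsage through the Builder whose constructor
is documented above; the setter names mirror the fields but are an
assumption, since the Builder body is not shown in these hunks:

    QuotaUsage usage = new QuotaUsage.Builder()
        .fileAndDirectoryCount(100)
        .quota(1000)
        .spaceConsumed(4096)
        .spaceQuota(1L << 30)
        .build();
    long remaining = usage.getSpaceQuota() - usage.getSpaceConsumed();
    System.out.println(remaining + " bytes of space quota remaining");
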
From 89b7883173f186a553e8714659ef2a7a46af4e1c Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Thu, 12 May 2022 23:37:43 -0700
Subject: [PATCH 29/53] HADOOP-18229. Fix some java doc compilation errors.
CreateFlag.java warning: no @param for flag DelegationTokenRenewer.java
warning: no @return Shell.java warning: no @throws for java.io.IOException,no
@return etc. DF.java no @throws for java.io.IOException FSBuilder.java
warning: no @param for key,warning: no @return,warning: no @param for key
etc.
---
.../java/org/apache/hadoop/fs/CreateFlag.java | 2 +
.../main/java/org/apache/hadoop/fs/DF.java | 10 +-
.../hadoop/fs/DelegationTokenRenewer.java | 26 ++++-
.../java/org/apache/hadoop/fs/FSBuilder.java | 51 ++++++++--
.../java/org/apache/hadoop/util/Shell.java | 98 ++++++++++++++++---
5 files changed, 160 insertions(+), 27 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
index 71993713ad2eb..b197d43d8c792 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
@@ -189,6 +189,8 @@ public static void validate(Object path, boolean pathExists,
/**
* Validate the CreateFlag for the append operation. The flag must contain
* APPEND, and cannot contain OVERWRITE.
+ *
+ * @param flag the set of create flags to validate
*/
public static void validateForAppend(EnumSet<CreateFlag> flag) {
validate(flag);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
index da4636b2c0fbe..3c06e97401341 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
@@ -65,7 +65,10 @@ public String getDirPath() {
return dirPath;
}
- /** @return a string indicating which filesystem volume we're checking. */
+ /**
+ * @return a string indicating which filesystem volume we're checking.
+ * @throws IOException raised on errors performing I/O.
+ */
public String getFilesystem() throws IOException {
if (Shell.WINDOWS) {
this.filesystem = dirFile.getCanonicalPath().substring(0, 2);
@@ -100,7 +103,10 @@ public int getPercentUsed() {
return (int) (used * 100.0 / cap);
}
- /** @return the filesystem mount point for the indicated volume */
+ /**
+ * @return the filesystem mount point for the indicated volume
+ * @throws IOException raised on errors performing I/O.
+ */
public String getMount() throws IOException {
// Abort early if specified path does not exist
if (!dirFile.exists()) {
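
A sketch of the two DF accessors documented above; the directory and refresh
interval are illustrative, and DF shells out to df, so this is Unix-only:

    // Constructor takes the path to check and a refresh interval in ms;
    // both accessors declare IOException.
    DF df = new DF(new File("/tmp"), 3000L);
    System.out.println(df.getFilesystem() + " mounted at " + df.getMount());
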
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
index 33905dcbb77fd..6244797119602 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
@@ -47,7 +47,11 @@ public interface Renewable {
/** @return the renew token. */
public Token<?> getRenewToken();
- /** Set delegation token. */
+ /**
+ * Set delegation token.
+ * @param <T> generic type T
+ * @param token token
+ */
public <T extends TokenIdentifier> void setDelegationToken(Token<T> token);
}
@@ -172,7 +176,11 @@ public String toString() {
/** Queue to maintain the RenewActions to be processed by the {@link #run()} */
private volatile DelayQueue<RenewAction<?>> queue = new DelayQueue<RenewAction<?>>();
- /** For testing purposes */
+ /**
+ * For testing purposes.
+ *
+ * @return renew queue length
+ */
@VisibleForTesting
protected int getRenewQueueLength() {
return queue.size();
@@ -211,7 +219,13 @@ static synchronized void reset() {
}
}
- /** Add a renew action to the queue. */
+ /**
+ * Add a renew action to the queue.
+ *
+ * @param <T> generic type T
+ * @param fs file system
+ * @return renew action
+ */
@SuppressWarnings("static-access")
public <T extends FileSystem & Renewable> RenewAction<T> addRenewAction(final T fs) {
synchronized (this) {
@@ -230,8 +244,10 @@ public RenewAction addRenewAction(final T
/**
* Remove the associated renew action from the queue
- *
- * @throws IOException
+ *
+ * @param <T> generic type T
+ * @param fs file system
+ * @throws IOException raised on errors performing I/O.
*/
public <T extends FileSystem & Renewable> void removeRenewAction(
final T fs) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
index a4c7254cfeb3c..fe72e117903ba 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
@@ -37,12 +37,17 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
/**
* Set optional Builder parameter.
+ * @param key key
+ * @param value value
+ * @return generic type B
*/
B opt(@Nonnull String key, @Nonnull String value);
/**
* Set optional boolean parameter for the Builder.
- *
+ * @param key key
+ * @param value value
+ * @return generic type B
* @see #opt(String, String)
*/
B opt(@Nonnull String key, boolean value);
@@ -50,6 +55,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
/**
* Set optional int parameter for the Builder.
*
+ * @param key key
+ * @param value value
+ * @return generic type B
* @see #opt(String, String)
*/
B opt(@Nonnull String key, int value);
@@ -57,6 +65,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
/**
* Set optional float parameter for the Builder.
*
+ * @param key key
+ * @param value value
+ * @return generic type B
* @see #opt(String, String)
*/
B opt(@Nonnull String key, float value);
@@ -64,6 +75,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
/**
* Set optional long parameter for the Builder.
*
+ * @param key key
+ * @param value value
+ * @return generic type B
* @see #opt(String, String)
*/
B opt(@Nonnull String key, long value);
@@ -71,13 +85,18 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
/**
* Set optional double parameter for the Builder.
*
+ * @param key key
+ * @param value value
+ * @return generic type B
* @see #opt(String, String)
*/
B opt(@Nonnull String key, double value);
/**
* Set an array of string values as optional parameter for the Builder.
- *
+ * @param key key
+ * @param values values
+ * @return generic type B
* @see #opt(String, String)
*/
B opt(@Nonnull String key, @Nonnull String... values);
@@ -87,47 +106,64 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
*
* If the option is not supported or unavailable,
* the client should expect {@link #build()} throws IllegalArgumentException.
+ *
+ * @param key key
+ * @param value value
+ * @return generic type B
*/
B must(@Nonnull String key, @Nonnull String value);
/**
* Set mandatory boolean option.
*
+ * @param key key
+ * @param value value
+ * @return generic type B
* @see #must(String, String)
*/
B must(@Nonnull String key, boolean value);
/**
* Set mandatory int option.
- *
+ * @param key key
+ * @param value value
+ * @return generic type B
* @see #must(String, String)
*/
B must(@Nonnull String key, int value);
/**
* Set mandatory float option.
- *
+ * @param key key
+ * @param value value
+ * @return generic type B
* @see #must(String, String)
*/
B must(@Nonnull String key, float value);
/**
* Set mandatory long option.
- *
+ * @param key key
+ * @param value value
+ * @return generic type B
* @see #must(String, String)
*/
B must(@Nonnull String key, long value);
/**
* Set mandatory double option.
- *
+ * @param key key
+ * @param value value
+ * @return generic type B
* @see #must(String, String)
*/
B must(@Nonnull String key, double value);
/**
* Set a string array as mandatory option.
- *
+ * @param key key
+ * @param values values
+ * @return generic type B
* @see #must(String, String)
*/
B must(@Nonnull String key, @Nonnull String... values);
@@ -139,6 +175,7 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
* @throws UnsupportedOperationException if the filesystem does not support
* the specific operation.
* @throws IOException on filesystem IO errors.
+ * @return generic type S
*/
S build() throws IllegalArgumentException,
UnsupportedOperationException, IOException;
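
The opt/must contract documented above separates hints from hard
requirements: an unrecognized opt key may be ignored, while an unsupported
must key makes build() throw IllegalArgumentException. A sketch against a
hypothetical key set (the key names are invented, and fs.createFile
returning an FSBuilder subtype is an assumption):

    FSDataOutputStream out = fs.createFile(new Path("/tmp/out"))
        .opt("example.client.buffer.size", 65536)  // hint: safe to ignore
        .must("example.client.checksum", true)     // build() fails if unsupported
        .build();
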
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
index 084e2b8f5e3b6..b72ce63f5d06f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
@@ -122,6 +122,7 @@ public static boolean isJavaVersionAtLeast(int version) {
* delimiters, no extra count will be added for delimiters.
*
* @param commands command parts, including any space delimiters
+ * @throws IOException raised on errors performing I/O.
*/
public static void checkWindowsCommandLineLength(String...commands)
throws IOException {
@@ -205,7 +206,11 @@ private static OSType getOSType() {
public static final boolean PPC_64
= System.getProperties().getProperty("os.arch").contains("ppc64");
- /** a Unix command to get the current user's groups list. */
+ /**
+ * A Unix command to get the current user's groups list.
+ *
+ * @return group command array
+ */
public static String[] getGroupsCommand() {
return (WINDOWS)? new String[]{"cmd", "/c", "groups"}
: new String[]{"groups"};
@@ -216,6 +221,9 @@ public static String[] getGroupsCommand() {
* If the OS is not WINDOWS, the command will get the user's primary group
* first and finally get the groups list which includes the primary group.
* i.e. the user's primary group will be included twice.
+ *
+ * @param user user
+ * @return groups for user command
*/
public static String[] getGroupsForUserCommand(final String user) {
//'groups username' command return is inconsistent across different unixes
@@ -235,6 +243,9 @@ public static String[] getGroupsForUserCommand(final String user) {
* first and finally get the groups list which includes the primary group.
* i.e. the user's primary group will be included twice.
* This command does not support Windows and will only return group names.
+ *
+ * @param user user
+ * @return groups id for user command
*/
public static String[] getGroupsIDForUserCommand(final String user) {
//'groups username' command return is inconsistent across different unixes
@@ -248,19 +259,34 @@ public static String[] getGroupsIDForUserCommand(final String user) {
}
}
- /** A command to get a given netgroup's user list. */
+ /**
+ * A command to get a given netgroup's user list.
+ *
+ * @param netgroup net group
+ * @return users for net group command
+ */
public static String[] getUsersForNetgroupCommand(final String netgroup) {
//'groups username' command return is non-consistent across different unixes
return new String[] {"getent", "netgroup", netgroup};
}
- /** Return a command to get permission information. */
+ /**
+ * Return a command to get permission information.
+ *
+ * @return permission command
+ */
public static String[] getGetPermissionCommand() {
return (WINDOWS) ? new String[] { getWinUtilsPath(), "ls", "-F" }
: new String[] { "ls", "-ld" };
}
- /** Return a command to set permission. */
+ /**
+ * Return a command to set permission.
+ *
+ * @param perm permission
+ * @param recursive recursive
+ * @return set permission command
+ */
public static String[] getSetPermissionCommand(String perm, boolean recursive) {
if (recursive) {
return (WINDOWS) ?
@@ -290,21 +316,37 @@ public static String[] getSetPermissionCommand(String perm,
return cmdWithFile;
}
- /** Return a command to set owner. */
+ /**
+ * Return a command to set owner.
+ *
+ * @param owner owner
+ * @return set owner command
+ */
public static String[] getSetOwnerCommand(String owner) {
return (WINDOWS) ?
new String[] { getWinUtilsPath(), "chown", "\"" + owner + "\"" }
: new String[] { "chown", owner };
}
- /** Return a command to create symbolic links. */
+ /**
+ * Return a command to create symbolic links.
+ *
+ * @param target target
+ * @param link link
+ * @return symlink command
+ */
public static String[] getSymlinkCommand(String target, String link) {
return WINDOWS ?
new String[] { getWinUtilsPath(), "symlink", link, target }
: new String[] { "ln", "-s", target, link };
}
- /** Return a command to read the target of the a symbolic link. */
+ /**
+ * Return a command to read the target of a symbolic link.
+ *
+ * @param link link
+ * @return read link command
+ */
public static String[] getReadlinkCommand(String link) {
return WINDOWS ?
new String[] { getWinUtilsPath(), "readlink", link }
@@ -320,7 +362,13 @@ public static String[] getCheckProcessIsAliveCommand(String pid) {
return getSignalKillCommand(0, pid);
}
- /** Return a command to send a signal to a given pid. */
+ /**
+ * Return a command to send a signal to a given pid.
+ *
+ * @param code code
+ * @param pid pid
+ * @return signal kill command
+ */
public static String[] getSignalKillCommand(int code, String pid) {
// Code == 0 means check alive
if (Shell.WINDOWS) {
@@ -347,7 +395,11 @@ public static String[] getSignalKillCommand(int code, String pid) {
/** Regular expression for environment variables: {@value}. */
public static final String ENV_NAME_REGEX = "[A-Za-z_][A-Za-z0-9_]*";
- /** Return a regular expression string that match environment variables. */
+ /**
+ * Return a regular expression string that matches environment variables.
+ *
+ * @return environment variable regex
+ */
public static String getEnvironmentVariableRegex() {
return (WINDOWS)
? "%(" + ENV_NAME_REGEX + "?)%"
@@ -890,7 +942,11 @@ protected void setWorkingDirectory(File dir) {
this.dir = dir;
}
- /** Check to see if a command needs to be executed and execute if needed. */
+ /**
+ * Check to see if a command needs to be executed and execute if needed.
+ *
+ * @throws IOException raised on errors performing I/O.
+ */
protected void run() throws IOException {
if (lastTime + interval > Time.monotonicNow()) {
return;
@@ -902,7 +958,11 @@ protected void run() throws IOException {
runCommand();
}
- /** Run the command. */
+ /**
+ * Run the command.
+ *
+ * @throws IOException raised on errors performing I/O.
+ */
private void runCommand() throws IOException {
ProcessBuilder builder = new ProcessBuilder(getExecString());
Timer timeOutTimer = null;
@@ -1049,10 +1109,19 @@ private static void joinThread(Thread t) {
}
}
- /** return an array containing the command name and its parameters. */
+ /**
+ * Return an array containing the command name and its parameters.
+ *
+ * @return exec string array
+ */
protected abstract String[] getExecString();
- /** Parse the execution result */
+ /**
+ * Parse the execution result.
+ *
+ * @param lines the output lines of the executed command
+ * @throws IOException raised on errors performing I/O.
+ */
protected abstract void parseExecResult(BufferedReader lines)
throws IOException;
@@ -1283,6 +1352,7 @@ private void setTimedOut() {
* the Shell interface.
* @param cmd shell command to execute.
* @return the output of the executed command.
+ * @throws IOException raised on errors performing I/O.
*/
public static String execCommand(String ... cmd) throws IOException {
return execCommand(null, cmd, 0L);
@@ -1367,6 +1437,8 @@ public static void destroyAllShellProcesses() {
/**
* Static method to return a Set of all Shell objects.
+ *
+ * @return all shells set
*/
public static Set<Shell> getAllShells() {
synchronized (CHILD_SHELLS) {
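For reference, a minimal sketch of how the Shell command helpers documented above are consumed (not part of the patch; the pid is made up and a Unix host is assumed):

    import org.apache.hadoop.util.Shell;

    public class ShellHelpersDemo {
      public static void main(String[] args) throws Exception {
        // getGroupsCommand() returns e.g. {"groups"} on Unix.
        String[] groupsCmd = Shell.getGroupsCommand();
        // execCommand runs the command array and returns its stdout.
        System.out.println(Shell.execCommand(groupsCmd).trim());

        // Code 0 builds a liveness probe, i.e. "kill -0 <pid>" on Unix.
        String[] probe = Shell.getSignalKillCommand(0, "12345");
        System.out.println(String.join(" ", probe));
      }
    }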
From b30f011c1cb48a08c520a4df36fb435cb8d1a887 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Fri, 13 May 2022 02:07:55 -0700
Subject: [PATCH 30/53] HADOOP-18229. Fix some java doc compilation errors.
AbstractFSBuilderImpl.java warning: no @return, ChecksumFileSystem.java
warning: no @param for file warning: no @return etc, FileEncryptionInfo.java
warning: no @param for version,no @param for isdir etc, FileStatus.java
warning: no @param for block_replication, warning: no @param for blocksize
etc, FileSystem.java warning: no @return, FileSystemLinkResolver.java
warning: no description for @throws, FSDataOutputStreamBuilder.java warning:
no @return, warning: no @param for key etc, NetUtils.java warning: no @param
for conf, warning: no @return etc, SecurityUtil.java warning: no @param for
flag, warning: no @param for etc, StorageStatistics.java warning: no
@return, warning: no @param for key
---
.../apache/hadoop/fs/ChecksumFileSystem.java | 2 +-
.../hadoop/fs/FSDataOutputStreamBuilder.java | 31 ++++++++++
.../apache/hadoop/fs/FileEncryptionInfo.java | 4 ++
.../java/org/apache/hadoop/fs/FileStatus.java | 14 +++++
.../java/org/apache/hadoop/fs/FileSystem.java | 1 +
.../hadoop/fs/FileSystemLinkResolver.java | 8 +--
.../apache/hadoop/fs/StorageStatistics.java | 5 ++
.../hadoop/fs/impl/AbstractFSBuilderImpl.java | 2 +
.../java/org/apache/hadoop/net/NetUtils.java | 61 +++++++++++++------
.../apache/hadoop/security/SecurityUtil.java | 12 ++++
10 files changed, 115 insertions(+), 25 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
index 578ef03956d9e..6d7afadd78f50 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
@@ -136,7 +136,7 @@ public long getChecksumFileLength(Path file, long fileSize) {
}
/**
- * Return the bytes Per Checksum
+ * Return the bytes Per Checksum.
*
* @return bytes per check sum
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
index c96d499d17ba6..6212fa58c2228 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
@@ -123,6 +123,8 @@ public abstract class FSDataOutputStreamBuilder
/**
* Constructor.
+ * @param fileSystem file system
+ * @param p the path
*/
protected FSDataOutputStreamBuilder(@Nonnull FileSystem fileSystem,
@Nonnull Path p) {
@@ -149,6 +151,9 @@ protected FsPermission getPermission() {
/**
* Set permission for the file.
+ *
+ * @param perm permission
+ * @return B Generics Type
*/
public B permission(@Nonnull final FsPermission perm) {
checkNotNull(perm);
@@ -162,6 +167,9 @@ protected int getBufferSize() {
/**
* Set the size of the buffer to be used.
+ *
+ * @param bufSize buffer size
+ * @return Generics Type B
*/
public B bufferSize(int bufSize) {
bufferSize = bufSize;
@@ -174,6 +182,9 @@ protected short getReplication() {
/**
* Set replication factor.
+ *
+ * @param replica replica
+ * @return Generics Type B
*/
public B replication(short replica) {
replication = replica;
@@ -186,6 +197,9 @@ protected long getBlockSize() {
/**
* Set block size.
+ *
+ * @param blkSize block size
+ * @return B Generics Type
*/
public B blockSize(long blkSize) {
blockSize = blkSize;
@@ -194,6 +208,8 @@ public B blockSize(long blkSize) {
/**
* Return true to create the parent directories if they do not exist.
+ *
+ * @return true if missing parent directories will be created, false otherwise
*/
protected boolean isRecursive() {
return recursive;
@@ -201,6 +217,8 @@ protected boolean isRecursive() {
/**
* Create the parent directory if they do not exist.
+ *
+ * @return B Generics Type
*/
public B recursive() {
recursive = true;
@@ -213,6 +231,9 @@ protected Progressable getProgress() {
/**
* Set the facility of reporting progress.
+ *
+ * @param prog progress
+ * @return B Generics Type
*/
public B progress(@Nonnull final Progressable prog) {
checkNotNull(prog);
@@ -226,6 +247,8 @@ protected EnumSet<CreateFlag> getFlags() {
/**
* Create an FSDataOutputStream at the specified path.
+ *
+ * @return Generics Type B
*/
public B create() {
flags.add(CreateFlag.CREATE);
@@ -236,6 +259,9 @@ public B create() {
* Set to true to overwrite the existing file.
* Set it to false, an exception will be thrown when calling {@link #build()}
* if the file exists.
+ *
+ * @param overwrite true to overwrite an existing file
+ * @return Generics Type B
*/
public B overwrite(boolean overwrite) {
if (overwrite) {
@@ -248,6 +274,8 @@ public B overwrite(boolean overwrite) {
/**
* Append to an existing file (optional operation).
+ *
+ * @return Generics Type B
*/
public B append() {
flags.add(CreateFlag.APPEND);
@@ -260,6 +288,9 @@ protected ChecksumOpt getChecksumOpt() {
/**
* Set checksum opt.
+ *
+ * @param chksumOpt check sum opt
+ * @return Generics Type B
*/
public B checksumOpt(@Nonnull final ChecksumOpt chksumOpt) {
checkNotNull(chksumOpt);
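For reference, a minimal sketch of chaining the builder setters documented above (not part of the patch; the path is illustrative). Each setter returns the builder, which is what the added "@return Generics Type B" tags describe:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class CreateFileDemo {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        try (FSDataOutputStream out = fs.createFile(new Path("/tmp/demo.txt"))
            .bufferSize(4096)
            .replication((short) 1)
            .recursive()        // create missing parent directories
            .overwrite(true)    // adds CreateFlag.OVERWRITE
            .build()) {
          out.writeUTF("hello");
        }
      }
    }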
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
index 9260b9a62c62e..4fd80572e60b4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
@@ -52,6 +52,8 @@ public class FileEncryptionInfo implements Serializable {
* @param keyName name of the key used for the encryption zone
* @param ezKeyVersionName name of the KeyVersion used to encrypt the
* encrypted data encryption key.
+ * @param version the crypto protocol version
*/
public FileEncryptionInfo(final CipherSuite suite,
final CryptoProtocolVersion version, final byte[] edek,
@@ -134,6 +136,8 @@ public String toString() {
*
* NOTE:
* Currently this method is used by CLI for backward compatibility.
+ *
+ * @return stable string
*/
public String toStringStable() {
StringBuilder builder = new StringBuilder("{")
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
index d7ca8f172f8e2..18e7154a7d613 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
@@ -116,6 +116,17 @@ public FileStatus(long length, boolean isdir, int block_replication,
/**
* Constructor for file systems on which symbolic links are not supported
+ *
+ * @param length length
+ * @param isdir isdir
+ * @param block_replication block replication
+ * @param blocksize block size
+ * @param modification_time modification time
+ * @param access_time access_time
+ * @param permission permission
+ * @param owner owner
+ * @param group group
+ * @param path the path
*/
public FileStatus(long length, boolean isdir,
int block_replication,
@@ -182,6 +193,7 @@ public FileStatus(long length, boolean isdir, int block_replication,
* Copy constructor.
*
* @param other FileStatus to copy
+ * @throws IOException raised on errors performing I/O.
*/
public FileStatus(FileStatus other) throws IOException {
// It's important to call the getters here instead of directly accessing the
@@ -375,6 +387,8 @@ protected void setGroup(String group) {
/**
* @return The contents of the symbolic link.
+ *
+ * @throws IOException raised on errors performing I/O.
*/
public Path getSymlink() throws IOException {
if (!isSymlink()) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 421a0a1c54874..84dc9a01494de 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -4127,6 +4127,7 @@ public void run() {
/**
* Get or create the thread-local data associated with the current thread.
+ * @return statistics data
*/
public StatisticsData getThreadStatistics() {
StatisticsData data = threadData.get();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java
index 7eec0eb7cec54..e2011e279a5e6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java
@@ -38,8 +38,8 @@ public abstract class FileSystemLinkResolver<T> {
* an UnresolvedLinkException if called on an unresolved {@link Path}.
* @param p Path on which to perform an operation
* @return Generic type returned by operation
- * @throws IOException
- * @throws UnresolvedLinkException
+ * @throws IOException raised on errors performing I/O.
+ * @throws UnresolvedLinkException if called on an unresolved path
*/
abstract public T doCall(final Path p) throws IOException,
UnresolvedLinkException;
@@ -54,7 +54,7 @@ abstract public T doCall(final Path p) throws IOException,
* @param p
* Resolved Target of path
* @return Generic type determined by implementation
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
abstract public T next(final FileSystem fs, final Path p) throws IOException;
@@ -66,7 +66,7 @@ abstract public T doCall(final Path p) throws IOException,
* @param filesys FileSystem with which to try call
* @param path Path with which to try call
* @return Generic type determined by implementation
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public T resolve(final FileSystem filesys, final Path path)
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
index 2efe4566344ee..1122e5fbf1162 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
@@ -127,6 +127,7 @@ public StorageStatistics(String name) {
/**
* Get the name of this StorageStatistics object.
+ * @return name of this StorageStatistics object
*/
public String getName() {
return name;
@@ -145,12 +146,15 @@ public String getScheme() {
*
* The values returned will depend on the type of FileSystem or FileContext
* object. The values do not necessarily reflect a snapshot in time.
+ *
+ * @return LongStatistic Iterator
*/
public abstract Iterator<LongStatistic> getLongStatistics();
/**
* Get the value of a statistic.
*
+ * @param key key
* @return null if the statistic is not being tracked or is not a
* long statistic. The value of the statistic, otherwise.
*/
@@ -159,6 +163,7 @@ public String getScheme() {
/**
* Return true if a statistic is being tracked.
*
+ * @param key key
* @return True only if the statistic is being tracked.
*/
public abstract boolean isTracked(String key);
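For reference, a minimal sketch of walking the statistics described by the javadoc above (not part of the patch):

    import java.util.Iterator;
    import org.apache.hadoop.fs.GlobalStorageStatistics;
    import org.apache.hadoop.fs.StorageStatistics;

    public class StatsDemo {
      public static void main(String[] args) {
        Iterator<StorageStatistics> schemes =
            GlobalStorageStatistics.INSTANCE.iterator();
        while (schemes.hasNext()) {
          StorageStatistics stats = schemes.next();
          // getLongStatistics() iterates the tracked long counters.
          Iterator<StorageStatistics.LongStatistic> it = stats.getLongStatistics();
          while (it.hasNext()) {
            StorageStatistics.LongStatistic s = it.next();
            System.out.println(stats.getName() + ": "
                + s.getName() + "=" + s.getValue());
          }
        }
      }
    }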
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java
index 9d3a46d633253..44380904b3f26 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java
@@ -340,12 +340,14 @@ public Configuration getOptions() {
/**
* Get all the keys that are set as mandatory keys.
+ * @return mandatory keys
*/
public Set<String> getMandatoryKeys() {
return Collections.unmodifiableSet(mandatoryKeys);
}
/**
* Get all the keys that are set as optional keys.
+ * @return optional keys
*/
public Set<String> getOptionalKeys() {
return Collections.unmodifiableSet(optionalKeys);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
index fead87d7907d7..eef7d7c31964a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
@@ -133,7 +133,8 @@ public static SocketFactory getDefaultSocketFactory(Configuration conf) {
* Get the socket factory corresponding to the given proxy URI. If the
* given proxy URI corresponds to an absence of configuration parameter,
* returns null. If the URI is malformed raises an exception.
- *
+ *
+ * @param conf configuration
* @param propValue the property which is the class name of the
* SocketFactory to instantiate; assumed non null and non empty.
* @return a socket factory as defined in the property value.
@@ -151,19 +152,26 @@ public static SocketFactory getSocketFactoryFromProperty(
}
/**
- * Util method to build socket addr from either:
+ * Util method to build socket addr from either of the following forms.
* {@literal <host>:<port>}
* {@literal <fs>://<host>:<port>/<path>}
+ *
+ * @param target target
+ * @return socket addr
*/
public static InetSocketAddress createSocketAddr(String target) {
return createSocketAddr(target, -1);
}
/**
- * Util method to build socket addr from either:
+ * Util method to build socket addr from either of the following forms.
* {@literal <host>}
* {@literal <host>:<port>}
* {@literal <fs>://<host>:<port>/<path>}
+ *
+ * @param target target
+ * @param defaultPort default port
+ * @return socket addr
*/
public static InetSocketAddress createSocketAddr(String target,
int defaultPort) {
@@ -183,6 +191,7 @@ public static InetSocketAddress createSocketAddr(String target,
* @param configName the name of the configuration from which
* target was loaded. This is used in the
* exception message in the case that parsing fails.
+ * @return socket addr
*/
public static InetSocketAddress createSocketAddr(String target,
int defaultPort,
@@ -204,6 +213,7 @@ public static InetSocketAddress createSocketAddr(String target,
* target was loaded. This is used in the
* exception message in the case that parsing fails.
* @param useCacheIfPresent Whether use cache when create URI
+ * @return socket addr
*/
public static InetSocketAddress createSocketAddr(String target,
int defaultPort,
@@ -361,8 +371,8 @@ private static String canonicalizeHost(String host) {
* daemons, one can set up mappings from those hostnames to "localhost".
* {@link NetUtils#getStaticResolution(String)} can be used to query for
* the actual hostname.
- * @param host
- * @param resolvedName
+ * @param host the hostname or IP used to instantiate the object
+ * @param resolvedName resolved name
*/
public static void addStaticResolution(String host, String resolvedName) {
synchronized (hostToResolved) {
@@ -374,7 +384,7 @@ public static void addStaticResolution(String host, String resolvedName) {
* Retrieves the resolved name for the passed host. The resolved name must
* have been set earlier using
* {@link NetUtils#addStaticResolution(String, String)}
- * @param host
+ * @param host the hostname or IP used to instantiate the object
* @return the resolution
*/
public static String getStaticResolution(String host) {
@@ -410,7 +420,7 @@ public static List getAllStaticResolutions() {
* the server binds to "0.0.0.0". This returns "hostname:port" of the server,
* or "127.0.0.1:port" when the getListenerAddress() returns "0.0.0.0:port".
*
- * @param server
+ * @param server server
* @return socket address that a client can use to connect to the server.
*/
public static InetSocketAddress getConnectAddress(Server server) {
@@ -438,8 +448,11 @@ public static InetSocketAddress getConnectAddress(InetSocketAddress addr) {
/**
* Same as getInputStream(socket, socket.getSoTimeout()).
- *
- *
+ *
+ *
+ * @param socket socket
+ * @throws IOException raised on errors performing I/O.
+ * @return SocketInputWrapper for reading from the socket.
* @see #getInputStream(Socket, long)
*/
public static SocketInputWrapper getInputStream(Socket socket)
@@ -462,11 +475,11 @@ public static SocketInputWrapper getInputStream(Socket socket)
*
* @see Socket#getChannel()
*
- * @param socket
+ * @param socket socket
* @param timeout timeout in milliseconds. zero for waiting as
* long as necessary.
* @return SocketInputWrapper for reading from the socket.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static SocketInputWrapper getInputStream(Socket socket, long timeout)
throws IOException {
@@ -494,9 +507,9 @@ public static SocketInputWrapper getInputStream(Socket socket, long timeout)
*
* @see #getOutputStream(Socket, long)
*
- * @param socket
+ * @param socket socket
* @return OutputStream for writing to the socket.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static OutputStream getOutputStream(Socket socket)
throws IOException {
@@ -516,11 +529,11 @@ public static OutputStream getOutputStream(Socket socket)
*
* @see Socket#getChannel()
*
- * @param socket
+ * @param socket socket
* @param timeout timeout in milliseconds. This may not always apply. zero
* for waiting as long as necessary.
* @return OutputStream for writing to the socket.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static OutputStream getOutputStream(Socket socket, long timeout)
throws IOException {
@@ -541,9 +554,10 @@ public static OutputStream getOutputStream(Socket socket, long timeout)
*
* @see java.net.Socket#connect(java.net.SocketAddress, int)
*
- * @param socket
+ * @param socket socket
* @param address the remote address
* @param timeout timeout in milliseconds
+ * @throws IOException raised on errors performing I/O.
*/
public static void connect(Socket socket,
SocketAddress address,
@@ -555,10 +569,11 @@ public static void connect(Socket socket,
* Like {@link NetUtils#connect(Socket, SocketAddress, int)} but
* also takes a local address and port to bind the socket to.
*
- * @param socket
+ * @param socket socket
* @param endpoint the remote address
* @param localAddr the local address to bind the socket to
* @param timeout timeout in milliseconds
+ * @throws IOException raised on errors performing I/O.
*/
public static void connect(Socket socket,
SocketAddress endpoint,
@@ -644,7 +659,7 @@ public static List normalizeHostNames(Collection names) {
* Performs a sanity check on the list of hostnames/IPs to verify they at least
* appear to be valid.
* @param names - List of hostnames/IPs
- * @throws UnknownHostException
+ * @throws UnknownHostException if any of the names is null or not a valid hostname/IP
*/
public static void verifyHostnames(String[] names) throws UnknownHostException {
for (String name: names) {
@@ -735,6 +750,9 @@ public static String getHostname() {
/**
* Compose a "host:port" string from the address.
+ *
+ * @param addr address
+ * @return "host:port" string
*/
public static String getHostPortString(InetSocketAddress addr) {
return addr.getHostName() + ":" + addr.getPort();
@@ -969,6 +987,8 @@ private static String quoteHost(final String hostname) {
}
/**
+ * Check whether the given string is a valid subnet in CIDR notation.
+ * @param subnet the subnet specification
* @return true if the given string is a subnet specified
* using CIDR notation, false otherwise
*/
@@ -1004,6 +1024,7 @@ private static void addMatchingAddrs(NetworkInterface nif,
* @param returnSubinterfaces
* whether to return IPs associated with subinterfaces
* @throws IllegalArgumentException if subnet is invalid
+ * @return the IPs associated with interfaces in the subnet
*/
public static List getIPs(String subnet,
boolean returnSubinterfaces) {
@@ -1083,8 +1104,8 @@ public static Set getFreeSocketPorts(int numOfPorts) {
* Return an @{@link InetAddress} to bind to. If bindWildCardAddress is true
* than returns null.
*
- * @param localAddr
- * @param bindWildCardAddress
+ * @param localAddr local addr
+ * @param bindWildCardAddress bind wildcard address
* @return InetAddress
*/
public static InetAddress bindToLocalAddress(InetAddress localAddr, boolean
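For reference, a minimal sketch of the address forms createSocketAddr accepts, per the javadoc above (not part of the patch; host and port are made up):

    import java.net.InetSocketAddress;
    import org.apache.hadoop.net.NetUtils;

    public class AddrDemo {
      public static void main(String[] args) {
        // "<host>:<port>" form.
        InetSocketAddress a = NetUtils.createSocketAddr("nn.example.com:8020");
        // "<host>" form, falling back to the default port argument.
        InetSocketAddress b = NetUtils.createSocketAddr("nn.example.com", 8020);
        System.out.println(NetUtils.getHostPortString(a));
        System.out.println(NetUtils.getHostPortString(b));
      }
    }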
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index c9423490635cb..187ea28632bd5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -116,6 +116,8 @@ private static void setConfigurationInternal(Configuration conf) {
/**
* For use only by tests and initialization
+ *
+ * @param flag flag
*/
@InterfaceAudience.Private
@VisibleForTesting
@@ -487,6 +489,10 @@ public static Text buildTokenService(URI uri) {
* Perform the given action as the daemon's login user. If the login
* user cannot be determined, this will log a FATAL error and exit
* the whole JVM.
+ *
+ * @param action action
+ * @param <T> generic type T
+ * @return generic type T
*/
public static <T> T doAsLoginUserOrFatal(PrivilegedAction<T> action) {
if (UserGroupInformation.isSecurityEnabled()) {
@@ -511,6 +517,7 @@ public static <T> T doAsLoginUser(PrivilegedExceptionAction<T> action)
* @param action the action to perform
* @return the result of the action
* @throws IOException in the event of error
+ * @param <T> generic type T
*/
public static <T> T doAsLoginUser(PrivilegedExceptionAction<T> action)
throws IOException {
@@ -522,6 +529,7 @@ public static T doAsLoginUser(PrivilegedExceptionAction action)
* InterruptedException is thrown, it is converted to an IOException.
*
* @param action the action to perform
+ * @param <T> generic type T
* @return the result of the action
* @throws IOException in the event of error
*/
@@ -745,9 +753,13 @@ public static boolean isPrivilegedPort(final int port) {
/**
* Utility method to fetch ZK auth info from the configuration.
+ *
+ * @param conf configuration
+ * @param configKey config key
* @throws java.io.IOException if the Zookeeper ACLs configuration file
* cannot be read
* @throws ZKUtil.BadAuthFormatException if the auth format is invalid
+ * @return ZKAuthInfo List
*/
public static List<ZKAuthInfo> getZKAuthInfos(Configuration conf,
String configKey) throws IOException {
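For reference, a minimal sketch of doAsLoginUser, whose <T> parameter the fixes above document (not part of the patch; assumes a configured login user):

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.security.SecurityUtil;
    import org.apache.hadoop.security.UserGroupInformation;

    public class DoAsDemo {
      public static void main(String[] args) throws IOException {
        // T is inferred from the action's return type (String here).
        String user = SecurityUtil.doAsLoginUser(
            new PrivilegedExceptionAction<String>() {
              @Override
              public String run() throws IOException {
                return UserGroupInformation.getCurrentUser().getUserName();
              }
            });
        System.out.println("ran as: " + user);
      }
    }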
From 402620856ebe2ceb1674e005a0133aea6dab6d97 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Fri, 13 May 2022 03:18:13 -0700
Subject: [PATCH 31/53] HADOOP-18229. Fix some java doc compilation errors.
Command.java warning: no description for @throws, warning: no @return etc.
FileUtil.java warning: no @param for useLocal, warning: no description for
@param etc. FSInputChecker.java warning: no @return, warning: no @param for
checksum etc. FSLinkResolver.java warning: no description for @throws
FSOutputSummer.java warning: no description for @param, warning: no
description for @throws etc FsShell.java warning: no description for @throws
FsStatus.java warning: no @return,warning: no @param for remaining
GlobalStorageStatistics.java warning: no @return GlobExpander.java warning:
no description for @throws HardLink.java warning: no @throws for
java.io.IOException, warning: no @return etc. HarFileSystem.java warning: no
description for @throws
---
.../org/apache/hadoop/fs/FSInputChecker.java | 10 ++-
.../org/apache/hadoop/fs/FSLinkResolver.java | 2 +-
.../org/apache/hadoop/fs/FSOutputSummer.java | 8 ++
.../java/org/apache/hadoop/fs/FileUtil.java | 86 +++++++++++++++----
.../java/org/apache/hadoop/fs/FsShell.java | 2 +-
.../java/org/apache/hadoop/fs/FsStatus.java | 23 ++++-
.../org/apache/hadoop/fs/GlobExpander.java | 4 +-
.../hadoop/fs/GlobalStorageStatistics.java | 2 +
.../org/apache/hadoop/fs/HarFileSystem.java | 8 +-
.../java/org/apache/hadoop/fs/HardLink.java | 6 ++
.../org/apache/hadoop/fs/shell/Command.java | 24 ++++--
11 files changed, 139 insertions(+), 36 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
index de66eab713ab6..459114e89cc85 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
@@ -82,6 +82,7 @@ protected FSInputChecker( Path file, int numOfRetries) {
* @param sum the type of Checksum engine
* @param chunkSize maximun chunk size
* @param checksumSize the number byte of each checksum
+ * @param verifyChecksum verify check sum
*/
protected FSInputChecker( Path file, int numOfRetries,
boolean verifyChecksum, Checksum sum, int chunkSize, int checksumSize ) {
@@ -118,6 +119,7 @@ protected FSInputChecker( Path file, int numOfRetries,
* @param len maximum number of bytes to read
* @param checksum the data buffer into which to write checksums
* @return number of bytes read
+ * @throws IOException raised on errors performing I/O.
*/
abstract protected int readChunk(long pos, byte[] buf, int offset, int len,
byte[] checksum) throws IOException;
@@ -129,7 +131,10 @@ abstract protected int readChunk(long pos, byte[] buf, int offset, int len,
*/
abstract protected long getChunkPosition(long pos);
- /** Return true if there is a need for checksum verification */
+ /**
+ * Return true if there is a need for checksum verification.
+ * @return true if checksum verification is needed, false otherwise
+ */
protected synchronized boolean needChecksum() {
return verifyChecksum && sum != null;
}
@@ -357,6 +362,9 @@ private void verifySums(final byte b[], final int off, int read)
* Convert a checksum byte array to a long
* This is deprecated since 0.22 since it is no longer in use
* by this class.
+ *
+ * @param checksum check sum
+ * @return crc
*/
@Deprecated
static public long checksum2long(byte[] checksum) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
index ffe4b34ca5fdb..f85cf7a858152 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
@@ -74,7 +74,7 @@ abstract public T next(final AbstractFileSystem fs, final Path p)
* @param fc FileContext used to access file systems.
* @param path The path to resolve symlinks on.
* @return Generic type determined by the implementation of next.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public T resolve(final FileContext fc, final Path path) throws IOException {
int count = 0;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
index 6de026b9d17c0..cf819fe1e1e41 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
@@ -186,6 +186,8 @@ public void flush() throws IOException {
/**
* Return the number of valid bytes currently in the buffer.
+ *
+ * @return buffer data size
*/
protected synchronized int getBufferedDataSize() {
return count;
@@ -227,6 +229,10 @@ private void writeChecksumChunks(byte b[], int off, int len)
/**
* Converts a checksum integer value to a byte stream
+ *
+ * @param sum check sum
+ * @param checksumSize check sum size
+ * @return byte stream
*/
static public byte[] convertToByteStream(Checksum sum, int checksumSize) {
return int2byte((int)sum.getValue(), new byte[checksumSize]);
@@ -245,6 +251,8 @@ static byte[] int2byte(int integer, byte[] bytes) {
/**
* Resets existing buffer with a new one of the specified size.
+ *
+ * @param size size
*/
protected synchronized void setChecksumBufSize(int size) {
this.buf = new byte[size];
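For reference, a minimal sketch of convertToByteStream with a CRC32 checksum (not part of the patch; FSOutputSummer is an internal class, so treat this as illustration only):

    import java.nio.charset.StandardCharsets;
    import java.util.zip.CRC32;
    import org.apache.hadoop.fs.FSOutputSummer;

    public class SummerDemo {
      public static void main(String[] args) {
        CRC32 crc = new CRC32();
        crc.update("hello".getBytes(StandardCharsets.UTF_8));
        // A CRC32 value fits in 4 bytes; the result is the checksum
        // rendered most-significant byte first.
        byte[] stream = FSOutputSummer.convertToByteStream(crc, 4);
        for (byte b : stream) {
          System.out.printf("%02x", b);
        }
        System.out.println();
      }
    }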
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index 7400ca36daa5c..96f5298c366d1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -162,6 +162,8 @@ public static void fullyDeleteOnExit(final File file) {
* (3) If dir is a normal file, it is deleted.
* (4) If dir is a normal directory, then dir and all its contents recursively
* are deleted.
+ * @param dir the file or directory to delete
+ * @return true on success, false otherwise
*/
public static boolean fullyDelete(final File dir) {
return fullyDelete(dir, false);
@@ -257,6 +259,9 @@ private static boolean deleteImpl(final File f, final boolean doLog) {
* we return false, the directory may be partially-deleted.
* If dir is a symlink to a directory, all the contents of the actual
* directory pointed to by dir will be deleted.
+ *
+ * @param dir the directory whose contents will be deleted
+ * @return true on success, false otherwise
*/
public static boolean fullyDeleteContents(final File dir) {
return fullyDeleteContents(dir, false);
@@ -267,8 +272,11 @@ public static boolean fullyDeleteContents(final File dir) {
* we return false, the directory may be partially-deleted.
* If dir is a symlink to a directory, all the contents of the actual
* directory pointed to by dir will be deleted.
+ *
+ * @param dir the directory whose contents will be deleted
* @param tryGrantPermissions if 'true', try grant +rwx permissions to this
* and all the underlying directories before trying to delete their contents.
+ * @return true on success, false otherwise
*/
public static boolean fullyDeleteContents(final File dir, final boolean tryGrantPermissions) {
if (tryGrantPermissions) {
@@ -311,7 +319,7 @@ public static boolean fullyDeleteContents(final File dir, final boolean tryGrant
*
* @param fs {@link FileSystem} on which the path is present
* @param dir directory to recursively delete
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use {@link FileSystem#delete(Path, boolean)}
*/
@Deprecated
@@ -343,7 +351,17 @@ private static void checkDependencies(FileSystem srcFS,
}
}
- /** Copy files between FileSystems. */
+ /**
+ * Copy files between FileSystems.
+ * @param srcFS the source FileSystem
+ * @param src the source path
+ * @param dstFS the destination FileSystem
+ * @param dst the destination path
+ * @param deleteSource whether to delete the source after copying
+ * @param conf configuration
+ * @return true if the copy succeeded, false otherwise
+ * @throws IOException raised on errors performing I/O.
+ */
public static boolean copy(FileSystem srcFS, Path src,
FileSystem dstFS, Path dst,
boolean deleteSource,
@@ -391,7 +409,19 @@ public static boolean copy(FileSystem srcFS, Path[] srcs,
return returnVal;
}
- /** Copy files between FileSystems. */
+ /**
+ * Copy files between FileSystems.
+ *
+ * @param srcFS the source FileSystem
+ * @param src the source path
+ * @param dstFS the destination FileSystem
+ * @param dst the destination path
+ * @param deleteSource whether to delete the source after copying
+ * @param overwrite whether to overwrite an existing destination file
+ * @param conf configuration
+ * @return true if the copy succeeded, false otherwise
+ * @throws IOException raised on errors performing I/O.
+ */
public static boolean copy(FileSystem srcFS, Path src,
FileSystem dstFS, Path dst,
boolean deleteSource,
@@ -403,17 +433,17 @@ public static boolean copy(FileSystem srcFS, Path src,
/**
* Copy a file/directory tree within/between filesystems.
- *
+ *
* returns true if the operation succeeded. When deleteSource is true,
* this means "after the copy, delete(source) returned true"
* If the destination is a directory, and mkdirs (dest) fails,
* the operation will return false rather than raise any exception.
- *
+ *
* The overwrite flag is about overwriting files; it has no effect about
* handing an attempt to copy a file atop a directory (expect an IOException),
* or a directory over a path which contains a file (mkdir will fail, so
* "false").
- *
+ *
* The operation is recursive, and the deleteSource operation takes place
* as each subdirectory is copied. Therefore, if an operation fails partway
* through, the source tree may be partially deleted.
@@ -471,7 +501,18 @@ public static boolean copy(FileSystem srcFS, FileStatus srcStatus,
}
- /** Copy local files to a FileSystem. */
+ /**
+ * Copy local files to a FileSystem.
+ *
+ * @param src the source local file
+ * @param dstFS the destination FileSystem
+ * @param dst the destination path
+ * @param deleteSource whether to delete the source after copying
+ * @param conf configuration
+ * @return true if the operation succeeded
+ * @throws IOException raised on errors performing I/O.
+ */
public static boolean copy(File src,
FileSystem dstFS, Path dst,
boolean deleteSource,
@@ -514,7 +555,17 @@ public static boolean copy(File src,
}
}
- /** Copy FileSystem files to local files. */
+ /**
+ * Copy FileSystem files to local files.
+ *
+ * @param srcFS the source FileSystem
+ * @param src the source path
+ * @param dst the destination local file
+ * @param deleteSource whether to delete the source after copying
+ * @param conf configuration
+ * @return true if the operation succeeded
+ * @throws IOException raised on errors performing I/O.
+ */
public static boolean copy(FileSystem srcFS, Path src,
File dst, boolean deleteSource,
Configuration conf) throws IOException {
@@ -958,7 +1009,7 @@ public static void unTar(InputStream inputStream, File untarDir,
*
* @param inFile The tar file as input.
* @param untarDir The untar directory where to untar the tar file.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void unTar(File inFile, File untarDir) throws IOException {
if (!untarDir.mkdirs()) {
@@ -1169,6 +1220,7 @@ public static class HardLink extends org.apache.hadoop.fs.HardLink {
* @param target the target for symlink
* @param linkname the symlink
* @return 0 on success
+ * @throws IOException raised on errors performing I/O.
*/
public static int symLink(String target, String linkname) throws IOException{
@@ -1230,8 +1282,8 @@ public static int symLink(String target, String linkname) throws IOException{
* @param filename the name of the file to change
* @param perm the permission string
* @return the exit code from the command
- * @throws IOException
- * @throws InterruptedException
+ * @throws IOException raised on errors performing I/O.
+ * @throws InterruptedException command interrupted
*/
public static int chmod(String filename, String perm
) throws IOException, InterruptedException {
@@ -1245,7 +1297,7 @@ public static int chmod(String filename, String perm
* @param perm permission string
* @param recursive true, if permissions should be changed recursively
* @return the exit code from the command.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static int chmod(String filename, String perm, boolean recursive)
throws IOException {
@@ -1271,7 +1323,7 @@ public static int chmod(String filename, String perm, boolean recursive)
* @param file the file to change
* @param username the new user owner name
* @param groupname the new group owner name
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void setOwner(File file, String username,
String groupname) throws IOException {
@@ -1288,7 +1340,7 @@ public static void setOwner(File file, String username,
* Platform independent implementation for {@link File#setReadable(boolean)}
* File#setReadable does not work as expected on Windows.
* @param f input file
- * @param readable
+ * @param readable readable
* @return true on success, false otherwise
*/
public static boolean setReadable(File f, boolean readable) {
@@ -1309,7 +1361,7 @@ public static boolean setReadable(File f, boolean readable) {
* Platform independent implementation for {@link File#setWritable(boolean)}
* File#setWritable does not work as expected on Windows.
* @param f input file
- * @param writable
+ * @param writable writable
* @return true on success, false otherwise
*/
public static boolean setWritable(File f, boolean writable) {
@@ -1333,7 +1385,7 @@ public static boolean setWritable(File f, boolean writable) {
* behavior on Windows as on Unix platforms. Creating, deleting or renaming
* a file within that folder will still succeed on Windows.
* @param f input file
- * @param executable
+ * @param executable executable
* @return true on success, false otherwise
*/
public static boolean setExecutable(File f, boolean executable) {
@@ -1412,7 +1464,7 @@ public static boolean canExecute(File f) {
* of forking if group == other.
* @param f the file to change
* @param permission the new permissions
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void setPermission(File f, FsPermission permission
) throws IOException {
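For reference, a minimal sketch of the copy/fullyDelete semantics documented above (not part of the patch; paths are illustrative and assume a local filesystem):

    import java.io.File;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.fs.Path;

    public class FileUtilDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        // Returns true on success; deleteSource=false keeps the source tree.
        boolean copied = FileUtil.copy(fs, new Path("/tmp/src"),
            fs, new Path("/tmp/dst"), false, conf);
        System.out.println("copied: " + copied);
        // false may mean a partial delete, per the javadoc.
        boolean deleted = FileUtil.fullyDelete(new File("/tmp/dst"));
        System.out.println("deleted: " + deleted);
      }
    }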
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
index 7275b70227f99..73258661ec191 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
@@ -130,7 +130,7 @@ public Path getCurrentTrashDir() throws IOException {
* Returns the current trash location for the path specified
* @param path to be deleted
* @return path to the trash
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Path getCurrentTrashDir(Path path) throws IOException {
return getTrash().getCurrentTrashDir(path);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
index d392c7d765d72..dafb66f2edcba 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
@@ -35,24 +35,39 @@ public class FsStatus implements Writable {
private long used;
private long remaining;
- /** Construct a FsStatus object, using the specified statistics */
+ /**
+ * Construct a FsStatus object, using the specified statistics.
+ *
+ * @param capacity capacity
+ * @param used used
+ * @param remaining remaining
+ */
public FsStatus(long capacity, long used, long remaining) {
this.capacity = capacity;
this.used = used;
this.remaining = remaining;
}
- /** Return the capacity in bytes of the file system */
+ /**
+ * Return the capacity in bytes of the file system.
+ * @return capacity
+ */
public long getCapacity() {
return capacity;
}
- /** Return the number of bytes used on the file system */
+ /**
+ * Return the number of bytes used on the file system.
+ * @return used
+ */
public long getUsed() {
return used;
}
- /** Return the number of remaining bytes on the file system */
+ /**
+ * Return the number of remaining bytes on the file system.
+ * @return remaining
+ */
public long getRemaining() {
return remaining;
}
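For reference, a minimal sketch reading the three counters FsStatus exposes (not part of the patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FsStatus;

    public class FsStatusDemo {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        FsStatus status = fs.getStatus();
        System.out.println("capacity : " + status.getCapacity());
        System.out.println("used     : " + status.getUsed());
        System.out.println("remaining: " + status.getRemaining());
      }
    }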
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java
index cb430ed3f6251..efa10b1805895 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java
@@ -56,9 +56,9 @@ public StringWithOffset(String string, int offset) {
* {a,b}/{c/\d} - {a,b}/c/d
*
*
- * @param filePattern
+ * @param filePattern file pattern
* @return expanded file patterns
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static List<String> expand(String filePattern) throws IOException {
List<String> fullyExpanded = new ArrayList<String>();
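For reference, a minimal sketch of what expand() does with brace alternation (not part of the patch; GlobExpander is package-private, so this assumes code living in org.apache.hadoop.fs):

    package org.apache.hadoop.fs;

    import java.util.List;

    public class GlobExpanderDemo {
      public static void main(String[] args) throws Exception {
        // "{a,b}/c" expands to the flat patterns "a/c" and "b/c".
        List<String> expanded = GlobExpander.expand("{a,b}/c");
        for (String p : expanded) {
          System.out.println(p);
        }
      }
    }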
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java
index 30ce07a422e6e..9509f8436ab03 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java
@@ -104,6 +104,8 @@ public synchronized void reset() {
/**
* Get an iterator that we can use to iterate throw all the global storage
* statistics objects.
+ *
+ * @return StorageStatistics Iterator
*/
synchronized public Iterator<StorageStatistics> iterator() {
Entry first = map.firstEntry();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
index 7e12d0a11e953..1d64b0bcbe921 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
@@ -463,7 +463,7 @@ static BlockLocation[] fixBlockLocations(BlockLocation[] locations,
* @param start the start of the desired range in the contained file
* @param len the length of the desired range
* @return block locations for this segment of file
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
@@ -525,7 +525,7 @@ private void fileStatusesInIndex(HarStatus parent, List<FileStatus> statuses)
* Combine the status stored in the index and the underlying status.
* @param h status stored in the index
* @return the combined file status
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
private FileStatus toFileStatus(HarStatus h) throws IOException {
final Path p = h.isDir ? archivePath : new Path(archivePath, h.partName);
@@ -635,7 +635,7 @@ public long getModificationTime() {
* while creating a hadoop archive.
* @param f the path in har filesystem
* @return filestatus.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public FileStatus getFileStatus(Path f) throws IOException {
@@ -1104,7 +1104,7 @@ public void setDropBehind(Boolean dropBehind) throws IOException {
* @param start the start position in the part file
* @param length the length of valid data in the part file
* @param bufsize the buffer size
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public HarFSDataInputStream(FileSystem fs, Path p, long start,
long length, int bufsize) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
index 855fbb04e59b4..6cc8f9ef1b5f7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
@@ -156,6 +156,7 @@ String[] linkCount(File file) throws IOException {
* Creates a hardlink.
* @param file - existing source file
* @param linkName - desired target link file
+ * @throws IOException raised on errors performing I/O.
*/
public static void createHardLink(File file, File linkName)
throws IOException {
@@ -177,6 +178,7 @@ public static void createHardLink(File file, File linkName)
* @param fileBaseNames - list of path-less file names, as returned by
* parentDir.list()
* @param linkDir - where the hardlinks should be put. It must already exist.
+ * @throws IOException raised on errors performing I/O.
*/
public static void createHardLinkMult(File parentDir, String[] fileBaseNames,
File linkDir) throws IOException {
@@ -204,6 +206,10 @@ public static void createHardLinkMult(File parentDir, String[] fileBaseNames,
/**
* Retrieves the number of links to the specified file.
+ *
+ * @param fileName file name
+ * @throws IOException raised on errors performing I/O.
+ * @return link count
*/
public static int getLinkCount(File fileName) throws IOException {
if (fileName == null) {
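For reference, a minimal sketch of createHardLink and getLinkCount (not part of the patch; file names are made up and the source file must already exist):

    import java.io.File;
    import org.apache.hadoop.fs.HardLink;

    public class HardLinkDemo {
      public static void main(String[] args) throws Exception {
        File source = new File("/tmp/source.dat");
        File link = new File("/tmp/link.dat");
        HardLink.createHardLink(source, link);
        // Both names now reference the same inode, so the count is >= 2.
        System.out.println("links: " + HardLink.getLinkCount(source));
      }
    }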
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
index 0bdb47730a929..d2728374f5011 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
@@ -77,7 +77,11 @@ protected Command() {
err = System.err;
}
- /** Constructor */
+ /**
+ * Constructor.
+ *
+ * @param conf configuration
+ */
protected Command(Configuration conf) {
super(conf);
}
@@ -109,7 +113,7 @@ protected int getDepth() {
* Execute the command on the input path data. Commands can override to make
* use of the resolved filesystem.
* @param pathData The input path with resolved filesystem
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected void run(PathData pathData) throws IOException {
run(pathData.path);
@@ -136,11 +140,19 @@ public int runAll() {
return exitCode;
}
- /** sets the command factory for later use */
+ /**
+ * Sets the command factory for later use.
+ * @param factory factory
+ */
public void setCommandFactory(CommandFactory factory) {
this.commandFactory = factory;
}
- /** retrieves the command factory */
+
+ /**
+ * Retrieves the command factory.
+ *
+ * @return command factory
+ */
protected CommandFactory getCommandFactory() {
return this.commandFactory;
}
@@ -201,7 +213,7 @@ public int run(String...argv) {
* IllegalArgumentException is thrown, the FsShell object will print the
* short usage of the command.
* @param args the command line arguments
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected void processOptions(LinkedList<String> args) throws IOException {}
@@ -211,7 +223,7 @@ protected void processOptions(LinkedList args) throws IOException {}
* {@link #expandArguments(LinkedList)} and pass the resulting list to
* {@link #processArguments(LinkedList)}
* @param args the list of argument strings
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected void processRawArguments(LinkedList<String> args)
throws IOException {
From 9a6ae6981bed034be8b96b13b52555328734ed57 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Fri, 13 May 2022 04:57:22 -0700
Subject: [PATCH 32/53] HADOOP-18229. Fix some java doc compilation errors.
LocalDirAllocator.java no description for @param, LocalFileSystem.java
warning: no @return, warning: no @param for path,
MD5MD5CRC32CastagnoliFileChecksum.java warning: no @param for md5,warning: no
@param for crcPerBlock, MD5MD5CRC32FileChecksum.java warning: no @return,
MD5MD5CRC32GzipFileChecksum.java warning: no @param for md5,
MultipartUploaderBuilder.java warning: no @return, Options.java no
description for @throws, RawLocalFileSystem.java warning: no description for
@throws, Stat.java warning: no description for @return, Trash.java warning:
no @throws for java.io.IOException, warning: no @return
---
.../apache/hadoop/fs/LocalDirAllocator.java | 31 +++++++-------
.../org/apache/hadoop/fs/LocalFileSystem.java | 6 ++-
.../fs/MD5MD5CRC32CastagnoliFileChecksum.java | 8 +++-
.../hadoop/fs/MD5MD5CRC32FileChecksum.java | 13 +++++-
.../fs/MD5MD5CRC32GzipFileChecksum.java | 8 +++-
.../hadoop/fs/MultipartUploaderBuilder.java | 15 +++++++
.../java/org/apache/hadoop/fs/Options.java | 7 +++-
.../apache/hadoop/fs/RawLocalFileSystem.java | 7 +++-
.../main/java/org/apache/hadoop/fs/Stat.java | 4 +-
.../main/java/org/apache/hadoop/fs/Trash.java | 41 ++++++++++++++++---
10 files changed, 111 insertions(+), 29 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
index 5f266a7b82555..e9a011154a449 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
@@ -78,8 +78,9 @@ public class LocalDirAllocator {
private final DiskValidator diskValidator;
- /**Create an allocator object
- * @param contextCfgItemName
+ /**
+ * Create an allocator object.
+ * @param contextCfgItemName name of the configuration item from which the list of local directories is read
*/
public LocalDirAllocator(String contextCfgItemName) {
this.contextCfgItemName = contextCfgItemName;
@@ -123,7 +124,7 @@ private AllocatorPerContext obtainContext(String contextCfgItemName) {
* available disk)
* @param conf the Configuration object
* @return the complete path to the file on a local disk
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Path getLocalPathForWrite(String pathStr,
Configuration conf) throws IOException {
@@ -139,7 +140,7 @@ public Path getLocalPathForWrite(String pathStr,
* @param size the size of the file that is going to be written
* @param conf the Configuration object
* @return the complete path to the file on a local disk
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Path getLocalPathForWrite(String pathStr, long size,
Configuration conf) throws IOException {
@@ -156,7 +157,7 @@ public Path getLocalPathForWrite(String pathStr, long size,
* @param conf the Configuration object
* @param checkWrite ensure that the path is writable
* @return the complete path to the file on a local disk
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Path getLocalPathForWrite(String pathStr, long size,
Configuration conf,
@@ -171,7 +172,7 @@ public Path getLocalPathForWrite(String pathStr, long size,
* @param pathStr the requested file (this will be searched)
* @param conf the Configuration object
* @return the complete path to the file on a local disk
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Path getLocalPathToRead(String pathStr,
Configuration conf) throws IOException {
@@ -184,7 +185,7 @@ public Path getLocalPathToRead(String pathStr,
* @param pathStr the path underneath the roots
* @param conf the configuration to look up the roots in
* @return all of the paths that exist under any of the roots
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Iterable<Path> getAllLocalPathsToRead(String pathStr,
Configuration conf
@@ -205,7 +206,7 @@ public Iterable getAllLocalPathsToRead(String pathStr,
* @param size the size of the file that is going to be written
* @param conf the Configuration object
* @return a unique temporary file
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public File createTmpFileForWrite(String pathStr, long size,
Configuration conf) throws IOException {
@@ -213,8 +214,9 @@ public File createTmpFileForWrite(String pathStr, long size,
return context.createTmpFileForWrite(pathStr, size, conf);
}
- /** Method to check whether a context is valid
- * @param contextCfgItemName
+ /**
+ * Method to check whether a context is valid.
+ * @param contextCfgItemName name of the configuration item to check
* @return true/false
*/
public static boolean isContextValid(String contextCfgItemName) {
@@ -224,9 +226,9 @@ public static boolean isContextValid(String contextCfgItemName) {
}
/**
- * Removes the context from the context config items
+ * Removes the context from the context config items.
*
- * @param contextCfgItemName
+ * @param contextCfgItemName name of the configuration item whose context is removed
*/
@Deprecated
@InterfaceAudience.LimitedPrivate({"MapReduce"})
@@ -236,8 +238,9 @@ public static void removeContext(String contextCfgItemName) {
}
}
- /** We search through all the configured dirs for the file's existence
- * and return true when we find
+ /**
+ * We search through all the configured dirs for the file's existence
+ * and return true when we find it.
* @param pathStr the requested file (this will be searched)
* @param conf the Configuration object
* @return true if files exist. false otherwise
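For reference, a minimal sketch of the allocator round trip (not part of the patch; the config key demo.local.dirs and the directories are made up):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.LocalDirAllocator;
    import org.apache.hadoop.fs.Path;

    public class AllocatorDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // The context config item names a comma-separated list of local dirs.
        conf.set("demo.local.dirs", "/tmp/demo1,/tmp/demo2");
        LocalDirAllocator allocator = new LocalDirAllocator("demo.local.dirs");
        // Picks a directory with enough free space for the expected size.
        Path p = allocator.getLocalPathForWrite("scratch/part-0", 1024, conf);
        System.out.println("allocated: " + p);
      }
    }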
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
index c41190a7b360b..38cefaa663155 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
@@ -71,7 +71,11 @@ public LocalFileSystem(FileSystem rawLocalFileSystem) {
super(rawLocalFileSystem);
}
- /** Convert a path to a File. */
+ /**
+ * Convert a path to a File.
+ * @param path the path
+ * @return file
+ */
public File pathToFile(Path path) {
return ((RawLocalFileSystem)fs).pathToFile(path);
}
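
A short usage sketch of the conversion documented above; the path is illustrative:

    import java.io.File;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.LocalFileSystem;
    import org.apache.hadoop.fs.Path;

    public class PathToFileSketch {
      public static void main(String[] args) throws Exception {
        LocalFileSystem local = FileSystem.getLocal(new Configuration());
        // Convert a Hadoop Path on the local filesystem to a java.io.File.
        File f = local.pathToFile(new Path("/tmp/example.txt"));
        System.out.println(f.getAbsolutePath());
      }
    }
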
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java
index 5a4a6a97cc4f7..bff8eed214c56 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java
@@ -28,7 +28,13 @@ public MD5MD5CRC32CastagnoliFileChecksum() {
this(0, 0, null);
}
- /** Create a MD5FileChecksum */
+ /**
+ * Create an MD5FileChecksum.
+ *
+ * @param bytesPerCRC bytesPerCRC
+ * @param crcPerBlock crcPerBlock
+ * @param md5 md5
+ */
public MD5MD5CRC32CastagnoliFileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) {
super(bytesPerCRC, crcPerBlock, md5);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
index 3fdb7e982621c..604f71c8f7c1e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
@@ -44,7 +44,13 @@ public MD5MD5CRC32FileChecksum() {
this(0, 0, null);
}
- /** Create a MD5FileChecksum */
+ /**
+ * Create an MD5FileChecksum.
+ *
+ * @param bytesPerCRC bytesPerCRC
+ * @param crcPerBlock crcPerBlock
+ * @param md5 md5
+ */
public MD5MD5CRC32FileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) {
this.bytesPerCRC = bytesPerCRC;
this.crcPerBlock = crcPerBlock;
@@ -76,7 +82,10 @@ public byte[] getBytes() {
return WritableUtils.toByteArray(this);
}
- /** returns the CRC type */
+ /**
+ * Returns the CRC type.
+ * @return the data checksum type.
+ */
public DataChecksum.Type getCrcType() {
// default to the one that is understood by all releases.
return DataChecksum.Type.CRC32;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java
index 5164d0200d28d..a23baf4e11b43 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java
@@ -28,7 +28,13 @@ public MD5MD5CRC32GzipFileChecksum() {
this(0, 0, null);
}
- /** Create a MD5FileChecksum */
+ /**
+ * Create an MD5FileChecksum.
+ *
+ * @param bytesPerCRC bytesPerCRC
+ * @param crcPerBlock crcPerBlock
+ * @param md5 md5
+ */
public MD5MD5CRC32GzipFileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) {
super(bytesPerCRC, crcPerBlock, md5);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java
index 44d9fb7a65218..7c24f6695d0e8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java
@@ -33,26 +33,35 @@ public interface MultipartUploaderBuilder
Date: Fri, 13 May 2022 05:57:39 -0700
Subject: [PATCH 33/53] HADOOP-18229. Fix some java doc compilation errors.
AbstractMultipartUploader.java warning: no description for @throws etc,
AclStatus.java warning: no @return etc, FsAction.java warning: no @return
etc, FsCreateModes.java warning: no @return etc, FsPermission.java warning:
no @return etc, FutureDataInputStreamBuilderImpl.java warning: no @return
etc, FutureIOSupport.java warning: no @return etc,
MultipartUploaderBuilderImpl.java warning: no @param for p etc,
TrashPolicy.java warning: no @throws for java.io.IOException etc,
XAttrCodec.java warning: no @throws for java.io.IOException etc.
---
.../org/apache/hadoop/fs/TrashPolicy.java | 15 ++++-
.../java/org/apache/hadoop/fs/XAttrCodec.java | 6 +-
.../fs/impl/AbstractMultipartUploader.java | 2 +-
.../FutureDataInputStreamBuilderImpl.java | 5 ++
.../hadoop/fs/impl/FutureIOSupport.java | 2 +
.../fs/impl/MultipartUploaderBuilderImpl.java | 3 +
.../hadoop/fs/permission/AclStatus.java | 4 +-
.../apache/hadoop/fs/permission/FsAction.java | 20 +++++--
.../hadoop/fs/permission/FsCreateModes.java | 9 ++-
.../hadoop/fs/permission/FsPermission.java | 57 ++++++++++++++++---
10 files changed, 104 insertions(+), 19 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
index 64fb81be99ee3..b8b67c9e8b2ff 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
@@ -60,27 +60,33 @@ public void initialize(Configuration conf, FileSystem fs) {
/**
* Returns whether the Trash Policy is enabled for this filesystem.
+ *
+ * @return true if trash is enabled, false otherwise.
*/
public abstract boolean isEnabled();
/**
* Move a file or directory to the current trash directory.
* @return false if the item is already in the trash or trash is disabled
+ * @throws IOException raised on errors performing I/O.
*/
public abstract boolean moveToTrash(Path path) throws IOException;
/**
- * Create a trash checkpoint.
+ * Create a trash checkpoint.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void createCheckpoint() throws IOException;
/**
* Delete old trash checkpoint(s).
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void deleteCheckpoint() throws IOException;
/**
* Delete all checkpoints immediately, ie empty trash.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void deleteCheckpointsImmediately() throws IOException;
@@ -94,6 +100,8 @@ public void initialize(Configuration conf, FileSystem fs) {
* TrashPolicy#getCurrentTrashDir(Path path).
* It returns the trash location correctly for the specified path,
* whether or not the path is in an encryption zone.
+ *
+ * @return the current trash directory.
*/
public abstract Path getCurrentTrashDir();
@@ -102,7 +110,7 @@ public void initialize(Configuration conf, FileSystem fs) {
* Policy
* @param path path to be deleted
* @return current trash directory for the path to be deleted
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Path getCurrentTrashDir(Path path) throws IOException {
throw new UnsupportedOperationException();
@@ -111,6 +119,9 @@ public Path getCurrentTrashDir(Path path) throws IOException {
/**
* Return a {@link Runnable} that periodically empties the trash of all
* users, intended to be run by the superuser.
+ *
+ * @throws IOException raised on errors performing I/O.
+ * @return Runnable
*/
public abstract Runnable getEmptier() throws IOException;
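
To make the contract above concrete, a minimal sketch of obtaining and using a TrashPolicy; the interval value and path are made up:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.TrashPolicy;

    public class TrashPolicySketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.setLong("fs.trash.interval", 60);  // enable trash, 60-minute interval

        FileSystem fs = FileSystem.getLocal(conf);
        TrashPolicy policy = TrashPolicy.getInstance(conf, fs);
        if (policy.isEnabled()) {
          // false if the item is already in the trash or trash is disabled.
          boolean moved = policy.moveToTrash(new Path("/tmp/obsolete.txt"));
          System.out.println("moved=" + moved
              + ", trash dir=" + policy.getCurrentTrashDir());
        }
      }
    }
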
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
index 3d65275e673d6..de1a5322e1ee0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
@@ -67,7 +67,7 @@ public enum XAttrCodec {
* the given string is treated as text.
* @param value string representation of the value.
* @return byte[] the value
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static byte[] decodeValue(String value) throws IOException {
byte[] result = null;
@@ -102,9 +102,9 @@ public static byte[] decodeValue(String value) throws IOException {
* while strings encoded as hexadecimal and base64 are prefixed with
* 0x and 0s, respectively.
* @param value byte[] value
- * @param encoding
+ * @param encoding encoding
* @return String string representation of value
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static String encodeValue(byte[] value, XAttrCodec encoding)
throws IOException {
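
A small round-trip sketch of the two methods documented above; the input value is arbitrary:

    import org.apache.hadoop.fs.XAttrCodec;

    public class XAttrCodecSketch {
      public static void main(String[] args) throws Exception {
        // A "0x" prefix selects hex decoding; plain strings are treated as text.
        byte[] raw = XAttrCodec.decodeValue("0x4869");                       // {0x48, 0x69}
        System.out.println(new String(raw, "UTF-8"));                       // Hi
        System.out.println(XAttrCodec.encodeValue(raw, XAttrCodec.HEX));    // 0x-prefixed hex
        System.out.println(XAttrCodec.encodeValue(raw, XAttrCodec.BASE64)); // 0s-prefixed base64
      }
    }
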
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java
index 416924e18d87c..f9ae9f55cc17f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java
@@ -127,7 +127,7 @@ protected void checkPutArguments(Path filePath,
* {@inheritDoc}.
* @param path path to abort uploads under.
* @return a future to -1.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public CompletableFuture<Integer> abortUploadsUnderPath(Path path)
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java
index 70e39de7388c3..cbeb06a60c0eb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java
@@ -126,6 +126,9 @@ protected int getBufferSize() {
/**
* Set the size of the buffer to be used.
+ *
+ * @param bufSize buffer size
+ * @return FutureDataInputStreamBuilder
*/
public FutureDataInputStreamBuilder bufferSize(int bufSize) {
bufferSize = bufSize;
@@ -137,6 +140,8 @@ public FutureDataInputStreamBuilder bufferSize(int bufSize) {
* This must be used after the constructor has been invoked to create
* the actual builder: it allows for subclasses to do things after
* construction.
+ *
+ * @return FutureDataInputStreamBuilder
*/
public FutureDataInputStreamBuilder builder() {
return getThisBuilder();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
index f47e5f4fbfbd6..6b1fea7351a26 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
@@ -75,6 +75,8 @@ public static <T> T awaitFuture(final Future<T> future)
* See {@link FutureIO#awaitFuture(Future, long, TimeUnit)}.
* @param future future to evaluate
* @param <T> type of the result.
+ * @param timeout timeout
+ * @param unit unit
* @return the result, if all went well.
* @throws InterruptedIOException future was interrupted
* @throws IOException if something went wrong
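
A hedged sketch of the timeout overload, written against the FutureIO entry point this javadoc references; the future and timeout values are illustrative:

    import java.io.IOException;
    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;
    import org.apache.hadoop.util.functional.FutureIO;

    public class AwaitFutureSketch {
      public static void main(String[] args) throws IOException, TimeoutException {
        CompletableFuture<String> future =
            CompletableFuture.supplyAsync(() -> "done");
        // Blocks for up to 30 seconds; raises TimeoutException on expiry and
        // unwraps execution failures into IOExceptions where possible.
        String result = FutureIO.awaitFuture(future, 30, TimeUnit.SECONDS);
        System.out.println(result);
      }
    }
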
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java
index 5584e647849f5..c704cb116c5d6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java
@@ -88,6 +88,9 @@ protected MultipartUploaderBuilderImpl(@Nonnull FileContext fc,
/**
* Constructor.
+ *
+ * @param fileSystem fileSystem
+ * @param p path
*/
protected MultipartUploaderBuilderImpl(@Nonnull FileSystem fileSystem,
@Nonnull Path p) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
index 25b9ba659048a..ab273b305543b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
@@ -185,7 +185,8 @@ public Builder stickyBit(boolean stickyBit) {
/**
* Sets the permission for the file.
- * @param permission
+ * @param permission permission
+ * @return Builder
*/
public Builder setPermission(FsPermission permission) {
this.permission = permission;
@@ -224,6 +225,7 @@ private AclStatus(String owner, String group, boolean stickyBit,
/**
* Get the effective permission for the AclEntry
* @param entry AclEntry to get the effective action
+ * @return FsAction
*/
public FsAction getEffectivePermission(AclEntry entry) {
return getEffectivePermission(entry, permission);
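
A minimal builder sketch for the setPermission call documented above; the owner and group names are made up:

    import org.apache.hadoop.fs.permission.AclStatus;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class AclStatusSketch {
      public static void main(String[] args) {
        AclStatus status = new AclStatus.Builder()
            .owner("hdfs")                              // hypothetical principal
            .group("supergroup")                        // hypothetical group
            .stickyBit(false)
            .setPermission(new FsPermission((short) 0755))
            .build();
        System.out.println(status);
      }
    }
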
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
index 97dcf816c16ad..7e328d2c31450 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
@@ -48,7 +48,8 @@ private FsAction(String s) {
/**
* Return true if this action implies that action.
- * @param that
+ * @param that FsAction that
+ * @return true if this action implies that action, false otherwise.
*/
public boolean implies(FsAction that) {
if (that != null) {
@@ -57,15 +58,26 @@ public boolean implies(FsAction that) {
return false;
}
- /** AND operation. */
+ /**
+ * AND operation.
+ * @param that FsAction that
+ * @return FsAction
+ */
public FsAction and(FsAction that) {
return vals[ordinal() & that.ordinal()];
}
- /** OR operation. */
+ /**
+ * OR operation.
+ * @param that FsAction that
+ * @return FsAction
+ */
public FsAction or(FsAction that) {
return vals[ordinal() | that.ordinal()];
}
- /** NOT operation. */
+ /**
+ * NOT operation.
+ * @return FsAction
+ */
public FsAction not() {
return vals[7 - ordinal()];
}
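
The four operations above compose as plain bit arithmetic on the permission ordinals; a small sketch:

    import org.apache.hadoop.fs.permission.FsAction;

    public class FsActionSketch {
      public static void main(String[] args) {
        FsAction rw = FsAction.READ.or(FsAction.WRITE);  // READ_WRITE
        System.out.println(rw.implies(FsAction.READ));   // true
        System.out.println(rw.and(FsAction.EXECUTE));    // NONE
        System.out.println(rw.not());                    // EXECUTE
      }
    }
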
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
index 2bd6f1f3b9126..fd67607c1723e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
@@ -35,7 +35,10 @@ public final class FsCreateModes extends FsPermission {
/**
* Create from unmasked mode and umask.
*
- * If the mode is already an FsCreateModes object, return it.
+ * @param mode mode
+ * @param umask umask
+ * @return the mode with the umask applied; if the mode is
+ * already an FsCreateModes object, it is returned unchanged.
*/
public static FsPermission applyUMask(FsPermission mode,
FsPermission umask) {
@@ -47,6 +50,10 @@ public static FsPermission applyUMask(FsPermission mode,
/**
* Create from masked and unmasked modes.
+ *
+ * @param masked masked
+ * @param unmasked unmasked
+ * @return FsCreateModes
*/
public static FsCreateModes create(FsPermission masked,
FsPermission unmasked) {
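
A short sketch of applyUMask with a conventional 022 umask, showing the masked and unmasked views carried by the returned object:

    import org.apache.hadoop.fs.permission.FsCreateModes;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class FsCreateModesSketch {
      public static void main(String[] args) {
        FsPermission requested = new FsPermission((short) 0777);
        FsPermission umask = new FsPermission((short) 0022);

        FsPermission masked = FsCreateModes.applyUMask(requested, umask);
        System.out.printf("masked=%o unmasked=%o%n",
            masked.toShort(), masked.getUnmasked().toShort());  // 755 and 777
      }
    }
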
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
index 51c113af2702e..c416e5f41a2e6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
@@ -56,7 +56,11 @@ public class FsPermission implements Writable, Serializable,
/** Maximum acceptable length of a permission string to parse */
public static final int MAX_PERMISSION_LENGTH = 10;
- /** Create an immutable {@link FsPermission} object. */
+ /**
+ * Create an immutable {@link FsPermission} object.
+ * @param permission permission
+ * @return FsPermission
+ */
public static FsPermission createImmutable(short permission) {
return new ImmutableFsPermission(permission);
}
@@ -85,7 +89,7 @@ public FsPermission(FsAction u, FsAction g, FsAction o, boolean sb) {
/**
* Construct by the given mode.
- * @param mode
+ * @param mode mode
* @see #toShort()
*/
public FsPermission(short mode) { fromShort(mode); }
@@ -145,13 +149,22 @@ public FsPermission(String mode) {
this(new RawParser(mode).getPermission());
}
- /** Return user {@link FsAction}. */
+ /**
+ * Return user {@link FsAction}.
+ * @return FsAction useraction.
+ */
public FsAction getUserAction() {return useraction;}
- /** Return group {@link FsAction}. */
+ /**
+ * Return group {@link FsAction}.
+ * @return FsAction groupaction.
+ */
public FsAction getGroupAction() {return groupaction;}
- /** Return other {@link FsAction}. */
+ /**
+ * Return other {@link FsAction}.
+ * @return FsAction otheraction.
+ */
public FsAction getOtherAction() {return otheraction;}
private void set(FsAction u, FsAction g, FsAction o, boolean sb) {
@@ -180,6 +193,7 @@ public void readFields(DataInput in) throws IOException {
/**
* Get masked permission if exists.
+ * @return masked
*/
public FsPermission getMasked() {
return null;
@@ -187,6 +201,7 @@ public FsPermission getMasked() {
/**
* Get unmasked permission if exists.
+ * @return unmasked
*/
public FsPermission getUnmasked() {
return null;
@@ -194,6 +209,10 @@ public FsPermission getUnmasked() {
/**
* Create and initialize a {@link FsPermission} from {@link DataInput}.
+ *
+ * @param in data input
+ * @throws IOException raised on errors performing I/O.
+ * @return FsPermission
*/
public static FsPermission read(DataInput in) throws IOException {
FsPermission p = new FsPermission();
@@ -203,6 +222,7 @@ public static FsPermission read(DataInput in) throws IOException {
/**
* Encode the object to a short.
+ * @return object to a short
*/
public short toShort() {
int s = (stickyBit ? 1 << 9 : 0) |
@@ -301,6 +321,9 @@ public FsPermission applyUMask(FsPermission umask) {
* '-' sets bits in the mask.
*
* Octal umask, the specified bits are set in the file mode creation mask.
+ *
+ * @param conf configuration
+ * @return FsPermission UMask
*/
public static FsPermission getUMask(Configuration conf) {
int umask = DEFAULT_UMASK;
@@ -346,7 +369,11 @@ public boolean getAclBit() {
}
/**
- * Returns true if the file is encrypted or directory is in an encryption zone
+ * Returns true if the file is encrypted or directory is in an encryption zone.
+ *
+ * @return true if the file is encrypted or the
+ * directory is in an encryption zone, false otherwise.
+ *
* @deprecated Get encryption bit from the
* {@link org.apache.hadoop.fs.FileStatus} object.
*/
@@ -357,6 +384,9 @@ public boolean getEncryptedBit() {
/**
* Returns true if the file or directory is erasure coded.
+ *
+ * @return true if the file or directory
+ * is erasure coded, false otherwise.
* @deprecated Get ec bit from the {@link org.apache.hadoop.fs.FileStatus}
* object.
*/
@@ -365,7 +395,11 @@ public boolean getErasureCodedBit() {
return false;
}
- /** Set the user file creation mask (umask) */
+ /**
+ * Set the user file creation mask (umask).
+ * @param conf configuration
+ * @param umask umask
+ */
public static void setUMask(Configuration conf, FsPermission umask) {
conf.set(UMASK_LABEL, String.format("%1$03o", umask.toShort()));
}
@@ -379,6 +413,8 @@ public static void setUMask(Configuration conf, FsPermission umask) {
* {@link FsPermission#getDirDefault()} for directory, and use
* {@link FsPermission#getFileDefault()} for file.
* This method is kept for compatibility.
+ *
+ * @return Default FsPermission
*/
public static FsPermission getDefault() {
return new FsPermission((short)00777);
@@ -386,6 +422,8 @@ public static FsPermission getDefault() {
/**
* Get the default permission for directory.
+ *
+ * @return DirDefault FsPermission
*/
public static FsPermission getDirDefault() {
return new FsPermission((short)00777);
@@ -393,6 +431,8 @@ public static FsPermission getDirDefault() {
/**
* Get the default permission for file.
+ *
+ * @return FileDefault FsPermission
*/
public static FsPermission getFileDefault() {
return new FsPermission((short)00666);
@@ -400,6 +440,8 @@ public static FsPermission getFileDefault() {
/**
* Get the default permission for cache pools.
+ *
+ * @return CachePoolDefault FsPermission
*/
public static FsPermission getCachePoolDefault() {
return new FsPermission((short)00755);
@@ -408,6 +450,7 @@ public static FsPermission getCachePoolDefault() {
/**
* Create a FsPermission from a Unix symbolic permission string
* @param unixSymbolicPermission e.g. "-rw-rw-rw-"
+ * @return FsPermission
*/
public static FsPermission valueOf(String unixSymbolicPermission) {
if (unixSymbolicPermission == null) {
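
Tying the FsPermission pieces above together, a brief sketch of valueOf, toShort and the default modes:

    import org.apache.hadoop.fs.permission.FsPermission;

    public class FsPermissionSketch {
      public static void main(String[] args) {
        FsPermission p = FsPermission.valueOf("-rw-r--r--");
        System.out.printf("%s -> %o%n", p, p.toShort());   // rw-r--r-- -> 644
        System.out.println(FsPermission.getFileDefault()); // rw-rw-rw- (0666)
        System.out.println(FsPermission.getDirDefault());  // rwxrwxrwx (0777)
      }
    }
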
From e354b8387fea2106dc50aa0c4a146b69954d007b Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Fri, 13 May 2022 08:18:54 -0700
Subject: [PATCH 34/53] HADOOP-18229. Fix some java doc compilation errors.
AccessControlList.java warning: no @return, AuditingFunctions.java warning:
no @return, AvroSerialization.java warning: no @return, warning: no @param
for clazz etc, BlockDecompressorStream.java warning: no description for
@throws, Bzip2Compressor.java warning: no @param for conf,
Bzip2Decompressor.java warning: no @param for conserveMemory,
CBZip2InputStream.java warning: no description for @throws,
CBZip2OutputStream.java warning: no @return, Command.java warning: no
description for @throws, CompressionCodec.java warning: no description for
@throws, CompressionInputStream.java warning: no description for @throws,
CompressionOutputStream.java warning: no @throws for java.io.IOException,
Compressor.java warning: no @throws for java.io.IOException,
CredentialProvider.java warning: no description for @throws,
CredentialShell.java warning: no description for @param, Decompressor.java
warning: no description for @throws, DecompressorStream.java: warning: no
description for @throws, ErasureCoder.java: warning: no @return,
ErasureCodingStep.java: warning: no description for @param,
ErasureDecoder.java warning: no description for @param,
ErasureDecodingStep.java warning: no description for @param,
ErasureEncoder.java: warning: no description for @param,
ErasureEncodingStep.java warning: no description for @param,
FSInputChecker.java: warning: no @return, FsStatus.java: warning: no @return,
HHErasureCodingStep.java warning: no description for @param,
HHXORErasureDecodingStep.java warning: no description for @param,
HHXORErasureEncodingStep.java warning: no description for @param,
ImpersonationProvider.java warning: no description for @throws, Key.java
warning: no description for @param, NetUtils.java: warning: no description
for @param, ProxyUsers.java: warning: no description for @throws,
RefreshAuthorizationPolicyProtocol: warning: no description for @throws,
Serialization.java: warning: no @param for c, Shell.java: warning: no
@return, SplittableCompressionCodec.java: warning: no @throws for
java.io.IOException, XORErasureDecoder.java warning: no description for
@param
---
.../java/org/apache/hadoop/fs/FSInputChecker.java | 2 +-
.../main/java/org/apache/hadoop/fs/FsStatus.java | 2 +-
.../java/org/apache/hadoop/fs/shell/Command.java | 4 ++--
.../hadoop/fs/store/audit/AuditingFunctions.java | 2 ++
.../hadoop/io/compress/BlockDecompressorStream.java | 4 ++--
.../apache/hadoop/io/compress/CompressionCodec.java | 8 ++++----
.../hadoop/io/compress/CompressionInputStream.java | 8 +++++---
.../hadoop/io/compress/CompressionOutputStream.java | 4 +++-
.../org/apache/hadoop/io/compress/Compressor.java | 3 +++
.../org/apache/hadoop/io/compress/Decompressor.java | 2 +-
.../hadoop/io/compress/DecompressorStream.java | 2 +-
.../io/compress/SplittableCompressionCodec.java | 2 ++
.../hadoop/io/compress/bzip2/Bzip2Compressor.java | 1 +
.../hadoop/io/compress/bzip2/Bzip2Decompressor.java | 2 ++
.../hadoop/io/compress/bzip2/CBZip2InputStream.java | 8 +++++---
.../io/compress/bzip2/CBZip2OutputStream.java | 5 +++++
.../hadoop/io/erasurecode/coder/ErasureCoder.java | 1 +
.../io/erasurecode/coder/ErasureCodingStep.java | 5 +++--
.../hadoop/io/erasurecode/coder/ErasureDecoder.java | 10 +++++-----
.../io/erasurecode/coder/ErasureDecodingStep.java | 6 +++---
.../hadoop/io/erasurecode/coder/ErasureEncoder.java | 2 +-
.../io/erasurecode/coder/ErasureEncodingStep.java | 6 +++---
.../io/erasurecode/coder/HHErasureCodingStep.java | 4 ++--
.../erasurecode/coder/HHXORErasureDecodingStep.java | 4 ++--
.../erasurecode/coder/HHXORErasureEncodingStep.java | 4 ++--
.../io/erasurecode/coder/XORErasureDecoder.java | 2 +-
.../apache/hadoop/io/serializer/Serialization.java | 5 +++++
.../io/serializer/avro/AvroSerialization.java | 6 ++++++
.../main/java/org/apache/hadoop/net/NetUtils.java | 1 -
.../hadoop/security/alias/CredentialProvider.java | 13 +++++++------
.../hadoop/security/alias/CredentialShell.java | 6 +++---
.../security/authorize/AccessControlList.java | 1 +
.../security/authorize/ImpersonationProvider.java | 4 ++--
.../hadoop/security/authorize/ProxyUsers.java | 12 ++++++------
.../RefreshAuthorizationPolicyProtocol.java | 2 +-
.../src/main/java/org/apache/hadoop/util/Shell.java | 2 +-
.../main/java/org/apache/hadoop/util/bloom/Key.java | 4 ++--
37 files changed, 97 insertions(+), 62 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
index 459114e89cc85..e367f3666c6eb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
@@ -132,7 +132,7 @@ abstract protected int readChunk(long pos, byte[] buf, int offset, int len,
abstract protected long getChunkPosition(long pos);
/**
- * Return true if there is a need for checksum verification
+ * Return true if there is a need for checksum verification.
* @return true if there is a need for checksum verification, false otherwise.
*/
protected synchronized boolean needChecksum() {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
index dafb66f2edcba..fd69dc7615bbd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
@@ -36,7 +36,7 @@ public class FsStatus implements Writable {
private long remaining;
/**
- * Construct a FsStatus object, using the specified statistics
+ * Construct a FsStatus object, using the specified statistics.
*
* @param capacity capacity
* @param used used
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
index d2728374f5011..038fa43069b97 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
@@ -141,7 +141,7 @@ public int runAll() {
}
/**
- * sets the command factory for later use
+ * sets the command factory for later use.
* @param factory factory
*/
public void setCommandFactory(CommandFactory factory) {
@@ -149,7 +149,7 @@ public void setCommandFactory(CommandFactory factory) {
}
/**
- * retrieves the command factory
+ * retrieves the command factory.
*
* @return command factory
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
index acc82766be190..21ae5606f101a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
@@ -86,6 +86,8 @@ public static InvocationRaisingIOE withinAuditSpan(
* activates and deactivates the span around the inner one.
* @param auditSpan audit span
* @param operation operation
+ * @param <T> Generics Type T.
+ * @param <R> Generics Type R.
* @return a new invocation.
*/
public static <T, R> FunctionRaisingIOE<T, R> withinAuditSpan(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
index de457d192400d..ff10332ea8d5a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
@@ -43,7 +43,7 @@ public class BlockDecompressorStream extends DecompressorStream {
* @param in input stream
* @param decompressor decompressor to use
* @param bufferSize size of buffer
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public BlockDecompressorStream(InputStream in, Decompressor decompressor,
int bufferSize) throws IOException {
@@ -55,7 +55,7 @@ public BlockDecompressorStream(InputStream in, Decompressor decompressor,
*
* @param in input stream
* @param decompressor decompressor to use
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public BlockDecompressorStream(InputStream in, Decompressor decompressor) throws IOException {
super(in, decompressor);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java
index f37aadfcb57f3..d064e1b914707 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java
@@ -39,7 +39,7 @@ public interface CompressionCodec {
*
* @param out the location for the final output stream
* @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
CompressionOutputStream createOutputStream(OutputStream out)
throws IOException;
@@ -51,7 +51,7 @@ CompressionOutputStream createOutputStream(OutputStream out)
* @param out the location for the final output stream
* @param compressor compressor to use
* @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
CompressionOutputStream createOutputStream(OutputStream out,
Compressor compressor)
@@ -77,7 +77,7 @@ CompressionOutputStream createOutputStream(OutputStream out,
*
* @param in the stream to read compressed bytes from
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
CompressionInputStream createInputStream(InputStream in) throws IOException;
@@ -88,7 +88,7 @@ CompressionOutputStream createOutputStream(OutputStream out,
* @param in the stream to read compressed bytes from
* @param decompressor decompressor to use
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
CompressionInputStream createInputStream(InputStream in,
Decompressor decompressor)
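
A hedged round-trip sketch of the factory methods documented above, using DefaultCodec over in-memory buffers:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CompressionInputStream;
    import org.apache.hadoop.io.compress.CompressionOutputStream;
    import org.apache.hadoop.io.compress.DefaultCodec;

    public class CodecRoundTripSketch {
      public static void main(String[] args) throws IOException {
        DefaultCodec codec = new DefaultCodec();
        codec.setConf(new Configuration());

        ByteArrayOutputStream compressed = new ByteArrayOutputStream();
        try (CompressionOutputStream out = codec.createOutputStream(compressed)) {
          out.write("hello codec".getBytes(StandardCharsets.UTF_8));
          out.finish();  // flush compressed data without closing the stream
        }

        try (CompressionInputStream in = codec.createInputStream(
            new ByteArrayInputStream(compressed.toByteArray()))) {
          ByteArrayOutputStream plain = new ByteArrayOutputStream();
          byte[] buf = new byte[64];
          int n;
          while ((n = in.read(buf, 0, buf.length)) > 0) {
            plain.write(buf, 0, n);
          }
          System.out.println(plain.toString("UTF-8"));  // hello codec
        }
      }
    }
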
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
index 55bb132e9c87c..017c89a327a5c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
@@ -53,7 +53,7 @@ public abstract class CompressionInputStream extends InputStream
* the decompressed bytes from the given stream.
*
* @param in The input stream to be compressed.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected CompressionInputStream(InputStream in) throws IOException {
if (!(in instanceof Seekable) || !(in instanceof PositionedReadable)) {
@@ -93,6 +93,8 @@ public IOStatistics getIOStatistics() {
/**
* Reset the decompressor to its initial state and discard any buffered data,
* as the underlying stream may have been repositioned.
+ *
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void resetState() throws IOException;
@@ -118,7 +120,7 @@ public long getPos() throws IOException {
/**
* This method is currently not supported.
*
- * @throws UnsupportedOperationException
+ * @throws UnsupportedOperationException Unsupported Operation Exception
*/
@Override
@@ -129,7 +131,7 @@ public void seek(long pos) throws UnsupportedOperationException {
/**
* This method is currently not supported.
*
- * @throws UnsupportedOperationException
+ * @throws UnsupportedOperationException Unsupported Operation Exception
*/
@Override
public boolean seekToNewSource(long targetPos) throws UnsupportedOperationException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
index 2a11ace81702c..aebcffa8117c7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
@@ -48,7 +48,7 @@ public abstract class CompressionOutputStream extends OutputStream
/**
* Create a compression output stream that writes
* the compressed bytes to the given stream.
- * @param out
+ * @param out out
*/
protected CompressionOutputStream(OutputStream out) {
this.out = out;
@@ -89,12 +89,14 @@ public void flush() throws IOException {
/**
* Finishes writing compressed data to the output stream
* without closing the underlying stream.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void finish() throws IOException;
/**
* Reset the compression to the initial state.
* Does not reset the underlying stream.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void resetState() throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java
index 537837faa0a51..8ecd3eb4cb7ef 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java
@@ -65,11 +65,13 @@ public interface Compressor {
/**
* Return number of uncompressed bytes input so far.
+ * @return bytes read
*/
public long getBytesRead();
/**
* Return number of compressed bytes output so far.
+ * @return bytes written
*/
public long getBytesWritten();
@@ -97,6 +99,7 @@ public interface Compressor {
* @param off Start offset of the data
* @param len Size of the buffer
* @return The actual number of bytes of compressed data.
+ * @throws IOException raised on errors performing I/O.
*/
public int compress(byte[] b, int off, int len) throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Decompressor.java
index e9558fab87325..30d4e29892eb7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Decompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Decompressor.java
@@ -96,7 +96,7 @@ public interface Decompressor {
* @param off Start offset of the data
* @param len Size of the buffer
* @return The actual number of bytes of uncompressed data.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public int decompress(byte[] b, int off, int len) throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
index 570d15c7f16aa..745105ce873af 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
@@ -80,7 +80,7 @@ public DecompressorStream(InputStream in, Decompressor decompressor)
* Allow derived classes to directly set the underlying stream.
*
* @param in Underlying input stream.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected DecompressorStream(InputStream in) throws IOException {
super(in);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java
index a756f47260c33..e5a04f2e7e88f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java
@@ -61,6 +61,7 @@ public enum READ_MODE {CONTINUOUS, BYBLOCK};
* Create a stream as dictated by the readMode. This method is used when
* the codec wants the ability to work with the underlying stream positions.
*
+ * @param decompressor decompressor
* @param seekableIn The seekable input stream (seeks in compressed data)
* @param start The start offset into the compressed stream. May be changed
* by the underlying codec.
@@ -69,6 +70,7 @@ public enum READ_MODE {CONTINUOUS, BYBLOCK};
* @param readMode Controls whether stream position is reported continuously
* from the compressed stream or only at block boundaries.
* @return a stream to read uncompressed bytes from
+ * @throws IOException raised on errors performing I/O.
*/
SplitCompressionInputStream createInputStream(InputStream seekableIn,
Decompressor decompressor, long start, long end, READ_MODE readMode)
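
A sketch, under stated assumptions, of opening one split of a bzip2 file in BYBLOCK mode; the file path and end offset are hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.compress.BZip2Codec;
    import org.apache.hadoop.io.compress.CodecPool;
    import org.apache.hadoop.io.compress.Decompressor;
    import org.apache.hadoop.io.compress.SplitCompressionInputStream;
    import org.apache.hadoop.io.compress.SplittableCompressionCodec;

    public class SplitReadSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        BZip2Codec codec = new BZip2Codec();
        codec.setConf(conf);

        FileSystem fs = FileSystem.getLocal(conf);
        FSDataInputStream raw = fs.open(new Path("/tmp/data.bz2"));  // hypothetical
        Decompressor decompressor = CodecPool.getDecompressor(codec);
        try (SplitCompressionInputStream in = codec.createInputStream(
            raw, decompressor, 0L, 1L << 20,
            SplittableCompressionCodec.READ_MODE.BYBLOCK)) {
          // The codec may move the start to the next block boundary.
          System.out.println("adjusted start=" + in.getAdjustedStart());
        } finally {
          CodecPool.returnDecompressor(decompressor);
        }
      }
    }
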
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
index 5713c56df6aef..f753af1b9fee9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
@@ -67,6 +67,7 @@ public Bzip2Compressor() {
/**
* Creates a new compressor, taking settings from the configuration.
+ * @param conf configuration
*/
public Bzip2Compressor(Configuration conf) {
this(Bzip2Factory.getBlockSize(conf),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
index 72ba97630e206..afa963e6b5da9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
@@ -50,6 +50,8 @@ public class Bzip2Decompressor implements Decompressor {
/**
* Creates a new decompressor.
+ * @param conserveMemory conserveMemory
+ * @param directBufferSize directBufferSize
*/
public Bzip2Decompressor(boolean conserveMemory, int directBufferSize) {
this.conserveMemory = conserveMemory;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
index 8426d25c2950e..0c1f1802025b0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
@@ -152,6 +152,7 @@ public enum STATE {
* This method reports the processed bytes so far. Please note that this
* statistic is only updated on block boundaries and only when the stream is
* initiated in BYBLOCK mode.
+ * @return ProcessedByteCount
*/
public long getProcessedByteCount() {
return reportedBytesReadFromCompressedStream;
@@ -209,7 +210,7 @@ private int readAByte(InputStream inStream) throws IOException {
* @param marker The bit pattern to be found in the stream
* @param markerBitLength No of bits in the marker
* @return true if the marker was found otherwise false
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @throws IllegalArgumentException if markerBitLength is greater than 63
*/
public boolean skipToNextMarker(long marker, int markerBitLength)
@@ -282,7 +283,8 @@ private void makeMaps() {
* the magic. Thus callers have to skip the first two bytes. Otherwise this
* constructor will throw an exception.
*
- *
+ * @param in in
+ * @param readMode READ_MODE
* @throws IOException
* if the stream content is malformed or an I/O error occurs.
* @throws NullPointerException
@@ -326,7 +328,7 @@ private CBZip2InputStream(final InputStream in, READ_MODE readMode, boolean skip
*
* @return long Number of bytes between current stream position and the
* next BZip2 block start marker.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*
*/
public static long numberOfBytesTillNextMarker(final InputStream in) throws IOException{
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
index 794f9d02229ec..dde473fd1feb6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
@@ -210,6 +210,10 @@ public class CBZip2OutputStream extends OutputStream implements BZip2Constants {
/**
* This method is accessible by subclasses for historical purposes. If you
* don't know what it does then you don't need it.
+ * @param len len
+ * @param freq freq
+ * @param alphaSize alphaSize
+ * @param maxLen maxLen
*/
protected static void hbMakeCodeLengths(char[] len, int[] freq,
int alphaSize, int maxLen) {
@@ -846,6 +850,7 @@ private void endCompression() throws IOException {
/**
* Returns the blocksize parameter specified at construction time.
+ * @return blocksize
*/
public final int getBlockSize() {
return this.blockSize100k;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java
index b5ae1f1e399a8..4ce8b9c663d7e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java
@@ -68,6 +68,7 @@ public interface ErasureCoder extends Configurable {
*
* @param blockGroup the erasure coding block group containing all necessary
* information for codec calculation
+ * @return ErasureCodingStep
*/
ErasureCodingStep calculateCoding(ECBlockGroup blockGroup);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java
index fb89d99a0540c..87b528c1a484f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java
@@ -46,8 +46,9 @@ public interface ErasureCodingStep {
/**
* Perform encoding or decoding given the input chunks, and generated results
* will be written to the output chunks.
- * @param inputChunks
- * @param outputChunks
+ * @param inputChunks inputChunks
+ * @param outputChunks outputChunks
+ * @throws IOException raised on errors performing I/O.
*/
void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks)
throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java
index 004fd38df1141..5a06ee883bb7d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java
@@ -65,7 +65,7 @@ public ErasureCoderOptions getOptions() {
/**
* We have all the data blocks and parity blocks as input blocks for
* recovering by default. It's codec specific
- * @param blockGroup
+ * @param blockGroup blockGroup
* @return input blocks
*/
protected ECBlock[] getInputBlocks(ECBlockGroup blockGroup) {
@@ -83,7 +83,7 @@ protected ECBlock[] getInputBlocks(ECBlockGroup blockGroup) {
/**
* Which blocks were erased?
- * @param blockGroup
+ * @param blockGroup blockGroup
* @return output blocks to recover
*/
protected ECBlock[] getOutputBlocks(ECBlockGroup blockGroup) {
@@ -118,7 +118,7 @@ public void release() {
/**
* Perform decoding against a block blockGroup.
- * @param blockGroup
+ * @param blockGroup blockGroup
* @return decoding step for caller to do the real work
*/
protected abstract ErasureCodingStep prepareDecodingStep(
@@ -126,7 +126,7 @@ protected abstract ErasureCodingStep prepareDecodingStep(
/**
* Get the number of erased blocks in the block group.
- * @param blockGroup
+ * @param blockGroup blockGroup
* @return number of erased blocks
*/
protected int getNumErasedBlocks(ECBlockGroup blockGroup) {
@@ -153,7 +153,7 @@ protected static int getNumErasedBlocks(ECBlock[] inputBlocks) {
/**
* Get indexes of erased blocks from inputBlocks
- * @param inputBlocks
+ * @param inputBlocks inputBlocks
* @return indexes of erased blocks from inputBlocks
*/
protected int[] getErasedIndexes(ECBlock[] inputBlocks) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java
index 24f55470e1727..c5927c9cdf59c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java
@@ -37,10 +37,10 @@ public class ErasureDecodingStep implements ErasureCodingStep {
/**
* The constructor with all the necessary info.
- * @param inputBlocks
+ * @param inputBlocks inputBlocks
* @param erasedIndexes the indexes of erased blocks in inputBlocks array
- * @param outputBlocks
- * @param rawDecoder
+ * @param outputBlocks outputBlocks
+ * @param rawDecoder rawDecoder
*/
public ErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes,
ECBlock[] outputBlocks,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java
index 81666e9b76b2e..3102d6f2c9533 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java
@@ -83,7 +83,7 @@ public void release() {
/**
* Perform encoding against a block group.
- * @param blockGroup
+ * @param blockGroup blockGroup
* @return encoding step for caller to do the real work
*/
protected abstract ErasureCodingStep prepareEncodingStep(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java
index 5fc5c7a09928f..854017c6bad59 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java
@@ -36,9 +36,9 @@ public class ErasureEncodingStep implements ErasureCodingStep {
/**
* The constructor with all the necessary info.
- * @param inputBlocks
- * @param outputBlocks
- * @param rawEncoder
+ * @param inputBlocks inputBlocks
+ * @param outputBlocks outputBlocks
+ * @param rawEncoder rawEncoder
*/
public ErasureEncodingStep(ECBlock[] inputBlocks, ECBlock[] outputBlocks,
RawErasureEncoder rawEncoder) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java
index a0f5b72710679..a568499ec897d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java
@@ -38,8 +38,8 @@ public abstract class HHErasureCodingStep
/**
* Constructor given input blocks and output blocks.
*
- * @param inputBlocks
- * @param outputBlocks
+ * @param inputBlocks inputBlocks
+ * @param outputBlocks outputBlocks
*/
public HHErasureCodingStep(ECBlock[] inputBlocks,
ECBlock[] outputBlocks) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java
index 16a3c0fa61c4b..6f8ab521b1a33 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java
@@ -43,9 +43,9 @@ public class HHXORErasureDecodingStep extends HHErasureCodingStep {
/**
* The constructor with all the necessary info.
- * @param inputBlocks
+ * @param inputBlocks inputBlocks
* @param erasedIndexes the indexes of erased blocks in inputBlocks array
- * @param outputBlocks
+ * @param outputBlocks outputBlocks
* @param rawDecoder underlying RS decoder for hitchhiker decoding
* @param rawEncoder underlying XOR encoder for hitchhiker decoding
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java
index 6a5644270117b..5d5e60508f24a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java
@@ -40,8 +40,8 @@ public class HHXORErasureEncodingStep extends HHErasureCodingStep {
/**
* The constructor with all the necessary info.
*
- * @param inputBlocks
- * @param outputBlocks
+ * @param inputBlocks inputBlocks
+ * @param outputBlocks outputBlocks
* @param rsRawEncoder underlying RS encoder for hitchhiker encoding
* @param xorRawEncoder underlying XOR encoder for hitchhiker encoding
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java
index 1a0e5c030e070..16c7417446088 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java
@@ -53,7 +53,7 @@ protected ErasureCodingStep prepareDecodingStep(
/**
* Which blocks were erased? For XOR it's simple: we only allow and return one
* erased block, either data or parity.
- * @param blockGroup
+ * @param blockGroup blockGroup
* @return output blocks to recover
*/
@Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java
index f17375a2551fa..12a9eeb2f3b78 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java
@@ -34,16 +34,21 @@ public interface Serialization<T> {
/**
* Allows clients to test whether this {@link Serialization}
* supports the given class.
+ *
+ * @param c class
+ * @return true if this serialization accepts the given class, false otherwise.
*/
boolean accept(Class<?> c);
/**
* @return a {@link Serializer} for the given class.
+ * @param c class
*/
Serializer<T> getSerializer(Class<T> c);
/**
* @return a {@link Deserializer} for the given class.
+ * @param c class
*/
Deserializer<T> getDeserializer(Class<T> c);
}
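
In practice this interface is consumed through SerializationFactory, which walks the registered Serializations and uses accept() to pick one; a short sketch with a Writable:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.serializer.SerializationFactory;
    import org.apache.hadoop.io.serializer.Serializer;

    public class SerializationSketch {
      public static void main(String[] args) throws IOException {
        SerializationFactory factory =
            new SerializationFactory(new Configuration());
        // IntWritable is accepted by the built-in WritableSerialization.
        Serializer<IntWritable> serializer =
            factory.getSerializer(IntWritable.class);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        serializer.open(out);
        serializer.serialize(new IntWritable(42));
        serializer.close();
        System.out.println(out.size() + " bytes");
      }
    }
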
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
index f340cb3a98a44..7280e3f44e4dc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
@@ -61,18 +61,24 @@ public Serializer<T> getSerializer(Class<T> c) {
/**
* Return an Avro Schema instance for the given class.
+ * @param t Generics Type T
+ * @return schema
*/
@InterfaceAudience.Private
public abstract Schema getSchema(T t);
/**
* Create and return Avro DatumWriter for the given class.
+ * @param clazz clazz
+ * @return DatumWriter
*/
@InterfaceAudience.Private
public abstract DatumWriter<T> getWriter(Class<T> clazz);
/**
* Create and return Avro DatumReader for the given class.
+ * @param clazz clazz
+ * @return DatumReader
*/
@InterfaceAudience.Private
public abstract DatumReader<T> getReader(Class<T> clazz);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
index eef7d7c31964a..fd9cd7c946b1f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
@@ -448,7 +448,6 @@ public static InetSocketAddress getConnectAddress(InetSocketAddress addr) {
/**
* Same as getInputStream(socket, socket.getSoTimeout()).
- *
*
* @param socket socket
* @throws IOException raised on errors performing I/O.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java
index 113dcaeb5e644..d93e0e609b1ae 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java
@@ -91,7 +91,7 @@ public boolean isTransient() {
/**
* Ensures that any changes to the credentials are written to persistent
* store.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void flush() throws IOException;
@@ -99,7 +99,7 @@ public boolean isTransient() {
* Get the credential entry for a specific alias.
* @param alias the name of a specific credential
* @return the credentialEntry
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract CredentialEntry getCredentialEntry(String alias)
throws IOException;
@@ -107,7 +107,7 @@ public abstract CredentialEntry getCredentialEntry(String alias)
/**
* Get the aliases for all credentials.
* @return the list of alias names
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract List getAliases() throws IOException;
@@ -115,7 +115,8 @@ public abstract CredentialEntry getCredentialEntry(String alias)
* Create a new credential. The given alias must not already exist.
* @param name the alias of the credential
* @param credential the credential value for the alias.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
+ * @return CredentialEntry
*/
public abstract CredentialEntry createCredentialEntry(String name,
char[] credential) throws IOException;
@@ -123,7 +124,7 @@ public abstract CredentialEntry createCredentialEntry(String name,
/**
* Delete the given credential.
* @param name the alias of the credential to delete
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void deleteCredentialEntry(String name) throws IOException;
@@ -133,7 +134,7 @@ public abstract CredentialEntry createCredentialEntry(String name,
* means. If true, the password should be provided by the caller using
* setPassword().
* @return Whether or not the provider requires a password
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public boolean needsPassword() throws IOException {
return false;
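A hedged usage sketch of the provider API documented above; the @throws descriptions matter because every call can surface an I/O failure from the backing store. The CredentialLookup class is illustrative, not a Hadoop API.

import java.io.IOException;

import org.apache.hadoop.security.alias.CredentialProvider;

public final class CredentialLookup {

  private CredentialLookup() {
  }

  /**
   * Resolve the credential stored under the given alias.
   *
   * @param provider an already-configured credential provider.
   * @param alias the name of the credential.
   * @return the credential value.
   * @throws IOException raised on errors performing I/O.
   */
  public static char[] lookup(CredentialProvider provider, String alias)
      throws IOException {
    // Documented above: getCredentialEntry raises IOException on I/O errors.
    CredentialProvider.CredentialEntry entry =
        provider.getCredentialEntry(alias);
    if (entry == null) {
      throw new IOException("No credential found for alias " + alias);
    }
    return entry.getCredential();
  }
}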
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
index 06d42207ecba5..c998bd51a5c38 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
@@ -70,9 +70,9 @@ public class CredentialShell extends CommandShell {
* % hadoop credential check alias [-provider providerPath]
* % hadoop credential delete alias [-provider providerPath] [-f]
*
- * @param args
+ * @param args the command line arguments.
* @return 0 if the argument(s) were recognized, 1 otherwise
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
protected int init(String[] args) throws IOException {
@@ -523,7 +523,7 @@ public void format(String message) {
*
* @param args
* Command line arguments
- * @throws Exception
+ * @throws Exception if the command fails.
*/
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new CredentialShell(), args);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
index aa5b01fbed113..8453f4f59c6e7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
@@ -296,6 +296,7 @@ else if (!users.isEmpty()) {
/**
* Returns the access control list as a String that can be used for building a
* new instance by sending it to the constructor of {@link AccessControlList}.
+ * @return acl string
*/
public String getAclString() {
StringBuilder sb = new StringBuilder(INITIAL_CAPACITY);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java
index eff77d8942cf7..df022c38076bf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java
@@ -46,7 +46,7 @@ public interface ImpersonationProvider extends Configurable {
* be preferred to avoid possibly re-resolving the ip address.
* @param user ugi of the effective or proxy user which contains a real user.
* @param remoteAddress the ip address of client.
- * @throws AuthorizationException
+ * @throws AuthorizationException when the user is not allowed to impersonate.
*/
default void authorize(UserGroupInformation user, String remoteAddress)
throws AuthorizationException {
@@ -62,7 +62,7 @@ default void authorize(UserGroupInformation user, String remoteAddress)
*
* @param user ugi of the effective or proxy user which contains a real user
* @param remoteAddress the ip address of client
- * @throws AuthorizationException
+ * @throws AuthorizationException when the user is not allowed to impersonate.
*/
void authorize(UserGroupInformation user, InetAddress remoteAddress)
throws AuthorizationException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
index be05e110b59cf..dede4c925ae0d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
@@ -94,7 +94,7 @@ public static void refreshSuperUserGroupsConfiguration(Configuration conf) {
*
* @param user ugi of the effective or proxy user which contains a real user
* @param remoteAddress the ip address of client
- * @throws AuthorizationException
+ * @throws AuthorizationException when the user is not allowed to impersonate.
*/
public static void authorize(UserGroupInformation user,
String remoteAddress) throws AuthorizationException {
@@ -106,7 +106,7 @@ public static void authorize(UserGroupInformation user,
*
* @param user ugi of the effective or proxy user which contains a real user
* @param remoteAddress the inet address of client
- * @throws AuthorizationException
+ * @throws AuthorizationException when the user is not allowed to impersonate.
*/
public static void authorize(UserGroupInformation user,
InetAddress remoteAddress) throws AuthorizationException {
@@ -125,10 +125,10 @@ private static ImpersonationProvider getSip() {
/**
* This function is kept to provide backward compatibility.
- * @param user
- * @param remoteAddress
- * @param conf
- * @throws AuthorizationException
+ * @param user ugi of the effective or proxy user.
+ * @param remoteAddress the remote address of the client.
+ * @param conf the configuration.
+ * @throws AuthorizationException when the user is not allowed to impersonate.
* @deprecated use {@link #authorize(UserGroupInformation, String)} instead.
*/
@Deprecated
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java
index 0f0b25d8344e2..51a900fa71cb0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java
@@ -41,7 +41,7 @@ public interface RefreshAuthorizationPolicyProtocol {
/**
* Refresh the service-level authorization policy in-effect.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Idempotent
void refreshServiceAcl() throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
index b72ce63f5d06f..d49de10c61cd2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
@@ -1117,7 +1117,7 @@ private static void joinThread(Thread t) {
protected abstract String[] getExecString();
/**
- * Parse the execution result
+ * Parse the execution result.
*
* @param lines lines
* @throws IOException raised on errors performing I/O.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java
index e8ad18cfc87e3..be97b55bbc25b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java
@@ -100,8 +100,8 @@ public Key(byte[] value, double weight) {
}
/**
- * @param value
- * @param weight
+ * @param value the key value.
+ * @param weight the weight of the key.
*/
public void set(byte[] value, double weight) {
if (value == null) {
From 0a9bbff1adf70ef37c79b3daea584e884a696986 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Fri, 13 May 2022 17:08:30 -0700
Subject: [PATCH 35/53] HADOOP-18229. Fix 150 javadoc compilation
warnings and 6 errors.
---
.../fs/CommonConfigurationKeysPublic.java | 3 +-
.../apache/hadoop/fs/FileEncryptionInfo.java | 1 -
.../java/org/apache/hadoop/fs/FileUtil.java | 10 +-
.../main/java/org/apache/hadoop/fs/Trash.java | 4 +-
.../org/apache/hadoop/fs/TrashPolicy.java | 1 +
.../hadoop/fs/permission/FsCreateModes.java | 2 +-
.../hadoop/fs/shell/find/BaseExpression.java | 17 +++-
.../hadoop/fs/shell/find/Expression.java | 15 ++-
.../hadoop/fs/shell/find/FindOptions.java | 1 +
.../apache/hadoop/fs/shell/find/Result.java | 21 +++-
.../hadoop/ha/ActiveStandbyElector.java | 29 +++++-
.../java/org/apache/hadoop/ha/HAAdmin.java | 3 +
.../apache/hadoop/ha/HAServiceProtocol.java | 8 +-
.../org/apache/hadoop/ha/HAServiceTarget.java | 8 +-
.../apache/hadoop/io/compress/BZip2Codec.java | 8 +-
.../apache/hadoop/io/compress/CodecPool.java | 10 +-
.../io/compress/CompressionCodecFactory.java | 5 +-
.../apache/hadoop/io/compress/Lz4Codec.java | 8 +-
.../hadoop/io/compress/SnappyCodec.java | 8 +-
.../hadoop/io/compress/ZStandardCodec.java | 8 +-
.../hadoop/io/erasurecode/CodecUtil.java | 2 +
.../io/erasurecode/ErasureCodeNative.java | 2 +
.../io/erasurecode/grouper/BlockGrouper.java | 6 +-
.../sink/ganglia/AbstractGangliaSink.java | 5 +-
.../metrics2/sink/ganglia/GangliaSink30.java | 2 +-
.../metrics2/sink/ganglia/GangliaSink31.java | 2 +-
.../apache/hadoop/security/SecurityUtil.java | 2 +-
.../AbstractDelegationTokenSecretManager.java | 96 +++++++++++++++----
.../hadoop/util/concurrent/AsyncGet.java | 7 +-
.../hadoop/util/curator/ZKCuratorManager.java | 12 ++-
.../functional/CommonCallableSupplier.java | 5 +
.../util/functional/RemoteIterators.java | 12 ++-
32 files changed, 245 insertions(+), 78 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index fdc5d3a40c106..5225236509294 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -181,8 +181,9 @@ public class CommonConfigurationKeysPublic {
/**
* Default value for {@link #FS_CREATION_PARALLEL_COUNT}.
- *
+ *
* Default value: {@value}.
+ *
*/
public static final int FS_CREATION_PARALLEL_COUNT_DEFAULT =
64;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
index 4fd80572e60b4..915f73f19b5c6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
@@ -53,7 +53,6 @@ public class FileEncryptionInfo implements Serializable {
* @param ezKeyVersionName name of the KeyVersion used to encrypt the
* encrypted data encryption key.
* @param version version
- * @return file encryption info
*/
public FileEncryptionInfo(final CipherSuite suite,
final CryptoProtocolVersion version, final byte[] edek,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index 96f5298c366d1..308d008dfff0b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -420,7 +420,7 @@ public static boolean copy(FileSystem srcFS, Path[] srcs,
* @param overwrite overwrite
* @param conf configuration
* @throws IOException raised on errors performing I/O.
- *
+ * @return true if the operation succeeded.
*/
public static boolean copy(FileSystem srcFS, Path src,
FileSystem dstFS, Path dst,
@@ -433,20 +433,21 @@ public static boolean copy(FileSystem srcFS, Path src,
/**
* Copy a file/directory tree within/between filesystems.
- *
+ *
* returns true if the operation succeeded. When deleteSource is true,
* this means "after the copy, delete(source) returned true"
* If the destination is a directory, and mkdirs (dest) fails,
* the operation will return false rather than raise any exception.
- *
+ *
* The overwrite flag is about overwriting files; it has no effect about
* handing an attempt to copy a file atop a directory (expect an IOException),
* or a directory over a path which contains a file (mkdir will fail, so
* "false").
- *
+ *
* The operation is recursive, and the deleteSource operation takes place
* as each subdirectory is copied. Therefore, if an operation fails partway
* through, the source tree may be partially deleted.
+ *
* @param srcFS source filesystem
* @param srcStatus status of source
* @param dstFS destination filesystem
@@ -1769,6 +1770,7 @@ public static List getJarsInDirectory(String path) {
* wildcard path to return all jars from the directory to use in a classpath.
*
* @param path the path to the directory. The path may include the wildcard.
+ * @param useLocal whether to resolve the path on the local filesystem.
* @return the list of jars as URLs, or an empty list if there are no jars, or
* the directory does not exist
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
index 450e62b4d5d51..f34b40f992e5d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
@@ -99,9 +99,9 @@ public static boolean moveToAppropriateTrash(FileSystem fs, Path p,
}
/**
- * Returns whether the trash is enabled for this filesystem
+ * Returns whether the trash is enabled for this filesystem.
*
- * return if isEnabled true,not false
+ * @return true if trash is enabled, false otherwise.
*/
public boolean isEnabled() {
return trashPolicy.isEnabled();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
index b8b67c9e8b2ff..e4c7f4035248d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
@@ -67,6 +67,7 @@ public void initialize(Configuration conf, FileSystem fs) {
/**
* Move a file or directory to the current trash directory.
+ * @param path the path
* @return false if the item is already in the trash or trash is disabled
* @throws IOException raised on errors performing I/O.
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
index fd67607c1723e..a684fd33f94d4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
@@ -37,7 +37,7 @@ public final class FsCreateModes extends FsPermission {
*
* @param mode mode
* @param umask umask
- * @retutn If the mode is already
+ * @return If the mode is already
* an FsCreateModes object, return it.
*/
public static FsPermission applyUMask(FsPermission mode,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
index 5069d2d34e51c..0f4c1771012f0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
@@ -38,12 +38,18 @@ public abstract class BaseExpression implements Expression, Configurable {
private String[] usage = { "Not yet implemented" };
private String[] help = { "Not yet implemented" };
- /** Sets the usage text for this {@link Expression} */
+ /**
+ * Sets the usage text for this {@link Expression}.
+ * @param usage the usage text array.
+ */
protected void setUsage(String[] usage) {
this.usage = usage;
}
- /** Sets the help text for this {@link Expression} */
+ /**
+ * Sets the help text for this {@link Expression}.
+ * @param help the help text array.
+ */
protected void setHelp(String[] help) {
this.help = help;
}
@@ -92,7 +98,10 @@ public void finish() throws IOException {
/** Children of this expression. */
private LinkedList<Expression> children = new LinkedList<Expression>();
- /** Return the options to be used by this expression. */
+ /**
+ * Return the options to be used by this expression.
+ * @return options
+ */
protected FindOptions getOptions() {
return (this.options == null) ? new FindOptions() : this.options;
}
@@ -265,6 +274,7 @@ protected void addArgument(String arg) {
* @param depth
* current depth in the process directories
* @return FileStatus
+ * @throws IOException raised on errors performing I/O.
*/
protected FileStatus getFileStatus(PathData item, int depth)
throws IOException {
@@ -295,6 +305,7 @@ protected Path getPath(PathData item) throws IOException {
*
* @param item PathData
* @return FileSystem
+ * @throws IOException raised on errors performing I/O.
*/
protected FileSystem getFileSystem(PathData item) throws IOException {
return item.fs;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
index ccad631028cc9..3a4265c0ab7a5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
@@ -30,13 +30,15 @@ public interface Expression {
/**
* Set the options for this expression, called once before processing any
* items.
+ * @param options the find options.
+ * @throws IOException raised on errors performing I/O.
*/
public void setOptions(FindOptions options) throws IOException;
/**
* Prepares the expression for execution, called once after setting options
* and before processing any options.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void prepare() throws IOException;
@@ -46,13 +48,14 @@ public interface Expression {
* @param item {@link PathData} item to be processed
* @param depth distance of the item from the command line argument
* @return {@link Result} of applying the expression to the item
+ * @throws IOException raised on errors performing I/O.
*/
public Result apply(PathData item, int depth) throws IOException;
/**
* Finishes the expression, called once after processing all items.
*
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void finish() throws IOException;
@@ -76,15 +79,21 @@ public interface Expression {
/**
* Indicates whether this expression performs an action, i.e. provides output
* back to the user.
+ * @return true if the expression is an action, false otherwise.
*/
public boolean isAction();
- /** Identifies the expression as an operator rather than a primary. */
+ /**
+ * Identifies the expression as an operator rather than a primary.
+ * @return if is operator true, not false
+ */
public boolean isOperator();
/**
* Returns the precedence of this expression
* (only applicable to operators).
+ *
+ * @return precedence
*/
public int getPrecedence();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java
index b0f1be5c35c93..e3f24835f800f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java
@@ -264,6 +264,7 @@ public void setConfiguration(Configuration configuration) {
/**
* Return the {@link Configuration} return configuration {@link Configuration}
+ * @return configuration
*/
public Configuration getConfiguration() {
return this.configuration;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java
index 2ef9cb4a801d6..a7dee3a97439b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java
@@ -35,23 +35,36 @@ private Result(boolean success, boolean recurse) {
this.descend = recurse;
}
- /** Should further directories be descended. */
+ /**
+ * Should further directories be descended.
+ * @return true if further directories should be descended, false otherwise.
+ */
public boolean isDescend() {
return this.descend;
}
- /** Should processing continue. */
+ /**
+ * Should processing continue.
+ * @return true if processing should continue, false otherwise.
+ */
public boolean isPass() {
return this.success;
}
- /** Returns the combination of this and another result. */
+ /**
+ * Returns the combination of this and another result.
+ * @param other the other result.
+ * @return the combined result.
+ */
public Result combine(Result other) {
return new Result(this.isPass() && other.isPass(), this.isDescend()
&& other.isDescend());
}
- /** Negate this result. */
+ /**
+ * Negate this result.
+ * @return the negated result.
+ */
public Result negate() {
return new Result(!this.isPass(), this.isDescend());
}
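The new @return descriptions in Result.java encode two independent flags, and the hunk's bodies show how they compose. A self-contained mini sketch (not Hadoop's class) mirroring those semantics: combine() ANDs both flags, while negate() flips only the pass flag and preserves descend.

// Mirrors the Result semantics shown in the hunk above; illustrative only.
final class MiniResult {
  private final boolean success;
  private final boolean descend;

  MiniResult(boolean success, boolean descend) {
    this.success = success;
    this.descend = descend;
  }

  boolean isPass() { return success; }
  boolean isDescend() { return descend; }

  // AND of both flags, as in Result#combine.
  MiniResult combine(MiniResult other) {
    return new MiniResult(isPass() && other.isPass(),
        isDescend() && other.isDescend());
  }

  // Flip pass, keep descend, as in Result#negate.
  MiniResult negate() {
    return new MiniResult(!isPass(), isDescend());
  }
}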
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
index 041f8cab49c4d..7394e5fb46633 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
@@ -91,6 +91,8 @@ public interface ActiveStandbyElectorCallback {
*
* Callback implementations are expected to manage their own
* timeouts (e.g. when making an RPC to a remote node).
+ *
+ * @throws ServiceFailedException if the service fails to become active.
*/
void becomeActive() throws ServiceFailedException;
@@ -119,6 +121,8 @@ public interface ActiveStandbyElectorCallback {
* If there is any fatal error (e.g. wrong ACL's, unexpected Zookeeper
* errors or Zookeeper persistent unavailability) then notifyFatalError is
* called to notify the app about it.
+ *
+ * @param errorMessage error message
*/
void notifyFatalError(String errorMessage);
@@ -204,8 +208,13 @@ enum State {
* ZK connection
* @param app
* reference to callback interface object
+ * @param maxRetryNum the maximum number of retries.
* @throws IOException
+ * raised on errors performing I/O.
* @throws HadoopIllegalArgumentException
+ * if valid data is not supplied.
+ * @throws KeeperException
+ * other zookeeper operation errors.
*/
public ActiveStandbyElector(String zookeeperHostPorts,
int zookeeperSessionTimeout, String parentZnodeName, List<ACL> acl,
@@ -246,7 +255,11 @@ public ActiveStandbyElector(String zookeeperHostPorts,
* @param failFast
* whether need to add the retry when establishing ZK connection.
* @throws IOException
+ * raised on errors performing I/O.
* @throws HadoopIllegalArgumentException
+ * if valid data is not supplied.
+ * @throws KeeperException
+ * other zookeeper operation errors.
*/
public ActiveStandbyElector(String zookeeperHostPorts,
int zookeeperSessionTimeout, String parentZnodeName, List<ACL> acl,
@@ -312,6 +325,8 @@ public synchronized void joinElection(byte[] data)
/**
* @return true if the configured parent znode exists
+ * @throws IOException raised on errors performing I/O.
+ * @throws InterruptedException interrupted exception.
*/
public synchronized boolean parentZNodeExists()
throws IOException, InterruptedException {
@@ -327,6 +342,10 @@ public synchronized boolean parentZNodeExists()
/**
* Utility function to ensure that the configured base znode exists.
* This recursively creates the znode as well as all of its parents.
+ *
+ * @throws IOException raised on errors performing I/O.
+ * @throws InterruptedException interrupted exception.
+ * @throws KeeperException other zookeeper operation errors.
*/
public synchronized void ensureParentZNode()
throws IOException, InterruptedException, KeeperException {
@@ -371,6 +390,9 @@ public synchronized void ensureParentZNode()
* This recursively deletes everything within the znode as well as the
* parent znode itself. It should only be used when it's certain that
* no electors are currently participating in the election.
+ *
+ * @throws IOException raised on errors performing I/O.
+ * @throws InterruptedException interrupted exception.
*/
public synchronized void clearParentZNode()
throws IOException, InterruptedException {
@@ -435,6 +457,7 @@ public static class ActiveNotFoundException extends Exception {
* @throws KeeperException
* other zookeeper operation errors
* @throws InterruptedException
+ * interrupted exception
* @throws IOException
* when ZooKeeper connection could not be established
*/
@@ -684,7 +707,7 @@ synchronized void processWatchEvent(ZooKeeper zk, WatchedEvent event) {
* inherit and mock out the zookeeper instance
*
* @return new zookeeper client instance
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @throws KeeperException zookeeper connectionloss exception
*/
protected synchronized ZooKeeper connectToZooKeeper() throws IOException,
@@ -714,7 +737,7 @@ protected synchronized ZooKeeper connectToZooKeeper() throws IOException,
* inherit and pass in a mock object for zookeeper
*
* @return new zookeeper client instance
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected ZooKeeper createZooKeeper() throws IOException {
return new ZooKeeper(zkHostPort, zkSessionTimeout, watcher);
@@ -781,6 +804,8 @@ private void reJoinElection(int sleepTime) {
* Sleep for the given number of milliseconds.
* This is non-static, and separated out, so that unit tests
* can override the behavior not to sleep.
+ *
+ * @param sleepMs the number of milliseconds to sleep.
*/
@VisibleForTesting
protected void sleepFor(int sleepMs) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
index c6949e561e2a2..d557e587652ae 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
@@ -326,6 +326,9 @@ private int getServiceState(final CommandLine cmd)
/**
* Return the serviceId as is, we are assuming it was
* given as a service address of form {@literal <}host:ipcport{@literal >}.
+ *
+ * @param serviceId the service id.
+ * @return the service address.
*/
protected String getServiceAddr(String serviceId) {
return serviceId;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
index 74a3d121a1abe..6eeb93012b125 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
@@ -118,7 +118,8 @@ public void monitorHealth() throws HealthCheckFailedException,
/**
* Request service to transition to active state. No operation, if the
* service is already in active state.
- *
+ *
+ * @param reqInfo request info.
* @throws ServiceFailedException
* if transition from standby to active fails.
* @throws AccessControlException
@@ -135,7 +136,8 @@ public void transitionToActive(StateChangeRequestInfo reqInfo)
/**
* Request service to transition to standby state. No operation, if the
* service is already in standby state.
- *
+ *
+ * @param reqInfo request info.
* @throws ServiceFailedException
* if transition from active to standby fails.
* @throws AccessControlException
@@ -153,6 +155,7 @@ public void transitionToStandby(StateChangeRequestInfo reqInfo)
* Request service to transition to observer state. No operation, if the
* service is already in observer state.
*
+ * @param reqInfo request info.
* @throws ServiceFailedException
* if transition from standby to observer fails.
* @throws AccessControlException
@@ -176,6 +179,7 @@ void transitionToObserver(StateChangeRequestInfo reqInfo)
* @throws IOException
* if other errors happen
* @see HAServiceStatus
+ * @return HAServiceStatus
*/
@Idempotent
public HAServiceStatus getServiceStatus() throws AccessControlException,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java
index 2e6b1fe113479..324c5f2225c19 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java
@@ -93,6 +93,9 @@ public abstract void checkFencingConfigured()
/**
* @return a proxy to connect to the target HA Service.
+ * @param timeoutMs timeout in milliseconds
+ * @param conf Configuration
+ * @throws IOException raised on errors performing I/O.
*/
public HAServiceProtocol getProxy(Configuration conf, int timeoutMs)
throws IOException {
@@ -115,7 +118,7 @@ public HAServiceProtocol.HAServiceState getTransitionTargetHAStatus() {
* returned proxy defaults to using {@link #getAddress()}, which means this
* method's behavior is identical to {@link #getProxy(Configuration, int)}.
*
- * @param conf Configuration
+ * @param conf configuration
* @param timeoutMs timeout in milliseconds
* @return a proxy to connect to the target HA service for health monitoring
* @throws IOException if there is an error
@@ -154,6 +157,9 @@ private HAServiceProtocol getProxyForAddress(Configuration conf,
/**
* @return a proxy to the ZKFC which is associated with this HA service.
+ * @param conf configuration
+ * @param timeoutMs timeout in milliseconds
+ * @throws IOException raised on errors performing I/O.
*/
public ZKFCProtocol getZKFCProxy(Configuration conf, int timeoutMs)
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
index 7fd5633daa698..7640f7ed7a6f7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
@@ -99,7 +99,7 @@ public BZip2Codec() { }
* @param out the location for the final output stream
* @return a stream the user can write uncompressed data to, to have it
* compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
@@ -116,7 +116,7 @@ public CompressionOutputStream createOutputStream(OutputStream out)
* @param compressor compressor to use
* @return a stream the user can write uncompressed data to, to have it
* compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out,
@@ -154,7 +154,7 @@ public Compressor createCompressor() {
*
* @param in the stream to read compressed bytes from
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in)
@@ -171,7 +171,7 @@ public CompressionInputStream createInputStream(InputStream in)
* @param in the stream to read compressed bytes from
* @param decompressor decompressor to use
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
index 2ac2ca65173f0..50a98778655a6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
@@ -235,7 +235,10 @@ public static void returnDecompressor(Decompressor decompressor) {
/**
* Return the number of leased {@link Compressor}s for this
- * {@link CompressionCodec}
+ * {@link CompressionCodec}.
+ *
+ * @param codec the codec.
+ * @return the number of leased compressors.
*/
public static int getLeasedCompressorsCount(CompressionCodec codec) {
return (codec == null) ? 0 : getLeaseCount(compressorCounts,
@@ -244,7 +247,10 @@ public static int getLeasedCompressorsCount(CompressionCodec codec) {
/**
* Return the number of leased {@link Decompressor}s for this
- * {@link CompressionCodec}
+ * {@link CompressionCodec}.
+ *
+ * @param codec the codec.
+ * @return the number of leased decompressors.
*/
public static int getLeasedDecompressorsCount(CompressionCodec codec) {
return (codec == null) ? 0 : getLeaseCount(decompressorCounts,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
index a195ed4e77fd4..6291d083e83fd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
@@ -171,6 +171,8 @@ public static void setCodecClasses(Configuration conf,
/**
* Find the codecs specified in the config value io.compression.codecs
* and register them. Defaults to gzip and deflate.
+ *
+ * @param conf configuration
*/
public CompressionCodecFactory(Configuration conf) {
codecs = new TreeMap<String, CompressionCodec>();
@@ -293,7 +295,8 @@ public static String removeSuffix(String filename, String suffix) {
/**
* A little test program.
- * @param args
+ * @param args the command line arguments.
+ * @throws Exception on any failure.
*/
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
index 8bfb7fe95c4e2..a5afb706c99c1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
@@ -61,7 +61,7 @@ public Configuration getConf() {
*
* @param out the location for the final output stream
* @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
@@ -77,7 +77,7 @@ public CompressionOutputStream createOutputStream(OutputStream out)
* @param out the location for the final output stream
* @param compressor compressor to use
* @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out,
@@ -125,7 +125,7 @@ public Compressor createCompressor() {
*
* @param in the stream to read compressed bytes from
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in)
@@ -141,7 +141,7 @@ public CompressionInputStream createInputStream(InputStream in)
* @param in the stream to read compressed bytes from
* @param decompressor decompressor to use
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
index 77cf36a339b34..d64c6e512f87c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
@@ -61,7 +61,7 @@ public Configuration getConf() {
*
* @param out the location for the final output stream
* @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
@@ -77,7 +77,7 @@ public CompressionOutputStream createOutputStream(OutputStream out)
* @param out the location for the final output stream
* @param compressor compressor to use
* @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out,
@@ -122,7 +122,7 @@ public Compressor createCompressor() {
*
* @param in the stream to read compressed bytes from
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in)
@@ -138,7 +138,7 @@ public CompressionInputStream createInputStream(InputStream in)
* @param in the stream to read compressed bytes from
* @param decompressor decompressor to use
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/ZStandardCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/ZStandardCodec.java
index a7afebc0c49ae..139e81eb73cc2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/ZStandardCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/ZStandardCodec.java
@@ -116,7 +116,7 @@ private static int getBufferSize(Configuration conf) {
*
* @param out the location for the final output stream
* @return a stream the user can write uncompressed data to have compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
@@ -132,7 +132,7 @@ public CompressionOutputStream createOutputStream(OutputStream out)
* @param out the location for the final output stream
* @param compressor compressor to use
* @return a stream the user can write uncompressed data to have compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out,
@@ -173,7 +173,7 @@ public Compressor createCompressor() {
*
* @param in the stream to read compressed bytes from
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in)
@@ -189,7 +189,7 @@ public CompressionInputStream createInputStream(InputStream in)
* @param in the stream to read compressed bytes from
* @param decompressor decompressor to use
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
index 2632f4b82f070..d302932fa8fd5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
@@ -83,6 +83,7 @@ private CodecUtil() { }
/**
* Create encoder corresponding to given codec.
* @param options Erasure codec options
+ * @param conf configuration
* @return erasure encoder
*/
public static ErasureEncoder createEncoder(Configuration conf,
@@ -100,6 +101,7 @@ public static ErasureEncoder createEncoder(Configuration conf,
/**
* Create decoder corresponding to given codec.
* @param options Erasure codec options
+ * @param conf configuration
* @return erasure decoder
*/
public static ErasureDecoder createDecoder(Configuration conf,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java
index ec317eee4dc3e..b931a68bddbe8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java
@@ -61,6 +61,7 @@ private ErasureCodeNative() {}
/**
* Are native libraries loaded?
+ * @return true if the native code is loaded, false otherwise.
*/
public static boolean isNativeCodeLoaded() {
return LOADING_FAILURE_REASON == null;
@@ -82,6 +83,7 @@ public static void checkNativeCodeLoaded() {
/**
* Get the native library name that's available or supported.
+ * @return library name
*/
public static native String getLibraryName();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java
index 3f1b0c22941bd..1a7757cbc16da 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java
@@ -33,7 +33,7 @@ public class BlockGrouper {
/**
* Set EC schema.
- * @param schema
+ * @param schema the EC schema.
*/
public void setSchema(ECSchema schema) {
this.schema = schema;
@@ -41,7 +41,7 @@ public void setSchema(ECSchema schema) {
/**
* Get EC schema.
- * @return
+ * @return the EC schema.
*/
protected ECSchema getSchema() {
return schema;
@@ -67,7 +67,7 @@ public int getRequiredNumParityBlocks() {
* Calculating and organizing BlockGroup, to be called by ECManager
* @param dataBlocks Data blocks to compute parity blocks against
* @param parityBlocks To be computed parity blocks
- * @return
+ * @return the built ECBlockGroup.
*/
public ECBlockGroup makeBlockGroup(ECBlock[] dataBlocks,
ECBlock[] parityBlocks) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
index 804e90330fba3..5c5fe97f42610 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
@@ -212,7 +212,7 @@ private void loadGangliaConf(GangliaConfType gtype) {
/**
* Lookup GangliaConf from cache. If not found, return default values
*
- * @param metricName
+ * @param metricName the metric name.
* @return looked up GangliaConf
*/
protected GangliaConf getGangliaConfForMetric(String metricName) {
@@ -253,6 +253,7 @@ private void pad() {
/**
* Puts an integer into the buffer as 4 bytes, big-endian.
+ * @param i the integer to put.
*/
protected void xdr_int(int i) {
buffer[offset++] = (byte) ((i >> 24) & 0xff);
@@ -263,7 +264,7 @@ protected void xdr_int(int i) {
/**
* Sends Ganglia Metrics to the configured hosts
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected void emitToGangliaHosts() throws IOException {
try {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java
index 3e8314ee884d8..196824f433c81 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java
@@ -216,7 +216,7 @@ private GangliaSlope calculateSlope(GangliaConf gConf,
* @param value The value of the metric
* @param gConf The GangliaConf for this metric
* @param gSlope The slope for this metric
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected void emitMetric(String groupName, String name, String type,
String value, GangliaConf gConf, GangliaSlope gSlope) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java
index 5aebff8c031a9..fae0d4e85e1ec 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java
@@ -42,7 +42,7 @@ public class GangliaSink31 extends GangliaSink30 {
* @param value The value of the metric
* @param gConf The GangliaConf for this metric
* @param gSlope The slope for this metric
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
protected void emitMetric(String groupName, String name, String type,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index 187ea28632bd5..277a037a53742 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -515,9 +515,9 @@ public static <T> T doAsLoginUserOrFatal(PrivilegedAction<T> action) {
* InterruptedException is thrown, it is converted to an IOException.
*
* @param action the action to perform
+ * @param <T> the return type of the action.
* @return the result of the action
* @throws IOException in the event of error
- * @return generic type T
*/
public static <T> T doAsLoginUser(PrivilegedExceptionAction<T> action)
throws IOException {
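The SecurityUtil hunk restores the type-parameter tag and moves @return next to the other tags. A hedged sketch of the same doclint-clean pattern on a standalone generic method; the Actions class is illustrative, not a Hadoop API.

import java.io.IOException;
import java.security.PrivilegedExceptionAction;

public final class Actions {

  private Actions() {
  }

  /**
   * Run the given action and return its result.
   *
   * @param action the action to perform.
   * @param <T> the return type of the action.
   * @return the result of the action.
   * @throws IOException in the event of error.
   */
  public static <T> T run(PrivilegedExceptionAction<T> action)
      throws IOException {
    try {
      return action.run();
    } catch (IOException e) {
      throw e;
    } catch (Exception e) {
      throw new IOException(e);
    }
  }
}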
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
index 2dec2df4b9d6c..72bdfceed78be 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
@@ -152,7 +152,10 @@ public AbstractDelegationTokenSecretManager(long delegationKeyUpdateInterval,
this.metrics = DelegationTokenSecretManagerMetrics.create();
}
- /** should be called before this object is used */
+ /**
+ * Should be called before this object is used.
+ * @throws IOException raised on errors performing I/O.
+ */
public void startThreads() throws IOException {
Preconditions.checkState(!running);
updateCurrentKey();
@@ -175,6 +178,8 @@ public synchronized void reset() {
/**
* Total count of active delegation tokens.
+ *
+ * @return the number of active delegation tokens.
*/
public long getCurrentTokensSize() {
return currentTokens.size();
@@ -182,8 +187,11 @@ public long getCurrentTokensSize() {
/**
* Add a previously used master key to cache (when NN restarts),
- * should be called before activate().
- * */
+ * should be called before activate().
+ *
+ * @param key delegation key
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized void addKey(DelegationKey key) throws IOException {
if (running) // a safety check
throw new IOException("Can't add delegation key to a running SecretManager.");
@@ -233,7 +241,9 @@ protected void updateStoredToken(TokenIdent ident, long renewDate) throws IOExce
/**
* For subclasses externalizing the storage, for example Zookeeper
- * based implementations
+ * based implementations.
+ *
+ * @return currentId
*/
protected synchronized int getCurrentKeyId() {
return currentId;
@@ -241,7 +251,9 @@ protected synchronized int getCurrentKeyId() {
/**
* For subclasses externalizing the storage, for example Zookeeper
- * based implementations
+ * based implementations.
+ *
+ * @return currentId
*/
protected synchronized int incrementCurrentKeyId() {
return ++currentId;
@@ -249,7 +261,9 @@ protected synchronized int incrementCurrentKeyId() {
/**
* For subclasses externalizing the storage, for example Zookeeper
- * based implementations
+ * based implementations.
+ *
+ * @param keyId the key id.
*/
protected synchronized void setCurrentKeyId(int keyId) {
currentId = keyId;
@@ -257,7 +271,9 @@ protected synchronized void setCurrentKeyId(int keyId) {
/**
* For subclasses externalizing the storage, for example Zookeeper
- * based implementations
+ * based implementations.
+ *
+ * @return delegationTokenSequenceNumber
*/
protected synchronized int getDelegationTokenSeqNum() {
return delegationTokenSequenceNumber;
@@ -265,7 +281,9 @@ protected synchronized int getDelegationTokenSeqNum() {
/**
* For subclasses externalizing the storage, for example Zookeeper
- * based implementations
+ * based implementations.
+ *
+ * @return delegationTokenSequenceNumber
*/
protected synchronized int incrementDelegationTokenSeqNum() {
return ++delegationTokenSequenceNumber;
@@ -273,7 +291,9 @@ protected synchronized int incrementDelegationTokenSeqNum() {
/**
* For subclasses externalizing the storage, for example Zookeeper
- * based implementations
+ * based implementations.
+ *
+ * @param seqNum the sequence number.
*/
protected synchronized void setDelegationTokenSeqNum(int seqNum) {
delegationTokenSequenceNumber = seqNum;
@@ -281,7 +301,9 @@ protected synchronized void setDelegationTokenSeqNum(int seqNum) {
/**
* For subclasses externalizing the storage, for example Zookeeper
- * based implementations
+ * based implementations.
+ *
+ * @param keyId the key id.
*/
protected DelegationKey getDelegationKey(int keyId) {
return allKeys.get(keyId);
@@ -289,7 +311,10 @@ protected DelegationKey getDelegationKey(int keyId) {
/**
* For subclasses externalizing the storage, for example Zookeeper
- * based implementations
+ * based implementations.
+ *
+ * @param key DelegationKey
+ * @throws IOException raised on errors performing I/O.
*/
protected void storeDelegationKey(DelegationKey key) throws IOException {
allKeys.put(key.getKeyId(), key);
@@ -298,7 +323,10 @@ protected void storeDelegationKey(DelegationKey key) throws IOException {
/**
* For subclasses externalizing the storage, for example Zookeeper
- * based implementations
+ * based implementations.
+ *
+ * @param key DelegationKey
+ * @throws IOException raised on errors performing I/O.
*/
protected void updateDelegationKey(DelegationKey key) throws IOException {
allKeys.put(key.getKeyId(), key);
@@ -307,6 +335,9 @@ protected void updateDelegationKey(DelegationKey key) throws IOException {
/**
* For subclasses externalizing the storage, for example Zookeeper
* based implementations
+ *
+ * @param ident the token identifier.
+ * @return the DelegationTokenInformation for the token.
*/
protected DelegationTokenInformation getTokenInfo(TokenIdent ident) {
return currentTokens.get(ident);
@@ -314,7 +345,11 @@ protected DelegationTokenInformation getTokenInfo(TokenIdent ident) {
/**
* For subclasses externalizing the storage, for example Zookeeper
- * based implementations
+ * based implementations.
+ *
+ * @param ident the token identifier.
+ * @param tokenInfo the token information.
+ * @throws IOException raised on errors performing I/O.
*/
protected void storeToken(TokenIdent ident,
DelegationTokenInformation tokenInfo) throws IOException {
@@ -325,7 +360,11 @@ protected void storeToken(TokenIdent ident,
/**
* For subclasses externalizing the storage, for example Zookeeper
- * based implementations
+ * based implementations.
+ *
+ * @param ident the token identifier.
+ * @param tokenInfo the token information.
+ * @throws IOException raised on errors performing I/O.
*/
protected void updateToken(TokenIdent ident,
DelegationTokenInformation tokenInfo) throws IOException {
@@ -341,7 +380,7 @@ protected void updateToken(TokenIdent ident,
* startThreads() is called)
* @param identifier identifier read from persistent storage
* @param renewDate token renew time
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized void addPersistedDelegationToken(
TokenIdent identifier, long renewDate) throws IOException {
@@ -460,6 +499,10 @@ protected synchronized byte[] createPassword(TokenIdent identifier) {
* Find the DelegationTokenInformation for the given token id, and verify that
* if the token is expired. Note that this method should be called with
* acquiring the secret manager's monitor.
+ *
+ * @param identifier the token identifier.
+ * @throws InvalidToken if the token is invalid or expired.
+ * @return the DelegationTokenInformation for the token.
*/
protected DelegationTokenInformation checkToken(TokenIdent identifier)
throws InvalidToken {
@@ -503,7 +546,7 @@ public synchronized String getTokenTrackingId(TokenIdent identifier) {
* Verifies that the given identifier and password are valid and match.
* @param identifier Token identifier.
* @param password Password in the token.
- * @throws InvalidToken
+ * @throws InvalidToken if the token is invalid or the password does not match.
*/
public synchronized void verifyToken(TokenIdent identifier, byte[] password)
throws InvalidToken {
@@ -577,6 +620,9 @@ public synchronized long renewToken(Token token,
/**
* Cancel a token by removing it from cache.
+ *
+ * @param token the token to cancel.
+ * @param canceller name of the canceller.
* @return Identifier of the canceled token
* @throws InvalidToken for invalid token
* @throws AccessControlException if the user isn't allowed to cancel
@@ -640,15 +686,25 @@ public DelegationTokenInformation(long renewDate, byte[] password,
this.password = password;
this.trackingId = trackingId;
}
- /** returns renew date */
+ /**
+ * Returns the renew date.
+ * @return renew date
+ */
public long getRenewDate() {
return renewDate;
}
- /** returns password */
+ /**
+ * Returns the password.
+ * @return password
+ */
byte[] getPassword() {
return password;
}
- /** returns tracking id */
+
+ /**
+ * Returns the tracking id.
+ * @return tracking id
+ */
public String getTrackingId() {
return trackingId;
}
@@ -753,7 +809,7 @@ public void run() {
*
* @param token the token where to extract the identifier
* @return the delegation token identifier
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public TokenIdent decodeTokenIdentifier(Token token) throws IOException {
return token.decodeIdentifier();
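As a usage sketch of the decode helper documented above (the wrapper class and method names here are illustrative, not part of the patch), assuming only the public Token API:

    import java.io.IOException;
    import org.apache.hadoop.security.token.Token;

    // Minimal sketch: Token.decodeIdentifier() resolves the identifier
    // class from the token kind and deserializes the raw identifier bytes.
    public final class TokenDecodeExample {
      public static void printIdentifier(Token<?> token) throws IOException {
        System.out.println("kind=" + token.getKind()
            + " identifier=" + token.decodeIdentifier());
      }
    }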
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
index 9304b483952d0..d50dbc8f3efca 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
@@ -52,7 +52,12 @@ R get(long timeout, TimeUnit unit)
/** Utility */
class Util {
- /** Use {@link #get(long, TimeUnit)} timeout parameters to wait. */
+ /**
+ * Use {@link #get(long, TimeUnit)} timeout parameters to wait.
+ * @param obj the object to wait on.
+ * @param timeout wait timeout; a negative value waits forever.
+ * @param unit time unit of the timeout.
+ */
public static void wait(Object obj, long timeout, TimeUnit unit)
throws InterruptedException {
if (timeout < 0) {
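A minimal sketch of the wait semantics documented above (the lock field and method are illustrative): a negative timeout blocks until notified, otherwise the wait is bounded by the given duration. The caller must hold the object's monitor, as with Object.wait().

    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.util.concurrent.AsyncGet;

    public final class WaitExample {
      private final Object lock = new Object();

      void awaitNotification() throws InterruptedException {
        synchronized (lock) {
          // Bounded wait; a negative timeout would wait indefinitely.
          AsyncGet.Util.wait(lock, 3, TimeUnit.SECONDS);
        }
      }
    }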
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
index ef9cec6677302..f818556077f00 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
@@ -83,8 +83,11 @@ public void close() {
/**
* Utility method to fetch the ZK ACLs from the configuration.
+ *
+ * @param conf configuration
* @throws java.io.IOException if the Zookeeper ACLs configuration file
* cannot be read
+ * @return the list of ZK ACLs.
*/
public static List getZKAcls(Configuration conf) throws IOException {
// Parse authentication from configuration.
@@ -102,9 +105,12 @@ public static List getZKAcls(Configuration conf) throws IOException {
/**
* Utility method to fetch ZK auth info from the configuration.
+ *
+ * @param conf configuration
* @throws java.io.IOException if the Zookeeper ACLs configuration file
* cannot be read
* @throws ZKUtil.BadAuthFormatException if the auth format is invalid
+ * @return the list of ZK auth info.
*/
public static List getZKAuths(Configuration conf)
throws IOException {
@@ -167,7 +173,7 @@ public void start(List authInfos) throws IOException {
* Get ACLs for a ZNode.
* @param path Path of the ZNode.
* @return The list of ACLs.
- * @throws Exception
+ * @throws Exception If it cannot contact Zookeeper.
*/
public List getACL(final String path) throws Exception {
return curator.getACL().forPath(path);
@@ -186,7 +192,7 @@ public byte[] getData(final String path) throws Exception {
/**
* Get the data in a ZNode.
* @param path Path of the ZNode.
- * @param stat
+ * @param stat stat object to be populated with the ZNode metadata.
* @return The data in the ZNode.
* @throws Exception If it cannot contact Zookeeper.
*/
@@ -363,7 +369,9 @@ public void safeCreate(String path, byte[] data, List acl,
/**
* Deletes the path. Checks for existence of path as well.
+ *
* @param path Path to be deleted.
+ * @param fencingNodePath path of the fencing node.
* @throws Exception if any problem occurs while performing deletion.
*/
public void safeDelete(final String path, List fencingACL,
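A short sketch of the ACL utility documented above, assuming only a default Configuration (the class name is illustrative):

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.curator.ZKCuratorManager;
    import org.apache.zookeeper.data.ACL;

    public final class ZkAclExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Parses the configured ZK ACLs (and the ACL config file, if set).
        List<ACL> acls = ZKCuratorManager.getZKAcls(conf);
        System.out.println("parsed " + acls.size() + " ZK ACL entries");
      }
    }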
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java
index 32e299b4d45b1..89c4568a56075 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java
@@ -89,7 +89,9 @@ public static CompletableFuture submit(final Executor executor,
/**
* Wait for a list of futures to complete. If the list is empty,
* return immediately.
+ *
* @param futures list of futures.
+ * @param <T> type of the futures.
* @throws IOException if one of the called futures raised an IOE.
* @throws RuntimeException if one of the futures raised one.
*/
@@ -105,6 +107,8 @@ public static void waitForCompletion(
/**
* Wait for a single of future to complete, extracting IOEs afterwards.
+ *
+ * @param <T> type of the future.
* @param future future to wait for.
* @throws IOException if one of the called futures raised an IOE.
* @throws RuntimeException if one of the futures raised one.
@@ -124,6 +128,7 @@ public static void waitForCompletion(final CompletableFuture future)
/**
* Wait for a single of future to complete, ignoring exceptions raised.
* @param future future to wait for.
+ * @param <T> type of the future.
*/
public static void waitForCompletionIgnoringExceptions(
@Nullable final CompletableFuture future) {
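A sketch of how the submit/wait pair above is typically combined (executor sizing and the computed value are arbitrary):

    import java.io.IOException;
    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import static org.apache.hadoop.util.functional.CommonCallableSupplier.submit;
    import static org.apache.hadoop.util.functional.CommonCallableSupplier.waitForCompletion;

    public final class SupplierExample {
      public static void main(String[] args) throws IOException {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        try {
          CompletableFuture<String> future = submit(pool, () -> "done");
          // Blocks until completion, extracting any IOException directly.
          waitForCompletion(future);
        } finally {
          pool.shutdown();
        }
      }
    }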
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java
index 68261a22e44f4..bc4c91ae9c078 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java
@@ -99,6 +99,7 @@ public static RemoteIterator remoteIteratorFromSingleton(
/**
* Create a remote iterator from a java.util.Iterator.
* @param <T> type
+ * @param iterator source iterator.
* @return a remote iterator
*/
public static RemoteIterator remoteIteratorFromIterator(
@@ -110,6 +111,7 @@ public static RemoteIterator remoteIteratorFromIterator(
* Create a remote iterator from a java.util.Iterable -e.g. a list
* or other collection.
* @param <T> type
+ * @param iterable source iterable.
* @return a remote iterator
*/
public static RemoteIterator remoteIteratorFromIterable(
@@ -120,6 +122,7 @@ public static RemoteIterator remoteIteratorFromIterable(
/**
* Create a remote iterator from an array.
* @param <T> type
+ * @param array source array.
* @return a remote iterator
*/
public static RemoteIterator remoteIteratorFromArray(T[] array) {
@@ -158,10 +161,11 @@ public static RemoteIterator typeCastingRemoteIterator(
* Create a RemoteIterator from a RemoteIterator and a filter
* function which returns true for every element to be passed
* through.
- *
+ *
* Elements are filtered in the hasNext() method; if not used
* the filtering will be done on demand in the {@code next()}
* call.
+ *
* @param <T> type
* @param iterator source
* @param filter filter
@@ -218,16 +222,16 @@ public static T[] toArray(RemoteIterator source,
/**
* Apply an operation to all values of a RemoteIterator.
- *
+ *
* If the iterator is an IOStatisticsSource returning a non-null
* set of statistics, and this classes log is set to DEBUG,
* then the statistics of the operation are evaluated and logged at
* debug.
- *
+ *
* The number of entries processed is returned, as it is useful to
* know this, especially during tests or when reporting values
* to users.
- *
+ *
* This does not close the iterator afterwards.
* @param source iterator source
* @param consumer consumer of the values.
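A sketch tying the factory and filter methods above together (values are arbitrary); filtering happens lazily in hasNext()/next(), as the javadoc describes:

    import java.io.IOException;
    import java.util.Arrays;
    import org.apache.hadoop.fs.RemoteIterator;
    import org.apache.hadoop.util.functional.RemoteIterators;

    public final class IteratorExample {
      public static void main(String[] args) throws IOException {
        RemoteIterator<Integer> all =
            RemoteIterators.remoteIteratorFromIterable(Arrays.asList(1, 2, 3, 4));
        RemoteIterator<Integer> evens =
            RemoteIterators.filteringRemoteIterator(all, i -> i % 2 == 0);
        while (evens.hasNext()) {
          System.out.println(evens.next());   // prints 2, then 4
        }
      }
    }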
From 963b9077cc2803335c69d12129e604093981684e Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Fri, 13 May 2022 17:19:12 -0700
Subject: [PATCH 36/53] HADOOP-18229. Fix some java doc compilation 3 warnings.
---
.../src/main/java/org/apache/hadoop/io/MapFile.java | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
index 51db0b3f0afef..9612616d84bb1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
@@ -154,7 +154,9 @@ public Writer(Configuration conf, FileSystem fs, String dirName,
valueClass(valClass));
}
- /** Create the named map using the named key comparator.
+ /** Create the named map using the named key comparator.
+ * @param conf configuration
+ * @param fs filesystem
* @deprecated Use Writer(Configuration, Path, Option...) instead.
*/
@Deprecated
From 1757b3eb06302d7dd32200aad21a0d1f43d74ae0 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Fri, 13 May 2022 17:24:41 -0700
Subject: [PATCH 37/53] HADOOP-18229. Fix some java doc compilation 1 warnings.
---
.../src/main/java/org/apache/hadoop/io/MapFile.java | 1 +
1 file changed, 1 insertion(+)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
index 9612616d84bb1..c6cba88304971 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
@@ -145,6 +145,7 @@ public Writer(Configuration conf, FileSystem fs, String dirName,
/** Create the named map using the named key comparator.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
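For contrast with the deprecated constructor documented above, a sketch of the Option-based replacement the @deprecated tag points at (the path is arbitrary):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.MapFile;
    import org.apache.hadoop.io.Text;

    public final class MapFileWriterExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        try (MapFile.Writer writer = new MapFile.Writer(conf,
            new Path("/tmp/example.map"),
            MapFile.Writer.keyClass(Text.class),
            MapFile.Writer.valueClass(Text.class))) {
          writer.append(new Text("a"), new Text("1"));
        }
      }
    }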
From 5bcd161f3b1ef91fb5b4a0fb8e2be40f211ee45c Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Fri, 13 May 2022 18:47:16 -0700
Subject: [PATCH 38/53] HADOOP-18229. Fix some java doc compilation 150
warnings. AbstractMapWritable.java warning: no @param for clazz etc,
ArrayFile.java warning: no @param for fs etc, ArrayPrimitiveWritable.java
warning: no @param for componentType, BinaryComparable.java warning: no
@return, HttpServer2.java warning: no description for @throws,
IOStatisticsBinding.java warning: no @param for , MapFile.java warning: no
@param for fs, MetricsSystem.java warning: no description for @exception,
MetricsSystemMXBean.java warning: no description for @throws,
OperationDuration.java: warning: empty tag, WritableComparator.java:
warning: no @param for keyClass
---
.../statistics/impl/IOStatisticsBinding.java | 4 +
.../org/apache/hadoop/http/HttpServer2.java | 13 ++
.../apache/hadoop/io/AbstractMapWritable.java | 22 +++-
.../java/org/apache/hadoop/io/ArrayFile.java | 69 +++++++++--
.../hadoop/io/ArrayPrimitiveWritable.java | 4 +-
.../apache/hadoop/io/BinaryComparable.java | 9 ++
.../java/org/apache/hadoop/io/MapFile.java | 117 ++++++++++++++++--
.../apache/hadoop/io/WritableComparator.java | 12 +-
.../apache/hadoop/metrics2/MetricsSystem.java | 6 +-
.../hadoop/metrics2/MetricsSystemMXBean.java | 10 +-
.../apache/hadoop/util/OperationDuration.java | 4 +-
11 files changed, 232 insertions(+), 38 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java
index c45dfc21a1b1d..6a5d01fb3b074 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java
@@ -141,6 +141,7 @@ public static String entryToString(
/**
* Convert entry values to the string format used in logging.
*
+ * @param <E> type of values.
* @param name statistic name
* @param value stat value
* @return formatted string
@@ -178,6 +179,8 @@ private static Map copyMap(
/**
* A passthrough copy operation suitable for immutable
* types, including numbers.
+ *
+ * @param <E> type of values.
* @param src source object
* @return the source object
*/
@@ -437,6 +440,7 @@ public static Function trackJavaFunctionDuration(
* @param input input callable.
* @param <B> return type.
* @return the result of the operation.
+ * @throws IOException raised on errors performing I/O.
*/
public static B trackDuration(
DurationTrackerFactory factory,
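A sketch of the duration-tracking wrapper documented above; the statistic name is arbitrary and the factory is assumed to be supplied by the caller (for example a filesystem's statistics context):

    import java.io.IOException;
    import org.apache.hadoop.fs.statistics.DurationTrackerFactory;
    import org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding;

    public final class DurationTrackingExample {
      static String timedOperation(DurationTrackerFactory factory)
          throws IOException {
        // Times the callable under the statistic "op"; a raised
        // IOException is recorded as a failure and rethrown.
        return IOStatisticsBinding.trackDuration(factory, "op", () -> "value");
      }
    }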
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index 49807ac4b4597..5abe36653e37b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -270,6 +270,7 @@ public Builder setName(String name){
* specifies the binding address, and the port specifies the
* listening port. Unspecified or zero port means that the server
* can listen to any port.
+ * @return Builder
*/
public Builder addEndpoint(URI endpoint) {
endpoints.add(endpoint);
@@ -280,6 +281,9 @@ public Builder addEndpoint(URI endpoint) {
* Set the hostname of the http server. The host name is used to resolve the
* _HOST field in Kerberos principals. The hostname of the first listener
* will be used if the name is unspecified.
+ *
+ * @param hostName the host name.
+ * @return Builder
*/
public Builder hostName(String hostName) {
this.hostName = hostName;
@@ -308,6 +312,9 @@ public Builder keyPassword(String password) {
/**
* Specify whether the server should authorize the client in SSL
* connections.
+ *
+ * @param value true to require client SSL authentication.
+ * @return Builder
*/
public Builder needsClientAuth(boolean value) {
this.needsClientAuth = value;
@@ -332,6 +339,9 @@ public Builder setConf(Configuration conf) {
/**
* Specify the SSL configuration to load. This API provides an alternative
* to keyStore/keyPassword/trustStore.
+ *
+ * @param sslCnf the SSL configuration to load.
+ * @return Builder
*/
public Builder setSSLConf(Configuration sslCnf) {
this.sslConf = sslCnf;
@@ -1610,6 +1620,7 @@ public String toString() {
* @param request the servlet request.
* @param response the servlet response.
* @return TRUE/FALSE based on the logic decribed above.
+ * @throws IOException raised on errors performing I/O.
*/
public static boolean isInstrumentationAccessAllowed(
ServletContext servletContext, HttpServletRequest request,
@@ -1631,6 +1642,8 @@ public static boolean isInstrumentationAccessAllowed(
* Does the user sending the HttpServletRequest has the administrator ACLs? If
* it isn't the case, response will be modified to send an error to the user.
*
+ * @param servletContext the servlet context.
+ * @param request the HTTP request.
* @param response used to send the error response if user does not have admin access.
* @return true if admin-authorized, false otherwise
* @throws IOException
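A sketch of the Builder flow these tags document (name and endpoint are arbitrary; port 0 requests any free port, per the addEndpoint javadoc):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.http.HttpServer2;

    public final class HttpServerExample {
      public static void main(String[] args) throws Exception {
        HttpServer2 server = new HttpServer2.Builder()
            .setName("example")
            .addEndpoint(URI.create("http://localhost:0"))
            .setConf(new Configuration())
            .build();
        server.start();
        server.stop();
      }
    }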
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
index eef74628e16b1..616d5ebccf05a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
@@ -84,7 +84,10 @@ private synchronized void addToMap(Class> clazz, byte id) {
idToClassMap.put(id, clazz);
}
- /** Add a Class to the maps if it is not already present. */
+ /**
+ * Add a Class to the maps if it is not already present.
+ * @param clazz the class to add.
+ */
protected synchronized void addToMap(Class<?> clazz) {
if (classToIdMap.containsKey(clazz)) {
return;
@@ -97,17 +100,28 @@ protected synchronized void addToMap(Class> clazz) {
addToMap(clazz, id);
}
- /** @return the Class class for the specified id */
+ /**
+ * Get the Class for the specified id.
+ * @param id the id to look up
+ * @return the Class for the specified id
+ */
protected Class<?> getClass(byte id) {
return idToClassMap.get(id);
}
- /** @return the id for the specified Class */
+ /**
+ * Get the id for the specified Class.
+ * @param clazz the class to look up
+ * @return the id for the specified Class
+ */
protected byte getId(Class<?> clazz) {
return classToIdMap.containsKey(clazz) ? classToIdMap.get(clazz) : -1;
}
- /** Used by child copy constructors. */
+ /**
+ * Used by child copy constructors.
+ * @param other the Writable to copy from.
+ */
protected synchronized void copy(Writable other) {
if (other != null) {
try {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java
index bee5fd2cb430c..b51be38f0aa8c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java
@@ -38,7 +38,15 @@ protected ArrayFile() {} // no public ctor
public static class Writer extends MapFile.Writer {
private LongWritable count = new LongWritable(0);
- /** Create the named file for values of the named class. */
+ /**
+ * Create the named file for values of the named class.
+ *
+ * @param conf configuration
+ * @param fs file system
+ * @param file name of the file.
+ * @param valClass class of the values.
+ * @throws IOException raised on errors performing I/O.
+ */
public Writer(Configuration conf, FileSystem fs,
String file, Class<? extends Writable> valClass)
throws IOException {
@@ -46,7 +54,17 @@ public Writer(Configuration conf, FileSystem fs,
valueClass(valClass));
}
- /** Create the named file for values of the named class. */
+ /**
+ * Create the named file for values of the named class.
+ *
+ * @param conf configuration
+ * @param fs file system
+ * @param file name of the file.
+ * @param valClass class of the values.
+ * @param compress compression type.
+ * @param progress progress reporter.
+ * @throws IOException raised on errors performing I/O.
+ */
public Writer(Configuration conf, FileSystem fs,
String file, Class<? extends Writable> valClass,
CompressionType compress, Progressable progress)
@@ -58,7 +76,11 @@ public Writer(Configuration conf, FileSystem fs,
progressable(progress));
}
- /** Append a value to the file. */
+ /**
+ * Append a value to the file.
+ * @param value value to append.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized void append(Writable value) throws IOException {
super.append(count, value); // add to map
count.set(count.get()+1); // increment count
@@ -69,31 +91,60 @@ public synchronized void append(Writable value) throws IOException {
public static class Reader extends MapFile.Reader {
private LongWritable key = new LongWritable();
- /** Construct an array reader for the named file.*/
+ /**
+ * Construct an array reader for the named file.
+ * @param fs FileSystem
+ * @param file name of the file.
+ * @param conf configuration
+ * @throws IOException raised on errors performing I/O.
+ */
public Reader(FileSystem fs, String file,
Configuration conf) throws IOException {
super(new Path(file), conf);
}
- /** Positions the reader before its nth value. */
+ /**
+ * Positions the reader before its nth value.
+ *
+ * @param n index of the value to seek to.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized void seek(long n) throws IOException {
key.set(n);
seek(key);
}
- /** Read and return the next value in the file. */
+ /**
+ * Read and return the next value in the file.
+ *
+ * @param value value to read into.
+ * @throws IOException raised on errors performing I/O.
+ * @return the next value, or null at the end of the file.
+ */
public synchronized Writable next(Writable value) throws IOException {
return next(key, value) ? value : null;
}
- /** Returns the key associated with the most recent call to {@link
+ /**
+ * Returns the key associated with the most recent call to {@link
* #seek(long)}, {@link #next(Writable)}, or {@link
- * #get(long,Writable)}. */
+ * #get(long,Writable)}.
+ *
+ * @return the key.
+ * @throws IOException raised on errors performing I/O.
+ * @return seek long
+ */
public synchronized long key() throws IOException {
return key.get();
}
- /** Return the nth value in the file. */
+ /**
+ * Return the nth value in the file.
+ * @param n index of the value to return.
+ * @param value value to read into.
+ * @throws IOException raised on errors performing I/O.
+ * @return the value.
+ */
public synchronized Writable get(long n, Writable value)
throws IOException {
key.set(n);
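A sketch of the Writer/Reader pair documented above: an ArrayFile stores values under implicit long keys 0..n-1, so get(n, value) fetches the nth entry (the path is arbitrary):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.ArrayFile;
    import org.apache.hadoop.io.Text;

    public final class ArrayFileExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        ArrayFile.Writer writer =
            new ArrayFile.Writer(conf, fs, "/tmp/example.array", Text.class);
        writer.append(new Text("first"));   // stored under key 0
        writer.close();

        ArrayFile.Reader reader =
            new ArrayFile.Reader(fs, "/tmp/example.array", conf);
        Text value = new Text();
        reader.get(0, value);               // reads back "first"
        reader.close();
      }
    }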
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java
index 2b6f3166bc282..adafe0412bc83 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java
@@ -106,7 +106,9 @@ public ArrayPrimitiveWritable() {
/**
* Construct an instance of known type but no value yet
- * for use with type-specific wrapper classes
+ * for use with type-specific wrapper classes.
+ *
+ * @param componentType component type of the array.
*/
public ArrayPrimitiveWritable(Class<?> componentType) {
checkPrimitive(componentType);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java
index a32c44c8e5058..24ad68fab0176 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java
@@ -31,11 +31,15 @@ public abstract class BinaryComparable implements Comparable {
/**
* Return n st bytes 0..n-1 from {#getBytes()} are valid.
+ *
+ * @return the number of valid bytes.
*/
public abstract int getLength();
/**
* Return representative byte array for this instance.
+ *
+ * @return the byte array for this instance.
*/
public abstract byte[] getBytes();
@@ -53,6 +57,11 @@ public int compareTo(BinaryComparable other) {
/**
* Compare bytes from {#getBytes()} to those provided.
+ *
+ * @param other bytes to compare to.
+ * @param off offset into other.
+ * @param len number of bytes to compare.
+ * @return comparison result.
*/
public int compareTo(byte[] other, int off, int len) {
return WritableComparator.compareBytes(getBytes(), 0, getLength(),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
index c6cba88304971..5519507848253 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
@@ -98,8 +98,16 @@ public static class Writer implements java.io.Closeable {
private long lastIndexKeyCount = Long.MIN_VALUE;
- /** Create the named map for keys of the named class.
+ /**
+ * Create the named map for keys of the named class.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ *
+ * @param conf configuration
+ * @param fs filesystem
+ * @param dirName name of the map directory.
+ * @param keyClass class of the keys.
+ * @param valClass class of the values.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -108,8 +116,18 @@ public Writer(Configuration conf, FileSystem fs, String dirName,
this(conf, new Path(dirName), keyClass(keyClass), valueClass(valClass));
}
- /** Create the named map for keys of the named class.
+ /**
+ * Create the named map for keys of the named class.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ *
+ * @param conf configuration
+ * @param fs filesystem
+ * @param dirName name of the map directory.
+ * @param keyClass class of the keys.
+ * @param valClass class of the values.
+ * @param compress compression type.
+ * @param progress progress reporter.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -120,8 +138,19 @@ public Writer(Configuration conf, FileSystem fs, String dirName,
compression(compress), progressable(progress));
}
- /** Create the named map for keys of the named class.
+ /**
+ * Create the named map for keys of the named class.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ *
+ * @param conf configuration
+ * @param fs filesystem
+ * @param dirName name of the map directory.
+ * @param keyClass class of the keys.
+ * @param valClass class of the values.
+ * @param compress compression type.
+ * @param codec compression codec.
+ * @param progress progress reporter.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -132,8 +161,16 @@ public Writer(Configuration conf, FileSystem fs, String dirName,
compression(compress, codec), progressable(progress));
}
- /** Create the named map for keys of the named class.
+ /**
+ * Create the named map for keys of the named class.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ * @param conf configuration
+ * @param fs filesystem
+ * @param dirName name of the map directory.
+ * @param keyClass class of the keys.
+ * @param valClass class of the values.
+ * @param compress compression type.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -145,6 +182,11 @@ public Writer(Configuration conf, FileSystem fs, String dirName,
/** Create the named map using the named key comparator.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ * @param conf configuration
+ * @param fs filesystem
+ * @param dirName name of the map directory.
+ * @param comparator comparator for the keys.
+ * @param valClass class of the values.
* @throws IOException raised on errors performing I/O.
*/
@Deprecated
@@ -158,6 +200,11 @@ public Writer(Configuration conf, FileSystem fs, String dirName,
/** Create the named map using the named key comparator.
* @param conf configuration
* @param fs filesystem
+ * @param dirName name of the map directory.
+ * @param comparator comparator for the keys.
+ * @param valClass class of the values.
+ * @param compress compression type.
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
*/
@Deprecated
@@ -168,8 +215,18 @@ public Writer(Configuration conf, FileSystem fs, String dirName,
valueClass(valClass), compression(compress));
}
- /** Create the named map using the named key comparator.
+ /**
+ * Create the named map using the named key comparator.
* @deprecated Use Writer(Configuration, Path, Option...)} instead.
+ *
+ * @param conf configuration
+ * @param fs filesystem
+ * @param dirName name of the map directory.
+ * @param comparator comparator for the keys.
+ * @param valClass class of the values.
+ * @param compress compression type.
+ * @param progress progress reporter.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -181,8 +238,19 @@ public Writer(Configuration conf, FileSystem fs, String dirName,
progressable(progress));
}
- /** Create the named map using the named key comparator.
+ /**
+ * Create the named map using the named key comparator.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ *
+ * @param conf configuration
+ * @param fs filesystem
+ * @param dirName name of the map directory.
+ * @param comparator comparator for the keys.
+ * @param valClass class of the values.
+ * @param compress compression type.
+ * @param codec compression codec.
+ * @param progress progress reporter.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -288,16 +356,26 @@ public Writer(Configuration conf,
this.index = SequenceFile.createWriter(conf, indexOptions);
}
- /** The number of entries that are added before an index entry is added.*/
+ /**
+ * The number of entries that are added before an index entry is added.
+ * @return the index interval.
+ */
public int getIndexInterval() { return indexInterval; }
- /** Sets the index interval.
+ /**
+ * Sets the index interval.
* @see #getIndexInterval()
+ *
+ * @param interval the index interval.
*/
public void setIndexInterval(int interval) { indexInterval = interval; }
- /** Sets the index interval and stores it in conf
+ /**
+ * Sets the index interval and stores it in conf.
* @see #getIndexInterval()
+ *
+ * @param conf configuration
+ * @param interval the index interval.
*/
public static void setIndexInterval(Configuration conf, int interval) {
conf.setInt(INDEX_INTERVAL, interval);
@@ -310,8 +388,14 @@ public synchronized void close() throws IOException {
index.close();
}
- /** Append a key/value pair to the map. The key must be greater or equal
- * to the previous key added to the map. */
+ /**
+ * Append a key/value pair to the map. The key must be greater or equal
+ * to the previous key added to the map.
+ *
+ * @param key key to append.
+ * @param val value to append.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized void append(WritableComparable key, Writable val)
throws IOException {
@@ -672,9 +756,16 @@ else if (cmp > 0)
return -(low + 1); // key not found.
}
- /** Read the next key/value pair in the map into key and
+ /**
+ * Read the next key/value pair in the map into key and
* val. Returns true if such a pair exists and false when at
- * the end of the map */
+ * the end of the map.
+ *
+ * @param key WritableComparable
+ * @param val Writable
+ * @return true if such a pair exists, false otherwise.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized boolean next(WritableComparable key, Writable val)
throws IOException {
return data.next(key, val);
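A sketch of the append ordering rule stated above: keys must arrive in non-decreasing order, and an out-of-order append fails (path and values are arbitrary):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapFile;
    import org.apache.hadoop.io.Text;

    public final class MapFileOrderExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        try (MapFile.Writer writer = new MapFile.Writer(conf,
            new Path("/tmp/ordered.map"),
            MapFile.Writer.keyClass(IntWritable.class),
            MapFile.Writer.valueClass(Text.class))) {
          writer.append(new IntWritable(1), new Text("one"));
          writer.append(new IntWritable(2), new Text("two"));
          // Appending key 1 again here would throw an IOException.
        }
      }
    }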
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
index 1754b8d06f6fa..2f90a084ddf19 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
@@ -46,7 +46,12 @@ public class WritableComparator implements RawComparator, Configurable {
private Configuration conf;
- /** For backwards compatibility. **/
+ /**
+ * For backwards compatibility.
+ *
+ * @param c class of the WritableComparable.
+ * @return a WritableComparator for the class.
+ */
public static WritableComparator get(Class<? extends WritableComparable> c) {
return get(c, null);
}
@@ -111,7 +116,10 @@ protected WritableComparator() {
this(null);
}
- /** Construct for a {@link WritableComparable} implementation. */
+ /**
+ * Construct for a {@link WritableComparable} implementation.
+ * @param keyClass class of the keys to compare.
+ */
protected WritableComparator(Class<? extends WritableComparable> keyClass) {
this(keyClass, null, false);
}
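A sketch of the factory method documented above; Text registers an optimized raw comparator, so get() returns that rather than the generic deserializing one:

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.WritableComparator;

    public final class ComparatorExample {
      public static void main(String[] args) {
        WritableComparator comparator = WritableComparator.get(Text.class);
        int result = comparator.compare(new Text("a"), new Text("b"));
        System.out.println(result < 0);   // true: "a" sorts before "b"
      }
    }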
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
index a277abd6e1384..e4693ed775e2d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
@@ -50,7 +50,7 @@ public abstract class MetricsSystem implements MetricsSystemMXBean {
* the annotations of the source object.)
* @param desc the description of the source (or null. See above.)
* @return the source object
- * @exception MetricsException
+ * @exception MetricsException if the source cannot be registered.
*/
public abstract T register(String name, String desc, T source);
@@ -65,7 +65,7 @@ public abstract class MetricsSystem implements MetricsSystemMXBean {
* @param the actual type of the source object
* @param source object to register
* @return the source object
- * @exception MetricsException
+ * @exception MetricsException if the source cannot be registered.
*/
public T register(T source) {
return register(null, null, source);
@@ -85,7 +85,7 @@ public T register(T source) {
* @param name of the sink. Must be unique.
* @param desc the description of the sink
* @return the sink
- * @exception MetricsException
+ * @exception MetricsException if the sink cannot be registered.
*/
public abstract
T register(String name, String desc, T sink);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java
index e471ab7498ce4..f0fd7689b8604 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java
@@ -29,19 +29,19 @@
public interface MetricsSystemMXBean {
/**
* Start the metrics system
- * @throws MetricsException
+ * @throws MetricsException if the metrics system fails to start.
*/
public void start();
/**
* Stop the metrics system
- * @throws MetricsException
+ * @throws MetricsException if the metrics system fails to stop.
*/
public void stop();
/**
* Start metrics MBeans
- * @throws MetricsException
+ * @throws MetricsException if the MBeans cannot be started.
*/
public void startMetricsMBeans();
@@ -49,7 +49,7 @@ public interface MetricsSystemMXBean {
* Stop metrics MBeans.
* Note, it doesn't stop the metrics system control MBean,
* i.e this interface.
- * @throws MetricsException
+ * @throws MetricsException if the MBeans cannot be stopped.
*/
public void stopMetricsMBeans();
@@ -57,7 +57,7 @@ public interface MetricsSystemMXBean {
* @return the current config
* Avoided getConfig, as it'll turn into a "Config" attribute,
* which doesn't support multiple line values in jconsole.
- * @throws MetricsException
+ * @throws MetricsException if the config cannot be retrieved.
*/
public String currentConfig();
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/OperationDuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/OperationDuration.java
index fdd25286a2300..1fb920e99f08e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/OperationDuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/OperationDuration.java
@@ -95,9 +95,11 @@ public String toString() {
/**
* Get the duration in milliseconds.
- *
+ *
+ *
* This will be 0 until a call
* to {@link #finished()} has been made.
+ *
* @return the currently recorded duration.
*/
public long value() {
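A sketch of the lifecycle value() implies: timing starts at construction, and value() only reflects elapsed time once finished() has been called:

    import org.apache.hadoop.util.OperationDuration;

    public final class DurationValueExample {
      public static void main(String[] args) throws InterruptedException {
        OperationDuration duration = new OperationDuration(); // starts timing
        Thread.sleep(100);
        duration.finished();
        System.out.println("took " + duration.value() + " ms");
      }
    }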
From 0f7af84b0059923753a0ced329efb4ebeba691f3 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Fri, 13 May 2022 19:32:04 -0700
Subject: [PATCH 39/53] HADOOP-18229. Fix some java doc compilation 150
warnings.
---
.../hadoop/fs/FSDataOutputStreamBuilder.java | 2 +-
.../hadoop/fs/shell/find/BaseExpression.java | 2 +
.../hadoop/ha/ActiveStandbyElector.java | 1 +
.../apache/hadoop/ha/HAServiceProtocol.java | 2 +-
.../org/apache/hadoop/ha/HealthMonitor.java | 3 +
.../hadoop/ha/ZKFailoverController.java | 2 +
.../org/apache/hadoop/http/HtmlQuoting.java | 1 +
.../org/apache/hadoop/http/HttpServer2.java | 23 +++-
.../java/org/apache/hadoop/io/ArrayFile.java | 1 -
.../org/apache/hadoop/io/BloomMapFile.java | 2 +-
.../org/apache/hadoop/io/BooleanWritable.java | 9 +-
.../io/BoundedByteArrayOutputStream.java | 14 ++-
.../org/apache/hadoop/io/ByteWritable.java | 10 +-
.../org/apache/hadoop/io/BytesWritable.java | 4 +
.../apache/hadoop/io/CompressedWritable.java | 13 ++-
.../org/apache/hadoop/io/DataInputBuffer.java | 23 +++-
.../apache/hadoop/io/DataOutputBuffer.java | 32 +++++-
.../org/apache/hadoop/io/EnumSetWritable.java | 15 ++-
.../java/org/apache/hadoop/io/MapFile.java | 67 +++++++++--
.../apache/hadoop/io/WritableComparator.java | 108 +++++++++++++++---
.../AbstractDelegationTokenSecretManager.java | 1 +
.../hadoop/util/concurrent/AsyncGet.java | 1 +
.../hadoop/util/curator/ZKCuratorManager.java | 1 +
.../apache/hadoop/util/hash/JenkinsHash.java | 2 +-
24 files changed, 277 insertions(+), 62 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
index 6212fa58c2228..e7d79f2a90f10 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
@@ -248,7 +248,7 @@ protected EnumSet getFlags() {
/**
* Create an FSDataOutputStream at the specified path.
*
- * return Generics Type B
+ * @return Generics Type B.
*/
public B create() {
flags.add(CreateFlag.CREATE);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
index 0f4c1771012f0..542f3e9134993 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
@@ -295,6 +295,8 @@ protected FileStatus getFileStatus(PathData item, int depth)
* @param item
* PathData
* @return Path
+ *
+ * @throws IOException raised on errors performing I/O.
*/
protected Path getPath(PathData item) throws IOException {
return item.path;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
index 7394e5fb46633..2236c9cdf4195 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
@@ -254,6 +254,7 @@ public ActiveStandbyElector(String zookeeperHostPorts,
* reference to callback interface object
* @param failFast
* whether need to add the retry when establishing ZK connection.
+ * @param maxRetryNum maximum number of retries.
* @throws IOException
* raised on errors performing I/O.
* @throws HadoopIllegalArgumentException
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
index 6eeb93012b125..66604cc39134c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
@@ -119,7 +119,7 @@ public void monitorHealth() throws HealthCheckFailedException,
* Request service to transition to active state. No operation, if the
* service is already in active state.
*
- * @param reqInfo
+ * @param reqInfo request info.
* @throws ServiceFailedException
* if transition from standby to active fails.
* @throws AccessControlException
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
index 7e90fb77a0702..f0d1f29b7f95c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
@@ -184,6 +184,9 @@ private void tryConnect() {
/**
* Connect to the service to be monitored. Stubbed out for easier testing.
+ *
+ * @throws IOException raised on errors performing I/O.
+ * @return a proxy to the service being monitored.
*/
protected HAServiceProtocol createProxy() throws IOException {
return targetToMonitor.getHealthMonitorProxy(conf, rpcTimeout, rpcConnectRetries);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
index 87a80b868cdb1..13e55ccfb3a16 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
@@ -153,6 +153,8 @@ protected abstract void checkRpcAdminAccess()
* the ZKFC will do all of its work. This is so that multiple federated
* nameservices can run on the same ZK quorum without having to manually
* configure them to separate subdirectories.
+ *
+ * @return the scope node name inside the parent node.
*/
protected abstract String getScopeInsideParentNode();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
index 51db21c185f20..5f47ddb339212 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
@@ -80,6 +80,7 @@ public static boolean needsQuoting(String str) {
* @param buffer the byte array to take the characters from
* @param off the index of the first byte to quote
* @param len the number of bytes to quote
+ * @throws IOException raised on errors performing I/O.
*/
public static void quoteHtmlChars(OutputStream output, byte[] buffer,
int off, int len) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index 5abe36653e37b..3bf3b590cb9fd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -908,8 +908,11 @@ private static FilterInitializer[] getFilterInitializers(Configuration conf) {
/**
* Add default apps.
+ *
+ * @param parent the parent ContextHandlerCollection.
* @param appDir The application directory
- * @throws IOException
+ * @param conf configuration
+ * @throws IOException raised on errors performing I/O.
*/
protected void addDefaultApps(ContextHandlerCollection parent,
final String appDir, Configuration conf) throws IOException {
@@ -1190,6 +1193,12 @@ public void addGlobalFilter(String name, String classname,
/**
* Define a filter for a context and set up default url mappings.
+ *
+ * @param ctx servlet context handler.
+ * @param name filter name.
+ * @param classname filter class name.
+ * @param parameters filter init parameters.
+ * @param urls URL patterns the filter applies to.
*/
public static void defineFilter(ServletContextHandler ctx, String name,
String classname, Map parameters, String[] urls) {
@@ -1300,6 +1309,7 @@ public int getPort() {
/**
* Get the address that corresponds to a particular connector.
*
+ * @param index index of the connector.
* @return the corresponding address for the connector, or null if there's no
* such connector or the connector is not bounded or was closed.
*/
@@ -1319,6 +1329,9 @@ public InetSocketAddress getConnectorAddress(int index) {
/**
* Set the min, max number of worker threads (simultaneous connections).
+ *
+ * @param min minimum number of worker threads.
+ * @param max maximum number of worker threads.
*/
public void setThreads(int min, int max) {
QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool();
@@ -1345,6 +1358,8 @@ private void initSpnego(Configuration conf, String hostName,
/**
* Start the server. Does not wait for the server to start.
+ *
+ * @throws IOException raised on errors performing I/O.
*/
public void start() throws IOException {
try {
@@ -1519,7 +1534,9 @@ void openListeners() throws Exception {
}
/**
- * stop the server
+ * stop the server.
+ *
+ * @throws Exception if the server fails to stop.
*/
public void stop() throws Exception {
MultiException exception = null;
@@ -1646,7 +1663,7 @@ public static boolean isInstrumentationAccessAllowed(
* @param request request
* @param response used to send the error response if user does not have admin access.
* @return true if admin-authorized, false otherwise
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static boolean hasAdministratorAccess(
ServletContext servletContext, HttpServletRequest request,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java
index b51be38f0aa8c..ce0075aedcc14 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java
@@ -132,7 +132,6 @@ public synchronized Writable next(Writable value) throws IOException {
*
* @return the key.
* @throws IOException raised on errors performing I/O.
- * @return seek long
*/
public synchronized long key() throws IOException {
return key.get();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
index 519fcd74cbb71..91ea07d5de412 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
@@ -259,7 +259,7 @@ private void initBloomFilter(Path dirName,
* probability of false positives.
* @param key key to check
* @return false iff key doesn't exist, true if key probably exists.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public boolean probablyHasKey(WritableComparable key) throws IOException {
if (bloomFilter == null) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
index 0079079a7921d..a779254fdc277 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
@@ -35,21 +35,24 @@ public class BooleanWritable implements WritableComparable {
*/
public BooleanWritable() {};
- /**
+ /**
+ * @param value the boolean value.
*/
public BooleanWritable(boolean value) {
set(value);
}
/**
- * Set the value of the BooleanWritable
+ * Set the value of the BooleanWritable.
+ * @param value the boolean value.
*/
public void set(boolean value) {
this.value = value;
}
/**
- * Returns the value of the BooleanWritable
+ * Returns the value of the BooleanWritable.
+ * @return the value of the BooleanWritable
*/
public boolean get() {
return value;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
index c27449d36189c..470e61ed1a302 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
@@ -114,20 +114,28 @@ public void reset() {
this.currentPointer = startOffset;
}
- /** Return the current limit */
+ /**
+ * Return the current limit.
+ * @return limit
+ */
public int getLimit() {
return limit;
}
- /** Returns the underlying buffer.
+ /**
+ * Returns the underlying buffer.
* Data is only valid to {@link #size()}.
+ * @return the underlying buffer
*/
public byte[] getBuffer() {
return buffer;
}
- /** Returns the length of the valid data
+ /**
+ * Returns the length of the valid data
* currently in the buffer.
+ *
+ * @return the length of the valid data
*/
public int size() {
return currentPointer - startOffset;
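A sketch of the three accessors documented above: getLimit() is the fixed capacity, while size() bounds the valid region of the array returned by getBuffer():

    import java.io.IOException;
    import org.apache.hadoop.io.BoundedByteArrayOutputStream;

    public final class BoundedBufferExample {
      public static void main(String[] args) throws IOException {
        BoundedByteArrayOutputStream out = new BoundedByteArrayOutputStream(16);
        out.write(new byte[] {1, 2, 3});
        System.out.println(out.size() + " of " + out.getLimit()); // 3 of 16
        byte[] raw = out.getBuffer();   // only bytes [0, size()) are valid
      }
    }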
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
index ffcdea2c9a3ab..86374fc4b8fa0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
@@ -33,10 +33,16 @@ public ByteWritable() {}
public ByteWritable(byte value) { set(value); }
- /** Set the value of this ByteWritable. */
+ /**
+ * Set the value of this ByteWritable.
+ * @param value the byte value.
+ */
public void set(byte value) { this.value = value; }
- /** Return the value of this ByteWritable. */
+ /**
+ * Return the value of this ByteWritable.
+ * @return the byte value.
+ */
public byte get() { return value; }
@Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
index c5538c9e56e85..2e753d489979d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
@@ -77,6 +77,8 @@ public BytesWritable(byte[] bytes, int length) {
/**
* Get a copy of the bytes that is exactly the length of the data.
* See {@link #getBytes()} for faster access to the underlying array.
+ *
+ * @return a copy of the bytes.
*/
public byte[] copyBytes() {
return Arrays.copyOf(bytes, size);
@@ -95,6 +97,7 @@ public byte[] getBytes() {
/**
* Get the data from the BytesWritable.
* @deprecated Use {@link #getBytes()} instead.
+ * @return data from the BytesWritable.
*/
@Deprecated
public byte[] get() {
@@ -112,6 +115,7 @@ public int getLength() {
/**
* Get the current size of the buffer.
* @deprecated Use {@link #getLength()} instead.
+ * @return current size of the buffer
*/
@Deprecated
public int getSize() {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
index 6550e1f2fde04..1f303a8888a04 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
@@ -67,7 +67,11 @@ protected void ensureInflated() {
}
}
- /** Subclasses implement this instead of {@link #readFields(DataInput)}. */
+ /**
+ * Subclasses implement this instead of {@link #readFields(DataInput)}.
+ * @param in data input
+ * @throws IOException raised on errors performing I/O.
+ */
protected abstract void readFieldsCompressed(DataInput in)
throws IOException;
@@ -87,7 +91,12 @@ public final void write(DataOutput out) throws IOException {
out.write(compressed);
}
- /** Subclasses implement this instead of {@link #write(DataOutput)}. */
+ /**
+ * Subclasses implement this instead of {@link #write(DataOutput)}.
+ *
+ * @param out data output
+ * @throws IOException raised on errors performing I/O.
+ */
protected abstract void writeCompressed(DataOutput out) throws IOException;
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java
index 63c41c2e75008..e707d4a83fca3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java
@@ -140,12 +140,23 @@ private DataInputBuffer(Buffer buffer) {
this.buffer = buffer;
}
- /** Resets the data that the buffer reads. */
+ /**
+ * Resets the data that the buffer reads.
+ *
+ * @param input input bytes.
+ * @param length length of the valid data.
+ */
public void reset(byte[] input, int length) {
buffer.reset(input, 0, length);
}
- /** Resets the data that the buffer reads. */
+ /**
+ * Resets the data that the buffer reads.
+ *
+ * @param input input bytes.
+ * @param start start offset.
+ * @param length length of the valid data.
+ */
public void reset(byte[] input, int start, int length) {
buffer.reset(input, start, length);
}
@@ -154,12 +165,18 @@ public byte[] getData() {
return buffer.getData();
}
- /** Returns the current position in the input. */
+ /**
+ * Returns the current position in the input.
+ *
+ * @return position
+ */
public int getPosition() { return buffer.getPosition(); }
/**
* Returns the index one greater than the last valid character in the input
* stream buffer.
+ *
+ * @return length.
*/
public int getLength() { return buffer.getLength(); }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java
index 1d86b89701c03..fec36488b96b1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java
@@ -99,27 +99,45 @@ private DataOutputBuffer(Buffer buffer) {
this.buffer = buffer;
}
- /** Returns the current contents of the buffer.
+ /**
+ * Returns the current contents of the buffer.
* Data is only valid to {@link #getLength()}.
+ *
+ * @return the data bytes.
*/
public byte[] getData() { return buffer.getData(); }
- /** Returns the length of the valid data currently in the buffer. */
+ /**
+ * Returns the length of the valid data currently in the buffer.
+ * @return length
+ */
public int getLength() { return buffer.getLength(); }
- /** Resets the buffer to empty. */
+ /**
+ * Resets the buffer to empty.
+ * @return this buffer.
+ */
public DataOutputBuffer reset() {
this.written = 0;
buffer.reset();
return this;
}
- /** Writes bytes from a DataInput directly into the buffer. */
+ /**
+ * Writes bytes from a DataInput directly into the buffer.
+ * @param in data input
+ * @param length number of bytes to write.
+ * @throws IOException raised on errors performing I/O.
+ */
public void write(DataInput in, int length) throws IOException {
buffer.write(in, length);
}
- /** Write to a file stream */
+ /**
+ * Write to a file stream.
+ * @param out OutputStream
+ * @throws IOException raised on errors performing I/O.
+ */
public void writeTo(OutputStream out) throws IOException {
buffer.writeTo(out);
}
@@ -128,6 +146,10 @@ public void writeTo(OutputStream out) throws IOException {
* Overwrite an integer into the internal buffer. Note that this call can only
* be used to overwrite existing data in the buffer, i.e., buffer#count cannot
* be increased, and DataOutputStream#written cannot be increased.
+ *
+ * @param v the int value to write.
+ * @param offset offset at which to overwrite.
+ * @throws IOException raised on errors performing I/O.
*/
public void writeInt(int v, int offset) throws IOException {
Preconditions.checkState(offset + 4 <= buffer.getLength());
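A sketch of the overwrite semantics documented above: writeInt(v, offset) patches bytes already in the buffer, a common way to back-fill a length field (the field layout here is arbitrary):

    import java.io.IOException;
    import org.apache.hadoop.io.DataOutputBuffer;

    public final class OverwriteExample {
      public static void main(String[] args) throws IOException {
        DataOutputBuffer buffer = new DataOutputBuffer();
        buffer.writeInt(0);          // placeholder for a length field
        buffer.writeBytes("data");   // payload
        buffer.writeInt(4, 0);       // patch the placeholder at offset 0
        System.out.println("length=" + buffer.getLength());
      }
    }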
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
index be86159519b87..7482b0304e54d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
@@ -64,8 +64,8 @@ public boolean add(E e) {
* the argument value's size is bigger than zero, the argument
* elementType is not be used.
*
- * @param value
- * @param elementType
+ * @param value enumSet value
+ * @param elementType element type of the set.
*/
public EnumSetWritable(EnumSet value, Class elementType) {
set(value, elementType);
@@ -75,7 +75,7 @@ public EnumSetWritable(EnumSet value, Class elementType) {
* Construct a new EnumSetWritable. Argument value should not be null
* or empty.
*
- * @param value
+ * @param value enumSet value
*/
public EnumSetWritable(EnumSet<E> value) {
this(value, null);
@@ -88,8 +88,8 @@ public EnumSetWritable(EnumSet<E> value) {
* null. If the argument value's size is bigger than zero, the
* argument elementType is not be used.
*
- * @param value
- * @param elementType
+ * @param value enumSet value
+ * @param elementType elementType
*/
public void set(EnumSet<E> value, Class<E> elementType) {
if ((value == null || value.size() == 0)
@@ -106,7 +106,10 @@ public void set(EnumSet<E> value, Class<E> elementType) {
}
}
- /** Return the value of this EnumSetWritable. */
+ /**
+ * Return the value of this EnumSetWritable.
+ * @return EnumSet
+ */
public EnumSet<E> get() {
return value;
}
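A minimal round-trip sketch for EnumSetWritable, illustrating the constructors and get() documented above (enum type hypothetical):

    import java.util.EnumSet;
    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.EnumSetWritable;

    public class EnumSetWritableExample {
      enum Flag { CREATE, APPEND, OVERWRITE }

      public static void main(String[] args) throws Exception {
        EnumSetWritable<Flag> w =
            new EnumSetWritable<>(EnumSet.of(Flag.CREATE, Flag.APPEND));
        DataOutputBuffer out = new DataOutputBuffer();
        w.write(out);                                  // serialize

        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        // An empty set requires a non-null elementType, per the javadoc above.
        EnumSetWritable<Flag> copy =
            new EnumSetWritable<>(EnumSet.noneOf(Flag.class), Flag.class);
        copy.readFields(in);                           // deserialize
        System.out.println(copy.get());                // [CREATE, APPEND]
      }
    }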
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
index 5519507848253..bf96cd2aee87b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
@@ -457,10 +457,18 @@ public static class Reader implements java.io.Closeable {
private WritableComparable[] keys;
private long[] positions;
- /** Returns the class of keys in this file. */
+ /**
+ * Returns the class of keys in this file.
+ *
+ * @return keyClass
+ */
public Class<?> getKeyClass() { return data.getKeyClass(); }
- /** Returns the class of values in this file. */
+ /**
+ * Returns the class of values in this file.
+ *
+ * @return valueClass
+ */
public Class<?> getValueClass() { return data.getValueClass(); }
public static interface Option extends SequenceFile.Reader.Option {}
@@ -490,8 +498,14 @@ public Reader(Path dir, Configuration conf,
open(dir, comparator, conf, opts);
}
- /** Construct a map reader for the named map.
+ /**
+ * Construct a map reader for the named map.
* @deprecated
+ *
+ * @param fs FileSystem
+ * @param dirName dirName
+ * @param conf configuration
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Reader(FileSystem fs, String dirName,
@@ -537,6 +551,12 @@ protected synchronized void open(Path dir,
/**
* Override this method to specialize the type of
* {@link SequenceFile.Reader} returned.
+ *
+ * @param dataFile data file
+ * @param conf configuration
+ * @param options options
+ * @throws IOException raised on errors performing I/O.
+ * @return SequenceFile.Reader
*/
protected SequenceFile.Reader
createDataFileReader(Path dataFile, Configuration conf,
@@ -603,13 +623,21 @@ private void readIndex() throws IOException {
}
}
- /** Re-positions the reader before its first key. */
+ /**
+ * Re-positions the reader before its first key.
+ *
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized void reset() throws IOException {
data.seek(firstPosition);
}
- /** Get the key at approximately the middle of the file. Or null if the
- * file is empty.
+ /**
+ * Get the key at approximately the middle of the file. Or null if the
+ * file is empty.
+ *
+ * @throws IOException raised on errors performing I/O.
+ * @return WritableComparable
*/
public synchronized WritableComparable midKey() throws IOException {
@@ -621,9 +649,11 @@ public synchronized WritableComparable midKey() throws IOException {
return keys[(count - 1) / 2];
}
- /** Reads the final key from the file.
+ /**
+ * Reads the final key from the file.
*
* @param key key to read into
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized void finalKey(WritableComparable key)
throws IOException {
@@ -643,9 +673,14 @@ public synchronized void finalKey(WritableComparable key)
}
}
- /** Positions the reader at the named key, or if none such exists, at the
+ /**
+ * Positions the reader at the named key, or if none such exists, at the
* first entry after the named key. Returns true iff the named key exists
* in this map.
+ *
+ * @param key key
+ * @throws IOException raised on errors performing I/O.
+ * @return true if the named key exists in this map, false otherwise.
*/
public synchronized boolean seek(WritableComparable key) throws IOException {
return seekInternal(key) == 0;
@@ -771,7 +806,13 @@ public synchronized boolean next(WritableComparable key, Writable val)
return data.next(key, val);
}
- /** Return the value for the named key, or null if none exists. */
+ /**
+ * Return the value for the named key, or null if none exists.
+ * @param key key
+ * @param val val
+ * @throws Writable if such a pair exists true,not false
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized Writable get(WritableComparable key, Writable val)
throws IOException {
if (seek(key)) {
@@ -786,9 +827,10 @@ public synchronized Writable get(WritableComparable key, Writable val)
* Returns key or if it does not exist, at the first entry
* after the named key.
*
- * @param key - key that we're trying to find
- * @param val - data value if key is found
- * @return - the key that was the closest match or null if eof.
+ * @param key - key that we're trying to find
+ * @param val - data value if key is found
+ * @return - the key that was the closest match or null if eof.
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized WritableComparable getClosest(WritableComparable key,
Writable val)
@@ -805,6 +847,7 @@ public synchronized WritableComparable getClosest(WritableComparable key,
* the first entry that falls just before the key. Otherwise,
* return the record that sorts just after.
* @return - the key that was the closest match or null if eof.
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized WritableComparable getClosest(WritableComparable key,
Writable val, final boolean before)
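A minimal sketch of the Reader lookups documented above; the map directory and key values are hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.MapFile;
    import org.apache.hadoop.io.Text;

    public class MapFileLookup {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        try (MapFile.Reader reader =
                 new MapFile.Reader(new Path("/tmp/mymap"), conf)) {
          Text val = new Text();
          // Exact lookup: returns the value, or null if the key is absent.
          if (reader.get(new Text("key-42"), val) != null) {
            System.out.println("exact: " + val);
          }
          // Nearest lookup: the key that was the closest match, or null at eof.
          Text closest = (Text) reader.getClosest(new Text("key-42"), val);
          System.out.println("closest: " + closest + " -> " + val);
        }
      }
    }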
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
index 2f90a084ddf19..53f81e34db8c6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
@@ -56,7 +56,12 @@ public static WritableComparator get(Class<? extends WritableComparable> c) {
return get(c, null);
}
- /** Get a comparator for a {@link WritableComparable} implementation. */
+ /**
+ * Get a comparator for a {@link WritableComparable} implementation.
+ * @param c class
+ * @param conf configuration
+ * @return WritableComparator
+ */
public static WritableComparator get(
Class<? extends WritableComparable> c, Configuration conf) {
WritableComparator comparator = comparators.get(c);
@@ -100,9 +105,13 @@ private static void forceInit(Class<?> cls) {
}
}
- /** Register an optimized comparator for a {@link WritableComparable}
+ /**
+ * Register an optimized comparator for a {@link WritableComparable}
* implementation. Comparators registered with this method must be
- * thread-safe. */
+ * thread-safe.
+ * @param c class
+ * @param comparator WritableComparator
+ */
public static void define(Class c, WritableComparator comparator) {
comparators.put(c, comparator);
}
@@ -144,10 +153,16 @@ protected WritableComparator(Class<? extends WritableComparable> keyClass,
}
}
- /** Returns the WritableComparable implementation class. */
+ /**
+ * Returns the WritableComparable implementation class.
+ * @return WritableComparable.
+ */
public Class<? extends WritableComparable> getKeyClass() { return keyClass; }
- /** Construct a new {@link WritableComparable} instance. */
+ /**
+ * Construct a new {@link WritableComparable} instance.
+ * @return WritableComparable.
+ */
public WritableComparable newKey() {
return ReflectionUtils.newInstance(keyClass, conf);
}
@@ -176,27 +191,54 @@ public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
return compare(key1, key2); // compare them
}
- /** Compare two WritableComparables.
+ /**
+ * Compare two WritableComparables.
*
- * The default implementation uses the natural ordering, calling {@link
- * Comparable#compareTo(Object)}. */
+ * The default implementation uses the natural ordering, calling {@link
+ * Comparable#compareTo(Object)}.
+ * @param a the first object to be compared.
+ * @param b the second object to be compared.
+ * @return compare result.
+ */
@SuppressWarnings("unchecked")
public int compare(WritableComparable a, WritableComparable b) {
return a.compareTo(b);
}
+ /**
+ * Compare two Objects.
+ *
+ * @param a the first object to be compared.
+ * @param b the second object to be compared.
+ * @return compare result.
+ */
@Override
public int compare(Object a, Object b) {
return compare((WritableComparable)a, (WritableComparable)b);
}
- /** Lexicographic order of binary data. */
+ /**
+ * Lexicographic order of binary data.
+ * @param b1 the first byte array.
+ * @param s1 the start offset in b1.
+ * @param l1 the number of bytes in b1 to compare.
+ * @param b2 the second byte array.
+ * @param s2 the start offset in b2.
+ * @param l2 the number of bytes in b2 to compare.
+ * @return the lexicographic comparison result.
+ */
public static int compareBytes(byte[] b1, int s1, int l1,
byte[] b2, int s2, int l2) {
return FastByteComparisons.compareTo(b1, s1, l1, b2, s2, l2);
}
- /** Compute hash for binary data. */
+ /**
+ * Compute hash for binary data.
+ * @param bytes the byte array to hash.
+ * @param offset the start offset.
+ * @param length the number of bytes to hash.
+ * @return hash for binary data
+ */
public static int hashBytes(byte[] bytes, int offset, int length) {
int hash = 1;
for (int i = offset; i < offset + length; i++)
@@ -204,18 +246,33 @@ public static int hashBytes(byte[] bytes, int offset, int length) {
return hash;
}
- /** Compute hash for binary data. */
+ /**
+ * Compute hash for binary data.
+ * @param bytes the byte array to hash.
+ * @param length the number of bytes to hash.
+ * @return hash for binary data.
+ */
public static int hashBytes(byte[] bytes, int length) {
return hashBytes(bytes, 0, length);
}
- /** Parse an unsigned short from a byte array. */
+ /**
+ * Parse an unsigned short from a byte array.
+ * @param bytes the byte array to read from.
+ * @param start the start offset.
+ * @return unsigned short from a byte array
+ */
public static int readUnsignedShort(byte[] bytes, int start) {
return (((bytes[start] & 0xff) << 8) +
((bytes[start+1] & 0xff)));
}
- /** Parse an integer from a byte array. */
+ /**
+ * Parse an integer from a byte array.
+ * @param bytes the byte array to read from.
+ * @param start the start offset.
+ * @return integer from a byte array
+ */
public static int readInt(byte[] bytes, int start) {
return (((bytes[start ] & 0xff) << 24) +
((bytes[start+1] & 0xff) << 16) +
@@ -224,18 +281,33 @@ public static int readInt(byte[] bytes, int start) {
}
- /** Parse a float from a byte array. */
+ /**
+ * Parse a float from a byte array.
+ * @param bytes the byte array to read from.
+ * @param start the start offset.
+ * @return float from a byte array
+ */
public static float readFloat(byte[] bytes, int start) {
return Float.intBitsToFloat(readInt(bytes, start));
}
- /** Parse a long from a byte array. */
+ /**
+ * Parse a long from a byte array.
+ * @param bytes the byte array to read from.
+ * @param start the start offset.
+ * @return long from a byte array
+ */
public static long readLong(byte[] bytes, int start) {
return ((long)(readInt(bytes, start)) << 32) +
(readInt(bytes, start+4) & 0xFFFFFFFFL);
}
- /** Parse a double from a byte array. */
+ /**
+ * Parse a double from a byte array.
+ * @param bytes the byte array to read from.
+ * @param start the start offset.
+ * @return double from a byte array
+ */
public static double readDouble(byte[] bytes, int start) {
return Double.longBitsToDouble(readLong(bytes, start));
}
@@ -244,7 +316,7 @@ public static double readDouble(byte[] bytes, int start) {
* Reads a zero-compressed encoded long from a byte array and returns it.
* @param bytes byte array with decode long
* @param start starting index
- * @throws java.io.IOException
+ * @throws IOException raised on errors performing I/O.
* @return deserialized long
*/
public static long readVLong(byte[] bytes, int start) throws IOException {
@@ -269,7 +341,7 @@ public static long readVLong(byte[] bytes, int start) throws IOException {
* Reads a zero-compressed encoded integer from a byte array and returns it.
* @param bytes byte array with the encoded integer
* @param start start index
- * @throws java.io.IOException
+ * @throws IOException raised on errors performing I/O.
* @return deserialized integer
*/
public static int readVInt(byte[] bytes, int start) throws IOException {
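A minimal sketch of the raw-comparison helpers and define() documented above: a comparator that orders serialized IntWritable records without deserializing them (shown for illustration only; IntWritable already ships with a registered comparator):

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.WritableComparator;

    public class RawIntComparator extends WritableComparator {
      public RawIntComparator() {
        super(IntWritable.class);
      }

      @Override
      public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
        // readInt parses a big-endian int straight out of the serialized bytes.
        return Integer.compare(readInt(b1, s1), readInt(b2, s2));
      }

      static {
        WritableComparator.define(IntWritable.class, new RawIntComparator());
      }
    }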
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
index beb4085eef032..063d0a8687b8e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
@@ -305,6 +305,7 @@ protected synchronized void setDelegationTokenSeqNum(int seqNum) {
* based implementations.
*
* @param keyId keyId
+ * @return DelegationKey
*/
protected DelegationKey getDelegationKey(int keyId) {
return allKeys.get(keyId);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
index d50dbc8f3efca..fce21dab9413c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
@@ -57,6 +57,7 @@ class Util {
* @param obj object
* @param timeout timeout
* @param unit unit
+ * @throws InterruptedException if the thread is interrupted.
*/
public static void wait(Object obj, long timeout, TimeUnit unit)
throws InterruptedException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
index f818556077f00..54f0fb2a74604 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
@@ -372,6 +372,7 @@ public void safeCreate(String path, byte[] data, List<ACL> acl,
*
* @param path Path to be deleted.
* @param fencingNodePath fencingNodePath
+ * @param fencingACL fencingACL
* @throws Exception if any problem occurs while performing deletion.
*/
public void safeDelete(final String path, List<ACL> fencingACL,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java
index 3f62aef00a5f6..595a09db3f824 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java
@@ -247,7 +247,7 @@ public int hash(byte[] key, int nbytes, int initval) {
/**
* Compute the hash of the specified file
* @param args name of file to compute hash of.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void main(String[] args) throws IOException {
if (args.length != 1) {
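For reference, a minimal sketch of the hash method documented above (seed value arbitrary):

    import org.apache.hadoop.util.hash.JenkinsHash;

    public class HashExample {
      public static void main(String[] args) {
        byte[] data = "hello".getBytes();
        int h = JenkinsHash.getInstance().hash(data, data.length, 0);
        System.out.println(Integer.toHexString(h));
      }
    }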
From 6875e4b4c9885ee6e9a4bcafd17eaee1d869ae40 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Sat, 14 May 2022 07:10:09 -0700
Subject: [PATCH 40/53] HADOOP-18229. Fix 250+ javadoc compilation
warnings.
---
.../fs/statistics/IOStatisticsSnapshot.java | 6 ++
.../apache/hadoop/fs/viewfs/ConfigUtil.java | 56 ++++++++++------
.../org/apache/hadoop/fs/viewfs/FsGetter.java | 8 +++
.../apache/hadoop/fs/viewfs/InodeTree.java | 40 ++++++++----
.../fs/viewfs/MountTableConfigLoader.java | 1 +
.../hadoop/fs/viewfs/ViewFileSystem.java | 12 ++--
.../viewfs/ViewFileSystemOverloadScheme.java | 10 ++-
.../hadoop/fs/viewfs/ViewFileSystemUtil.java | 5 +-
.../org/apache/hadoop/fs/viewfs/ViewFs.java | 2 +-
.../java/org/apache/hadoop/io/MapFile.java | 2 +-
.../org/apache/hadoop/io/SequenceFile.java | 31 +++++++--
.../io/compress/zlib/ZlibCompressor.java | 1 +
.../io/compress/zlib/ZlibDecompressor.java | 2 +
.../hadoop/io/compress/zlib/ZlibFactory.java | 2 +-
.../io/compress/zstd/ZStandardCompressor.java | 2 +
.../compress/zstd/ZStandardDecompressor.java | 1 +
.../web/DelegationTokenAuthenticatedURL.java | 6 ++
.../DelegationTokenAuthenticationFilter.java | 1 +
.../web/DelegationTokenAuthenticator.java | 8 +++
.../org/apache/hadoop/util/IdGenerator.java | 5 +-
.../apache/hadoop/util/InstrumentedLock.java | 1 +
.../hadoop/util/IntrusiveCollection.java | 22 +++++++
.../apache/hadoop/util/JsonSerialization.java | 2 +
.../apache/hadoop/util/JvmPauseMonitor.java | 3 +
.../apache/hadoop/util/LightWeightGSet.java | 12 +++-
.../hadoop/util/LightWeightResizableGSet.java | 2 +
.../org/apache/hadoop/util/LineReader.java | 6 +-
.../java/org/apache/hadoop/util/Lists.java | 28 +++++++-
.../org/apache/hadoop/util/MachineList.java | 6 +-
.../apache/hadoop/util/NativeCodeLoader.java | 8 ++-
.../hadoop/util/NativeLibraryChecker.java | 3 +-
.../java/org/apache/hadoop/util/Options.java | 2 +-
.../apache/hadoop/util/PrintJarMainClass.java | 2 +-
.../org/apache/hadoop/util/PriorityQueue.java | 36 ++++++++---
.../org/apache/hadoop/util/ProgramDriver.java | 20 +++---
.../java/org/apache/hadoop/util/Progress.java | 49 +++++++++++---
.../org/apache/hadoop/util/ProtoUtil.java | 4 ++
.../org/apache/hadoop/util/QuickSort.java | 3 +
.../apache/hadoop/util/ReflectionUtils.java | 15 ++++-
.../java/org/apache/hadoop/util/RunJar.java | 7 +-
.../apache/hadoop/util/SequentialNumber.java | 17 ++++-
.../org/apache/hadoop/util/ServletUtil.java | 16 ++++-
.../java/org/apache/hadoop/util/Sets.java | 64 ++++++++++++++++---
.../hadoop/util/ShutdownThreadsHelper.java | 8 ++-
.../org/apache/hadoop/util/StopWatch.java | 3 +
.../apache/hadoop/util/StringInterner.java | 3 +
.../org/apache/hadoop/util/StringUtils.java | 59 ++++++++++++++---
.../java/org/apache/hadoop/util/Time.java | 2 +
.../org/apache/hadoop/util/ToolRunner.java | 7 +-
.../java/org/apache/hadoop/util/XMLUtils.java | 6 +-
.../java/org/apache/hadoop/util/ZKUtil.java | 1 +
.../hadoop/util/functional/package-info.java | 6 +-
52 files changed, 498 insertions(+), 126 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java
index 7e18a83e77257..4a84d47de77db 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java
@@ -238,6 +238,8 @@ public static JsonSerialization<IOStatisticsSnapshot> serializer() {
/**
* Serialize by converting each map to a TreeMap, and saving that
* to the stream.
+ * @param s ObjectOutputStream
+ * @throws IOException raised on errors performing I/O.
*/
private synchronized void writeObject(ObjectOutputStream s)
throws IOException {
@@ -253,6 +255,10 @@ private synchronized void writeObject(ObjectOutputStream s)
/**
* Deserialize by loading each TreeMap, and building concurrent
* hash maps from them.
+ *
+ * @param s ObjectInputStream
+ * @throws IOException raised on errors performing I/O.
+ * @throws ClassNotFoundException if a serialized class cannot be found.
*/
private void readObject(final ObjectInputStream s)
throws IOException, ClassNotFoundException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java
index ead2a365f3ae6..1faf215e50553 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java
@@ -48,7 +48,7 @@ public static String getConfigViewFsPrefix() {
/**
* Add a link to the config for the specified mount table
* @param conf - add the link to this conf
- * @param mountTableName
+ * @param mountTableName mountTable
* @param src - the src path name
* @param target - the target URI link
*/
@@ -71,9 +71,10 @@ public static void addLink(final Configuration conf, final String src,
/**
* Add a LinkMergeSlash to the config for the specified mount table.
- * @param conf
- * @param mountTableName
- * @param target
+ *
+ * @param conf configuration
+ * @param mountTableName mountTable
+ * @param target target
*/
public static void addLinkMergeSlash(Configuration conf,
final String mountTableName, final URI target) {
@@ -83,8 +84,9 @@ public static void addLinkMergeSlash(Configuration conf,
/**
* Add a LinkMergeSlash to the config for the default mount table.
- * @param conf
- * @param target
+ *
+ * @param conf configuration
+ * @param target target
*/
public static void addLinkMergeSlash(Configuration conf, final URI target) {
addLinkMergeSlash(conf, getDefaultMountTableName(conf), target);
@@ -92,9 +94,10 @@ public static void addLinkMergeSlash(Configuration conf, final URI target) {
/**
* Add a LinkFallback to the config for the specified mount table.
- * @param conf
- * @param mountTableName
- * @param target
+ *
+ * @param conf configuration
+ * @param mountTableName mountTable
+ * @param target target
*/
public static void addLinkFallback(Configuration conf,
final String mountTableName, final URI target) {
@@ -104,8 +107,9 @@ public static void addLinkFallback(Configuration conf,
/**
* Add a LinkFallback to the config for the default mount table.
- * @param conf
- * @param target
+ *
+ * @param conf configuration
+ * @param target target
*/
public static void addLinkFallback(Configuration conf, final URI target) {
addLinkFallback(conf, getDefaultMountTableName(conf), target);
@@ -113,9 +117,10 @@ public static void addLinkFallback(Configuration conf, final URI target) {
/**
* Add a LinkMerge to the config for the specified mount table.
- * @param conf
- * @param mountTableName
- * @param targets
+ *
+ * @param conf configuration
+ * @param mountTableName mountTable
+ * @param targets targets
*/
public static void addLinkMerge(Configuration conf,
final String mountTableName, final URI[] targets) {
@@ -125,8 +130,9 @@ public static void addLinkMerge(Configuration conf,
/**
* Add a LinkMerge to the config for the default mount table.
- * @param conf
- * @param targets
+ *
+ * @param conf configuration
+ * @param targets targets array
*/
public static void addLinkMerge(Configuration conf, final URI[] targets) {
addLinkMerge(conf, getDefaultMountTableName(conf), targets);
@@ -134,6 +140,12 @@ public static void addLinkMerge(Configuration conf, final URI[] targets) {
/**
* Add nfly link to configuration for the given mount table.
+ *
+ * @param conf configuration
+ * @param mountTableName mount table
+ * @param src src
+ * @param settings settings
+ * @param targets targets
*/
public static void addLinkNfly(Configuration conf, String mountTableName,
String src, String settings, final String targets) {
@@ -144,12 +156,13 @@ public static void addLinkNfly(Configuration conf, String mountTableName,
}
/**
+ * Add nfly link to configuration for the given mount table.
*
- * @param conf
- * @param mountTableName
- * @param src
- * @param settings
- * @param targets
+ * @param conf configuration
+ * @param mountTableName mount table
+ * @param src src
+ * @param settings settings
+ * @param targets targets
*/
public static void addLinkNfly(Configuration conf, String mountTableName,
String src, String settings, final URI ... targets) {
@@ -202,6 +215,7 @@ public static void setHomeDirConf(final Configuration conf,
* Add config variable for homedir the specified mount table
* @param conf - add to this conf
* @param homedir - the home dir path starting with slash
+ * @param mountTableName - the mount table
*/
public static void setHomeDirConf(final Configuration conf,
final String mountTableName, final String homedir) {
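A minimal sketch of building a viewfs mount table with the ConfigUtil methods documented above; the mount table name and target URIs are hypothetical:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.viewfs.ConfigUtil;

    public class MountTableSetup {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // viewfs://clusterX/user -> hdfs://nn1.example.com/user
        ConfigUtil.addLink(conf, "clusterX", "/user",
            new URI("hdfs://nn1.example.com/user"));
        // Unmatched paths fall back to this target.
        ConfigUtil.addLinkFallback(conf, "clusterX",
            new URI("hdfs://nn1.example.com/"));
      }
    }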
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
index c72baac25fb75..b6490e6b9db84 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
@@ -34,6 +34,9 @@ public class FsGetter {
/**
* Gets new file system instance of given uri.
+ * @param uri uri
+ * @param conf configuration
+ * @return file system instance
+ * @throws IOException raised on errors performing I/O.
*/
public FileSystem getNewInstance(URI uri, Configuration conf)
throws IOException {
@@ -42,6 +45,11 @@ public FileSystem getNewInstance(URI uri, Configuration conf)
/**
* Gets file system instance of given uri.
+ *
+ * @param uri uri
+ * @param conf configuration
+ * @throws IOException raised on errors performing I/O.
+ * @return FileSystem
*/
public FileSystem get(URI uri, Configuration conf) throws IOException {
return FileSystem.get(uri, conf);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
index fb7c46fb662b5..c03e41bae7ff3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
@@ -364,6 +364,8 @@ public static class INodeLink<T> extends INode<T> {
/**
* Get the target of the link. If a merge link then it returned
* as "," separated URI list.
+ *
+ * @return the path
*/
public Path getTargetLink() {
StringBuilder result = new StringBuilder(targetDirLinkList[0].toString());
@@ -387,7 +389,7 @@ INodeLink<T> getLink() {
/**
* Get the instance of FileSystem to use, creating one if needed.
* @return An Initialized instance of T
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public T getTargetFileSystem() throws IOException {
if (targetFileSystem != null) {
@@ -500,6 +502,7 @@ private void createLink(final String src, final String target,
/**
* The user of this class must subclass and implement the following
* 3 abstract methods.
+ * @return a function that initializes and returns the target file system.
*/
protected abstract Function<URI, T> initAndGetTargetFs();
@@ -590,14 +593,19 @@ Configuration getConfig() {
}
/**
- * Create Inode Tree from the specified mount-table specified in Config
- * @param config - the mount table keys are prefixed with
- * FsConstants.CONFIG_VIEWFS_PREFIX
- * @param viewName - the name of the mount table - if null use defaultMT name
- * @throws UnsupportedFileSystemException
- * @throws URISyntaxException
- * @throws FileAlreadyExistsException
- * @throws IOException
+ * Create Inode Tree from the specified mount-table specified in Config.
+ *
+ * @param config - the mount table keys are prefixed with
+ * FsConstants.CONFIG_VIEWFS_PREFIX
+ * @param viewName - the name of the mount table - if null use defaultMT name
+ * @param theUri theUri
+ * @param initingUriAsFallbackOnNoMounts initingUriAsFallbackOnNoMounts
+ * @throws UnsupportedFileSystemException if the file system for the uri
+ * is not found.
+ * @throws URISyntaxException if the URI is badly formed or does not have
+ * an authority.
+ * @throws FileAlreadyExistsException if there is a file at the specified
+ * path, or one is discovered on one of its ancestors.
+ * @throws IOException raised on errors performing I/O.
*/
protected InodeTree(final Configuration config, final String viewName,
final URI theUri, boolean initingUriAsFallbackOnNoMounts)
@@ -871,9 +879,9 @@ boolean isLastInternalDirLink() {
/**
* Resolve the pathname p relative to root InodeDir.
* @param p - input path
- * @param resolveLastComponent
+ * @param resolveLastComponent resolveLastComponent
* @return ResolveResult which allows further resolution of the remaining path
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public ResolveResult<T> resolve(final String p, final boolean resolveLastComponent)
throws IOException {
@@ -1000,9 +1008,9 @@ private Path getRemainingPath(String[] path, int startIndex) {
* resolveLastComponent: true
* then return value is s3://hadoop.apache.com/_hadoop
*
- * @param srcPath
- * @param resolveLastComponent
- * @return
+ * @param srcPath srcPath
+ * @param resolveLastComponent resolveLastComponent
+ * @return ResolveResult
*/
protected ResolveResult<T> tryResolveInRegexMountpoint(final String srcPath,
final boolean resolveLastComponent) {
@@ -1029,6 +1037,10 @@ protected ResolveResult<T> tryResolveInRegexMountpoint(
* targetOfResolvedPathStr: /targetTestRoot/hadoop-user1
* remainingPath: /hadoop_dir1
*
+ * @param resultKind resultKind
+ * @param resolvedPathStr resolvedPathStr
+ * @param targetOfResolvedPathStr targetOfResolvedPathStr
+ * @param remainingPath remainingPath
* @return targetFileSystem or null on exceptions.
*/
protected ResolveResult<T> buildResolveResultForRegexMountPoint(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java
index bc2c3ea93c58c..5fcd77cd29155 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java
@@ -38,6 +38,7 @@ public interface MountTableConfigLoader {
* a directory in the case of multiple versions of mount-table
* files(Recommended option).
* @param conf - Configuration object to add mount table.
+ * @throws IOException raised on errors performing I/O.
*/
void load(String mountTableConfigPath, Configuration conf)
throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
index 8f4631b0e833e..d2a9bb667f893 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
@@ -107,6 +107,8 @@ static AccessControlException readOnlyMountTable(final String operation,
/**
* Gets file system creator instance.
+ *
+ * @return fs getter
*/
protected FsGetter fsGetter() {
return new FsGetter();
@@ -273,7 +275,7 @@ private Path makeAbsolute(final Path f) {
* {@link FileSystem#createFileSystem(URI, Configuration)}
*
* After this constructor is called initialize() is called.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public ViewFileSystem() throws IOException {
ugi = UserGroupInformation.getCurrentUser();
@@ -394,9 +396,9 @@ protected FileSystem getTargetFileSystem(final String settings,
}
/**
- * Convenience Constructor for apps to call directly
- * @param conf
- * @throws IOException
+ * Convenience Constructor for apps to call directly.
+ * @param conf configuration
+ * @throws IOException raised on errors performing I/O.
*/
public ViewFileSystem(final Configuration conf) throws IOException {
this(FsConstants.VIEWFS_URI, conf);
@@ -1314,7 +1316,7 @@ public FsStatus getStatus(Path p) throws IOException {
* Constants#CONFIG_VIEWFS_LINK_MERGE_SLASH} is supported and is a valid
* mount point. Else, throw NotInMountpointException.
*
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public long getUsed() throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java
index e91b66512d5bf..99c626be3a214 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java
@@ -139,6 +139,8 @@ public boolean supportAutoAddingFallbackOnNoMounts() {
/**
* Sets whether to add fallback automatically when no mount points found.
+ *
+ * @param addAutoFallbackOnNoMounts addAutoFallbackOnNoMounts
*/
public void setSupportAutoAddingFallbackOnNoMounts(
boolean addAutoFallbackOnNoMounts) {
@@ -320,7 +322,8 @@ private <T> T newInstance(Class<T> theClass, URI uri, Configuration conf) {
*
* @param path - fs uri path
* @param conf - configuration
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
+ * @return file system
*/
public FileSystem getRawFileSystem(Path path, Configuration conf)
throws IOException {
@@ -339,6 +342,11 @@ public FileSystem getRawFileSystem(Path path, Configuration conf)
/**
* Gets the mount path info, which contains the target file system and
* remaining path to pass to the target file system.
+ *
+ * @param path the path
+ * @param conf configuration
+ * @return mount path info
+ * @throws IOException raised on errors performing I/O.
*/
public MountPathInfo<FileSystem> getMountPathInfo(Path path,
Configuration conf) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java
index f486a10b4c8f9..1f05076f47397 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java
@@ -44,7 +44,7 @@ private ViewFileSystemUtil() {
/**
* Check if the FileSystem is a ViewFileSystem.
*
- * @param fileSystem
+ * @param fileSystem file system
* @return true if the fileSystem is ViewFileSystem
*/
public static boolean isViewFileSystem(final FileSystem fileSystem) {
@@ -54,7 +54,7 @@ public static boolean isViewFileSystem(final FileSystem fileSystem) {
/**
* Check if the FileSystem is a ViewFileSystemOverloadScheme.
*
- * @param fileSystem
+ * @param fileSystem file system
* @return true if the fileSystem is ViewFileSystemOverloadScheme
*/
public static boolean isViewFileSystemOverloadScheme(
@@ -101,6 +101,7 @@ public static boolean isViewFileSystemOverloadScheme(
* @param fileSystem - ViewFileSystem on which mount point exists
* @param path - URI for which FsStatus is requested
* @return Map of ViewFsMountPoint and FsStatus
+ * @throws IOException raised on errors performing I/O.
*/
public static Map<MountPoint, FsStatus> getStatus(
FileSystem fileSystem, Path path) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
index d98082fe5c1e0..5f54c9cdd06aa 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
@@ -909,7 +909,7 @@ public void unsetStoragePolicy(final Path src)
*
* @param src file or directory path.
* @return storage policy for give file.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public BlockStoragePolicySpi getStoragePolicy(final Path src)
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
index bf96cd2aee87b..87feb1029ea6b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
@@ -810,7 +810,7 @@ public synchronized boolean next(WritableComparable key, Writable val)
* Return the value for the named key, or null if none exists.
* @param key key
* @param val val
- * @throws Writable if such a pair exists true,not false
+ * @return the value, or null if no such key exists.
* @throws IOException raised on errors performing I/O.
*/
public synchronized Writable get(WritableComparable key, Writable val)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
index 890e7916ab076..420fe51492a70 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
public synchronized Class<?> getValueClass() {
return valClass;
}
- /** Returns true if values are compressed. */
+ /**
+ * Returns true if values are compressed.
+ * @return true if values are compressed.
+ */
public boolean isCompressed() { return decompress; }
- /** Returns true if records are block-compressed. */
+ /**
+ * Returns true if records are block-compressed.
+ * @return true if records are block-compressed.
+ */
public boolean isBlockCompressed() { return blockCompressed; }
- /** Returns the compression codec of data in this file. */
+ /**
+ * Returns the compression codec of data in this file.
+ * @return CompressionCodec
+ */
public CompressionCodec getCompressionCodec() { return codec; }
private byte[] getSync() {
@@ -2202,7 +2211,10 @@ public CompressionType getCompressionType() {
}
}
- /** Returns the metadata object of the file */
+ /**
+ * Returns the metadata object of the file.
+ * @return metadata
+ */
public Metadata getMetadata() {
return this.metadata;
}
@@ -2311,7 +2323,7 @@ private synchronized void seekToCurrentValue() throws IOException {
/**
* Get the 'value' corresponding to the last read 'key'.
* @param val : The 'value' to be read.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized void getCurrentValue(Writable val)
throws IOException {
@@ -2392,8 +2404,13 @@ private Object deserializeValue(Object val) throws IOException {
return valDeserializer.deserialize(val);
}
- /** Read the next key in the file into key, skipping its
- * value. True if another entry exists, and false at end of file. */
+ /**
+ * Read the next key in the file into key, skipping its
+ * value. True if another entry exists, and false at end of file.
+ *
+ * @param key key
+ * @return true if another entry exists, and false at end of file.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized boolean next(Writable key) throws IOException {
if (key.getClass() != getKeyClass())
throw new IOException("wrong key class: "+key.getClass().getName()
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
index da8a90bb3170e..a3ce3ab076581 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
@@ -240,6 +240,7 @@ public ZlibCompressor() {
/**
* Creates a new compressor, taking settings from the configuration.
+ * @param conf configuration
*/
public ZlibCompressor(Configuration conf) {
this(ZlibFactory.getCompressionLevel(conf),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java
index f642d7713035d..5f749748f30ec 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java
@@ -101,6 +101,8 @@ static boolean isNativeZlibLoaded() {
/**
* Creates a new decompressor.
+ * @param header header
+ * @param directBufferSize directBufferSize
*/
public ZlibDecompressor(CompressionHeader header, int directBufferSize) {
this.header = header;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
index 883f1717eea93..f4bae38dc457e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
@@ -66,7 +66,7 @@ public static void loadNativeZLib() {
/**
* Set the flag whether to use native library. Used for testing non-native
* libraries
- *
+ * @param isLoaded isLoaded
*/
@VisibleForTesting
public static void setNativeZlibLoaded(final boolean isLoaded) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java
index bc51f3d98a505..dfef01044d2c9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java
@@ -84,6 +84,8 @@ public static int getRecommendedBufferSize() {
/**
* Creates a new compressor with the default compression level.
* Compressed data will be generated in ZStandard format.
+ * @param level level
+ * @param bufferSize bufferSize
*/
public ZStandardCompressor(int level, int bufferSize) {
this(level, bufferSize, bufferSize);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java
index adf2fe629f8f7..c9ef509c6dce2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java
@@ -73,6 +73,7 @@ public ZStandardDecompressor() {
/**
* Creates a new decompressor.
+ * @param bufferSize bufferSize
*/
public ZStandardDecompressor(int bufferSize) {
this.directBufferSize = bufferSize;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java
index 0988826605fbb..2815f56818501 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java
@@ -336,6 +336,10 @@ public HttpURLConnection openConnection(URL url, Token token, String doAs)
/**
* Select a delegation token from all tokens in credentials, based on url.
+ *
+ * @param url url
+ * @param creds credentials
+ * @return token
*/
@InterfaceAudience.Private
public org.apache.hadoop.security.token.Token<? extends TokenIdentifier>
@@ -407,6 +411,7 @@ public HttpURLConnection openConnection(URL url, Token token, String doAs)
* @param token the authentication token with the Delegation Token to renew.
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication exception occurred.
+ * @return the new expiration time of the delegation token.
*/
public long renewDelegationToken(URL url, Token token)
throws IOException, AuthenticationException {
@@ -423,6 +428,7 @@ public long renewDelegationToken(URL url, Token token)
* @param doAsUser the user to do as, which will be the token owner.
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication exception occurred.
+ * @return the new expiration time of the delegation token.
*/
public long renewDelegationToken(URL url, Token token, String doAsUser)
throws IOException, AuthenticationException {
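A minimal sketch of fetching and then renewing a delegation token with the methods documented above; the service URL and renewer are hypothetical:

    import java.net.URL;
    import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;

    public class TokenRenewal {
      public static void main(String[] args) throws Exception {
        URL url = new URL("http://rm.example.com:8088/ws/v1/cluster");
        DelegationTokenAuthenticatedURL.Token token =
            new DelegationTokenAuthenticatedURL.Token();
        DelegationTokenAuthenticatedURL authUrl =
            new DelegationTokenAuthenticatedURL();
        authUrl.getDelegationToken(url, token, "renewer-user");
        long expiration = authUrl.renewDelegationToken(url, token);
        System.out.println("token expires at " + expiration);
      }
    }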
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
index be061bb63f3ee..3de8d3ab91377 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
@@ -125,6 +125,7 @@ protected Properties getConfiguration(String configPrefix,
* Set AUTH_TYPE property to the name of the corresponding authentication
* handler class based on the input properties.
* @param props input properties.
+ * @throws ServletException if the authentication handler cannot be set.
*/
protected void setAuthHandlerClass(Properties props)
throws ServletException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
index 19427dcfafeb4..2694df5a97485 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
@@ -163,6 +163,7 @@ public void authenticate(URL url, AuthenticatedURL.Token token)
* @param renewer the renewer user.
* @throws IOException if an IO error occurred.
* @throws AuthenticationException if an authentication exception occurred.
+ * @return the delegation token.
*/
public Token<AbstractDelegationTokenIdentifier> getDelegationToken(URL url,
AuthenticatedURL.Token token, String renewer)
@@ -182,6 +183,7 @@ public Token