From e3d75af60ce00cbcc07a4a48742b09c097cf48b7 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 03:46:15 -0700 Subject: [PATCH 01/53] HADOOP-18229. Fix Hadoop-Common JavaDoc Error --- .../src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java index 23ad053a67d5c..88549cba9e63d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java @@ -429,7 +429,6 @@ private void createLink(final String src, final String target, /** * The user of this class must subclass and implement the following * 3 abstract methods. - * @throws IOException */ protected abstract Function initAndGetTargetFs(); @@ -882,7 +881,7 @@ public ResolveResult resolve(final String p, final boolean resolveLastCompone /** * Walk through all regex mount points to see * whether the path match any regex expressions. - * E.g. link: ^/user/(?\\w+) => s3://$user.apache.com/_${user} + * E.g. link: ^/user/(?<username>\\w+) => s3://$user.apache.com/_${user} * srcPath: is /user/hadoop/dir1 * resolveLastComponent: true * then return value is s3://hadoop.apache.com/_hadoop @@ -907,7 +906,7 @@ protected ResolveResult tryResolveInRegexMountpoint(final String srcPath, * Build resolve result. * Here's an example * Mountpoint: fs.viewfs.mounttable.mt - * .linkRegex.replaceresolveddstpath:_:-#.^/user/(?\w+) + * .linkRegex.replaceresolveddstpath:_:-#.^/user/(??<username>\w+) * Value: /targetTestRoot/$username * Dir path to test: * viewfs://mt/user/hadoop_user1/hadoop_dir1 From 4c38268fe77809ddeb1fcb0f03e62bdbe8ef745c Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 05:12:35 -0700 Subject: [PATCH 02/53] HADOOP-18229. Fix GenericOptionsParser annotation P tag has no end --- .../java/org/apache/hadoop/util/GenericOptionsParser.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java index 8ca7a904fdc84..55c8bf2d688fe 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java @@ -57,7 +57,7 @@ * *
Generic Options
 *
- * <p>The supported generic options are:
+ * <p>The supported generic options are:</p>
 * <p><blockquote><pre>

  *     -conf <configuration file>     specify a configuration file
  *     -D <property=value>            use value for given property
@@ -70,12 +70,12 @@
  *     -archives <comma separated list of archives>    specify comma
  *             separated archives to be unarchived on the compute machines.
 
- * </pre></blockquote><p>
+ * </pre></blockquote>
 *
 * <p>The general command line syntax is:
 * <p><blockquote><pre>
 * bin/hadoop command [genericOptions] [commandOptions]
- * </pre></blockquote><p>
+ * </pre></blockquote>
 *
 * <p>Generic command line arguments might modify
 * Configuration objects, given to constructors.
 *
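For orientation, a minimal sketch of driving the parser directly (the class name ParseExample is illustrative and not part of the patch; most applications get this behaviour through ToolRunner instead):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class ParseExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Generic options such as -D, -fs and -conf are applied to conf here.
    GenericOptionsParser parser = new GenericOptionsParser(conf, args);
    // Everything that was not a generic option remains for the application.
    String[] appArgs = parser.getRemainingArgs();
    System.out.println("remaining args: " + appArgs.length);
  }
}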
@@ -105,7 +105,7 @@
 * $ bin/hadoop jar -libjars testlib.jar
 * -archives test.tgz -files file.txt inputjar args
 * job submission with libjars, files and archives
- * </pre></blockquote><p>
+ * </pre></blockquote>
* * @see Tool * @see ToolRunner From bd6a4fb3713d502f9ace55239515e7d32d32f194 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 05:16:27 -0700 Subject: [PATCH 03/53] HADOOP-18229. Fix RawComparator no description for @param --- .../src/main/java/org/apache/hadoop/io/RawComparator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java index a52190db5f4d1..a15e2346ae9f0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java @@ -29,7 +29,7 @@ * A {@link Comparator} that operates directly on byte representations of * objects. *

- * @param + * @param generic type * @see DeserializerComparator */ @InterfaceAudience.Public From 8057246f88f9beec73b9630cf8b9d589faf2ec11 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 05:18:11 -0700 Subject: [PATCH 04/53] HADOOP-18229. Fix CBZip2OutputStream.java has empty tag,remove it --- .../org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java | 1 - 1 file changed, 1 deletion(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java index 850fec77c5109..afe90f10eb0db 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java @@ -64,7 +64,6 @@ * * * - * * * * From cb3f0f532d38386425f7c98c954aa29a92ec282a Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 05:19:46 -0700 Subject: [PATCH 05/53] HADOOP-18229. Fix JavaSerializationComparator.java no description for @param --- .../hadoop/io/serializer/JavaSerializationComparator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java index f9bf692f1fcc8..ac8dbbeaa277e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java @@ -31,7 +31,7 @@ * {@link Deserializer} to deserialize objects that are then compared via * their {@link Comparable} interfaces. *

- * @param + * @param generic type * @see JavaSerialization */ @InterfaceAudience.Public From ae647e7bb844b5fc06f8186ddf26e04530744721 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 05:20:27 -0700 Subject: [PATCH 06/53] HADOOP-18229. Fix DeserializerComparator.java no description for @param --- .../org/apache/hadoop/io/serializer/DeserializerComparator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java index 05205c5523cc6..b60d310f0b64e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java @@ -37,7 +37,7 @@ * implementation of {@link RawComparator} that operates directly * on byte representations. *

- * @param + * @param generic type */ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving From b507a95ef68d3c16a735926fd1b767e80925253e Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 05:21:19 -0700 Subject: [PATCH 07/53] HADOOP-18229. Fix Deserializer.java no description for @param --- .../main/java/org/apache/hadoop/io/serializer/Deserializer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java index 3c8dfccafa8bb..d89442e703fce 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java @@ -35,7 +35,7 @@ * other producers may read from the input between calls to * {@link #deserialize(Object)}. *

- * @param + * @param generic type */ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving From 44df72b261f8828d256c943265b5123a97450271 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 05:22:06 -0700 Subject: [PATCH 08/53] HADOOP-18229. Fix Serialization.java no description for @param --- .../java/org/apache/hadoop/io/serializer/Serialization.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java index 6f2097f7bf9da..f17375a2551fa 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java @@ -25,7 +25,7 @@ *

* Encapsulates a {@link Serializer}/{@link Deserializer} pair. *

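As a usage sketch, a Serialization is normally looked up through SerializationFactory rather than instantiated directly; LongWritable here stands in for any registered type:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;

public class SerializationLookup {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // The factory walks the io.serializations list and returns the
    // first Serialization whose accept() matches the class.
    SerializationFactory factory = new SerializationFactory(conf);
    Serializer<LongWritable> serializer =
        factory.getSerializer(LongWritable.class);
    System.out.println(serializer != null ? "found" : "none");
  }
}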
- * @param + * @param generic type */ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving From a45ff20b38fbb0d065ca42fbae79960677e42e9f Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 05:22:37 -0700 Subject: [PATCH 09/53] HADOOP-18229. Fix Serializer.java no description for @param --- .../main/java/org/apache/hadoop/io/serializer/Serializer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java index 5ada541370ee0..2c6dd124c4dc0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java @@ -35,7 +35,7 @@ * other producers may write to the output between calls to * {@link #serialize(Object)}. *

- * @param + * @param generic type */ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving From baeb0b9a43b6e195aa4e91893060d1b448a5f7e5 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 05:27:21 -0700 Subject: [PATCH 10/53] HADOOP-18229. Fix JMXJsonServlet.java annotation P tag has no end --- .../java/org/apache/hadoop/jmx/JMXJsonServlet.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java index f20933b5c8668..0d38830c7532d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java @@ -64,25 +64,30 @@ * functionality is provided through the * {@link MBeanServer#queryNames(ObjectName, javax.management.QueryExp)} * method. + *
 * <p>
 * For example http://.../jmx?qry=Hadoop:* will return
 * all hadoop metrics exposed through JMX.
+ * </p>
 * <p>
 * The optional get parameter is used to query an specific
 * attribute of a JMX bean. The format of the URL is
 * http://.../jmx?get=MXBeanName::AttributeName
+ * </p>
 * <p>
 * For example
 *
 * http://../jmx?get=Hadoop:service=NameNode,name=NameNodeInfo::ClusterId
 * will return the cluster id of the namenode mxbean.
+ * </p>
 * <p>
 * If the qry or the get parameter is not formatted
- * correctly then a 400 BAD REQUEST http response code will be returned.
+ * correctly then a 400 BAD REQUEST http response code will be returned.
+ * </p>
 * <p>
 * If a resouce such as a mbean or attribute can not be found,
 * a 404 SC_NOT_FOUND http response code will be returned.
- * <p>
+ * </p>
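For illustration only, the servlet can be read with plain JDK classes; the host and port below are placeholders, not values from this patch:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class JmxFetch {
  public static void main(String[] args) throws Exception {
    // Placeholder host/port; any daemon exposing the servlet works.
    URL url = new URL("http://namenode:9870/jmx?qry=Hadoop:*");
    try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) {
      for (String line; (line = reader.readLine()) != null;) {
        System.out.println(line); // the JSON document described below
      }
    }
  }
}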
* The return format is JSON and in the form *

*


@@ -95,7 +100,7 @@
  *    ]
  *  }
  *  
- *

+ *

* The servlet attempts to convert the the JMXBeans into JSON. Each * bean's attributes will be converted to a JSON object member. * From c2e906f72d08ec0cc4141b0146b3c536cc815124 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 05:29:48 -0700 Subject: [PATCH 11/53] HADOOP-18229. Fix FileSystem.java annotation P tag has no end and has empty P tag --- .../src/main/java/org/apache/hadoop/fs/FileSystem.java | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java index aa194e84a35d6..f69748171cbef 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java @@ -104,13 +104,13 @@ * All user code that may potentially use the Hadoop Distributed * File System should be written to use a FileSystem object or its * successor, {@link FileContext}. - * + *

*

* The local implementation is {@link LocalFileSystem} and distributed * implementation is DistributedFileSystem. There are other implementations * for object stores and (outside the Apache Hadoop codebase), * third party filesystems. - *

+ *

* Notes *
    *
  1. The behaviour of the filesystem is @@ -133,13 +133,12 @@ * New methods may be marked as Unstable or Evolving for their initial release, * as a warning that they are new and may change based on the * experience of use in applications. - *

    + *

    * Important note for developers - *

    + *

    * If you are making changes here to the public API or protected methods, * you must review the following subclasses and make sure that * they are filtering/passing through new methods as appropriate. - *

    * * {@link FilterFileSystem}: methods are passed through. If not, * then {@code TestFilterFileSystem.MustNotImplement} must be From 8052142c966c28c581d45ba8df878ba597f0ae86 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 05:31:12 -0700 Subject: [PATCH 12/53] HADOOP-18229. Fix MultipartUploader.java annotation has empty P tag --- .../src/main/java/org/apache/hadoop/fs/MultipartUploader.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java index dcb76b50b3429..5e4eda26c7f1d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java @@ -31,10 +31,11 @@ /** * MultipartUploader is an interface for copying files multipart and across * multiple nodes. - *

    + *

 * The interface extends {@link IOStatisticsSource} so that there is no
 * need to cast an instance to see if it is a source of statistics.
 * However, implementations MAY return null for their actual statistics.
+ *

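A sketch of the no-cast pattern this enables; the uploader is assumed to have been obtained elsewhere (for example via a FileSystem's createMultipartUploader() builder, where supported):

import org.apache.hadoop.fs.MultipartUploader;
import org.apache.hadoop.fs.statistics.IOStatistics;
import org.apache.hadoop.fs.statistics.IOStatisticsSupport;

public class UploaderStats {
  static void printStats(MultipartUploader uploader) {
    // Never casts; returns null when no statistics are exposed.
    IOStatistics stats = IOStatisticsSupport.retrieveIOStatistics(uploader);
    if (stats != null) {
      System.out.println(stats.counters());
    }
  }
}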
    */ @InterfaceAudience.Public @InterfaceStability.Unstable From 07f63be449635135719580fe7773ff8e23387e8e Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 16:52:06 -0700 Subject: [PATCH 13/53] HADOOP-18229. Fix some java doc compilation errors GenericOptionsParser.java empty p tag and unexpected end tag: p Tool.java empty p tag RemoteIterators.java empty p tag FileSystem.java empty p tag MultipartUploaderBuilder.java no description for @param InodeTree.java reference not found JMXJsonServlet.java empty p tag and unexpected end tag CBZip2OutputStream.java no summary or caption for table --- .../java/org/apache/hadoop/fs/FileSystem.java | 11 +++++----- .../hadoop/fs/MultipartUploaderBuilder.java | 4 ++-- .../apache/hadoop/fs/viewfs/InodeTree.java | 6 +++--- .../io/compress/bzip2/CBZip2OutputStream.java | 4 +--- .../org/apache/hadoop/jmx/JMXJsonServlet.java | 2 -- .../hadoop/util/GenericOptionsParser.java | 21 ++++++++++++------- .../java/org/apache/hadoop/util/Tool.java | 4 ++-- .../util/functional/RemoteIterators.java | 8 +++---- 8 files changed, 32 insertions(+), 28 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java index f69748171cbef..180f5d1608164 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java @@ -147,21 +147,22 @@ * {@link #hasPathCapability(Path, String)} then * {@link FilterFileSystem#hasPathCapability(Path, String)} * must return false, always. - *

    + *

    * {@link ChecksumFileSystem}: checksums are created and * verified. - *

    + *

    * {@code TestHarFileSystem} will need its {@code MustNotImplement} * interface updated. - *

    * + *

    * There are some external places your changes will break things. * Do co-ordinate changes here. - *

    + *

    * * HBase: HBoss - *

    + *

    * Hive: HiveShim23 + *

    * {@code shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java} * *****************************************************************/ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java index 381bfaa07f6d1..44d9fb7a65218 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java @@ -25,8 +25,8 @@ /** * Builder interface for Multipart readers. - * @param - * @param + * @param MultipartUploader Generic Type + * @param MultipartUploaderBuilder Generic Type */ public interface MultipartUploaderBuilder> extends FSBuilder { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java index 88549cba9e63d..11841d8dbd7a3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java @@ -55,7 +55,7 @@ * @param is AbstractFileSystem or FileSystem * * The two main methods are - * {@link #InodeTree(Configuration, String)} // constructor + * {@link #InodeTree(Configuration, String, URI, boolean)} // constructor * {@link #resolve(String, boolean)} */ @@ -266,8 +266,8 @@ enum LinkType { * A merge dir link is a merge (junction) of links to dirs: * example : merge of 2 dirs - * /users -> hdfs:nn1//users - * /users -> hdfs:nn2//users + * /users -> hdfs:nn1//users + * /users -> hdfs:nn2//users * * For a merge, each target is checked to be dir when created but if target * is changed later it is then ignored (a dir with null entries) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java index afe90f10eb0db..794f9d02229ec 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java @@ -64,12 +64,10 @@ * * *
+ * * * * - * - * - * * diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java index 0d38830c7532d..85f2d2828562d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java @@ -89,7 +89,6 @@ * a 404 SC_NOT_FOUND http response code will be returned. *

* The return format is JSON and in the form - *

*


  *  {
  *    "beans" : [
@@ -100,7 +99,6 @@
  *    ]
  *  }
  *  
- *

* The servlet attempts to convert the the JMXBeans into JSON. Each * bean's attributes will be converted to a JSON object member. * diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java index 55c8bf2d688fe..f6d6518b59968 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java @@ -58,7 +58,8 @@ *

Generic Options

* *

The supported generic options are:

- * <p><blockquote><pre>
+ * <blockquote>
+ * <pre>
  *     -conf <configuration file>     specify a configuration file
  *     -D <property=value>            use value for given property
  *     -fs <local|namenode:port>      specify a namenode
@@ -69,13 +70,15 @@
  *                            jar files to include in the classpath.
  *     -archives <comma separated list of archives>    specify comma
  *             separated archives to be unarchived on the compute machines.
-
- * </pre></blockquote>
+ * </pre>
+ * </blockquote>
+ *
* *

The general command line syntax is:

- * <p><blockquote><pre>
+ * <blockquote><pre>
+ * 
  * bin/hadoop command [genericOptions] [commandOptions]
- * </pre></blockquote>
+ * </pre>
+ * </blockquote>
* *

Generic command line arguments might modify * Configuration objects, given to constructors.

@@ -83,7 +86,9 @@ *

The functionality is implemented using Commons CLI.

* *

Examples:

- *

+ * 

+ *

+ *
  * $ bin/hadoop dfs -fs darwin:8020 -ls /data
  * list /data directory in dfs with namenode darwin:8020
  * 
@@ -105,7 +110,9 @@
  * $ bin/hadoop jar -libjars testlib.jar 
  * -archives test.tgz -files file.txt inputjar args
  * job submission with libjars, files and archives
- * 

+ *
+ *
+ *

* * @see Tool * @see ToolRunner diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java index a4fbce4ace86f..2b803d5eefced 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java @@ -32,7 +32,7 @@ * and only handle its custom arguments.

* *

Here is how a typical Tool is implemented:

- * <p><blockquote><pre>
+ * <blockquote><pre>
  *     public class MyApp extends Configured implements Tool {
  *     
  *       public int run(String[] args) throws Exception {
@@ -69,7 +69,7 @@
  *         System.exit(res);
  *       }
  *     }
- * </pre></blockquote><p>
+ * </pre></blockquote>
* * @see GenericOptionsParser * @see ToolRunner diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java index 5fdea4f5b747a..68261a22e44f4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java @@ -47,18 +47,18 @@ * with IOStatisticsSource passthrough, and of conversions of * the iterators to lists/arrays and of performing actions * on the values. - *

+ *

* This aims to make it straightforward to use lambda-expressions to * transform the results of an iterator, without losing the statistics * in the process, and to chain the operations together. - *

+ *

* The closeable operation will be passed through RemoteIterators which * wrap other RemoteIterators. This is to support any iterator which * can be closed to release held connections, file handles etc. * Unless client code is written to assume that RemoteIterator instances * may be closed, this is not likely to be broadly used. It is added * to make it possible to adopt this feature in a managed way. - *

+ *

* One notable feature is that the * {@link #foreach(RemoteIterator, ConsumerRaisingIOE)} method will * LOG at debug any IOStatistics provided by the iterator, if such @@ -66,7 +66,7 @@ * if the LOG is not set to debug, so it is a zero cost feature unless * the logger {@code org.apache.hadoop.fs.functional.RemoteIterators} * is at DEBUG. - *

+ *

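A sketch of the chaining style described above, with exception handling elided:

import java.io.IOException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import static org.apache.hadoop.util.functional.RemoteIterators.foreach;
import static org.apache.hadoop.util.functional.RemoteIterators.mappingRemoteIterator;

public class ListingExample {
  static void list(FileSystem fs, Path path) throws IOException {
    RemoteIterator<FileStatus> statuses = fs.listStatusIterator(path);
    // The mapped iterator passes through IOStatistics and close().
    RemoteIterator<Path> paths =
        mappingRemoteIterator(statuses, FileStatus::getPath);
    // foreach consumes the iterator; statistics are logged at DEBUG.
    foreach(paths, p -> System.out.println(p));
  }
}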
* Based on the S3A Listing code, and some some work on moving other code * to using iterative listings so as to pick up the statistics. */ From 1655d3f2e0833431c242d51478d6fed44c8e9a0d Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 21:32:28 -0700 Subject: [PATCH 14/53] HADOOP-18229. Fix some java doc compilation errors ConfigRedactor.java no description for @param Configuration.java warning: no @return, no description for @param, no @param for , no @throws for java.io.IOException, no description for @throws etc. GenericOptionsParser.java unexpected end tag. IOStatisticsSnapshot.java warning: empty
<p>
tag. Writable.java no description for @throws. --- .../apache/hadoop/conf/ConfigRedactor.java | 4 +- .../org/apache/hadoop/conf/Configuration.java | 45 ++++++++++++------- .../fs/statistics/IOStatisticsSnapshot.java | 2 +- .../java/org/apache/hadoop/io/Writable.java | 4 +- .../hadoop/util/GenericOptionsParser.java | 4 +- 5 files changed, 37 insertions(+), 22 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java index 5b2d1449f9c86..3a6e30874bc24 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java @@ -57,8 +57,8 @@ public ConfigRedactor(Configuration conf) { * Given a key / value pair, decides whether or not to redact and returns * either the original value or text indicating it has been redacted. * - * @param key - * @param value + * @param key param key + * @param value param value, will return if conditions permit * @return Original value, or text indicating it has been redacted */ public String redact(String key, String value) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java index 1f809b7b54706..acf4fd54239c5 100755 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java @@ -1908,6 +1908,7 @@ public long getTimeDuration(String name, String defaultValue, * @param name Property name * @param vStr The string value with time unit suffix to be converted. * @param unit Unit to convert the stored property, if it exists. + * @return time duration in given time unit */ public long getTimeDurationHelper(String name, String vStr, TimeUnit unit) { return getTimeDurationHelper(name, vStr, unit, unit); @@ -1922,6 +1923,7 @@ public long getTimeDurationHelper(String name, String vStr, TimeUnit unit) { * @param vStr The string value with time unit suffix to be converted. * @param defaultUnit Unit to convert the stored property, if it exists. * @param returnUnit Unit for the returned value. + * @return time duration in given time unit */ private long getTimeDurationHelper(String name, String vStr, TimeUnit defaultUnit, TimeUnit returnUnit) { @@ -2482,7 +2484,7 @@ public char[] getPasswordFromCredentialProviders(String name) /** * Fallback to clear text passwords in configuration. - * @param name + * @param name the property name * @return clear text password or null */ protected char[] getPasswordFromConfig(String name) { @@ -2547,6 +2549,8 @@ public InetSocketAddress getSocketAddr( /** * Set the socket address for the name property as * a host:port. + * @param name property name. + * @param addr inetSocketAddress addr */ public void setSocketAddr(String name, InetSocketAddress addr) { set(name, NetUtils.getHostPortString(addr)); @@ -2724,6 +2728,7 @@ public Class getClass(String name, Class defaultValue) { * @param name the conf key name. * @param defaultValue default value. * @param xface the interface implemented by the named class. + * @param Interface class type * @return property value as a Class, * or defaultValue. */ @@ -2753,6 +2758,7 @@ else if (theClass != null) * @param name the property name. 
* @param xface the interface implemented by the classes named by * name. + * @param Interface class type * @return a List of objects implementing xface. */ @SuppressWarnings("unchecked") @@ -2785,15 +2791,16 @@ public void setClass(String name, Class theClass, Class xface) { set(name, theClass.getName()); } - /** + /** * Get a local file under a directory named by dirsProp with * the given path. If dirsProp contains multiple directories, * then one is chosen based on path's hash code. If the selected * directory does not exist, an attempt is made to create it. - * + * * @param dirsProp directory in which to locate the file. - * @param path file-path. + * @param path file-path. * @return local file under the directory with the given path. + * @throws IOException raised on errors performing I/O. */ public Path getLocalPath(String dirsProp, String path) throws IOException { @@ -2817,15 +2824,16 @@ public Path getLocalPath(String dirsProp, String path) throw new IOException("No valid local directories in property: "+dirsProp); } - /** + /** * Get a local file name under a directory named in dirsProp with * the given path. If dirsProp contains multiple directories, * then one is chosen based on path's hash code. If the selected * directory does not exist, an attempt is made to create it. - * + * * @param dirsProp directory in which to locate the file. - * @param path file-path. + * @param path file-path. * @return local file under the directory with the given path. + * @throws IOException raised on errors performing I/O. */ public File getFile(String dirsProp, String path) throws IOException { @@ -3437,7 +3445,7 @@ void parseNext() throws IOException, XMLStreamException { /** * Add tags defined in HADOOP_TAGS_SYSTEM, HADOOP_TAGS_CUSTOM. - * @param prop + * @param prop properties */ public void addTags(Properties prop) { // Get all system tags @@ -3548,11 +3556,12 @@ private void checkForOverride(Properties properties, String name, String attr, S } } - /** + /** * Write out the non-default properties in this configuration to the given * {@link OutputStream} using UTF-8 encoding. - * + * * @param out the output stream to write to. + * @throws IOException raised on errors performing I/O. */ public void writeXml(OutputStream out) throws IOException { writeXml(new OutputStreamWriter(out, "UTF-8")); @@ -3582,7 +3591,9 @@ public void writeXml(Writer out) throws IOException { * the configuration, this method throws an {@link IllegalArgumentException}. * * + * @param propertyName xml property name * @param out the writer to write to. + * @throws IOException raised on errors performing I/O. */ public void writeXml(@Nullable String propertyName, Writer out) throws IOException, IllegalArgumentException { @@ -3736,7 +3747,7 @@ private synchronized void appendXMLProperty(Document doc, Element conf, * @param config the configuration * @param propertyName property name * @param out the Writer to write to - * @throws IOException + * @throws IOException raised on errors performing I/O. * @throws IllegalArgumentException when property name is not * empty and the property is not found in configuration **/ @@ -3783,7 +3794,7 @@ public static void dumpConfiguration(Configuration config, * * @param config the configuration * @param out the Writer to write to - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ public static void dumpConfiguration(Configuration config, Writer out) throws IOException { @@ -3812,7 +3823,7 @@ public static void dumpConfiguration(Configuration config, * @param jsonGen json writer * @param config configuration * @param name property name - * @throws IOException + * @throws IOException raised on errors performing I/O. */ private static void appendJSONProperty(JsonGenerator jsonGen, Configuration config, String name, ConfigRedactor redactor) @@ -3894,7 +3905,9 @@ synchronized boolean getQuietMode() { return this.quietmode; } - /** For debugging. List non-default properties to the terminal and exit. */ + /** For debugging. List non-default properties to the terminal and exit. + * @param args the argument to be parsed + */ public static void main(String[] args) throws Exception { new Configuration().writeXml(System.out); } @@ -3929,7 +3942,7 @@ public void write(DataOutput out) throws IOException { /** * get keys matching the the regex - * @param regex + * @param regex the regex to match against. * @return {@literal Map} with matching keys */ public Map getValByRegex(String regex) { @@ -3974,6 +3987,8 @@ public static void dumpDeprecatedKeys() { /** * Returns whether or not a deprecated name has been warned. If the name is not * deprecated then always return false + * @param name proprties + * @return true if name is a warned deprecation */ public static boolean hasWarnedDeprecation(String name) { DeprecationContext deprecations = deprecationContext.get(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java index 63d37e97c98b8..7e18a83e77257 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java @@ -53,7 +53,7 @@ * deserialized. If for some reason this is required, use * {@link #requiredSerializationClasses()} to get the list of classes * used when deserializing instances of this object. - *

+ *

*

* It is annotated for correct serializations with jackson2. *

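A usage sketch, where stats stands for any live IOStatistics instance, for example one retrieved from a stream:

import org.apache.hadoop.fs.statistics.IOStatistics;
import org.apache.hadoop.fs.statistics.IOStatisticsSnapshot;
import org.apache.hadoop.fs.statistics.IOStatisticsSupport;

public class SnapshotExample {
  static IOStatisticsSnapshot snapshotOf(IOStatistics stats) {
    // Serializable, aggregatable copy of the live statistics.
    IOStatisticsSnapshot snapshot =
        IOStatisticsSupport.snapshotIOStatistics(stats);
    snapshot.aggregate(stats); // merge in a later reading
    return snapshot;
  }
}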
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java index b94de6c3c72bd..3b3f8cf78e1bc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java @@ -71,7 +71,7 @@ public interface Writable { * Serialize the fields of this object to out. * * @param out DataOuput to serialize this object into. - * @throws IOException + * @throws IOException any other problem for write */ void write(DataOutput out) throws IOException; @@ -82,7 +82,7 @@ public interface Writable { * existing object where possible.

* * @param in DataInput to deseriablize this object from. - * @throws IOException + * @throws IOException any other problem for readFields */ void readFields(DataInput in) throws IOException; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java index f6d6518b59968..7eaaeb89185e3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java @@ -86,7 +86,7 @@ *

The functionality is implemented using Commons CLI.

* *

Examples:

- *

+ * *

*
  * $ bin/hadoop dfs -fs darwin:8020 -ls /data
@@ -112,7 +112,7 @@
  * job submission with libjars, files and archives
  * 
*
- *

+ * * * @see Tool * @see ToolRunner From 42df843f301c9919e1fbb20dafd9fdd1398d6660 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 9 May 2022 23:06:35 -0700 Subject: [PATCH 15/53] HADOOP-18229. Fix some java doc compilation errors Configured.java no @param for conf CryptoCodec.java no @throws for java.security.GeneralSecurityException Reconfigurable.java no @param for property, no @param for newVal, no @throws for org.apache.hadoop.conf.ReconfigurationException ReconfigurableBase.java no @param for conf,no @return,no @throws for java.io.IOException ReconfigurationException.java no @param for property, no @param for newVal, no @param for cause for property etc ReconfigurationTaskStatus.java no @return --- .../main/java/org/apache/hadoop/conf/Configured.java | 4 +++- .../java/org/apache/hadoop/conf/Reconfigurable.java | 6 ++++++ .../org/apache/hadoop/conf/ReconfigurableBase.java | 3 +++ .../apache/hadoop/conf/ReconfigurationException.java | 10 ++++++++++ .../apache/hadoop/conf/ReconfigurationTaskStatus.java | 2 ++ .../java/org/apache/hadoop/crypto/CryptoCodec.java | 8 ++++++-- 6 files changed, 30 insertions(+), 3 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java index f06af2b98df14..4889dd5311826 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java @@ -33,7 +33,9 @@ public Configured() { this(null); } - /** Construct a Configured. */ + /** Construct a Configured. + * @param conf the Configuration object + */ public Configured(Configuration conf) { setConf(conf); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java index c93dc31a881a9..c03193968ce2f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java @@ -33,6 +33,9 @@ public interface Reconfigurable extends Configurable { * (or null if it was not previously set). If newVal is null, set the property * to its default value; * + * @param property property name + * @param newVal new value + * @throws ReconfigurationException if there was an error applying newVal. * If the property cannot be changed, throw a * {@link ReconfigurationException}. */ @@ -45,11 +48,14 @@ void reconfigureProperty(String property, String newVal) * If isPropertyReconfigurable returns true for a property, * then changeConf should not throw an exception when changing * this property. + * @param property property name + * @return true if property reconfigurable; false if not. */ boolean isPropertyReconfigurable(String property); /** * Return all the properties that can be changed at run time. 
+ * @return reconfigurable propertys */ Collection getReconfigurableProperties(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java index 35dfeb99f0ba6..c3706dec4b7f8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java @@ -79,6 +79,7 @@ public ReconfigurableBase() { /** * Construct a ReconfigurableBase with the {@link Configuration} * conf. + * @param conf configuration */ public ReconfigurableBase(Configuration conf) { super((conf == null) ? new Configuration() : conf); @@ -91,6 +92,7 @@ public void setReconfigurationUtil(ReconfigurationUtil ru) { /** * Create a new configuration. + * @return configuration */ protected abstract Configuration getNewConf(); @@ -162,6 +164,7 @@ public void run() { /** * Start a reconfiguration task to reload configuration in background. + * @throws IOException raised on errors performing I/O. */ public void startReconfigurationTask() throws IOException { synchronized (reconfigLock) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java index 0935bf025fd30..cf30ba5a2b9f3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java @@ -59,6 +59,10 @@ public ReconfigurationException() { /** * Create a new instance of {@link ReconfigurationException}. + * @param property property name + * @param newVal new value + * @param oldVal old value + * @param cause original exception. */ public ReconfigurationException(String property, String newVal, String oldVal, @@ -71,6 +75,9 @@ public ReconfigurationException(String property, /** * Create a new instance of {@link ReconfigurationException}. + * @param property property name + * @param newVal new value + * @param oldVal old value */ public ReconfigurationException(String property, String newVal, String oldVal) { @@ -82,6 +89,7 @@ public ReconfigurationException(String property, /** * Get property that cannot be changed. + * @return property info */ public String getProperty() { return property; @@ -89,6 +97,7 @@ public String getProperty() { /** * Get value to which property was supposed to be changed. + * @return new value */ public String getNewValue() { return newVal; @@ -96,6 +105,7 @@ public String getNewValue() { /** * Get old value of property that cannot be changed. 
+ * @return old value */ public String getOldValue() { return oldVal; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java index 05ec90758e5fa..04de7aa97a34e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java @@ -43,6 +43,7 @@ public ReconfigurationTaskStatus(long startTime, long endTime, * Return true if * - A reconfiguration task has finished or * - an active reconfiguration task is running + * @return true if startTime > 0; false if not. */ public boolean hasTask() { return startTime > 0; @@ -51,6 +52,7 @@ public boolean hasTask() { /** * Return true if the latest reconfiguration task has finished and there is * no another active task running. + * @return true if endTime > 0; false if not. */ public boolean stopped() { return endTime > 0; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java index 64c754faa59d8..872c03f413767 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java @@ -145,14 +145,18 @@ private static List> getCodecClasses( public abstract CipherSuite getCipherSuite(); /** - * Create a {@link org.apache.hadoop.crypto.Encryptor}. + * Create a {@link org.apache.hadoop.crypto.Encryptor}. + * * @return Encryptor the encryptor + * @throws GeneralSecurityException thrown if create encryptor error */ public abstract Encryptor createEncryptor() throws GeneralSecurityException; - + /** * Create a {@link org.apache.hadoop.crypto.Decryptor}. + * * @return Decryptor the decryptor + * @throws GeneralSecurityException thrown if create decryptor error */ public abstract Decryptor createDecryptor() throws GeneralSecurityException; From 4aa8abb63119ed66769f99744c06f07b54e53f37 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Tue, 10 May 2022 00:03:25 -0700 Subject: [PATCH 16/53] HADOOP-18229. 
Fix some java doc compilation errors CryptoInputStream.java no description for @throws, CryptoOutputStream.java no description for @throws, CryptoStreamUtils.java no @param for buffer, no @param for conf, no @return, no @param for codec, no @throws for java.io.IOException HasFileDescriptor.java no description for @throws, KeyProvider.java no description for @throws, OpensslCipher.java no description for @throws, Seekable.java no @param for pos, no @throws for java.io.IOException, no @param for targetPos --- .../hadoop/crypto/CryptoInputStream.java | 2 +- .../hadoop/crypto/CryptoOutputStream.java | 2 +- .../hadoop/crypto/CryptoStreamUtils.java | 29 ++++++++++++++--- .../apache/hadoop/crypto/OpensslCipher.java | 20 ++++++------ .../apache/hadoop/crypto/key/KeyProvider.java | 32 +++++++++---------- .../apache/hadoop/fs/HasFileDescriptor.java | 2 +- .../java/org/apache/hadoop/fs/Seekable.java | 14 ++++++-- 7 files changed, 65 insertions(+), 36 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java index 5ab5d341fb826..067abde9dfbb8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java @@ -157,7 +157,7 @@ public InputStream getWrappedStream() { * @param off the buffer offset. * @param len the maximum number of decrypted data bytes to read. * @return int the total number of decrypted data bytes read into the buffer. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public int read(byte[] b, int off, int len) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java index 8e7522112551e..2a1335b6e745a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java @@ -146,7 +146,7 @@ public OutputStream getWrappedStream() { * @param b the data. * @param off the start offset in the data. * @param len the number of bytes to write. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public synchronized void write(byte[] b, int off, int len) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java index 318975fd6cebd..9db5f9173af38 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java @@ -52,13 +52,22 @@ public static void freeDB(ByteBuffer buffer) { } } - /** Read crypto buffer size */ + /** + * Read crypto buffer size + * + * @param conf configuration + * @return hadoop.security.crypto.buffer.size + */ public static int getBufferSize(Configuration conf) { return conf.getInt(HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY, HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_DEFAULT); } - - /** AES/CTR/NoPadding or SM4/CTR/NoPadding is required. 
*/ + + /** + * AES/CTR/NoPadding or SM4/CTR/NoPadding is required. + * + * @param codec crypto codec + */ public static void checkCodec(CryptoCodec codec) { if (codec.getCipherSuite() != CipherSuite.AES_CTR_NOPADDING && codec.getCipherSuite() != CipherSuite.SM4_CTR_NOPADDING) { @@ -67,17 +76,27 @@ public static void checkCodec(CryptoCodec codec) { } } - /** Check and floor buffer size */ + /** + * Check and floor buffer size + * + * @param codec crypto codec + * @param bufferSize the size of the buffer to be used. + * @return calc buffer siez + */ public static int checkBufferSize(CryptoCodec codec, int bufferSize) { Preconditions.checkArgument(bufferSize >= MIN_BUFFER_SIZE, "Minimum value of buffer size is " + MIN_BUFFER_SIZE + "."); return bufferSize - bufferSize % codec.getCipherSuite() .getAlgorithmBlockSize(); } - + /** * If input stream is {@link org.apache.hadoop.fs.Seekable}, return it's * current position, otherwise return 0; + * + * @param in wrapper + * @return current position, otherwise return 0; + * @throws IOException raised on errors performing I/O. */ public static long getInputStreamOffset(InputStream in) throws IOException { if (in instanceof Seekable) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java index 0c65b74b2913b..1961a765b4a9a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java @@ -225,34 +225,34 @@ public int update(ByteBuffer input, ByteBuffer output) output.position(output.position() + len); return len; } - + /** * Finishes a multiple-part operation. The data is encrypted or decrypted, * depending on how this cipher was initialized. *

- * + *

 * The result is stored in the output buffer. Upon return, the output buffer's
 * position will have advanced by n, where n is the value returned by this
 * method; the output buffer's limit will not have changed.
 *

- * + *

 * If output.remaining() bytes are insufficient to hold the result,
 * a ShortBufferException is thrown.
 *

- * + *

 * Upon finishing, this method resets this cipher object to the state it was
 * in when previously initialized. That is, the object is available to encrypt
 * or decrypt more data.
 *

- * - * If any exception is thrown, this cipher object need to be reset before it + *

+ * If any exception is thrown, this cipher object need to be reset before it * can be used again. - * + * * @param output the output ByteBuffer * @return int number of bytes stored in output - * @throws ShortBufferException - * @throws IllegalBlockSizeException - * @throws BadPaddingException + * @throws ShortBufferException if there is insufficient space in the output buffer. + * @throws IllegalBlockSizeException This exception is thrown when the length of data provided to a block cipher is incorrect. + * @throws BadPaddingException This exception is thrown when a particular padding mechanism is expected for the input data but the data is not padded properly. */ public int doFinal(ByteBuffer output) throws ShortBufferException, IllegalBlockSizeException, BadPaddingException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java index dafdaf7e15b25..4210548f87720 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java @@ -242,7 +242,7 @@ protected int addVersion() { /** * Serialize the metadata to a set of bytes. * @return the serialized bytes - * @throws IOException + * @throws IOException raised on errors performing I/O. */ protected byte[] serialize() throws IOException { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); @@ -281,7 +281,7 @@ protected byte[] serialize() throws IOException { /** * Deserialize a new metadata object from a set of bytes. * @param bytes the serialized metadata - * @throws IOException + * @throws IOException raised on errors performing I/O. */ protected Metadata(byte[] bytes) throws IOException { String cipher = null; @@ -450,7 +450,7 @@ public boolean isTransient() { * when decrypting data. * @param versionName the name of a specific version of the key * @return the key material - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract KeyVersion getKeyVersion(String versionName ) throws IOException; @@ -458,14 +458,14 @@ public abstract KeyVersion getKeyVersion(String versionName /** * Get the key names for all keys. * @return the list of key names - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract List getKeys() throws IOException; /** * Get key metadata in bulk. * @param names the names of the keys to get - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Metadata[] getKeysMetadata(String... names) throws IOException { Metadata[] result = new Metadata[names.length]; @@ -478,7 +478,7 @@ public Metadata[] getKeysMetadata(String... names) throws IOException { /** * Get the key material for all versions of a specific key name. * @return the list of key material - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract List getKeyVersions(String name) throws IOException; @@ -488,7 +488,7 @@ public Metadata[] getKeysMetadata(String... names) throws IOException { * @param name the base name of the key * @return the version name of the current version of the key or null if the * key version doesn't exist - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ public KeyVersion getCurrentKey(String name) throws IOException { Metadata meta = getMetadata(name); @@ -502,7 +502,7 @@ public KeyVersion getCurrentKey(String name) throws IOException { * Get metadata about the key. * @param name the basename of the key * @return the key's metadata or null if the key doesn't exist - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract Metadata getMetadata(String name) throws IOException; @@ -512,7 +512,7 @@ public KeyVersion getCurrentKey(String name) throws IOException { * @param material the key material for the first version of the key. * @param options the options for the new key. * @return the version name of the first version of the key. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract KeyVersion createKey(String name, byte[] material, Options options) throws IOException; @@ -558,7 +558,7 @@ protected byte[] generateKey(int size, String algorithm) * @param name the base name of the key * @param options the options for the new key. * @return the version name of the first version of the key. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @throws NoSuchAlgorithmException */ public KeyVersion createKey(String name, Options options) @@ -570,7 +570,7 @@ public KeyVersion createKey(String name, Options options) /** * Delete the given key. * @param name the name of the key to delete - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract void deleteKey(String name) throws IOException; @@ -579,7 +579,7 @@ public KeyVersion createKey(String name, Options options) * @param name the basename of the key * @param material the new key material * @return the name of the new version of the key - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract KeyVersion rollNewVersion(String name, byte[] material @@ -601,7 +601,7 @@ public void close() throws IOException { * * @param name the basename of the key * @return the name of the new version of the key - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException, IOException { @@ -620,7 +620,7 @@ public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException, * version of the given key. * * @param name the basename of the key - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void invalidateCache(String name) throws IOException { // NOP @@ -628,7 +628,7 @@ public void invalidateCache(String name) throws IOException { /** * Ensures that any changes to the keys are written to persistent store. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract void flush() throws IOException; @@ -637,7 +637,7 @@ public void invalidateCache(String name) throws IOException { * "/aaa/bbb". * @param versionName the version name to split * @return the base name of the key - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ public static String getBaseName(String versionName) throws IOException { int div = versionName.lastIndexOf('@'); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java index bcf325ceca5df..a0e89d6aeac44 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java @@ -33,7 +33,7 @@ public interface HasFileDescriptor { /** * @return the FileDescriptor - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public FileDescriptor getFileDescriptor() throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java index 919c857ffa628..59f0c66b2dc7f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java @@ -32,17 +32,27 @@ public interface Seekable { * Seek to the given offset from the start of the file. * The next read() will be from that location. Can't * seek past the end of the file. + * + * @param pos offset from the start of the file + * @throws IOException raised on errors performing I/O. */ void seek(long pos) throws IOException; - + /** * Return the current offset from the start of the file + * + * @return offset from the start of the file + * @throws IOException raised on errors performing I/O. */ long getPos() throws IOException; /** - * Seeks a different copy of the data. Returns true if + * Seeks a different copy of the data. Returns true if * found a new source, false otherwise. + * + * @param targetPos target position + * @return true if found a new source, false otherwise. + * @throws IOException raised on errors performing I/O. */ @InterfaceAudience.Private boolean seekToNewSource(long targetPos) throws IOException; From b6c20efa4b680de569d36a1319633613fd8e8598 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Tue, 10 May 2022 02:11:24 -0700 Subject: [PATCH 17/53] HADOOP-18229. 
Fix some java doc compilation errors KeyProvider.java no @throws for java.security.NoSuchAlgorithmException, no @throws for java.io.IOException KeyProviderCryptoExtension.java warning: no description for @param, warning: no @throws for java.io.IOException DelegationTokenIssuer.java warning: no @return, warning: no @param for renewer, no @param for issuer etc KeyProviderDelegationTokenExtension.java no description for @throws, Tool.java warning: no description for @throws CommandShell.java warning: no @return, warning: no description for @param KeyShell.java warning: no description for @throws TokenRenewer.java warning: no description for @throws,warning: no @param for token --- .../apache/hadoop/crypto/key/KeyProvider.java | 8 +++++-- .../key/KeyProviderCryptoExtension.java | 5 +++-- .../KeyProviderDelegationTokenExtension.java | 4 ++-- .../apache/hadoop/crypto/key/KeyShell.java | 4 ++-- .../security/token/DelegationTokenIssuer.java | 13 +++++++++++ .../hadoop/security/token/TokenRenewer.java | 22 ++++++++++++------- .../org/apache/hadoop/tools/CommandShell.java | 4 +++- .../java/org/apache/hadoop/util/Tool.java | 2 +- 8 files changed, 44 insertions(+), 18 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java index 4210548f87720..75355c464e21b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java @@ -601,7 +601,9 @@ public void close() throws IOException { * * @param name the basename of the key * @return the name of the new version of the key - * @throws IOException raised on errors performing I/O. + * @throws IOException raised on errors performing I/O. + * @throws NoSuchAlgorithmException This exception is thrown when a particular cryptographic algorithm is requested + * but is not available in the environment. */ public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException, IOException { @@ -660,9 +662,11 @@ protected static String buildVersionName(String name, int version) { /** * Find the provider with the given key. + * * @param providerList the list of providers - * @param keyName the key name we are looking for + * @param keyName the key name we are looking for * @return the KeyProvider that has the key + * @throws IOException raised on errors performing I/O. 
*/ public static KeyProvider findProvider(List providerList, String keyName) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java index 3f3c367fc3933..cc767ab545488 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java @@ -474,8 +474,9 @@ public void drain(String keyName) { /** * This constructor is to be used by sub classes that provide * delegating/proxying functionality to the {@link KeyProviderCryptoExtension} - * @param keyProvider - * @param extension + * + * @param keyProvider key provider + * @param extension crypto extension */ protected KeyProviderCryptoExtension(KeyProvider keyProvider, CryptoExtension extension) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java index 1fdc2fe12455b..3c1af424eb7cd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java @@ -48,14 +48,14 @@ public interface DelegationTokenExtension * Renews the given token. * @param token The token to be renewed. * @return The token's lifetime after renewal, or 0 if it can't be renewed. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ long renewDelegationToken(final Token token) throws IOException; /** * Cancels the given token. * @param token The token to be cancelled. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ Void cancelDelegationToken(final Token token) throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java index a75f7d3aa63bd..c18d0d41bc08a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java @@ -75,7 +75,7 @@ public class KeyShell extends CommandShell { * * @param args Command line arguments. * @return 0 on success, 1 on failure. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override protected int init(String[] args) throws IOException { @@ -547,7 +547,7 @@ private String prettifyException(Exception e) { * success and 1 for failure. * * @param args Command line arguments. - * @throws Exception + * @throws Exception raised on errors performing I/O. 
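The main() contract documented here is normally exercised through ToolRunner. A small sketch of launching KeyShell programmatically; the "list" subcommand is only an example:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.KeyShell;
import org.apache.hadoop.util.ToolRunner;

public class KeyShellLauncher {
  public static void main(String[] args) throws Exception {
    // Equivalent to: hadoop key list
    int exitCode = ToolRunner.run(new Configuration(), new KeyShell(),
        new String[] {"list"});
    System.exit(exitCode);
  }
}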
*/ public static void main(String[] args) throws Exception { int res = ToolRunner.run(new Configuration(), new KeyShell(), args); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java index 7b0a78bcd3c0d..892a01f0f21fc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java @@ -39,17 +39,24 @@ public interface DelegationTokenIssuer { * The service name used as the alias for the token in the credential * token map. addDelegationTokens will use this to determine if * a token exists, and if not, add a new token with this alias. + * @return the token */ String getCanonicalServiceName(); /** * Unconditionally get a new token with the optional renewer. Returning * null indicates the service does not issue tokens. + * @param renewer + * @return the token + * @throws IOException raised on errors performing I/O. */ Token getDelegationToken(String renewer) throws IOException; /** * Issuers may need tokens from additional services. + * + * @return delegation token issuer + * @throws IOException raised on errors performing I/O. */ default DelegationTokenIssuer[] getAdditionalTokenIssuers() throws IOException { @@ -81,6 +88,12 @@ default Token[] addDelegationTokens( /** * NEVER call this method directly. + * + * @param issuer issuer + * @param renewer renewer + * @param credentials cache in which to add new delegation tokens + * @param tokens list of new delegation tokens + * @throws IOException raised on errors performing I/O. */ @InterfaceAudience.Private static void collectDelegationTokens( diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java index 11e275f3213d2..2e27b3ca5b5fd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java @@ -44,16 +44,22 @@ public abstract class TokenRenewer { * cancelled. * @param token the token being checked * @return true if the token may be renewed or cancelled - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract boolean isManaged(Token token) throws IOException; - - /** - * Renew the given token. - * @return the new expiration time - * @throws IOException - * @throws InterruptedException - */ + + /** + * Renew the given token. + * + * @param token the token being checked + * @param conf configuration + * + * @return the new expiration time + * @throws IOException raised on errors performing I/O. + * @throws InterruptedException thrown when a thread is waiting, sleeping, + * or otherwise occupied, and the thread is interrupted, + * either before or during the activity. 
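To make the isManaged/renew/cancel contract concrete, here is a hypothetical TokenRenewer subclass for an invented token kind "DemoToken"; the service interaction is stubbed out:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenRenewer;

public class DemoTokenRenewer extends TokenRenewer {
  private static final Text KIND = new Text("DemoToken");

  @Override
  public boolean handleKind(Text kind) {
    return KIND.equals(kind);
  }

  @Override
  public boolean isManaged(Token<?> token) throws IOException {
    return true; // this renewer can renew and cancel DemoTokens
  }

  @Override
  public long renew(Token<?> token, Configuration conf)
      throws IOException, InterruptedException {
    // a real renewer would contact the issuing service here
    return System.currentTimeMillis() + 24 * 60 * 60 * 1000L;
  }

  @Override
  public void cancel(Token<?> token, Configuration conf)
      throws IOException, InterruptedException {
    // a real renewer would ask the issuing service to revoke the token
  }
}

A real renewer is discovered via ServiceLoader, so it would also be listed in META-INF/services/org.apache.hadoop.security.token.TokenRenewer.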
+ */ public abstract long renew(Token token, Configuration conf ) throws IOException, InterruptedException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/CommandShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/CommandShell.java index a53e2259e0e25..4e5f0fa4054b7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/CommandShell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/CommandShell.java @@ -36,6 +36,7 @@ public abstract class CommandShell extends Configured implements Tool { /** * Return usage string for the command including any summary of subcommands. + * @return command usage */ public abstract String getCommandUsage(); @@ -84,8 +85,9 @@ public int run(String[] args) throws Exception { /** * Parse the command line arguments and initialize subcommand instance. - * @param args + * @param args arguments * @return 0 if the argument(s) were recognized, 1 otherwise + * @throws Exception init exception */ protected abstract int init(String[] args) throws Exception; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java index 2b803d5eefced..b526861f45741 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java @@ -82,7 +82,7 @@ public interface Tool extends Configurable { * * @param args command specific arguments. * @return exit code. - * @throws Exception + * @throws Exception command exception */ int run(String [] args) throws Exception; } From 4ccb346f00550e243282e1b8da349c843183650b Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Tue, 10 May 2022 06:59:47 -0700 Subject: [PATCH 18/53] HADOOP-18229. Fix some java doc compilation errors AbstractFileSystem.java no @param for f, no @param for opts etc Configuration.java warning: no @throws for java.lang.Exception CryptoStreamUtils.java: warning: no @param for buffer KeyProvider.java warning: no description for @throws DelegationTokenIssuer.java: no description for @param TokenRenewer.java: no @param for token ValueQueue.java: no description for @throws CryptoStreamUtils.java checkstyle KeyProvider.java checkstyle OpensslCipher.java checkstyle --- .../org/apache/hadoop/conf/Configuration.java | 1 + .../hadoop/crypto/CryptoStreamUtils.java | 10 +- .../apache/hadoop/crypto/OpensslCipher.java | 7 +- .../apache/hadoop/crypto/key/KeyProvider.java | 9 +- .../hadoop/crypto/key/kms/ValueQueue.java | 6 +- .../apache/hadoop/fs/AbstractFileSystem.java | 272 ++++++++++++++++-- .../security/token/DelegationTokenIssuer.java | 2 +- .../hadoop/security/token/TokenRenewer.java | 15 +- 8 files changed, 279 insertions(+), 43 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java index acf4fd54239c5..a1ae4a7ab5f5d 100755 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java @@ -3907,6 +3907,7 @@ synchronized boolean getQuietMode() { /** For debugging. List non-default properties to the terminal and exit. 
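The run() contract above is easiest to see in a complete, hypothetical Tool; ToolRunner parses and strips the generic options before run() is invoked:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class EchoTool extends Configured implements Tool {
  @Override
  public int run(String[] args) throws Exception {
    // args contains only the command-specific arguments
    for (String arg : args) {
      System.out.println(arg);
    }
    return 0; // exit code
  }

  public static void main(String[] args) throws Exception {
    System.exit(ToolRunner.run(new Configuration(), new EchoTool(), args));
  }
}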
* @param args the argument to be parsed + * @throws Exception exception */ public static void main(String[] args) throws Exception { new Configuration().writeXml(System.out); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java index 9db5f9173af38..1235d3f55fb10 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java @@ -39,7 +39,11 @@ public class CryptoStreamUtils { private static final Logger LOG = LoggerFactory.getLogger(CryptoStreamUtils.class); - /** Forcibly free the direct buffer. */ + /** + * Forcibly free the direct buffer. + * + * @param buffer buffer + */ public static void freeDB(ByteBuffer buffer) { if (CleanerUtil.UNMAP_SUPPORTED) { try { @@ -53,7 +57,7 @@ public static void freeDB(ByteBuffer buffer) { } /** - * Read crypto buffer size + * Read crypto buffer size. * * @param conf configuration * @return hadoop.security.crypto.buffer.size @@ -77,7 +81,7 @@ public static void checkCodec(CryptoCodec codec) { } /** - * Check and floor buffer size + * Check and floor buffer size. * * @param codec crypto codec * @param bufferSize the size of the buffer to be used. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java index 1961a765b4a9a..ac8652cae03ae 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java @@ -251,8 +251,11 @@ public int update(ByteBuffer input, ByteBuffer output) * @param output the output ByteBuffer * @return int number of bytes stored in output * @throws ShortBufferException if there is insufficient space in the output buffer. - * @throws IllegalBlockSizeException This exception is thrown when the length of data provided to a block cipher is incorrect. - * @throws BadPaddingException This exception is thrown when a particular padding mechanism is expected for the input data but the data is not padded properly. + * @throws IllegalBlockSizeException This exception is thrown when the length + * of data provided to a block cipher is incorrect. + * @throws BadPaddingException This exception is thrown when a particular + * padding mechanism is expected for the input + * data but the data is not padded properly. */ public int doFinal(ByteBuffer output) throws ShortBufferException, IllegalBlockSizeException, BadPaddingException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java index 75355c464e21b..e7727684e40ac 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java @@ -537,7 +537,7 @@ private String getAlgorithm(String cipher) { * @param size length of the key. * @param algorithm algorithm to use for generating the key. * @return the generated key. 
- * @throws NoSuchAlgorithmException + * @throws NoSuchAlgorithmException no such algorithm exception */ protected byte[] generateKey(int size, String algorithm) throws NoSuchAlgorithmException { @@ -559,7 +559,7 @@ protected byte[] generateKey(int size, String algorithm) * @param options the options for the new key. * @return the version name of the first version of the key. * @throws IOException raised on errors performing I/O. - * @throws NoSuchAlgorithmException + * @throws NoSuchAlgorithmException no such algorithm exception */ public KeyVersion createKey(String name, Options options) throws NoSuchAlgorithmException, IOException { @@ -602,7 +602,8 @@ public void close() throws IOException { * @param name the basename of the key * @return the name of the new version of the key * @throws IOException raised on errors performing I/O. - * @throws NoSuchAlgorithmException This exception is thrown when a particular cryptographic algorithm is requested + * @throws NoSuchAlgorithmException This exception is thrown when a particular + * cryptographic algorithm is requested * but is not available in the environment. */ public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException, @@ -684,7 +685,7 @@ public static KeyProvider findProvider(List providerList, * means. If true, the password should be provided by the caller using * setPassword(). * @return Whether or not the provider requires a password - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public boolean needsPassword() throws IOException { return false; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java index be2db05842c8e..cc54ad2df1cee 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java @@ -63,7 +63,7 @@ public interface QueueRefiller { * @param keyName Key name * @param keyQueue Queue that needs to be filled * @param numValues number of Values to be added to the queue. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void fillQueueForKey(String keyName, Queue keyQueue, int numValues) throws IOException; @@ -344,8 +344,8 @@ public int getSize(String keyName) { * @param keyName String key name * @param num Minimum number of values to return. * @return {@literal List} values returned - * @throws IOException - * @throws ExecutionException + * @throws IOException raised on errors performing I/O. 
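ValueQueue is a private API, but the getNext/getAtMost contract documented here can be sketched with a toy refiller; all names and the constructor arity below are illustrative and should be checked against the class:

import java.io.IOException;
import java.util.Queue;
import org.apache.hadoop.crypto.key.kms.ValueQueue;
import org.apache.hadoop.crypto.key.kms.ValueQueue.QueueRefiller;

public class ValueQueueDemo {
  public static void main(String[] args) throws Exception {
    QueueRefiller<Integer> refiller = new QueueRefiller<Integer>() {
      private int next = 0;
      @Override
      public synchronized void fillQueueForKey(String keyName,
          Queue<Integer> keyQueue, int numValues) throws IOException {
        for (int i = 0; i < numValues; i++) {
          keyQueue.add(next++);
        }
      }
    };
    // 10 cached values per key, refill below 30%, 60s expiry, 1 filler thread.
    ValueQueue<Integer> vq = new ValueQueue<>(10, 0.3f, 60000, 1, refiller);
    System.out.println(vq.getNext("key1"));      // fills synchronously on first use
    System.out.println(vq.getAtMost("key1", 3)); // returns up to 3 cached values
  }
}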
+ * @throws ExecutionException execution exception
  */
 public List getAtMost(String keyName, int num)
     throws IOException, ExecutionException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
index d9818b472f0e5..73b1e79efb010 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
@@ -272,7 +272,7 @@ public static AbstractFileSystem get(final URI uri, final Configuration conf)
  * @param supportedScheme the scheme supported by the implementor
  * @param authorityNeeded if true then theURI must have authority, if false
  *          then the URI must have null authority.
- *
+ * @param defaultPort default port to use if port is not specified in the URI.
  * @throws URISyntaxException uri has syntax error
  */
 public AbstractFileSystem(final URI uri, final String supportedScheme,
@@ -281,11 +281,12 @@ public AbstractFileSystem(final URI uri, final String supportedScheme,
   myUri = getUri(uri, supportedScheme, authorityNeeded, defaultPort);
   statistics = getStatistics(uri);
 }
-
+
 /**
  * Check that the Uri's scheme matches
- * @param uri
- * @param supportedScheme
+ *
+ * @param uri name URI of the FS
+ * @param supportedScheme supported scheme
  */
 public void checkScheme(URI uri, String supportedScheme) {
   String scheme = uri.getScheme();
@@ -362,7 +363,7 @@ public URI getUri() {
  * If the path is fully qualified URI, then its scheme and authority
  * matches that of this file system. Otherwise the path must be
  * slash-relative name.
- *
+ * @param path the path
  * @throws InvalidPathException if the path is invalid
  */
 public void checkPath(Path path) {
@@ -431,7 +432,7 @@ public String getUriPath(final Path p) {
 /**
  * Make the path fully qualified to this file system
- * @param path
+ * @param path the path
  * @return the qualified path
  */
 public Path makeQualified(Path path) {
@@ -496,9 +497,9 @@ public FsServerDefaults getServerDefaults(final Path f) throws IOException {
  * through any internal symlinks or mount point
  * @param p path to be resolved
  * @return fully qualified path
- * @throws FileNotFoundException
- * @throws AccessControlException
- * @throws IOException
+ * @throws FileNotFoundException if the file does not exist
+ * @throws AccessControlException if access is denied
+ * @throws IOException raised on errors performing I/O.
  * @throws UnresolvedLinkException if symbolic link on path cannot be
  *           resolved internally
  */
@@ -513,6 +514,18 @@ public Path resolvePath(final Path p) throws FileNotFoundException,
  * {@link FileContext#create(Path, EnumSet, Options.CreateOpts...)} except
  * that the Path f must be fully qualified and the permission is absolute
  * (i.e. umask has been applied).
+ *
+ * @param f the path
+ * @param createFlag create flags
+ * @param opts create options
+ * @throws AccessControlException access control exception
+ * @throws FileAlreadyExistsException file already exists exception
+ * @throws FileNotFoundException file not found exception
+ * @throws ParentNotDirectoryException parent not directory exception
+ * @throws UnsupportedFileSystemException unsupported file system exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return output stream
 */
 public final FSDataOutputStream create(final Path f,
     final EnumSet createFlag, Options.CreateOpts... opts)
@@ -630,6 +643,24 @@ public final FSDataOutputStream create(final Path f,
  * The specification of this method matches that of
  * {@link #create(Path, EnumSet, Options.CreateOpts...)} except that the opts
  * have been declared explicitly.
+ *
+ * @param f the path
+ * @param flag create flag
+ * @param absolutePermission absolute permission
+ * @param bufferSize buffer size
+ * @param replication replication factor
+ * @param blockSize block size
+ * @param progress progress
+ * @param checksumOpt checksum option
+ * @param createParent create parent
+ * @throws AccessControlException access control exception
+ * @throws FileAlreadyExistsException file already exists exception
+ * @throws FileNotFoundException file not found exception
+ * @throws ParentNotDirectoryException parent not directory exception
+ * @throws UnsupportedFileSystemException unsupported filesystem exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return output stream
 */
 public abstract FSDataOutputStream createInternal(Path f,
     EnumSet flag, FsPermission absolutePermission,
@@ -644,6 +675,14 @@ public abstract FSDataOutputStream createInternal(Path f,
  * {@link FileContext#mkdir(Path, FsPermission, boolean)} except that the Path
  * f must be fully qualified and the permission is absolute (i.e.
  * umask has been applied).
+ * @param dir directory
+ * @param permission permission
+ * @param createParent create parent flag
+ * @throws AccessControlException access control exception
+ * @throws FileAlreadyExistsException file already exists exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
 */
 public abstract void mkdir(final Path dir, final FsPermission permission,
     final boolean createParent) throws AccessControlException,
@@ -654,6 +693,14 @@ public abstract void mkdir(final Path dir, final FsPermission permission,
  * The specification of this method matches that of
  * {@link FileContext#delete(Path, boolean)} except that Path f must be for
  * this file system.
+ *
+ * @param f the path
+ * @param recursive recursive flag
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return true if delete succeeded, false otherwise
 */
 public abstract boolean delete(final Path f, final boolean recursive)
     throws AccessControlException, FileNotFoundException,
@@ -663,6 +710,13 @@ public abstract boolean delete(final Path f, final boolean recursive)
  * The specification of this method matches that of
  * {@link FileContext#open(Path)} except that Path f must be for this
  * file system.
+ *
+ * @param f the path
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
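The create/delete pair documented above mirrors the public FileContext calls. A minimal sketch against the default file system, with a demo path:

import java.util.EnumSet;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Options.CreateOpts;
import org.apache.hadoop.fs.Path;

public class CreateDemo {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getFileContext();
    Path file = new Path("/tmp/create-demo.txt"); // demo path
    try (FSDataOutputStream out = fc.create(file,
        EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
        CreateOpts.createParent())) {
      out.writeUTF("hello");
    }
    boolean deleted = fc.delete(file, false); // non-recursive delete
    System.out.println(deleted);
  }
}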
+ * @return input stream
 */
 public FSDataInputStream open(final Path f) throws AccessControlException,
     FileNotFoundException, UnresolvedLinkException, IOException {
@@ -673,6 +727,14 @@ public FSDataInputStream open(final Path f) throws AccessControlException,
  * The specification of this method matches that of
  * {@link FileContext#open(Path, int)} except that Path f must be for this
  * file system.
+ *
+ * @param f the path
+ * @param bufferSize buffer size
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return input stream
 */
 public abstract FSDataInputStream open(final Path f, int bufferSize)
     throws AccessControlException, FileNotFoundException,
@@ -682,6 +744,14 @@ public abstract FSDataInputStream open(final Path f, int bufferSize)
  * The specification of this method matches that of
  * {@link FileContext#truncate(Path, long)} except that Path f must be for
  * this file system.
+ *
+ * @param f the path
+ * @param newLength new length
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
+ * @return true if truncate succeeded, false otherwise
 */
 public boolean truncate(Path f, long newLength)
     throws AccessControlException, FileNotFoundException,
@@ -694,6 +764,14 @@ public boolean truncate(Path f, long newLength)
  * The specification of this method matches that of
  * {@link FileContext#setReplication(Path, short)} except that Path f must be
  * for this file system.
+ *
+ * @param f the path
+ * @param replication replication
+ * @return true if replication was successfully set, false otherwise
+ * @throws AccessControlException access control exception
+ * @throws FileNotFoundException file not found exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
 */
 public abstract boolean setReplication(final Path f,
     final short replication) throws AccessControlException,
@@ -703,6 +781,16 @@ public abstract boolean setReplication(final Path f,
  * The specification of this method matches that of
  * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
  * f must be for this file system.
+ *
+ * @param src src
+ * @param dst dst
+ * @param options options
+ * @throws AccessControlException access control exception
+ * @throws FileAlreadyExistsException file already exists exception
+ * @throws FileNotFoundException file not found exception
+ * @throws ParentNotDirectoryException parent not directory exception
+ * @throws UnresolvedLinkException unresolved link exception
+ * @throws IOException raised on errors performing I/O.
 */
 public final void rename(final Path src, final Path dst,
     final Options.Rename...
options) throws AccessControlException, @@ -727,6 +815,15 @@ public final void rename(final Path src, final Path dst, * File systems that do not have a built in overwrite need implement only this * method and can take advantage of the default impl of the other * {@link #renameInternal(Path, Path, boolean)} + * + * @param src src + * @param dst dst + * @throws AccessControlException access control exception + * @throws FileAlreadyExistsException file already exists exception + * @throws FileNotFoundException file not found exception + * @throws ParentNotDirectoryException parent not directory exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. */ public abstract void renameInternal(final Path src, final Path dst) throws AccessControlException, FileAlreadyExistsException, @@ -737,6 +834,16 @@ public abstract void renameInternal(final Path src, final Path dst) * The specification of this method matches that of * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path * f must be for this file system. + * + * @param src src + * @param dst dst + * @param overwrite overwrite flag + * @throws AccessControlException access control exception + * @throws FileAlreadyExistsException file already exists exception + * @throws FileNotFoundException file not found exception + * @throws ParentNotDirectoryException parent not directory exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. */ public void renameInternal(final Path src, final Path dst, boolean overwrite) throws AccessControlException, @@ -800,6 +907,12 @@ public boolean supportsSymlinks() { /** * The specification of this method matches that of * {@link FileContext#createSymlink(Path, Path, boolean)}; + * + * @param target target + * @param link link + * @param createParent create parent + * @throws IOException raised on errors performing I/O. + * @throws UnresolvedLinkException unresolved link exception */ public void createSymlink(final Path target, final Path link, final boolean createParent) throws IOException, UnresolvedLinkException { @@ -810,6 +923,8 @@ public void createSymlink(final Path target, final Path link, * Partially resolves the path. This is used during symlink resolution in * {@link FSLinkResolver}, and differs from the similarly named method * {@link FileContext#getLinkTarget(Path)}. + * @param f the path + * @return target path * @throws IOException subclass implementations may throw IOException */ public Path getLinkTarget(final Path f) throws IOException { @@ -822,6 +937,13 @@ public Path getLinkTarget(final Path f) throws IOException { * The specification of this method matches that of * {@link FileContext#setPermission(Path, FsPermission)} except that Path f * must be for this file system. + * + * @param f the path + * @param permission permission + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. */ public abstract void setPermission(final Path f, final FsPermission permission) throws AccessControlException, @@ -831,6 +953,14 @@ public abstract void setPermission(final Path f, * The specification of this method matches that of * {@link FileContext#setOwner(Path, String, String)} except that Path f must * be for this file system. 
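For the rename contract, the overwrite behavior is selected by the caller. A small sketch using the public FileContext API with demo paths:

import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;

public class RenameDemo {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getFileContext();
    Path src = new Path("/tmp/src.txt"); // demo paths
    Path dst = new Path("/tmp/dst.txt");
    // Without Rename.OVERWRITE this throws FileAlreadyExistsException
    // when dst already exists.
    fc.rename(src, dst, Options.Rename.OVERWRITE);
  }
}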
+ * + * @param f the path + * @param username user name + * @param groupname group name + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. */ public abstract void setOwner(final Path f, final String username, final String groupname) throws AccessControlException, @@ -840,6 +970,14 @@ public abstract void setOwner(final Path f, final String username, * The specification of this method matches that of * {@link FileContext#setTimes(Path, long, long)} except that Path f must be * for this file system. + * + * @param f the path + * @param mtime modify time + * @param atime access time + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. */ public abstract void setTimes(final Path f, final long mtime, final long atime) throws AccessControlException, FileNotFoundException, @@ -849,6 +987,12 @@ public abstract void setTimes(final Path f, final long mtime, * The specification of this method matches that of * {@link FileContext#getFileChecksum(Path)} except that Path f must be for * this file system. + * + * @param f the path + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. */ public abstract FileChecksum getFileChecksum(final Path f) throws AccessControlException, FileNotFoundException, @@ -859,6 +1003,12 @@ public abstract FileChecksum getFileChecksum(final Path f) * {@link FileContext#getFileStatus(Path)} * except that an UnresolvedLinkException may be thrown if a symlink is * encountered in the path. + * + * @param f the path + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. */ public abstract FileStatus getFileStatus(final Path f) throws AccessControlException, FileNotFoundException, @@ -870,8 +1020,8 @@ public abstract FileStatus getFileStatus(final Path f) * In some FileSystem implementations such as HDFS metadata * synchronization is essential to guarantee consistency of read requests * particularly in HA setting. - * @throws IOException - * @throws UnsupportedOperationException + * @throws IOException raised on errors performing I/O. + * @throws UnsupportedOperationException Unsupported Operation Exception */ public void msync() throws IOException, UnsupportedOperationException { throw new UnsupportedOperationException(getClass().getCanonicalName() + @@ -883,6 +1033,13 @@ public void msync() throws IOException, UnsupportedOperationException { * {@link FileContext#access(Path, FsAction)} * except that an UnresolvedLinkException may be thrown if a symlink is * encountered in the path. + * + * @param path the path + * @param mode fsaction mode + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. 
*/ @InterfaceAudience.LimitedPrivate({"HDFS", "Hive"}) public void access(Path path, FsAction mode) throws AccessControlException, @@ -897,6 +1054,13 @@ public void access(Path path, FsAction mode) throws AccessControlException, * encountered in the path leading up to the final path component. * If the file system does not support symlinks then the behavior is * equivalent to {@link AbstractFileSystem#getFileStatus(Path)}. + * + * @param f the path + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws UnsupportedFileSystemException UnSupported File System Exception + * @throws IOException raised on errors performing I/O. + * @return file status */ public FileStatus getFileLinkStatus(final Path f) throws AccessControlException, FileNotFoundException, @@ -908,6 +1072,15 @@ public FileStatus getFileLinkStatus(final Path f) * The specification of this method matches that of * {@link FileContext#getFileBlockLocations(Path, long, long)} except that * Path f must be for this file system. + * + * @param f the path + * @param start start + * @param len length + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. + * @return BlockLocation Array */ public abstract BlockLocation[] getFileBlockLocations(final Path f, final long start, final long len) throws AccessControlException, @@ -917,6 +1090,13 @@ public abstract BlockLocation[] getFileBlockLocations(final Path f, * The specification of this method matches that of * {@link FileContext#getFsStatus(Path)} except that Path f must be for this * file system. + * + * @param f the path + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. + * @return Fs Status */ public FsStatus getFsStatus(final Path f) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException { @@ -927,6 +1107,11 @@ public FsStatus getFsStatus(final Path f) throws AccessControlException, /** * The specification of this method matches that of * {@link FileContext#getFsStatus(Path)}. + * + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws IOException raised on errors performing I/O. + * @return Fs Status */ public abstract FsStatus getFsStatus() throws AccessControlException, FileNotFoundException, IOException; @@ -935,6 +1120,13 @@ public abstract FsStatus getFsStatus() throws AccessControlException, * The specification of this method matches that of * {@link FileContext#listStatus(Path)} except that Path f must be for this * file system. + * + * @param f path + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. + * @return FileStatus Iterator */ public RemoteIterator listStatusIterator(final Path f) throws AccessControlException, FileNotFoundException, @@ -967,6 +1159,13 @@ public FileStatus next() { * will have different formats for replicated and erasure coded file. 
Please * refer to {@link FileSystem#getFileBlockLocations(FileStatus, long, long)} * for more details. + * + * @param f the path + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. + * @return FileStatus Iterator */ public RemoteIterator listLocatedStatus(final Path f) throws AccessControlException, FileNotFoundException, @@ -999,6 +1198,12 @@ public LocatedFileStatus next() throws IOException { * The specification of this method matches that of * {@link FileContext.Util#listStatus(Path)} except that Path f must be * for this file system. + * @param f the path + * @throws AccessControlException access control exception + * @throws FileNotFoundException file not found exception + * @throws UnresolvedLinkException unresolved link exception + * @throws IOException raised on errors performing I/O. + * @return FileStatus Iterator */ public abstract FileStatus[] listStatus(final Path f) throws AccessControlException, FileNotFoundException, @@ -1007,7 +1212,8 @@ public abstract FileStatus[] listStatus(final Path f) /** * @return an iterator over the corrupt files under the given path * (may contain duplicates if a file has more than one corrupt block) - * @throws IOException + * @param path the path + * @throws IOException raised on errors performing I/O. */ public RemoteIterator listCorruptFileBlocks(Path path) throws IOException { @@ -1020,6 +1226,10 @@ public RemoteIterator listCorruptFileBlocks(Path path) * The specification of this method matches that of * {@link FileContext#setVerifyChecksum(boolean, Path)} except that Path f * must be for this file system. + * + * @param verifyChecksum verify check sum flag + * @throws AccessControlException access control exception + * @throws IOException raised on errors performing I/O. */ public abstract void setVerifyChecksum(final boolean verifyChecksum) throws AccessControlException, IOException; @@ -1041,7 +1251,7 @@ public String getCanonicalServiceName() { * @param renewer the account name that is allowed to renew the token. * @return List of delegation tokens. * If delegation tokens not supported then return a list of size zero. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" }) public List> getDelegationTokens(String renewer) throws IOException { @@ -1141,7 +1351,7 @@ public AclStatus getAclStatus(Path path) throws IOException { * @param path Path to modify * @param name xattr name. * @param value xattr value. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void setXAttr(Path path, String name, byte[] value) throws IOException { @@ -1160,7 +1370,7 @@ public void setXAttr(Path path, String name, byte[] value) * @param name xattr name. * @param value xattr value. * @param flag xattr set flag - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void setXAttr(Path path, String name, byte[] value, EnumSet flag) throws IOException { @@ -1178,7 +1388,7 @@ public void setXAttr(Path path, String name, byte[] value, * @param path Path to get extended attribute * @param name xattr name. * @return byte[] xattr value. - * @throws IOException + * @throws IOException raised on errors performing I/O. 
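The xattr operations map one-to-one onto the public FileSystem calls. A sketch assuming an xattr-capable file system such as HDFS and a demo path that already exists:

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class XAttrDemo {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    Path file = new Path("/tmp/xattr-demo.txt"); // demo path
    // Names carry a namespace prefix: user., trusted., system., security.
    fs.setXAttr(file, "user.origin",
        "ingest-job-42".getBytes(StandardCharsets.UTF_8));
    byte[] value = fs.getXAttr(file, "user.origin");
    System.out.println(new String(value, StandardCharsets.UTF_8));
    fs.removeXAttr(file, "user.origin");
  }
}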
*/ public byte[] getXAttr(Path path, String name) throws IOException { throw new UnsupportedOperationException(getClass().getSimpleName() @@ -1196,7 +1406,7 @@ public byte[] getXAttr(Path path, String name) throws IOException { * * @return {@literal Map} describing the XAttrs of the file * or directory - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Map getXAttrs(Path path) throws IOException { throw new UnsupportedOperationException(getClass().getSimpleName() @@ -1214,7 +1424,7 @@ public Map getXAttrs(Path path) throws IOException { * @param names XAttr names. * @return {@literal Map} describing the XAttrs of the file * or directory - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Map getXAttrs(Path path, List names) throws IOException { @@ -1232,7 +1442,7 @@ public Map getXAttrs(Path path, List names) * @param path Path to get extended attributes * @return {@literal Map} describing the XAttrs of the file * or directory - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public List listXAttrs(Path path) throws IOException { @@ -1249,7 +1459,7 @@ public List listXAttrs(Path path) * * @param path Path to remove extended attribute * @param name xattr name - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void removeXAttr(Path path, String name) throws IOException { throw new UnsupportedOperationException(getClass().getSimpleName() @@ -1259,6 +1469,10 @@ public void removeXAttr(Path path, String name) throws IOException { /** * The specification of this method matches that of * {@link FileContext#createSnapshot(Path, String)}. + * + * @param path the path + * @param snapshotName snapshot name + * @throws IOException raised on errors performing I/O. */ public Path createSnapshot(final Path path, final String snapshotName) throws IOException { @@ -1269,6 +1483,11 @@ public Path createSnapshot(final Path path, final String snapshotName) /** * The specification of this method matches that of * {@link FileContext#renameSnapshot(Path, String, String)}. + * + * @param path the path + * @param snapshotOldName snapshot old name + * @param snapshotNewName snapshot new name + * @throws IOException raised on errors performing I/O. */ public void renameSnapshot(final Path path, final String snapshotOldName, final String snapshotNewName) throws IOException { @@ -1279,6 +1498,10 @@ public void renameSnapshot(final Path path, final String snapshotOldName, /** * The specification of this method matches that of * {@link FileContext#deleteSnapshot(Path, String)}. + * + * @param snapshotDir snapshot dir + * @param snapshotName snapshot name + * @throws IOException raised on errors performing I/O. */ public void deleteSnapshot(final Path snapshotDir, final String snapshotName) throws IOException { @@ -1289,7 +1512,7 @@ public void deleteSnapshot(final Path snapshotDir, final String snapshotName) /** * Set the source path to satisfy storage policy. * @param path The source path referring to either a directory or a file. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void satisfyStoragePolicy(final Path path) throws IOException { throw new UnsupportedOperationException( @@ -1303,6 +1526,7 @@ public void satisfyStoragePolicy(final Path path) throws IOException { * @param policyName the name of the target storage policy. The list * of supported Storage policies can be retrieved * via {@link #getAllStoragePolicies}. 
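The snapshot methods likewise mirror the FileSystem API. A sketch assuming an HDFS directory that has been made snapshottable (hdfs dfsadmin -allowSnapshot); the paths are illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SnapshotDemo {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    Path dir = new Path("/data/warehouse"); // must be snapshottable
    Path snap = fs.createSnapshot(dir, "s1");
    System.out.println("created " + snap);  // e.g. /data/warehouse/.snapshot/s1
    fs.renameSnapshot(dir, "s1", "s2");
    fs.deleteSnapshot(dir, "s2");
  }
}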
+ * @throws IOException raised on errors performing I/O. */ public void setStoragePolicy(final Path path, final String policyName) throws IOException { @@ -1314,7 +1538,7 @@ public void setStoragePolicy(final Path path, final String policyName) /** * Unset the storage policy set for a given file or directory. * @param src file or directory path. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void unsetStoragePolicy(final Path src) throws IOException { throw new UnsupportedOperationException(getClass().getSimpleName() @@ -1326,7 +1550,7 @@ public void unsetStoragePolicy(final Path src) throws IOException { * * @param src file or directory path. * @return storage policy for give file. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public BlockStoragePolicySpi getStoragePolicy(final Path src) throws IOException { @@ -1338,7 +1562,7 @@ public BlockStoragePolicySpi getStoragePolicy(final Path src) * Retrieve all the storage policies supported by this file system. * * @return all storage policies supported by this filesystem. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Collection getAllStoragePolicies() throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java index 892a01f0f21fc..ad41107e4adc7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java @@ -46,7 +46,7 @@ public interface DelegationTokenIssuer { /** * Unconditionally get a new token with the optional renewer. Returning * null indicates the service does not issue tokens. - * @param renewer + * @param renewer renewer * @return the token * @throws IOException raised on errors performing I/O. */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java index 2e27b3ca5b5fd..f71385f76f8a4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java @@ -63,12 +63,15 @@ public abstract class TokenRenewer { public abstract long renew(Token token, Configuration conf ) throws IOException, InterruptedException; - - /** - * Cancel the given token - * @throws IOException - * @throws InterruptedException - */ + + /** + * Cancel the given token + * + * @throws IOException raised on errors performing I/O. + * @throws InterruptedException thrown when a thread is waiting, sleeping, + * or otherwise occupied, and the thread is interrupted, + * either before or during the activity. + */ public abstract void cancel(Token token, Configuration conf ) throws IOException, InterruptedException; From 8ccd2b5fcd8208d630f0692c30e1f6897cdf0968 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Tue, 10 May 2022 15:53:35 -0700 Subject: [PATCH 19/53] HADOOP-18229. Fix some java doc compilation errors OpensslCipher.java empty

<p> tag
KeyProvider.java warning: no @return
KeyProviderCryptoExtension.java warning: no @throws for java.io.IOException
TokenRenewer.java warning: no @param for token, checkstyle
ValueQueue.java warning: no description for @throws
AbstractFileSystem.java warning: no @return
---
 .../main/java/org/apache/hadoop/crypto/OpensslCipher.java | 8 ++------
 .../java/org/apache/hadoop/crypto/key/KeyProvider.java    | 2 ++
 .../hadoop/crypto/key/KeyProviderCryptoExtension.java     | 2 ++
 .../java/org/apache/hadoop/crypto/key/kms/ValueQueue.java | 6 +++---
 .../java/org/apache/hadoop/fs/AbstractFileSystem.java     | 3 +++
 .../org/apache/hadoop/security/token/TokenRenewer.java    | 5 ++++-
 6 files changed, 16 insertions(+), 10 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
index ac8652cae03ae..b166cfc8611b3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
@@ -230,21 +230,17 @@ public int update(ByteBuffer input, ByteBuffer output)
  * Finishes a multiple-part operation. The data is encrypted or decrypted,
  * depending on how this cipher was initialized.
  * <p>
- *
  * The result is stored in the output buffer. Upon return, the output buffer's
  * position will have advanced by n, where n is the value returned by this
  * method; the output buffer's limit will not have changed.
- * <p>
- *
+ * <p>
  * If output.remaining() bytes are insufficient to hold the result,
  * a ShortBufferException is thrown.
  * <p>
- *
  * Upon finishing, this method resets this cipher object to the state it was
  * in when previously initialized. That is, the object is available to encrypt
  * or decrypt more data.
- * <p>
- *
+ * <p>
* If any exception is thrown, this cipher object need to be reset before it * can be used again. * diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java index e7727684e40ac..5b3df7d3a8196 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java @@ -466,6 +466,7 @@ public abstract KeyVersion getKeyVersion(String versionName * Get key metadata in bulk. * @param names the names of the keys to get * @throws IOException raised on errors performing I/O. + * @return Metadata Array */ public Metadata[] getKeysMetadata(String... names) throws IOException { Metadata[] result = new Metadata[names.length]; @@ -479,6 +480,7 @@ public Metadata[] getKeysMetadata(String... names) throws IOException { * Get the key material for all versions of a specific key name. * @return the list of key material * @throws IOException raised on errors performing I/O. + * @return KeyVersion List */ public abstract List getKeyVersions(String name) throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java index cc767ab545488..99cab35e351c6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java @@ -178,6 +178,7 @@ public interface CryptoExtension extends KeyProviderExtension.Extension { * Calls to this method allows the underlying KeyProvider to warm-up any * implementation specific caches used to store the Encrypted Keys. * @param keyNames Array of Key Names + * @throws IOException thrown if the key material could not be encrypted */ public void warmUpEncryptedKeys(String... keyNames) throws IOException; @@ -487,6 +488,7 @@ protected KeyProviderCryptoExtension(KeyProvider keyProvider, * Notifies the Underlying CryptoExtension implementation to warm up any * implementation specific caches for the specified KeyVersions * @param keyNames Arrays of key Names + * @throws IOException raised on errors performing I/O. */ public void warmUpEncryptedKeys(String... keyNames) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java index cc54ad2df1cee..ebe41b71f9517 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java @@ -268,7 +268,7 @@ public ValueQueue(final int numValues, final float lowWaterMark, long expiry, * Initializes the Value Queues for the provided keys by calling the * fill Method with "numInitValues" values * @param keyNames Array of key Names - * @throws ExecutionException + * @throws ExecutionException executionException */ public void initializeQueuesForKeys(String... keyNames) throws ExecutionException { @@ -285,8 +285,8 @@ public void initializeQueuesForKeys(String... 
keyNames) * function to add 1 value to Queue and then drain it. * @param keyName String key name * @return E the next value in the Queue - * @throws IOException - * @throws ExecutionException + * @throws IOException raised on errors performing I/O. + * @throws ExecutionException executionException */ public E getNext(String keyName) throws IOException, ExecutionException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java index 73b1e79efb010..0ef81b60329a4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java @@ -993,6 +993,7 @@ public abstract void setTimes(final Path f, final long mtime, * @throws FileNotFoundException file not found exception * @throws UnresolvedLinkException unresolved link exception * @throws IOException raised on errors performing I/O. + * @return File Check sum */ public abstract FileChecksum getFileChecksum(final Path f) throws AccessControlException, FileNotFoundException, @@ -1009,6 +1010,7 @@ public abstract FileChecksum getFileChecksum(final Path f) * @throws FileNotFoundException file not found exception * @throws UnresolvedLinkException unresolved link exception * @throws IOException raised on errors performing I/O. + * @return File Status */ public abstract FileStatus getFileStatus(final Path f) throws AccessControlException, FileNotFoundException, @@ -1473,6 +1475,7 @@ public void removeXAttr(Path path, String name) throws IOException { * @param path the path * @param snapshotName snapshot name * @throws IOException raised on errors performing I/O. + * @return path */ public Path createSnapshot(final Path path, final String snapshotName) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java index f71385f76f8a4..032978aad3b6d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java @@ -65,7 +65,10 @@ public abstract long renew(Token token, ) throws IOException, InterruptedException; /** - * Cancel the given token + * Cancel the given token. + * + * @param token the token being checked + * @param conf configuration * * @throws IOException raised on errors performing I/O. * @throws InterruptedException thrown when a thread is waiting, sleeping, From 26c9f37174d20552fc6cab2e709da6bf5207f1f3 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Wed, 11 May 2022 00:44:16 -0700 Subject: [PATCH 20/53] HADOOP-18229. 
Fix some java doc compilation errors FileContext.java no @return, no description for @param, no @throws for java.io.FileNotFoundException etc, FileSystem.java no @throws for java.io.IOException, no @param for uri etc KeyProvider.java warning: @return has already been specified --- .../apache/hadoop/crypto/key/KeyProvider.java | 3 +- .../org/apache/hadoop/fs/FileContext.java | 57 +++++++++++++------ .../java/org/apache/hadoop/fs/FileSystem.java | 20 +++++-- 3 files changed, 56 insertions(+), 24 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java index 5b3df7d3a8196..19e620b0e84b4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java @@ -478,9 +478,10 @@ public Metadata[] getKeysMetadata(String... names) throws IOException { /** * Get the key material for all versions of a specific key name. + * + * @param name the base name of the key * @return the list of key material * @throws IOException raised on errors performing I/O. - * @return KeyVersion List */ public abstract List getKeyVersions(String name) throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java index f3004ce7e03a3..e2a96bc16880c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java @@ -411,6 +411,7 @@ protected static FileContext getFileContext( * * @throws UnsupportedFileSystemException If the file system from the default * configuration is not supported + * @return file context */ public static FileContext getFileContext() throws UnsupportedFileSystemException { @@ -554,6 +555,7 @@ public void setWorkingDirectory(final Path newWDir) throws IOException { /** * Gets the working directory for wd-relative names (such a "foo/bar"). + * @return the path */ public Path getWorkingDirectory() { return workingDir; @@ -600,13 +602,14 @@ public void setUMask(final FsPermission newUmask) { * @throws FileNotFoundException If f does not exist * @throws AccessControlException if access denied * @throws IOException If an IO Error occurred - * + * @throws UnresolvedLinkException If unresolved link occurred + * * Exceptions applicable to file systems accessed over RPC: * @throws RpcClientException If an exception occurred in the RPC client * @throws RpcServerException If an exception occurred in the RPC server * @throws UnexpectedServerException If server implementation throws * undeclared exception to RPC server - * + * * RuntimeExceptions: * @throws InvalidPathException If path f is not valid */ @@ -620,7 +623,7 @@ public Path resolvePath(final Path f) throws FileNotFoundException, * A Fully-qualified path has scheme and authority specified and an absolute * path. * Use the default file system and working dir in this FileContext to qualify. 
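To ground the getKeyVersions() javadoc amended above, a minimal sketch of walking every version of a key; the provider lookup and the key name "mykey" are assumptions for illustration, not part of the patch:

    // Assumes hadoop.security.key.provider.path names at least one provider.
    Configuration conf = new Configuration();
    KeyProvider kp = KeyProviderFactory.getProviders(conf).get(0);
    // getKeyVersions(name) returns one KeyVersion per rollover of the key.
    for (KeyProvider.KeyVersion kv : kp.getKeyVersions("mykey")) {
      System.out.println(kv.getVersionName() + ": " + kv.getMaterial().length + " bytes");
    }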
- * @param path + * @param path the path * @return qualified path */ public Path makeQualified(final Path path) { @@ -759,6 +762,7 @@ public FSDataOutputStream build() throws IOException { * * Client should expect {@link FSDataOutputStreamBuilder#build()} throw the * same exceptions as create(Path, EnumSet, CreateOpts...). + * @throws IOException If an I/O error occurred */ public FSDataOutputStreamBuilder create(final Path f) throws IOException { @@ -832,6 +836,8 @@ public Void next(final AbstractFileSystem fs, final Path p) * * RuntimeExceptions: * @throws InvalidPathException If path f is invalid + * + * @return if delete success true, not false */ public boolean delete(final Path f, final boolean recursive) throws AccessControlException, FileNotFoundException, @@ -862,6 +868,7 @@ public Boolean next(final AbstractFileSystem fs, final Path p) * @throws RpcServerException If an exception occurred in the RPC server * @throws UnexpectedServerException If server implementation throws * undeclared exception to RPC server + * @return input stream */ public FSDataInputStream open(final Path f) throws AccessControlException, FileNotFoundException, UnsupportedFileSystemException, IOException { @@ -892,6 +899,7 @@ public FSDataInputStream next(final AbstractFileSystem fs, final Path p) * @throws RpcServerException If an exception occurred in the RPC server * @throws UnexpectedServerException If server implementation throws * undeclared exception to RPC server + * @return output stream */ public FSDataInputStream open(final Path f, final int bufferSize) throws AccessControlException, FileNotFoundException, @@ -1001,6 +1009,7 @@ public Boolean next(final AbstractFileSystem fs, final Path p) * * @param src path to be renamed * @param dst new path after rename + * @param options rename options * * @throws AccessControlException If access is denied * @throws FileAlreadyExistsException If dst already exists and @@ -1613,9 +1622,12 @@ public RemoteIterator next( } /** + * List CorruptFile Blocks. + * + * @param path the path * @return an iterator over the corrupt files under the given path * (may contain duplicates if a file has more than one corrupt block) - * @throws IOException + * @throws IOException If an I/O error occurred */ public RemoteIterator listCorruptFileBlocks(Path path) throws IOException { @@ -2276,7 +2288,7 @@ private static void checkDependencies(Path qualSrc, Path qualDst) * Are qualSrc and qualDst of the same file system? * @param qualPath1 - fully qualified path * @param qualPath2 - fully qualified path - * @return + * @return is same fs true,not false */ private static boolean isSameFS(Path qualPath1, Path qualPath2) { URI srcUri = qualPath1.toUri(); @@ -2299,6 +2311,13 @@ public synchronized void run() { /** * Resolves all symbolic links in the specified path. * Returns the new path object. + * + * @param f the path + * @throws FileNotFoundException If f does not exist + * @throws UnresolvedLinkException If unresolved link occurred + * @throws AccessControlException If access is denied. + * @throws IOException If an I/O error occurred + * @return resolve path */ protected Path resolve(final Path f) throws FileNotFoundException, UnresolvedLinkException, AccessControlException, IOException { @@ -2316,6 +2335,7 @@ public Path next(final AbstractFileSystem fs, final Path p) * to, but not including the final path component. * @param f path to resolve * @return the new path object. 
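A hedged round trip through the FileContext calls whose javadocs are amended above (the scratch path and payload are assumptions; imports from org.apache.hadoop.fs and java.util.EnumSet):

    FileContext fc = FileContext.getFileContext(new Configuration());
    Path p = fc.makeQualified(new Path("/tmp/javadoc-demo.txt"));
    try (FSDataOutputStream out =
        fc.create(p, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE))) {
      out.writeUTF("hello");                    // something to read back
    }
    try (FSDataInputStream in = fc.open(p)) {
      System.out.println(in.readUTF());         // prints "hello"
    }
    fc.delete(p, false);                        // non-recursive; returns a boolean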
+ * @throws IOException If an I/O error occurred */ protected Path resolveIntermediate(final Path f) throws IOException { return new FSLinkResolver() { @@ -2334,7 +2354,7 @@ public FileStatus next(final AbstractFileSystem fs, final Path p) * @param f * Path which needs to be resolved * @return List of AbstractFileSystems accessed in the path - * @throws IOException + * @throws IOException If an I/O error occurred */ Set resolveAbstractFileSystems(final Path f) throws IOException { @@ -2395,7 +2415,7 @@ public static Map getAllStatistics() { * @param p Path for which delegations tokens are requested. * @param renewer the account name that is allowed to renew the token. * @return List of delegation tokens. - * @throws IOException + * @throws IOException If an I/O error occurred */ @InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" }) public List> getDelegationTokens( @@ -2547,7 +2567,7 @@ public AclStatus next(final AbstractFileSystem fs, final Path p) * @param path Path to modify * @param name xattr name. * @param value xattr value. - * @throws IOException + * @throws IOException If an I/O error occurred */ public void setXAttr(Path path, String name, byte[] value) throws IOException { @@ -2566,7 +2586,7 @@ public void setXAttr(Path path, String name, byte[] value) * @param name xattr name. * @param value xattr value. * @param flag xattr set flag - * @throws IOException + * @throws IOException If an I/O error occurred */ public void setXAttr(Path path, final String name, final byte[] value, final EnumSet flag) throws IOException { @@ -2591,7 +2611,7 @@ public Void next(final AbstractFileSystem fs, final Path p) * @param path Path to get extended attribute * @param name xattr name. * @return byte[] xattr value. - * @throws IOException + * @throws IOException If an I/O error occurred */ public byte[] getXAttr(Path path, final String name) throws IOException { final Path absF = fixRelativePart(path); @@ -2614,7 +2634,7 @@ public byte[] next(final AbstractFileSystem fs, final Path p) * @param path Path to get extended attributes * @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs * of the file or directory - * @throws IOException + * @throws IOException If an I/O error occurred */ public Map getXAttrs(Path path) throws IOException { final Path absF = fixRelativePart(path); @@ -2638,7 +2658,7 @@ public Map next(final AbstractFileSystem fs, final Path p) * @param names XAttr names. 
* @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs * of the file or directory - * @throws IOException + * @throws IOException If an I/O error occurred */ public Map getXAttrs(Path path, final List names) throws IOException { @@ -2661,7 +2681,7 @@ public Map next(final AbstractFileSystem fs, final Path p) * * @param path Path to remove extended attribute * @param name xattr name - * @throws IOException + * @throws IOException If an I/O error occurred */ public void removeXAttr(Path path, final String name) throws IOException { final Path absF = fixRelativePart(path); @@ -2685,7 +2705,7 @@ public Void next(final AbstractFileSystem fs, final Path p) * @param path Path to get extended attributes * @return List{@literal <}String{@literal >} of the XAttr names of the * file or directory - * @throws IOException + * @throws IOException If an I/O error occurred */ public List listXAttrs(Path path) throws IOException { final Path absF = fixRelativePart(path); @@ -2802,7 +2822,7 @@ public Void next(final AbstractFileSystem fs, final Path p) /** * Set the source path to satisfy storage policy. * @param path The source path referring to either a directory or a file. - * @throws IOException + * @throws IOException If an I/O error occurred */ public void satisfyStoragePolicy(final Path path) throws IOException { @@ -2824,6 +2844,7 @@ public Void next(final AbstractFileSystem fs, final Path p) * @param policyName the name of the target storage policy. The list * of supported Storage policies can be retrieved * via {@link #getAllStoragePolicies}. + * @throws IOException If an I/O error occurred */ public void setStoragePolicy(final Path path, final String policyName) throws IOException { @@ -2841,7 +2862,7 @@ public Void next(final AbstractFileSystem fs, final Path p) /** * Unset the storage policy set for a given file or directory. * @param src file or directory path. - * @throws IOException + * @throws IOException If an I/O error occurred */ public void unsetStoragePolicy(final Path src) throws IOException { final Path absF = fixRelativePart(src); @@ -2860,7 +2881,7 @@ public Void next(final AbstractFileSystem fs, final Path p) * * @param path file or directory path. * @return storage policy for give file. - * @throws IOException + * @throws IOException If an I/O error occurred */ public BlockStoragePolicySpi getStoragePolicy(Path path) throws IOException { final Path absF = fixRelativePart(path); @@ -2878,7 +2899,7 @@ public BlockStoragePolicySpi next(final AbstractFileSystem fs, * Retrieve all the storage policies supported by this file system. * * @return all storage policies supported by this filesystem. - * @throws IOException + * @throws IOException If an I/O error occurred */ public Collection getAllStoragePolicies() throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java index 180f5d1608164..04bb38a28ccda 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java @@ -281,6 +281,8 @@ public FileSystem run() throws IOException { /** * Returns the configured FileSystem implementation. 
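The extended-attribute methods touched above compose as follows; a sketch assuming an xattr-capable filesystem such as HDFS and a hypothetical "user.team" attribute:

    FileSystem fs = FileSystem.get(new Configuration());
    Path p = new Path("/tmp/demo.txt");
    fs.setXAttr(p, "user.team", "analytics".getBytes(StandardCharsets.UTF_8));
    byte[] one = fs.getXAttr(p, "user.team");    // a single attribute
    Map<String, byte[]> all = fs.getXAttrs(p);   // every attribute on the path
    for (String name : fs.listXAttrs(p)) {
      System.out.println(name + " -> " + all.get(name).length + " bytes");
    }
    fs.removeXAttr(p, "user.team");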
* @param conf the configuration to use + * @return FileSystem + * @throws IOException If an I/O error occurred */ public static FileSystem get(Configuration conf) throws IOException { return get(getDefaultUri(conf), conf); @@ -391,6 +393,7 @@ protected URI getCanonicalUri() { * not specified and if {@link #getDefaultPort()} returns a * default port. * + * @param uri url * @return URI * @see NetUtils#getCanonicalUri(URI, int) */ @@ -458,7 +461,14 @@ public String getCanonicalServiceName() { @Deprecated public String getName() { return getUri().toString(); } - /** @deprecated call {@link #get(URI, Configuration)} instead. */ + /** + * @deprecated call {@link #get(URI, Configuration)} instead. + * + * @param name name + * @param conf configuration + * @return file system + * @throws IOException If an I/O error occurred + */ @Deprecated public static FileSystem getNamed(String name, Configuration conf) throws IOException { @@ -1948,7 +1958,7 @@ public boolean hasMore() { * if this is the first call. * @return * @throws FileNotFoundException - * @throws IOException + * @throws IOException If an I/O error occurred */ @InterfaceAudience.Private protected DirectoryEntries listStatusBatch(Path f, byte[] token) throws @@ -2685,7 +2695,7 @@ public short getDefaultReplication(Path path) { * In some FileSystem implementations such as HDFS metadata * synchronization is essential to guarantee consistency of read requests * particularly in HA setting. - * @throws IOException + * @throws IOException If an I/O error occurred * @throws UnsupportedOperationException */ public void msync() throws IOException, UnsupportedOperationException { @@ -3221,7 +3231,7 @@ public void removeXAttr(Path path, String name) throws IOException { /** * Set the source path to satisfy storage policy. * @param path The source path referring to either a directory or a file. - * @throws IOException + * @throws IOException If an I/O error occurred */ public void satisfyStoragePolicy(final Path path) throws IOException { throw new UnsupportedOperationException( @@ -3529,7 +3539,7 @@ FileSystem getUnique(URI uri, Configuration conf) throws IOException{ * @param conf configuration * @param key key to store/retrieve this FileSystem in the cache * @return a cached or newly instantiated FileSystem. - * @throws IOException + * @throws IOException If an I/O error occurred */ private FileSystem getInternal(URI uri, Configuration conf, Key key) throws IOException{ From 430385b118bda79aa280e8462849d0031cd27a9d Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Wed, 11 May 2022 05:24:44 -0700 Subject: [PATCH 21/53] HADOOP-18229. Fix some java doc compilation errors. FileContext.java no @param for src, empty
<p>
tag etc, FileSystem.java warning: no @param for f, warning: no @return etc --- .../org/apache/hadoop/fs/FileContext.java | 30 ++++++++++----- .../java/org/apache/hadoop/fs/FileSystem.java | 37 ++++++++++++++++++- 2 files changed, 55 insertions(+), 12 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java index e2a96bc16880c..29b711e492138 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java @@ -1811,6 +1811,11 @@ public ContentSummary getContentSummary(Path f) /** * See {@link #listStatus(Path[], PathFilter)} + * + * @param files files + * @throws AccessControlException If access is denied + * @throws FileNotFoundException If files does not exist + * @throws IOException If an I/O error occurred */ public FileStatus[] listStatus(Path[] files) throws AccessControlException, FileNotFoundException, IOException { @@ -2066,36 +2071,29 @@ public LocatedFileStatus next() throws IOException { *

-  *        <p>
   *        <dt> <tt> ? </tt>
   *        <dd> Matches any single character.
   *
-  *        <p>
   *        <dt> <tt> * </tt>
   *        <dd> Matches zero or more characters.
   *
-  *        <p>
   *        <dt> <tt> [abc] </tt>
   *        <dd> Matches a single character from character set
   *         <tt>{a,b,c}</tt>.
   *
-  *        <p>
   *        <dt> <tt> [a-b] </tt>
   *        <dd> Matches a single character from the character range
   *         <tt>{a...b}</tt>. Note: character <tt>a</tt> must be
   *         lexicographically less than or equal to character <tt>b</tt>.
   *
-  *        <p>
   *        <dt> <tt> [^a] </tt>
   *        <dd> Matches a single char that is not from character set or range
   *         <tt>{a}</tt>. Note that the <tt>^</tt> character must occur
   *         immediately to the right of the opening bracket.
   *
-  *        <p>
   *        <dt> <tt> \c </tt>
   *        <dd> Removes (escapes) any special meaning of character <tt>c</tt>.
   *
-  *        <p>
   *        <dt> <tt> {ab,cd} </tt>
   *        <dd> Matches a string from the string set <tt>{ab, cd}</tt>
-  *
-  *        <p>
+  *
   *        <dt> <tt> {ab,c{de,fh}} </tt>
   *        <dd>
Matches a string from string set {ab, cde, cfh} * @@ -2156,6 +2154,18 @@ public FileStatus[] globStatus(final Path pathPattern, /** * Copy file from src to dest. See * {@link #copy(Path, Path, boolean, boolean)} + * + * @param src src + * @param dst dst + * @throws AccessControlException If access is denied + * @throws FileAlreadyExistsException If file src already exists + * @throws FileNotFoundException if next file does not exist any more + * @throws ParentNotDirectoryException If parent of src is not a + * directory. + * @throws UnsupportedFileSystemException If file system for + * src/dst is not supported + * @thorws IOException If an I/O error occurred + * @return if success copy true, not false */ public boolean copy(final Path src, final Path dst) throws AccessControlException, FileAlreadyExistsException, @@ -2166,8 +2176,8 @@ public boolean copy(final Path src, final Path dst) /** * Copy from src to dst, optionally deleting src and overwriting dst. - * @param src - * @param dst + * @param src src + * @param dst dst * @param deleteSource - delete src if true * @param overwrite overwrite dst if true; throw IOException if dst exists * and overwrite is false. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java index 04bb38a28ccda..0bd78498a1802 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java @@ -1084,6 +1084,7 @@ public FSDataOutputStream create(Path f, boolean overwrite) * @param f the file to create * @param progress to report progress * @throws IOException IO failure + * @return output stream */ public FSDataOutputStream create(Path f, Progressable progress) throws IOException { @@ -1100,6 +1101,7 @@ public FSDataOutputStream create(Path f, Progressable progress) * @param f the file to create * @param replication the replication factor * @throws IOException IO failure + * @return output stream */ public FSDataOutputStream create(Path f, short replication) throws IOException { @@ -1118,6 +1120,7 @@ public FSDataOutputStream create(Path f, short replication) * @param replication the replication factor * @param progress to report progress * @throws IOException IO failure + * @return output stream */ public FSDataOutputStream create(Path f, short replication, Progressable progress) throws IOException { @@ -1135,6 +1138,7 @@ public FSDataOutputStream create(Path f, short replication, * the file will be overwritten, and if false an error will be thrown. * @param bufferSize the size of the buffer to be used. * @throws IOException IO failure + * @return output stream */ public FSDataOutputStream create(Path f, boolean overwrite, @@ -1155,6 +1159,7 @@ public FSDataOutputStream create(Path f, * the file will be overwritten, and if false an error will be thrown. * @param bufferSize the size of the buffer to be used. * @throws IOException IO failure + * @return output stream */ public FSDataOutputStream create(Path f, boolean overwrite, @@ -1175,6 +1180,7 @@ public FSDataOutputStream create(Path f, * @param bufferSize the size of the buffer to be used. * @param replication required block replication for the file. 
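The pattern grammar restored above is consumed through globStatus; a sketch over a hypothetical /logs layout:

    // Matches e.g. /logs/2022-05-09/part-0.gz but not /logs/tmp/part-a.gz
    FileStatus[] matches =
        fc.util().globStatus(new Path("/logs/2022-05-*/part-[0-9].gz"));
    for (FileStatus st : matches) {
      System.out.println(st.getPath());
    }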
* @throws IOException IO failure + * @return output stream */ public FSDataOutputStream create(Path f, boolean overwrite, @@ -1193,6 +1199,7 @@ public FSDataOutputStream create(Path f, * @param bufferSize the size of the buffer to be used. * @param replication required block replication for the file. * @throws IOException IO failure + * @return output stream */ public FSDataOutputStream create(Path f, boolean overwrite, @@ -1219,6 +1226,7 @@ public FSDataOutputStream create(Path f, * @param progress the progress reporter * @throws IOException IO failure * @see #setPermission(Path, FsPermission) + * @return output stream */ public abstract FSDataOutputStream create(Path f, FsPermission permission, @@ -1240,6 +1248,7 @@ public abstract FSDataOutputStream create(Path f, * @param progress the progress reporter * @throws IOException IO failure * @see #setPermission(Path, FsPermission) + * @return output stream */ public FSDataOutputStream create(Path f, FsPermission permission, @@ -1266,6 +1275,7 @@ public FSDataOutputStream create(Path f, * found in conf will be used. * @throws IOException IO failure * @see #setPermission(Path, FsPermission) + * @return output stream */ public FSDataOutputStream create(Path f, FsPermission permission, @@ -1287,6 +1297,16 @@ public FSDataOutputStream create(Path f, * the permission with umask before calling this method. * This a temporary method added to support the transition from FileSystem * to FileContext for user applications. + * + * @param f path + * @param absolutePermission permission + * @param flag create flag + * @param bufferSize buffer size + * @param replication replication + * @param blockSize block size + * @param progress progress + * @param checksumOpt check sum opt + * @return output stream * @throws IOException IO failure */ @Deprecated @@ -1341,6 +1361,11 @@ protected boolean primitiveMkdir(Path f, FsPermission absolutePermission) * with umask before calling this method. * This a temporary method added to support the transition from FileSystem * to FileContext for user applications. 
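Each shorter create(...) overload documented above delegates to the long form; a sketch with illustrative values (none of these numbers come from the patch):

    FSDataOutputStream out = fs.create(
        new Path("/tmp/data.bin"),
        new FsPermission((short) 0644),    // permission, applied before umask
        true,                              // overwrite
        4096,                              // buffer size
        (short) 3,                         // replication
        128L * 1024 * 1024,                // block size
        null);                             // no progress callback
    out.close();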
+ * + * @param f the path + * @param absolutePermission permission + * @param createParent create parent + * @throws IOException IO failure */ @Deprecated protected void primitiveMkdir(Path f, FsPermission absolutePermission, @@ -1380,6 +1405,7 @@ protected void primitiveMkdir(Path f, FsPermission absolutePermission, * @param progress the progress reporter * @throws IOException IO failure * @see #setPermission(Path, FsPermission) + * @return output stream */ public FSDataOutputStream createNonRecursive(Path f, boolean overwrite, @@ -1403,6 +1429,7 @@ public FSDataOutputStream createNonRecursive(Path f, * @param progress the progress reporter * @throws IOException IO failure * @see #setPermission(Path, FsPermission) + * @return output stream */ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, boolean overwrite, int bufferSize, short replication, long blockSize, @@ -1426,6 +1453,7 @@ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, * @param progress the progress reporter * @throws IOException IO failure * @see #setPermission(Path, FsPermission) + * @return output stream */ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, EnumSet flags, int bufferSize, short replication, long blockSize, @@ -1440,6 +1468,7 @@ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, * Important: the default implementation is not atomic * @param f path to use for create * @throws IOException IO failure + * @return if create new file success true,not false */ public boolean createNewFile(Path f) throws IOException { if (exists(f)) { @@ -1474,6 +1503,7 @@ public FSDataOutputStream append(Path f) throws IOException { * @throws IOException IO failure * @throws UnsupportedOperationException if the operation is unsupported * (default). + * @return output stream */ public FSDataOutputStream append(Path f, int bufferSize) throws IOException { return append(f, bufferSize, null); @@ -1487,6 +1517,7 @@ public FSDataOutputStream append(Path f, int bufferSize) throws IOException { * @throws IOException IO failure * @throws UnsupportedOperationException if the operation is unsupported * (default). + * @return output stream */ public abstract FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException; @@ -1525,7 +1556,7 @@ public short getReplication(Path src) throws IOException { * This is the default behavior. * @param src file name * @param replication new replication - * @throws IOException + * @throws IOException an IO failure * @return true if successful, or the feature in unsupported; * false if replication is supported but the file does not exist, * or is a directory @@ -1554,11 +1585,12 @@ public boolean setReplication(Path src, short replication) *

<p>
   * If OVERWRITE option is not passed as an argument, rename fails
   * if the dst already exists.
+  * </p>
   * <p>
   * If OVERWRITE option is passed as an argument, rename overwrites
   * the dst if it is a file or an empty directory. Rename fails if dst is
   * a non-empty directory.
-  * <p>
+  * </p>
   * Note that atomicity of rename is dependent on the file system
   * implementation. Please refer to the file system documentation for
   * details. This default implementation is non atomic.
@@ -1566,6 +1598,7 @@ public boolean setReplication(Path src, short replication)
   * This method is deprecated since it is a temporary method added to
   * support the transition from FileSystem to FileContext for user
   * applications.
+  * </p>
* * @param src path to be renamed * @param dst new path after rename From e0188932cae0942a54a147254a92dced566dfe02 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Wed, 11 May 2022 16:05:16 -0700 Subject: [PATCH 22/53] HADOOP-18229. Fix some java doc compilation errors FileContext.java no description for @param, unknown tag: thorws etc FileSystem.java no @return, no @param for uri etc KeyProviderCryptoExtension.java no description for @param --- .../key/KeyProviderCryptoExtension.java | 2 +- .../org/apache/hadoop/fs/FileContext.java | 27 ++++++++++--------- .../java/org/apache/hadoop/fs/FileSystem.java | 25 +++++++++++++++-- 3 files changed, 39 insertions(+), 15 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java index 99cab35e351c6..7e85eef5cc741 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java @@ -560,7 +560,7 @@ public EncryptedKeyVersion reencryptEncryptedKey(EncryptedKeyVersion ekv) * Calls {@link CryptoExtension#drain(String)} for the given key name on the * underlying {@link CryptoExtension}. * - * @param keyName + * @param keyName key name */ public void drain(String keyName) { getExtension().drain(keyName); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java index 29b711e492138..d48918f280ee7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java @@ -366,8 +366,8 @@ public AbstractFileSystem run() throws UnsupportedFileSystemException { * Create a FileContext with specified FS as default using the specified * config. * - * @param defFS - * @param aConf + * @param defFS default fs + * @param aConf configutration * @return new FileContext with specified FS as default. */ public static FileContext getFileContext(final AbstractFileSystem defFS, @@ -378,7 +378,7 @@ public static FileContext getFileContext(final AbstractFileSystem defFS, /** * Create a FileContext for specified file system using the default config. * - * @param defaultFS + * @param defaultFS default fs * @return a FileContext with the specified AbstractFileSystem * as the default FS. */ @@ -431,7 +431,7 @@ public static FileContext getLocalFSFileContext() /** * Create a FileContext for specified URI using the default config. * - * @param defaultFsUri + * @param defaultFsUri defaultFsUri * @return a FileContext with the specified URI as the default FS. * * @throws UnsupportedFileSystemException If the file system for @@ -445,8 +445,8 @@ public static FileContext getFileContext(final URI defaultFsUri) /** * Create a FileContext for specified default URI using the specified config. 
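As a concrete reading of the OVERWRITE contract restored above, a sketch against the public FileContext rename (the paths are assumptions):

    // Throws FileAlreadyExistsException if /data/out already exists ...
    fc.rename(new Path("/data/tmp"), new Path("/data/out"));
    // ... while OVERWRITE replaces a file or an empty directory at the destination.
    fc.rename(new Path("/data/tmp"), new Path("/data/out"),
        Options.Rename.OVERWRITE);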
* - * @param defaultFsUri - * @param aConf + * @param defaultFsUri defaultFsUri + * @param aConf configrution * @return new FileContext for specified uri * @throws UnsupportedFileSystemException If the file system with specified is * not supported @@ -477,7 +477,7 @@ public static FileContext getFileContext(final URI defaultFsUri, * {@link #getFileContext(URI, Configuration)} instead of this one. * * - * @param aConf + * @param aConf configration * @return new FileContext * @throws UnsupportedFileSystemException If file system in the config * is not supported @@ -1061,7 +1061,7 @@ public Void next(final AbstractFileSystem fs, final Path p) /** * Set permission of a path. - * @param f + * @param f the path * @param permission - the new absolute permission (umask is not applied) * * @throws AccessControlException If access is denied @@ -1205,7 +1205,7 @@ public FileChecksum next(final AbstractFileSystem fs, final Path p) * Set the verify checksum flag for the file system denoted by the path. * This is only applicable if the * corresponding FileSystem supports checksum. By default doesn't do anything. - * @param verifyChecksum + * @param verifyChecksum verify check sum * @param f set the verifyChecksum for the Filesystem containing this path * * @throws AccessControlException If access is denied @@ -1260,8 +1260,9 @@ public FileStatus next(final AbstractFileSystem fs, final Path p) /** * Synchronize client metadata state. * - * @throws IOException - * @throws UnsupportedOperationException + * @throws IOException If an I/O error occurred + * @throws UnsupportedOperationException If file system for f is + * not supported */ public void msync() throws IOException, UnsupportedOperationException { defaultFS.msync(); @@ -1751,6 +1752,7 @@ public class Util { * @throws RpcServerException If an exception occurred in the RPC server * @throws UnexpectedServerException If server implementation throws * undeclared exception to RPC server + * @return if f exists true, not false */ public boolean exists(final Path f) throws AccessControlException, UnsupportedFileSystemException, IOException { @@ -1816,6 +1818,7 @@ public ContentSummary getContentSummary(Path f) * @throws AccessControlException If access is denied * @throws FileNotFoundException If files does not exist * @throws IOException If an I/O error occurred + * @return file status array */ public FileStatus[] listStatus(Path[] files) throws AccessControlException, FileNotFoundException, IOException { @@ -2164,7 +2167,7 @@ public FileStatus[] globStatus(final Path pathPattern, * directory. * @throws UnsupportedFileSystemException If file system for * src/dst is not supported - * @thorws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred * @return if success copy true, not false */ public boolean copy(final Path src, final Path dst) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java index 0bd78498a1802..5ec78e943e9c0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java @@ -377,6 +377,7 @@ public String getScheme() { * implement that method. * * @see #canonicalizeUri(URI) + * @return the URI of this filesystem. 
*/ protected URI getCanonicalUri() { return canonicalizeUri(getUri()); @@ -457,7 +458,10 @@ public String getCanonicalServiceName() { : null; } - /** @deprecated call {@link #getUri()} instead.*/ + /** + * @return uri to string + * @deprecated call {@link #getUri()} instead. + */ @Deprecated public String getName() { return getUri().toString(); } @@ -523,6 +527,9 @@ public static LocalFileSystem getLocal(Configuration conf) * configuration and URI, cached and returned to the caller. * * + * @param uri uri of the filesystem + * @param conf configrution + * @return filesystem instance * @throws IOException if the FileSystem cannot be instantiated. */ public static FileSystem get(URI uri, Configuration conf) throws IOException { @@ -552,7 +559,7 @@ public static FileSystem get(URI uri, Configuration conf) throws IOException { /** * Returns the FileSystem for this URI's scheme and authority and the * given user. Internally invokes {@link #newInstance(URI, Configuration)} - * @param uri of the filesystem + * @param uri uri of the filesystem * @param conf the configuration to use * @param user to perform the get as * @return filesystem instance @@ -870,6 +877,7 @@ protected void checkPath(Path path) { * @param start offset into the given file * @param len length for which to get locations for * @throws IOException IO failure + * @return block location array */ public BlockLocation[] getFileBlockLocations(FileStatus file, long start, long len) throws IOException { @@ -910,6 +918,7 @@ public BlockLocation[] getFileBlockLocations(FileStatus file, * @param len length for which to get locations for * @throws FileNotFoundException when the path does not exist * @throws IOException IO failure + * @return block location array */ public BlockLocation[] getFileBlockLocations(Path p, long start, long len) throws IOException { @@ -972,6 +981,7 @@ public Path resolvePath(final Path p) throws IOException { * @param f the file name to open * @param bufferSize the size of the buffer to be used. * @throws IOException IO failure + * @return input stream */ public abstract FSDataInputStream open(Path f, int bufferSize) throws IOException; @@ -980,6 +990,7 @@ public abstract FSDataInputStream open(Path f, int bufferSize) * Opens an FSDataInputStream at the indicated Path. * @param f the file to open * @throws IOException IO failure + * @return input stream */ public FSDataInputStream open(Path f) throws IOException { return open(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY, @@ -997,6 +1008,7 @@ public FSDataInputStream open(Path f) throws IOException { * @throws IOException IO failure * @throws UnsupportedOperationException If {@link #open(PathHandle, int)} * not overridden by subclass + * @return input stream */ public FSDataInputStream open(PathHandle fd) throws IOException { return open(fd, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY, @@ -1014,6 +1026,7 @@ public FSDataInputStream open(PathHandle fd) throws IOException { * not satisfied * @throws IOException IO failure * @throws UnsupportedOperationException If not overridden by subclass + * @return input stream */ public FSDataInputStream open(PathHandle fd, int bufferSize) throws IOException { @@ -1031,6 +1044,7 @@ public FSDataInputStream open(PathHandle fd, int bufferSize) * not overridden by subclass. * @throws UnsupportedOperationException If this FileSystem cannot enforce * the specified constraints. + * @return path handle */ public final PathHandle getPathHandle(FileStatus stat, HandleOpt... 
opt) { // method is final with a default so clients calling getPathHandle(stat) // get the same semantics for all implementations @@ -1046,6 +1060,7 @@ public final PathHandle getPathHandle(FileStatus stat, HandleOpt... opt) { * @param stat Referent in the target FileSystem * @param opt Constraints that determine the validity of the * {@link PathHandle} reference. + * @return path handle */ protected PathHandle createPathHandle(FileStatus stat, HandleOpt... opt) { throw new UnsupportedOperationException(); @@ -1056,6 +1071,7 @@ protected PathHandle createPathHandle(FileStatus stat, HandleOpt... opt) { * Files are overwritten by default. * @param f the file to create * @throws IOException IO failure + * @return output stream */ public FSDataOutputStream create(Path f) throws IOException { return create(f, true); @@ -1067,6 +1083,7 @@ public FSDataOutputStream create(Path f) throws IOException { * @param overwrite if a file with this name already exists, then if true, * the file will be overwritten, and if false an exception will be thrown. * @throws IOException IO failure + * @return output stream */ public FSDataOutputStream create(Path f, boolean overwrite) throws IOException { From 35ad959333ba416c1d54f46acc0188e2fcf974f6 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Thu, 12 May 2022 00:36:11 -0700 Subject: [PATCH 23/53] YARN-11122: Add GetClusterNodes HADOOP-18229. Fix some java doc compilation errors. FileSystem.java no @param for f, no @param for link, no @return --- .../java/org/apache/hadoop/fs/FileSystem.java | 51 +++++++++++++++++-- 1 file changed, 46 insertions(+), 5 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java index 5ec78e943e9c0..9dee8158dedd2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java @@ -1510,6 +1510,7 @@ public boolean createNewFile(Path f) throws IOException { * @throws IOException IO failure * @throws UnsupportedOperationException if the operation is unsupported * (default).
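A sketch of the PathHandle flow documented in this hunk; support is filesystem-specific (the base class throws UnsupportedOperationException, HDFS implements it), and the zero-argument call leaves the HandleOpt constraints at their defaults:

    FileStatus st = fs.getFileStatus(new Path("/tmp/data.bin"));
    PathHandle handle = fs.getPathHandle(st);    // varargs: no explicit constraints
    try (FSDataInputStream in = fs.open(handle, 4096)) {
      in.readByte();   // resolves the same entity the handle was minted for
    }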
+ * @return output stream */ public FSDataOutputStream append(Path f) throws IOException { return append(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY, @@ -1623,6 +1624,7 @@ public boolean setReplication(Path src, short replication) * * @param src path to be renamed * @param dst new path after rename + * @param option rename options * @throws FileNotFoundException src path does not exist, or the parent * path of dst does not exist. * @throws FileAlreadyExistsException dest path exists and is a file @@ -1717,6 +1719,9 @@ public boolean truncate(Path f, long newLength) throws IOException { /** * Delete a file/directory. + * @param f the path + * @throws IOException IO failure + * @return if delete success true, not false * @deprecated Use {@link #delete(Path, boolean)} instead. */ @Deprecated @@ -1833,6 +1838,7 @@ public boolean exists(Path f) throws IOException { * @param f path to check * @throws IOException IO failure * @deprecated Use {@link #getFileStatus(Path)} instead + * @return if f is directory true, not false */ @Deprecated public boolean isDirectory(Path f) throws IOException { @@ -1850,6 +1856,7 @@ public boolean isDirectory(Path f) throws IOException { * @param f path to check * @throws IOException IO failure * @deprecated Use {@link #getFileStatus(Path)} instead + * @return if f is file true, not false */ @Deprecated public boolean isFile(Path f) throws IOException { @@ -1862,6 +1869,7 @@ public boolean isFile(Path f) throws IOException { /** * The number of bytes in a file. + * @param f the path * @return the number of bytes; 0 for a directory * @deprecated Use {@link #getFileStatus(Path)} instead. * @throws FileNotFoundException if the path does not resolve @@ -1876,6 +1884,7 @@ public long getLength(Path f) throws IOException { * @param f path to use * @throws FileNotFoundException if the path does not resolve * @throws IOException IO failure + * @return content summary */ public ContentSummary getContentSummary(Path f) throws IOException { FileStatus status = getFileStatus(f); @@ -2010,8 +2019,8 @@ public boolean hasMore() { * @param f Path to list * @param token opaque iteration token returned by previous call, or null * if this is the first call. - * @return - * @throws FileNotFoundException + * @return directory entries + * @throws FileNotFoundException when the path does not exist * @throws IOException If an I/O error occurred */ @InterfaceAudience.Private @@ -2043,6 +2052,8 @@ private void listStatus(ArrayList results, Path f, /** * List corrupted file blocks. + * + * @param path the path * @return an iterator over the corrupt files under the given path * (may contain duplicates if a file has more than one corrupt block) * @throws UnsupportedOperationException if the operation is unsupported @@ -2458,6 +2469,7 @@ public boolean mkdirs(Path f) throws IOException { * @param f path to create * @param permission to apply to f * @throws IOException IO failure + * @return if mkdir success true, not false */ public abstract boolean mkdirs(Path f, FsPermission permission ) throws IOException; @@ -2674,7 +2686,9 @@ public long getUsed() throws IOException { /** * Return the total size of all files from a specified path. 
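The space-accounting javadocs above are two views of one number; a sketch with an assumed directory:

    ContentSummary cs = fs.getContentSummary(new Path("/data"));
    long bytes = cs.getLength();   // getUsed(new Path("/data")) reports the same figure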
+ * @param path the path * @throws IOException IO failure + * @return the number of path content summary */ public long getUsed(Path path) throws IOException { return getContentSummary(path).getLength(); @@ -2750,7 +2764,7 @@ public short getDefaultReplication(Path path) { * synchronization is essential to guarantee consistency of read requests * particularly in HA setting. * @throws IOException If an I/O error occurred - * @throws UnsupportedOperationException + * @throws UnsupportedOperationException if the operation is unsupported */ public void msync() throws IOException, UnsupportedOperationException { throw new UnsupportedOperationException(getClass().getCanonicalName() + @@ -2826,6 +2840,8 @@ static void checkAccessPermissions(FileStatus stat, FsAction mode) /** * See {@link FileContext#fixRelativePart}. + * @param p the path + * @return relative part */ protected Path fixRelativePart(Path p) { if (p.isUriPathAbsolute()) { @@ -2837,6 +2853,18 @@ protected Path fixRelativePart(Path p) { /** * See {@link FileContext#createSymlink(Path, Path, boolean)}. + * + * @param target target path + * @param link link + * @param createParent create parent + * @throws AccessControlException if access is denied + * @throws FileAlreadyExistsException when the path does not exist + * @throws FileNotFoundException when the path does not exist + * @throws ParentNotDirectoryException if the parent path of dest is not + * a directory + * @throws UnsupportedFileSystemException if there was no known implementation + * for the scheme. + * @throws IOException see specific implementation */ public void createSymlink(final Path target, final Path link, final boolean createParent) throws AccessControlException, @@ -2850,8 +2878,14 @@ public void createSymlink(final Path target, final Path link, /** * See {@link FileContext#getFileLinkStatus(Path)}. - * @throws FileNotFoundException when the path does not exist - * @throws IOException see specific implementation + * + * @param f the path + * @throws AccessControlException if access is denied + * @throws FileNotFoundException when the path does not exist + * @throws IOException see specific implementation + * @throws UnsupportedFileSystemException if there was no known implementation + * for the scheme. + * @return file status */ public FileStatus getFileLinkStatus(final Path f) throws AccessControlException, FileNotFoundException, @@ -2869,8 +2903,11 @@ public boolean supportsSymlinks() { /** * See {@link FileContext#getLinkTarget(Path)}. + * @param f the path * @throws UnsupportedOperationException if the operation is unsupported * (default outcome). + * @throws IOException IO failure + * @return the path */ public Path getLinkTarget(Path f) throws IOException { // Supporting filesystems should override this method @@ -2880,8 +2917,11 @@ public Path getLinkTarget(Path f) throws IOException { /** * See {@link AbstractFileSystem#getLinkTarget(Path)}. + * @param f the path * @throws UnsupportedOperationException if the operation is unsupported * (default outcome). + * @throws IOException IO failure + * @return the path */ protected Path resolveLink(Path f) throws IOException { // Supporting filesystems should override this method @@ -4454,6 +4494,7 @@ public static synchronized List getAllStatistics() { /** * Get the statistics for a particular file system. 
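For the symlink javadocs expanded in this hunk, a sketch; createSymlink is unsupported by default, so a filesystem with symlink support (e.g. HDFS) is assumed:

    fs.createSymlink(new Path("/data/v2"), new Path("/data/current"), true);
    FileStatus st = fs.getFileLinkStatus(new Path("/data/current"));
    if (st.isSymlink()) {
      System.out.println(fs.getLinkTarget(new Path("/data/current"))); // /data/v2
    }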
+ * @param scheme scheme * @param cls the class to lookup * @return a statistics object * @deprecated use {@link #getGlobalStorageStatistics()}
From 5fb63e9c0abf808e463a51fd64abc95ddc6a7ace Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Thu, 12 May 2022 04:15:36 -0700 Subject: [PATCH 25/53] HADOOP-18229. Fix some java doc compilation errors. AvroFSInput.java error BatchedRemoteIterator.java error BlockLocation.java error FileSystem.java error --- .../org/apache/hadoop/fs/AvroFSInput.java | 13 +++++- .../hadoop/fs/BatchedRemoteIterator.java | 3 ++ .../org/apache/hadoop/fs/BlockLocation.java | 45 +++++++++++++++++++ .../java/org/apache/hadoop/fs/FileSystem.java | 18 ++++---- 4 files changed, 69 insertions(+), 10 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java index 213fbc24c4db0..d8f87b07e5d16 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java @@ -36,13 +36,22 @@ public class AvroFSInput implements Closeable, SeekableInput { private final FSDataInputStream stream; private final long len; - /** Construct given an {@link FSDataInputStream} and its length. */ + /** + * Construct given an {@link FSDataInputStream} and its length. + * + * @param in inputstream + * @param len len + */ public AvroFSInput(final FSDataInputStream in, final long len) { this.stream = in; this.len = len; } - /** Construct given a {@link FileContext} and a {@link Path}. */ + /** Construct given a {@link FileContext} and a {@link Path}. + * @param fc filecontext + * @param p the path + * @throws IOException If an I/O error occurred + * */ public AvroFSInput(final FileContext fc, final Path p) throws IOException { FileStatus status = fc.getFileStatus(p); this.len = status.getLen(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java index 607fffbcc701a..18f6b8137f85e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java @@ -68,6 +68,7 @@ public BatchedRemoteIterator(K prevKey) { * * @param prevKey The key to send. * @return A list of replies.
+ * @throws IOException If an I/O error occurred */ public abstract BatchedEntries makeRequest(K prevKey) throws IOException; @@ -102,6 +103,8 @@ public boolean hasNext() throws IOException { /** * Return the next list key associated with an element. + * @param element element + * @return K Generics Type */ public abstract K elementToPrevKey(E element); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java index 29358dd7d1086..657be6fc95a07 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java @@ -85,6 +85,7 @@ public BlockLocation() { /** * Copy constructor. + * @param that blocklocation */ public BlockLocation(BlockLocation that) { this.hosts = that.hosts; @@ -100,6 +101,10 @@ public BlockLocation(BlockLocation that) { /** * Constructor with host, name, offset and length. + * @param names names array + * @param hosts host array + * @param offset offset + * @param length length */ public BlockLocation(String[] names, String[] hosts, long offset, long length) { @@ -108,6 +113,11 @@ public BlockLocation(String[] names, String[] hosts, long offset, /** * Constructor with host, name, offset, length and corrupt flag. + * @param names names + * @param hosts hosts + * @param offset offset + * @param length length + * @param corrupt corrupt */ public BlockLocation(String[] names, String[] hosts, long offset, long length, boolean corrupt) { @@ -116,6 +126,11 @@ public BlockLocation(String[] names, String[] hosts, long offset, /** * Constructor with host, name, network topology, offset and length. + * @param names names + * @param hosts hosts + * @param topologyPaths topologyPaths + * @param offset offset + * @param length length */ public BlockLocation(String[] names, String[] hosts, String[] topologyPaths, long offset, long length) { @@ -125,6 +140,12 @@ public BlockLocation(String[] names, String[] hosts, String[] topologyPaths, /** * Constructor with host, name, network topology, offset, length * and corrupt flag. + * @param names names + * @param hosts hosts + * @param topologyPaths topologyPaths + * @param offset offset + * @param length length + * @param corrupt corrupt */ public BlockLocation(String[] names, String[] hosts, String[] topologyPaths, long offset, long length, boolean corrupt) { @@ -177,6 +198,8 @@ public BlockLocation(String[] names, String[] hosts, String[] cachedHosts, /** * Get the list of hosts (hostname) hosting this block. + * @return hosts array + * @throws IOException If an I/O error occurred */ public String[] getHosts() throws IOException { return hosts; @@ -184,6 +207,7 @@ public String[] getHosts() throws IOException { /** * Get the list of hosts (hostname) hosting a cached replica of the block. + * @return cached hosts */ public String[] getCachedHosts() { return cachedHosts; @@ -191,6 +215,8 @@ public String[] getCachedHosts() { /** * Get the list of names (IP:xferPort) hosting this block. + * @return names array + * @throws IOException If an I/O error occurred. */ public String[] getNames() throws IOException { return names; @@ -199,6 +225,8 @@ public String[] getNames() throws IOException { /** * Get the list of network topology paths for each of the hosts. * The last component of the path is the "name" (IP:xferPort). 
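Reading the BlockLocation accessors documented above off a live file; the range values are illustrative:

    BlockLocation[] blocks =
        fs.getFileBlockLocations(fs.getFileStatus(p), 0, Long.MAX_VALUE);
    for (BlockLocation b : blocks) {
      System.out.println("offset=" + b.getOffset() + " len=" + b.getLength()
          + " hosts=" + Arrays.toString(b.getHosts())
          + (b.isCorrupt() ? " CORRUPT" : ""));
    }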
+ * @return topology paths + * @throws IOException If an I/O error occurred */ public String[] getTopologyPaths() throws IOException { return topologyPaths; @@ -206,6 +234,7 @@ public String[] getTopologyPaths() throws IOException { /** * Get the storageID of each replica of the block. + * @return storage ids */ public String[] getStorageIds() { return storageIds; @@ -213,6 +242,7 @@ public String[] getStorageIds() { /** * Get the storage type of each replica of the block. + * @return storage type of each replica of the block */ public StorageType[] getStorageTypes() { return storageTypes; @@ -220,6 +250,7 @@ public StorageType[] getStorageTypes() { /** * Get the start offset of file associated with this block. + * @return start offset of file associated with this block */ public long getOffset() { return offset; @@ -227,6 +258,7 @@ public long getOffset() { /** * Get the length of the block. + * @return length of the block */ public long getLength() { return length; @@ -234,6 +266,7 @@ public long getLength() { /** * Get the corrupt flag. + * @return corrupt flag */ public boolean isCorrupt() { return corrupt; @@ -241,6 +274,7 @@ public boolean isCorrupt() { /** * Return true if the block is striped (erasure coded). + * @return if the block is striped true, not false */ public boolean isStriped() { return false; @@ -248,6 +282,7 @@ public boolean isStriped() { /** * Set the start offset of file associated with this block. + * @param offset start offset */ public void setOffset(long offset) { this.offset = offset; @@ -255,6 +290,7 @@ public void setOffset(long offset) { /** * Set the length of block. + * @param length length of block */ public void setLength(long length) { this.length = length; @@ -262,6 +298,7 @@ public void setLength(long length) { /** * Set the corrupt flag. + * @param corrupt corrupt flag */ public void setCorrupt(boolean corrupt) { this.corrupt = corrupt; @@ -269,6 +306,8 @@ public void setCorrupt(boolean corrupt) { /** * Set the hosts hosting this block. + * @param hosts hosts array + * @throws IOException If an I/O error occurred */ public void setHosts(String[] hosts) throws IOException { if (hosts == null) { @@ -280,6 +319,7 @@ public void setHosts(String[] hosts) throws IOException { /** * Set the hosts hosting a cached replica of this block. + * @param cachedHosts cached hosts */ public void setCachedHosts(String[] cachedHosts) { if (cachedHosts == null) { @@ -291,6 +331,8 @@ public void setCachedHosts(String[] cachedHosts) { /** * Set the names (host:port) hosting this block. + * @param names names + * @throws IOException If an I/O error occurred */ public void setNames(String[] names) throws IOException { if (names == null) { @@ -302,6 +344,9 @@ public void setNames(String[] names) throws IOException { /** * Set the network topology paths of the hosts. 
+ * + * @param topologyPaths topology paths + * @throws IOException If an I/O error occurred */ public void setTopologyPaths(String[] topologyPaths) throws IOException { if (topologyPaths == null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java index 9dee8158dedd2..421a0a1c54874 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java @@ -1624,7 +1624,7 @@ public boolean setReplication(Path src, short replication) * * @param src path to be renamed * @param dst new path after rename - * @param option rename options + * @param options rename options * @throws FileNotFoundException src path does not exist, or the parent * path of dst does not exist. * @throws FileAlreadyExistsException dest path exists and is a file @@ -2147,36 +2147,29 @@ public FileStatus[] listStatus(Path[] files, PathFilter filter) *
 *    <dt> <tt> ? </tt>
 *    <dd> Matches any single character.
 *
- *    <p>
 *    <dt> <tt> * </tt>
 *    <dd> Matches zero or more characters.
 *
- *    <p>
 *    <dt> <tt> [abc] </tt>
 *    <dd> Matches a single character from character set
 *     <tt>{a,b,c}</tt>.
 *
- *    <p>
 *    <dt> <tt> [a-b] </tt>
 *    <dd> Matches a single character from the character range
 *     <tt>{a...b}</tt>. Note that character <tt>a</tt> must be
 *     lexicographically less than or equal to character <tt>b</tt>.
 *
- *    <p>
 *    <dt> <tt> [^a] </tt>
 *    <dd> Matches a single character that is not from character set or range
 *     <tt>{a}</tt>. Note that the <tt>^</tt> character must occur
 *     immediately to the right of the opening bracket.
 *
- *    <p>
 *    <dt> <tt> \c </tt>
 *    <dd> Removes (escapes) any special meaning of character <tt>c</tt>.
 *
- *    <p>
 *    <dt> <tt> {ab,cd} </tt>
 *    <dd> Matches a string from the string set <tt>{<i>ab, cd</i>}</tt>
 *
- *    <p>
 *    <dt> <tt> {ab,c{de,fh}} </tt>
 *    <dd>
Matches a string from the string set {ab, cde, cfh} * @@ -2407,6 +2400,7 @@ public LocatedFileStatus next() throws IOException { /** Return the current user's home directory in this FileSystem. * The default implementation returns {@code "/user/$USER/"}. + * @return the path */ public Path getHomeDirectory() { String username; @@ -2517,6 +2511,7 @@ public void moveFromLocalFile(Path src, Path dst) * @param delSrc whether to delete the src * @param src path * @param dst path + * @throws IOException IO failure */ public void copyFromLocalFile(boolean delSrc, Path src, Path dst) throws IOException { @@ -2631,6 +2626,7 @@ public void copyToLocalFile(boolean delSrc, Path src, Path dst, * @param fsOutputFile path of output file * @param tmpLocalFile path of local tmp file * @throws IOException IO failure + * @return the path */ public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile) throws IOException { @@ -2678,6 +2674,7 @@ public void close() throws IOException { /** * Return the total size of all files in the filesystem. * @throws IOException IO failure + * @return the number of path used */ public long getUsed() throws IOException { Path path = new Path("/"); @@ -2711,6 +2708,7 @@ public long getBlockSize(Path f) throws IOException { * Return the number of bytes that large input files should be optimally * be split into to minimize I/O time. * @deprecated use {@link #getDefaultBlockSize(Path)} instead + * @return default block size */ @Deprecated public long getDefaultBlockSize() { @@ -2896,6 +2894,7 @@ public FileStatus getFileLinkStatus(final Path f) /** * See {@link AbstractFileSystem#supportsSymlinks()}. + * @return if support symlinkls true, not false */ public boolean supportsSymlinks() { return false; @@ -4486,6 +4485,7 @@ public static synchronized Map getStatistics() { /** * Return the FileSystem classes that have Statistics. * @deprecated use {@link #getGlobalStorageStatistics()} + * @return statistics lists */ @Deprecated public static synchronized List getAllStatistics() { @@ -4529,6 +4529,7 @@ public static synchronized void clearStatistics() { /** * Print all statistics for all file systems to {@code System.out} + * @throws IOException If an I/O error occurred */ public static synchronized void printStatistics() throws IOException { @@ -4569,6 +4570,7 @@ public StorageStatistics getStorageStatistics() { /** * Get the global storage statistics. + * @return global storage statistics */ public static GlobalStorageStatistics getGlobalStorageStatistics() { return GlobalStorageStatistics.INSTANCE; From ddc34fc0aa3f64d7c9e2b98d0332ad7ac9f7c743 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Thu, 12 May 2022 16:51:11 -0700 Subject: [PATCH 26/53] HADOOP-18229. Fix some java doc compilation errors. ByteBufferUtil.java no @param for stream, no @param for bufferPool etc. CachingGetSpaceUsed.java warning: no @param for builder, warning: no @return etc. FilterFileSystem.java warning: no description for @throws. 
--- .../main/java/org/apache/hadoop/fs/ByteBufferUtil.java | 6 ++++++ .../java/org/apache/hadoop/fs/CachingGetSpaceUsed.java | 9 +++++++++ .../main/java/org/apache/hadoop/fs/FilterFileSystem.java | 4 ++-- 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java index 6576fe5827d94..a9790773b1707 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java @@ -47,6 +47,12 @@ private static boolean streamHasByteBufferRead(InputStream stream) { /** * Perform a fallback read. + * + * @param stream input stream + * @param bufferPool bufferPool + * @param maxLength maxLength + * @throws IOException raised on errors performing I/O. + * @return byte buffer */ public static ByteBuffer fallbackRead( InputStream stream, ByteBufferPool bufferPool, int maxLength) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java index 362d125b09df5..e0af53bac6f7d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java @@ -53,6 +53,9 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed { /** * This is the constructor used by the builder. * All overriding classes should implement this. + * + * @param builder builder + * @throws IOException raised on errors performing I/O. */ public CachingGetSpaceUsed(CachingGetSpaceUsed.Builder builder) throws IOException { @@ -140,6 +143,8 @@ public String getDirPath() { /** * Increment the cached value of used space. + * + * @param value dfs used value */ public void incDfsUsed(long value) { used.addAndGet(value); @@ -154,6 +159,8 @@ boolean running() { /** * How long in between runs of the background refresh. + * + * @return refresh interval */ @VisibleForTesting public long getRefreshInterval() { @@ -163,6 +170,8 @@ public long getRefreshInterval() { /** * Randomize the refresh interval timing by this amount, the actual interval will be chosen * uniformly between {@code interval-jitter} and {@code interval+jitter}. + * + * @return between interval-jitter and interval+jitter */ @VisibleForTesting public long getJitter() { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java index 607aa263622f6..331e2ab9830c8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java @@ -233,7 +233,7 @@ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, * * @param src file name * @param replication new replication - * @throws IOException + * @throws IOException raised on errors performing I/O. * @return true if successful; * false if file does not exist or is a directory */ @@ -304,7 +304,7 @@ public Path getHomeDirectory() { * Set the current working directory for the given file system. 
All relative * paths will be resolved relative to it. * - * @param newDir + * @param newDir new dir */ @Override public void setWorkingDirectory(Path newDir) { From a39012a0e27a66968043352a4ba06c06e6263c0f Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Thu, 12 May 2022 22:19:59 -0700 Subject: [PATCH 27/53] HADOOP-18229. Fix some java doc compilation errors. ChecksumFileSystem.java warning: no @param for src, warning: no @param for copyCrc etc ChecksumFs.java warning: no @return, warning: no @param for file --- .../apache/hadoop/fs/ChecksumFileSystem.java | 40 +++++++++++++++---- .../java/org/apache/hadoop/fs/ChecksumFs.java | 37 +++++++++++++---- 2 files changed, 62 insertions(+), 15 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java index 59ffe00bcb24d..578ef03956d9e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java @@ -102,25 +102,44 @@ public FileSystem getRawFileSystem() { return fs; } - /** Return the name of the checksum file associated with a file.*/ + /** + * Return the name of the checksum file associated with a file. + * + * @param file the file path + * @return name of the checksum file associated with a file + */ public Path getChecksumFile(Path file) { return new Path(file.getParent(), "." + file.getName() + ".crc"); } - /** Return true iff file is a checksum file name.*/ + /** + * Return true if file is a checksum file name. + * + * @param file the file path + * @return if file is a checksum file true, not false + */ public static boolean isChecksumFile(Path file) { String name = file.getName(); return name.startsWith(".") && name.endsWith(".crc"); } - /** Return the length of the checksum file given the size of the + /** + * Return the length of the checksum file given the size of the * actual file. - **/ + * + * @param file the file path + * @param fileSize file size + * @return checksum length + */ public long getChecksumFileLength(Path file, long fileSize) { return getChecksumLength(fileSize, getBytesPerSum()); } - /** Return the bytes Per Checksum */ + /** + * Return the bytes Per Checksum + * + * @return bytes per check sum + */ public int getBytesPerSum() { return bytesPerChecksum; } @@ -362,6 +381,7 @@ public synchronized void seek(long pos) throws IOException { * Opens an FSDataInputStream at the indicated Path. * @param f the file name to open * @param bufferSize the size of the buffer to be used. + * @throws IOException if an I/O error occurs. */ @Override public FSDataInputStream open(Path f, int bufferSize) throws IOException { @@ -669,7 +689,7 @@ boolean apply(Path p) throws IOException { * Implement the abstract setReplication of FileSystem * @param src file name * @param replication new replication - * @throws IOException + * @throws IOException if an I/O error occurs. * @return true if successful; * false if file does not exist or is a directory */ @@ -754,7 +774,7 @@ public boolean accept(Path file) { * @param f * given path * @return the statuses of the files/directories in the given path - * @throws IOException + * @throws IOException if an I/O error occurs. 
*/ @Override public FileStatus[] listStatus(Path f) throws IOException { @@ -775,7 +795,7 @@ public RemoteIterator listStatusIterator(final Path p) * @param f * given path * @return the statuses of the files/directories in the given patch - * @throws IOException + * @throws IOException if an I/O error occurs. */ @Override public RemoteIterator listLocatedStatus(Path f) @@ -811,6 +831,10 @@ public void copyToLocalFile(boolean delSrc, Path src, Path dst) * Copy it from FS control to the local dst name. * If src and dst are directories, the copyCrc parameter * determines whether to copy CRC files. + * @param src src path + * @param dst dst path + * @param copyCrc copy csc flag + * @throws IOException if an I/O error occurs. */ @SuppressWarnings("deprecation") public void copyToLocalFile(Path src, Path dst, boolean copyCrc) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java index bc1122c56a2bd..a12b0e9b98613 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java @@ -70,30 +70,53 @@ public void setVerifyChecksum(boolean inVerifyChecksum) { this.verifyChecksum = inVerifyChecksum; } - /** get the raw file system. */ + /** + * get the raw file system. + * + * @return abstract file system + */ public AbstractFileSystem getRawFs() { return getMyFs(); } - /** Return the name of the checksum file associated with a file.*/ + /** + * Return the name of the checksum file associated with a file. + * + * @param file the file path + * @return the checksum file associated with a file + */ public Path getChecksumFile(Path file) { return new Path(file.getParent(), "." + file.getName() + ".crc"); } - /** Return true iff file is a checksum file name.*/ + /** + * Return true iff file is a checksum file name. + * + * @param file the file path + * @return if is checksum file true,not false + */ public static boolean isChecksumFile(Path file) { String name = file.getName(); return name.startsWith(".") && name.endsWith(".crc"); } - /** Return the length of the checksum file given the size of the + /** + * Return the length of the checksum file given the size of the * actual file. - **/ + * + * @param file the file path + * @param fileSize file size + * @return check sum file length + */ public long getChecksumFileLength(Path file, long fileSize) { return getChecksumLength(fileSize, getBytesPerSum()); } - /** Return the bytes Per Checksum. */ + /** + * Return the bytes Per Checksum. + * + * @return bytes per sum + */ public int getBytesPerSum() { return defaultBytesPerChecksum; } @@ -433,7 +456,7 @@ private boolean isDirectory(Path f) * Implement the abstract setReplication of FileSystem * @param src file name * @param replication new replication - * @throws IOException + * @throws IOException if an I/O error occurs. * @return true if successful; * false if file does not exist or is a directory */ From 618c934486bc36d17c800b98f2f194fb4574bcee Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Thu, 12 May 2022 22:51:16 -0700 Subject: [PATCH 28/53] HADOOP-18229. Fix some java doc compilation errors. CommonConfigurationKeysPublic.java warning: empty
<p>
tag CompositeCrcFileChecksum.java warning: no @param for crc ContentSummary.java warning: no @param for length, no @param for spaceQuota etc. FileChecksum.java warning: no @return QuotaUsage.java warning: no @param for builder,warning: no @return --- .../fs/CommonConfigurationKeysPublic.java | 6 +- .../hadoop/fs/CompositeCrcFileChecksum.java | 8 ++- .../org/apache/hadoop/fs/ContentSummary.java | 24 ++++++- .../org/apache/hadoop/fs/FileChecksum.java | 25 ++++++-- .../java/org/apache/hadoop/fs/QuotaUsage.java | 62 +++++++++++++++---- 5 files changed, 103 insertions(+), 22 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java index a799e883bcf2a..fdc5d3a40c106 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java @@ -169,11 +169,11 @@ public class CommonConfigurationKeysPublic { /** * Number of filesystems instances can be created in parallel. - *

+   * <p>
    * A higher number here does not necessarily improve performance, especially
    * for object stores, where multiple threads may be attempting to create an FS
    * instance for the same URI.
-   * <p></p>
+   * </p>
    * Default value: {@value}.
    */
   public static final String FS_CREATION_PARALLEL_COUNT =
@@ -181,7 +181,7 @@ public class CommonConfigurationKeysPublic {
 
   /**
    * Default value for {@link #FS_CREATION_PARALLEL_COUNT}.
-   * <p></p>
+   *
* Default value: {@value}. */ public static final int FS_CREATION_PARALLEL_COUNT_DEFAULT = diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java index e1ed5cbcfcaa6..9c2ceb0526565 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java @@ -37,7 +37,13 @@ public class CompositeCrcFileChecksum extends FileChecksum { private DataChecksum.Type crcType; private int bytesPerCrc; - /** Create a CompositeCrcFileChecksum. */ + /** + * Create a CompositeCrcFileChecksum. + * + * @param crc crc + * @param crcType crcType + * @param bytesPerCrc bytesPerCrc + */ public CompositeCrcFileChecksum( int crc, DataChecksum.Type crcType, int bytesPerCrc) { this.crc = crc; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java index 79850e1a2f291..1050083cea78d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java @@ -149,17 +149,31 @@ public ContentSummary build() { @Deprecated public ContentSummary() {} - /** Constructor, deprecated by ContentSummary.Builder + /** + * Constructor, deprecated by ContentSummary.Builder * This constructor implicitly set spaceConsumed the same as length. * spaceConsumed and length must be set explicitly with * ContentSummary.Builder + * + * @param length length + * @param fileCount file count + * @param directoryCount directory count * */ @Deprecated public ContentSummary(long length, long fileCount, long directoryCount) { this(length, fileCount, directoryCount, -1L, length, -1L); } - /** Constructor, deprecated by ContentSummary.Builder */ + /** + * Constructor, deprecated by ContentSummary.Builder. + * + * @param length length + * @param fileCount file count + * @param directoryCount directory count + * @param quota quota + * @param spaceConsumed space consumed + * @param spaceQuota space quota + * */ @Deprecated public ContentSummary( long length, long fileCount, long directoryCount, long quota, @@ -172,7 +186,11 @@ public ContentSummary( setSpaceQuota(spaceQuota); } - /** Constructor for ContentSummary.Builder*/ + /** + * Constructor for ContentSummary.Builder. + * + * @param builder builder + */ private ContentSummary(Builder builder) { super(builder); this.length = builder.length; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java index 6822fa485622f..679c5811f19e4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java @@ -28,20 +28,37 @@ @InterfaceAudience.Public @InterfaceStability.Stable public abstract class FileChecksum implements Writable { - /** The checksum algorithm name */ + /** + * The checksum algorithm name. + * + * @return algorithm name + */ public abstract String getAlgorithmName(); - /** The length of the checksum in bytes */ + /** + * The length of the checksum in bytes. 
+ * + * @return length + */ public abstract int getLength(); - /** The value of the checksum in bytes */ + /** + * The value of the checksum in bytes. + * + * @return byte array + */ public abstract byte[] getBytes(); public ChecksumOpt getChecksumOpt() { return null; } - /** Return true if both the algorithms and the values are the same. */ + /** + * Return true if both the algorithms and the values are the same. + * + * @param other other + * @return if equal true, not false + */ @Override public boolean equals(Object other) { if (other == this) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java index b00a31891c867..215f9b233d3e8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java @@ -105,7 +105,9 @@ public QuotaUsage build() { // Make it protected for the deprecated ContentSummary constructor. protected QuotaUsage() { } - /** Build the instance based on the builder. */ + /** Build the instance based on the builder. + * @param builder bulider + */ protected QuotaUsage(Builder builder) { this.fileAndDirectoryCount = builder.fileAndDirectoryCount; this.quota = builder.quota; @@ -127,37 +129,67 @@ protected void setSpaceQuota(long spaceQuota) { this.spaceQuota = spaceQuota; } - /** Return the directory count. */ + /** + * Return the directory count. + * + * @return file and directory count + */ public long getFileAndDirectoryCount() { return fileAndDirectoryCount; } - /** Return the directory quota. */ + /** + * Return the directory quota. + * + * @return quota + */ public long getQuota() { return quota; } - /** Return (disk) space consumed. */ + /** + * Return (disk) space consumed. + * + * @return space consumed + */ public long getSpaceConsumed() { return spaceConsumed; } - /** Return (disk) space quota. */ + /** + * Return (disk) space quota. + * + * @return space quota + */ public long getSpaceQuota() { return spaceQuota; } - /** Return storage type quota. */ + /** + * Return storage type quota. + * + * @param type storage type + * @return type quota + */ public long getTypeQuota(StorageType type) { return (typeQuota != null) ? typeQuota[type.ordinal()] : -1L; } - /** Return storage type consumed. */ + /** + * Return storage type consumed. + * + * @param type storage type + * @return type consumed + */ public long getTypeConsumed(StorageType type) { return (typeConsumed != null) ? typeConsumed[type.ordinal()] : 0L; } - /** Return true if any storage type quota has been set. */ + /** + * Return true if any storage type quota has been set. + * + * @return if any storage type quota has been set true, not false + * */ public boolean isTypeQuotaSet() { if (typeQuota != null) { for (StorageType t : StorageType.getTypesSupportingQuota()) { @@ -169,7 +201,12 @@ public boolean isTypeQuotaSet() { return false; } - /** Return true if any storage type consumption information is available. */ + /** + * Return true if any storage type consumption information is available. 
+ * + * @return if any storage type consumption information + * is available, not false + */ public boolean isTypeConsumedAvailable() { if (typeConsumed != null) { for (StorageType t : StorageType.getTypesSupportingQuota()) { @@ -271,11 +308,14 @@ public String toString(boolean hOption) { return toString(hOption, false, null); } - /** Return the string representation of the object in the output format. + /** + * Return the string representation of the object in the output format. * if hOption is false file sizes are returned in bytes * if hOption is true file sizes are returned in human readable * * @param hOption a flag indicating if human readable output if to be used + * @param tOption type option + * @param types storage types * @return the string representation of the object */ public String toString(boolean hOption, @@ -328,7 +368,7 @@ protected String getTypesQuotaUsage(boolean hOption, /** * return the header of with the StorageTypes. * - * @param storageTypes + * @param storageTypes storage types * @return storage header string */ public static String getStorageTypeHeader(List storageTypes) { From 89b7883173f186a553e8714659ef2a7a46af4e1c Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Thu, 12 May 2022 23:37:43 -0700 Subject: [PATCH 29/53] HADOOP-18229. Fix some java doc compilation errors. CreateFlag.java warning: no @param for flag DelegationTokenRenewer.java warning: no @return Shell.java warning: no @throws for java.io.IOException,no @return etc. DF.java no @throws for java.io.IOException FSBuilder.java warning: no @param for key,warning: no @return,warning: no @param for key etc. --- .../java/org/apache/hadoop/fs/CreateFlag.java | 2 + .../main/java/org/apache/hadoop/fs/DF.java | 10 +- .../hadoop/fs/DelegationTokenRenewer.java | 26 ++++- .../java/org/apache/hadoop/fs/FSBuilder.java | 51 ++++++++-- .../java/org/apache/hadoop/util/Shell.java | 98 ++++++++++++++++--- 5 files changed, 160 insertions(+), 27 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java index 71993713ad2eb..b197d43d8c792 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java @@ -189,6 +189,8 @@ public static void validate(Object path, boolean pathExists, /** * Validate the CreateFlag for the append operation. The flag must contain * APPEND, and cannot contain OVERWRITE. + * + * @param flag enum set flag */ public static void validateForAppend(EnumSet flag) { validate(flag); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java index da4636b2c0fbe..3c06e97401341 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java @@ -65,7 +65,10 @@ public String getDirPath() { return dirPath; } - /** @return a string indicating which filesystem volume we're checking. */ + /** + * @return a string indicating which filesystem volume we're checking. + * @throws IOException raised on errors performing I/O. 
+ */ public String getFilesystem() throws IOException { if (Shell.WINDOWS) { this.filesystem = dirFile.getCanonicalPath().substring(0, 2); @@ -100,7 +103,10 @@ public int getPercentUsed() { return (int) (used * 100.0 / cap); } - /** @return the filesystem mount point for the indicated volume */ + /** + * @return the filesystem mount point for the indicated volume + * @throws IOException raised on errors performing I/O. + */ public String getMount() throws IOException { // Abort early if specified path does not exist if (!dirFile.exists()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java index 33905dcbb77fd..6244797119602 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java @@ -47,7 +47,11 @@ public interface Renewable { /** @return the renew token. */ public Token getRenewToken(); - /** Set delegation token. */ + /** + * Set delegation token. + * @param generic type T + * @param token token + */ public void setDelegationToken(Token token); } @@ -172,7 +176,11 @@ public String toString() { /** Queue to maintain the RenewActions to be processed by the {@link #run()} */ private volatile DelayQueue> queue = new DelayQueue>(); - /** For testing purposes */ + /** + * For testing purposes. + * + * @return renew queue length + */ @VisibleForTesting protected int getRenewQueueLength() { return queue.size(); @@ -211,7 +219,13 @@ static synchronized void reset() { } } - /** Add a renew action to the queue. */ + /** + * Add a renew action to the queue. + * + * @param generic type T + * @param fs file system + * @return renew action + * */ @SuppressWarnings("static-access") public RenewAction addRenewAction(final T fs) { synchronized (this) { @@ -230,8 +244,10 @@ public RenewAction addRenewAction(final T /** * Remove the associated renew action from the queue - * - * @throws IOException + * + * @param generic type T + * @param fs file system + * @throws IOException raised on errors performing I/O. */ public void removeRenewAction( final T fs) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java index a4c7254cfeb3c..fe72e117903ba 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java @@ -37,12 +37,17 @@ public interface FSBuilder> { /** * Set optional Builder parameter. + * @param key key + * @param value value + * @return generic type B */ B opt(@Nonnull String key, @Nonnull String value); /** * Set optional boolean parameter for the Builder. - * + * @param key key + * @param value value + * @return generic type B * @see #opt(String, String) */ B opt(@Nonnull String key, boolean value); @@ -50,6 +55,9 @@ public interface FSBuilder> { /** * Set optional int parameter for the Builder. * + * @param key key + * @param value value + * @return generic type B * @see #opt(String, String) */ B opt(@Nonnull String key, int value); @@ -57,6 +65,9 @@ public interface FSBuilder> { /** * Set optional float parameter for the Builder. 
* + * @param key key + * @param value value + * @return generic type B * @see #opt(String, String) */ B opt(@Nonnull String key, float value); @@ -64,6 +75,9 @@ public interface FSBuilder> { /** * Set optional long parameter for the Builder. * + * @param key key + * @param value value + * @return generic type B * @see #opt(String, String) */ B opt(@Nonnull String key, long value); @@ -71,13 +85,18 @@ public interface FSBuilder> { /** * Set optional double parameter for the Builder. * + * @param key key + * @param value value + * @return generic type B * @see #opt(String, String) */ B opt(@Nonnull String key, double value); /** * Set an array of string values as optional parameter for the Builder. - * + * @param key key + * @param values values + * @return generic type B * @see #opt(String, String) */ B opt(@Nonnull String key, @Nonnull String... values); @@ -87,47 +106,64 @@ public interface FSBuilder> { * * If the option is not supported or unavailable, * the client should expect {@link #build()} throws IllegalArgumentException. + * + * @param key key + * @param value value + * @return generic type B */ B must(@Nonnull String key, @Nonnull String value); /** * Set mandatory boolean option. * + * @param key key + * @param value value + * @return generic type B * @see #must(String, String) */ B must(@Nonnull String key, boolean value); /** * Set mandatory int option. - * + * @param key key + * @param value value + * @return generic type B * @see #must(String, String) */ B must(@Nonnull String key, int value); /** * Set mandatory float option. - * + * @param key key + * @param value value + * @return generic type B * @see #must(String, String) */ B must(@Nonnull String key, float value); /** * Set mandatory long option. - * + * @param key key + * @param value value + * @return generic type B * @see #must(String, String) */ B must(@Nonnull String key, long value); /** * Set mandatory double option. - * + * @param key key + * @param value value + * @return generic type B * @see #must(String, String) */ B must(@Nonnull String key, double value); /** * Set a string array as mandatory option. - * + * @param key key + * @param values values + * @return generic type B * @see #must(String, String) */ B must(@Nonnull String key, @Nonnull String... values); @@ -139,6 +175,7 @@ public interface FSBuilder> { * @throws UnsupportedOperationException if the filesystem does not support * the specific operation. * @throws IOException on filesystem IO errors. + * @return generic type S */ S build() throws IllegalArgumentException, UnsupportedOperationException, IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java index 084e2b8f5e3b6..b72ce63f5d06f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java @@ -122,6 +122,7 @@ public static boolean isJavaVersionAtLeast(int version) { * delimiters, no extra count will be added for delimiters. * * @param commands command parts, including any space delimiters + * @throws IOException raised on errors performing I/O. 
*/ public static void checkWindowsCommandLineLength(String...commands) throws IOException { @@ -205,7 +206,11 @@ private static OSType getOSType() { public static final boolean PPC_64 = System.getProperties().getProperty("os.arch").contains("ppc64"); - /** a Unix command to get the current user's groups list. */ + /** + * a Unix command to get the current user's groups list. + * + * @return group command array + */ public static String[] getGroupsCommand() { return (WINDOWS)? new String[]{"cmd", "/c", "groups"} : new String[]{"groups"}; @@ -216,6 +221,9 @@ public static String[] getGroupsCommand() { * If the OS is not WINDOWS, the command will get the user's primary group * first and finally get the groups list which includes the primary group. * i.e. the user's primary group will be included twice. + * + * @param user user + * @return groups for user command */ public static String[] getGroupsForUserCommand(final String user) { //'groups username' command return is inconsistent across different unixes @@ -235,6 +243,9 @@ public static String[] getGroupsForUserCommand(final String user) { * first and finally get the groups list which includes the primary group. * i.e. the user's primary group will be included twice. * This command does not support Windows and will only return group names. + * + * @param user user + * @return groups id for user command */ public static String[] getGroupsIDForUserCommand(final String user) { //'groups username' command return is inconsistent across different unixes @@ -248,19 +259,34 @@ public static String[] getGroupsIDForUserCommand(final String user) { } } - /** A command to get a given netgroup's user list. */ + /** + * A command to get a given netgroup's user list. + * + * @param netgroup net group + * @return users for net group command + */ public static String[] getUsersForNetgroupCommand(final String netgroup) { //'groups username' command return is non-consistent across different unixes return new String[] {"getent", "netgroup", netgroup}; } - /** Return a command to get permission information. */ + /** + * Return a command to get permission information. + * + * @return permission command + */ public static String[] getGetPermissionCommand() { return (WINDOWS) ? new String[] { getWinUtilsPath(), "ls", "-F" } : new String[] { "ls", "-ld" }; } - /** Return a command to set permission. */ + /** + * Return a command to set permission. + * + * @param perm permission + * @param recursive recursive + * @return set permission command + */ public static String[] getSetPermissionCommand(String perm, boolean recursive) { if (recursive) { return (WINDOWS) ? @@ -290,21 +316,37 @@ public static String[] getSetPermissionCommand(String perm, return cmdWithFile; } - /** Return a command to set owner. */ + /** + * Return a command to set owner. + * + * @param owner owner + * @return set owner command + */ public static String[] getSetOwnerCommand(String owner) { return (WINDOWS) ? new String[] { getWinUtilsPath(), "chown", "\"" + owner + "\"" } : new String[] { "chown", owner }; } - /** Return a command to create symbolic links. */ + /** + * Return a command to create symbolic links. + * + * @param target target + * @param link link + * @return symlink command + */ public static String[] getSymlinkCommand(String target, String link) { return WINDOWS ? new String[] { getWinUtilsPath(), "symlink", link, target } : new String[] { "ln", "-s", target, link }; } - /** Return a command to read the target of the a symbolic link. 
*/ + /** + * Return a command to read the target of the a symbolic link. + * + * @param link link + * @return read link command + */ public static String[] getReadlinkCommand(String link) { return WINDOWS ? new String[] { getWinUtilsPath(), "readlink", link } @@ -320,7 +362,13 @@ public static String[] getCheckProcessIsAliveCommand(String pid) { return getSignalKillCommand(0, pid); } - /** Return a command to send a signal to a given pid. */ + /** + * Return a command to send a signal to a given pid. + * + * @param code code + * @param pid pid + * @return signal kill command + */ public static String[] getSignalKillCommand(int code, String pid) { // Code == 0 means check alive if (Shell.WINDOWS) { @@ -347,7 +395,11 @@ public static String[] getSignalKillCommand(int code, String pid) { /** Regular expression for environment variables: {@value}. */ public static final String ENV_NAME_REGEX = "[A-Za-z_][A-Za-z0-9_]*"; - /** Return a regular expression string that match environment variables. */ + /** + * Return a regular expression string that match environment variables. + * + * @return environment variable regex + */ public static String getEnvironmentVariableRegex() { return (WINDOWS) ? "%(" + ENV_NAME_REGEX + "?)%" @@ -890,7 +942,11 @@ protected void setWorkingDirectory(File dir) { this.dir = dir; } - /** Check to see if a command needs to be executed and execute if needed. */ + /** + * Check to see if a command needs to be executed and execute if needed. + * + * @throws IOException raised on errors performing I/O. + */ protected void run() throws IOException { if (lastTime + interval > Time.monotonicNow()) { return; @@ -902,7 +958,11 @@ protected void run() throws IOException { runCommand(); } - /** Run the command. */ + /** + * Run the command. + * + * @throws IOException raised on errors performing I/O. + */ private void runCommand() throws IOException { ProcessBuilder builder = new ProcessBuilder(getExecString()); Timer timeOutTimer = null; @@ -1049,10 +1109,19 @@ private static void joinThread(Thread t) { } } - /** return an array containing the command name and its parameters. */ + /** + * return an array containing the command name and its parameters. + * + * @return exec string array + */ protected abstract String[] getExecString(); - /** Parse the execution result */ + /** + * Parse the execution result + * + * @param lines lines + * @throws IOException raised on errors performing I/O. + * */ protected abstract void parseExecResult(BufferedReader lines) throws IOException; @@ -1283,6 +1352,7 @@ private void setTimedOut() { * the Shell interface. * @param cmd shell command to execute. * @return the output of the executed command. + * @throws IOException raised on errors performing I/O. */ public static String execCommand(String ... cmd) throws IOException { return execCommand(null, cmd, 0L); @@ -1367,6 +1437,8 @@ public static void destroyAllShellProcesses() { /** * Static method to return a Set of all Shell objects. + * + * @return all shells set */ public static Set getAllShells() { synchronized (CHILD_SHELLS) { From b30f011c1cb48a08c520a4df36fb435cb8d1a887 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Fri, 13 May 2022 02:07:55 -0700 Subject: [PATCH 30/53] HADOOP-18229. Fix some java doc compilation errors. 
AbstractFSBuilderImpl.java warning: no @return, ChecksumFileSystem.java warning: no @param for file warning: no @return etc, FileEncryptionInfo.java warning: no @param for version,no @param for isdir etc, FileStatus.java warning: no @param for block_replication, warning: no @param for blocksize etc, FileSystem.java warning: no @return, FileSystemLinkResolver.java warning: no description for @throws, FSDataOutputStreamBuilder.java warning: no @return, warning: no @param for key etc, NetUtils.java warning: no @param for conf, warning: no @return etc, SecurityUtil.java warning: no @param for flag, warning: no @param for etc, StorageStatistics.java warning: no @return, warning: no @param for key --- .../apache/hadoop/fs/ChecksumFileSystem.java | 2 +- .../hadoop/fs/FSDataOutputStreamBuilder.java | 31 ++++++++++ .../apache/hadoop/fs/FileEncryptionInfo.java | 4 ++ .../java/org/apache/hadoop/fs/FileStatus.java | 14 +++++ .../java/org/apache/hadoop/fs/FileSystem.java | 1 + .../hadoop/fs/FileSystemLinkResolver.java | 8 +-- .../apache/hadoop/fs/StorageStatistics.java | 5 ++ .../hadoop/fs/impl/AbstractFSBuilderImpl.java | 2 + .../java/org/apache/hadoop/net/NetUtils.java | 61 +++++++++++++------ .../apache/hadoop/security/SecurityUtil.java | 12 ++++ 10 files changed, 115 insertions(+), 25 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java index 578ef03956d9e..6d7afadd78f50 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java @@ -136,7 +136,7 @@ public long getChecksumFileLength(Path file, long fileSize) { } /** - * Return the bytes Per Checksum + * Return the bytes Per Checksum. * * @return bytes per check sum */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java index c96d499d17ba6..6212fa58c2228 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java @@ -123,6 +123,8 @@ public abstract class FSDataOutputStreamBuilder /** * Constructor. + * @param fileSystem file system + * @param p the path */ protected FSDataOutputStreamBuilder(@Nonnull FileSystem fileSystem, @Nonnull Path p) { @@ -149,6 +151,9 @@ protected FsPermission getPermission() { /** * Set permission for the file. + * + * @param perm permission + * @return B Generics Type */ public B permission(@Nonnull final FsPermission perm) { checkNotNull(perm); @@ -162,6 +167,9 @@ protected int getBufferSize() { /** * Set the size of the buffer to be used. + * + * @param bufSize buffer size + * @return Generics Type B */ public B bufferSize(int bufSize) { bufferSize = bufSize; @@ -174,6 +182,9 @@ protected short getReplication() { /** * Set replication factor. + * + * @param replica replica + * @return Generics Type B */ public B replication(short replica) { replication = replica; @@ -186,6 +197,9 @@ protected long getBlockSize() { /** * Set block size. 
+ * + * @param blkSize block size + * @return B Generics Type */ public B blockSize(long blkSize) { blockSize = blkSize; @@ -194,6 +208,8 @@ public B blockSize(long blkSize) { /** * Return true to create the parent directories if they do not exist. + * + * @return if create the parent directories if they do not exist true,not false */ protected boolean isRecursive() { return recursive; @@ -201,6 +217,8 @@ protected boolean isRecursive() { /** * Create the parent directory if they do not exist. + * + * @return B Generics Type */ public B recursive() { recursive = true; @@ -213,6 +231,9 @@ protected Progressable getProgress() { /** * Set the facility of reporting progress. + * + * @param prog progress + * @return B Generics Type */ public B progress(@Nonnull final Progressable prog) { checkNotNull(prog); @@ -226,6 +247,8 @@ protected EnumSet getFlags() { /** * Create an FSDataOutputStream at the specified path. + * + * return Generics Type B */ public B create() { flags.add(CreateFlag.CREATE); @@ -236,6 +259,9 @@ public B create() { * Set to true to overwrite the existing file. * Set it to false, an exception will be thrown when calling {@link #build()} * if the file exists. + * + * @param overwrite overrite + * @return Generics Type B */ public B overwrite(boolean overwrite) { if (overwrite) { @@ -248,6 +274,8 @@ public B overwrite(boolean overwrite) { /** * Append to an existing file (optional operation). + * + * @return Generics Type B */ public B append() { flags.add(CreateFlag.APPEND); @@ -260,6 +288,9 @@ protected ChecksumOpt getChecksumOpt() { /** * Set checksum opt. + * + * @param chksumOpt check sum opt + * @return Generics Type B */ public B checksumOpt(@Nonnull final ChecksumOpt chksumOpt) { checkNotNull(chksumOpt); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java index 9260b9a62c62e..4fd80572e60b4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java @@ -52,6 +52,8 @@ public class FileEncryptionInfo implements Serializable { * @param keyName name of the key used for the encryption zone * @param ezKeyVersionName name of the KeyVersion used to encrypt the * encrypted data encryption key. + * @param version version + * @return file encryption info */ public FileEncryptionInfo(final CipherSuite suite, final CryptoProtocolVersion version, final byte[] edek, @@ -134,6 +136,8 @@ public String toString() { * * NOTE: * Currently this method is used by CLI for backward compatibility. 
+ * + * @return stable string */ public String toStringStable() { StringBuilder builder = new StringBuilder("{") diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java index d7ca8f172f8e2..18e7154a7d613 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java @@ -116,6 +116,17 @@ public FileStatus(long length, boolean isdir, int block_replication, /** * Constructor for file systems on which symbolic links are not supported + * + * @param length length + * @param isdir isdir + * @param block_replication block replication + * @param blocksize block size + * @param modification_time modification time + * @param access_time access_time + * @param permission permission + * @param owner owner + * @param group group + * @param path the path */ public FileStatus(long length, boolean isdir, int block_replication, @@ -182,6 +193,7 @@ public FileStatus(long length, boolean isdir, int block_replication, * Copy constructor. * * @param other FileStatus to copy + * @throws IOException raised on errors performing I/O. */ public FileStatus(FileStatus other) throws IOException { // It's important to call the getters here instead of directly accessing the @@ -375,6 +387,8 @@ protected void setGroup(String group) { /** * @return The contents of the symbolic link. + * + * @throws IOException raised on errors performing I/O. */ public Path getSymlink() throws IOException { if (!isSymlink()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java index 421a0a1c54874..84dc9a01494de 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java @@ -4127,6 +4127,7 @@ public void run() { /** * Get or create the thread-local data associated with the current thread. + * @return statistics data */ public StatisticsData getThreadStatistics() { StatisticsData data = threadData.get(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java index 7eec0eb7cec54..e2011e279a5e6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java @@ -38,8 +38,8 @@ public abstract class FileSystemLinkResolver { * an UnresolvedLinkException if called on an unresolved {@link Path}. * @param p Path on which to perform an operation * @return Generic type returned by operation - * @throws IOException - * @throws UnresolvedLinkException + * @throws IOException raised on errors performing I/O. + * @throws UnresolvedLinkException unresolved link exception */ abstract public T doCall(final Path p) throws IOException, UnresolvedLinkException; @@ -54,7 +54,7 @@ abstract public T doCall(final Path p) throws IOException, * @param p * Resolved Target of path * @return Generic type determined by implementation - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ abstract public T next(final FileSystem fs, final Path p) throws IOException; @@ -66,7 +66,7 @@ abstract public T doCall(final Path p) throws IOException, * @param filesys FileSystem with which to try call * @param path Path with which to try call * @return Generic type determined by implementation - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public T resolve(final FileSystem filesys, final Path path) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java index 2efe4566344ee..1122e5fbf1162 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java @@ -127,6 +127,7 @@ public StorageStatistics(String name) { /** * Get the name of this StorageStatistics object. + * @return name of this StorageStatistics object */ public String getName() { return name; @@ -145,12 +146,15 @@ public String getScheme() { * * The values returned will depend on the type of FileSystem or FileContext * object. The values do not necessarily reflect a snapshot in time. + * + * @return LongStatistic Iterator */ public abstract Iterator getLongStatistics(); /** * Get the value of a statistic. * + * @param key key * @return null if the statistic is not being tracked or is not a * long statistic. The value of the statistic, otherwise. */ @@ -159,6 +163,7 @@ public String getScheme() { /** * Return true if a statistic is being tracked. * + * @param key key * @return True only if the statistic is being tracked. */ public abstract boolean isTracked(String key); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java index 9d3a46d633253..44380904b3f26 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java @@ -340,12 +340,14 @@ public Configuration getOptions() { /** * Get all the keys that are set as mandatory keys. + * @return mandatory keys */ public Set getMandatoryKeys() { return Collections.unmodifiableSet(mandatoryKeys); } /** * Get all the keys that are set as optional keys. + * @return optional keys */ public Set getOptionalKeys() { return Collections.unmodifiableSet(optionalKeys); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java index fead87d7907d7..eef7d7c31964a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java @@ -133,7 +133,8 @@ public static SocketFactory getDefaultSocketFactory(Configuration conf) { * Get the socket factory corresponding to the given proxy URI. If the * given proxy URI corresponds to an absence of configuration parameter, * returns null. If the URI is malformed raises an exception. 
- * + * + * @param conf configuration * @param propValue the property which is the class name of the * SocketFactory to instantiate; assumed non null and non empty. * @return a socket factory as defined in the property value. @@ -151,19 +152,26 @@ public static SocketFactory getSocketFactoryFromProperty( } /** - * Util method to build socket addr from either: + * Util method to build socket addr from either. * {@literal :} * {@literal ://:/} + * + * @param target target + * @return socket addr */ public static InetSocketAddress createSocketAddr(String target) { return createSocketAddr(target, -1); } /** - * Util method to build socket addr from either: + * Util method to build socket addr from either. * {@literal } * {@literal :} * {@literal ://:/} + * + * @param target target + * @param defaultPort default port + * @return socket addr */ public static InetSocketAddress createSocketAddr(String target, int defaultPort) { @@ -183,6 +191,7 @@ public static InetSocketAddress createSocketAddr(String target, * @param configName the name of the configuration from which * target was loaded. This is used in the * exception message in the case that parsing fails. + * @return socket addr */ public static InetSocketAddress createSocketAddr(String target, int defaultPort, @@ -204,6 +213,7 @@ public static InetSocketAddress createSocketAddr(String target, * target was loaded. This is used in the * exception message in the case that parsing fails. * @param useCacheIfPresent Whether use cache when create URI + * @return socket addr */ public static InetSocketAddress createSocketAddr(String target, int defaultPort, @@ -361,8 +371,8 @@ private static String canonicalizeHost(String host) { * daemons, one can set up mappings from those hostnames to "localhost". * {@link NetUtils#getStaticResolution(String)} can be used to query for * the actual hostname. - * @param host - * @param resolvedName + * @param host the hostname or IP use to instantiate the object + * @param resolvedName resolved name */ public static void addStaticResolution(String host, String resolvedName) { synchronized (hostToResolved) { @@ -374,7 +384,7 @@ public static void addStaticResolution(String host, String resolvedName) { * Retrieves the resolved name for the passed host. The resolved name must * have been set earlier using * {@link NetUtils#addStaticResolution(String, String)} - * @param host + * @param host the hostname or IP use to instantiate the object * @return the resolution */ public static String getStaticResolution(String host) { @@ -410,7 +420,7 @@ public static List getAllStaticResolutions() { * the server binds to "0.0.0.0". This returns "hostname:port" of the server, * or "127.0.0.1:port" when the getListenerAddress() returns "0.0.0.0:port". * - * @param server + * @param server server * @return socket address that a client can use to connect to the server. */ public static InetSocketAddress getConnectAddress(Server server) { @@ -438,8 +448,11 @@ public static InetSocketAddress getConnectAddress(InetSocketAddress addr) { /** * Same as getInputStream(socket, socket.getSoTimeout()). - *

- * + *
+ *
+ *
+ * @param socket socket
+ * @throws IOException raised on errors performing I/O.
+ * @return SocketInputWrapper for reading from the socket.
  * @see #getInputStream(Socket, long)
  */
 public static SocketInputWrapper getInputStream(Socket socket)
@@ -462,11 +475,11 @@ public static SocketInputWrapper getInputStream(Socket socket)
  *
  * @see Socket#getChannel()
  *
- * @param socket
+ * @param socket socket
  * @param timeout timeout in milliseconds. zero for waiting as
  *                long as necessary.
  * @return SocketInputWrapper for reading from the socket.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
  */
 public static SocketInputWrapper getInputStream(Socket socket, long timeout)
     throws IOException {
@@ -494,9 +507,9 @@ public static SocketInputWrapper getInputStream(Socket socket, long timeout)
  *
  * @see #getOutputStream(Socket, long)
  *
- * @param socket
+ * @param socket socket
  * @return OutputStream for writing to the socket.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
  */
 public static OutputStream getOutputStream(Socket socket)
   throws IOException {
@@ -516,11 +529,11 @@ public static OutputStream getOutputStream(Socket socket)
  *
  * @see Socket#getChannel()
  *
- * @param socket
+ * @param socket socket
  * @param timeout timeout in milliseconds. This may not always apply. zero
  *        for waiting as long as necessary.
  * @return OutputStream for writing to the socket.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
  */
 public static OutputStream getOutputStream(Socket socket, long timeout)
   throws IOException {
@@ -541,9 +554,10 @@ public static OutputStream getOutputStream(Socket socket, long timeout)
  *
  * @see java.net.Socket#connect(java.net.SocketAddress, int)
  *
- * @param socket
+ * @param socket socket
  * @param address the remote address
  * @param timeout timeout in milliseconds
+ * @throws IOException raised on errors performing I/O.
  */
 public static void connect(Socket socket,
     SocketAddress address,
@@ -555,10 +569,11 @@ public static void connect(Socket socket,
  * Like {@link NetUtils#connect(Socket, SocketAddress, int)} but
  * also takes a local address and port to bind the socket to.
  *
- * @param socket
+ * @param socket socket
  * @param endpoint the remote address
  * @param localAddr the local address to bind the socket to
  * @param timeout timeout in milliseconds
+ * @throws IOException raised on errors performing I/O.
  */
 public static void connect(Socket socket,
     SocketAddress endpoint,
@@ -644,7 +659,7 @@ public static List normalizeHostNames(Collection names) {
  * Performs a sanity check on the list of hostnames/IPs to verify they at least
  * appear to be valid.
  * @param names - List of hostnames/IPs
- * @throws UnknownHostException
+ * @throws UnknownHostException if any of the given names cannot be resolved
  */
 public static void verifyHostnames(String[] names) throws UnknownHostException {
   for (String name: names) {
@@ -735,6 +750,9 @@ public static String getHostname() {
 
 /**
  * Compose a "host:port" string from the address.
+ *
+ * @param addr address
+ * @return the host:port string
  */
 public static String getHostPortString(InetSocketAddress addr) {
   return addr.getHostName() + ":" + addr.getPort();
@@ -969,6 +987,8 @@ private static String quoteHost(final String hostname) {
 }
 
 /**
+ * isValidSubnet. 
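+ * A hedged aside, not part of the original change (the addresses are
+ * invented, and this assumes the method is visible to the caller):
+ *   NetUtils.isValidSubnet("192.168.1.0/24")   // true: CIDR notation
+ *   NetUtils.isValidSubnet("192.168.1.5")      // false: no mask part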
+ * @param subnet subnet * @return true if the given string is a subnet specified * using CIDR notation, false otherwise */ @@ -1004,6 +1024,7 @@ private static void addMatchingAddrs(NetworkInterface nif, * @param returnSubinterfaces * whether to return IPs associated with subinterfaces * @throws IllegalArgumentException if subnet is invalid + * @return ips */ public static List getIPs(String subnet, boolean returnSubinterfaces) { @@ -1083,8 +1104,8 @@ public static Set getFreeSocketPorts(int numOfPorts) { * Return an @{@link InetAddress} to bind to. If bindWildCardAddress is true * than returns null. * - * @param localAddr - * @param bindWildCardAddress + * @param localAddr local addr + * @param bindWildCardAddress bind wildcard address * @return InetAddress */ public static InetAddress bindToLocalAddress(InetAddress localAddr, boolean diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java index c9423490635cb..187ea28632bd5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java @@ -116,6 +116,8 @@ private static void setConfigurationInternal(Configuration conf) { /** * For use only by tests and initialization + * + * @param flag flag */ @InterfaceAudience.Private @VisibleForTesting @@ -487,6 +489,10 @@ public static Text buildTokenService(URI uri) { * Perform the given action as the daemon's login user. If the login * user cannot be determined, this will log a FATAL error and exit * the whole JVM. + * + * @param action action + * @param generic type T + * @return generic type T */ public static T doAsLoginUserOrFatal(PrivilegedAction action) { if (UserGroupInformation.isSecurityEnabled()) { @@ -511,6 +517,7 @@ public static T doAsLoginUserOrFatal(PrivilegedAction action) { * @param action the action to perform * @return the result of the action * @throws IOException in the event of error + * @return generic type T */ public static T doAsLoginUser(PrivilegedExceptionAction action) throws IOException { @@ -522,6 +529,7 @@ public static T doAsLoginUser(PrivilegedExceptionAction action) * InterruptedException is thrown, it is converted to an IOException. * * @param action the action to perform + * @param generic type T * @return the result of the action * @throws IOException in the event of error */ @@ -745,9 +753,13 @@ public static boolean isPrivilegedPort(final int port) { /** * Utility method to fetch ZK auth info from the configuration. + * + * @param conf configuration + * @param configKey config key * @throws java.io.IOException if the Zookeeper ACLs configuration file * cannot be read * @throws ZKUtil.BadAuthFormatException if the auth format is invalid + * @return ZKAuthInfo List */ public static List getZKAuthInfos(Configuration conf, String configKey) throws IOException { From 402620856ebe2ceb1674e005a0133aea6dab6d97 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Fri, 13 May 2022 03:18:13 -0700 Subject: [PATCH 31/53] HADOOP-18229. Fix some java doc compilation errors. Command.java warning: no description for @throws, warning: no @return etc. FileUtil.java warning: no @param for useLocal, warning: no description for @param etc. FSInputChecker.java warning: no @return, warning: no @param for checksum etc. 
FSLinkResolver.java warning: no description for @throws FSOutputSummer.java warning: no description for @param, warning: no description for @throws etc FsShell.java warning: no description for @throws FsStatus.java warning: no @return,warning: no @param for remaining GlobalStorageStatistics.java warning: no @return GlobExpander.java warning: no description for @throws HardLink.java warning: no @throws for java.io.IOException, warning: no @return etc. HarFileSystem.java warning: no description for @throws --- .../org/apache/hadoop/fs/FSInputChecker.java | 10 ++- .../org/apache/hadoop/fs/FSLinkResolver.java | 2 +- .../org/apache/hadoop/fs/FSOutputSummer.java | 8 ++ .../java/org/apache/hadoop/fs/FileUtil.java | 86 +++++++++++++++---- .../java/org/apache/hadoop/fs/FsShell.java | 2 +- .../java/org/apache/hadoop/fs/FsStatus.java | 23 ++++- .../org/apache/hadoop/fs/GlobExpander.java | 4 +- .../hadoop/fs/GlobalStorageStatistics.java | 2 + .../org/apache/hadoop/fs/HarFileSystem.java | 8 +- .../java/org/apache/hadoop/fs/HardLink.java | 6 ++ .../org/apache/hadoop/fs/shell/Command.java | 24 ++++-- 11 files changed, 139 insertions(+), 36 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java index de66eab713ab6..459114e89cc85 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java @@ -82,6 +82,7 @@ protected FSInputChecker( Path file, int numOfRetries) { * @param sum the type of Checksum engine * @param chunkSize maximun chunk size * @param checksumSize the number byte of each checksum + * @param verifyChecksum verify check sum */ protected FSInputChecker( Path file, int numOfRetries, boolean verifyChecksum, Checksum sum, int chunkSize, int checksumSize ) { @@ -118,6 +119,7 @@ protected FSInputChecker( Path file, int numOfRetries, * @param len maximum number of bytes to read * @param checksum the data buffer into which to write checksums * @return number of bytes read + * @throws IOException raised on errors performing I/O. */ abstract protected int readChunk(long pos, byte[] buf, int offset, int len, byte[] checksum) throws IOException; @@ -129,7 +131,10 @@ abstract protected int readChunk(long pos, byte[] buf, int offset, int len, */ abstract protected long getChunkPosition(long pos); - /** Return true if there is a need for checksum verification */ + /** + * Return true if there is a need for checksum verification + * @return if there is a need for checksum verification true, not false + */ protected synchronized boolean needChecksum() { return verifyChecksum && sum != null; } @@ -357,6 +362,9 @@ private void verifySums(final byte b[], final int off, int read) * Convert a checksum byte array to a long * This is deprecated since 0.22 since it is no longer in use * by this class. 
+ * + * @param checksum check sum + * @return crc */ @Deprecated static public long checksum2long(byte[] checksum) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java index ffe4b34ca5fdb..f85cf7a858152 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java @@ -74,7 +74,7 @@ abstract public T next(final AbstractFileSystem fs, final Path p) * @param fc FileContext used to access file systems. * @param path The path to resolve symlinks on. * @return Generic type determined by the implementation of next. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public T resolve(final FileContext fc, final Path path) throws IOException { int count = 0; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java index 6de026b9d17c0..cf819fe1e1e41 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java @@ -186,6 +186,8 @@ public void flush() throws IOException { /** * Return the number of valid bytes currently in the buffer. + * + * @return buffer data size */ protected synchronized int getBufferedDataSize() { return count; @@ -227,6 +229,10 @@ private void writeChecksumChunks(byte b[], int off, int len) /** * Converts a checksum integer value to a byte stream + * + * @param sum check sum + * @param checksumSize check sum size + * @return byte stream */ static public byte[] convertToByteStream(Checksum sum, int checksumSize) { return int2byte((int)sum.getValue(), new byte[checksumSize]); @@ -245,6 +251,8 @@ static byte[] int2byte(int integer, byte[] bytes) { /** * Resets existing buffer with a new one of the specified size. + * + * @param size size */ protected synchronized void setChecksumBufSize(int size) { this.buf = new byte[size]; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java index 7400ca36daa5c..96f5298c366d1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java @@ -162,6 +162,8 @@ public static void fullyDeleteOnExit(final File file) { * (3) If dir is a normal file, it is deleted. * (4) If dir is a normal directory, then dir and all its contents recursively * are deleted. + * @param dir dir + * @return fully delete status */ public static boolean fullyDelete(final File dir) { return fullyDelete(dir, false); @@ -257,6 +259,9 @@ private static boolean deleteImpl(final File f, final boolean doLog) { * we return false, the directory may be partially-deleted. * If dir is a symlink to a directory, all the contents of the actual * directory pointed to by dir will be deleted. 
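+ * A hedged sketch of typical usage (the path is invented for
+ * illustration):
+ *   File cacheDir = new File("/tmp/example-cache");
+ *   boolean emptied = FileUtil.fullyDeleteContents(cacheDir);
+ * which removes everything under cacheDir but keeps the directory itself.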
+ * + * @param dir dir + * @return fullyDeleteContents Status */ public static boolean fullyDeleteContents(final File dir) { return fullyDeleteContents(dir, false); @@ -267,8 +272,11 @@ public static boolean fullyDeleteContents(final File dir) { * we return false, the directory may be partially-deleted. * If dir is a symlink to a directory, all the contents of the actual * directory pointed to by dir will be deleted. + * + * @param dir dir * @param tryGrantPermissions if 'true', try grant +rwx permissions to this * and all the underlying directories before trying to delete their contents. + * @return fully delete contents status */ public static boolean fullyDeleteContents(final File dir, final boolean tryGrantPermissions) { if (tryGrantPermissions) { @@ -311,7 +319,7 @@ public static boolean fullyDeleteContents(final File dir, final boolean tryGrant * * @param fs {@link FileSystem} on which the path is present * @param dir directory to recursively delete - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated Use {@link FileSystem#delete(Path, boolean)} */ @Deprecated @@ -343,7 +351,17 @@ private static void checkDependencies(FileSystem srcFS, } } - /** Copy files between FileSystems. */ + /** + * Copy files between FileSystems. + * @param srcFS src fs + * @param src src + * @param dstFS dst fs + * @param dst dst + * @param deleteSource delete source + * @param conf configuration + * @return if copy success true, not false + * @throws IOException raised on errors performing I/O. + */ public static boolean copy(FileSystem srcFS, Path src, FileSystem dstFS, Path dst, boolean deleteSource, @@ -391,7 +409,19 @@ public static boolean copy(FileSystem srcFS, Path[] srcs, return returnVal; } - /** Copy files between FileSystems. */ + /** + * Copy files between FileSystems. + * + * @param srcFS srcFs + * @param src src + * @param dstFS dstFs + * @param dst dst + * @param deleteSource delete source + * @param overwrite overwrite + * @param conf configuration + * @throws IOException raised on errors performing I/O. + * + */ public static boolean copy(FileSystem srcFS, Path src, FileSystem dstFS, Path dst, boolean deleteSource, @@ -403,17 +433,17 @@ public static boolean copy(FileSystem srcFS, Path src, /** * Copy a file/directory tree within/between filesystems. - *

+ *
* returns true if the operation succeeded. When deleteSource is true, * this means "after the copy, delete(source) returned true" * If the destination is a directory, and mkdirs (dest) fails, * the operation will return false rather than raise any exception. - *

+ *
* The overwrite flag is about overwriting files; it has no effect about * handing an attempt to copy a file atop a directory (expect an IOException), * or a directory over a path which contains a file (mkdir will fail, so * "false"). - *

+ *
* The operation is recursive, and the deleteSource operation takes place * as each subdirectory is copied. Therefore, if an operation fails partway * through, the source tree may be partially deleted. @@ -471,7 +501,18 @@ public static boolean copy(FileSystem srcFS, FileStatus srcStatus, } - /** Copy local files to a FileSystem. */ + /** + * Copy local files to a FileSystem. + * + * @param src src + * @param dstFS dstFs + * @param dst dst + * @param deleteSource delete source + * @param conf configuration + * @throws IOException raised on errors performing I/O. + * @return true if the operation succeeded. + * + */ public static boolean copy(File src, FileSystem dstFS, Path dst, boolean deleteSource, @@ -514,7 +555,17 @@ public static boolean copy(File src, } } - /** Copy FileSystem files to local files. */ + /** + * Copy FileSystem files to local files. + * + * @param srcFS srcFs + * @param src src + * @param dst dst + * @param deleteSource delete source + * @param conf configuration + * @throws IOException raised on errors performing I/O. + * @return true if the operation succeeded. + */ public static boolean copy(FileSystem srcFS, Path src, File dst, boolean deleteSource, Configuration conf) throws IOException { @@ -958,7 +1009,7 @@ public static void unTar(InputStream inputStream, File untarDir, * * @param inFile The tar file as input. * @param untarDir The untar directory where to untar the tar file. - * @throws IOException + * @throws IOException an exception occurred */ public static void unTar(File inFile, File untarDir) throws IOException { if (!untarDir.mkdirs()) { @@ -1169,6 +1220,7 @@ public static class HardLink extends org.apache.hadoop.fs.HardLink { * @param target the target for symlink * @param linkname the symlink * @return 0 on success + * @throws IOException raised on errors performing I/O. */ public static int symLink(String target, String linkname) throws IOException{ @@ -1230,8 +1282,8 @@ public static int symLink(String target, String linkname) throws IOException{ * @param filename the name of the file to change * @param perm the permission string * @return the exit code from the command - * @throws IOException - * @throws InterruptedException + * @throws IOException raised on errors performing I/O. + * @throws InterruptedException command interrupted */ public static int chmod(String filename, String perm ) throws IOException, InterruptedException { @@ -1245,7 +1297,7 @@ public static int chmod(String filename, String perm * @param perm permission string * @param recursive true, if permissions should be changed recursively * @return the exit code from the command. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static int chmod(String filename, String perm, boolean recursive) throws IOException { @@ -1271,7 +1323,7 @@ public static int chmod(String filename, String perm, boolean recursive) * @param file the file to change * @param username the new user owner name * @param groupname the new group owner name - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static void setOwner(File file, String username, String groupname) throws IOException { @@ -1288,7 +1340,7 @@ public static void setOwner(File file, String username, * Platform independent implementation for {@link File#setReadable(boolean)} * File#setReadable does not work as expected on Windows. 
* @param f input file - * @param readable + * @param readable readable * @return true on success, false otherwise */ public static boolean setReadable(File f, boolean readable) { @@ -1309,7 +1361,7 @@ public static boolean setReadable(File f, boolean readable) { * Platform independent implementation for {@link File#setWritable(boolean)} * File#setWritable does not work as expected on Windows. * @param f input file - * @param writable + * @param writable writable * @return true on success, false otherwise */ public static boolean setWritable(File f, boolean writable) { @@ -1333,7 +1385,7 @@ public static boolean setWritable(File f, boolean writable) { * behavior on Windows as on Unix platforms. Creating, deleting or renaming * a file within that folder will still succeed on Windows. * @param f input file - * @param executable + * @param executable executable * @return true on success, false otherwise */ public static boolean setExecutable(File f, boolean executable) { @@ -1412,7 +1464,7 @@ public static boolean canExecute(File f) { * of forking if group == other. * @param f the file to change * @param permission the new permissions - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static void setPermission(File f, FsPermission permission ) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java index 7275b70227f99..73258661ec191 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java @@ -130,7 +130,7 @@ public Path getCurrentTrashDir() throws IOException { * Returns the current trash location for the path specified * @param path to be deleted * @return path to the trash - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Path getCurrentTrashDir(Path path) throws IOException { return getTrash().getCurrentTrashDir(path); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java index d392c7d765d72..dafb66f2edcba 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java @@ -35,24 +35,39 @@ public class FsStatus implements Writable { private long used; private long remaining; - /** Construct a FsStatus object, using the specified statistics */ + /** + * Construct a FsStatus object, using the specified statistics + * + * @param capacity capacity + * @param used used + * @param remaining remaining + */ public FsStatus(long capacity, long used, long remaining) { this.capacity = capacity; this.used = used; this.remaining = remaining; } - /** Return the capacity in bytes of the file system */ + /** + * Return the capacity in bytes of the file system. + * @return capacity + */ public long getCapacity() { return capacity; } - /** Return the number of bytes used on the file system */ + /** + * Return the number of bytes used on the file system. + * @return used + */ public long getUsed() { return used; } - /** Return the number of remaining bytes on the file system */ + /** + * Return the number of remaining bytes on the file system. 
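+ * For illustration, the three getters are usually read together
+ * (a sketch; fs stands for an already-initialized FileSystem):
+ *   FsStatus s = fs.getStatus();
+ *   long freeBytes = s.getRemaining();   // bytes still available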
+ * @return remaining + */ public long getRemaining() { return remaining; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java index cb430ed3f6251..efa10b1805895 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java @@ -56,9 +56,9 @@ public StringWithOffset(String string, int offset) { * {a,b}/{c/\d} - {a,b}/c/d * * - * @param filePattern + * @param filePattern file pattern * @return expanded file patterns - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static List expand(String filePattern) throws IOException { List fullyExpanded = new ArrayList(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java index 30ce07a422e6e..9509f8436ab03 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java @@ -104,6 +104,8 @@ public synchronized void reset() { /** * Get an iterator that we can use to iterate throw all the global storage * statistics objects. + * + * @return StorageStatistics Iterator */ synchronized public Iterator iterator() { Entry first = map.firstEntry(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java index 7e12d0a11e953..1d64b0bcbe921 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java @@ -463,7 +463,7 @@ static BlockLocation[] fixBlockLocations(BlockLocation[] locations, * @param start the start of the desired range in the contained file * @param len the length of the desired range * @return block locations for this segment of file - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public BlockLocation[] getFileBlockLocations(FileStatus file, long start, @@ -525,7 +525,7 @@ private void fileStatusesInIndex(HarStatus parent, List statuses) * Combine the status stored in the index and the underlying status. * @param h status stored in the index * @return the combined file status - * @throws IOException + * @throws IOException raised on errors performing I/O. */ private FileStatus toFileStatus(HarStatus h) throws IOException { final Path p = h.isDir ? archivePath : new Path(archivePath, h.partName); @@ -635,7 +635,7 @@ public long getModificationTime() { * while creating a hadoop archive. * @param f the path in har filesystem * @return filestatus. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public FileStatus getFileStatus(Path f) throws IOException { @@ -1104,7 +1104,7 @@ public void setDropBehind(Boolean dropBehind) throws IOException { * @param start the start position in the part file * @param length the length of valid data in the part file * @param bufsize the buffer size - * @throws IOException + * @throws IOException raised on errors performing I/O. 
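Looping back to the GlobExpander hunk in this same patch: expand() performs the curly-brace expansion its javadoc describes. A sketch (the pattern is invented, and the class is annotated for internal use, so this is illustrative rather than supported API):

    List<String> paths = GlobExpander.expand("/user/{alice,bob}/logs");
    // expands to: /user/alice/logs and /user/bob/logs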
*/ public HarFSDataInputStream(FileSystem fs, Path p, long start, long length, int bufsize) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java index 855fbb04e59b4..6cc8f9ef1b5f7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java @@ -156,6 +156,7 @@ String[] linkCount(File file) throws IOException { * Creates a hardlink. * @param file - existing source file * @param linkName - desired target link file + * @throws IOException raised on errors performing I/O. */ public static void createHardLink(File file, File linkName) throws IOException { @@ -177,6 +178,7 @@ public static void createHardLink(File file, File linkName) * @param fileBaseNames - list of path-less file names, as returned by * parentDir.list() * @param linkDir - where the hardlinks should be put. It must already exist. + * @throws IOException raised on errors performing I/O. */ public static void createHardLinkMult(File parentDir, String[] fileBaseNames, File linkDir) throws IOException { @@ -204,6 +206,10 @@ public static void createHardLinkMult(File parentDir, String[] fileBaseNames, /** * Retrieves the number of links to the specified file. + * + * @param fileName file name + * @throws IOException raised on errors performing I/O. + * @return link count */ public static int getLinkCount(File fileName) throws IOException { if (fileName == null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java index 0bdb47730a929..d2728374f5011 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java @@ -77,7 +77,11 @@ protected Command() { err = System.err; } - /** Constructor */ + /** + * Constructor. + * + * @param conf configuration + */ protected Command(Configuration conf) { super(conf); } @@ -109,7 +113,7 @@ protected int getDepth() { * Execute the command on the input path data. Commands can override to make * use of the resolved filesystem. * @param pathData The input path with resolved filesystem - * @throws IOException + * @throws IOException raised on errors performing I/O. */ protected void run(PathData pathData) throws IOException { run(pathData.path); @@ -136,11 +140,19 @@ public int runAll() { return exitCode; } - /** sets the command factory for later use */ + /** + * sets the command factory for later use + * @param factory factory + */ public void setCommandFactory(CommandFactory factory) { this.commandFactory = factory; } - /** retrieves the command factory */ + + /** + * retrieves the command factory + * + * @return command factory + */ protected CommandFactory getCommandFactory() { return this.commandFactory; } @@ -201,7 +213,7 @@ public int run(String...argv) { * IllegalArgumentException is thrown, the FsShell object will print the * short usage of the command. * @param args the command line arguments - * @throws IOException + * @throws IOException raised on errors performing I/O. 
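Alongside the Command changes, this patch documents the HardLink helpers; a short sketch with invented paths:

    File source = new File("/tmp/source.dat");   // assumed to exist already
    File link = new File("/tmp/link.dat");
    HardLink.createHardLink(source, link);
    int count = HardLink.getLinkCount(link);     // typically 2 at this point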
*/ protected void processOptions(LinkedList args) throws IOException {} @@ -211,7 +223,7 @@ protected void processOptions(LinkedList args) throws IOException {} * {@link #expandArguments(LinkedList)} and pass the resulting list to * {@link #processArguments(LinkedList)} * @param args the list of argument strings - * @throws IOException + * @throws IOException raised on errors performing I/O. */ protected void processRawArguments(LinkedList args) throws IOException { From 9a6ae6981bed034be8b96b13b52555328734ed57 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Fri, 13 May 2022 04:57:22 -0700 Subject: [PATCH 32/53] HADOOP-18229. Fix some java doc compilation errors. LocalDirAllocator.java no description for @param, LocalFileSystem.java warning: no @return, warning: no @param for path, MD5MD5CRC32CastagnoliFileChecksum.java warning: no @param for md5,warning: no @param for crcPerBlock, MD5MD5CRC32FileChecksum.java warning: no @return, MD5MD5CRC32GzipFileChecksum.java warning: no @param for md5, MultipartUploaderBuilder.java warning: no @return, Options.java no description for @throws, RawLocalFileSystem.java warning: no description for @throws, Stat.java warning: no description for @return, Trash.java warning: no @throws for java.io.IOException, warning: no @return --- .../apache/hadoop/fs/LocalDirAllocator.java | 31 +++++++------- .../org/apache/hadoop/fs/LocalFileSystem.java | 6 ++- .../fs/MD5MD5CRC32CastagnoliFileChecksum.java | 8 +++- .../hadoop/fs/MD5MD5CRC32FileChecksum.java | 13 +++++- .../fs/MD5MD5CRC32GzipFileChecksum.java | 8 +++- .../hadoop/fs/MultipartUploaderBuilder.java | 15 +++++++ .../java/org/apache/hadoop/fs/Options.java | 7 +++- .../apache/hadoop/fs/RawLocalFileSystem.java | 7 +++- .../main/java/org/apache/hadoop/fs/Stat.java | 4 +- .../main/java/org/apache/hadoop/fs/Trash.java | 41 ++++++++++++++++--- 10 files changed, 111 insertions(+), 29 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java index 5f266a7b82555..e9a011154a449 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java @@ -78,8 +78,9 @@ public class LocalDirAllocator { private final DiskValidator diskValidator; - /**Create an allocator object - * @param contextCfgItemName + /** + * Create an allocator object. + * @param contextCfgItemName contextCfgItemName */ public LocalDirAllocator(String contextCfgItemName) { this.contextCfgItemName = contextCfgItemName; @@ -123,7 +124,7 @@ private AllocatorPerContext obtainContext(String contextCfgItemName) { * available disk) * @param conf the Configuration object * @return the complete path to the file on a local disk - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Path getLocalPathForWrite(String pathStr, Configuration conf) throws IOException { @@ -139,7 +140,7 @@ public Path getLocalPathForWrite(String pathStr, * @param size the size of the file that is going to be written * @param conf the Configuration object * @return the complete path to the file on a local disk - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ public Path getLocalPathForWrite(String pathStr, long size, Configuration conf) throws IOException { @@ -156,7 +157,7 @@ public Path getLocalPathForWrite(String pathStr, long size, * @param conf the Configuration object * @param checkWrite ensure that the path is writable * @return the complete path to the file on a local disk - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Path getLocalPathForWrite(String pathStr, long size, Configuration conf, @@ -171,7 +172,7 @@ public Path getLocalPathForWrite(String pathStr, long size, * @param pathStr the requested file (this will be searched) * @param conf the Configuration object * @return the complete path to the file on a local disk - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Path getLocalPathToRead(String pathStr, Configuration conf) throws IOException { @@ -184,7 +185,7 @@ public Path getLocalPathToRead(String pathStr, * @param pathStr the path underneath the roots * @param conf the configuration to look up the roots in * @return all of the paths that exist under any of the roots - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Iterable getAllLocalPathsToRead(String pathStr, Configuration conf @@ -205,7 +206,7 @@ public Iterable getAllLocalPathsToRead(String pathStr, * @param size the size of the file that is going to be written * @param conf the Configuration object * @return a unique temporary file - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public File createTmpFileForWrite(String pathStr, long size, Configuration conf) throws IOException { @@ -213,8 +214,9 @@ public File createTmpFileForWrite(String pathStr, long size, return context.createTmpFileForWrite(pathStr, size, conf); } - /** Method to check whether a context is valid - * @param contextCfgItemName + /** + * Method to check whether a context is valid. + * @param contextCfgItemName contextCfgItemName * @return true/false */ public static boolean isContextValid(String contextCfgItemName) { @@ -224,9 +226,9 @@ public static boolean isContextValid(String contextCfgItemName) { } /** - * Removes the context from the context config items + * Removes the context from the context config items. * - * @param contextCfgItemName + * @param contextCfgItemName contextCfgItemName */ @Deprecated @InterfaceAudience.LimitedPrivate({"MapReduce"}) @@ -236,8 +238,9 @@ public static void removeContext(String contextCfgItemName) { } } - /** We search through all the configured dirs for the file's existence - * and return true when we find + /** + * We search through all the configured dirs for the file's existence + * and return true when we find. * @param pathStr the requested file (this will be searched) * @param conf the Configuration object * @return true if files exist. false otherwise diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java index c41190a7b360b..38cefaa663155 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java @@ -71,7 +71,11 @@ public LocalFileSystem(FileSystem rawLocalFileSystem) { super(rawLocalFileSystem); } - /** Convert a path to a File. */ + /** + * Convert a path to a File. 
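The LocalDirAllocator methods documented above are usually driven as below; the context key, relative path and size are illustrative only:

    // The context key names a conf property whose value lists local dirs.
    LocalDirAllocator alloc =
        new LocalDirAllocator("mapreduce.cluster.local.dir");
    Configuration conf = new Configuration();
    // Ask for a writable location with roughly 1 MB of headroom.
    Path scratch =
        alloc.getLocalPathForWrite("scratch/part-00000", 1 << 20, conf);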
+ * @param path the path + * @return file + */ public File pathToFile(Path path) { return ((RawLocalFileSystem)fs).pathToFile(path); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java index 5a4a6a97cc4f7..bff8eed214c56 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java @@ -28,7 +28,13 @@ public MD5MD5CRC32CastagnoliFileChecksum() { this(0, 0, null); } - /** Create a MD5FileChecksum */ + /** + * Create a MD5FileChecksum. + * + * @param bytesPerCRC bytesPerCRC + * @param crcPerBlock crcPerBlock + * @param md5 md5 + */ public MD5MD5CRC32CastagnoliFileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) { super(bytesPerCRC, crcPerBlock, md5); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java index 3fdb7e982621c..604f71c8f7c1e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java @@ -44,7 +44,13 @@ public MD5MD5CRC32FileChecksum() { this(0, 0, null); } - /** Create a MD5FileChecksum */ + /** + * Create a MD5FileChecksum. + * + * @param bytesPerCRC bytesPerCRC + * @param crcPerBlock crcPerBlock + * @param md5 md5 + */ public MD5MD5CRC32FileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) { this.bytesPerCRC = bytesPerCRC; this.crcPerBlock = crcPerBlock; @@ -76,7 +82,10 @@ public byte[] getBytes() { return WritableUtils.toByteArray(this); } - /** returns the CRC type */ + /** + * returns the CRC type. + * @return data check sum type + */ public DataChecksum.Type getCrcType() { // default to the one that is understood by all releases. return DataChecksum.Type.CRC32; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java index 5164d0200d28d..a23baf4e11b43 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java @@ -28,7 +28,13 @@ public MD5MD5CRC32GzipFileChecksum() { this(0, 0, null); } - /** Create a MD5FileChecksum */ + /** + * Create a MD5FileChecksum. 
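These MD5-of-CRC checksum classes mostly surface through FileSystem#getFileChecksum; a hedged sketch (the path is invented, and which concrete checksum type comes back depends on the underlying store):

    FileChecksum sum = fs.getFileChecksum(new Path("/user/example/file.dat"));
    if (sum != null) {   // null when the store exposes no checksum
      System.out.println(sum.getAlgorithmName() + " : "
          + StringUtils.byteToHexString(sum.getBytes()));
    }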
+ * + * @param bytesPerCRC bytesPerCRC + * @param crcPerBlock crcPerBlock + * @param md5 md5 + */ public MD5MD5CRC32GzipFileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) { super(bytesPerCRC, crcPerBlock, md5); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java index 44d9fb7a65218..7c24f6695d0e8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java @@ -33,26 +33,35 @@ public interface MultipartUploaderBuilder Date: Fri, 13 May 2022 05:57:39 -0700 Subject: [PATCH 33/53] HADOOP-18229. Fix some java doc compilation errors. AbstractMultipartUploader.java warning: no description for @throws etc, AclStatus.java warning: no @return etc, FsAction.java warning: no @return etc, FsCreateModes.java warning: no @return etc, FsPermission.java warning: no @return etc, FutureDataInputStreamBuilderImpl.java warning: no @return etc, FutureIOSupport.java warning: no @return etc, MultipartUploaderBuilderImpl.java warning: no @param for p etc, TrashPolicy.java warning: no @throws for java.io.IOException etc, XAttrCodec.java warning: no @throws for java.io.IOException etc. --- .../org/apache/hadoop/fs/TrashPolicy.java | 15 ++++- .../java/org/apache/hadoop/fs/XAttrCodec.java | 6 +- .../fs/impl/AbstractMultipartUploader.java | 2 +- .../FutureDataInputStreamBuilderImpl.java | 5 ++ .../hadoop/fs/impl/FutureIOSupport.java | 2 + .../fs/impl/MultipartUploaderBuilderImpl.java | 3 + .../hadoop/fs/permission/AclStatus.java | 4 +- .../apache/hadoop/fs/permission/FsAction.java | 20 +++++-- .../hadoop/fs/permission/FsCreateModes.java | 9 ++- .../hadoop/fs/permission/FsPermission.java | 57 ++++++++++++++++--- 10 files changed, 104 insertions(+), 19 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java index 64fb81be99ee3..b8b67c9e8b2ff 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java @@ -60,27 +60,33 @@ public void initialize(Configuration conf, FileSystem fs) { /** * Returns whether the Trash Policy is enabled for this filesystem. + * + * @return if isEnabled true,not false. */ public abstract boolean isEnabled(); /** * Move a file or directory to the current trash directory. * @return false if the item is already in the trash or trash is disabled + * @throws IOException raised on errors performing I/O. */ public abstract boolean moveToTrash(Path path) throws IOException; /** - * Create a trash checkpoint. + * Create a trash checkpoint. + * @throws IOException raised on errors performing I/O. */ public abstract void createCheckpoint() throws IOException; /** * Delete old trash checkpoint(s). + * @throws IOException raised on errors performing I/O. */ public abstract void deleteCheckpoint() throws IOException; /** * Delete all checkpoints immediately, ie empty trash. + * @throws IOException raised on errors performing I/O. 
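The TrashPolicy lifecycle documented here is normally reached through the Trash facade rather than called directly; a sketch (the path is invented, and whether anything actually moves depends on fs.trash.interval in the configuration):

    Configuration conf = new Configuration();
    Trash trash = new Trash(fs, conf);   // fs: an initialized FileSystem
    if (!trash.moveToTrash(new Path("/user/example/old-data"))) {
      System.out.println("trash disabled, or the path is already trashed");
    }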
*/ public abstract void deleteCheckpointsImmediately() throws IOException; @@ -94,6 +100,8 @@ public void initialize(Configuration conf, FileSystem fs) { * TrashPolicy#getCurrentTrashDir(Path path). * It returns the trash location correctly for the path specified no matter * the path is in encryption zone or not. + * + * @return the path */ public abstract Path getCurrentTrashDir(); @@ -102,7 +110,7 @@ public void initialize(Configuration conf, FileSystem fs) { * Policy * @param path path to be deleted * @return current trash directory for the path to be deleted - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Path getCurrentTrashDir(Path path) throws IOException { throw new UnsupportedOperationException(); @@ -111,6 +119,9 @@ public Path getCurrentTrashDir(Path path) throws IOException { /** * Return a {@link Runnable} that periodically empties the trash of all * users, intended to be run by the superuser. + * + * @throws IOException raised on errors performing I/O. + * @return Runnable */ public abstract Runnable getEmptier() throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java index 3d65275e673d6..de1a5322e1ee0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java @@ -67,7 +67,7 @@ public enum XAttrCodec { * the given string is treated as text. * @param value string representation of the value. * @return byte[] the value - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static byte[] decodeValue(String value) throws IOException { byte[] result = null; @@ -102,9 +102,9 @@ public static byte[] decodeValue(String value) throws IOException { * while strings encoded as hexadecimal and base64 are prefixed with * 0x and 0s, respectively. * @param value byte[] value - * @param encoding + * @param encoding encoding * @return String string representation of value - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static String encodeValue(byte[] value, XAttrCodec encoding) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java index 416924e18d87c..f9ae9f55cc17f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java @@ -127,7 +127,7 @@ protected void checkPutArguments(Path filePath, * {@inheritDoc}. * @param path path to abort uploads under. * @return a future to -1. - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ public CompletableFuture abortUploadsUnderPath(Path path) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java index 70e39de7388c3..cbeb06a60c0eb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java @@ -126,6 +126,9 @@ protected int getBufferSize() { /** * Set the size of the buffer to be used. + * + * @param bufSize buffer size + * @return FutureDataInputStreamBuilder */ public FutureDataInputStreamBuilder bufferSize(int bufSize) { bufferSize = bufSize; @@ -137,6 +140,8 @@ public FutureDataInputStreamBuilder bufferSize(int bufSize) { * This must be used after the constructor has been invoked to create * the actual builder: it allows for subclasses to do things after * construction. + * + * @return FutureDataInputStreamBuilder */ public FutureDataInputStreamBuilder builder() { return getThisBuilder(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java index f47e5f4fbfbd6..6b1fea7351a26 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java @@ -75,6 +75,8 @@ public static T awaitFuture(final Future future) * See {@link FutureIO#awaitFuture(Future, long, TimeUnit)}. * @param future future to evaluate * @param type of the result. + * @param timeout timeout + * @param unit unit * @return the result, if all went well. * @throws InterruptedIOException future was interrupted * @throws IOException if something went wrong diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java index 5584e647849f5..c704cb116c5d6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java @@ -88,6 +88,9 @@ protected MultipartUploaderBuilderImpl(@Nonnull FileContext fc, /** * Constructor. + * + * @param fileSystem fileSystem + * @param p path */ protected MultipartUploaderBuilderImpl(@Nonnull FileSystem fileSystem, @Nonnull Path p) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java index 25b9ba659048a..ab273b305543b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java @@ -185,7 +185,8 @@ public Builder stickyBit(boolean stickyBit) { /** * Sets the permission for the file. 
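+ * For illustration, the surrounding builder is assembled like this
+ * (user, group and permission values invented):
+ *   AclStatus acl = new AclStatus.Builder().owner("alice").group("staff")
+ *       .stickyBit(false).setPermission(FsPermission.getDirDefault())
+ *       .build();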
- * @param permission
+ * @param permission permission
+ * @return Builder
  */
 public Builder setPermission(FsPermission permission) {
   this.permission = permission;
@@ -224,6 +225,7 @@ private AclStatus(String owner, String group, boolean stickyBit,
 /**
  * Get the effective permission for the AclEntry
  * @param entry AclEntry to get the effective action
+ * @return FsAction
  */
 public FsAction getEffectivePermission(AclEntry entry) {
   return getEffectivePermission(entry, permission);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
index 97dcf816c16ad..7e328d2c31450 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
@@ -48,7 +48,8 @@ private FsAction(String s) {
 
 /**
  * Return true if this action implies that action.
- * @param that
+ * @param that FsAction that
+ * @return true if this action implies that action, otherwise false
  */
 public boolean implies(FsAction that) {
   if (that != null) {
@@ -57,15 +58,26 @@ public boolean implies(FsAction that) {
   return false;
 }
 
- /** AND operation. */
+ /**
+  * AND operation.
+  * @param that FsAction that
+  * @return FsAction
+  */
 public FsAction and(FsAction that) {
   return vals[ordinal() & that.ordinal()];
 }
- /** OR operation. */
+ /**
+  * OR operation.
+  * @param that FsAction that
+  * @return FsAction
+  */
 public FsAction or(FsAction that) {
   return vals[ordinal() | that.ordinal()];
 }
- /** NOT operation. */
+ /**
+  * NOT operation.
+  * @return FsAction
+  */
 public FsAction not() {
   return vals[7 - ordinal()];
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
index 2bd6f1f3b9126..fd67607c1723e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
@@ -35,7 +35,10 @@ public final class FsCreateModes extends FsPermission {
 /**
  * Create from unmasked mode and umask.
  *
- * If the mode is already an FsCreateModes object, return it.
+ * @param mode mode
+ * @param umask umask
+ * @return If the mode is already
+ * an FsCreateModes object, return it.
  */
 public static FsPermission applyUMask(FsPermission mode,
     FsPermission umask) {
@@ -47,6 +50,10 @@ public static FsPermission applyUMask(FsPermission mode,
 /**
  * Create from masked and unmasked modes. 
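Since the FsAction operations above are easy to misread, a compact sketch of what the table lookups produce:

    FsAction rw = FsAction.READ_WRITE;           // rw-
    FsAction r  = rw.and(FsAction.READ);         // r--
    FsAction wx = r.not();                       // -wx
    boolean implied = FsAction.ALL.implies(r);   // true: rwx covers r--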
+ *
+ * @param masked masked
+ * @param unmasked unmasked
+ * @return FsCreateModes
  */
 public static FsCreateModes create(FsPermission masked,
     FsPermission unmasked) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
index 51c113af2702e..c416e5f41a2e6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
@@ -56,7 +56,11 @@ public class FsPermission implements Writable, Serializable,
   /** Maximum acceptable length of a permission string to parse */
   public static final int MAX_PERMISSION_LENGTH = 10;
 
-  /** Create an immutable {@link FsPermission} object. */
+  /**
+   * Create an immutable {@link FsPermission} object.
+   * @param permission permission
+   * @return FsPermission
+   */
   public static FsPermission createImmutable(short permission) {
     return new ImmutableFsPermission(permission);
   }
@@ -85,7 +89,7 @@ public FsPermission(FsAction u, FsAction g, FsAction o, boolean sb) {
 
   /**
    * Construct by the given mode.
-   * @param mode
+   * @param mode mode
    * @see #toShort()
    */
   public FsPermission(short mode) { fromShort(mode); }
@@ -145,13 +149,22 @@ public FsPermission(String mode) {
     this(new RawParser(mode).getPermission());
   }
 
-  /** Return user {@link FsAction}. */
+  /**
+   * Return user {@link FsAction}.
+   * @return FsAction useraction
+   */
   public FsAction getUserAction() {return useraction;}
 
-  /** Return group {@link FsAction}. */
+  /**
+   * Return group {@link FsAction}.
+   * @return FsAction groupaction
+   */
   public FsAction getGroupAction() {return groupaction;}
 
-  /** Return other {@link FsAction}. */
+  /**
+   * Return other {@link FsAction}.
+   * @return FsAction otheraction
+   */
   public FsAction getOtherAction() {return otheraction;}
 
   private void set(FsAction u, FsAction g, FsAction o, boolean sb) {
@@ -180,6 +193,7 @@ public void readFields(DataInput in) throws IOException {
 
   /**
    * Get masked permission if exists.
+   * @return masked
    */
   public FsPermission getMasked() {
     return null;
@@ -187,6 +201,7 @@ public FsPermission getMasked() {
 
   /**
    * Get unmasked permission if exists.
+   * @return unmasked
   */
  public FsPermission getUnmasked() {
    return null;
@@ -194,6 +209,10 @@ public FsPermission getUnmasked() {

  /**
   * Create and initialize a {@link FsPermission} from {@link DataInput}.
+   *
+   * @param in data input
+   * @throws IOException raised on errors performing I/O.
+   * @return FsPermission
   */
  public static FsPermission read(DataInput in) throws IOException {
    FsPermission p = new FsPermission();
@@ -203,6 +222,7 @@ public static FsPermission read(DataInput in) throws IOException {

  /**
   * Encode the object to a short.
+   * @return the object encoded as a short
   */
  public short toShort() {
    int s = (stickyBit ? 1 << 9 : 0)     |
@@ -301,6 +321,9 @@ public FsPermission applyUMask(FsPermission umask) {
   * '-' sets bits in the mask.
   *
   * Octal umask, the specified bits are set in the file mode creation mask.
+   *
+   * @param conf configuration
+   * @return FsPermission UMask
   */
  public static FsPermission getUMask(Configuration conf) {
    int umask = DEFAULT_UMASK;
@@ -346,7 +369,11 @@ public boolean getAclBit() {
  }

  /**
-   * Returns true if the file is encrypted or directory is in an encryption zone
+   * Returns true if the file is encrypted or directory is in an encryption zone. 
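As a worked example of the mode/umask interplay these FsPermission docs keep referring to (the umask value is configuration-dependent; 022 is only the common default):

    Configuration conf = new Configuration();
    FsPermission umask = FsPermission.getUMask(conf);        // e.g. 022
    FsPermission requested = new FsPermission((short) 0777);
    FsPermission effective = requested.applyUMask(umask);    // e.g. 755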
+ * + * @return if the file is encrypted or directory + * is in an encryption zone true, not false + * * @deprecated Get encryption bit from the * {@link org.apache.hadoop.fs.FileStatus} object. */ @@ -357,6 +384,9 @@ public boolean getEncryptedBit() { /** * Returns true if the file or directory is erasure coded. + * + * @return if the file or directory is + * erasure coded true, not false * @deprecated Get ec bit from the {@link org.apache.hadoop.fs.FileStatus} * object. */ @@ -365,7 +395,11 @@ public boolean getErasureCodedBit() { return false; } - /** Set the user file creation mask (umask) */ + /** + * Set the user file creation mask (umask) + * @param conf configuration + * @param umask umask + */ public static void setUMask(Configuration conf, FsPermission umask) { conf.set(UMASK_LABEL, String.format("%1$03o", umask.toShort())); } @@ -379,6 +413,8 @@ public static void setUMask(Configuration conf, FsPermission umask) { * {@link FsPermission#getDirDefault()} for directory, and use * {@link FsPermission#getFileDefault()} for file. * This method is kept for compatibility. + * + * @return Default FsPermission */ public static FsPermission getDefault() { return new FsPermission((short)00777); @@ -386,6 +422,8 @@ public static FsPermission getDefault() { /** * Get the default permission for directory. + * + * @return DirDefault FsPermission */ public static FsPermission getDirDefault() { return new FsPermission((short)00777); @@ -393,6 +431,8 @@ public static FsPermission getDirDefault() { /** * Get the default permission for file. + * + * @return FileDefault FsPermission */ public static FsPermission getFileDefault() { return new FsPermission((short)00666); @@ -400,6 +440,8 @@ public static FsPermission getFileDefault() { /** * Get the default permission for cache pools. + * + * @return CachePoolDefault FsPermission */ public static FsPermission getCachePoolDefault() { return new FsPermission((short)00755); @@ -408,6 +450,7 @@ public static FsPermission getCachePoolDefault() { /** * Create a FsPermission from a Unix symbolic permission string * @param unixSymbolicPermission e.g. "-rw-rw-rw-" + * @return FsPermission */ public static FsPermission valueOf(String unixSymbolicPermission) { if (unixSymbolicPermission == null) { From e354b8387fea2106dc50aa0c4a146b69954d007b Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Fri, 13 May 2022 08:18:54 -0700 Subject: [PATCH 34/53] HADOOP-18229. Fix some java doc compilation errors. 
AccessControlList.java: warning: no @return
AuditingFunctions.java: warning: no @return
AvroSerialization.java: warning: no @return; no @param for clazz, etc.
BlockDecompressorStream.java: warning: no description for @throws
Bzip2Compressor.java: warning: no @param for conf
Bzip2Decompressor.java: warning: no @param for conserveMemory
CBZip2InputStream.java: warning: no description for @throws
CBZip2OutputStream.java: warning: no @return
Command.java: warning: no description for @throws
CompressionCodec.java: warning: no description for @throws
CompressionInputStream.java: warning: no description for @throws
CompressionOutputStream.java: warning: no @throws for java.io.IOException
Compressor.java: warning: no @throws for java.io.IOException
CredentialProvider.java: warning: no description for @throws
CredentialShell.java: warning: no description for @param
Decompressor.java: warning: no description for @throws
DecompressorStream.java: warning: no description for @throws
ErasureCoder.java: warning: no @return
ErasureCodingStep.java: warning: no description for @param
ErasureDecoder.java: warning: no description for @param
ErasureDecodingStep.java: warning: no description for @param
ErasureEncoder.java: warning: no description for @param
ErasureEncodingStep.java: warning: no description for @param
FSInputChecker.java: warning: no @return
FsStatus.java: warning: no @return
HHErasureCodingStep.java: warning: no description for @param
HHXORErasureDecodingStep.java: warning: no description for @param
HHXORErasureEncodingStep.java: warning: no description for @param
ImpersonationProvider.java: warning: no description for @throws
Key.java: warning: no description for @param
NetUtils.java: warning: no description for @param
ProxyUsers.java: warning: no description for @throws
RefreshAuthorizationPolicyProtocol.java: warning: no description for @throws
Serialization.java: warning: no @param for c
Shell.java: warning: no @return
SplittableCompressionCodec.java: warning: no @throws for java.io.IOException
XORErasureDecoder.java: warning: no description for @param
---
 .../java/org/apache/hadoop/fs/FSInputChecker.java | 2 +-
 .../main/java/org/apache/hadoop/fs/FsStatus.java | 2 +-
 .../java/org/apache/hadoop/fs/shell/Command.java | 4 ++--
 .../hadoop/fs/store/audit/AuditingFunctions.java | 2 ++
 .../hadoop/io/compress/BlockDecompressorStream.java | 4 ++--
 .../apache/hadoop/io/compress/CompressionCodec.java | 8 ++++----
 .../hadoop/io/compress/CompressionInputStream.java | 8 +++++---
 .../hadoop/io/compress/CompressionOutputStream.java | 4 +++-
 .../org/apache/hadoop/io/compress/Compressor.java | 3 +++
 .../org/apache/hadoop/io/compress/Decompressor.java | 2 +-
 .../hadoop/io/compress/DecompressorStream.java | 2 +-
 .../io/compress/SplittableCompressionCodec.java | 2 ++
 .../hadoop/io/compress/bzip2/Bzip2Compressor.java | 1 +
 .../hadoop/io/compress/bzip2/Bzip2Decompressor.java | 2 ++
 .../hadoop/io/compress/bzip2/CBZip2InputStream.java | 8 +++++---
 .../io/compress/bzip2/CBZip2OutputStream.java | 5 +++++
 .../hadoop/io/erasurecode/coder/ErasureCoder.java | 1 +
 .../io/erasurecode/coder/ErasureCodingStep.java | 5 +++--
 .../hadoop/io/erasurecode/coder/ErasureDecoder.java | 10 +++++-----
 .../io/erasurecode/coder/ErasureDecodingStep.java | 6 +++---
 .../hadoop/io/erasurecode/coder/ErasureEncoder.java | 2 +-
 .../io/erasurecode/coder/ErasureEncodingStep.java | 6 +++---
 .../io/erasurecode/coder/HHErasureCodingStep.java | 4 ++--
 .../erasurecode/coder/HHXORErasureDecodingStep.java | 4 ++--
.../erasurecode/coder/HHXORErasureEncodingStep.java | 4 ++--
 .../io/erasurecode/coder/XORErasureDecoder.java | 2 +-
 .../apache/hadoop/io/serializer/Serialization.java | 5 +++++
 .../io/serializer/avro/AvroSerialization.java | 6 ++++++
 .../main/java/org/apache/hadoop/net/NetUtils.java | 1 -
 .../hadoop/security/alias/CredentialProvider.java | 13 +++++++------
 .../hadoop/security/alias/CredentialShell.java | 6 +++---
 .../security/authorize/AccessControlList.java | 1 +
 .../security/authorize/ImpersonationProvider.java | 4 ++--
 .../hadoop/security/authorize/ProxyUsers.java | 12 ++++++------
 .../RefreshAuthorizationPolicyProtocol.java | 2 +-
 .../src/main/java/org/apache/hadoop/util/Shell.java | 2 +-
 .../main/java/org/apache/hadoop/util/bloom/Key.java | 4 ++--
 37 files changed, 97 insertions(+), 62 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
index 459114e89cc85..e367f3666c6eb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
@@ -132,7 +132,7 @@ abstract protected int readChunk(long pos, byte[] buf, int offset, int len,
 abstract protected long getChunkPosition(long pos);
 
 /**
- * Return true if there is a need for checksum verification
+ * Return true if there is a need for checksum verification.
 * @return true if there is a need for checksum verification, false otherwise
 */
 protected synchronized boolean needChecksum() {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
index dafb66f2edcba..fd69dc7615bbd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
@@ -36,7 +36,7 @@ public class FsStatus implements Writable {
 private long remaining;
 
 /**
- * Construct a FsStatus object, using the specified statistics
+ * Construct a FsStatus object, using the specified statistics.
 *
 * @param capacity the total capacity in bytes
 * @param used the number of bytes used
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
index d2728374f5011..038fa43069b97 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
@@ -141,7 +141,7 @@ public int runAll() {
 }
 
 /**
- * sets the command factory for later use
+ * Sets the command factory for later use.
 * @param factory the command factory
 */
 public void setCommandFactory(CommandFactory factory) {
 }
 
 /**
- * retrieves the command factory
+ * Retrieves the command factory.
*
 * @return the command factory
 */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
index acc82766be190..21ae5606f101a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
@@ -86,6 +86,8 @@ public static InvocationRaisingIOE withinAuditSpan(
 * activates and deactivates the span around the inner one.
 * @param auditSpan audit span
 * @param operation operation
+ * @param <T> type of input
+ * @param <R> type of result
 * @return a new invocation.
 */
 public static <T, R> FunctionRaisingIOE<T, R> withinAuditSpan(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
index de457d192400d..ff10332ea8d5a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
@@ -43,7 +43,7 @@ public class BlockDecompressorStream extends DecompressorStream {
 * @param in input stream
 * @param decompressor decompressor to use
 * @param bufferSize size of buffer
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 */
 public BlockDecompressorStream(InputStream in, Decompressor decompressor,
 int bufferSize) throws IOException {
@@ -55,7 +55,7 @@ public BlockDecompressorStream(InputStream in, Decompressor decompressor,
 *
 * @param in input stream
 * @param decompressor decompressor to use
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 */
 public BlockDecompressorStream(InputStream in, Decompressor decompressor) throws IOException {
 super(in, decompressor);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java
index f37aadfcb57f3..d064e1b914707 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java
@@ -39,7 +39,7 @@ public interface CompressionCodec {
 *
 * @param out the location for the final output stream
 * @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 */
 CompressionOutputStream createOutputStream(OutputStream out)
 throws IOException;
@@ -51,7 +51,7 @@ CompressionOutputStream createOutputStream(OutputStream out)
 * @param out the location for the final output stream
 * @param compressor compressor to use
 * @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
 CompressionOutputStream createOutputStream(OutputStream out,
 Compressor compressor)
@@ -77,7 +77,7 @@ CompressionOutputStream createOutputStream(OutputStream out,
 *
 * @param in the stream to read compressed bytes from
 * @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 */
 CompressionInputStream createInputStream(InputStream in) throws IOException;
 
@@ -88,7 +88,7 @@ CompressionOutputStream createOutputStream(OutputStream out,
 * @param in the stream to read compressed bytes from
 * @param decompressor decompressor to use
 * @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 */
 CompressionInputStream createInputStream(InputStream in,
 Decompressor decompressor)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
index 55bb132e9c87c..017c89a327a5c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
@@ -53,7 +53,7 @@ public abstract class CompressionInputStream extends InputStream
 * the decompressed bytes from the given stream.
 *
 * @param in The input stream of compressed bytes.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 */
 protected CompressionInputStream(InputStream in) throws IOException {
 if (!(in instanceof Seekable) || !(in instanceof PositionedReadable)) {
@@ -93,6 +93,8 @@ public IOStatistics getIOStatistics() {
 /**
 * Reset the decompressor to its initial state and discard any buffered data,
 * as the underlying stream may have been repositioned.
+ *
+ * @throws IOException raised on errors performing I/O.
 */
 public abstract void resetState() throws IOException;
 
@@ -118,7 +120,7 @@ public long getPos() throws IOException {
 /**
 * This method is currently not supported.
 *
- * @throws UnsupportedOperationException
+ * @throws UnsupportedOperationException if the operation is not supported.
 */
 
 @Override
@@ -129,7 +131,7 @@ public void seek(long pos) throws UnsupportedOperationException {
 /**
 * This method is currently not supported.
 *
- * @throws UnsupportedOperationException
+ * @throws UnsupportedOperationException if the operation is not supported.
 */
 @Override
 public boolean seekToNewSource(long targetPos) throws UnsupportedOperationException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
index 2a11ace81702c..aebcffa8117c7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
@@ -48,7 +48,7 @@ public abstract class CompressionOutputStream extends OutputStream
 /**
 * Create a compression output stream that writes
 * the compressed bytes to the given stream.
- * @param out
+ * @param out the output stream
 */
 protected CompressionOutputStream(OutputStream out) {
 this.out = out;
 }
@@ -89,12 +89,14 @@ public void flush() throws IOException {
 /**
 * Finishes writing compressed data to the output stream
 * without closing the underlying stream.
+ * @throws IOException raised on errors performing I/O.
 */
 public abstract void finish() throws IOException;
 
 /**
 * Reset the compression to the initial state.
 * Does not reset the underlying stream.
+ * @throws IOException raised on errors performing I/O.
 */
 public abstract void resetState() throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java
index 537837faa0a51..8ecd3eb4cb7ef 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java
@@ -65,11 +65,13 @@ public interface Compressor {
 /**
 * Return number of uncompressed bytes input so far.
+ * @return bytes read
 */
 public long getBytesRead();
 
 /**
 * Return number of compressed bytes output so far.
+ * @return bytes written
 */
 public long getBytesWritten();
 
@@ -97,6 +99,7 @@ public interface Compressor {
 * @param off Start offset of the data
 * @param len Size of the buffer
 * @return The actual number of bytes of compressed data.
+ * @throws IOException raised on errors performing I/O.
 */
 public int compress(byte[] b, int off, int len) throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Decompressor.java
index e9558fab87325..30d4e29892eb7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Decompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Decompressor.java
@@ -96,7 +96,7 @@ public interface Decompressor {
 * @param off Start offset of the data
 * @param len Size of the buffer
 * @return The actual number of bytes of uncompressed data.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 */
 public int decompress(byte[] b, int off, int len) throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
index 570d15c7f16aa..745105ce873af 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
@@ -80,7 +80,7 @@ public DecompressorStream(InputStream in, Decompressor decompressor)
 * Allow derived classes to directly set the underlying stream.
 *
 * @param in Underlying input stream.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
 protected DecompressorStream(InputStream in) throws IOException {
 super(in);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java
index a756f47260c33..e5a04f2e7e88f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java
@@ -61,6 +61,7 @@ public enum READ_MODE {CONTINUOUS, BYBLOCK};
 * Create a stream as dictated by the readMode. This method is used when
 * the codec wants the ability to work with the underlying stream positions.
 *
+ * @param decompressor the decompressor to use
 * @param seekableIn The seekable input stream (seeks in compressed data)
 * @param start The start offset into the compressed stream. May be changed
 * by the underlying codec.
@@ -69,6 +70,7 @@ public enum READ_MODE {CONTINUOUS, BYBLOCK};
 * @param readMode Controls whether stream position is reported continuously
 * from the compressed stream or only at block boundaries.
 * @return a stream to read uncompressed bytes from
+ * @throws IOException raised on errors performing I/O.
 */
 SplitCompressionInputStream createInputStream(InputStream seekableIn,
 Decompressor decompressor, long start, long end, READ_MODE readMode)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
index 5713c56df6aef..f753af1b9fee9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
@@ -67,6 +67,7 @@ public Bzip2Compressor() {
 /**
 * Creates a new compressor, taking settings from the configuration.
+ * @param conf configuration
 */
 public Bzip2Compressor(Configuration conf) {
 this(Bzip2Factory.getBlockSize(conf),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
index 72ba97630e206..afa963e6b5da9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
@@ -50,6 +50,8 @@ public class Bzip2Decompressor implements Decompressor {
 /**
 * Creates a new decompressor.
+ * @param conserveMemory whether to conserve memory
+ * @param directBufferSize size of the direct buffer
 */
 public Bzip2Decompressor(boolean conserveMemory, int directBufferSize) {
 this.conserveMemory = conserveMemory;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
index 8426d25c2950e..0c1f1802025b0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
@@ -152,6 +152,7 @@ public enum STATE {
 * This method reports the processed bytes so far. Please note that this
 * statistic is only updated on block boundaries and only when the stream is
 * initiated in BYBLOCK mode.
+ * @return the processed byte count
 */
 public long getProcessedByteCount() {
 return reportedBytesReadFromCompressedStream;
@@ -209,7 +210,7 @@ private int readAByte(InputStream inStream) throws IOException {
 * @param marker The bit pattern to be found in the stream
 * @param markerBitLength No of bits in the marker
 * @return true if the marker was found otherwise false
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 * @throws IllegalArgumentException if markerBitLength is greater than 63
 */
 public boolean skipToNextMarker(long marker, int markerBitLength)
@@ -282,7 +283,7 @@ private void makeMaps() {
 * the magic. Thus callers have to skip the first two bytes. Otherwise this
 * constructor will throw an exception.
 *

- *
+ * @param in the input stream
+ * @param readMode the read mode, CONTINUOUS or BYBLOCK
 * @throws IOException
 * if the stream content is malformed or an I/O error occurs.
 * @throws NullPointerException
@@ -326,7 +328,7 @@ private CBZip2InputStream(final InputStream in, READ_MODE readMode, boolean skip
 *
 * @return long Number of bytes between current stream position and the
 * next BZip2 block start marker.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 *
 */
 public static long numberOfBytesTillNextMarker(final InputStream in) throws IOException{
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
index 794f9d02229ec..dde473fd1feb6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
@@ -210,6 +210,10 @@ public class CBZip2OutputStream extends OutputStream implements BZip2Constants {
 /**
 * This method is accessible by subclasses for historical purposes. If you
 * don't know what it does then you don't need it.
+ * @param len the code lengths
+ * @param freq the frequencies
+ * @param alphaSize the alphabet size
+ * @param maxLen the maximum code length
 */
 protected static void hbMakeCodeLengths(char[] len, int[] freq,
 int alphaSize, int maxLen) {
@@ -846,6 +850,7 @@ private void endCompression() throws IOException {
 /**
 * Returns the blocksize parameter specified at construction time.
+ * @return the blocksize
 */
 public final int getBlockSize() {
 return this.blockSize100k;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java
index b5ae1f1e399a8..4ce8b9c663d7e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java
@@ -68,6 +68,7 @@ public interface ErasureCoder extends Configurable {
 *
 * @param blockGroup the erasure coding block group containing all necessary
 * information for codec calculation
+ * @return the erasure coding step
 */
 ErasureCodingStep calculateCoding(ECBlockGroup blockGroup);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java
index fb89d99a0540c..87b528c1a484f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java
@@ -46,8 +46,9 @@ public interface ErasureCodingStep {
 /**
 * Perform encoding or decoding given the input chunks, and generated results
 * will be written to the output chunks.
- * @param inputChunks
- * @param outputChunks
+ * @param inputChunks the input chunks
+ * @param outputChunks the output chunks
+ * @throws IOException raised on errors performing I/O.
*/
 void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks)
 throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java
index 004fd38df1141..5a06ee883bb7d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java
@@ -65,7 +65,7 @@ public ErasureCoderOptions getOptions() {
 /**
 * We have all the data blocks and parity blocks as input blocks for
 * recovering by default. It's codec specific.
- * @param blockGroup
+ * @param blockGroup the block group
 * @return input blocks
 */
 protected ECBlock[] getInputBlocks(ECBlockGroup blockGroup) {
@@ -83,7 +83,7 @@ protected ECBlock[] getInputBlocks(ECBlockGroup blockGroup) {
 /**
 * Which blocks were erased?
- * @param blockGroup
+ * @param blockGroup the block group
 * @return output blocks to recover
 */
 protected ECBlock[] getOutputBlocks(ECBlockGroup blockGroup) {
@@ -118,7 +118,7 @@ public void release() {
 /**
 * Perform decoding against a block group.
- * @param blockGroup
+ * @param blockGroup the block group
 * @return decoding step for caller to do the real work
 */
 protected abstract ErasureCodingStep prepareDecodingStep(
@@ -126,7 +126,7 @@ protected abstract ErasureCodingStep prepareDecodingStep(
 /**
 * Get the number of erased blocks in the block group.
- * @param blockGroup
+ * @param blockGroup the block group
 * @return number of erased blocks
 */
 protected int getNumErasedBlocks(ECBlockGroup blockGroup) {
@@ -153,7 +153,7 @@ protected static int getNumErasedBlocks(ECBlock[] inputBlocks) {
 /**
 * Get indexes of erased blocks from inputBlocks
- * @param inputBlocks
+ * @param inputBlocks the input blocks
 * @return indexes of erased blocks from inputBlocks
 */
 protected int[] getErasedIndexes(ECBlock[] inputBlocks) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java
index 24f55470e1727..c5927c9cdf59c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java
@@ -37,10 +37,10 @@ public class ErasureDecodingStep implements ErasureCodingStep {
 /**
 * The constructor with all the necessary info.
- * @param inputBlocks
+ * @param inputBlocks the input blocks
 * @param erasedIndexes the indexes of erased blocks in inputBlocks array
- * @param outputBlocks
- * @param rawDecoder
+ * @param outputBlocks the output blocks
+ * @param rawDecoder the underlying raw decoder
 */
 public ErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes,
 ECBlock[] outputBlocks,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java
index 81666e9b76b2e..3102d6f2c9533 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java
@@ -83,7 +83,7 @@ public void release() {
 /**
 * Perform encoding against a block group.
- * @param blockGroup
+ * @param blockGroup the block group
 * @return encoding step for caller to do the real work
 */
 protected abstract ErasureCodingStep prepareEncodingStep(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java
index 5fc5c7a09928f..854017c6bad59 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java
@@ -36,9 +36,9 @@ public class ErasureEncodingStep implements ErasureCodingStep {
 /**
 * The constructor with all the necessary info.
- * @param inputBlocks
- * @param outputBlocks
- * @param rawEncoder
+ * @param inputBlocks the input blocks
+ * @param outputBlocks the output blocks
+ * @param rawEncoder the underlying raw encoder
 */
 public ErasureEncodingStep(ECBlock[] inputBlocks, ECBlock[] outputBlocks,
 RawErasureEncoder rawEncoder) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java
index a0f5b72710679..a568499ec897d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java
@@ -38,8 +38,8 @@ public abstract class HHErasureCodingStep
 /**
 * Constructor given input blocks and output blocks.
 *
- * @param inputBlocks
- * @param outputBlocks
+ * @param inputBlocks the input blocks
+ * @param outputBlocks the output blocks
 */
 public HHErasureCodingStep(ECBlock[] inputBlocks,
 ECBlock[] outputBlocks) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java
index 16a3c0fa61c4b..6f8ab521b1a33 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java
@@ -43,9 +43,9 @@ public class HHXORErasureDecodingStep extends HHErasureCodingStep {
 /**
 * The constructor with all the necessary info.
- * @param inputBlocks
+ * @param inputBlocks the input blocks
 * @param erasedIndexes the indexes of erased blocks in inputBlocks array
- * @param outputBlocks
+ * @param outputBlocks the output blocks
 * @param rawDecoder underlying RS decoder for hitchhiker decoding
 * @param rawEncoder underlying XOR encoder for hitchhiker decoding
 */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java
index 6a5644270117b..5d5e60508f24a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java
@@ -40,8 +40,8 @@ public class HHXORErasureEncodingStep extends HHErasureCodingStep {
 /**
 * The constructor with all the necessary info.
 *
- * @param inputBlocks
- * @param outputBlocks
+ * @param inputBlocks the input blocks
+ * @param outputBlocks the output blocks
 * @param rsRawEncoder underlying RS encoder for hitchhiker encoding
 * @param xorRawEncoder underlying XOR encoder for hitchhiker encoding
 */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java
index 1a0e5c030e070..16c7417446088 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java
@@ -53,7 +53,7 @@ protected ErasureCodingStep prepareDecodingStep(
 /**
 * Which blocks were erased? For XOR it's simple: we only allow and return one
 * erased block, either data or parity.
- * @param blockGroup
+ * @param blockGroup the block group
 * @return output blocks to recover
 */
 @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java
index f17375a2551fa..12a9eeb2f3b78 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java
@@ -34,16 +34,21 @@ public interface Serialization<T> {
 /**
 * Allows clients to test whether this {@link Serialization}
 * supports the given class.
+ *
+ * @param c the class
+ * @return true if this serialization accepts the given class, false otherwise
 */
 boolean accept(Class<?> c);
 
 /**
 * @return a {@link Serializer} for the given class.
+ * @param c the class
 */
 Serializer<T> getSerializer(Class<T> c);
 
 /**
 * @return a {@link Deserializer} for the given class.
+ * @param c the class
 */
 Deserializer<T> getDeserializer(Class<T> c);
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
index f340cb3a98a44..7280e3f44e4dc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
@@ -61,18 +61,24 @@ public Serializer<T> getSerializer(Class<T> c) {
 /**
 * Return an Avro Schema instance for the given class.
+ * @param t the object instance
+ * @return the Avro Schema
 */
 @InterfaceAudience.Private
 public abstract Schema getSchema(T t);
 
 /**
 * Create and return Avro DatumWriter for the given class.
+ * @param clazz the class
+ * @return a DatumWriter for the class
 */
 @InterfaceAudience.Private
 public abstract DatumWriter<T> getWriter(Class<T> clazz);
 
 /**
 * Create and return Avro DatumReader for the given class.
+ * @param clazz the class
+ * @return a DatumReader for the class
 */
 @InterfaceAudience.Private
 public abstract DatumReader<T> getReader(Class<T> clazz);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
index eef7d7c31964a..fd9cd7c946b1f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
@@ -448,7 +448,6 @@ public static InetSocketAddress getConnectAddress(InetSocketAddress addr) {
 /**
 * Same as getInputStream(socket, socket.getSoTimeout()).
- *
* * @param socket socket * @throws IOException raised on errors performing I/O. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java index 113dcaeb5e644..d93e0e609b1ae 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java @@ -91,7 +91,7 @@ public boolean isTransient() { /** * Ensures that any changes to the credentials are written to persistent * store. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract void flush() throws IOException; @@ -99,7 +99,7 @@ public boolean isTransient() { * Get the credential entry for a specific alias. * @param alias the name of a specific credential * @return the credentialEntry - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract CredentialEntry getCredentialEntry(String alias) throws IOException; @@ -107,7 +107,7 @@ public abstract CredentialEntry getCredentialEntry(String alias) /** * Get the aliases for all credentials. * @return the list of alias names - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract List getAliases() throws IOException; @@ -115,7 +115,8 @@ public abstract CredentialEntry getCredentialEntry(String alias) * Create a new credential. The given alias must not already exist. * @param name the alias of the credential * @param credential the credential value for the alias. - * @throws IOException + * @throws IOException raised on errors performing I/O. + * @return CredentialEntry */ public abstract CredentialEntry createCredentialEntry(String name, char[] credential) throws IOException; @@ -123,7 +124,7 @@ public abstract CredentialEntry createCredentialEntry(String name, /** * Delete the given credential. * @param name the alias of the credential to delete - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public abstract void deleteCredentialEntry(String name) throws IOException; @@ -133,7 +134,7 @@ public abstract CredentialEntry createCredentialEntry(String name, * means. If true, the password should be provided by the caller using * setPassword(). * @return Whether or not the provider requires a password - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public boolean needsPassword() throws IOException { return false; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java index 06d42207ecba5..c998bd51a5c38 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java @@ -70,9 +70,9 @@ public class CredentialShell extends CommandShell { * % hadoop credential check alias [-provider providerPath] * % hadoop credential delete alias [-provider providerPath] [-f] * - * @param args + * @param args args * @return 0 if the argument(s) were recognized, 1 otherwise - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/
 @Override
 protected int init(String[] args) throws IOException {
@@ -523,7 +523,7 @@ public void format(String message) {
 *
 * @param args
 * Command line arguments
- * @throws Exception
+ * @throws Exception if the command fails
 */
 public static void main(String[] args) throws Exception {
 int res = ToolRunner.run(new Configuration(), new CredentialShell(), args);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
index aa5b01fbed113..8453f4f59c6e7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
@@ -296,6 +296,7 @@ else if (!users.isEmpty()) {
 /**
 * Returns the access control list as a String that can be used for building a
 * new instance by sending it to the constructor of {@link AccessControlList}.
+ * @return the ACL string
 */
 public String getAclString() {
 StringBuilder sb = new StringBuilder(INITIAL_CAPACITY);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java
index eff77d8942cf7..df022c38076bf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java
@@ -46,7 +46,7 @@ public interface ImpersonationProvider extends Configurable {
 * be preferred to avoid possibly re-resolving the ip address.
 * @param user ugi of the effective or proxy user which contains a real user.
 * @param remoteAddress the ip address of client.
- * @throws AuthorizationException
+ * @throws AuthorizationException if authorization is denied
 */
 default void authorize(UserGroupInformation user, String remoteAddress)
 throws AuthorizationException {
@@ -62,7 +62,7 @@ default void authorize(UserGroupInformation user, String remoteAddress)
 *
 * @param user ugi of the effective or proxy user which contains a real user
 * @param remoteAddress the ip address of client
- * @throws AuthorizationException
+ * @throws AuthorizationException if authorization is denied
 */
 void authorize(UserGroupInformation user, InetAddress remoteAddress)
 throws AuthorizationException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
index be05e110b59cf..dede4c925ae0d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
@@ -94,7 +94,7 @@ public static void refreshSuperUserGroupsConfiguration(Configuration conf) {
 *
 * @param user ugi of the effective or proxy user which contains a real user
 * @param remoteAddress the ip address of client
- * @throws AuthorizationException
+ * @throws AuthorizationException if authorization is denied
 */
 public static void authorize(UserGroupInformation user,
 String remoteAddress) throws AuthorizationException {
@@ -106,7 +106,7 @@ public static void authorize(UserGroupInformation user,
 *
 * @param user ugi of the effective or proxy user which contains a real user
 * @param remoteAddress the inet address of client
- * @throws AuthorizationException
+ * @throws AuthorizationException if authorization is denied
 */
 public static void authorize(UserGroupInformation user,
 InetAddress remoteAddress) throws AuthorizationException {
@@ -125,10 +125,10 @@ private static ImpersonationProvider getSip() {
 /**
 * This function is kept to provide backward compatibility.
- * @param user
- * @param remoteAddress
- * @param conf
- * @throws AuthorizationException
+ * @param user the user
+ * @param remoteAddress the remote address of the client
+ * @param conf configuration
+ * @throws AuthorizationException if authorization is denied
 * @deprecated use {@link #authorize(UserGroupInformation, String)} instead.
 */
 @Deprecated
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java
index 0f0b25d8344e2..51a900fa71cb0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java
@@ -41,7 +41,7 @@ public interface RefreshAuthorizationPolicyProtocol {
 /**
 * Refresh the service-level authorization policy in-effect.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
 @Idempotent
 void refreshServiceAcl() throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
index b72ce63f5d06f..d49de10c61cd2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
@@ -1117,7 +1117,7 @@ private static void joinThread(Thread t) {
 protected abstract String[] getExecString();
 
 /**
- * Parse the execution result
+ * Parse the execution result.
 *
 * @param lines the lines to parse
 * @throws IOException raised on errors performing I/O.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java
index e8ad18cfc87e3..be97b55bbc25b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java
@@ -100,8 +100,8 @@ public Key(byte[] value, double weight) {
 }
 
 /**
- * @param value
- * @param weight
+ * @param value the key value
+ * @param weight the weight of the key
 */
 public void set(byte[] value, double weight) {
 if (value == null) {
From 0a9bbff1adf70ef37c79b3daea584e884a696986 Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Fri, 13 May 2022 17:08:30 -0700
Subject: [PATCH 35/53] HADOOP-18229. Fix 150 java doc compilation warnings
 and 6 errors.
---
 .../fs/CommonConfigurationKeysPublic.java | 3 +-
 .../apache/hadoop/fs/FileEncryptionInfo.java | 1 -
 .../java/org/apache/hadoop/fs/FileUtil.java | 10 +-
 .../main/java/org/apache/hadoop/fs/Trash.java | 4 +-
 .../org/apache/hadoop/fs/TrashPolicy.java | 1 +
 .../hadoop/fs/permission/FsCreateModes.java | 2 +-
 .../hadoop/fs/shell/find/BaseExpression.java | 17 +++-
 .../hadoop/fs/shell/find/Expression.java | 15 ++-
 .../hadoop/fs/shell/find/FindOptions.java | 1 +
 .../apache/hadoop/fs/shell/find/Result.java | 21 +++-
 .../hadoop/ha/ActiveStandbyElector.java | 29 +++++-
 .../java/org/apache/hadoop/ha/HAAdmin.java | 3 +
 .../apache/hadoop/ha/HAServiceProtocol.java | 8 +-
 .../org/apache/hadoop/ha/HAServiceTarget.java | 8 +-
 .../apache/hadoop/io/compress/BZip2Codec.java | 8 +-
 .../apache/hadoop/io/compress/CodecPool.java | 10 +-
 .../io/compress/CompressionCodecFactory.java | 5 +-
 .../apache/hadoop/io/compress/Lz4Codec.java | 8 +-
 .../hadoop/io/compress/SnappyCodec.java | 8 +-
 .../hadoop/io/compress/ZStandardCodec.java | 8 +-
 .../hadoop/io/erasurecode/CodecUtil.java | 2 +
 .../io/erasurecode/ErasureCodeNative.java | 2 +
 .../io/erasurecode/grouper/BlockGrouper.java | 6 +-
 .../sink/ganglia/AbstractGangliaSink.java | 5 +-
 .../metrics2/sink/ganglia/GangliaSink30.java | 2 +-
 .../metrics2/sink/ganglia/GangliaSink31.java | 2 +-
 .../apache/hadoop/security/SecurityUtil.java | 2 +-
 .../AbstractDelegationTokenSecretManager.java | 96 +++++++++++++++----
 .../hadoop/util/concurrent/AsyncGet.java | 7 +-
 .../hadoop/util/curator/ZKCuratorManager.java | 12 ++-
 .../functional/CommonCallableSupplier.java | 5 +
 .../util/functional/RemoteIterators.java | 12 ++-
 32 files changed, 245 insertions(+), 78 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index fdc5d3a40c106..5225236509294 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -181,8 +181,9 @@ public class CommonConfigurationKeysPublic {
 /**
 * Default value for {@link #FS_CREATION_PARALLEL_COUNT}.
- * <p></p>
+ * <p>
 * Default value: {@value}.
+ * </p>
*/ public static final int FS_CREATION_PARALLEL_COUNT_DEFAULT = 64; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java index 4fd80572e60b4..915f73f19b5c6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java @@ -53,7 +53,6 @@ public class FileEncryptionInfo implements Serializable { * @param ezKeyVersionName name of the KeyVersion used to encrypt the * encrypted data encryption key. * @param version version - * @return file encryption info */ public FileEncryptionInfo(final CipherSuite suite, final CryptoProtocolVersion version, final byte[] edek, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java index 96f5298c366d1..308d008dfff0b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java @@ -420,7 +420,7 @@ public static boolean copy(FileSystem srcFS, Path[] srcs, * @param overwrite overwrite * @param conf configuration * @throws IOException raised on errors performing I/O. - * + * @return true if the operation succeeded. */ public static boolean copy(FileSystem srcFS, Path src, FileSystem dstFS, Path dst, @@ -433,20 +433,21 @@ public static boolean copy(FileSystem srcFS, Path src, /** * Copy a file/directory tree within/between filesystems. - *
+ *

* returns true if the operation succeeded. When deleteSource is true,
 * this means "after the copy, delete(source) returned true".
 * If the destination is a directory, and mkdirs (dest) fails,
 * the operation will return false rather than raise any exception.
- *
+ *

* The overwrite flag is about overwriting files; it has no effect on
 * handling an attempt to copy a file atop a directory (expect an IOException),
 * or a directory over a path which contains a file (mkdir will fail, so
 * "false").
- *
+ *

* The operation is recursive, and the deleteSource operation takes place * as each subdirectory is copied. Therefore, if an operation fails partway * through, the source tree may be partially deleted. + *

* @param srcFS source filesystem
 * @param srcStatus status of source
 * @param dstFS destination filesystem
 * @param dst path of source
@@ -1769,6 +1770,7 @@ public static List<Path> getJarsInDirectory(String path) {
 * wildcard path to return all jars from the directory to use in a classpath.
 *
 * @param path the path to the directory. The path may include the wildcard.
+ * @param useLocal whether to use local paths
 * @return the list of jars as URLs, or an empty list if there are no jars, or
 * the directory does not exist
 */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
index 450e62b4d5d51..f34b40f992e5d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
@@ -99,9 +99,9 @@ public static boolean moveToAppropriateTrash(FileSystem fs, Path p,
 }
 
 /**
- * Returns whether the trash is enabled for this filesystem
+ * Returns whether the trash is enabled for this filesystem.
 *
- * return if isEnabled true,not false
+ * @return true if trash is enabled, false otherwise
 */
 public boolean isEnabled() {
 return trashPolicy.isEnabled();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
index b8b67c9e8b2ff..e4c7f4035248d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
@@ -67,6 +67,7 @@ public void initialize(Configuration conf, FileSystem fs) {
 /**
 * Move a file or directory to the current trash directory.
+ * @param path the path
 * @return false if the item is already in the trash or trash is disabled
 * @throws IOException raised on errors performing I/O.
 */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
index fd67607c1723e..a684fd33f94d4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
@@ -37,7 +37,7 @@ public final class FsCreateModes extends FsPermission {
 *
 * @param mode the mode
 * @param umask the umask
- * @retutn If the mode is already
+ * @return If the mode is already
 * an FsCreateModes object, return it.
 */
 public static FsPermission applyUMask(FsPermission mode,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
index 5069d2d34e51c..0f4c1771012f0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
@@ -38,12 +38,18 @@ public abstract class BaseExpression implements Expression, Configurable {
 private String[] usage = { "Not yet implemented" };
 private String[] help = { "Not yet implemented" };
 
- /** Sets the usage text for this {@link Expression} */
+ /**
+ * Sets the usage text for this {@link Expression}.
+ * @param usage the usage text
 + */
 protected void setUsage(String[] usage) {
 this.usage = usage;
 }
 
- /** Sets the help text for this {@link Expression} */
+ /**
+ * Sets the help text for this {@link Expression}.
+ * @param help the help text
+ */
 protected void setHelp(String[] help) {
 this.help = help;
 }
@@ -92,7 +98,10 @@ public void finish() throws IOException {
 /** Children of this expression. */
 private LinkedList<Expression> children = new LinkedList<Expression>();
 
- /** Return the options to be used by this expression. */
+ /**
+ * Return the options to be used by this expression.
+ * @return the options
+ */
 protected FindOptions getOptions() {
 return (this.options == null) ? new FindOptions() : this.options;
 }
@@ -265,6 +274,7 @@ protected void addArgument(String arg) {
 * @param depth
 * current depth in the process directories
 * @return FileStatus
+ * @throws IOException raised on errors performing I/O.
 */
 protected FileStatus getFileStatus(PathData item, int depth)
 throws IOException {
@@ -295,6 +305,7 @@ protected Path getPath(PathData item) throws IOException {
 *
 * @param item PathData
 * @return FileSystem
+ * @throws IOException raised on errors performing I/O.
 */
 protected FileSystem getFileSystem(PathData item) throws IOException {
 return item.fs;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
index ccad631028cc9..3a4265c0ab7a5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
@@ -30,13 +30,15 @@ public interface Expression {
 /**
 * Set the options for this expression, called once before processing any
 * items.
+ * @param options the options
+ * @throws IOException raised on errors performing I/O.
 */
 public void setOptions(FindOptions options) throws IOException;
 
 /**
 * Prepares the expression for execution, called once after setting options
 * and before processing any options.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 */
 public void prepare() throws IOException;
 
@@ -46,13 +48,14 @@ public interface Expression {
 * @param item {@link PathData} item to be processed
 * @param depth distance of the item from the command line argument
 * @return {@link Result} of applying the expression to the item
+ * @throws IOException raised on errors performing I/O.
 */
 public Result apply(PathData item, int depth) throws IOException;
 
 /**
 * Finishes the expression, called once after processing all items.
 *
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 */
 public void finish() throws IOException;
 
@@ -76,15 +79,21 @@ public interface Expression {
 /**
 * Indicates whether this expression performs an action, i.e. provides output
 * back to the user.
+ * @return true if this expression performs an action, false otherwise
 */
 public boolean isAction();
 
- /** Identifies the expression as an operator rather than a primary. */
+ /**
+ * Identifies the expression as an operator rather than a primary.
+ * @return true if this expression is an operator, false otherwise
+ */
 public boolean isOperator();
 
 /**
 * Returns the precedence of this expression
 * (only applicable to operators).
+ * + * @return precedence */ public int getPrecedence(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java index b0f1be5c35c93..e3f24835f800f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java @@ -264,6 +264,7 @@ public void setConfiguration(Configuration configuration) { /** * Return the {@link Configuration} return configuration {@link Configuration} + * @return configuration */ public Configuration getConfiguration() { return this.configuration; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java index 2ef9cb4a801d6..a7dee3a97439b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java @@ -35,23 +35,36 @@ private Result(boolean success, boolean recurse) { this.descend = recurse; } - /** Should further directories be descended. */ + /** + * Should further directories be descended. + * @return if is pass true,not false. + * */ public boolean isDescend() { return this.descend; } - /** Should processing continue. */ + /** + * Should processing continue. + * @return if is pass true,not false + */ public boolean isPass() { return this.success; } - /** Returns the combination of this and another result. */ + /** + * Returns the combination of this and another result. + * @param other other + * @return result + */ public Result combine(Result other) { return new Result(this.isPass() && other.isPass(), this.isDescend() && other.isDescend()); } - /** Negate this result. */ + /** + * Negate this result. + * @return Result + */ public Result negate() { return new Result(!this.isPass(), this.isDescend()); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java index 041f8cab49c4d..7394e5fb46633 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java @@ -91,6 +91,8 @@ public interface ActiveStandbyElectorCallback { * * Callback implementations are expected to manage their own * timeouts (e.g. when making an RPC to a remote node). + * + * @throws ServiceFailedException Service Failed Exception */ void becomeActive() throws ServiceFailedException; @@ -119,6 +121,8 @@ public interface ActiveStandbyElectorCallback { * If there is any fatal error (e.g. wrong ACL's, unexpected Zookeeper * errors or Zookeeper persistent unavailability) then notifyFatalError is * called to notify the app about it. + * + * @param errorMessage error message */ void notifyFatalError(String errorMessage); @@ -204,8 +208,13 @@ enum State { * ZK connection * @param app * reference to callback interface object + * @param maxRetryNum maxRetryNum * @throws IOException + * raised on errors performing I/O. * @throws HadoopIllegalArgumentException + * if valid data is not supplied. 
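To make the combine() semantics above concrete, a small sketch (it assumes the PASS and STOP constants defined in Result, where STOP passes but stops descent):

import org.apache.hadoop.fs.shell.find.Result;

public class ResultSketch {
  public static void main(String[] args) {
    Result a = Result.PASS;              // pass = true, descend = true
    Result b = Result.STOP;              // pass = true, descend = false
    Result c = a.combine(b);             // both flags are AND-ed
    System.out.println(c.isPass());      // true
    System.out.println(c.isDescend());   // false
  }
}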
+ * @throws KeeperException + * other zookeeper operation errors. */ public ActiveStandbyElector(String zookeeperHostPorts, int zookeeperSessionTimeout, String parentZnodeName, List acl, @@ -246,7 +255,11 @@ public ActiveStandbyElector(String zookeeperHostPorts, * @param failFast * whether need to add the retry when establishing ZK connection. * @throws IOException + * raised on errors performing I/O. * @throws HadoopIllegalArgumentException + * if valid data is not supplied. + * @throws KeeperException + * other zookeeper operation errors. */ public ActiveStandbyElector(String zookeeperHostPorts, int zookeeperSessionTimeout, String parentZnodeName, List acl, @@ -312,6 +325,8 @@ public synchronized void joinElection(byte[] data) /** * @return true if the configured parent znode exists + * @throws IOException raised on errors performing I/O. + * @throws InterruptedException interrupted exception. */ public synchronized boolean parentZNodeExists() throws IOException, InterruptedException { @@ -327,6 +342,10 @@ public synchronized boolean parentZNodeExists() /** * Utility function to ensure that the configured base znode exists. * This recursively creates the znode as well as all of its parents. + * + * @throws IOException raised on errors performing I/O. + * @throws InterruptedException interrupted exception. + * @throws KeeperException other zookeeper operation errors. */ public synchronized void ensureParentZNode() throws IOException, InterruptedException, KeeperException { @@ -371,6 +390,9 @@ public synchronized void ensureParentZNode() * This recursively deletes everything within the znode as well as the * parent znode itself. It should only be used when it's certain that * no electors are currently participating in the election. + * + * @throws IOException raised on errors performing I/O. + * @throws InterruptedException interrupted exception. */ public synchronized void clearParentZNode() throws IOException, InterruptedException { @@ -435,6 +457,7 @@ public static class ActiveNotFoundException extends Exception { * @throws KeeperException * other zookeeper operation errors * @throws InterruptedException + * interrupted exception * @throws IOException * when ZooKeeper connection could not be established */ @@ -684,7 +707,7 @@ synchronized void processWatchEvent(ZooKeeper zk, WatchedEvent event) { * inherit and mock out the zookeeper instance * * @return new zookeeper client instance - * @throws IOException + * @throws IOException raised on errors performing I/O. * @throws KeeperException zookeeper connectionloss exception */ protected synchronized ZooKeeper connectToZooKeeper() throws IOException, @@ -714,7 +737,7 @@ protected synchronized ZooKeeper connectToZooKeeper() throws IOException, * inherit and pass in a mock object for zookeeper * * @return new zookeeper client instance - * @throws IOException + * @throws IOException raised on errors performing I/O. */ protected ZooKeeper createZooKeeper() throws IOException { return new ZooKeeper(zkHostPort, zkSessionTimeout, watcher); @@ -781,6 +804,8 @@ private void reJoinElection(int sleepTime) { * Sleep for the given number of milliseconds. * This is non-static, and separated out, so that unit tests * can override the behavior not to sleep. 
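A hedged sketch of bringing up the elector documented above; the quorum string, znode path, timeout, payload and retry count are illustrative values, and the callback is assumed to be supplied by the caller:

import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.ha.ActiveStandbyElector;
import org.apache.hadoop.util.ZKUtil;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.data.ACL;

class ElectorSketch {
  static ActiveStandbyElector join(
      ActiveStandbyElector.ActiveStandbyElectorCallback callback)
      throws Exception {
    List<ACL> acls = ZooDefs.Ids.OPEN_ACL_UNSAFE;
    ActiveStandbyElector elector = new ActiveStandbyElector(
        "zk1:2181,zk2:2181,zk3:2181", 5000, "/hadoop-ha/mycluster",
        acls, Collections.<ZKUtil.ZKAuthInfo>emptyList(), callback, 3);
    elector.ensureParentZNode();   // create the parent znode if it is missing
    elector.joinElection("node1".getBytes(StandardCharsets.UTF_8));
    return elector;
  }
}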
+ * + * @param sleepMs sleep ms */ @VisibleForTesting protected void sleepFor(int sleepMs) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java index c6949e561e2a2..d557e587652ae 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java @@ -326,6 +326,9 @@ private int getServiceState(final CommandLine cmd) /** * Return the serviceId as is, we are assuming it was * given as a service address of form {@literal <}host:ipcport{@literal >}. + * + * @param serviceId serviceId + * @return service addr */ protected String getServiceAddr(String serviceId) { return serviceId; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java index 74a3d121a1abe..6eeb93012b125 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java @@ -118,7 +118,8 @@ public void monitorHealth() throws HealthCheckFailedException, /** * Request service to transition to active state. No operation, if the * service is already in active state. - * + * + * @param reqInfo * @throws ServiceFailedException * if transition from standby to active fails. * @throws AccessControlException @@ -135,7 +136,8 @@ public void transitionToActive(StateChangeRequestInfo reqInfo) /** * Request service to transition to standby state. No operation, if the * service is already in standby state. - * + * + * @param reqInfo reqInfo * @throws ServiceFailedException * if transition from active to standby fails. * @throws AccessControlException @@ -153,6 +155,7 @@ public void transitionToStandby(StateChangeRequestInfo reqInfo) * Request service to transition to observer state. No operation, if the * service is already in observer state. * + * @param reqInfo reqInfo * @throws ServiceFailedException * if transition from standby to observer fails. * @throws AccessControlException @@ -176,6 +179,7 @@ void transitionToObserver(StateChangeRequestInfo reqInfo) * @throws IOException * if other errors happen * @see HAServiceStatus + * @return HAServiceStatus */ @Idempotent public HAServiceStatus getServiceStatus() throws AccessControlException, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java index 2e6b1fe113479..324c5f2225c19 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java @@ -93,6 +93,9 @@ public abstract void checkFencingConfigured() /** * @return a proxy to connect to the target HA Service. + * @param timeoutMs timeout in milliseconds + * @param conf Configuration + * @throws IOException raised on errors performing I/O. 
*/ public HAServiceProtocol getProxy(Configuration conf, int timeoutMs) throws IOException { @@ -115,7 +118,7 @@ public HAServiceProtocol.HAServiceState getTransitionTargetHAStatus() { * returned proxy defaults to using {@link #getAddress()}, which means this * method's behavior is identical to {@link #getProxy(Configuration, int)}. * - * @param conf Configuration + * @param conf configuration * @param timeoutMs timeout in milliseconds * @return a proxy to connect to the target HA service for health monitoring * @throws IOException if there is an error @@ -154,6 +157,9 @@ private HAServiceProtocol getProxyForAddress(Configuration conf, /** * @return a proxy to the ZKFC which is associated with this HA service. + * @param conf configuration + * @param timeoutMs timeout in milliseconds + * @throws IOException raised on errors performing I/O. */ public ZKFCProtocol getZKFCProxy(Configuration conf, int timeoutMs) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java index 7fd5633daa698..7640f7ed7a6f7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java @@ -99,7 +99,7 @@ public BZip2Codec() { } * @param out the location for the final output stream * @return a stream the user can write uncompressed data to, to have it * compressed - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionOutputStream createOutputStream(OutputStream out) @@ -116,7 +116,7 @@ public CompressionOutputStream createOutputStream(OutputStream out) * @param compressor compressor to use * @return a stream the user can write uncompressed data to, to have it * compressed - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionOutputStream createOutputStream(OutputStream out, @@ -154,7 +154,7 @@ public Compressor createCompressor() { * * @param in the stream to read compressed bytes from * @return a stream to read uncompressed bytes from - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionInputStream createInputStream(InputStream in) @@ -171,7 +171,7 @@ public CompressionInputStream createInputStream(InputStream in) * @param in the stream to read compressed bytes from * @param decompressor decompressor to use * @return a stream to read uncompressed bytes from - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionInputStream createInputStream(InputStream in, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java index 2ac2ca65173f0..50a98778655a6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java @@ -235,7 +235,10 @@ public static void returnDecompressor(Decompressor decompressor) { /** * Return the number of leased {@link Compressor}s for this - * {@link CompressionCodec} + * {@link CompressionCodec}. + * + * @param codec codec + * @return the number of leased. 
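For the proxy methods documented above, a sketch of querying and transitioning a target; `target` is assumed to be some concrete HAServiceTarget (e.g. for a NameNode), and the timeout is illustrative:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ha.HAServiceProtocol;
import org.apache.hadoop.ha.HAServiceStatus;
import org.apache.hadoop.ha.HAServiceTarget;

class HAProxySketch {
  static void makeActiveIfStandby(HAServiceTarget target, Configuration conf)
      throws IOException {
    HAServiceProtocol proxy = target.getProxy(conf, 15000);  // 15s timeout
    HAServiceStatus status = proxy.getServiceStatus();
    if (status.getState() == HAServiceProtocol.HAServiceState.STANDBY) {
      proxy.transitionToActive(new HAServiceProtocol.StateChangeRequestInfo(
          HAServiceProtocol.RequestSource.REQUEST_BY_USER));
    }
  }
}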
*/ public static int getLeasedCompressorsCount(CompressionCodec codec) { return (codec == null) ? 0 : getLeaseCount(compressorCounts, @@ -244,7 +247,10 @@ public static int getLeasedCompressorsCount(CompressionCodec codec) { /** * Return the number of leased {@link Decompressor}s for this - * {@link CompressionCodec} + * {@link CompressionCodec}. + * + * @param codec codec + * @return the number of leased */ public static int getLeasedDecompressorsCount(CompressionCodec codec) { return (codec == null) ? 0 : getLeaseCount(decompressorCounts, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java index a195ed4e77fd4..6291d083e83fd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java @@ -171,6 +171,8 @@ public static void setCodecClasses(Configuration conf, /** * Find the codecs specified in the config value io.compression.codecs * and register them. Defaults to gzip and deflate. + * + * @param conf configuration */ public CompressionCodecFactory(Configuration conf) { codecs = new TreeMap(); @@ -293,7 +295,8 @@ public static String removeSuffix(String filename, String suffix) { /** * A little test program. - * @param args + * @param args arguments + * @throws Exception exception */ public static void main(String[] args) throws Exception { Configuration conf = new Configuration(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java index 8bfb7fe95c4e2..a5afb706c99c1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java @@ -61,7 +61,7 @@ public Configuration getConf() { * * @param out the location for the final output stream * @return a stream the user can write uncompressed data to have it compressed - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionOutputStream createOutputStream(OutputStream out) @@ -77,7 +77,7 @@ public CompressionOutputStream createOutputStream(OutputStream out) * @param out the location for the final output stream * @param compressor compressor to use * @return a stream the user can write uncompressed data to have it compressed - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionOutputStream createOutputStream(OutputStream out, @@ -125,7 +125,7 @@ public Compressor createCompressor() { * * @param in the stream to read compressed bytes from * @return a stream to read uncompressed bytes from - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionInputStream createInputStream(InputStream in) @@ -141,7 +141,7 @@ public CompressionInputStream createInputStream(InputStream in) * @param in the stream to read compressed bytes from * @param decompressor decompressor to use * @return a stream to read uncompressed bytes from - * @throws IOException + * @throws IOException raised on errors performing I/O. 
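The lease counters documented above move with getCompressor()/returnCompressor(); a minimal sketch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CodecPoolSketch {
  public static void main(String[] args) {
    CompressionCodec codec =
        ReflectionUtils.newInstance(GzipCodec.class, new Configuration());
    Compressor compressor = CodecPool.getCompressor(codec);        // lease one
    System.out.println(CodecPool.getLeasedCompressorsCount(codec)); // 1
    CodecPool.returnCompressor(compressor);                        // return it
    System.out.println(CodecPool.getLeasedCompressorsCount(codec)); // 0
  }
}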
*/ @Override public CompressionInputStream createInputStream(InputStream in, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java index 77cf36a339b34..d64c6e512f87c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java @@ -61,7 +61,7 @@ public Configuration getConf() { * * @param out the location for the final output stream * @return a stream the user can write uncompressed data to have it compressed - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionOutputStream createOutputStream(OutputStream out) @@ -77,7 +77,7 @@ public CompressionOutputStream createOutputStream(OutputStream out) * @param out the location for the final output stream * @param compressor compressor to use * @return a stream the user can write uncompressed data to have it compressed - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionOutputStream createOutputStream(OutputStream out, @@ -122,7 +122,7 @@ public Compressor createCompressor() { * * @param in the stream to read compressed bytes from * @return a stream to read uncompressed bytes from - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionInputStream createInputStream(InputStream in) @@ -138,7 +138,7 @@ public CompressionInputStream createInputStream(InputStream in) * @param in the stream to read compressed bytes from * @param decompressor decompressor to use * @return a stream to read uncompressed bytes from - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionInputStream createInputStream(InputStream in, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/ZStandardCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/ZStandardCodec.java index a7afebc0c49ae..139e81eb73cc2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/ZStandardCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/ZStandardCodec.java @@ -116,7 +116,7 @@ private static int getBufferSize(Configuration conf) { * * @param out the location for the final output stream * @return a stream the user can write uncompressed data to have compressed - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionOutputStream createOutputStream(OutputStream out) @@ -132,7 +132,7 @@ public CompressionOutputStream createOutputStream(OutputStream out) * @param out the location for the final output stream * @param compressor compressor to use * @return a stream the user can write uncompressed data to have compressed - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionOutputStream createOutputStream(OutputStream out, @@ -173,7 +173,7 @@ public Compressor createCompressor() { * * @param in the stream to read compressed bytes from * @return a stream to read uncompressed bytes from - * @throws IOException + * @throws IOException raised on errors performing I/O. 
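The stream factory methods documented in these codec hunks all share one shape; a round-trip sketch (GzipCodec is used here only because it needs no native library, Lz4Codec exposes the identical methods):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CodecStreamSketch {
  public static void main(String[] args) throws Exception {
    CompressionCodec codec =
        ReflectionUtils.newInstance(GzipCodec.class, new Configuration());
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    try (CompressionOutputStream out = codec.createOutputStream(compressed)) {
      out.write("hello codec".getBytes(StandardCharsets.UTF_8));
    }
    ByteArrayOutputStream plain = new ByteArrayOutputStream();
    try (CompressionInputStream in = codec.createInputStream(
        new ByteArrayInputStream(compressed.toByteArray()))) {
      byte[] buf = new byte[4096];
      for (int n = in.read(buf); n > 0; n = in.read(buf)) {
        plain.write(buf, 0, n);            // copy decompressed bytes
      }
    }
    System.out.println(plain.toString("UTF-8"));  // hello codec
  }
}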
*/ @Override public CompressionInputStream createInputStream(InputStream in) @@ -189,7 +189,7 @@ public CompressionInputStream createInputStream(InputStream in) * @param in the stream to read compressed bytes from * @param decompressor decompressor to use * @return a stream to read uncompressed bytes from - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public CompressionInputStream createInputStream(InputStream in, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java index 2632f4b82f070..d302932fa8fd5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java @@ -83,6 +83,7 @@ private CodecUtil() { } /** * Create encoder corresponding to given codec. * @param options Erasure codec options + * @param conf configuration * @return erasure encoder */ public static ErasureEncoder createEncoder(Configuration conf, @@ -100,6 +101,7 @@ public static ErasureEncoder createEncoder(Configuration conf, /** * Create decoder corresponding to given codec. * @param options Erasure codec options + * @param conf configuration * @return erasure decoder */ public static ErasureDecoder createDecoder(Configuration conf, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java index ec317eee4dc3e..b931a68bddbe8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java @@ -61,6 +61,7 @@ private ErasureCodeNative() {} /** * Are native libraries loaded? + * @return if is native code loaded true,not false */ public static boolean isNativeCodeLoaded() { return LOADING_FAILURE_REASON == null; @@ -82,6 +83,7 @@ public static void checkNativeCodeLoaded() { /** * Get the native library name that's available or supported. + * @return library name */ public static native String getLibraryName(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java index 3f1b0c22941bd..1a7757cbc16da 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java @@ -33,7 +33,7 @@ public class BlockGrouper { /** * Set EC schema. - * @param schema + * @param schema schema */ public void setSchema(ECSchema schema) { this.schema = schema; @@ -41,7 +41,7 @@ public void setSchema(ECSchema schema) { /** * Get EC schema. 
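A sketch of the encoder/decoder factory documented above; the RS 6+3 layout is an illustrative choice:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.CodecUtil;
import org.apache.hadoop.io.erasurecode.ECSchema;
import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;
import org.apache.hadoop.io.erasurecode.coder.ErasureDecoder;
import org.apache.hadoop.io.erasurecode.coder.ErasureEncoder;

public class CodecUtilSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    ECSchema schema = new ECSchema("rs", 6, 3);       // RS(6,3)
    ErasureCodecOptions options = new ErasureCodecOptions(schema);
    ErasureEncoder encoder = CodecUtil.createEncoder(conf, options);
    ErasureDecoder decoder = CodecUtil.createDecoder(conf, options);
    System.out.println(
        encoder.getNumDataUnits() + "+" + encoder.getNumParityUnits());
  }
}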
-   * @return
+   * @return the EC schema.
    */
   protected ECSchema getSchema() {
     return schema;
@@ -67,7 +67,7 @@ public int getRequiredNumParityBlocks() {
    * Calculating and organizing BlockGroup, to be called by ECManager
    * @param dataBlocks Data blocks to compute parity blocks against
    * @param parityBlocks To be computed parity blocks
-   * @return
+   * @return the resulting ECBlockGroup.
    */
   public ECBlockGroup makeBlockGroup(ECBlock[] dataBlocks,
                                      ECBlock[] parityBlocks) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
index 804e90330fba3..5c5fe97f42610 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
@@ -212,7 +212,7 @@ private void loadGangliaConf(GangliaConfType gtype) {
   /**
    * Lookup GangliaConf from cache. If not found, return default values
    *
-   * @param metricName
+   * @param metricName name of the metric to look up.
    * @return looked up GangliaConf
    */
   protected GangliaConf getGangliaConfForMetric(String metricName) {
@@ -253,6 +253,7 @@ private void pad() {
   /**
    * Puts an integer into the buffer as 4 bytes, big-endian.
+   * @param i the integer to encode.
    */
   protected void xdr_int(int i) {
     buffer[offset++] = (byte) ((i >> 24) & 0xff);
@@ -263,7 +264,7 @@ protected void xdr_int(int i) {
   /**
    * Sends Ganglia Metrics to the configured hosts
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   protected void emitToGangliaHosts() throws IOException {
     try {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java
index 3e8314ee884d8..196824f433c81 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java
@@ -216,7 +216,7 @@ private GangliaSlope calculateSlope(GangliaConf gConf,
    * @param value The value of the metric
    * @param gConf The GangliaConf for this metric
    * @param gSlope The slope for this metric
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   protected void emitMetric(String groupName, String name, String type,
       String value, GangliaConf gConf, GangliaSlope gSlope) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java
index 5aebff8c031a9..fae0d4e85e1ec 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java
@@ -42,7 +42,7 @@ public class GangliaSink31 extends GangliaSink30 {
    * @param value The value of the metric
    * @param gConf The GangliaConf for this metric
    * @param gSlope The slope for this metric
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
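The xdr_int() hunk above writes a big-endian 4-byte integer by hand; a self-contained check of the same shifts against ByteBuffer (which is big-endian by default):

import java.nio.ByteBuffer;

public class XdrIntSketch {
  public static void main(String[] args) {
    byte[] buffer = new byte[4];
    int i = 0x01020304;
    buffer[0] = (byte) ((i >> 24) & 0xff);  // same shifts as xdr_int()
    buffer[1] = (byte) ((i >> 16) & 0xff);
    buffer[2] = (byte) ((i >> 8) & 0xff);
    buffer[3] = (byte) (i & 0xff);
    System.out.println(ByteBuffer.wrap(buffer).getInt() == i);  // true
  }
}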
*/ @Override protected void emitMetric(String groupName, String name, String type, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java index 187ea28632bd5..277a037a53742 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java @@ -515,9 +515,9 @@ public static T doAsLoginUserOrFatal(PrivilegedAction action) { * InterruptedException is thrown, it is converted to an IOException. * * @param action the action to perform + * @param Generics Type T * @return the result of the action * @throws IOException in the event of error - * @return generic type T */ public static T doAsLoginUser(PrivilegedExceptionAction action) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java index 2dec2df4b9d6c..72bdfceed78be 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java @@ -152,7 +152,10 @@ public AbstractDelegationTokenSecretManager(long delegationKeyUpdateInterval, this.metrics = DelegationTokenSecretManagerMetrics.create(); } - /** should be called before this object is used */ + /** + * should be called before this object is used. + * @throws IOException raised on errors performing I/O. + */ public void startThreads() throws IOException { Preconditions.checkState(!running); updateCurrentKey(); @@ -175,6 +178,8 @@ public synchronized void reset() { /** * Total count of active delegation tokens. + * + * @return currentTokens.size. */ public long getCurrentTokensSize() { return currentTokens.size(); @@ -182,8 +187,11 @@ public long getCurrentTokensSize() { /** * Add a previously used master key to cache (when NN restarts), - * should be called before activate(). - * */ + * should be called before activate(). + * + * @param key delegation key + * @throws IOException raised on errors performing I/O. + */ public synchronized void addKey(DelegationKey key) throws IOException { if (running) // a safety check throw new IOException("Can't add delegation key to a running SecretManager."); @@ -233,7 +241,9 @@ protected void updateStoredToken(TokenIdent ident, long renewDate) throws IOExce /** * For subclasses externalizing the storage, for example Zookeeper - * based implementations + * based implementations. + * + * @return currentId */ protected synchronized int getCurrentKeyId() { return currentId; @@ -241,7 +251,9 @@ protected synchronized int getCurrentKeyId() { /** * For subclasses externalizing the storage, for example Zookeeper - * based implementations + * based implementations. + * + * @return currentId */ protected synchronized int incrementCurrentKeyId() { return ++currentId; @@ -249,7 +261,9 @@ protected synchronized int incrementCurrentKeyId() { /** * For subclasses externalizing the storage, for example Zookeeper - * based implementations + * based implementations. 
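The generic signature fixed above is easiest to read in use; a sketch of doAsLoginUser() with a PrivilegedExceptionAction:

import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.SecurityUtil;

public class DoAsSketch {
  public static void main(String[] args) throws IOException {
    // T is inferred as FileStatus[]; InterruptedException inside the
    // action is converted to an IOException, per the javadoc above.
    FileStatus[] listing = SecurityUtil.doAsLoginUser(
        new PrivilegedExceptionAction<FileStatus[]>() {
          @Override
          public FileStatus[] run() throws IOException {
            FileSystem fs = FileSystem.get(new Configuration());
            return fs.listStatus(new Path("/"));
          }
        });
    System.out.println(listing.length);
  }
}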
+ * + * @param keyId keyId */ protected synchronized void setCurrentKeyId(int keyId) { currentId = keyId; @@ -257,7 +271,9 @@ protected synchronized void setCurrentKeyId(int keyId) { /** * For subclasses externalizing the storage, for example Zookeeper - * based implementations + * based implementations. + * + * @return delegationTokenSequenceNumber */ protected synchronized int getDelegationTokenSeqNum() { return delegationTokenSequenceNumber; @@ -265,7 +281,9 @@ protected synchronized int getDelegationTokenSeqNum() { /** * For subclasses externalizing the storage, for example Zookeeper - * based implementations + * based implementations. + * + * @return delegationTokenSequenceNumber */ protected synchronized int incrementDelegationTokenSeqNum() { return ++delegationTokenSequenceNumber; @@ -273,7 +291,9 @@ protected synchronized int incrementDelegationTokenSeqNum() { /** * For subclasses externalizing the storage, for example Zookeeper - * based implementations + * based implementations. + * + * @param seqNum seqNum */ protected synchronized void setDelegationTokenSeqNum(int seqNum) { delegationTokenSequenceNumber = seqNum; @@ -281,7 +301,9 @@ protected synchronized void setDelegationTokenSeqNum(int seqNum) { /** * For subclasses externalizing the storage, for example Zookeeper - * based implementations + * based implementations. + * + * @param keyId keyId */ protected DelegationKey getDelegationKey(int keyId) { return allKeys.get(keyId); @@ -289,7 +311,10 @@ protected DelegationKey getDelegationKey(int keyId) { /** * For subclasses externalizing the storage, for example Zookeeper - * based implementations + * based implementations. + * + * @param key DelegationKey + * @throws IOException raised on errors performing I/O. */ protected void storeDelegationKey(DelegationKey key) throws IOException { allKeys.put(key.getKeyId(), key); @@ -298,7 +323,10 @@ protected void storeDelegationKey(DelegationKey key) throws IOException { /** * For subclasses externalizing the storage, for example Zookeeper - * based implementations + * based implementations. + * + * @param key DelegationKey + * @throws IOException raised on errors performing I/O. */ protected void updateDelegationKey(DelegationKey key) throws IOException { allKeys.put(key.getKeyId(), key); @@ -307,6 +335,9 @@ protected void updateDelegationKey(DelegationKey key) throws IOException { /** * For subclasses externalizing the storage, for example Zookeeper * based implementations + * + * @param ident ident + * @return DelegationTokenInformation */ protected DelegationTokenInformation getTokenInfo(TokenIdent ident) { return currentTokens.get(ident); @@ -314,7 +345,11 @@ protected DelegationTokenInformation getTokenInfo(TokenIdent ident) { /** * For subclasses externalizing the storage, for example Zookeeper - * based implementations + * based implementations. + * + * @param ident ident + * @param tokenInfo tokenInfo + * @throws IOException raised on errors performing I/O. */ protected void storeToken(TokenIdent ident, DelegationTokenInformation tokenInfo) throws IOException { @@ -325,7 +360,11 @@ protected void storeToken(TokenIdent ident, /** * For subclasses externalizing the storage, for example Zookeeper - * based implementations + * based implementations. + * + * @param ident ident + * @param tokenInfo tokenInfo + * @throws IOException raised on errors performing I/O. 
*/ protected void updateToken(TokenIdent ident, DelegationTokenInformation tokenInfo) throws IOException { @@ -341,7 +380,7 @@ protected void updateToken(TokenIdent ident, * startThreads() is called) * @param identifier identifier read from persistent storage * @param renewDate token renew time - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public synchronized void addPersistedDelegationToken( TokenIdent identifier, long renewDate) throws IOException { @@ -460,6 +499,10 @@ protected synchronized byte[] createPassword(TokenIdent identifier) { * Find the DelegationTokenInformation for the given token id, and verify that * if the token is expired. Note that this method should be called with * acquiring the secret manager's monitor. + * + * @param identifier identifier + * @throws InvalidToken invalid token exception + * @return DelegationTokenInformation */ protected DelegationTokenInformation checkToken(TokenIdent identifier) throws InvalidToken { @@ -503,7 +546,7 @@ public synchronized String getTokenTrackingId(TokenIdent identifier) { * Verifies that the given identifier and password are valid and match. * @param identifier Token identifier. * @param password Password in the token. - * @throws InvalidToken + * @throws InvalidToken InvalidToken */ public synchronized void verifyToken(TokenIdent identifier, byte[] password) throws InvalidToken { @@ -577,6 +620,9 @@ public synchronized long renewToken(Token token, /** * Cancel a token by removing it from cache. + * + * @param token token + * @param canceller canceller * @return Identifier of the canceled token * @throws InvalidToken for invalid token * @throws AccessControlException if the user isn't allowed to cancel @@ -640,15 +686,25 @@ public DelegationTokenInformation(long renewDate, byte[] password, this.password = password; this.trackingId = trackingId; } - /** returns renew date */ + /** + * returns renew date. + * @return renew date + */ public long getRenewDate() { return renewDate; } - /** returns password */ + /** + * returns password. + * @return password + */ byte[] getPassword() { return password; } - /** returns tracking id */ + + /** + * returns tracking id. + * @return tracking id + */ public String getTrackingId() { return trackingId; } @@ -753,7 +809,7 @@ public void run() { * * @param token the token where to extract the identifier * @return the delegation token identifier - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public TokenIdent decodeTokenIdentifier(Token token) throws IOException { return token.decodeIdentifier(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java index 9304b483952d0..d50dbc8f3efca 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java @@ -52,7 +52,12 @@ R get(long timeout, TimeUnit unit) /** Utility */ class Util { - /** Use {@link #get(long, TimeUnit)} timeout parameters to wait. */ + /** + * Use {@link #get(long, TimeUnit)} timeout parameters to wait. 
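The "subclasses externalizing the storage" hooks documented above are meant to be overridden together; a hedged sketch of such a subclass, where ExternalStore and its methods are hypothetical stand-ins (a real example is the ZooKeeper-backed manager), kept abstract so createIdentifier() stays open:

import java.io.IOException;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;

abstract class ExternalSecretManager<T extends AbstractDelegationTokenIdentifier>
    extends AbstractDelegationTokenSecretManager<T> {

  private final ExternalStore store;

  ExternalSecretManager(long keyUpdate, long maxLifetime, long renewInterval,
      long scanInterval, ExternalStore store) {
    super(keyUpdate, maxLifetime, renewInterval, scanInterval);
    this.store = store;
  }

  @Override
  protected void storeToken(T ident, DelegationTokenInformation tokenInfo)
      throws IOException {
    super.storeToken(ident, tokenInfo);       // keep the in-memory view
    store.writeToken(ident.getBytes(), tokenInfo.getRenewDate()); // persist it
  }

  @Override
  protected synchronized int incrementDelegationTokenSeqNum() {
    return store.nextSequenceNumber();        // e.g. a shared ZooKeeper counter
  }

  interface ExternalStore {                   // hypothetical helper contract
    void writeToken(byte[] ident, long renewDate) throws IOException;
    int nextSequenceNumber();
  }
}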
+ * @param obj object + * @param timeout timeout + * @param unit unit + */ public static void wait(Object obj, long timeout, TimeUnit unit) throws InterruptedException { if (timeout < 0) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java index ef9cec6677302..f818556077f00 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java @@ -83,8 +83,11 @@ public void close() { /** * Utility method to fetch the ZK ACLs from the configuration. + * + * @param conf configuration * @throws java.io.IOException if the Zookeeper ACLs configuration file * cannot be read + * @return acl list */ public static List getZKAcls(Configuration conf) throws IOException { // Parse authentication from configuration. @@ -102,9 +105,12 @@ public static List getZKAcls(Configuration conf) throws IOException { /** * Utility method to fetch ZK auth info from the configuration. + * + * @param conf configuration * @throws java.io.IOException if the Zookeeper ACLs configuration file * cannot be read * @throws ZKUtil.BadAuthFormatException if the auth format is invalid + * @return ZKAuthInfo List */ public static List getZKAuths(Configuration conf) throws IOException { @@ -167,7 +173,7 @@ public void start(List authInfos) throws IOException { * Get ACLs for a ZNode. * @param path Path of the ZNode. * @return The list of ACLs. - * @throws Exception + * @throws Exception If it cannot contact Zookeeper. */ public List getACL(final String path) throws Exception { return curator.getACL().forPath(path); @@ -186,7 +192,7 @@ public byte[] getData(final String path) throws Exception { /** * Get the data in a ZNode. * @param path Path of the ZNode. - * @param stat + * @param stat stat * @return The data in the ZNode. * @throws Exception If it cannot contact Zookeeper. */ @@ -363,7 +369,9 @@ public void safeCreate(String path, byte[] data, List acl, /** * Deletes the path. Checks for existence of path as well. + * * @param path Path to be deleted. + * @param fencingNodePath fencingNodePath * @throws Exception if any problem occurs while performing deletion. */ public void safeDelete(final String path, List fencingACL, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java index 32e299b4d45b1..89c4568a56075 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java @@ -89,7 +89,9 @@ public static CompletableFuture submit(final Executor executor, /** * Wait for a list of futures to complete. If the list is empty, * return immediately. + * * @param futures list of futures. + * @param Generics Type T * @throws IOException if one of the called futures raised an IOE. * @throws RuntimeException if one of the futures raised one. */ @@ -105,6 +107,8 @@ public static void waitForCompletion( /** * Wait for a single of future to complete, extracting IOEs afterwards. + * + * @param Generics Type T * @param future future to wait for. 
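A usage sketch of the ZKCuratorManager methods documented above; it assumes the ZK quorum is set via hadoop.zk.address in the configuration, and the znode path is illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.curator.ZKCuratorManager;

public class ZkCuratorSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    ZKCuratorManager zkm = new ZKCuratorManager(conf);
    zkm.start();                    // connects using conf's ZK settings
    if (!zkm.exists("/demo")) {
      zkm.create("/demo");
    }
    byte[] data = zkm.getData("/demo");
    System.out.println(data == null ? 0 : data.length);
    zkm.close();
  }
}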
* @throws IOException if one of the called futures raised an IOE. * @throws RuntimeException if one of the futures raised one. @@ -124,6 +128,7 @@ public static void waitForCompletion(final CompletableFuture future) /** * Wait for a single of future to complete, ignoring exceptions raised. * @param future future to wait for. + * @param Generics Type T */ public static void waitForCompletionIgnoringExceptions( @Nullable final CompletableFuture future) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java index 68261a22e44f4..bc4c91ae9c078 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java @@ -99,6 +99,7 @@ public static RemoteIterator remoteIteratorFromSingleton( /** * Create a remote iterator from a java.util.Iterator. * @param type + * @param iterator iterator * @return a remote iterator */ public static RemoteIterator remoteIteratorFromIterator( @@ -110,6 +111,7 @@ public static RemoteIterator remoteIteratorFromIterator( * Create a remote iterator from a java.util.Iterable -e.g. a list * or other collection. * @param type + * @param iterable iterable * @return a remote iterator */ public static RemoteIterator remoteIteratorFromIterable( @@ -120,6 +122,7 @@ public static RemoteIterator remoteIteratorFromIterable( /** * Create a remote iterator from an array. * @param type + * @param array array * @return a remote iterator */ public static RemoteIterator remoteIteratorFromArray(T[] array) { @@ -158,10 +161,11 @@ public static RemoteIterator typeCastingRemoteIterator( * Create a RemoteIterator from a RemoteIterator and a filter * function which returns true for every element to be passed * through. - *
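The RemoteIterators factories and the entry-count return value discussed in these hunks, in one short sketch:

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.util.functional.RemoteIterators;

public class RemoteIteratorsSketch {
  public static void main(String[] args) throws IOException {
    RemoteIterator<String> it =
        RemoteIterators.remoteIteratorFromIterable(Arrays.asList("a", "b", "c"));
    // foreach() applies the consumer and returns how many entries it saw.
    long processed = RemoteIterators.foreach(it, s -> System.out.println(s));
    System.out.println(processed);  // 3
  }
}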

+ *

* Elements are filtered in the hasNext() method; if not used * the filtering will be done on demand in the {@code next()} * call. + *

* @param type * @param iterator source * @param filter filter @@ -218,16 +222,16 @@ public static T[] toArray(RemoteIterator source, /** * Apply an operation to all values of a RemoteIterator. - *

+ * * If the iterator is an IOStatisticsSource returning a non-null * set of statistics, and this classes log is set to DEBUG, * then the statistics of the operation are evaluated and logged at * debug. - *

+ *

* The number of entries processed is returned, as it is useful to * know this, especially during tests or when reporting values * to users. - *

+ *

* This does not close the iterator afterwards. * @param source iterator source * @param consumer consumer of the values. From 963b9077cc2803335c69d12129e604093981684e Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Fri, 13 May 2022 17:19:12 -0700 Subject: [PATCH 36/53] HADOOP-18229. Fix some java doc compilation 3 warnings. --- .../src/main/java/org/apache/hadoop/io/MapFile.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java index 51db0b3f0afef..9612616d84bb1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java @@ -154,7 +154,9 @@ public Writer(Configuration conf, FileSystem fs, String dirName, valueClass(valClass)); } - /** Create the named map using the named key comparator. + /** Create the named map using the named key comparator. + * @param conf configuration + * @param fs filesystem * @deprecated Use Writer(Configuration, Path, Option...) instead. */ @Deprecated From 1757b3eb06302d7dd32200aad21a0d1f43d74ae0 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Fri, 13 May 2022 17:24:41 -0700 Subject: [PATCH 37/53] HADOOP-18229. Fix some java doc compilation 1 warnings. --- .../src/main/java/org/apache/hadoop/io/MapFile.java | 1 + 1 file changed, 1 insertion(+) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java index 9612616d84bb1..c6cba88304971 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java @@ -145,6 +145,7 @@ public Writer(Configuration conf, FileSystem fs, String dirName, /** Create the named map using the named key comparator. * @deprecated Use Writer(Configuration, Path, Option...) instead. + * @throws IOException raised on errors performing I/O. */ @Deprecated public Writer(Configuration conf, FileSystem fs, String dirName, From 5bcd161f3b1ef91fb5b4a0fb8e2be40f211ee45c Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Fri, 13 May 2022 18:47:16 -0700 Subject: [PATCH 38/53] HADOOP-18229. Fix some java doc compilation 150 warnings. AbstractMapWritable.java warning: no @param for clazz etc, ArrayFile.java warning: no @param for fs etc, ArrayPrimitiveWritable.java warning: no @param for componentType, BinaryComparable.java warning: no @return, HttpServer2.java warning: no description for @throws, IOStatisticsBinding.java warning: no @param for , MapFile.java warning: no @param for fs, MetricsSystem.java warning: no description for @exception, MetricsSystemMXBean.java warning: no description for @throws, OperationDuration.java: warning: empty

tag, WritableComparator.java: warning: no @param for keyClass --- .../statistics/impl/IOStatisticsBinding.java | 4 + .../org/apache/hadoop/http/HttpServer2.java | 13 ++ .../apache/hadoop/io/AbstractMapWritable.java | 22 +++- .../java/org/apache/hadoop/io/ArrayFile.java | 69 +++++++++-- .../hadoop/io/ArrayPrimitiveWritable.java | 4 +- .../apache/hadoop/io/BinaryComparable.java | 9 ++ .../java/org/apache/hadoop/io/MapFile.java | 117 ++++++++++++++++-- .../apache/hadoop/io/WritableComparator.java | 12 +- .../apache/hadoop/metrics2/MetricsSystem.java | 6 +- .../hadoop/metrics2/MetricsSystemMXBean.java | 10 +- .../apache/hadoop/util/OperationDuration.java | 4 +- 11 files changed, 232 insertions(+), 38 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java index c45dfc21a1b1d..6a5d01fb3b074 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java @@ -141,6 +141,7 @@ public static String entryToString( /** * Convert entry values to the string format used in logging. * + * @param type of values. * @param name statistic name * @param value stat value * @return formatted string @@ -178,6 +179,8 @@ private static Map copyMap( /** * A passthrough copy operation suitable for immutable * types, including numbers. + * + * @param type of values. * @param src source object * @return the source object */ @@ -437,6 +440,7 @@ public static Function trackJavaFunctionDuration( * @param input input callable. * @param return type. * @return the result of the operation. + * @throws IOException raised on errors performing I/O. */ public static B trackDuration( DurationTrackerFactory factory, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java index 49807ac4b4597..5abe36653e37b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java @@ -270,6 +270,7 @@ public Builder setName(String name){ * specifies the binding address, and the port specifies the * listening port. Unspecified or zero port means that the server * can listen to any port. + * @return Builder */ public Builder addEndpoint(URI endpoint) { endpoints.add(endpoint); @@ -280,6 +281,9 @@ public Builder addEndpoint(URI endpoint) { * Set the hostname of the http server. The host name is used to resolve the * _HOST field in Kerberos principals. The hostname of the first listener * will be used if the name is unspecified. + * + * @param hostName hostName + * @return Builder */ public Builder hostName(String hostName) { this.hostName = hostName; @@ -308,6 +312,9 @@ public Builder keyPassword(String password) { /** * Specify whether the server should authorize the client in SSL * connections. + * + * @param value value + * @return Builder */ public Builder needsClientAuth(boolean value) { this.needsClientAuth = value; @@ -332,6 +339,9 @@ public Builder setConf(Configuration conf) { /** * Specify the SSL configuration to load. This API provides an alternative * to keyStore/keyPassword/trustStore. 
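Since the Builder setters above now return Builder, the fluent chain they enable is worth showing; a sketch with illustrative name and endpoint (port 0 picks any free port):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer2;

public class HttpServerSketch {
  public static void main(String[] args) throws Exception {
    HttpServer2 server = new HttpServer2.Builder()
        .setName("demo")
        .addEndpoint(URI.create("http://localhost:0"))
        .hostName("localhost")
        .needsClientAuth(false)
        .setConf(new Configuration())
        .build();
    server.start();
    System.out.println(server.getConnectorAddress(0));
    server.stop();
  }
}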
+ * + * @param sslCnf sslCnf + * @return Builder */ public Builder setSSLConf(Configuration sslCnf) { this.sslConf = sslCnf; @@ -1610,6 +1620,7 @@ public String toString() { * @param request the servlet request. * @param response the servlet response. * @return TRUE/FALSE based on the logic decribed above. + * @throws IOException raised on errors performing I/O. */ public static boolean isInstrumentationAccessAllowed( ServletContext servletContext, HttpServletRequest request, @@ -1631,6 +1642,8 @@ public static boolean isInstrumentationAccessAllowed( * Does the user sending the HttpServletRequest has the administrator ACLs? If * it isn't the case, response will be modified to send an error to the user. * + * @param servletContext servletContext + * @param request request * @param response used to send the error response if user does not have admin access. * @return true if admin-authorized, false otherwise * @throws IOException diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java index eef74628e16b1..616d5ebccf05a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java @@ -84,7 +84,10 @@ private synchronized void addToMap(Class clazz, byte id) { idToClassMap.put(id, clazz); } - /** Add a Class to the maps if it is not already present. */ + /** + * Add a Class to the maps if it is not already present. + * @param clazz clazz + */ protected synchronized void addToMap(Class clazz) { if (classToIdMap.containsKey(clazz)) { return; @@ -97,17 +100,28 @@ protected synchronized void addToMap(Class clazz) { addToMap(clazz, id); } - /** @return the Class class for the specified id */ + /** + * the Class class for the specified id. + * @param id id + * @return the Class class for the specified id + */ protected Class getClass(byte id) { return idToClassMap.get(id); } - /** @return the id for the specified Class */ + /** + * get id. + * @return the id for the specified Class + * @param clazz clazz + */ protected byte getId(Class clazz) { return classToIdMap.containsKey(clazz) ? classToIdMap.get(clazz) : -1; } - /** Used by child copy constructors. */ + /** + * Used by child copy constructors. + * @param other other + */ protected synchronized void copy(Writable other) { if (other != null) { try { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java index bee5fd2cb430c..b51be38f0aa8c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java @@ -38,7 +38,15 @@ protected ArrayFile() {} // no public ctor public static class Writer extends MapFile.Writer { private LongWritable count = new LongWritable(0); - /** Create the named file for values of the named class. */ + /** + * Create the named file for values of the named class. + * + * @param conf configuration + * @param fs file system + * @param file file + * @param valClass valClass + * @throws IOException raised on errors performing I/O. 
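The class-to-id map documented in the AbstractMapWritable hunks above is exercised by its concrete subclasses; a minimal MapWritable sketch:

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class MapWritableSketch {
  public static void main(String[] args) {
    MapWritable map = new MapWritable();   // extends AbstractMapWritable
    // Key/value classes are added to the id map on first use.
    map.put(new Text("count"), new IntWritable(42));
    Writable v = map.get(new Text("count"));
    System.out.println(v);                 // 42
  }
}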
+   */
  public Writer(Configuration conf, FileSystem fs,
                String file, Class valClass)
      throws IOException {
@@ -46,7 +54,17 @@ public Writer(Configuration conf, FileSystem fs,
          valueClass(valClass));
  }

-  /** Create the named file for values of the named class. */
+  /**
+   * Create the named file for values of the named class.
+   *
+   * @param conf configuration.
+   * @param fs the file system.
+   * @param file name of the file.
+   * @param valClass class of the values.
+   * @param compress compression type.
+   * @param progress progress callback.
+   * @throws IOException raised on errors performing I/O.
+   */
  public Writer(Configuration conf, FileSystem fs,
                String file, Class valClass,
                CompressionType compress, Progressable progress)
      throws IOException {
@@ -58,7 +76,11 @@ public Writer(Configuration conf, FileSystem fs,
            progressable(progress));
  }

-  /** Append a value to the file. */
+  /**
+   * Append a value to the file.
+   * @param value the value to append.
+   * @throws IOException raised on errors performing I/O.
+   */
  public synchronized void append(Writable value) throws IOException {
    super.append(count, value);                 // add to map
    count.set(count.get()+1);                   // increment count
@@ -69,31 +91,60 @@ public synchronized void append(Writable value) throws IOException {
  public static class Reader extends MapFile.Reader {
    private LongWritable key = new LongWritable();

-    /** Construct an array reader for the named file.*/
+    /**
+     * Construct an array reader for the named file.
+     * @param fs the file system.
+     * @param file name of the file.
+     * @param conf configuration.
+     * @throws IOException raised on errors performing I/O.
+     */
    public Reader(FileSystem fs, String file,
                  Configuration conf) throws IOException {
      super(new Path(file), conf);
    }

-    /** Positions the reader before its nth value. */
+    /**
+     * Positions the reader before its nth value.
+     *
+     * @param n the position to seek to.
+     * @throws IOException raised on errors performing I/O.
+     */
    public synchronized void seek(long n) throws IOException {
      key.set(n);
      seek(key);
    }

-    /** Read and return the next value in the file. */
+    /**
+     * Read and return the next value in the file.
+     *
+     * @param value object to read the next value into.
+     * @throws IOException raised on errors performing I/O.
+     * @return the value, or null at the end of the file.
+     */
    public synchronized Writable next(Writable value) throws IOException {
      return next(key, value) ? value : null;
    }

-    /** Returns the key associated with the most recent call to {@link
+    /**
+     * Returns the key associated with the most recent call to {@link
     * #seek(long)}, {@link #next(Writable)}, or {@link
-    * #get(long,Writable)}. */
+    * #get(long,Writable)}.
+     *
+     * @return the key.
+     * @throws IOException raised on errors performing I/O.
+     */
    public synchronized long key() throws IOException {
      return key.get();
    }

-    /** Return the nth value in the file. */
+    /**
+     * Return the nth value in the file.
+     * @param n index of the value to fetch.
+     * @param value object to store the value in.
+     * @throws IOException raised on errors performing I/O.
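A round-trip sketch of the ArrayFile API documented above (these constructors are deprecated but still show the key-per-append layout; the local path is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.ArrayFile;
import org.apache.hadoop.io.Text;

public class ArrayFileSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);

    ArrayFile.Writer writer =
        new ArrayFile.Writer(conf, fs, "/tmp/array-demo", Text.class);
    writer.append(new Text("first"));    // stored under key 0
    writer.append(new Text("second"));   // stored under key 1
    writer.close();

    ArrayFile.Reader reader = new ArrayFile.Reader(fs, "/tmp/array-demo", conf);
    Text value = new Text();
    reader.get(1, value);                // seek to n, then read it
    System.out.println(value);           // second
    reader.close();
  }
}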
+ * @return writable + */ public synchronized Writable get(long n, Writable value) throws IOException { key.set(n); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java index 2b6f3166bc282..adafe0412bc83 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java @@ -106,7 +106,9 @@ public ArrayPrimitiveWritable() { /** * Construct an instance of known type but no value yet - * for use with type-specific wrapper classes + * for use with type-specific wrapper classes. + * + * @param componentType componentType */ public ArrayPrimitiveWritable(Class componentType) { checkPrimitive(componentType); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java index a32c44c8e5058..24ad68fab0176 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java @@ -31,11 +31,15 @@ public abstract class BinaryComparable implements Comparable { /** * Return n st bytes 0..n-1 from {#getBytes()} are valid. + * + * @return length */ public abstract int getLength(); /** * Return representative byte array for this instance. + * + * @return getBytes */ public abstract byte[] getBytes(); @@ -53,6 +57,11 @@ public int compareTo(BinaryComparable other) { /** * Compare bytes from {#getBytes()} to those provided. + * + * @param other other + * @param off off + * @param len len + * @return compareBytes */ public int compareTo(byte[] other, int off, int len) { return WritableComparator.compareBytes(getBytes(), 0, getLength(), diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java index c6cba88304971..5519507848253 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java @@ -98,8 +98,16 @@ public static class Writer implements java.io.Closeable { private long lastIndexKeyCount = Long.MIN_VALUE; - /** Create the named map for keys of the named class. + /** + * Create the named map for keys of the named class. * @deprecated Use Writer(Configuration, Path, Option...) instead. + * + * @param conf configuration + * @param fs filesystem + * @param dirName dirName + * @param keyClass keyClass + * @param valClass valClass + * @throws IOException raised on errors performing I/O. */ @Deprecated public Writer(Configuration conf, FileSystem fs, String dirName, @@ -108,8 +116,18 @@ public Writer(Configuration conf, FileSystem fs, String dirName, this(conf, new Path(dirName), keyClass(keyClass), valueClass(valClass)); } - /** Create the named map for keys of the named class. + /** + * Create the named map for keys of the named class. * @deprecated Use Writer(Configuration, Path, Option...) instead. 
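Because every deprecation note above points at the Option-based constructor, a sketch of that non-deprecated form (directory path is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class MapFileSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path dir = new Path("/tmp/map-demo");
    try (MapFile.Writer writer = new MapFile.Writer(conf, dir,
        MapFile.Writer.keyClass(Text.class),
        SequenceFile.Writer.valueClass(IntWritable.class))) {
      writer.append(new Text("a"), new IntWritable(1)); // keys must arrive sorted
      writer.append(new Text("b"), new IntWritable(2));
    }
    try (MapFile.Reader reader = new MapFile.Reader(dir, conf)) {
      IntWritable val = new IntWritable();
      reader.get(new Text("b"), val);   // index lookup, then seek in the data file
      System.out.println(val);          // 2
    }
  }
}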
+ * + * @param conf configuration + * @param fs fs + * @param dirName dirName + * @param keyClass keyClass + * @param valClass valClass + * @param compress compress + * @param progress progress + * @throws IOException raised on errors performing I/O. */ @Deprecated public Writer(Configuration conf, FileSystem fs, String dirName, @@ -120,8 +138,19 @@ public Writer(Configuration conf, FileSystem fs, String dirName, compression(compress), progressable(progress)); } - /** Create the named map for keys of the named class. + /** + * Create the named map for keys of the named class. * @deprecated Use Writer(Configuration, Path, Option...) instead. + * + * @param conf configuration + * @param fs FileSystem + * @param dirName dirName + * @param keyClass keyClass + * @param valClass valClass + * @param compress compress + * @param codec codec + * @param progress progress + * @throws IOException raised on errors performing I/O. */ @Deprecated public Writer(Configuration conf, FileSystem fs, String dirName, @@ -132,8 +161,16 @@ public Writer(Configuration conf, FileSystem fs, String dirName, compression(compress, codec), progressable(progress)); } - /** Create the named map for keys of the named class. + /** + * Create the named map for keys of the named class. * @deprecated Use Writer(Configuration, Path, Option...) instead. + * @param conf configuration + * @param fs fs + * @param dirName dirName + * @param keyClass keyClass + * @param valClass valClass + * @param compress compress + * @throws IOException raised on errors performing I/O. */ @Deprecated public Writer(Configuration conf, FileSystem fs, String dirName, @@ -145,6 +182,11 @@ public Writer(Configuration conf, FileSystem fs, String dirName, /** Create the named map using the named key comparator. * @deprecated Use Writer(Configuration, Path, Option...) instead. + * @param conf configuration + * @param fs fs + * @param dirName dirName + * @param comparator comparator + * @param valClass valClass * @throws IOException raised on errors performing I/O. */ @Deprecated @@ -158,6 +200,11 @@ public Writer(Configuration conf, FileSystem fs, String dirName, /** Create the named map using the named key comparator. * @param conf configuration * @param fs filesystem + * @param dirName dirName + * @param comparator comparator + * @param valClass valClass + * @param compress compress + * @throws IOException raised on errors performing I/O. * @deprecated Use Writer(Configuration, Path, Option...) instead. */ @Deprecated @@ -168,8 +215,18 @@ public Writer(Configuration conf, FileSystem fs, String dirName, valueClass(valClass), compression(compress)); } - /** Create the named map using the named key comparator. + /** + * Create the named map using the named key comparator. * @deprecated Use Writer(Configuration, Path, Option...)} instead. + * + * @param conf configuration + * @param fs filesystem + * @param dirName dirName + * @param comparator comparator + * @param valClass valClass + * @param compress CompressionType + * @param progress progress + * @throws IOException raised on errors performing I/O. */ @Deprecated public Writer(Configuration conf, FileSystem fs, String dirName, @@ -181,8 +238,19 @@ public Writer(Configuration conf, FileSystem fs, String dirName, progressable(progress)); } - /** Create the named map using the named key comparator. + /** + * Create the named map using the named key comparator. * @deprecated Use Writer(Configuration, Path, Option...) instead. 
+ * + * @param conf configuration + * @param fs FileSystem + * @param dirName dirName + * @param comparator comparator + * @param valClass valClass + * @param compress CompressionType + * @param codec codec + * @param progress progress + * @throws IOException raised on errors performing I/O. */ @Deprecated public Writer(Configuration conf, FileSystem fs, String dirName, @@ -288,16 +356,26 @@ public Writer(Configuration conf, this.index = SequenceFile.createWriter(conf, indexOptions); } - /** The number of entries that are added before an index entry is added.*/ + /** + * The number of entries that are added before an index entry is added. + * @return indexInterval + */ public int getIndexInterval() { return indexInterval; } - /** Sets the index interval. + /** + * Sets the index interval. * @see #getIndexInterval() + * + * @param interval interval */ public void setIndexInterval(int interval) { indexInterval = interval; } - /** Sets the index interval and stores it in conf + /** + * Sets the index interval and stores it in conf. * @see #getIndexInterval() + * + * @param conf configuration + * @param interval interval */ public static void setIndexInterval(Configuration conf, int interval) { conf.setInt(INDEX_INTERVAL, interval); @@ -310,8 +388,14 @@ public synchronized void close() throws IOException { index.close(); } - /** Append a key/value pair to the map. The key must be greater or equal - * to the previous key added to the map. */ + /** + * Append a key/value pair to the map. The key must be greater or equal + * to the previous key added to the map. + * + * @param key key + * @param val value + * @throws IOException raised on errors performing I/O. + */ public synchronized void append(WritableComparable key, Writable val) throws IOException { @@ -672,9 +756,16 @@ else if (cmp > 0) return -(low + 1); // key not found. } - /** Read the next key/value pair in the map into key and + /** + * Read the next key/value pair in the map into key and * val. Returns true if such a pair exists and false when at - * the end of the map */ + * the end of the map. + * + * @param key WritableComparable + * @param val Writable + * @return if such a pair exists true,not false + * @throws IOException raised on errors performing I/O. + */ public synchronized boolean next(WritableComparable key, Writable val) throws IOException { return data.next(key, val); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java index 1754b8d06f6fa..2f90a084ddf19 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java @@ -46,7 +46,12 @@ public class WritableComparator implements RawComparator, Configurable { private Configuration conf; - /** For backwards compatibility. **/ + /** + * For backwards compatibility. + * + * @param c WritableComparable Type + * @return WritableComparator + */ public static WritableComparator get(Class c) { return get(c, null); } @@ -111,7 +116,10 @@ protected WritableComparator() { this(null); } - /** Construct for a {@link WritableComparable} implementation. */ + /** + * Construct for a {@link WritableComparable} implementation. 
+ * @param keyClass WritableComparable Class + */ protected WritableComparator(Class keyClass) { this(keyClass, null, false); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java index a277abd6e1384..e4693ed775e2d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java @@ -50,7 +50,7 @@ public abstract class MetricsSystem implements MetricsSystemMXBean { * the annotations of the source object.) * @param desc the description of the source (or null. See above.) * @return the source object - * @exception MetricsException + * @exception MetricsException Metrics Exception */ public abstract T register(String name, String desc, T source); @@ -65,7 +65,7 @@ public abstract class MetricsSystem implements MetricsSystemMXBean { * @param the actual type of the source object * @param source object to register * @return the source object - * @exception MetricsException + * @exception MetricsException Metrics Exception */ public T register(T source) { return register(null, null, source); @@ -85,7 +85,7 @@ public T register(T source) { * @param name of the sink. Must be unique. * @param desc the description of the sink * @return the sink - * @exception MetricsException + * @exception MetricsException Metrics Exception */ public abstract T register(String name, String desc, T sink); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java index e471ab7498ce4..f0fd7689b8604 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java @@ -29,19 +29,19 @@ public interface MetricsSystemMXBean { /** * Start the metrics system - * @throws MetricsException + * @throws MetricsException Metrics Exception */ public void start(); /** * Stop the metrics system - * @throws MetricsException + * @throws MetricsException Metrics Exception */ public void stop(); /** * Start metrics MBeans - * @throws MetricsException + * @throws MetricsException Metrics Exception */ public void startMetricsMBeans(); @@ -49,7 +49,7 @@ public interface MetricsSystemMXBean { * Stop metrics MBeans. * Note, it doesn't stop the metrics system control MBean, * i.e this interface. - * @throws MetricsException + * @throws MetricsException Metrics Exception */ public void stopMetricsMBeans(); @@ -57,7 +57,7 @@ public interface MetricsSystemMXBean { * @return the current config * Avoided getConfig, as it'll turn into a "Config" attribute, * which doesn't support multiple line values in jconsole. 
- * @throws MetricsException
+ * @throws MetricsException Metrics Exception
 */
public String currentConfig();
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/OperationDuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/OperationDuration.java
index fdd25286a2300..1fb920e99f08e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/OperationDuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/OperationDuration.java
@@ -95,9 +95,11 @@ public String toString() {
 /**
  * Get the duration in milliseconds.
- * <p></p>
+ *
+ * <p></p>
 * This will be 0 until a call
 * to {@link #finished()} has been made.
+ * <p></p>
* @return the currently recorded duration. */ public long value() { From 0f7af84b0059923753a0ced329efb4ebeba691f3 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Fri, 13 May 2022 19:32:04 -0700 Subject: [PATCH 39/53] HADOOP-18229. Fix some java doc compilation 150 warnings. --- .../hadoop/fs/FSDataOutputStreamBuilder.java | 2 +- .../hadoop/fs/shell/find/BaseExpression.java | 2 + .../hadoop/ha/ActiveStandbyElector.java | 1 + .../apache/hadoop/ha/HAServiceProtocol.java | 2 +- .../org/apache/hadoop/ha/HealthMonitor.java | 3 + .../hadoop/ha/ZKFailoverController.java | 2 + .../org/apache/hadoop/http/HtmlQuoting.java | 1 + .../org/apache/hadoop/http/HttpServer2.java | 23 +++- .../java/org/apache/hadoop/io/ArrayFile.java | 1 - .../org/apache/hadoop/io/BloomMapFile.java | 2 +- .../org/apache/hadoop/io/BooleanWritable.java | 9 +- .../io/BoundedByteArrayOutputStream.java | 14 ++- .../org/apache/hadoop/io/ByteWritable.java | 10 +- .../org/apache/hadoop/io/BytesWritable.java | 4 + .../apache/hadoop/io/CompressedWritable.java | 13 ++- .../org/apache/hadoop/io/DataInputBuffer.java | 23 +++- .../apache/hadoop/io/DataOutputBuffer.java | 32 +++++- .../org/apache/hadoop/io/EnumSetWritable.java | 15 ++- .../java/org/apache/hadoop/io/MapFile.java | 67 +++++++++-- .../apache/hadoop/io/WritableComparator.java | 108 +++++++++++++++--- .../AbstractDelegationTokenSecretManager.java | 1 + .../hadoop/util/concurrent/AsyncGet.java | 1 + .../hadoop/util/curator/ZKCuratorManager.java | 1 + .../apache/hadoop/util/hash/JenkinsHash.java | 2 +- 24 files changed, 277 insertions(+), 62 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java index 6212fa58c2228..e7d79f2a90f10 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java @@ -248,7 +248,7 @@ protected EnumSet getFlags() { /** * Create an FSDataOutputStream at the specified path. * - * return Generics Type B + * @return return Generics Type B */ public B create() { flags.add(CreateFlag.CREATE); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java index 0f4c1771012f0..542f3e9134993 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java @@ -295,6 +295,8 @@ protected FileStatus getFileStatus(PathData item, int depth) * @param item * PathData * @return Path + * + * @throws IOException raised on errors performing I/O. 
*/ protected Path getPath(PathData item) throws IOException { return item.path; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java index 7394e5fb46633..2236c9cdf4195 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java @@ -254,6 +254,7 @@ public ActiveStandbyElector(String zookeeperHostPorts, * reference to callback interface object * @param failFast * whether need to add the retry when establishing ZK connection. + * @param maxRetryNum max Retry Num * @throws IOException * raised on errors performing I/O. * @throws HadoopIllegalArgumentException diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java index 6eeb93012b125..66604cc39134c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java @@ -119,7 +119,7 @@ public void monitorHealth() throws HealthCheckFailedException, * Request service to transition to active state. No operation, if the * service is already in active state. * - * @param reqInfo + * @param reqInfo reqInfo * @throws ServiceFailedException * if transition from standby to active fails. * @throws AccessControlException diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java index 7e90fb77a0702..f0d1f29b7f95c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java @@ -184,6 +184,9 @@ private void tryConnect() { /** * Connect to the service to be monitored. Stubbed out for easier testing. + * + * @throws IOException raised on errors performing I/O. + * @return HAServiceProtocol */ protected HAServiceProtocol createProxy() throws IOException { return targetToMonitor.getHealthMonitorProxy(conf, rpcTimeout, rpcConnectRetries); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java index 87a80b868cdb1..13e55ccfb3a16 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java @@ -153,6 +153,8 @@ protected abstract void checkRpcAdminAccess() * the ZKFC will do all of its work. This is so that multiple federated * nameservices can run on the same ZK quorum without having to manually * configure them to separate subdirectories. 
+ * + * @return ScopeInsideParentNode */ protected abstract String getScopeInsideParentNode(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java index 51db21c185f20..5f47ddb339212 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java @@ -80,6 +80,7 @@ public static boolean needsQuoting(String str) { * @param buffer the byte array to take the characters from * @param off the index of the first byte to quote * @param len the number of bytes to quote + * @throws IOException raised on errors performing I/O. */ public static void quoteHtmlChars(OutputStream output, byte[] buffer, int off, int len) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java index 5abe36653e37b..3bf3b590cb9fd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java @@ -908,8 +908,11 @@ private static FilterInitializer[] getFilterInitializers(Configuration conf) { /** * Add default apps. + * + * @param parent contexthandlercollection * @param appDir The application directory - * @throws IOException + * @param conf configuration + * @throws IOException raised on errors performing I/O. */ protected void addDefaultApps(ContextHandlerCollection parent, final String appDir, Configuration conf) throws IOException { @@ -1190,6 +1193,12 @@ public void addGlobalFilter(String name, String classname, /** * Define a filter for a context and set up default url mappings. + * + * @param ctx ctx + * @param name name + * @param classname classname + * @param parameters parameters + * @param urls urls */ public static void defineFilter(ServletContextHandler ctx, String name, String classname, Map parameters, String[] urls) { @@ -1300,6 +1309,7 @@ public int getPort() { /** * Get the address that corresponds to a particular connector. * + * @param index index * @return the corresponding address for the connector, or null if there's no * such connector or the connector is not bounded or was closed. */ @@ -1319,6 +1329,9 @@ public InetSocketAddress getConnectorAddress(int index) { /** * Set the min, max number of worker threads (simultaneous connections). + * + * @param min min + * @param max max */ public void setThreads(int min, int max) { QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool(); @@ -1345,6 +1358,8 @@ private void initSpnego(Configuration conf, String hostName, /** * Start the server. Does not wait for the server to start. + * + * @throws IOException raised on errors performing I/O. */ public void start() throws IOException { try { @@ -1519,7 +1534,9 @@ void openListeners() throws Exception { } /** - * stop the server + * stop the server. + * + * @throws Exception exception */ public void stop() throws Exception { MultiException exception = null; @@ -1646,7 +1663,7 @@ public static boolean isInstrumentationAccessAllowed( * @param request request * @param response used to send the error response if user does not have admin access. 
* @return true if admin-authorized, false otherwise - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static boolean hasAdministratorAccess( ServletContext servletContext, HttpServletRequest request, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java index b51be38f0aa8c..ce0075aedcc14 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java @@ -132,7 +132,6 @@ public synchronized Writable next(Writable value) throws IOException { * * @return key key * @throws IOException raised on errors performing I/O. - * @return seek long */ public synchronized long key() throws IOException { return key.get(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java index 519fcd74cbb71..91ea07d5de412 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java @@ -259,7 +259,7 @@ private void initBloomFilter(Path dirName, * probability of false positives. * @param key key to check * @return false iff key doesn't exist, true if key probably exists. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public boolean probablyHasKey(WritableComparable key) throws IOException { if (bloomFilter == null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java index 0079079a7921d..a779254fdc277 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java @@ -35,21 +35,24 @@ public class BooleanWritable implements WritableComparable { */ public BooleanWritable() {}; - /** + /** + * @param value value */ public BooleanWritable(boolean value) { set(value); } /** - * Set the value of the BooleanWritable + * Set the value of the BooleanWritable. + * @param value value */ public void set(boolean value) { this.value = value; } /** - * Returns the value of the BooleanWritable + * Returns the value of the BooleanWritable. + * @return the value of the BooleanWritable */ public boolean get() { return value; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java index c27449d36189c..470e61ed1a302 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java @@ -114,20 +114,28 @@ public void reset() { this.currentPointer = startOffset; } - /** Return the current limit */ + /** + * Return the current limit. + * @return limit + */ public int getLimit() { return limit; } - /** Returns the underlying buffer. + /** + * Returns the underlying buffer. * Data is only valid to {@link #size()}. 
+ * @return the underlying buffer */ public byte[] getBuffer() { return buffer; } - /** Returns the length of the valid data + /** + * Returns the length of the valid data * currently in the buffer. + * + * @return the length of the valid data */ public int size() { return currentPointer - startOffset; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java index ffcdea2c9a3ab..86374fc4b8fa0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java @@ -33,10 +33,16 @@ public ByteWritable() {} public ByteWritable(byte value) { set(value); } - /** Set the value of this ByteWritable. */ + /** + * Set the value of this ByteWritable. + * @param value value. + */ public void set(byte value) { this.value = value; } - /** Return the value of this ByteWritable. */ + /** + * Return the value of this ByteWritable. + * @return value bytes + */ public byte get() { return value; } @Override diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java index c5538c9e56e85..2e753d489979d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java @@ -77,6 +77,8 @@ public BytesWritable(byte[] bytes, int length) { /** * Get a copy of the bytes that is exactly the length of the data. * See {@link #getBytes()} for faster access to the underlying array. + * + * @return copyBytes */ public byte[] copyBytes() { return Arrays.copyOf(bytes, size); @@ -95,6 +97,7 @@ public byte[] getBytes() { /** * Get the data from the BytesWritable. * @deprecated Use {@link #getBytes()} instead. + * @return data from the BytesWritable. */ @Deprecated public byte[] get() { @@ -112,6 +115,7 @@ public int getLength() { /** * Get the current size of the buffer. * @deprecated Use {@link #getLength()} instead. + * @return current size of the buffer */ @Deprecated public int getSize() { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java index 6550e1f2fde04..1f303a8888a04 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java @@ -67,7 +67,11 @@ protected void ensureInflated() { } } - /** Subclasses implement this instead of {@link #readFields(DataInput)}. */ + /** + * Subclasses implement this instead of {@link #readFields(DataInput)}. + * @param in data input + * @throws IOException raised on errors performing I/O. + */ protected abstract void readFieldsCompressed(DataInput in) throws IOException; @@ -87,7 +91,12 @@ public final void write(DataOutput out) throws IOException { out.write(compressed); } - /** Subclasses implement this instead of {@link #write(DataOutput)}. */ + /** + * Subclasses implement this instead of {@link #write(DataOutput)}. + * + * @param out data output + * @throws IOException raised on errors performing I/O. 
+ */ protected abstract void writeCompressed(DataOutput out) throws IOException; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java index 63c41c2e75008..e707d4a83fca3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java @@ -140,12 +140,23 @@ private DataInputBuffer(Buffer buffer) { this.buffer = buffer; } - /** Resets the data that the buffer reads. */ + /** + * Resets the data that the buffer reads. + * + * @param input input + * @param length length + */ public void reset(byte[] input, int length) { buffer.reset(input, 0, length); } - /** Resets the data that the buffer reads. */ + /** + * Resets the data that the buffer reads. + * + * @param input input + * @param start start + * @param length length + */ public void reset(byte[] input, int start, int length) { buffer.reset(input, start, length); } @@ -154,12 +165,18 @@ public byte[] getData() { return buffer.getData(); } - /** Returns the current position in the input. */ + /** + * Returns the current position in the input. + * + * @return position + */ public int getPosition() { return buffer.getPosition(); } /** * Returns the index one greater than the last valid character in the input * stream buffer. + * + * @return length. */ public int getLength() { return buffer.getLength(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java index 1d86b89701c03..fec36488b96b1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java @@ -99,27 +99,45 @@ private DataOutputBuffer(Buffer buffer) { this.buffer = buffer; } - /** Returns the current contents of the buffer. + /** + * Returns the current contents of the buffer. * Data is only valid to {@link #getLength()}. + * + * @return data byte */ public byte[] getData() { return buffer.getData(); } - /** Returns the length of the valid data currently in the buffer. */ + /** + * Returns the length of the valid data currently in the buffer. + * @return length + */ public int getLength() { return buffer.getLength(); } - /** Resets the buffer to empty. */ + /** + * Resets the buffer to empty. + * @return DataOutputBuffer + */ public DataOutputBuffer reset() { this.written = 0; buffer.reset(); return this; } - /** Writes bytes from a DataInput directly into the buffer. */ + /** + * Writes bytes from a DataInput directly into the buffer. + * @param in data input + * @param length length + * @throws IOException raised on errors performing I/O. + */ public void write(DataInput in, int length) throws IOException { buffer.write(in, length); } - /** Write to a file stream */ + /** + * Write to a file stream. + * @param out OutputStream + * @throws IOException raised on errors performing I/O. + */ public void writeTo(OutputStream out) throws IOException { buffer.writeTo(out); } @@ -128,6 +146,10 @@ public void writeTo(OutputStream out) throws IOException { * Overwrite an integer into the internal buffer. 
Note that this call can only * be used to overwrite existing data in the buffer, i.e., buffer#count cannot * be increased, and DataOutputStream#written cannot be increased. + * + * @param v v + * @param offset offset + * @throws IOException raised on errors performing I/O. */ public void writeInt(int v, int offset) throws IOException { Preconditions.checkState(offset + 4 <= buffer.getLength()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java index be86159519b87..7482b0304e54d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java @@ -64,8 +64,8 @@ public boolean add(E e) { * the argument value's size is bigger than zero, the argument * elementType is not be used. * - * @param value - * @param elementType + * @param value enumSet value + * @param elementType elementType */ public EnumSetWritable(EnumSet value, Class elementType) { set(value, elementType); @@ -75,7 +75,7 @@ public EnumSetWritable(EnumSet value, Class elementType) { * Construct a new EnumSetWritable. Argument value should not be null * or empty. * - * @param value + * @param value enumSet value */ public EnumSetWritable(EnumSet value) { this(value, null); @@ -88,8 +88,8 @@ public EnumSetWritable(EnumSet value) { * null. If the argument value's size is bigger than zero, the * argument elementType is not be used. * - * @param value - * @param elementType + * @param value enumSet Value + * @param elementType elementType */ public void set(EnumSet value, Class elementType) { if ((value == null || value.size() == 0) @@ -106,7 +106,10 @@ public void set(EnumSet value, Class elementType) { } } - /** Return the value of this EnumSetWritable. */ + /** + * Return the value of this EnumSetWritable. + * @return EnumSet + */ public EnumSet get() { return value; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java index 5519507848253..bf96cd2aee87b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java @@ -457,10 +457,18 @@ public static class Reader implements java.io.Closeable { private WritableComparable[] keys; private long[] positions; - /** Returns the class of keys in this file. */ + /** + * Returns the class of keys in this file. + * + * @return keyClass + */ public Class getKeyClass() { return data.getKeyClass(); } - /** Returns the class of values in this file. */ + /** + * Returns the class of values in this file. + * + * @return Value Class + */ public Class getValueClass() { return data.getValueClass(); } public static interface Option extends SequenceFile.Reader.Option {} @@ -490,8 +498,14 @@ public Reader(Path dir, Configuration conf, open(dir, comparator, conf, opts); } - /** Construct a map reader for the named map. + /** + * Construct a map reader for the named map. * @deprecated + * + * @param fs FileSystem + * @param dirName dirName + * @param conf configuration + * @throws IOException raised on errors performing I/O. 
*/ @Deprecated public Reader(FileSystem fs, String dirName, @@ -537,6 +551,12 @@ protected synchronized void open(Path dir, /** * Override this method to specialize the type of * {@link SequenceFile.Reader} returned. + * + * @param dataFile data file + * @param conf configuration + * @param options options + * @throws IOException raised on errors performing I/O. + * @return SequenceFile.Reader */ protected SequenceFile.Reader createDataFileReader(Path dataFile, Configuration conf, @@ -603,13 +623,21 @@ private void readIndex() throws IOException { } } - /** Re-positions the reader before its first key. */ + /** + * Re-positions the reader before its first key. + * + * @throws IOException raised on errors performing I/O. + */ public synchronized void reset() throws IOException { data.seek(firstPosition); } - /** Get the key at approximately the middle of the file. Or null if the - * file is empty. + /** + * Get the key at approximately the middle of the file. Or null if the + * file is empty. + * + * @throws IOException raised on errors performing I/O. + * @return WritableComparable */ public synchronized WritableComparable midKey() throws IOException { @@ -621,9 +649,11 @@ public synchronized WritableComparable midKey() throws IOException { return keys[(count - 1) / 2]; } - /** Reads the final key from the file. + /** + * Reads the final key from the file. * * @param key key to read into + * @throws IOException raised on errors performing I/O. */ public synchronized void finalKey(WritableComparable key) throws IOException { @@ -643,9 +673,14 @@ public synchronized void finalKey(WritableComparable key) } } - /** Positions the reader at the named key, or if none such exists, at the + /** + * Positions the reader at the named key, or if none such exists, at the * first entry after the named key. Returns true iff the named key exists * in this map. + * + * @param key key + * @throws IOException raised on errors performing I/O. + * @return if the named key exists in this map true, not false. */ public synchronized boolean seek(WritableComparable key) throws IOException { return seekInternal(key) == 0; @@ -771,7 +806,13 @@ public synchronized boolean next(WritableComparable key, Writable val) return data.next(key, val); } - /** Return the value for the named key, or null if none exists. */ + /** + * Return the value for the named key, or null if none exists. + * @param key key + * @param val val + * @throws Writable if such a pair exists true,not false + * @throws IOException raised on errors performing I/O. + */ public synchronized Writable get(WritableComparable key, Writable val) throws IOException { if (seek(key)) { @@ -786,9 +827,10 @@ public synchronized Writable get(WritableComparable key, Writable val) * Returns key or if it does not exist, at the first entry * after the named key. * -- * @param key - key that we're trying to find -- * @param val - data value if key is found -- * @return - the key that was the closest match or null if eof. + * @param key - key that we're trying to find + * @param val - data value if key is found + * @return - the key that was the closest match or null if eof. + * @throws IOException raised on errors performing I/O. */ public synchronized WritableComparable getClosest(WritableComparable key, Writable val) @@ -805,6 +847,7 @@ public synchronized WritableComparable getClosest(WritableComparable key, * the first entry that falls just before the key. Otherwise, * return the record that sorts just after. 
* @return - the key that was the closest match or null if eof. + * @throws IOException raised on errors performing I/O. */ public synchronized WritableComparable getClosest(WritableComparable key, Writable val, final boolean before) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java index 2f90a084ddf19..53f81e34db8c6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java @@ -56,7 +56,12 @@ public static WritableComparator get(Class c) { return get(c, null); } - /** Get a comparator for a {@link WritableComparable} implementation. */ + /** + * Get a comparator for a {@link WritableComparable} implementation. + * @param c class + * @param conf configuration + * @return WritableComparator + */ public static WritableComparator get( Class c, Configuration conf) { WritableComparator comparator = comparators.get(c); @@ -100,9 +105,13 @@ private static void forceInit(Class cls) { } } - /** Register an optimized comparator for a {@link WritableComparable} + /** + * Register an optimized comparator for a {@link WritableComparable} * implementation. Comparators registered with this method must be - * thread-safe. */ + * thread-safe. + * @param c class + * @param comparator WritableComparator + */ public static void define(Class c, WritableComparator comparator) { comparators.put(c, comparator); } @@ -144,10 +153,16 @@ protected WritableComparator(Class keyClass, } } - /** Returns the WritableComparable implementation class. */ + /** + * Returns the WritableComparable implementation class. + * @return WritableComparable. + */ public Class getKeyClass() { return keyClass; } - /** Construct a new {@link WritableComparable} instance. */ + /** + * Construct a new {@link WritableComparable} instance. + * @return WritableComparable. + */ public WritableComparable newKey() { return ReflectionUtils.newInstance(keyClass, conf); } @@ -176,27 +191,54 @@ public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { return compare(key1, key2); // compare them } - /** Compare two WritableComparables. + /** + * Compare two WritableComparables. * - *
<p>
The default implementation uses the natural ordering, calling {@link - * Comparable#compareTo(Object)}. */ + * The default implementation uses the natural ordering, calling {@link + * Comparable#compareTo(Object)}. + * @param a the first object to be compared. + * @param b the second object to be compared. + * @return compare result. + */ @SuppressWarnings("unchecked") public int compare(WritableComparable a, WritableComparable b) { return a.compareTo(b); } + /** + * Compare two Object. + * + * @param a the first object to be compared. + * @param b the second object to be compared. + * @return compare result. + */ @Override public int compare(Object a, Object b) { return compare((WritableComparable)a, (WritableComparable)b); } - /** Lexicographic order of binary data. */ + /** + * Lexicographic order of binary data. + * @param b1 b1 + * @param s1 s1 + * @param l1 l1 + * @param b2 b2 + * @param s2 s2 + * @param l2 l2 + * @return compare bytes + */ public static int compareBytes(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { return FastByteComparisons.compareTo(b1, s1, l1, b2, s2, l2); } - /** Compute hash for binary data. */ + /** + * Compute hash for binary data. + * @param bytes bytes + * @param offset offset + * @param length length + * @return hash for binary data + */ public static int hashBytes(byte[] bytes, int offset, int length) { int hash = 1; for (int i = offset; i < offset + length; i++) @@ -204,18 +246,33 @@ public static int hashBytes(byte[] bytes, int offset, int length) { return hash; } - /** Compute hash for binary data. */ + /** + * Compute hash for binary data. + * @param bytes bytes + * @param length length + * @return hash for binary data. + */ public static int hashBytes(byte[] bytes, int length) { return hashBytes(bytes, 0, length); } - /** Parse an unsigned short from a byte array. */ + /** + * Parse an unsigned short from a byte array. + * @param bytes bytes + * @param start start + * @return unsigned short from a byte array + */ public static int readUnsignedShort(byte[] bytes, int start) { return (((bytes[start] & 0xff) << 8) + ((bytes[start+1] & 0xff))); } - /** Parse an integer from a byte array. */ + /** + * Parse an integer from a byte array. + * @param bytes bytes + * @param start start + * @return integer from a byte array + */ public static int readInt(byte[] bytes, int start) { return (((bytes[start ] & 0xff) << 24) + ((bytes[start+1] & 0xff) << 16) + @@ -224,18 +281,33 @@ public static int readInt(byte[] bytes, int start) { } - /** Parse a float from a byte array. */ + /** + * Parse a float from a byte array. + * @param bytes bytes + * @param start start + * @return float from a byte array + */ public static float readFloat(byte[] bytes, int start) { return Float.intBitsToFloat(readInt(bytes, start)); } - /** Parse a long from a byte array. */ + /** + * Parse a long from a byte array. + * @param bytes bytes. + * @param start start. + * @return long from a byte array + */ public static long readLong(byte[] bytes, int start) { return ((long)(readInt(bytes, start)) << 32) + (readInt(bytes, start+4) & 0xFFFFFFFFL); } - /** Parse a double from a byte array. */ + /** + * Parse a double from a byte array. 
+ * @param bytes bytes + * @param start start + * @return double from a byte array + */ public static double readDouble(byte[] bytes, int start) { return Double.longBitsToDouble(readLong(bytes, start)); } @@ -244,7 +316,7 @@ public static double readDouble(byte[] bytes, int start) { * Reads a zero-compressed encoded long from a byte array and returns it. * @param bytes byte array with decode long * @param start starting index - * @throws java.io.IOException + * @throws IOException raised on errors performing I/O. * @return deserialized long */ public static long readVLong(byte[] bytes, int start) throws IOException { @@ -269,7 +341,7 @@ public static long readVLong(byte[] bytes, int start) throws IOException { * Reads a zero-compressed encoded integer from a byte array and returns it. * @param bytes byte array with the encoded integer * @param start start index - * @throws java.io.IOException + * @throws IOException raised on errors performing I/O. * @return deserialized integer */ public static int readVInt(byte[] bytes, int start) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java index beb4085eef032..063d0a8687b8e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java @@ -305,6 +305,7 @@ protected synchronized void setDelegationTokenSeqNum(int seqNum) { * based implementations. * * @param keyId keyId + * @return DelegationKey */ protected DelegationKey getDelegationKey(int keyId) { return allKeys.get(keyId); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java index d50dbc8f3efca..fce21dab9413c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java @@ -57,6 +57,7 @@ class Util { * @param obj object * @param timeout timeout * @param unit unit + * @throws InterruptedException if the thread is interrupted. */ public static void wait(Object obj, long timeout, TimeUnit unit) throws InterruptedException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java index f818556077f00..54f0fb2a74604 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java @@ -372,6 +372,7 @@ public void safeCreate(String path, byte[] data, List acl, * * @param path Path to be deleted. * @param fencingNodePath fencingNodePath + * @param fencingACL fencingACL * @throws Exception if any problem occurs while performing deletion. 
*/ public void safeDelete(final String path, List fencingACL, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java index 3f62aef00a5f6..595a09db3f824 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java @@ -247,7 +247,7 @@ public int hash(byte[] key, int nbytes, int initval) { /** * Compute the hash of the specified file * @param args name of file to compute hash of. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static void main(String[] args) throws IOException { if (args.length != 1) { From 6875e4b4c9885ee6e9a4bcafd17eaee1d869ae40 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Sat, 14 May 2022 07:10:09 -0700 Subject: [PATCH 40/53] HADOOP-18229. Fix some java doc compilation 250+ warnings. --- .../fs/statistics/IOStatisticsSnapshot.java | 6 ++ .../apache/hadoop/fs/viewfs/ConfigUtil.java | 56 ++++++++++------ .../org/apache/hadoop/fs/viewfs/FsGetter.java | 8 +++ .../apache/hadoop/fs/viewfs/InodeTree.java | 40 ++++++++---- .../fs/viewfs/MountTableConfigLoader.java | 1 + .../hadoop/fs/viewfs/ViewFileSystem.java | 12 ++-- .../viewfs/ViewFileSystemOverloadScheme.java | 10 ++- .../hadoop/fs/viewfs/ViewFileSystemUtil.java | 5 +- .../org/apache/hadoop/fs/viewfs/ViewFs.java | 2 +- .../java/org/apache/hadoop/io/MapFile.java | 2 +- .../org/apache/hadoop/io/SequenceFile.java | 31 +++++++-- .../io/compress/zlib/ZlibCompressor.java | 1 + .../io/compress/zlib/ZlibDecompressor.java | 2 + .../hadoop/io/compress/zlib/ZlibFactory.java | 2 +- .../io/compress/zstd/ZStandardCompressor.java | 2 + .../compress/zstd/ZStandardDecompressor.java | 1 + .../web/DelegationTokenAuthenticatedURL.java | 6 ++ .../DelegationTokenAuthenticationFilter.java | 1 + .../web/DelegationTokenAuthenticator.java | 8 +++ .../org/apache/hadoop/util/IdGenerator.java | 5 +- .../apache/hadoop/util/InstrumentedLock.java | 1 + .../hadoop/util/IntrusiveCollection.java | 22 +++++++ .../apache/hadoop/util/JsonSerialization.java | 2 + .../apache/hadoop/util/JvmPauseMonitor.java | 3 + .../apache/hadoop/util/LightWeightGSet.java | 12 +++- .../hadoop/util/LightWeightResizableGSet.java | 2 + .../org/apache/hadoop/util/LineReader.java | 6 +- .../java/org/apache/hadoop/util/Lists.java | 28 +++++++- .../org/apache/hadoop/util/MachineList.java | 6 +- .../apache/hadoop/util/NativeCodeLoader.java | 8 ++- .../hadoop/util/NativeLibraryChecker.java | 3 +- .../java/org/apache/hadoop/util/Options.java | 2 +- .../apache/hadoop/util/PrintJarMainClass.java | 2 +- .../org/apache/hadoop/util/PriorityQueue.java | 36 ++++++++--- .../org/apache/hadoop/util/ProgramDriver.java | 20 +++--- .../java/org/apache/hadoop/util/Progress.java | 49 +++++++++++--- .../org/apache/hadoop/util/ProtoUtil.java | 4 ++ .../org/apache/hadoop/util/QuickSort.java | 3 + .../apache/hadoop/util/ReflectionUtils.java | 15 ++++- .../java/org/apache/hadoop/util/RunJar.java | 7 +- .../apache/hadoop/util/SequentialNumber.java | 17 ++++- .../org/apache/hadoop/util/ServletUtil.java | 16 ++++- .../java/org/apache/hadoop/util/Sets.java | 64 ++++++++++++++++--- .../hadoop/util/ShutdownThreadsHelper.java | 8 ++- .../org/apache/hadoop/util/StopWatch.java | 3 + .../apache/hadoop/util/StringInterner.java | 3 + .../org/apache/hadoop/util/StringUtils.java | 59 
++++++++++++++--- .../java/org/apache/hadoop/util/Time.java | 2 + .../org/apache/hadoop/util/ToolRunner.java | 7 +- .../java/org/apache/hadoop/util/XMLUtils.java | 6 +- .../java/org/apache/hadoop/util/ZKUtil.java | 1 + .../hadoop/util/functional/package-info.java | 6 +- 52 files changed, 498 insertions(+), 126 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java index 7e18a83e77257..4a84d47de77db 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java @@ -238,6 +238,8 @@ public static JsonSerialization serializer() { /** * Serialize by converting each map to a TreeMap, and saving that * to the stream. + * @param s ObjectOutputStream + * @throws IOException raised on errors performing I/O. */ private synchronized void writeObject(ObjectOutputStream s) throws IOException { @@ -253,6 +255,10 @@ private synchronized void writeObject(ObjectOutputStream s) /** * Deserialize by loading each TreeMap, and building concurrent * hash maps from them. + * + * @param s ObjectInputStream + * @throws IOException raised on errors performing I/O. + * @throws ClassNotFoundException class not found exception */ private void readObject(final ObjectInputStream s) throws IOException, ClassNotFoundException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java index ead2a365f3ae6..1faf215e50553 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java @@ -48,7 +48,7 @@ public static String getConfigViewFsPrefix() { /** * Add a link to the config for the specified mount table * @param conf - add the link to this conf - * @param mountTableName + * @param mountTableName mountTable * @param src - the src path name * @param target - the target URI link */ @@ -71,9 +71,10 @@ public static void addLink(final Configuration conf, final String src, /** * Add a LinkMergeSlash to the config for the specified mount table. - * @param conf - * @param mountTableName - * @param target + * + * @param conf configuration + * @param mountTableName mountTable + * @param target target */ public static void addLinkMergeSlash(Configuration conf, final String mountTableName, final URI target) { @@ -83,8 +84,9 @@ public static void addLinkMergeSlash(Configuration conf, /** * Add a LinkMergeSlash to the config for the default mount table. - * @param conf - * @param target + * + * @param conf configuration + * @param target targets */ public static void addLinkMergeSlash(Configuration conf, final URI target) { addLinkMergeSlash(conf, getDefaultMountTableName(conf), target); @@ -92,9 +94,10 @@ public static void addLinkMergeSlash(Configuration conf, final URI target) { /** * Add a LinkFallback to the config for the specified mount table. 
- * @param conf - * @param mountTableName - * @param target + * + * @param conf configuration + * @param mountTableName mountTable + * @param target targets */ public static void addLinkFallback(Configuration conf, final String mountTableName, final URI target) { @@ -104,8 +107,9 @@ public static void addLinkFallback(Configuration conf, /** * Add a LinkFallback to the config for the default mount table. - * @param conf - * @param target + * + * @param conf configuration + * @param target targets */ public static void addLinkFallback(Configuration conf, final URI target) { addLinkFallback(conf, getDefaultMountTableName(conf), target); @@ -113,9 +117,10 @@ public static void addLinkFallback(Configuration conf, final URI target) { /** * Add a LinkMerge to the config for the specified mount table. - * @param conf - * @param mountTableName - * @param targets + * + * @param conf configuration + * @param mountTableName mountTable + * @param targets targets */ public static void addLinkMerge(Configuration conf, final String mountTableName, final URI[] targets) { @@ -125,8 +130,9 @@ public static void addLinkMerge(Configuration conf, /** * Add a LinkMerge to the config for the default mount table. - * @param conf - * @param targets + * + * @param conf configuration + * @param targets targets array */ public static void addLinkMerge(Configuration conf, final URI[] targets) { addLinkMerge(conf, getDefaultMountTableName(conf), targets); @@ -134,6 +140,12 @@ public static void addLinkMerge(Configuration conf, final URI[] targets) { /** * Add nfly link to configuration for the given mount table. + * + * @param conf configuration + * @param mountTableName mount table + * @param src src + * @param settings settings + * @param targets targets */ public static void addLinkNfly(Configuration conf, String mountTableName, String src, String settings, final String targets) { @@ -144,12 +156,13 @@ public static void addLinkNfly(Configuration conf, String mountTableName, } /** + * Add nfly link to configuration for the given mount table. * - * @param conf - * @param mountTableName - * @param src - * @param settings - * @param targets + * @param conf configuration + * @param mountTableName mount table + * @param src src + * @param settings settings + * @param targets targets */ public static void addLinkNfly(Configuration conf, String mountTableName, String src, String settings, final URI ... targets) { @@ -202,6 +215,7 @@ public static void setHomeDirConf(final Configuration conf, * Add config variable for homedir the specified mount table * @param conf - add to this conf * @param homedir - the home dir path starting with slash + * @param mountTableName - the mount table */ public static void setHomeDirConf(final Configuration conf, final String mountTableName, final String homedir) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java index c72baac25fb75..b6490e6b9db84 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java @@ -34,6 +34,9 @@ public class FsGetter { /** * Gets new file system instance of given uri. + * @param uri uri + * @param conf configuration + * @throws IOException raised on errors performing I/O. 
*/ public FileSystem getNewInstance(URI uri, Configuration conf) throws IOException { @@ -42,6 +45,11 @@ public FileSystem getNewInstance(URI uri, Configuration conf) /** * Gets file system instance of given uri. + * + * @param uri uri + * @param conf configuration + * @throws IOException raised on errors performing I/O. + * @return FileSystem */ public FileSystem get(URI uri, Configuration conf) throws IOException { return FileSystem.get(uri, conf); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java index fb7c46fb662b5..c03e41bae7ff3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java @@ -364,6 +364,8 @@ public static class INodeLink extends INode { /** * Get the target of the link. If a merge link then it returned * as "," separated URI list. + * + * @return the path */ public Path getTargetLink() { StringBuilder result = new StringBuilder(targetDirLinkList[0].toString()); @@ -387,7 +389,7 @@ INodeLink getLink() { /** * Get the instance of FileSystem to use, creating one if needed. * @return An Initialized instance of T - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public T getTargetFileSystem() throws IOException { if (targetFileSystem != null) { @@ -500,6 +502,7 @@ private void createLink(final String src, final String target, /** * The user of this class must subclass and implement the following * 3 abstract methods. + * @return Function */ protected abstract Function initAndGetTargetFs(); @@ -590,14 +593,19 @@ Configuration getConfig() { } /** - * Create Inode Tree from the specified mount-table specified in Config - * @param config - the mount table keys are prefixed with - * FsConstants.CONFIG_VIEWFS_PREFIX - * @param viewName - the name of the mount table - if null use defaultMT name - * @throws UnsupportedFileSystemException - * @throws URISyntaxException - * @throws FileAlreadyExistsException - * @throws IOException + * Create Inode Tree from the specified mount-table specified in Config. + * + * @param config - the mount table keys are prefixed with + * FsConstants.CONFIG_VIEWFS_PREFIX + * @param viewName - the name of the mount table - if null use defaultMT name + * @param theUri theUri + * @param initingUriAsFallbackOnNoMounts initingUriAsFallbackOnNoMounts + * @throws UnsupportedFileSystemException file system for uri is + * not found + * @throws URISyntaxException if the URI does not have an authority it is badly formed. + * @throws FileAlreadyExistsException there is a file at the path specified + * or is discovered on one of its ancestors. + * @throws IOException raised on errors performing I/O. */ protected InodeTree(final Configuration config, final String viewName, final URI theUri, boolean initingUriAsFallbackOnNoMounts) @@ -871,9 +879,9 @@ boolean isLastInternalDirLink() { /** * Resolve the pathname p relative to root InodeDir. * @param p - input path - * @param resolveLastComponent + * @param resolveLastComponent resolveLastComponent * @return ResolveResult which allows further resolution of the remaining path - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ public ResolveResult resolve(final String p, final boolean resolveLastComponent) throws IOException { @@ -1000,9 +1008,9 @@ private Path getRemainingPath(String[] path, int startIndex) { * resolveLastComponent: true * then return value is s3://hadoop.apache.com/_hadoop * - * @param srcPath - * @param resolveLastComponent - * @return + * @param srcPath srcPath + * @param resolveLastComponent resolveLastComponent + * @return ResolveResult */ protected ResolveResult tryResolveInRegexMountpoint(final String srcPath, final boolean resolveLastComponent) { @@ -1029,6 +1037,10 @@ protected ResolveResult tryResolveInRegexMountpoint(final String srcPath, * targetOfResolvedPathStr: /targetTestRoot/hadoop-user1 * remainingPath: /hadoop_dir1 * + * @param resultKind resultKind + * @param resolvedPathStr resolvedPathStr + * @param targetOfResolvedPathStr targetOfResolvedPathStr + * @param remainingPath remainingPath * @return targetFileSystem or null on exceptions. */ protected ResolveResult buildResolveResultForRegexMountPoint( diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java index bc2c3ea93c58c..5fcd77cd29155 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java @@ -38,6 +38,7 @@ public interface MountTableConfigLoader { * a directory in the case of multiple versions of mount-table * files(Recommended option). * @param conf - Configuration object to add mount table. + * @throws IOException raised on errors performing I/O. */ void load(String mountTableConfigPath, Configuration conf) throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java index 8f4631b0e833e..d2a9bb667f893 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java @@ -107,6 +107,8 @@ static AccessControlException readOnlyMountTable(final String operation, /** * Gets file system creator instance. + * + * @return fs getter */ protected FsGetter fsGetter() { return new FsGetter(); @@ -273,7 +275,7 @@ private Path makeAbsolute(final Path f) { * {@link FileSystem#createFileSystem(URI, Configuration)} * * After this constructor is called initialize() is called. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public ViewFileSystem() throws IOException { ugi = UserGroupInformation.getCurrentUser(); @@ -394,9 +396,9 @@ protected FileSystem getTargetFileSystem(final String settings, } /** - * Convenience Constructor for apps to call directly - * @param conf - * @throws IOException + * Convenience Constructor for apps to call directly. + * @param conf configuration + * @throws IOException raised on errors performing I/O. */ public ViewFileSystem(final Configuration conf) throws IOException { this(FsConstants.VIEWFS_URI, conf); @@ -1314,7 +1316,7 @@ public FsStatus getStatus(Path p) throws IOException { * Constants#CONFIG_VIEWFS_LINK_MERGE_SLASH} is supported and is a valid * mount point. Else, throw NotInMountpointException. 
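
To make the ViewFileSystem hunks concrete, a small self-contained usage sketch (a file:// target avoids needing a real cluster; FsConstants.VIEWFS_URI is the viewfs:/// root):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FsConstants;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.viewfs.ConfigUtil;

    public class ViewFsDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        ConfigUtil.addLink(conf, "/local", URI.create("file:///tmp"));
        FileSystem vfs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
        for (FileStatus st : vfs.listStatus(new Path("/"))) {
          System.out.println(st.getPath()); // the mount points
        }
      }
    }
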
* - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public long getUsed() throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java index e91b66512d5bf..99c626be3a214 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java @@ -139,6 +139,8 @@ public boolean supportAutoAddingFallbackOnNoMounts() { /** * Sets whether to add fallback automatically when no mount points found. + * + * @param addAutoFallbackOnNoMounts addAutoFallbackOnNoMounts */ public void setSupportAutoAddingFallbackOnNoMounts( boolean addAutoFallbackOnNoMounts) { @@ -320,7 +322,8 @@ private T newInstance(Class theClass, URI uri, Configuration conf) { * * @param path - fs uri path * @param conf - configuration - * @throws IOException + * @throws IOException raised on errors performing I/O. + * @return file system */ public FileSystem getRawFileSystem(Path path, Configuration conf) throws IOException { @@ -339,6 +342,11 @@ public FileSystem getRawFileSystem(Path path, Configuration conf) /** * Gets the mount path info, which contains the target file system and * remaining path to pass to the target file system. + * + * @param path the path + * @param conf configuration + * @return mount path info + * @throws IOException raised on errors performing I/O. */ public MountPathInfo getMountPathInfo(Path path, Configuration conf) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java index f486a10b4c8f9..1f05076f47397 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java @@ -44,7 +44,7 @@ private ViewFileSystemUtil() { /** * Check if the FileSystem is a ViewFileSystem. * - * @param fileSystem + * @param fileSystem file system * @return true if the fileSystem is ViewFileSystem */ public static boolean isViewFileSystem(final FileSystem fileSystem) { @@ -54,7 +54,7 @@ public static boolean isViewFileSystem(final FileSystem fileSystem) { /** * Check if the FileSystem is a ViewFileSystemOverloadScheme. * - * @param fileSystem + * @param fileSystem file system * @return true if the fileSystem is ViewFileSystemOverloadScheme */ public static boolean isViewFileSystemOverloadScheme( @@ -101,6 +101,7 @@ public static boolean isViewFileSystemOverloadScheme( * @param fileSystem - ViewFileSystem on which mount point exists * @param path - URI for which FsStatus is requested * @return Map of ViewFsMountPoint and FsStatus + * @throws IOException raised on errors performing I/O. 
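
The isViewFileSystem/getStatus pair documented above can be exercised as below; a sketch, assuming the mount table is already configured and that the map key is the MountPoint type nested in ViewFileSystem:

    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FsConstants;
    import org.apache.hadoop.fs.FsStatus;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.viewfs.ViewFileSystem.MountPoint;
    import org.apache.hadoop.fs.viewfs.ViewFileSystemUtil;

    public class MountStatus {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(FsConstants.VIEWFS_URI, new Configuration());
        if (ViewFileSystemUtil.isViewFileSystem(fs)) {
          Map<MountPoint, FsStatus> status =
              ViewFileSystemUtil.getStatus(fs, new Path("/"));
          status.forEach((mp, st) ->
              System.out.println(mp.getMountedOnPath() + " used=" + st.getUsed()));
        }
      }
    }
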
*/ public static Map getStatus( FileSystem fileSystem, Path path) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java index d98082fe5c1e0..5f54c9cdd06aa 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java @@ -909,7 +909,7 @@ public void unsetStoragePolicy(final Path src) * * @param src file or directory path. * @return storage policy for give file. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public BlockStoragePolicySpi getStoragePolicy(final Path src) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java index bf96cd2aee87b..87feb1029ea6b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java @@ -810,7 +810,7 @@ public synchronized boolean next(WritableComparable key, Writable val) * Return the value for the named key, or null if none exists. * @param key key * @param val val - * @throws Writable if such a pair exists true,not false + * @return the value if such a pair exists, otherwise null * @throws IOException raised on errors performing I/O. */ public synchronized Writable get(WritableComparable key, Writable val) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java index 890e7916ab076..420fe51492a70 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java @@ -2173,13 +2173,22 @@ public synchronized Class getValueClass() { return valClass; } - /** Returns true if values are compressed. */ + /** + * Returns true if values are compressed. + * @return true if values are compressed, false otherwise + */ public boolean isCompressed() { return decompress; } - /** Returns true if records are block-compressed. */ + /** + * Returns true if records are block-compressed. + * @return true if records are block-compressed, false otherwise + */ public boolean isBlockCompressed() { return blockCompressed; } - /** Returns the compression codec of data in this file. */ + /** + * Returns the compression codec of data in this file. + * @return the compression codec + */ public CompressionCodec getCompressionCodec() { return codec; } private byte[] getSync() { @@ -2202,7 +2211,10 @@ public CompressionType getCompressionType() { } } - /** Returns the metadata object of the file */ + /** + * Returns the metadata object of the file. + * @return the metadata object + */ public Metadata getMetadata() { return this.metadata; } @@ -2311,7 +2323,7 @@ private synchronized void seekToCurrentValue() throws IOException { /** * Get the 'value' corresponding to the last read 'key'. * @param val : The 'value' to be read. - * @throws IOException + * @throws IOException raised on errors performing I/O.
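
A short read loop ties the Reader accessors in this hunk together, pairing next(key) with getCurrentValue(val); the path and the Text key/value classes are assumptions for illustration:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;

    public class SeqFileDump {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path file = new Path("/tmp/data.seq"); // hypothetical input
        try (SequenceFile.Reader reader =
            new SequenceFile.Reader(conf, SequenceFile.Reader.file(file))) {
          Text key = new Text();
          Text val = new Text();
          while (reader.next(key)) {      // reads the key, skips the value
            reader.getCurrentValue(val);  // fetches the value just skipped
            System.out.println(key + "\t" + val);
          }
        }
      }
    }
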
*/ public synchronized void getCurrentValue(Writable val) throws IOException { @@ -2392,8 +2404,13 @@ private Object deserializeValue(Object val) throws IOException { return valDeserializer.deserialize(val); } - /** Read the next key in the file into key, skipping its - * value. True if another entry exists, and false at end of file. */ + /** + * Read the next key in the file into key, skipping its + * value. Returns true if another entry exists, and false at end of file. + * + * @param key key + * @return true if another entry exists, false at end of file + */ public synchronized boolean next(Writable key) throws IOException { if (key.getClass() != getKeyClass()) throw new IOException("wrong key class: "+key.getClass().getName() diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java index da8a90bb3170e..a3ce3ab076581 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java @@ -240,6 +240,7 @@ public ZlibCompressor() { /** * Creates a new compressor, taking settings from the configuration. + * @param conf configuration */ public ZlibCompressor(Configuration conf) { this(ZlibFactory.getCompressionLevel(conf), diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java index f642d7713035d..5f749748f30ec 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java @@ -101,6 +101,8 @@ static boolean isNativeZlibLoaded() { /** * Creates a new decompressor. + * @param header compression header + * @param directBufferSize direct buffer size */ public ZlibDecompressor(CompressionHeader header, int directBufferSize) { this.header = header; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java index 883f1717eea93..f4bae38dc457e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java @@ -66,7 +66,7 @@ public static void loadNativeZLib() { /** * Set the flag whether to use native library. Used for testing non-native * libraries - * + * @param isLoaded whether the native library is loaded */ @VisibleForTesting public static void setNativeZlibLoaded(final boolean isLoaded) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java index bc51f3d98a505..dfef01044d2c9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java @@ -84,6 +84,8 @@ public static int getRecommendedBufferSize() { /** * Creates a new compressor with the default compression level.
* Compressed data will be generated in ZStandard format. + * @param level compression level + * @param bufferSize buffer size */ public ZStandardCompressor(int level, int bufferSize) { this(level, bufferSize, bufferSize); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java index adf2fe629f8f7..c9ef509c6dce2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java @@ -73,6 +73,7 @@ public ZStandardDecompressor() { /** * Creates a new decompressor. + * @param bufferSize buffer size */ public ZStandardDecompressor(int bufferSize) { this.directBufferSize = bufferSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java index 0988826605fbb..2815f56818501 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java @@ -336,6 +336,10 @@ public HttpURLConnection openConnection(URL url, Token token, String doAs) /** * Select a delegation token from all tokens in credentials, based on url. + * + * @param url url + * @param creds credentials + * @return the selected delegation token */ @InterfaceAudience.Private public org.apache.hadoop.security.token.Token @@ -407,6 +411,7 @@ public HttpURLConnection openConnection(URL url, Token token, String doAs) * @param token the authentication token with the Delegation Token to renew. * @throws IOException if an IO error occurred. * @throws AuthenticationException if an authentication exception occurred. + * @return the new expiration time of the delegation token */ public long renewDelegationToken(URL url, Token token) throws IOException, AuthenticationException { @@ -423,6 +428,7 @@ public long renewDelegationToken(URL url, Token token) * @param doAsUser the user to do as, which will be the token owner. * @throws IOException if an IO error occurred. * @throws AuthenticationException if an authentication exception occurred. + * @return the new expiration time of the delegation token */ public long renewDelegationToken(URL url, Token token, String doAsUser) throws IOException, AuthenticationException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java index be061bb63f3ee..3de8d3ab91377 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java @@ -125,6 +125,7 @@ protected Properties getConfiguration(String configPrefix, * Set AUTH_TYPE property to the name of the corresponding authentication * handler class based on the input properties.
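
A sketch of the token round trip these javadocs describe, against a hypothetical secured endpoint; the long returned by renewDelegationToken is the renewed token's expiration time:

    import java.net.URL;
    import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;

    public class TokenRoundTrip {
      public static void main(String[] args) throws Exception {
        URL url = new URL("http://rm.example.com:8088/ws/v1/cluster");
        DelegationTokenAuthenticatedURL.Token token =
            new DelegationTokenAuthenticatedURL.Token();
        DelegationTokenAuthenticatedURL authUrl =
            new DelegationTokenAuthenticatedURL();
        authUrl.getDelegationToken(url, token, "yarn"); // "yarn" as renewer
        long nextExpiration = authUrl.renewDelegationToken(url, token);
        System.out.println("token now expires at " + nextExpiration);
      }
    }
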
* @param props input properties. + * @throws ServletException servlet exception */ protected void setAuthHandlerClass(Properties props) throws ServletException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java index 19427dcfafeb4..2694df5a97485 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java @@ -163,6 +163,7 @@ public void authenticate(URL url, AuthenticatedURL.Token token) * @param renewer the renewer user. * @throws IOException if an IO error occurred. * @throws AuthenticationException if an authentication exception occurred. + * @return abstract delegation token identifier */ public Token getDelegationToken(URL url, AuthenticatedURL.Token token, String renewer) @@ -182,6 +183,7 @@ public Token getDelegationToken(URL url, * @param doAsUser the user to do as, which will be the token owner. * @throws IOException if an IO error occurred. * @throws AuthenticationException if an authentication exception occurred. + * @return abstract delegation token identifier */ public Token getDelegationToken(URL url, AuthenticatedURL.Token token, String renewer, String doAsUser) @@ -207,8 +209,10 @@ public Token getDelegationToken(URL url, * @param url the URL to renew the delegation token from. Only HTTP/S URLs are * supported. * @param token the authentication token with the Delegation Token to renew. + * @param dToken abstract delegation token identifier. * @throws IOException if an IO error occurred. * @throws AuthenticationException if an authentication exception occurred. + * @return the new expiration time of the delegation token */ public long renewDelegationToken(URL url, AuthenticatedURL.Token token, @@ -225,8 +229,10 @@ public long renewDelegationToken(URL url, * supported. * @param token the authentication token with the Delegation Token to renew. * @param doAsUser the user to do as, which will be the token owner. + * @param dToken abstract delegation token identifier. * @throws IOException if an IO error occurred. * @throws AuthenticationException if an authentication exception occurred. + * @return the new expiration time of the delegation token */ public long renewDelegationToken(URL url, AuthenticatedURL.Token token, @@ -245,6 +251,7 @@ public long renewDelegationToken(URL url, * @param url the URL to cancel the delegation token from. Only HTTP/S URLs * are supported. * @param token the authentication token with the Delegation Token to cancel. + * @param dToken abstract delegation token identifier * @throws IOException if an IO error occurred. */ public void cancelDelegationToken(URL url, @@ -261,6 +268,7 @@ public void cancelDelegationToken(URL url, * @param url the URL to cancel the delegation token from. Only HTTP/S URLs * are supported. * @param token the authentication token with the Delegation Token to cancel. + * @param dToken abstract delegation token identifier * @param doAsUser the user to do as, which will be the token owner. * @throws IOException if an IO error occurred.
*/ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdGenerator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdGenerator.java index c14727a3771da..0d348bca83a17 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdGenerator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdGenerator.java @@ -26,6 +26,9 @@ @InterfaceAudience.Private public interface IdGenerator { - /** Increment and then return the next value. */ + /** + * Increment and then return the next value. + * @return long value + */ public long nextValue(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedLock.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedLock.java index e83736cd3e35a..e314c4b738de0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedLock.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedLock.java @@ -185,6 +185,7 @@ protected void startLockTiming() { * * @param acquireTime - timestamp just after acquiring the lock. * @param releaseTime - timestamp just before releasing the lock. + * @param checkLockHeld checkLockHeld */ protected void check(long acquireTime, long releaseTime, boolean checkLockHeld) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java index 21d8ad34a8784..54091f2bcc389 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java @@ -49,38 +49,59 @@ public interface Element { /** * Insert this element into the list. This is the first thing that will * be called on the element. + * + * @param list list + * @param prev prev + * @param next next */ void insertInternal(IntrusiveCollection list, Element prev, Element next); /** * Set the prev pointer of an element already in the list. + * + * @param list list + * @param prev prev */ void setPrev(IntrusiveCollection list, Element prev); /** * Set the next pointer of an element already in the list. + * + * @param list list + * @param next next */ void setNext(IntrusiveCollection list, Element next); /** * Remove an element from the list. This is the last thing that will be * called on an element. + * + * @param list list */ void removeInternal(IntrusiveCollection list); /** * Get the prev pointer of an element. + * + * @param list list + * @return Element */ Element getPrev(IntrusiveCollection list); /** * Get the next pointer of an element. + * + * @param list list + * @return Element */ Element getNext(IntrusiveCollection list); /** * Returns true if this element is in the provided list. + * + * @param list list + * @return if this element is in the provided list true, not false. */ boolean isInList(IntrusiveCollection list); } @@ -260,6 +281,7 @@ public T[] toArray(T[] array) { * Add an element to the end of the list. * * @param elem The new element to add. 
+ * @return true if the element was added */ @Override public boolean add(E elem) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java index 002c725490f4b..c57085eae160a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java @@ -287,6 +287,7 @@ public T load(FileSystem fs, Path path, @Nullable FileStatus status) * @param fs filesystem * @param path path * @param overwrite should any existing file be overwritten + * @param instance the instance to save * @throws IOException IO exception */ public void save(FileSystem fs, Path path, T instance, @@ -324,6 +325,7 @@ public byte[] toBytes(T instance) throws IOException { * @param bytes byte array * @throws IOException IO problems * @throws EOFException not enough data + * @return the deserialized instance */ public T fromBytes(byte[] bytes) throws IOException { return fromJson(new String(bytes, 0, bytes.length, UTF_8)); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java index feb4f9b9d3f01..dc8672a4ef543 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java @@ -214,6 +214,9 @@ public void run() { * This main function just leaks memory into a list. Running this class * with a 1GB heap will very quickly go into "GC hell" and result in * log messages about the GC pauses. + * + * @param args args + * @throws Exception on any failure */ @SuppressWarnings("resource") public static void main(String []args) throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java index d32d1f37b42fa..46e95234b57f7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java @@ -177,6 +177,8 @@ public E put(final E element) { * Remove the element corresponding to the key, * given key.hashCode() == index. * + * @param key key + * @param index index * @return If such element exists, return it. * Otherwise, return null. */ @@ -270,7 +272,11 @@ public String toString() { return b.toString(); } - /** Print detailed information of this object. */ + /** + * Print detailed information of this object. + * + * @param out print stream + */ public void printDetails(final PrintStream out) { out.print(this + ", entries = ["); for(int i = 0; i < entries.length; i++) { @@ -357,6 +363,10 @@ public void setTrackModification(boolean trackModification) { * Let e = round(log_2 t). * Then, we choose capacity = 2^e/(size of reference), * unless it is outside the close interval [1, 2^30].
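
JsonSerialization's save/load/toBytes/fromBytes form a simple round trip; a sketch with a hypothetical Jackson-friendly bean (public fields, no-arg constructor). Note that fromBytes hands back the deserialized instance, not bytes:

    import org.apache.hadoop.util.JsonSerialization;

    public class JsonRoundTrip {
      public static class KeyVal {
        public String key;
        public int val;
        public KeyVal() { }
      }

      public static void main(String[] args) throws Exception {
        JsonSerialization<KeyVal> ser =
            new JsonSerialization<>(KeyVal.class, false, true);
        KeyVal kv = new KeyVal();
        kv.key = "a";
        kv.val = 1;
        byte[] bytes = ser.toBytes(kv);
        KeyVal back = ser.fromBytes(bytes);
        System.out.println(back.key + "=" + back.val); // a=1
      }
    }
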
+ * + * @param mapName mapName + * @param percentage percentage + * @return compute capacity */ public static int computeCapacity(double percentage, String mapName) { return computeCapacity(Runtime.getRuntime().maxMemory(), percentage, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightResizableGSet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightResizableGSet.java index 7e7ececb32ee8..9658e3ea887a5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightResizableGSet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightResizableGSet.java @@ -116,6 +116,8 @@ public synchronized void getIterator(Consumer> consumer) { /** * Resize the internal table to given capacity. + * + * @param cap capacity */ @SuppressWarnings("unchecked") protected synchronized void resize(int cap) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java index 520ddf6bdf401..08bd8102b1388 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java @@ -89,7 +89,7 @@ public LineReader(InputStream in, int bufferSize) { * Configuration. * @param in input stream * @param conf configuration - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public LineReader(InputStream in, Configuration conf) throws IOException { this(in, conf.getInt(IO_FILE_BUFFER_SIZE_KEY, DEFAULT_BUFFER_SIZE)); @@ -133,7 +133,7 @@ public LineReader(InputStream in, int bufferSize, * @param in input stream * @param conf configuration * @param recordDelimiterBytes The delimiter - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public LineReader(InputStream in, Configuration conf, byte[] recordDelimiterBytes) throws IOException { @@ -146,7 +146,7 @@ public LineReader(InputStream in, Configuration conf, /** * Close the underlying stream. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void close() throws IOException { in.close(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Lists.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Lists.java index 5d9cc0502afaa..623ae7f0ea53d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Lists.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Lists.java @@ -44,6 +44,9 @@ private Lists() { /** * Creates a mutable, empty {@code ArrayList} instance. + * + * @param Generics Type E + * @return ArrayList Generics Type E */ public static ArrayList newArrayList() { return new ArrayList<>(); @@ -59,6 +62,10 @@ public static ArrayList newArrayList() { * {@link Arrays#asList asList} * {@code (...))}, or for creating an empty list then calling * {@link Collections#addAll}. + * + * @param Generics Type E + * @param elements elements + * @return ArrayList Generics Type E */ @SafeVarargs public static ArrayList newArrayList(E... elements) { @@ -76,6 +83,9 @@ public static ArrayList newArrayList(E... 
elements) { * Creates a mutable {@code ArrayList} instance containing the * given elements; a very thin shortcut for creating an empty list then * calling Iterables#addAll. + * + * @param Generics Type E + * @return ArrayList Generics Type E */ public static ArrayList newArrayList(Iterable elements) { if (elements == null) { @@ -90,6 +100,9 @@ public static ArrayList newArrayList(Iterable elements) { * Creates a mutable {@code ArrayList} instance containing the * given elements; a very thin shortcut for creating an empty list * and then calling Iterators#addAll. + * + * @param Generics Type E + * @return ArrayList Generics Type E */ public static ArrayList newArrayList(Iterator elements) { ArrayList list = newArrayList(); @@ -102,6 +115,7 @@ public static ArrayList newArrayList(Iterator elements) { * specified initial size; * simply delegates to {@link ArrayList#ArrayList(int)}. * + * @param Generics Type E * @param initialArraySize the exact size of the initial backing array for * the returned array list * ({@code ArrayList} documentation calls this value the "capacity"). @@ -126,6 +140,8 @@ public static ArrayList newArrayListWithCapacity( * @return a new, empty {@code ArrayList}, sized appropriately to hold the * estimated number of elements. * @throws IllegalArgumentException if {@code estimatedSize} is negative. + * + * @param Generics Type E */ public static ArrayList newArrayListWithExpectedSize( int estimatedSize) { @@ -140,7 +156,10 @@ public static ArrayList newArrayListWithExpectedSize( * outperform {@code LinkedList} except in certain rare and specific * situations. Unless you have * spent a lot of time benchmarking your specific needs, use one of those - * instead. + * instead.

+ * + * @param Generics Type E + * @return Generics Type E List */ public static LinkedList newLinkedList() { return new LinkedList<>(); @@ -155,7 +174,11 @@ public static LinkedList newLinkedList() { * {@link java.util.ArrayDeque} consistently * outperform {@code LinkedList} except in certain rare and specific * situations. Unless you have spent a lot of time benchmarking your - * specific needs, use one of those instead. + * specific needs, use one of those instead.

+ * + * @param elements elements + * @param Generics Type E + * @return Generics Type E List */ public static LinkedList newLinkedList( Iterable elements) { @@ -238,6 +261,7 @@ private static boolean addAll(Collection addTo, * @param originalList original big list. * @param pageSize desired size of each sublist ( last one * may be smaller) + * @param Generics Type * @return a list of sub lists. */ public static List> partition(List originalList, int pageSize) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java index d07264c88ddd8..04fd8c47d0757 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java @@ -89,7 +89,7 @@ public MachineList(Collection hostEntries) { /** * Accepts a collection of ip/cidr/host addresses * - * @param hostEntries + * @param hostEntries collection of host entries * @param addressFactory addressFactory to convert host to InetAddress */ public MachineList(Collection hostEntries, @@ -139,7 +139,7 @@ public MachineList(Collection hostEntries, * {@link #includes(InetAddress)} should be preferred * to avoid possibly re-resolving the ip address. * - * @param ipAddress + * @param ipAddress ip address to check * @return true if ipAddress is part of the list */ public boolean includes(String ipAddress) { @@ -161,7 +161,7 @@ public boolean includes(String ipAddress) { /** * Accepts an inet address and return true if address is in the list. - * @param address + * @param address inet address to check * @return true if address is part of the list */ public boolean includes(InetAddress address) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java index 11d1176f92a59..9aa3dcc6a8e0a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java @@ -76,16 +76,22 @@ public static boolean isNativeCodeLoaded() { /** * Returns true only if this build was compiled with support for ISA-L. + * + * @return true if this build was compiled with ISA-L support, false otherwise */ public static native boolean buildSupportsIsal(); /** - * Returns true only if this build was compiled with support for ZStandard. + * Returns true only if this build was compiled with support for ZStandard. + * + * @return true if this build was compiled with ZStandard support, false otherwise. */ public static native boolean buildSupportsZstd(); /** * Returns true only if this build was compiled with support for openssl. + * + * @return true if this build was compiled with openssl support, false otherwise.
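
MachineList, touched above, accepts a mixed list of ips, CIDR ranges and host names; a quick sketch with made-up addresses:

    import org.apache.hadoop.util.MachineList;

    public class AclCheck {
      public static void main(String[] args) {
        MachineList allowed = new MachineList("10.0.0.0/24,192.168.1.5");
        System.out.println(allowed.includes("10.0.0.42"));   // true, via the CIDR range
        System.out.println(allowed.includes("192.168.1.6")); // false
      }
    }
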
*/ public static native boolean buildSupportsOpenssl(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java index 3847902e79743..a53e31db61c13 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java @@ -39,7 +39,8 @@ public class NativeLibraryChecker { LoggerFactory.getLogger(NativeLibraryChecker.class); /** - * A tool to test native library availability, + * A tool to test native library availability. + * @param args args */ public static void main(String[] args) { String usage = "NativeLibraryChecker [-a|-h]\n" diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Options.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Options.java index 23169e3af3533..ccd494e5e40d9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Options.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Options.java @@ -126,7 +126,7 @@ public Progressable getValue() { * @param cls the dynamic class to find * @param opts the list of options to look through * @return the first option that matches - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @SuppressWarnings("unchecked") public static T getOption(Class cls, base [] opts diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java index df571f35e2fcd..99c1a206a5807 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java @@ -31,7 +31,7 @@ public class PrintJarMainClass { /** - * @param args + * @param args args */ public static void main(String[] args) { try (JarFile jar_file = new JarFile(args[0])) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PriorityQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PriorityQueue.java index ebb943bcb6285..b371e630e7e9f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PriorityQueue.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PriorityQueue.java @@ -31,11 +31,19 @@ public abstract class PriorityQueue { private int size; private int maxSize; - /** Determines the ordering of objects in this priority queue. Subclasses must define this one method. */ + /** + * Determines the ordering of objects in this priority queue. Subclasses must define this one method. + * @param a object a + * @param b object b + * @return true if a is less than b, false otherwise + */ protected abstract boolean lessThan(Object a, Object b); - /** Subclass constructors must call this. */ + /** + * Subclass constructors must call this. + * @param maxSize max size + */ @SuppressWarnings("unchecked") protected final void initialize(int maxSize) { size = 0; @@ -48,6 +56,7 @@ protected final void initialize(int maxSize) { * Adds an Object to a PriorityQueue in log(size) time.
* If one tries to add more objects than maxSize from initialize * a RuntimeException (ArrayIndexOutOfBound) is thrown. + * @param element element */ public final void put(T element) { size++; @@ -58,7 +67,7 @@ public final void put(T element) { /** * Adds element to the PriorityQueue in log(size) time if either * the PriorityQueue is not full, or not lessThan(element, top()). - * @param element + * @param element element * @return true if element is added, false otherwise. */ public boolean insert(T element){ @@ -75,7 +84,11 @@ else if (size > 0 && !lessThan(element, top())){ return false; } - /** Returns the least element of the PriorityQueue in constant time. */ + /** + * Returns the least element of the PriorityQueue in constant time. + * + * @return the least element, or null if the queue is empty + */ public final T top() { if (size > 0) return heap[1]; @@ -83,8 +96,11 @@ public final T top() { else return null; } - /** Removes and returns the least element of the PriorityQueue in log(size) - time. */ + /** + * Removes and returns the least element of the PriorityQueue in log(size) + time. + * @return the least element removed, or null if the queue is empty + */ public final T pop() { if (size > 0) { T result = heap[1]; // save first value @@ -109,7 +125,11 @@ public final void adjustTop() { } - /** Returns the number of elements currently stored in the PriorityQueue. */ + /** + * Returns the number of elements currently stored in the PriorityQueue. + * + * @return the number of elements + */ public final int size() { return size; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProgramDriver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProgramDriver.java index 347e5087eaa37..7851e62139487 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProgramDriver.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProgramDriver.java @@ -91,12 +91,12 @@ private static void printUsage(Map programs) { } /** - * This is the method that adds the classed to the repository + * This is the method that adds the classes to the repository. * @param name The name of the string you want the class instance to be called with * @param mainClass The class that you want to add to the repository * @param description The description of the class - * @throws NoSuchMethodException - * @throws SecurityException + * @throws NoSuchMethodException when a particular method cannot be found. + * @throws SecurityException thrown by the security manager to indicate a security violation. */ public void addClass(String name, Class mainClass, String description) throws Throwable { @@ -111,10 +111,10 @@ public void addClass(String name, Class mainClass, String description) * of the command line arguments. * @param args The argument from the user. args[0] is the command to run. * @return -1 on error, 0 on success - * @throws NoSuchMethodException - * @throws SecurityException - * @throws IllegalAccessException - * @throws IllegalArgumentException + * @throws NoSuchMethodException when a particular method cannot be found. + * @throws SecurityException thrown by the security manager to indicate a security violation. + * @throws IllegalAccessException if the main method is not accessible. + * @throws IllegalArgumentException if the arg is invalid. * @throws Throwable Anything thrown by the example program's main */ public int run(String[] args) @@ -146,7 +146,11 @@ public int run(String[] args) } /** - * API compatible with Hadoop 1.x + * API compatible with Hadoop 1.x.
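
Since PriorityQueue (documented above) leaves lessThan() abstract and expects subclass constructors to call initialize(), a minimal concrete queue looks like this sketch:

    import org.apache.hadoop.util.PriorityQueue;

    public class LongQueue extends PriorityQueue<Long> {
      public LongQueue(int maxSize) {
        initialize(maxSize); // sizes the backing heap
      }

      @Override
      protected boolean lessThan(Object a, Object b) {
        return (Long) a < (Long) b; // smallest long first
      }

      public static void main(String[] args) {
        LongQueue q = new LongQueue(4);
        q.put(3L);
        q.put(1L);
        q.put(2L);
        System.out.println(q.pop()); // 1, the least element
      }
    }
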
+ * + * @param argv argv + * @throws Throwable Anything thrown + * by the example program's main */ public void driver(String[] argv) throws Throwable { if (run(argv) == -1) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java index bd1c0f4a62a78..f35afc90c19c1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java @@ -53,14 +53,21 @@ public class Progress { /** Creates a new root node. */ public Progress() {} - /** Adds a named node to the tree. */ + /** + * Adds a named node to the tree. + * @param status status + * @return Progress + */ public Progress addPhase(String status) { Progress phase = addPhase(); phase.setStatus(status); return phase; } - /** Adds a node to the tree. Gives equal weightage to all phases */ + /** + * Adds a node to the tree. Gives equal weightage to all phases. + * @return Progress + */ public synchronized Progress addPhase() { Progress phase = addNewPhase(); // set equal weightage for all phases @@ -77,7 +84,13 @@ private synchronized Progress addNewPhase() { return phase; } - /** Adds a named node with a specified progress weightage to the tree. */ + /** + * Adds a named node with a specified progress weightage to the tree. + * + * @param status status + * @param weightage weightage + * @return Progress + */ public Progress addPhase(String status, float weightage) { Progress phase = addPhase(weightage); phase.setStatus(status); @@ -85,7 +98,12 @@ public Progress addPhase(String status, float weightage) { return phase; } - /** Adds a node with a specified progress weightage to the tree. */ + /** + * Adds a node with a specified progress weightage to the tree. + * + * @param weightage weightage + * @return Progress + */ public synchronized Progress addPhase(float weightage) { Progress phase = new Progress(); progressWeightagesForPhases.add(weightage); @@ -104,7 +122,11 @@ public synchronized Progress addPhase(float weightage) { return phase; } - /** Adds n nodes to the tree. Gives equal weightage to all phases */ + /** + * Adds n nodes to the tree. Gives equal weightage to all phases. + * + * @param n n + */ public synchronized void addPhases(int n) { for (int i = 0; i < n; i++) { addNewPhase(); @@ -136,7 +158,10 @@ public synchronized void startNextPhase() { currentPhase++; } - /** Returns the current sub-node executing. */ + /** + * Returns the current sub-node executing. + * @return Progress + */ public synchronized Progress phase() { return phases.get(currentPhase); } @@ -158,7 +183,10 @@ public void complete() { } } - /** Called during execution on a leaf node to set its progress. */ + /** + * Called during execution on a leaf node to set its progress. + * @param progress progress + */ public synchronized void set(float progress) { if (Float.isNaN(progress)) { progress = 0; @@ -188,7 +216,10 @@ else if (progress == Float.POSITIVE_INFINITY) { this.progress = progress; } - /** Returns the overall progress of the root. */ + /** + * Returns the overall progress of the root. + * @return progress + */ // this method probably does not need to be synchronized as getInternal() is // synchronized and the node's parent never changes. Still, it doesn't hurt. public synchronized float get() { @@ -202,6 +233,8 @@ public synchronized float get() { /** * Returns progress in this node. 
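
The weighted-phase arithmetic behind Progress in one sketch: a leaf at 0.5 inside a 0.7-weight phase contributes 0.35 at the root.

    import org.apache.hadoop.util.Progress;

    public class PhaseDemo {
      public static void main(String[] args) {
        Progress root = new Progress();
        Progress copy = root.addPhase("copy", 0.7f); // weighted phases
        Progress sort = root.addPhase("sort", 0.3f);
        copy.set(0.5f);                 // progress within the copy phase
        System.out.println(root.get()); // 0.35 = 0.7 * 0.5
      }
    }
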
get() would give overall progress of the * root node(not just given current node). + * + * @return progress */ public synchronized float getProgress() { return getInternal(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java index 9807adc50d6d1..506a16c3b10bf 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java @@ -83,6 +83,10 @@ public static int readRawVarint32(DataInput in) throws IOException { * as the old connection context as was done for writable where * the effective and real users are set based on the auth method. * + * @param protocol protocol + * @param ugi ugi + * @param authMethod authMethod + * @return IpcConnectionContextProto */ public static IpcConnectionContextProto makeIpcConnectionContext( final String protocol, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java index 73d8d90d42507..0097eaa6b5756 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java @@ -40,6 +40,9 @@ private static void fix(IndexedSortable s, int p, int r) { /** * Deepest recursion before giving up and doing a heapsort. * Returns 2 * ceil(log(n)). + * + * @param x x + * @return MaxDepth */ protected static int getMaxDepth(int x) { if (x <= 0) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java index 47e44c9e09f8d..2de99fce3c542 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java @@ -120,6 +120,7 @@ private static void setJobConf(Object theObject, Configuration conf) { * * @param theClass class of which an object is created * @param conf Configuration + * @param Generics Type T * @return a new object */ @SuppressWarnings("unchecked") @@ -133,6 +134,7 @@ public static T newInstance(Class theClass, Configuration conf) { * @param conf Configuration * @param argTypes the types of the arguments * @param values the values of the arguments + * @param Generics Type * @return a new object */ @SuppressWarnings("unchecked") @@ -284,6 +286,7 @@ public static void logThreadInfo(Logger log, * Return the correctly-typed {@link Class} of the given object. * * @param o object whose correctly-typed Class is to be obtained + * @param Generics Type T * @return the correctly typed Class of the given object. */ @SuppressWarnings("unchecked") @@ -332,11 +335,13 @@ private static SerializationFactory getFactory(Configuration conf) { } /** - * Make a copy of the writable object using serialization to a buffer + * Make a copy of the writable object using serialization to a buffer. * @param src the object to copy from * @param dst the object to copy into, which is destroyed + * @param Generics Type + * @param conf configuration * @return dst param (the copy) - * @throws IOException + * @throws IOException raised on errors performing I/O. 
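
ReflectionUtils.newInstance in a sketch with a stock Writable type: any class with a no-arg constructor works, and Configurable instances additionally get setConf() called:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.util.ReflectionUtils;

    public class NewInstanceDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        Text t = ReflectionUtils.newInstance(Text.class, conf);
        System.out.println(t.getLength()); // 0, a fresh instance
      }
    }
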
*/ @SuppressWarnings("unchecked") public static T copy(Configuration conf, @@ -368,6 +373,9 @@ public static void cloneWritableInto(Writable dst, /** * Gets all the declared fields of a class including fields declared in * superclasses. + * + * @param clazz clazz + * @return field List */ public static List getDeclaredFieldsIncludingInherited(Class clazz) { List fields = new ArrayList(); @@ -390,6 +398,9 @@ public int compare(Field a, Field b) { /** * Gets all the declared methods of a class including methods declared in * superclasses. + * + * @param clazz clazz + * @return Method List */ public static List getDeclaredMethodsIncludingInherited(Class clazz) { List methods = new ArrayList(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java index 50126002b7be7..fc3f3780414d0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java @@ -156,6 +156,7 @@ public static void unJar(InputStream inputStream, File toDir, * @param inputStream the jar stream to unpack * @param toDir the destination directory into which to unpack the jar * @param unpackRegex the pattern to match jar entries against + * @param name name * * @throws IOException if an I/O error has occurred or toDir * cannot be created and does not already exist @@ -231,7 +232,11 @@ private static void ensureDirectory(File dir) throws IOException { } /** Run a Hadoop job jar. If the main class is not in the jar's manifest, - * then it must be provided on the command line. */ + * then it must be provided on the command line. + * + * @param args args + * @throws Throwable error + */ public static void main(String[] args) throws Throwable { new RunJar().run(args); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SequentialNumber.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SequentialNumber.java index 685e92d628136..0298b4e32f5d2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SequentialNumber.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SequentialNumber.java @@ -30,7 +30,10 @@ public abstract class SequentialNumber implements IdGenerator { private final AtomicLong currentValue; - /** Create a new instance with the given initial value. */ + /** + * Create a new instance with the given initial value. + * @param initialValue initialValue + */ protected SequentialNumber(final long initialValue) { currentValue = new AtomicLong(initialValue); } @@ -40,7 +43,10 @@ public long getCurrentValue() { return currentValue.get(); } - /** Set current value. */ + /** + * Set current value. + * @param value value + */ public void setCurrentValue(long value) { currentValue.set(value); } @@ -63,7 +69,12 @@ public long nextValue() { return currentValue.incrementAndGet(); } - /** Skip to the new value. */ + /** + * Skip to the new value. 
+ * @param newValue newValue + * @throws IllegalStateException + * Cannot skip to less than the current value + */ public void skipTo(long newValue) throws IllegalStateException { for(;;) { final long c = getCurrentValue(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java index bb367278e537e..9e88f53b6ff79 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java @@ -30,7 +30,12 @@ @InterfaceStability.Unstable public class ServletUtil { /** - * Initial HTML header + * Initial HTML header. + * + * @param response response + * @param title title + * @throws IOException raised on errors performing I/O. + * @return PrintWriter */ public static PrintWriter initHTML(ServletResponse response, String title ) throws IOException { @@ -47,6 +52,10 @@ public static PrintWriter initHTML(ServletResponse response, String title /** * Get a parameter from a ServletRequest. * Return null if the parameter contains only white spaces. + * + * @param request request + * @param name name + * @return get a parameter from a ServletRequest */ public static String getParameter(ServletRequest request, String name) { String s = request.getParameter(name); @@ -58,8 +67,13 @@ public static String getParameter(ServletRequest request, String name) { } /** + * parseLongParam. + * + * @param request request + * @param param param * @return a long value as passed in the given parameter, throwing * an exception if it is not present or if it is not a valid number. + * @throws IOException raised on errors performing I/O. */ public static long parseLongParam(ServletRequest request, String param) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Sets.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Sets.java index bddcbeb21f26a..afd02a47a3bc0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Sets.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Sets.java @@ -55,7 +55,10 @@ private Sets() { * instead. If {@code E} is an {@link Enum} type, use {@link EnumSet#noneOf} * instead. Otherwise, strongly consider using a {@code LinkedHashSet} * instead, at the cost of increased memory footprint, to get - * deterministic iteration behavior. + * deterministic iteration behavior.

+ * + * @param Generics Type E + * @return a new, empty {@code TreeSet} */ public static HashSet newHashSet() { return new HashSet(); @@ -66,8 +69,9 @@ public static HashSet newHashSet() { * natural sort ordering of its elements. * *

* Note: if mutability is not required, use ImmutableSortedSet#of() - * instead.

* + * @param Generics Type E * @return a new, empty {@code TreeSet} */ public static TreeSet newTreeSet() { @@ -83,11 +87,15 @@ public static TreeSet newTreeSet() { * instead. If {@code E} is an {@link Enum} type, use * {@link EnumSet#of(Enum, Enum[])} instead. Otherwise, strongly consider * using a {@code LinkedHashSet} instead, at the cost of increased memory - * footprint, to get deterministic iteration behavior. + * footprint, to get deterministic iteration behavior.

* *

* This method is just a small convenience, either for * {@code newHashSet(}{@link Arrays#asList}{@code (...))}, or for creating an - * empty set then calling {@link Collections#addAll}.

+ * + * @param Generics Type E + * @param elements the elements that the set should contain + * @return a new, empty thread-safe {@code Set} */ @SafeVarargs public static HashSet newHashSet(E... elements) { @@ -103,10 +111,14 @@ public static HashSet newHashSet(E... elements) { * *

* Note: if mutability is not required and the elements are * non-null, use ImmutableSet#copyOf(Iterable) instead. (Or, change - * {@code elements} to be a FluentIterable and call {@code elements.toSet()}.)

* *

Note: if {@code E} is an {@link Enum} type, use - * newEnumSet(Iterable, Class) instead. + * newEnumSet(Iterable, Class) instead.

+ *
+ * @param <E> Generics Type E
+ * @param elements the elements that the set should contain
+ * @return a new {@code HashSet} containing the given elements
 */
 public static <E> HashSet<E> newHashSet(Iterable<? extends E> elements) {
   return (elements instanceof Collection)
@@ -135,6 +147,7 @@ public static <E> HashSet<E> newHashSet(Iterable<? extends E> elements) {
  * then calling Iterables#addAll. This method is not very useful and will
  * likely be deprecated in the future.
  *
+ * @param <E> Generics Type E
  * @param elements the elements that the set should contain
  * @return a new {@code TreeSet} containing those elements (minus duplicates)
  */
@@ -163,13 +176,17 @@ private static boolean addAll(TreeSet<E> addTo,
  * calling Iterators#addAll.
  *
  *
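As a quick illustration of the TreeSet factory annotated above, a sketch (assuming the newTreeSet(Iterable) overload this hunk documents):

    import java.util.Arrays;
    import java.util.List;
    import java.util.TreeSet;
    import org.apache.hadoop.util.Sets;

    public class TreeSetFactoryDemo {
      public static void main(String[] args) {
        List<Integer> input = Arrays.asList(3, 1, 2, 3, 1);
        // Duplicates collapse and elements come back in natural order.
        TreeSet<Integer> sorted = Sets.newTreeSet(input);
        System.out.println(sorted); // [1, 2, 3]
      }
    }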

Note: if mutability is not required and the elements are - * non-null, use ImmutableSet#copyOf(Iterator) instead. + * non-null, use ImmutableSet#copyOf(Iterator) instead.

* *

Note: if {@code E} is an {@link Enum} type, you should create - * an {@link EnumSet} instead. + * an {@link EnumSet} instead.

* *

Overall, this method is not very useful and will likely be deprecated - * in the future. + * in the future.

+ *
+ * @param <E> Generics Type E
+ * @param elements elements
+ * @return a new {@code HashSet} containing the given elements
 */
 public static <E> HashSet<E> newHashSet(Iterator<? extends E> elements) {
   HashSet<E> set = newHashSet();
@@ -184,10 +201,11 @@ public static <E> HashSet<E> newHashSet(Iterator<? extends E> elements) {
  * expect it to do.
  *
  *

This behavior can't be broadly guaranteed, but has been tested with - * OpenJDK 1.7 and 1.8. + * OpenJDK 1.7 and 1.8.

* * @param expectedSize the number of elements you expect to add to the * returned set + * @param Generics Type E * @return a new, empty hash set with enough capacity to hold * {@code expectedSize} elements without resizing * @throws IllegalArgumentException if {@code expectedSize} is negative @@ -223,6 +241,11 @@ private static boolean addAll(Collection addTo, *
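For context, a sketch of the pre-sizing behavior described above. The factory name newHashSetWithExpectedSize is taken from the surrounding Sets API and is an assumption here, since this hunk does not show the signature:

    import java.util.HashSet;
    import org.apache.hadoop.util.Sets;

    public class PreSizedSetDemo {
      public static void main(String[] args) {
        // Sized so that adding expectedSize elements should not trigger
        // a rehash (per the JDK behavior noted above).
        HashSet<String> names = Sets.newHashSetWithExpectedSize(10_000);
        for (int i = 0; i < 10_000; i++) {
          names.add("user-" + i);
        }
        System.out.println(names.size()); // 10000
      }
    }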

Results are undefined if {@code set1} and {@code set2} are sets based
  * on different equivalence relations (as {@code HashSet}, {@code TreeSet},
  * and the keySet of an {@code IdentityHashMap} all are).
+ *
+ * @param set1 set1
+ * @param set2 set2
+ * @param <E> Generics Type E
+ * @return a view of the intersection of the two sets
  */
 public static <E> Set<E> intersection(final Set<E> set1,
     final Set<E> set2) {
@@ -246,6 +269,11 @@ public static <E> Set<E> intersection(final Set<E> set1,
  * based on different equivalence relations (as {@link HashSet},
  * {@link TreeSet}, and the {@link Map#keySet} of an
  * {@code IdentityHashMap} all are).
+ *
+ * @param set1 set1
+ * @param set2 set2
+ * @param <E> Generics Type E
+ * @return a view of the union of the two sets
  */
 public static <E> Set<E> union(
     final Set<E> set1, final Set<E> set2) {
@@ -272,6 +300,11 @@ public static <E> Set<E> union(
  * This method is used to find difference for HashSets. For TreeSets with
  * strict order requirement, recommended method is
  * {@link #differenceInTreeSets(Set, Set)}.
+ *
+ * @param set1 set1
+ * @param set2 set2
+ * @param <E> Generics Type E
+ * @return the difference of the two sets (elements of set1 not in set2)
  */
 public static <E> Set<E> difference(
     final Set<E> set1, final Set<E> set2) {
@@ -297,6 +330,11 @@ public static <E> Set<E> difference(
  *
  * This method is used to find difference for TreeSets. For HashSets,
  * recommended method is {@link #difference(Set, Set)}.
+ *
+ * @param <E> Generics Type E
+ * @param set1 set1
+ * @param set2 set2
+ * @return the difference of the two TreeSets (elements of set1 not in set2)
  */
 public static <E> Set<E> differenceInTreeSets(
     final Set<E> set1, final Set<E> set2) {
@@ -320,6 +358,11 @@ public static <E> Set<E> differenceInTreeSets(
  *

Results are undefined if {@code set1} and {@code set2} are sets based
  * on different equivalence relations (as {@code HashSet}, {@code TreeSet},
  * and the keySet of an {@code IdentityHashMap} all are).
+ *
+ * @param set1 set1
+ * @param set2 set2
+ * @param <E> Generics Type E
+ * @return the symmetric difference of the two sets
  */
 public static <E> Set<E> symmetricDifference(
     final Set<E> set1, final Set<E> set2) {
@@ -345,6 +388,7 @@ public static <E> Set<E> symmetricDifference(
  *
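Taken together, the four set operations in this file behave as in the following sketch (element order of the printed sets is not guaranteed):

    import java.util.Set;
    import org.apache.hadoop.util.Sets;

    public class SetAlgebraDemo {
      public static void main(String[] args) {
        Set<Integer> s1 = Sets.newHashSet(1, 2, 3);
        Set<Integer> s2 = Sets.newHashSet(3, 4);
        System.out.println(Sets.intersection(s1, s2));        // [3]
        System.out.println(Sets.union(s1, s2));               // [1, 2, 3, 4]
        System.out.println(Sets.difference(s1, s2));          // [1, 2]
        System.out.println(Sets.symmetricDifference(s1, s2)); // [1, 2, 4]
      }
    }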

Unlike {@code HashSet}, this class does NOT allow {@code null} to be * used as an element. The set is serializable. * + * @param Generics Type * @return a new, empty thread-safe {@code Set} */ public static Set newConcurrentHashSet() { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java index 16673129cb7fe..dc13697f158ad 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java @@ -68,10 +68,12 @@ public static boolean shutdownThread(Thread thread, } /** + * shutdownExecutorService. + * * @param service {@link ExecutorService to be shutdown} * @return true if the service is terminated, * false otherwise - * @throws InterruptedException + * @throws InterruptedException if the thread is interrupted. */ public static boolean shutdownExecutorService(ExecutorService service) throws InterruptedException { @@ -79,13 +81,15 @@ public static boolean shutdownExecutorService(ExecutorService service) } /** + * shutdownExecutorService. + * * @param service {@link ExecutorService to be shutdown} * @param timeoutInMs time to wait for {@link * ExecutorService#awaitTermination(long, java.util.concurrent.TimeUnit)} * calls in milli seconds. * @return true if the service is terminated, * false otherwise - * @throws InterruptedException + * @throws InterruptedException if the thread is interrupted. */ public static boolean shutdownExecutorService(ExecutorService service, long timeoutInMs) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StopWatch.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StopWatch.java index c0eedf6110d7f..7ccaebedb5662 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StopWatch.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StopWatch.java @@ -89,6 +89,9 @@ public StopWatch reset() { } /** + * now. + * + * @param timeUnit timeUnit. * @return current elapsed time in specified timeunit. */ public long now(TimeUnit timeUnit) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java index 4a30ee2800aa9..040adc3ae61ff 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java @@ -74,6 +74,9 @@ public static String weakIntern(String sample) { /** * Interns all the strings in the given array in place, * returning the same array. 
+ * + * @param strings strings + * @return internStringsInArray */ public static String[] internStringsInArray(String[] strings) { for (int i = 0; i < strings.length; i++) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java index baae69f2e1791..fe839da29281e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java @@ -120,7 +120,11 @@ public static String humanReadableInt(long number) { return TraditionalBinaryPrefix.long2String(number, "", 1); } - /** The same as String.format(Locale.ENGLISH, format, objects). */ + /** + * The same as String.format(Locale.ENGLISH, format, objects). + * @param format format + * @param objects objects + */ public static String format(final String format, final Object... objects) { return String.format(Locale.ENGLISH, format, objects); } @@ -156,7 +160,7 @@ public static String arrayToString(String[] strs) { /** * Given an array of bytes it will convert the bytes to a hex string * representation of the bytes - * @param bytes + * @param bytes bytes * @param start start index, inclusively * @param end end index, exclusively * @return hex string representation of the byte array @@ -172,7 +176,11 @@ public static String byteToHexString(byte[] bytes, int start, int end) { return s.toString(); } - /** Same as byteToHexString(bytes, 0, bytes.length). */ + /** + * Same as byteToHexString(bytes, 0, bytes.length). + * @param bytes bytes + * @return byteToHexString + */ public static String byteToHexString(byte bytes[]) { return byteToHexString(bytes, 0, bytes.length); } @@ -203,8 +211,9 @@ public static byte[] hexStringToByte(String hex) { return bts; } /** - * - * @param uris + * uriToString. + * @param uris uris + * @return uriToString */ public static String uriToString(URI[] uris){ if (uris == null) { @@ -242,8 +251,9 @@ public static URI[] stringToURI(String[] str){ } /** - * - * @param str + * stringToPath. + * @param str str + * @return path array */ public static Path[] stringToPath(String[] str){ if (str == null) { @@ -263,6 +273,8 @@ public static Path[] stringToPath(String[] str){ * * @param finishTime finish time * @param startTime start time + * @return a String in the format Xhrs, Ymins, Z sec, + * for the time difference between two times. */ public static String formatTimeDiff(long finishTime, long startTime){ long timeDiff = finishTime - startTime; @@ -275,6 +287,7 @@ public static String formatTimeDiff(long finishTime, long startTime){ * String in the format Xhrs, Ymins, Z sec. * * @param timeDiff The time difference to format + * @return formatTime String */ public static String formatTime(long timeDiff){ StringBuilder buf = new StringBuilder(); @@ -305,6 +318,7 @@ public static String formatTime(long timeDiff){ * more than 100 hours ,it is displayed as 99hrs, 59mins, 59sec. 
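As an illustration of the time-formatting helpers annotated above (the output string shown is approximate; exact spacing follows the implementation):

    import org.apache.hadoop.util.StringUtils;

    public class TimeFormatDemo {
      public static void main(String[] args) {
        // 2 hours, 30 minutes, 5 seconds expressed in milliseconds.
        long diff = (2 * 3600 + 30 * 60 + 5) * 1000L;
        System.out.println(StringUtils.formatTime(diff)); // e.g. 2hrs, 30mins, 5sec
      }
    }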
* * @param timeDiff The time difference to format + * @return format time sortable */ public static String formatTimeSortable(long timeDiff) { StringBuilder buf = new StringBuilder(); @@ -563,6 +577,7 @@ public static String[] split( * @param escapeChar character used to escape * @param start from where to search * @param split used to pass back the extracted string + * @return index */ public static int findNext(String str, char separator, char escapeChar, int start, StringBuilder split) { @@ -615,7 +630,12 @@ private static boolean hasChar(char[] chars, char character) { } /** + * escapeString. + * + * @param str str + * @param escapeChar escapeChar * @param charsToEscape array of characters to be escaped + * @return escapeString */ public static String escapeString(String str, char escapeChar, char[] charsToEscape) { @@ -658,7 +678,11 @@ public static String unEscapeString( } /** + * unEscapeString. + * @param str str + * @param escapeChar escapeChar * @param charsToEscape array of characters to unescape + * @return escape string */ public static String unEscapeString(String str, char escapeChar, char[] charsToEscape) { @@ -807,7 +831,10 @@ private TraditionalBinaryPrefix(int bitShift) { } /** - * @return The TraditionalBinaryPrefix object corresponding to the symbol. + * The TraditionalBinaryPrefix object corresponding to the symbol. + * + * @param symbol symbol + * @return traditional binary prefix object */ public static TraditionalBinaryPrefix valueOf(char symbol) { symbol = Character.toUpperCase(symbol); @@ -907,7 +934,7 @@ public static String long2String(long n, String unit, int decimalPlaces) { /** * Escapes HTML Special characters present in the string. - * @param string + * @param string param string * @return HTML Escaped String representation */ public static String escapeHTML(String string) { @@ -942,13 +969,22 @@ public static String escapeHTML(String string) { } /** + * a byte description of the given long interger value. + * + * @param len len * @return a byte description of the given long interger value. */ public static String byteDesc(long len) { return TraditionalBinaryPrefix.long2String(len, "B", 2); } - /** @deprecated use StringUtils.format("%.2f", d). */ + /** + * limitDecimalTo2. + * + * @param d double param + * @return string value ("%.2f") + * @deprecated use StringUtils.format("%.2f", d). + */ @Deprecated public static String limitDecimalTo2(double d) { return format("%.2f", d); @@ -959,6 +995,7 @@ public static String limitDecimalTo2(double d) { * * @param separator Separator to join with. * @param strings Strings to join. + * @return join string */ public static String join(CharSequence separator, Iterable strings) { Iterator i = strings.iterator(); @@ -1054,6 +1091,8 @@ public static String replaceTokens(String template, Pattern pattern, /** * Get stack trace for a given thread. + * @param t thread + * @return stack trace string */ public static String getStackTrace(Thread t) { final StackTraceElement[] stackTrace = t.getStackTrace(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Time.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Time.java index 42005f0b09b3e..4c6db79f64f1c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Time.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Time.java @@ -81,6 +81,8 @@ public static long monotonicNowNanos() { /** * Convert time in millisecond to human readable format. 
+ * + * @param millis millisecond * @return a human readable string for the input time */ public static String formatTime(long millis) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java index 336700a6e276a..5ac09221c1b12 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java @@ -57,6 +57,7 @@ public class ToolRunner { * @param tool Tool to run. * @param args command-line arguments to the tool. * @return exit code of the {@link Tool#run(String[])} method. + * @throws Exception Exception */ public static int run(Configuration conf, Tool tool, String[] args) throws Exception{ @@ -89,6 +90,7 @@ public static int run(Configuration conf, Tool tool, String[] args) * @param tool Tool to run. * @param args command-line arguments to the tool. * @return exit code of the {@link Tool#run(String[])} method. + * @throws Exception exception */ public static int run(Tool tool, String[] args) throws Exception{ @@ -107,7 +109,10 @@ public static void printGenericCommandUsage(PrintStream out) { /** * Print out a prompt to the user, and return true if the user - * responds with "y" or "yes". (case insensitive) + * responds with "y" or "yes". (case insensitive). + * + * @param prompt prompt + * @throws IOException raised on errors performing I/O. */ public static boolean confirmPrompt(String prompt) throws IOException { while (true) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/XMLUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/XMLUtils.java index 6d3894f5b4ca5..928c15452bcf8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/XMLUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/XMLUtils.java @@ -39,8 +39,10 @@ public class XMLUtils { * @param styleSheet the style-sheet * @param xml input xml data * @param out output - * @throws TransformerConfigurationException - * @throws TransformerException + * @throws TransformerConfigurationException synopsis signals a problem + * creating a transformer object + * @throws TransformerException this is used for throwing processor + * exceptions before the processing has started. */ public static void transform( InputStream styleSheet, InputStream xml, Writer out diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java index 6d38c606c8c4c..5b642e8a8292a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java @@ -88,6 +88,7 @@ public static int removeSpecificPerms(int perms, int remove) { * Parse comma separated list of ACL entries to secure generated nodes, e.g. 
* sasl:hdfs/host1@MY.DOMAIN:cdrwa,sasl:hdfs/host2@MY.DOMAIN:cdrwa * + * @param aclString aclString * @return ACL list * @throws BadAclFormatException if an ACL is invalid */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/package-info.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/package-info.java index 1c204bb9979a8..18d23b31ff6d7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/package-info.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/package-info.java @@ -18,14 +18,14 @@ /** * Support for functional programming within the Hadoop APIs. - *

+ *
 * Much of this is needed simply to cope with Java's checked exceptions and
 * the fact that the java.util.function interfaces can only throw runtime
 * exceptions.
- *

+ *

* Pretty much all the Hadoop FS APIs raise IOExceptions, hence the need * for these classes. If Java had made a different decision about the * nature of exceptions, life would be better. - *

+ *

* Do note that the {@link org.apache.hadoop.util.functional.RemoteIterators} * iterators go beyond that of the java ones, in terms of declaring themselves * Closeable and implementors of From da105fd3e227e31b8ba58dcedd288a24c64fc8e9 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Sat, 14 May 2022 17:08:33 -0700 Subject: [PATCH 41/53] HADOOP-18229. Fix some java doc compilation 100+ warnings. --- .../org/apache/hadoop/fs/viewfs/FsGetter.java | 1 + .../apache/hadoop/fs/viewfs/InodeTree.java | 6 ++- .../io/erasurecode/coder/util/HHUtil.java | 2 + .../erasurecode/rawcoder/util/DumpUtil.java | 10 ++-- .../io/erasurecode/rawcoder/util/GF256.java | 10 +++- .../rawcoder/util/GaloisField.java | 38 +++++++++++--- .../io/erasurecode/rawcoder/util/RSUtil.java | 15 ++++++ .../apache/hadoop/metrics2/util/MBeans.java | 8 +-- .../hadoop/metrics2/util/SampleQuantiles.java | 2 +- .../apache/hadoop/net/unix/DomainSocket.java | 13 ++++- .../org/apache/hadoop/tools/TableListing.java | 11 +++- .../apache/hadoop/util/AsyncDiskService.java | 7 ++- .../BlockingThreadPoolExecutorService.java | 1 + .../org/apache/hadoop/util/CrcComposer.java | 27 ++++++++++ .../java/org/apache/hadoop/util/CrcUtil.java | 37 ++++++++++++++ .../java/org/apache/hadoop/util/Daemon.java | 11 +++- .../org/apache/hadoop/util/DataChecksum.java | 51 +++++++++++++++++-- .../apache/hadoop/util/DirectBufferPool.java | 3 ++ .../org/apache/hadoop/util/DiskChecker.java | 16 +++--- .../hadoop/util/DiskValidatorFactory.java | 2 + .../org/apache/hadoop/util/GcTimeMonitor.java | 35 +++++++++++-- .../org/apache/hadoop/util/GenericsUtil.java | 4 ++ .../java/org/apache/hadoop/util/IPList.java | 2 +- .../apache/hadoop/util/IdentityHashStore.java | 11 ++++ .../apache/hadoop/util/IndexedSortable.java | 7 +++ .../org/apache/hadoop/util/IndexedSorter.java | 8 +++ .../hadoop/util/IntrusiveCollection.java | 1 + .../java/org/apache/hadoop/util/Lists.java | 2 + .../org/apache/hadoop/util/PriorityQueue.java | 2 +- .../org/apache/hadoop/util/StringUtils.java | 1 + .../org/apache/hadoop/util/ToolRunner.java | 3 ++ 31 files changed, 306 insertions(+), 41 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java index b6490e6b9db84..b2986a1c28434 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java @@ -37,6 +37,7 @@ public class FsGetter { * @param uri uri * @param conf configuration * @throws IOException raised on errors performing I/O. 
+ * @return file system */ public FileSystem getNewInstance(URI uri, Configuration conf) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java index c03e41bae7ff3..db1719e992926 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java @@ -597,12 +597,14 @@ Configuration getConfig() { * * @param config - the mount table keys are prefixed with * FsConstants.CONFIG_VIEWFS_PREFIX - * @param viewName - the name of the mount table - if null use defaultMT name + * @param viewName - the name of the mount table + * if null use defaultMT name * @param theUri theUri * @param initingUriAsFallbackOnNoMounts initingUriAsFallbackOnNoMounts * @throws UnsupportedFileSystemException file system for uri is * not found - * @throws URISyntaxException if the URI does not have an authority it is badly formed. + * @throws URISyntaxException if the URI does not have an authority + * it is badly formed. * @throws FileAlreadyExistsException there is a file at the path specified * or is discovered on one of its ancestors. * @throws IOException raised on errors performing I/O. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java index 91d02415bfd93..2fbac7a3457a7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java @@ -202,6 +202,8 @@ public static ByteBuffer getPiggyBackForDecode(ByteBuffer[][] inputs, /** * Find the valid input from all the inputs. + * + * @param Generics Type T * @param inputs input buffers to look for valid input * @return the first valid input */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java index 6de0716174319..b4220bd8dd61b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java @@ -36,6 +36,10 @@ private DumpUtil() { /** * Convert bytes into format like 0x02 02 00 80. * If limit is negative or too large, then all bytes will be converted. + * + * @param bytes bytes + * @param limit limit + * @return bytesToHex */ public static String bytesToHex(byte[] bytes, int limit) { if (limit <= 0 || limit > bytes.length) { @@ -70,8 +74,8 @@ public static void dumpMatrix(byte[] matrix, /** * Print data in hex format in an array of chunks. - * @param header - * @param chunks + * @param header header + * @param chunks chunks */ public static void dumpChunks(String header, ECChunk[] chunks) { System.out.println(); @@ -84,7 +88,7 @@ public static void dumpChunks(String header, ECChunk[] chunks) { /** * Print data in hex format in a chunk. 
- * @param chunk + * @param chunk chunk */ public static void dumpChunk(ECChunk chunk) { String str; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GF256.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GF256.java index 35534f307a7a0..36ef5abcd0f0f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GF256.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GF256.java @@ -195,6 +195,10 @@ public static byte gfInv(byte a) { * Invert a matrix assuming it's invertible. * * Ported from Intel ISA-L library. + * + * @param inMatrix inMatrix + * @param outMatrix outMatrix + * @param n n */ public static void gfInvertMatrix(byte[] inMatrix, byte[] outMatrix, int n) { byte temp; @@ -262,7 +266,11 @@ public static void gfInvertMatrix(byte[] inMatrix, byte[] outMatrix, int n) { * * Calculates const table gftbl in GF(2^8) from single input A * gftbl(A) = {A{00}, A{01}, A{02}, ... , A{0f} }, {A{00}, A{10}, A{20}, - * ... , A{f0} } -- from ISA-L implementation + * ... , A{f0} } -- from ISA-L implementation. + * + * @param c c + * @param tbl tbl + * @param offset offset */ public static void gfVectMulInit(byte c, byte[] tbl, int offset) { byte c2 = (byte) ((c << 1) ^ ((c & 0x80) != 0 ? 0x1d : 0)); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java index f80fceca94c34..2ca9e7b8261eb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java @@ -93,10 +93,11 @@ private GaloisField(int fieldSize, int primitivePolynomial) { } /** - * Get the object performs Galois field arithmetics + * Get the object performs Galois field arithmetics. * * @param fieldSize size of the field * @param primitivePolynomial a primitive polynomial corresponds to the size + * @return GaloisField */ public static GaloisField getInstance(int fieldSize, int primitivePolynomial) { @@ -114,7 +115,8 @@ public static GaloisField getInstance(int fieldSize, } /** - * Get the object performs Galois field arithmetic with default setting + * Get the object performs Galois field arithmetic with default setting. + * @return GaloisField */ public static GaloisField getInstance() { return getInstance(DEFAULT_FIELD_SIZE, DEFAULT_PRIMITIVE_POLYNOMIAL); @@ -236,7 +238,13 @@ public void solveVandermondeSystem(int[] x, int[] y, int len) { } /** - * A "bulk" version to the solving of Vandermonde System + * A "bulk" version to the solving of Vandermonde System. + * + * @param x input x + * @param y input y + * @param outputOffsets input outputOffsets + * @param len input len + * @param dataLen input dataLen */ public void solveVandermondeSystem(int[] x, byte[][] y, int[] outputOffsets, int len, int dataLen) { @@ -269,6 +277,10 @@ public void solveVandermondeSystem(int[] x, byte[][] y, int[] outputOffsets, /** * A "bulk" version of the solveVandermondeSystem, using ByteBuffer. 
+ * + * @param x input x + * @param y input y + * @param len input len */ public void solveVandermondeSystem(int[] x, ByteBuffer[] y, int len) { ByteBuffer p; @@ -413,10 +425,10 @@ public void substitute(byte[][] p, byte[] q, int x) { * Tends to be 2X faster than the "int" substitute in a loop. * * @param p input polynomial - * @param offsets - * @param len + * @param offsets input offset + * @param len input len * @param q store the return result - * @param offset + * @param offset input offset * @param x input field */ public void substitute(byte[][] p, int[] offsets, @@ -440,6 +452,7 @@ public void substitute(byte[][] p, int[] offsets, * @param p input polynomial * @param q store the return result * @param x input field + * @param len input len */ public void substitute(ByteBuffer[] p, int len, ByteBuffer q, int x) { int y = 1, iIdx, oIdx; @@ -459,6 +472,9 @@ public void substitute(ByteBuffer[] p, int len, ByteBuffer q, int x) { /** * The "bulk" version of the remainder. * Warning: This function will modify the "dividend" inputs. + * + * @param divisor divisor + * @param dividend dividend */ public void remainder(byte[][] dividend, int[] divisor) { for (int i = dividend.length - divisor.length; i >= 0; i--) { @@ -476,6 +492,11 @@ public void remainder(byte[][] dividend, int[] divisor) { /** * The "bulk" version of the remainder. * Warning: This function will modify the "dividend" inputs. + * + * @param dividend dividend + * @param offsets offsets + * @param len len + * @param divisor divisor */ public void remainder(byte[][] dividend, int[] offsets, int len, int[] divisor) { @@ -497,6 +518,9 @@ public void remainder(byte[][] dividend, int[] offsets, /** * The "bulk" version of the remainder, using ByteBuffer. * Warning: This function will modify the "dividend" inputs. + * + * @param dividend dividend + * @param divisor divisor */ public void remainder(ByteBuffer[] dividend, int[] divisor) { int idx1, idx2; @@ -519,6 +543,8 @@ public void remainder(ByteBuffer[] dividend, int[] divisor) { /** * Perform Gaussian elimination on the given matrix. This matrix has to be a * fat matrix (number of rows > number of columns). + * + * @param matrix matrix */ public void gaussianElimination(int[][] matrix) { assert(matrix != null && matrix.length > 0 && matrix[0].length > 0 diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java index 43823d0f8c300..e2abbbbdb788b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java @@ -59,6 +59,10 @@ public static void initTables(int k, int rows, byte[] codingMatrix, /** * Ported from Intel ISA-L library. + * + * @param k k + * @param a a + * @param m m */ public static void genCauchyMatrix(byte[] a, int m, int k) { // Identity matrix in high position @@ -82,6 +86,13 @@ public static void genCauchyMatrix(byte[] a, int m, int k) { * * The algorithm is ported from Intel ISA-L library for compatible. It * leverages Java auto-vectorization support for performance. 
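For orientation, a minimal sketch of obtaining and using the Galois field object documented above. The multiply accessor belongs to this class's wider API and is an assumption here, as it does not appear in this hunk:

    import org.apache.hadoop.io.erasurecode.rawcoder.util.GaloisField;

    public class GfDemo {
      public static void main(String[] args) {
        // Default field: GF(2^8) with the default primitive polynomial.
        GaloisField gf = GaloisField.getInstance();
        // Field multiplication stays within 0..255.
        int product = gf.multiply(0x53, 0xCA);
        System.out.println(Integer.toHexString(product));
      }
    }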
+ * + * @param gfTables gfTables + * @param dataLen dataLen + * @param inputs inputs + * @param inputOffsets inputOffsets + * @param outputs outputs + * @param outputOffsets outputOffsets */ public static void encodeData(byte[] gfTables, int dataLen, byte[][] inputs, int[] inputOffsets, byte[][] outputs, @@ -133,6 +144,10 @@ public static void encodeData(byte[] gfTables, int dataLen, byte[][] inputs, /** * See above. Try to use the byte[] version when possible. + * + * @param gfTables gfTables + * @param inputs inputs + * @param outputs outputs */ public static void encodeData(byte[] gfTables, ByteBuffer[] inputs, ByteBuffer[] outputs) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java index 20b1cd6051961..7de287ad5dfbd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java @@ -63,8 +63,8 @@ private MBeans() { * Where the {@literal and } are the supplied * parameters. * - * @param serviceName - * @param nameName + * @param serviceName serviceName + * @param nameName nameName * @param theMbean - the MBean to register * @return the named used to register the MBean */ @@ -79,8 +79,8 @@ static public ObjectName register(String serviceName, String nameName, * Where the {@literal and } are the supplied * parameters. * - * @param serviceName - * @param nameName + * @param serviceName serviceName + * @param nameName nameName * @param properties - Key value pairs to define additional JMX ObjectName * properties. * @param theMbean - the MBean to register diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java index 737ccc0d788dd..e39bd4d5db68c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java @@ -108,7 +108,7 @@ private double allowableError(int rank) { /** * Add a new value from the stream. * - * @param v + * @param v v */ synchronized public void insert(long v) { buffer[bufferCount] = v; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java index 82c087737cbad..325dbfe888e94 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java @@ -106,6 +106,8 @@ native static void validateSocketPathSecurity0(String path, /** * Return true only if UNIX domain sockets are available. + * + * @return loadingFailureReason */ public static String getLoadingFailureReason() { return loadingFailureReason; @@ -184,6 +186,7 @@ private void unreference(boolean checkClosed) throws ClosedChannelException { * * @param path The path to bind and listen on. * @return The new DomainSocket. + * @throws IOException raised on errors performing I/O. 
*/ public static DomainSocket bindAndListen(String path) throws IOException { if (loadingFailureReason != null) { @@ -387,7 +390,7 @@ public void close() throws IOException { /** * Call shutdown(SHUT_RDWR) on the UNIX domain socket. * - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void shutdown() throws IOException { refCount.reference(); @@ -413,6 +416,7 @@ private native static void sendFileDescriptors0(int fd, * one byte. * @param offset The offset in the jbuf array to start at. * @param length Length of the jbuf array to use. + * @throws IOException raised on errors performing I/O. */ public void sendFileDescriptors(FileDescriptor descriptors[], byte jbuf[], int offset, int length) throws IOException { @@ -433,6 +437,13 @@ private static native int receiveFileDescriptors0(int fd, /** * Receive some FileDescriptor objects from the process on the other side of * this socket, and wrap them in FileInputStream objects. + * + * @param streams input stream + * @param buf input buf + * @param offset input offset + * @param length input length + * @return wrap them in FileInputStream objects + * @throws IOException raised on errors performing I/O. */ public int recvFileInputStreams(FileInputStream[] streams, byte buf[], int offset, int length) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/TableListing.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/TableListing.java index 348f86fe1368b..b4264b8a2af14 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/TableListing.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/TableListing.java @@ -155,7 +155,9 @@ public Builder addField(String title, Justification justification, } /** - * Whether to hide column headers in table output + * Whether to hide column headers in table output. + * + * @return Builder. */ public Builder hideHeaders() { this.showHeader = false; @@ -164,6 +166,8 @@ public Builder hideHeaders() { /** * Whether to show column headers in table output. This is the default. + * + * @return Builder */ public Builder showHeaders() { this.showHeader = true; @@ -173,6 +177,9 @@ public Builder showHeaders() { /** * Set the maximum width of a row in the TableListing. Must have one or * more wrappable fields for this to take effect. + * + * @param width width + * @return Builder */ public Builder wrapWidth(int width) { this.wrapWidth = width; @@ -181,6 +188,8 @@ public Builder wrapWidth(int width) { /** * Create a new TableListing. + * + * @return TableListing */ public TableListing build() { return new TableListing(columns.toArray(new Column[0]), showHeader, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java index 8e48cb955a3a7..52a33e87bf934 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java @@ -94,6 +94,9 @@ public Thread newThread(Runnable r) { /** * Execute the task sometime in the future, using ThreadPools. 
+ * + * @param root root + * @param task task */ public synchronized void execute(String root, Runnable task) { ThreadPoolExecutor executor = executors.get(root); @@ -123,7 +126,7 @@ public synchronized void shutdown() { * * @param milliseconds The number of milliseconds to wait * @return true if all thread pools are terminated without time limit - * @throws InterruptedException + * @throws InterruptedException if the thread is interrupted. */ public synchronized boolean awaitTermination(long milliseconds) throws InterruptedException { @@ -145,6 +148,8 @@ public synchronized boolean awaitTermination(long milliseconds) /** * Shut down all ThreadPools immediately. + * + * @return Runnable List */ public synchronized List shutdownNow() { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java index d08e84f99de29..824c035c3135d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java @@ -117,6 +117,7 @@ private BlockingThreadPoolExecutorService(int permitCount, * @param keepAliveTime time until threads are cleaned up in {@code unit} * @param unit time unit * @param prefixName prefix of name for threads + * @return BlockingThreadPoolExecutorService */ public static BlockingThreadPoolExecutorService newInstance( int activeTasks, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcComposer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcComposer.java index 4023995941f5c..4037bd64e7fa1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcComposer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcComposer.java @@ -51,6 +51,11 @@ public class CrcComposer { /** * Returns a CrcComposer which will collapse all ingested CRCs into a single * value. + * + * @param type type + * @param bytesPerCrcHint bytesPerCrcHint + * @throws IOException raised on errors performing I/O. + * @return a CrcComposer which will collapse all ingested CRCs into a single value. */ public static CrcComposer newCrcComposer( DataChecksum.Type type, long bytesPerCrcHint) @@ -67,6 +72,13 @@ public static CrcComposer newCrcComposer( * final digest, each corresponding to 10 underlying data bytes. Using * a stripeLength greater than the total underlying data size is equivalent * to using a non-striped CrcComposer. + * + * @param type type + * @param bytesPerCrcHint bytesPerCrcHint + * @param stripeLength stripeLength + * @return a CrcComposer which will collapse CRCs for every combined + * underlying data size which aligns with the specified stripe boundary. + * @throws IOException raised on errors performing I/O. */ public static CrcComposer newStripedCrcComposer( DataChecksum.Type type, long bytesPerCrcHint, long stripeLength) @@ -102,7 +114,11 @@ public static CrcComposer newStripedCrcComposer( * each CRC expected to correspond to exactly {@code bytesPerCrc} underlying * data bytes. * + * @param crcBuffer crcBuffer + * @param offset offset * @param length must be a multiple of the expected byte-size of a CRC. + * @param bytesPerCrc bytesPerCrc + * @throws IOException raised on errors performing I/O. 
*/ public void update( byte[] crcBuffer, int offset, int length, long bytesPerCrc) @@ -125,6 +141,11 @@ public void update( * Composes {@code numChecksumsToRead} additional CRCs into the current digest * out of {@code checksumIn}, with each CRC expected to correspond to exactly * {@code bytesPerCrc} underlying data bytes. + * + * @param checksumIn checksumIn + * @param numChecksumsToRead numChecksumsToRead + * @param bytesPerCrc bytesPerCrc + * @throws IOException raised on errors performing I/O. */ public void update( DataInputStream checksumIn, long numChecksumsToRead, long bytesPerCrc) @@ -138,6 +159,10 @@ public void update( /** * Updates with a single additional CRC which corresponds to an underlying * data size of {@code bytesPerCrc}. + * + * @param crcB crcB + * @param bytesPerCrc bytesPerCrc + * @throws IOException raised on errors performing I/O. */ public void update(int crcB, long bytesPerCrc) throws IOException { if (curCompositeCrc == 0) { @@ -173,6 +198,8 @@ public void update(int crcB, long bytesPerCrc) throws IOException { * total sum bytesPerCrc divided by stripeLength. If the sum of bytesPerCrc * is not a multiple of stripeLength, then the last CRC in the array * corresponds to totalLength % stripeLength underlying data bytes. + * + * @return byte representation of composed CRCs. */ public byte[] digest() { if (curPositionInStripe > 0) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcUtil.java index 42eaf148d64c3..e7509885f8c8d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcUtil.java @@ -44,6 +44,10 @@ private CrcUtil() { * Compute x^({@code lengthBytes} * 8) mod {@code mod}, where {@code mod} is * in "reversed" (little-endian) format such that {@code mod & 1} represents * x^31 and has an implicit term x^32. + * + * @param lengthBytes lengthBytes + * @param mod mod + * @return monomial */ public static int getMonomial(long lengthBytes, int mod) { if (lengthBytes == 0) { @@ -73,7 +77,13 @@ public static int getMonomial(long lengthBytes, int mod) { } /** + * composeWithMonomial. + * + * @param crcA crcA + * @param crcB crcB * @param monomial Precomputed x^(lengthBInBytes * 8) mod {@code mod} + * @param mod mod + * @return compose with monomial */ public static int composeWithMonomial( int crcA, int crcB, int monomial, int mod) { @@ -81,7 +91,13 @@ public static int composeWithMonomial( } /** + * compose. + * + * @param crcA crcA + * @param crcB crcB * @param lengthB length of content corresponding to {@code crcB}, in bytes. + * @param mod mod + * @return compose result. */ public static int compose(int crcA, int crcB, long lengthB, int mod) { int monomial = getMonomial(lengthB, mod); @@ -91,6 +107,9 @@ public static int compose(int crcA, int crcB, long lengthB, int mod) { /** * @return 4-byte array holding the big-endian representation of * {@code value}. + * + * @param value value. + * @return byte array. */ public static byte[] intToBytes(int value) { byte[] buf = new byte[4]; @@ -110,6 +129,11 @@ public static byte[] intToBytes(int value) { * Writes big-endian representation of {@code value} into {@code buf} * starting at {@code offset}. buf.length must be greater than or * equal to offset + 4. 
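The CRC-composition helpers above let the CRC of a concatenation A||B be derived from crc(A), crc(B) and the length of B alone, without re-reading the data. A sketch, assuming hypothetical inputs crcA, crcB and lengthB:

    import java.io.IOException;
    import org.apache.hadoop.util.CrcUtil;
    import org.apache.hadoop.util.DataChecksum;

    public class CrcComposeDemo {
      // crcA/crcB cover two adjacent byte ranges; lengthB is the byte length
      // of the second range. All three are illustrative inputs.
      static int composedCrc(int crcA, int crcB, long lengthB)
          throws IOException {
        int mod = DataChecksum.getCrcPolynomialForType(
            DataChecksum.Type.CRC32C);
        // CRC of the concatenated ranges.
        return CrcUtil.compose(crcA, crcB, lengthB, mod);
      }
    }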
+ * + * @param buf buf size + * @param offset offset + * @param value value + * @throws IOException raised on errors performing I/O. */ public static void writeInt(byte[] buf, int offset, int value) throws IOException { @@ -127,6 +151,11 @@ public static void writeInt(byte[] buf, int offset, int value) /** * Reads 4-byte big-endian int value from {@code buf} starting at * {@code offset}. buf.length must be greater than or equal to offset + 4. + * + * @param offset offset + * @param buf buf + * @return int + * @throws IOException raised on errors performing I/O. */ public static int readInt(byte[] buf, int offset) throws IOException { @@ -146,6 +175,10 @@ public static int readInt(byte[] buf, int offset) * For use with debug statements; verifies bytes.length on creation, * expecting it to represent exactly one CRC, and returns a hex * formatted value. + * + * @param bytes bytes. + * @throws IOException raised on errors performing I/O. + * @return a list of hex formatted values. */ public static String toSingleCrcString(final byte[] bytes) throws IOException { @@ -161,6 +194,10 @@ public static String toSingleCrcString(final byte[] bytes) * For use with debug statements; verifies bytes.length on creation, * expecting it to be divisible by CRC byte size, and returns a list of * hex formatted values. + * + * @param bytes bytes + * @throws IOException raised on errors performing I/O. + * @return a list of hex formatted values. */ public static String toMultiCrcString(final byte[] bytes) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Daemon.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Daemon.java index 3b95db6693e03..bdbe4823db2b9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Daemon.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Daemon.java @@ -52,14 +52,21 @@ public Daemon() { super(); } - /** Construct a daemon thread. */ + /** + * Construct a daemon thread. + * @param runnable runnable + */ public Daemon(Runnable runnable) { super(runnable); this.runnable = runnable; this.setName(((Object)runnable).toString()); } - /** Construct a daemon thread to be part of a specified thread group. */ + /** + * Construct a daemon thread to be part of a specified thread group. + * @param group thread group + * @param runnable runnable + */ public Daemon(ThreadGroup group, Runnable runnable) { super(group, runnable); this.runnable = runnable; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java index 32a0adca1979a..5295d532d7e35 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java @@ -69,7 +69,13 @@ public enum Type { this.size = size; } - /** @return the type corresponding to the id. */ + /** + * the type corresponding to the id. + * + * @return the type corresponding to the id. + * @param id id + * @return Type + */ public static Type valueOf(int id) { if (id < 0 || id >= values().length) { throw new IllegalArgumentException("id=" + id @@ -82,6 +88,8 @@ public static Type valueOf(int id) { /** * Create a Crc32 Checksum object. The implementation of the Crc32 algorithm * is chosen depending on the platform. 
+ * + * @return Checksum */ public static Checksum newCrc32() { return new CRC32(); @@ -105,6 +113,9 @@ static Checksum newCrc32C() { } /** + * getCrcPolynomialForType. + * + * @param type type. * @return the int representation of the polynomial associated with the * CRC {@code type}, suitable for use with further CRC arithmetic. * @throws IOException if there is no CRC polynomial applicable @@ -141,7 +152,11 @@ public static DataChecksum newDataChecksum(Type type, int bytesPerChecksum ) { /** * Creates a DataChecksum from HEADER_LEN bytes from arr[offset]. + * + * @param bytes bytes + * @param offset offset * @return DataChecksum of the type in the array or null in case of an error. + * @throws IOException raised on errors performing I/O. */ public static DataChecksum newDataChecksum(byte[] bytes, int offset) throws IOException { @@ -168,7 +183,10 @@ public static DataChecksum newDataChecksum(byte[] bytes, int offset) /** * This constructs a DataChecksum by reading HEADER_LEN bytes from input - * stream in + * stream in. + * + * @param in data input stream + * @throws IOException raised on errors performing I/O. */ public static DataChecksum newDataChecksum( DataInputStream in ) throws IOException { @@ -215,7 +233,11 @@ public byte[] getHeader() { /** * Writes the current checksum to the stream. * If reset is true, then resets the checksum. + * + * @param out out + * @param reset reset * @return number of bytes written. Will be equal to getChecksumSize(); + * @throws IOException raised on errors performing I/O. */ public int writeValue( DataOutputStream out, boolean reset ) throws IOException { @@ -239,7 +261,12 @@ public int writeValue( DataOutputStream out, boolean reset ) /** * Writes the current checksum to a buffer. * If reset is true, then resets the checksum. + * + * @param buf buf + * @param offset offset + * @param reset reset * @return number of bytes written. Will be equal to getChecksumSize(); + * @throws IOException raised on errors performing I/O. */ public int writeValue( byte[] buf, int offset, boolean reset ) throws IOException { @@ -266,6 +293,9 @@ public int writeValue( byte[] buf, int offset, boolean reset ) /** * Compares the checksum located at buf[offset] with the current checksum. + * + * @param buf buf + * @param offset offset * @return true if the checksum matches and false otherwise. */ public boolean compare( byte buf[], int offset ) { @@ -295,12 +325,19 @@ public Type getChecksumType() { return type; } - /** @return the size for a checksum. */ + /** + * the size for a checksum. + * @return the size for a checksum. + */ public int getChecksumSize() { return type.size; } - /** @return the required checksum size given the data length. */ + /** + * the required checksum size given the data length. + * @param dataSize data size + * @return the required checksum size given the data length. + */ public int getChecksumSize(int dataSize) { return ((dataSize - 1)/getBytesPerChecksum() + 1) * getChecksumSize(); } @@ -525,6 +562,12 @@ public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) { /** * Implementation of chunked calculation specifically on byte arrays. This * is to avoid the copy when dealing with ByteBuffers that have array backing. 
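A short sketch of the chunked-checksum path documented above, using the parameters this hunk names:

    import org.apache.hadoop.util.DataChecksum;

    public class ChunkedSumsDemo {
      public static void main(String[] args) {
        byte[] data = new byte[4096];
        // One CRC per 512-byte chunk of data.
        DataChecksum checksum = DataChecksum.newDataChecksum(
            DataChecksum.Type.CRC32C, 512);
        byte[] sums = new byte[checksum.getChecksumSize(data.length)];
        // Fills 'sums' with a 4-byte CRC for each 512-byte chunk.
        checksum.calculateChunkedSums(data, 0, data.length, sums, 0);
        System.out.println(sums.length); // 32 = 8 chunks * 4 bytes
      }
    }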
+ * + * @param data data + * @param dataOffset dataOffset + * @param dataLength dataLength + * @param sums sums + * @param sumsOffset sumsOffset */ public void calculateChunkedSums( byte[] data, int dataOffset, int dataLength, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DirectBufferPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DirectBufferPool.java index 3951ec2609c2c..8611884c7e7e5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DirectBufferPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DirectBufferPool.java @@ -53,6 +53,9 @@ public class DirectBufferPool { * Allocate a direct buffer of the specified size, in bytes. * If a pooled buffer is available, returns that. Otherwise * allocates a new one. + * + * @param size size + * @return ByteBuffer */ public ByteBuffer getBuffer(int size) { Queue> list = buffersBySize.get(size); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java index 446aad949a2f6..5cb9845c588f4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java @@ -70,8 +70,8 @@ public DiskOutOfSpaceException(String msg) { * Create the directory if it doesn't exist and check that dir is readable, * writable and executable * - * @param dir - * @throws DiskErrorException + * @param dir dir + * @throws DiskErrorException disk problem */ public static void checkDir(File dir) throws DiskErrorException { checkDirInternal(dir); @@ -82,8 +82,8 @@ public static void checkDir(File dir) throws DiskErrorException { * readable, writable and executable. Perform some disk IO to * ensure that the disk is usable for writes. * - * @param dir - * @throws DiskErrorException + * @param dir dir + * @throws DiskErrorException disk problem */ public static void checkDirWithDiskIo(File dir) throws DiskErrorException { @@ -107,8 +107,8 @@ private static void checkDirInternal(File dir) * @param localFS local filesystem * @param dir directory * @param expected permission - * @throws DiskErrorException - * @throws IOException + * @throws DiskErrorException disk problem + * @throws IOException raised on errors performing I/O. */ public static void checkDir(LocalFileSystem localFS, Path dir, FsPermission expected) @@ -125,8 +125,8 @@ public static void checkDir(LocalFileSystem localFS, Path dir, * @param localFS local filesystem * @param dir directory * @param expected permission - * @throws DiskErrorException - * @throws IOException + * @throws DiskErrorException disk problem + * @throws IOException raised on errors performing I/O. 
*/ public static void checkDirWithDiskIo(LocalFileSystem localFS, Path dir, FsPermission expected) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskValidatorFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskValidatorFactory.java index 67ded618d25b1..a4f80354dde06 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskValidatorFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskValidatorFactory.java @@ -40,6 +40,7 @@ private DiskValidatorFactory() { /** * Returns a {@link DiskValidator} instance corresponding to the passed clazz. * @param clazz a class extends {@link DiskValidator} + * @return disk validator */ public static DiskValidator getInstance(Class clazz) { @@ -66,6 +67,7 @@ private DiskValidatorFactory() { * or "read-write" for {@link ReadWriteDiskValidator}. * @param diskValidator canonical class name, for example, "basic" * @throws DiskErrorException if the class cannot be located + * @return disk validator */ @SuppressWarnings("unchecked") public static DiskValidator getInstance(String diskValidator) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GcTimeMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GcTimeMonitor.java index f189708692d8b..fa969b57a3051 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GcTimeMonitor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GcTimeMonitor.java @@ -54,6 +54,8 @@ public static class Builder { /** * Set observation window size in milliseconds. + * @param value value + * @return window size in milliseconds */ public Builder observationWindowMs(long value) { this.observationWindowMs = value; @@ -62,6 +64,8 @@ public Builder observationWindowMs(long value) { /** * Set sleep interval in milliseconds. + * @param value value + * @return IntervalMs */ public Builder sleepIntervalMs(long value) { this.sleepIntervalMs = value; @@ -70,6 +74,8 @@ public Builder sleepIntervalMs(long value) { /** * Set the max GC time percentage that triggers the alert handler. + * @param value value + * @return max GC time percentage */ public Builder maxGcTimePercentage(int value) { this.maxGcTimePercentage = value; @@ -78,6 +84,8 @@ public Builder maxGcTimePercentage(int value) { /** * Set the GC alert handler. + * @param value value + * @return GC alert handler */ public Builder gcTimeAlertHandler(GcTimeAlertHandler value) { this.handler = value; @@ -167,7 +175,10 @@ public void shutdown() { shouldRun = false; } - /** Returns a copy of the most recent data measured by this monitor. */ + /** + * Returns a copy of the most recent data measured by this monitor. + * @return a copy of the most recent data measured by this monitor + */ public GcData getLatestGcData() { return curData.clone(); } @@ -227,22 +238,34 @@ public static class GcData implements Cloneable { private long gcMonitorRunTime, totalGcTime, totalGcCount; private int gcTimePercentage; - /** Returns the absolute timestamp when this measurement was taken. */ + /** + * Returns the absolute timestamp when this measurement was taken. + * @return timestamp + */ public long getTimestamp() { return timestamp; } - /** Returns the time since the start of the associated GcTimeMonitor. */ + /** + * Returns the time since the start of the associated GcTimeMonitor. 
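Pulling the builder methods above together, a sketch of a typical setup. The build() terminator and the Thread-style start() are assumed from the class's wider API; they are not shown in this hunk:

    import org.apache.hadoop.util.GcTimeMonitor;

    public class GcMonitorDemo {
      public static void main(String[] args) throws InterruptedException {
        GcTimeMonitor monitor = new GcTimeMonitor.Builder()
            .observationWindowMs(60_000)  // look at the last minute
            .sleepIntervalMs(1_000)       // sample once per second
            .maxGcTimePercentage(25)      // alert above 25% GC time
            .gcTimeAlertHandler(data ->
                System.err.println("GC at " + data.getGcTimePercentage() + "%"))
            .build();
        monitor.start();
        Thread.sleep(5_000);
        System.out.println(monitor.getLatestGcData().getGcTimePercentage());
        monitor.shutdown();
      }
    }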
+ * @return GcMonitorRunTime + */ public long getGcMonitorRunTime() { return gcMonitorRunTime; } - /** Returns accumulated GC time since this JVM started. */ + /** + * Returns accumulated GC time since this JVM started. + * @return AccumulatedGcTime + */ public long getAccumulatedGcTime() { return totalGcTime; } - /** Returns the accumulated number of GC pauses since this JVM started. */ + /** + * Returns the accumulated number of GC pauses since this JVM started. + * @return AccumulatedGcCount + */ public long getAccumulatedGcCount() { return totalGcCount; } @@ -250,6 +273,8 @@ public long getAccumulatedGcCount() { /** * Returns the percentage (0..100) of time that the JVM spent in GC pauses * within the observation window of the associated GcTimeMonitor. + * + * @return GcTimePercentage */ public int getGcTimePercentage() { return gcTimePercentage; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericsUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericsUtil.java index 0aba34845a676..e52ff015531c1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericsUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericsUtil.java @@ -51,6 +51,8 @@ public static Class getClass(T t) { * T[]. * @param c the Class object of the items in the list * @param list the list to convert + * @param Generics Type T + * @return T Array */ public static T[] toArray(Class c, List list) { @@ -67,8 +69,10 @@ public static T[] toArray(Class c, List list) * Converts the given List<T> to a an array of * T[]. * @param list the list to convert + * @param Generics Type T * @throws ArrayIndexOutOfBoundsException if the list is empty. * Use {@link #toArray(Class, List)} if the list may be empty. + * @return T Array */ public static T[] toArray(List list) { return toArray(getClass(list.get(0)), list); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IPList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IPList.java index 3a2616376fbac..e940e08b2ff8a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IPList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IPList.java @@ -26,7 +26,7 @@ public interface IPList { /** * returns true if the ipAddress is in the IPList. - * @param ipAddress + * @param ipAddress ipAddress * @return boolean value indicating whether the ipAddress is in the IPList */ public abstract boolean isIn(String ipAddress); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdentityHashStore.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdentityHashStore.java index ecf099feff9d0..b7961200f7d74 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdentityHashStore.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdentityHashStore.java @@ -111,6 +111,9 @@ private void putInternal(Object k, Object v) { * Inserting a new (key, value) never overwrites a previous one. * In other words, you can insert the same key multiple times and it will * lead to multiple entries. + * + * @param k Generics Type k + * @param v Generics Type v */ public void put(K k, V v) { Preconditions.checkNotNull(k); @@ -142,6 +145,9 @@ private int getElementIndex(K k) { /** * Retrieve a value associated with a given key. 
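+   * <p>A hypothetical sketch of the accumulate-on-put behaviour described
+   * above (k, v1 and v2 are assumed names; illustrative only, not part of
+   * the original patch):
+   * <pre>
+   *   store.put(k, v1);
+   *   store.put(k, v2); // does not overwrite; both entries remain
+   *   Object latest = store.get(k);
+   * </pre>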
+   *
+   * @param k the key to look up
+   * @return the value associated with the key, or null if none was found
    */
   public V get(K k) {
     int index = getElementIndex(k);
@@ -154,6 +160,9 @@ public V get(K k) {
   /**
    * Retrieve a value associated with a given key, and delete the
    * relevant entry.
+   *
+   * @param k the key whose entry should be removed
+   * @return the removed value, or null if none was found
    */
   public V remove(K k) {
     int index = getElementIndex(k);
@@ -185,6 +194,8 @@ public interface Visitor {
 
   /**
    * Visit all key, value pairs in the IdentityHashStore.
+   *
+   * @param visitor the visitor invoked for each (key, value) pair
    */
   public void visitAll(Visitor visitor) {
     int length = buffer == null ? 0 : buffer.length;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSortable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSortable.java
index 1aa036e95b735..369f54da4a28a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSortable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSortable.java
@@ -31,11 +31,18 @@ public interface IndexedSortable {
   /**
    * Compare items at the given addresses consistent with the semantics of
    * {@link java.util.Comparator#compare(Object, Object)}.
+   *
+   * @param i index of the first item
+   * @param j index of the second item
+   * @return the comparison result
    */
   int compare(int i, int j);
 
   /**
    * Swap items at the given addresses.
+   *
+   * @param i index of the first item
+   * @param j index of the second item
    */
   void swap(int i, int j);
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSorter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSorter.java
index bdd024302c99f..ebfd3841dc4b6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSorter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSorter.java
@@ -38,6 +38,10 @@ public interface IndexedSorter {
    * entry.
    * @see IndexedSortable#compare
    * @see IndexedSortable#swap
+   *
+   * @param s the items to sort
+   * @param l the lower bound of the range, inclusive
+   * @param r the upper bound of the range, exclusive
    */
   void sort(IndexedSortable s, int l, int r);
 
@@ -45,6 +49,10 @@ public interface IndexedSorter {
    * Same as {@link #sort(IndexedSortable,int,int)}, but indicate progress
    * periodically.
    * @see #sort(IndexedSortable,int,int)
+   * @param s the items to sort
+   * @param l the lower bound of the range, inclusive
+   * @param r the upper bound of the range, exclusive
+   * @param rep the progress reporter
    */
   void sort(IndexedSortable s, int l, int r, Progressable rep);
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java
index 54091f2bcc389..9c152147b9efa 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java
@@ -303,6 +303,7 @@ public boolean add(E elem) {
    * Add an element to the front of the list.
    *
    * @param elem     The new element to add.
+ * @return if addFirst success true, not false */ public boolean addFirst(Element elem) { if (elem == null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Lists.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Lists.java index 623ae7f0ea53d..9f86a24791700 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Lists.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Lists.java @@ -85,6 +85,7 @@ public static ArrayList newArrayList(E... elements) { * calling Iterables#addAll. * * @param Generics Type E + * @param elements elements * @return ArrayList Generics Type E */ public static ArrayList newArrayList(Iterable elements) { @@ -102,6 +103,7 @@ public static ArrayList newArrayList(Iterable elements) { * and then calling Iterators#addAll. * * @param Generics Type E + * @param elements elements * @return ArrayList Generics Type E */ public static ArrayList newArrayList(Iterator elements) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PriorityQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PriorityQueue.java index b371e630e7e9f..23b9c45c10855 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PriorityQueue.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PriorityQueue.java @@ -36,7 +36,7 @@ public abstract class PriorityQueue { must define this one method. * @param a object a * @param b object b - * @return if a < b true, not false + * @return if a less than b true, not false */ protected abstract boolean lessThan(Object a, Object b); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java index fe839da29281e..234e9b3b675a8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java @@ -124,6 +124,7 @@ public static String humanReadableInt(long number) { * The same as String.format(Locale.ENGLISH, format, objects). * @param format format * @param objects objects + * @return format string */ public static String format(final String format, final Object... objects) { return String.format(Locale.ENGLISH, format, objects); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java index 5ac09221c1b12..bfb0401583f3b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java @@ -113,6 +113,9 @@ public static void printGenericCommandUsage(PrintStream out) { * * @param prompt prompt * @throws IOException raised on errors performing I/O. + * @return if the user + * responds with "y" or "yes". (case insensitive) true, + * not false. */ public static boolean confirmPrompt(String prompt) throws IOException { while (true) { From a21f0119815047c81c5c6e9044e3b79a876349bb Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Sun, 15 May 2022 01:28:08 -0700 Subject: [PATCH 42/53] HADOOP-18229. Fix some java doc compilation 700+ warnings. 
--- .../fs/shell/CommandWithDestination.java | 3 + .../org/apache/hadoop/fs/shell/PathData.java | 3 +- .../fs/statistics/IOStatisticsSupport.java | 1 + .../hadoop/fs/statistics/MeanStatistic.java | 1 + .../apache/hadoop/fs/store/DataBlocks.java | 4 + .../compress/snappy/SnappyDecompressor.java | 2 +- .../rawcoder/DecodingValidator.java | 4 +- .../rawcoder/RawErasureDecoder.java | 2 + .../rawcoder/RawErasureEncoder.java | 5 +- .../hadoop/io/file/tfile/ByteArray.java | 2 +- .../apache/hadoop/io/file/tfile/TFile.java | 92 ++++++++++--------- .../apache/hadoop/io/file/tfile/Utils.java | 20 ++-- .../hadoop/io/retry/AsyncCallHandler.java | 11 ++- .../apache/hadoop/io/retry/RetryPolicies.java | 37 ++++++++ .../apache/hadoop/io/retry/RetryProxy.java | 5 + .../apache/hadoop/io/retry/RetryUtils.java | 4 +- .../hadoop/io/serializer/Deserializer.java | 5 + .../io/serializer/SerializationFactory.java | 2 + .../hadoop/io/serializer/Serializer.java | 5 + .../metrics2/sink/PrometheusMetricsSink.java | 4 + .../apache/hadoop/security/Credentials.java | 32 +++++-- .../security/GroupMappingServiceProvider.java | 8 +- .../org/apache/hadoop/security/Groups.java | 4 +- .../hadoop/security/HadoopKerberosName.java | 4 +- .../org/apache/hadoop/security/KDiag.java | 5 +- .../apache/hadoop/security/KerberosInfo.java | 5 +- .../hadoop/security/NullGroupsMapping.java | 2 +- .../apache/hadoop/security/ProviderUtils.java | 1 + .../security/RefreshUserMappingsProtocol.java | 4 +- .../hadoop/security/SaslInputStream.java | 2 +- .../security/SaslPropertiesResolver.java | 2 +- .../apache/hadoop/security/SaslRpcClient.java | 14 ++- .../apache/hadoop/security/SaslRpcServer.java | 24 ++++- .../hadoop/security/ShellBasedIdMapping.java | 9 +- .../ShellBasedUnixGroupsNetgroupMapping.java | 2 + .../hadoop/security/UserGroupInformation.java | 46 ++++++---- .../ssl/ReloadingX509KeystoreManager.java | 4 +- .../hadoop/security/token/DtFetcher.java | 22 ++++- .../security/token/DtFileOperations.java | 18 ++-- .../hadoop/security/token/DtUtilShell.java | 4 +- .../apache/hadoop/security/token/Token.java | 18 ++-- .../hadoop/security/token/TokenInfo.java | 6 +- .../hadoop/service/CompositeService.java | 2 +- .../hadoop/service/ServiceStateModel.java | 3 + .../apache/hadoop/tools/GetGroupsBase.java | 6 +- .../hadoop/tools/GetUserMappingsProtocol.java | 2 +- .../java/org/apache/hadoop/util/CrcUtil.java | 1 - .../org/apache/hadoop/util/DataChecksum.java | 6 +- 48 files changed, 318 insertions(+), 150 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java index 678225f81e0e3..c698f12fc865c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java @@ -119,6 +119,8 @@ protected void setDirectWrite(boolean flag) { * owner, group and permission information of the source * file will be preserved as far as target {@link FileSystem} * implementation allows. 
+ * + * @param preserve preserve */ protected void setPreserve(boolean preserve) { if (preserve) { @@ -175,6 +177,7 @@ protected void preserve(FileAttribute fileAttribute) { * The last arg is expected to be a local path, if only one argument is * given then the destination will be the current directory * @param args is the list of arguments + * @throws IOException raised on errors performing I/O. */ protected void getLocalDestination(LinkedList args) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java index 2071a16799a5c..da99ac212563e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java @@ -610,10 +610,11 @@ public int hashCode() { /** * Open a file for sequential IO. - *
+   * <p>
    * This uses FileSystem.openFile() to request sequential IO;
    * the file status is also passed in.
    * Filesystems may use this to optimize their IO.
+   * </p>
* @return an input stream * @throws IOException failure */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java index 75977047c0f2a..90448471c5d04 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java @@ -71,6 +71,7 @@ private IOStatisticsSupport() { * Returns null if the source isn't of the write type * or the return value of * {@link IOStatisticsSource#getIOStatistics()} was null. + * @param source source * @return an IOStatistics instance or null */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java index d9ff0c25c6a21..d330b0dc5a337 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java @@ -207,6 +207,7 @@ public synchronized double mean() { /** * Add another MeanStatistic. * @param other other value + * @return mean statistic */ public synchronized MeanStatistic add(final MeanStatistic other) { if (other.isEmpty()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java index d9d3850ef4e2e..c70d0ee91e15e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java @@ -107,6 +107,7 @@ private DataBlocks() { * @param len number of bytes to be written. * @throws NullPointerException for a null buffer * @throws IndexOutOfBoundsException if indices are out of range + * @throws IOException raised on errors performing I/O. */ public static void validateWriteArgs(byte[] b, int off, int len) throws IOException { @@ -287,6 +288,7 @@ protected BlockFactory(String keyToBufferDir, Configuration conf) { * @param limit limit of the block. * @param statistics stats to work with * @return a new block. + * @throws IOException raised on errors performing I/O. */ public abstract DataBlock create(long index, int limit, BlockUploadStatistics statistics) @@ -482,6 +484,8 @@ public void close() throws IOException { /** * Inner close logic for subclasses to implement. + * + * @throws IOException raised on errors performing I/O. 
*/ protected void innerClose() throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java index d3775e286e895..58987c4dda3de 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java @@ -187,7 +187,7 @@ public boolean finished() { * @param off Start offset of the data * @param len Size of the buffer * @return The actual number of bytes of compressed data. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public int decompress(byte[] b, int off, int len) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/DecodingValidator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/DecodingValidator.java index 396aac08cc517..a9bc297739810 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/DecodingValidator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/DecodingValidator.java @@ -68,7 +68,7 @@ public DecodingValidator(RawErasureDecoder decoder) { * @param erasedIndexes indexes of erased units used for decoding * @param outputs decoded output buffers, which are ready to be read after * the call - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void validate(ByteBuffer[] inputs, int[] erasedIndexes, ByteBuffer[] outputs) throws IOException { @@ -133,7 +133,7 @@ public void validate(ByteBuffer[] inputs, int[] erasedIndexes, * @param inputs input buffers used for decoding * @param erasedIndexes indexes of erased units used for decoding * @param outputs decoded output buffers - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void validate(ECChunk[] inputs, int[] erasedIndexes, ECChunk[] outputs) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java index 2ebe94b0385ab..329bf7c3aaf7f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java @@ -80,6 +80,7 @@ public RawErasureDecoder(ErasureCoderOptions coderOptions) { * @param erasedIndexes indexes of erased units in the inputs array * @param outputs output buffers to put decoded data into according to * erasedIndexes, ready for read after the call + * @throws IOException raised on errors performing I/O. */ public synchronized void decode(ByteBuffer[] inputs, int[] erasedIndexes, ByteBuffer[] outputs) throws IOException { @@ -117,6 +118,7 @@ public synchronized void decode(ByteBuffer[] inputs, int[] erasedIndexes, /** * Perform the real decoding using Direct ByteBuffer. * @param decodingState the decoding state + * @throws IOException raised on errors performing I/O. 
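+   * <p>A hypothetical sketch of the public decode() entry point that
+   * drives this hook (decoder, inputs, erasedIndexes and outputs are
+   * assumed names; illustrative only, not part of the original patch):
+   * <pre>
+   *   decoder.decode(inputs, erasedIndexes, outputs);
+   * </pre>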
*/ protected abstract void doDecode(ByteBufferDecodingState decodingState) throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java index 6d2ecd20525f4..d5ccb12c9d6a2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java @@ -100,7 +100,8 @@ public void encode(ByteBuffer[] inputs, ByteBuffer[] outputs) /** * Perform the real encoding work using direct ByteBuffer. - * @param encodingState the encoding state + * @param encodingState the encoding state. + * @throws IOException raised on errors performing I/O. */ protected abstract void doEncode(ByteBufferEncodingState encodingState) throws IOException; @@ -111,6 +112,7 @@ protected abstract void doEncode(ByteBufferEncodingState encodingState) * @param inputs input buffers to read data from * @param outputs output buffers to put the encoded data into, read to read * after the call + * @throws IOException raised on errors performing I/O. */ public void encode(byte[][] inputs, byte[][] outputs) throws IOException { ByteArrayEncodingState baeState = new ByteArrayEncodingState( @@ -128,6 +130,7 @@ public void encode(byte[][] inputs, byte[][] outputs) throws IOException { * Perform the real encoding work using bytes array, supporting offsets * and lengths. * @param encodingState the encoding state + * @throws IOException raised on errors performing I/O. */ protected abstract void doEncode(ByteArrayEncodingState encodingState) throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/ByteArray.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/ByteArray.java index c6c8b3fe3e1fe..054cd514566f9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/ByteArray.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/ByteArray.java @@ -35,7 +35,7 @@ public final class ByteArray implements RawComparable { /** * Constructing a ByteArray from a {@link BytesWritable}. * - * @param other + * @param other other */ public ByteArray(BytesWritable other) { this(other.getBytes(), 0, other.getLength()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java index 09cd2825e3cf2..fa6b6ee67cdbb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java @@ -276,7 +276,7 @@ private enum State { * * @param conf * The configuration object. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Writer(FSDataOutputStream fsdos, int minBlockSize, String compressName, String comparator, Configuration conf) @@ -350,7 +350,7 @@ public void close() throws IOException { * Buffer for key. * @param value * Buffer for value. - * @throws IOException + * @throws IOException raised on errors performing I/O. 
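+     * <p>A hypothetical usage sketch (writer is an assumed open Writer;
+     * illustrative only, not part of the original patch):
+     * <pre>
+     *   writer.append("row1".getBytes(), "value1".getBytes());
+     * </pre>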
*/ public void append(byte[] key, byte[] value) throws IOException { append(key, 0, key.length, value, 0, value.length); @@ -521,7 +521,7 @@ public void close() throws IOException { * exactly as many bytes as specified here before calling close on * the returned output stream. * @return The key appending output stream. - * @throws IOException + * @throws IOException raised on errors performing I/O. * */ public DataOutputStream prepareAppendKey(int length) throws IOException { @@ -548,7 +548,7 @@ public DataOutputStream prepareAppendKey(int length) throws IOException { * the returned output stream. Advertising the value size up-front * guarantees that the value is encoded in one chunk, and avoids * intermediate chunk buffering. - * @throws IOException + * @throws IOException raised on errors performing I/O. * */ public DataOutputStream prepareAppendValue(int length) throws IOException { @@ -588,7 +588,7 @@ public DataOutputStream prepareAppendValue(int length) throws IOException { * {@link TFile#getSupportedCompressionAlgorithms()}. * @return A DataOutputStream that can be used to write Meta Block data. * Closing the stream would signal the ending of the block. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @throws MetaBlockAlreadyExists * the Meta Block with the same name already exists. */ @@ -616,7 +616,7 @@ public DataOutputStream prepareMetaBlock(String name, String compressName) * Name of the meta block. * @return A DataOutputStream that can be used to write Meta Block data. * Closing the stream would signal the ending of the block. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @throws MetaBlockAlreadyExists * the Meta Block with the same name already exists. */ @@ -796,8 +796,8 @@ public boolean equals(Object obj) { * The length of TFile. This is required because we have no easy * way of knowing the actual size of the input file through the * File input stream. - * @param conf - * @throws IOException + * @param conf configuration + * @throws IOException raised on errors performing I/O. */ public Reader(FSDataInputStream fsdis, long fileLength, Configuration conf) throws IOException { @@ -896,7 +896,7 @@ synchronized void checkTFileDataIndex() throws IOException { * Get the first key in the TFile. * * @return The first key in the TFile. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public RawComparable getFirstKey() throws IOException { checkTFileDataIndex(); @@ -907,7 +907,7 @@ public RawComparable getFirstKey() throws IOException { * Get the last key in the TFile. * * @return The last key in the TFile. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public RawComparable getLastKey() throws IOException { checkTFileDataIndex(); @@ -1043,7 +1043,7 @@ Location getLocationNear(long offset) { * the user supplied offset. * @return the RecordNum to the corresponding entry. If no such entry * exists, it returns the total entry count. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public long getRecordNumNear(long offset) throws IOException { return getRecordNumByLocation(getLocationNear(offset)); @@ -1058,7 +1058,7 @@ public long getRecordNumNear(long offset) throws IOException { * @return the key that fits the requirement; or null if no such key exists * (which could happen if the offset is close to the end of the * TFile). - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ public RawComparable getKeyNear(long offset) throws IOException { int blockIndex = readerBCF.getBlockIndexNear(offset); @@ -1072,7 +1072,7 @@ public RawComparable getKeyNear(long offset) throws IOException { * * @return The scanner object. A valid Scanner is always returned even if * the TFile is empty. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Scanner createScanner() throws IOException { return new Scanner(this, begin, end); @@ -1089,7 +1089,7 @@ public Scanner createScanner() throws IOException { * specified byte-region but always round up to the compression * block boundaries. It is possible that the returned scanner * contains zero key-value pairs even if length is positive. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Scanner createScannerByByteRange(long offset, long length) throws IOException { return new Scanner(this, offset, offset + length); @@ -1106,7 +1106,7 @@ public Scanner createScannerByByteRange(long offset, long length) throws IOExcep * key-value entry of the TFile. * @return The actual coverage of the returned scanner will cover all keys * greater than or equal to the beginKey and less than the endKey. - * @throws IOException + * @throws IOException raised on errors performing I/O. * * @deprecated Use {@link #createScannerByKey(byte[], byte[])} instead. */ @@ -1127,7 +1127,7 @@ public Scanner createScanner(byte[] beginKey, byte[] endKey) * key-value entry of the TFile. * @return The actual coverage of the returned scanner will cover all keys * greater than or equal to the beginKey and less than the endKey. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Scanner createScannerByKey(byte[] beginKey, byte[] endKey) throws IOException { @@ -1147,7 +1147,7 @@ public Scanner createScannerByKey(byte[] beginKey, byte[] endKey) * key-value entry of the TFile. * @return The actual coverage of the returned scanner will cover all keys * greater than or equal to the beginKey and less than the endKey. - * @throws IOException + * @throws IOException raised on errors performing I/O. * * @deprecated Use {@link #createScannerByKey(RawComparable, RawComparable)} * instead. @@ -1169,7 +1169,7 @@ public Scanner createScanner(RawComparable beginKey, RawComparable endKey) * key-value entry of the TFile. * @return The actual coverage of the returned scanner will cover all keys * greater than or equal to the beginKey and less than the endKey. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Scanner createScannerByKey(RawComparable beginKey, RawComparable endKey) throws IOException { @@ -1189,7 +1189,7 @@ public Scanner createScannerByKey(RawComparable beginKey, RawComparable endKey) * The RecordNum for the last record (exclusive). To scan the whole * file, either specify endRecNum==-1 or endRecNum==getEntryCount(). * @return The TFile scanner that covers the specified range of records. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Scanner createScannerByRecordNum(long beginRecNum, long endRecNum) throws IOException { @@ -1313,7 +1313,7 @@ protected Scanner(Reader reader, long offBegin, long offEnd) * @param endKey * End key of the scan. If null, scan up to the last <K, V> * entry of the TFile. - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ protected Scanner(Reader reader, RawComparable beginKey, RawComparable endKey) throws IOException { @@ -1338,7 +1338,7 @@ protected Scanner(Reader reader, RawComparable beginKey, * @param key * The input key * @return true if we find an equal key. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public boolean seekTo(byte[] key) throws IOException { return seekTo(key, 0, key.length); @@ -1356,7 +1356,7 @@ public boolean seekTo(byte[] key) throws IOException { * @param keyLen * key buffer length. * @return true if we find an equal key; false otherwise. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public boolean seekTo(byte[] key, int keyOffset, int keyLen) throws IOException { @@ -1432,7 +1432,7 @@ private void seekTo(Location l) throws IOException { * Rewind to the first entry in the scanner. The entry returned by the * previous entry() call will be invalid. * - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void rewind() throws IOException { seekTo(beginLocation); @@ -1442,7 +1442,7 @@ public void rewind() throws IOException { * Seek to the end of the scanner. The entry returned by the previous * entry() call will be invalid. * - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void seekToEnd() throws IOException { parkCursorAtEnd(); @@ -1455,7 +1455,7 @@ public void seekToEnd() throws IOException { * * @param key * The input key - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void lowerBound(byte[] key) throws IOException { lowerBound(key, 0, key.length); @@ -1472,7 +1472,7 @@ public void lowerBound(byte[] key) throws IOException { * offset in the key buffer. * @param keyLen * key buffer length. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void lowerBound(byte[] key, int keyOffset, int keyLen) throws IOException { @@ -1486,7 +1486,7 @@ public void lowerBound(byte[] key, int keyOffset, int keyLen) * * @param key * The input key - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void upperBound(byte[] key) throws IOException { upperBound(key, 0, key.length); @@ -1503,7 +1503,7 @@ public void upperBound(byte[] key) throws IOException { * offset in the key buffer. * @param keyLen * key buffer length. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void upperBound(byte[] key, int keyOffset, int keyLen) throws IOException { @@ -1516,7 +1516,7 @@ public void upperBound(byte[] key, int keyOffset, int keyLen) * * @return true if the cursor successfully moves. False when cursor is * already at the end location and cannot be advanced. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public boolean advance() throws IOException { if (atEnd()) { @@ -1614,7 +1614,7 @@ void checkKey() throws IOException { * Get an entry to access the key and value. * * @return The Entry object to access the key and value. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Entry entry() throws IOException { checkKey(); @@ -1624,7 +1624,7 @@ public Entry entry() throws IOException { /** * Get the RecordNum corresponding to the entry pointed by the cursor. * @return The RecordNum corresponding to the entry pointed by the cursor. - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ public long getRecordNum() throws IOException { return reader.getRecordNumByLocation(currentLocation); @@ -1670,7 +1670,7 @@ byte[] getKeyBuffer() { * BytesWritable to hold key. * @param value * BytesWritable to hold value - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void get(BytesWritable key, BytesWritable value) throws IOException { @@ -1684,7 +1684,8 @@ public void get(BytesWritable key, BytesWritable value) * * @param key * BytesWritable to hold the key. - * @throws IOException + * @throws IOException raised on errors performing I/O. + * @return the key into BytesWritable */ public int getKey(BytesWritable key) throws IOException { key.setSize(getKeyLength()); @@ -1698,8 +1699,8 @@ public int getKey(BytesWritable key) throws IOException { * directly uses the buffer inside BytesWritable for storing the value. * The call does not require the value length to be known. * - * @param value - * @throws IOException + * @param value value + * @throws IOException raised on errors performing I/O. */ public long getValue(BytesWritable value) throws IOException { DataInputStream dis = getValueStream(); @@ -1725,7 +1726,7 @@ public long getValue(BytesWritable value) throws IOException { * @param out * The output stream * @return the length of the key. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public int writeKey(OutputStream out) throws IOException { out.write(keyBuffer, 0, klen); @@ -1740,7 +1741,7 @@ public int writeKey(OutputStream out) throws IOException { * @param out * The output stream * @return the length of the value - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public long writeValue(OutputStream out) throws IOException { DataInputStream dis = getValueStream(); @@ -1768,7 +1769,7 @@ public long writeValue(OutputStream out) throws IOException { * not be shorter than the key length. * @return The length of the key. * - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public int getKey(byte[] buf) throws IOException { return getKey(buf, 0); @@ -1784,7 +1785,7 @@ public int getKey(byte[] buf) throws IOException { * the key into. Requiring the key-length + offset no greater * than the buffer length. * @return The length of the key. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public int getKey(byte[] buf, int offset) throws IOException { if ((offset | (buf.length - offset - klen)) < 0) { @@ -1828,10 +1829,11 @@ public int getValueLength() { * without moving the cursor will result in exception: * {@link #getValue(byte[])}, {@link #getValue(byte[], int)}, * {@link #getValueStream}. - * + * + * @param buf buf * @return the length of the value. Does not require * isValueLengthKnown() to be true. - * @throws IOException + * @throws IOException raised on errors performing I/O. * */ public int getValue(byte[] buf) throws IOException { @@ -1846,10 +1848,12 @@ public int getValue(byte[] buf) throws IOException { * functions more than once without moving the cursor will result in * exception: {@link #getValue(byte[])}, {@link #getValue(byte[], int)}, * {@link #getValueStream}. - * + * + * @param buf buf + * @param offset offset * @return the length of the value. Does not require * isValueLengthKnown() to be true. - * @throws IOException + * @throws IOException raised on errors performing I/O. 
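+     * <p>A hypothetical sketch (entry is an assumed scanner entry;
+     * illustrative only, not part of the original patch):
+     * <pre>
+     *   byte[] buf = new byte[entry.getValueLength()];
+     *   int n = entry.getValue(buf, 0);
+     * </pre>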
*/ public int getValue(byte[] buf, int offset) throws IOException { DataInputStream dis = getValueStream(); @@ -1892,7 +1896,7 @@ public int getValue(byte[] buf, int offset) throws IOException { * {@link #getValue(byte[], int)}, {@link #getValueStream}. * * @return The input stream for reading the value. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public DataInputStream getValueStream() throws IOException { if (valueChecked == true) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java index 17a27f16b9a4a..c622e828e4c7d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java @@ -49,7 +49,7 @@ private Utils() { * output stream * @param n * The integer to be encoded - * @throws IOException + * @throws IOException raised on errors performing I/O. * @see Utils#writeVLong(DataOutput, long) */ public static void writeVInt(DataOutput out, int n) throws IOException { @@ -95,7 +95,7 @@ public static void writeVInt(DataOutput out, int n) throws IOException { * output stream * @param n * the integer number - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @SuppressWarnings("fallthrough") public static void writeVLong(DataOutput out, long n) throws IOException { @@ -170,7 +170,7 @@ public static void writeVLong(DataOutput out, long n) throws IOException { * @param in * input stream * @return the decoded integer - * @throws IOException + * @throws IOException raised on errors performing I/O. * * @see Utils#readVLong(DataInput) */ @@ -199,7 +199,7 @@ public static int readVInt(DataInput in) throws IOException { * @param in * input stream * @return the decoded long integer. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static long readVLong(DataInput in) throws IOException { @@ -249,9 +249,9 @@ public static long readVLong(DataInput in) throws IOException { /** * Write a String as a VInt n, followed by n Bytes as in Text format. * - * @param out - * @param s - * @throws IOException + * @param out out + * @param s s + * @throws IOException raised on errors performing I/O. */ public static void writeString(DataOutput out, String s) throws IOException { if (s != null) { @@ -271,7 +271,7 @@ public static void writeString(DataOutput out, String s) throws IOException { * @param in * The input stream. * @return The string - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static String readString(DataInput in) throws IOException { int length = readVInt(in); @@ -299,7 +299,7 @@ public static final class Version implements Comparable { * * @param in * input stream - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Version(DataInput in) throws IOException { major = in.readShort(); @@ -326,7 +326,7 @@ public Version(short major, short minor) { * * @param out * The DataOutput object. - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ public void write(DataOutput out) throws IOException { out.writeShort(major); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java index 06dd0d45b3c2d..8a1e6fe235ac7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java @@ -49,7 +49,11 @@ public class AsyncCallHandler { private static final ThreadLocal> ASYNC_RETURN = new ThreadLocal<>(); - /** @return the async return value from {@link AsyncCallHandler}. */ + /** + * @return the async return value from {@link AsyncCallHandler}. + * @param T + * @param R + */ @InterfaceStability.Unstable @SuppressWarnings("unchecked") public static AsyncGet getAsyncReturn() { @@ -62,7 +66,10 @@ public static AsyncGet getAsyncReturn() { } } - /** For the lower rpc layers to set the async return value. */ + /** + * For the lower rpc layers to set the async return value. + * @param asyncReturn asyncReturn + */ @InterfaceStability.Unstable public static void setLowerLayerAsyncReturn( AsyncGet asyncReturn) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java index 842811edb399a..394ccba22a46e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java @@ -75,6 +75,10 @@ public class RetryPolicies { *
+   * <p>
    * Keep trying forever with a fixed time between attempts.
-   * <p/>
+   * </p>
+   *
+   * @param sleepTime sleepTime
+   * @param timeUnit timeUnit
+   * @return RetryPolicy
    */
   public static final RetryPolicy retryForeverWithFixedSleep(long sleepTime,
       TimeUnit timeUnit) {
@@ -87,6 +91,11 @@ public static final RetryPolicy retryForeverWithFixedSleep(long sleepTime,
    * Keep trying a limited number of times, waiting a fixed time between attempts,
    * and then fail by re-throwing the exception.
-   * <p/>
+   * <p>
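+   * <p>A hypothetical sketch, illustrative only and not part of the
+   * original patch:
+   * <pre>
+   *   RetryPolicy policy = RetryPolicies.retryUpToMaximumCountWithFixedSleep(
+   *       3, 500, TimeUnit.MILLISECONDS);
+   * </pre>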
+   *
+   * @param maxRetries maxRetries
+   * @param sleepTime sleepTime
+   * @param timeUnit timeUnit
+   * @return RetryPolicy
    */
   public static final RetryPolicy retryUpToMaximumCountWithFixedSleep(int maxRetries, long sleepTime, TimeUnit timeUnit) {
     return new RetryUpToMaximumCountWithFixedSleep(maxRetries, sleepTime, timeUnit);
@@ -97,6 +106,11 @@ public static final RetryPolicy retryUpToMaximumCountWithFixedSleep(int maxRetri
    * Keep trying for a maximum time, waiting a fixed time between attempts,
    * and then fail by re-throwing the exception.
-   * <p/>
+   * <p>
+   *
+   * @param timeUnit timeUnit
+   * @param sleepTime sleepTime
+   * @param maxTime maxTime
+   * @return RetryPolicy
    */
   public static final RetryPolicy retryUpToMaximumTimeWithFixedSleep(long maxTime, long sleepTime, TimeUnit timeUnit) {
     return new RetryUpToMaximumTimeWithFixedSleep(maxTime, sleepTime, timeUnit);
@@ -108,6 +122,11 @@ public static final RetryPolicy retryUpToMaximumTimeWithFixedSleep(long maxTime,
    * Keep trying a limited number of times, waiting a fixed time between attempts,
    * and then fail by re-throwing the exception.
    * The time between attempts is sleepTime multiplied by the number of tries so far.
-   * <p/>
+   * <p>
+   *
+   * @param sleepTime sleepTime
+   * @param maxRetries maxRetries
+   * @param timeUnit timeUnit
+   * @return RetryPolicy
    */
   public static final RetryPolicy retryUpToMaximumCountWithProportionalSleep(int maxRetries, long sleepTime, TimeUnit timeUnit) {
     return new RetryUpToMaximumCountWithProportionalSleep(maxRetries, sleepTime, timeUnit);
@@ -120,6 +139,12 @@ public static final RetryPolicy retryUpToMaximumCountWithProportionalSleep(int m
    * The time between attempts is sleepTime multiplied by a random
    * number in the range of [0, 2 to the number of retries)
-   * <p/>
+   * <p>
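+   * <p>A hypothetical sketch, illustrative only and not part of the
+   * original patch:
+   * <pre>
+   *   RetryPolicy policy = RetryPolicies.exponentialBackoffRetry(
+   *       5, 100, TimeUnit.MILLISECONDS);
+   * </pre>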
+   *
+   *
+   * @param timeUnit timeUnit
+   * @param maxRetries maxRetries
+   * @param sleepTime sleepTime
+   * @return RetryPolicy
    */
   public static final RetryPolicy exponentialBackoffRetry(
       int maxRetries, long sleepTime, TimeUnit timeUnit) {
@@ -130,6 +155,10 @@ public static final RetryPolicy exponentialBackoffRetry(
-   * <p/>
+   * <p>
    * Set a default policy with some explicit handlers for specific exceptions.
-   * <p/>
+   * </p>
+   *
+   * @param exceptionToPolicyMap exceptionToPolicyMap
+   * @param defaultPolicy defaultPolicy
+   * @return RetryPolicy
    */
   public static final RetryPolicy retryByException(RetryPolicy defaultPolicy,
       Map, RetryPolicy> exceptionToPolicyMap) {
@@ -141,6 +170,10 @@
    * A retry policy for RemoteException
    * Set a default policy with some explicit handlers for specific exceptions.
-   * <p/>
+   * <p>
+ * + * @param defaultPolicy defaultPolicy + * @param exceptionToPolicyMap exceptionToPolicyMap + * @return RetryPolicy */ public static final RetryPolicy retryByRemoteException( RetryPolicy defaultPolicy, @@ -150,6 +183,9 @@ public static final RetryPolicy retryByRemoteException( /** * A retry policy for exceptions other than RemoteException. + * @param defaultPolicy defaultPolicy + * @param exceptionToPolicyMap exceptionToPolicyMap + * @return RetryPolicy */ public static final RetryPolicy retryOtherThanRemoteException( RetryPolicy defaultPolicy, @@ -437,6 +473,7 @@ public String toString() { * where t_i and n_i are the i-th pair of sleep time and number of retries. * Note that the white spaces in the string are ignored. * + * @param s input string * @return the parsed object, or null if the parsing fails. */ public static MultipleLinearRandomRetry parseCommaSeparatedString(String s) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java index 7fcd5fd4b0080..a8bc50c702adb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java @@ -34,6 +34,7 @@ public class RetryProxy { * @param iface the interface that the retry will implement * @param implementation the instance whose methods should be retried * @param retryPolicy the policy for retrying method call failures + * @param T * @return the retry proxy */ public static Object create(Class iface, T implementation, @@ -51,6 +52,7 @@ public static Object create(Class iface, T implementation, * @param iface the interface that the retry will implement * @param proxyProvider provides implementation instances whose methods should be retried * @param retryPolicy the policy for retrying or failing over method call failures + * @param T * @return the retry proxy */ public static Object create(Class iface, @@ -69,6 +71,7 @@ public static Object create(Class iface, * {@link RetryPolicies#TRY_ONCE_THEN_FAIL} is used. * * @param iface the interface that the retry will implement + * @param T * @param implementation the instance whose methods should be retried * @param methodNameToPolicyMap a map of method names to retry policies * @return the retry proxy @@ -90,6 +93,8 @@ public static Object create(Class iface, T implementation, * @param iface the interface that the retry will implement * @param proxyProvider provides implementation instances whose methods should be retried * @param methodNameToPolicyMap map of method names to retry policies + * @param defaultPolicy defaultPolicy + * @param T * @return the retry proxy */ public static Object create(Class iface, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java index c035a42d4a751..5a40cf0f08cc2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java @@ -48,7 +48,7 @@ public class RetryUtils { * - non-IOException. 
    * </pre>
    *
-   * @param conf
+   * @param conf configuration
    * @param retryPolicyEnabledKey     conf property key for enabling retry
    * @param defaultRetryPolicyEnabled default retryPolicyEnabledKey conf value
    * @param retryPolicySpecKey        conf property key for retry policy spec
@@ -168,7 +168,7 @@ public String toString() {
    *   Retry policy spec:
    *     N pairs of sleep-time and number-of-retries "s1,n1,s2,n2,..."
    *
-   * @param conf
+   * @param conf configuration
    * @param retryPolicyEnabledKey     conf property key for enabling retry
    * @param defaultRetryPolicyEnabled default retryPolicyEnabledKey conf value
    * @param retryPolicySpecKey        conf property key for retry policy spec
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java
index d89442e703fce..87a29565ba606 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java
@@ -42,6 +42,8 @@ public interface Deserializer {
   /**
    * <p>Prepare the deserializer for reading.</p>
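+   * <p>A hypothetical sketch of the open/deserialize/close lifecycle
+   * (deserializer and in are assumed names; illustrative only, not part
+   * of the original patch):
+   * <pre>
+   *   deserializer.open(in);
+   *   T obj = deserializer.deserialize(null);
+   *   deserializer.close();
+   * </pre>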
+   * @param in input stream
+   * @throws IOException raised on errors performing I/O.
    */
   void open(InputStream in) throws IOException;
   
@@ -53,12 +55,15 @@ public interface Deserializer {
    * stream. Otherwise, if the object t is null a new
    * deserialized object will be created.
    * </p>
+   * @param t the object to deserialize into, or null
    * @return the deserialized object
+   * @throws IOException raised on errors performing I/O.
    */
   T deserialize(T t) throws IOException;
   
   /**
    * <p>Close the underlying input stream and clear up any resources.</p>
+   * @throws IOException raised on errors performing I/O.
    */
   void close() throws IOException;
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
index ce0c3fe398eed..0b166ddc1f282 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
@@ -52,6 +52,8 @@ public class SerializationFactory extends Configured {
    * property from conf, which is a comma-delimited list of
    * classnames.
    * </p>
+   *
+   * @param conf configuration
    */
   public SerializationFactory(Configuration conf) {
     super(conf);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java
index 2c6dd124c4dc0..50a433c52b138 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java
@@ -42,16 +42,21 @@ public interface Serializer {
   /**
    * <p>Prepare the serializer for writing.</p>
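+   * <p>A hypothetical sketch of the open/serialize/close lifecycle
+   * (serializer and out are assumed names; illustrative only, not part
+   * of the original patch):
+   * <pre>
+   *   serializer.open(out);
+   *   serializer.serialize(t);
+   *   serializer.close();
+   * </pre>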
+   * @param out output stream
+   * @throws IOException raised on errors performing I/O.
    */
   void open(OutputStream out) throws IOException;
   
   /**
    * <p>Serialize <code>t</code> to the underlying output stream.</p>
+   * @param t the object to serialize
+   * @throws IOException raised on errors performing I/O.
    */
   void serialize(T t) throws IOException;
   
   /**
    * <p>Close the underlying output stream and clear up any resources.</p>
+ * @throws IOException raised on errors performing I/O. */ void close() throws IOException; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/PrometheusMetricsSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/PrometheusMetricsSink.java index a59ad5f227be5..19c77cfd4e896 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/PrometheusMetricsSink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/PrometheusMetricsSink.java @@ -84,6 +84,10 @@ public void putMetrics(MetricsRecord metricsRecord) { /** * Convert CamelCase based names to lower-case names where the separator * is the underscore, to follow prometheus naming conventions. + * + * @param metricName metricName + * @param recordName recordName + * @return prometheusName */ public String prometheusName(String recordName, String metricName) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java index e71bf6d40dd21..4a1087a093c28 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java @@ -138,6 +138,8 @@ public void addToken(Text alias, Token t) { /** * Return all the tokens in the in-memory map. + * + * @return all the tokens in the in-memory map. */ public Collection> getAllTokens() { return tokenMap.values(); @@ -145,6 +147,8 @@ public Collection> getAllTokens() { /** * Returns an unmodifiable version of the full map of aliases to Tokens. + * + * @return TokenMap */ public Map> getTokenMap() { return Collections.unmodifiableMap(tokenMap); @@ -192,6 +196,8 @@ public void removeSecretKey(Text alias) { /** * Return all the secret key entries in the in-memory map. + * + * @return Text List */ public List getAllSecretKeys() { List list = new java.util.ArrayList(); @@ -202,6 +208,8 @@ public List getAllSecretKeys() { /** * Returns an unmodifiable version of the full map of aliases to secret keys. + * + * @return SecretKeyMap */ public Map getSecretKeyMap() { return Collections.unmodifiableMap(secretKeysMap); @@ -209,9 +217,9 @@ public Map getSecretKeyMap() { /** * Convenience method for reading a token storage file and loading its Tokens. - * @param filename - * @param conf - * @throws IOException + * @param filename filename + * @param conf configuration + * @throws IOException raised on errors performing I/O. */ public static Credentials readTokenStorageFile(Path filename, Configuration conf) @@ -233,9 +241,10 @@ public static Credentials readTokenStorageFile(Path filename, /** * Convenience method for reading a token storage file and loading its Tokens. - * @param filename - * @param conf - * @throws IOException + * @param filename filename + * @param conf configuration + * @throws IOException raised on errors performing I/O. + * @return Token */ public static Credentials readTokenStorageFile(File filename, Configuration conf) @@ -256,6 +265,9 @@ public static Credentials readTokenStorageFile(File filename, /** * Convenience method for reading a token from a DataInputStream. + * + * @param in DataInputStream + * @throws IOException raised on errors performing I/O. 
*/ public void readTokenStorageStream(DataInputStream in) throws IOException { byte[] magic = new byte[TOKEN_STORAGE_MAGIC.length]; @@ -335,8 +347,8 @@ public void writeTokenStorageFile(Path filename, Configuration conf, /** * Stores all the keys to DataOutput. - * @param out - * @throws IOException + * @param out DataOutput + * @throws IOException raised on errors performing I/O. */ @Override public void write(DataOutput out) throws IOException { @@ -401,8 +413,8 @@ void readProto(DataInput in) throws IOException { /** * Loads all the keys. - * @param in - * @throws IOException + * @param in DataInput + * @throws IOException raised on errors performing I/O. */ @Override public void readFields(DataInput in) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/GroupMappingServiceProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/GroupMappingServiceProvider.java index 3a9073bbffaba..f37089fb55a79 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/GroupMappingServiceProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/GroupMappingServiceProvider.java @@ -40,18 +40,18 @@ public interface GroupMappingServiceProvider { * Returns EMPTY list in case of non-existing user * @param user User's name * @return group memberships of user - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public List getGroups(String user) throws IOException; /** * Refresh the cache of groups and user mapping - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void cacheGroupsRefresh() throws IOException; /** * Caches the group user information * @param groups list of groups to add to cache - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void cacheGroupsAdd(List groups) throws IOException; @@ -60,7 +60,7 @@ public interface GroupMappingServiceProvider { * Returns EMPTY set in case of non-existing user * @param user User's name * @return set of group memberships of user - * @throws IOException + * @throws IOException raised on errors performing I/O. */ default Set getGroupsSet(String user) throws IOException { //Override to form the set directly to avoid another conversion diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java index 70c633cdf8a23..23992ac76192b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java @@ -465,7 +465,7 @@ public static Groups getUserToGroupsMappingService() { /** * Get the groups being used to map user-to-groups. - * @param conf + * @param conf configuration * @return the groups being used to map user-to-groups. */ public static synchronized Groups getUserToGroupsMappingService( @@ -482,7 +482,7 @@ public static synchronized Groups getUserToGroupsMappingService( /** * Create new groups used to map user-to-groups with loaded configuration. - * @param conf + * @param conf configuration * @return the groups being used to map user-to-groups. 
*/ @Private diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java index df96c500cd08b..228670b425890 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java @@ -45,7 +45,7 @@ public class HadoopKerberosName extends KerberosName { /** * Create a name from the full Kerberos principal name. - * @param name + * @param name name */ public HadoopKerberosName(String name) { super(name); @@ -58,7 +58,7 @@ public HadoopKerberosName(String name) { * method should be invoked directly. * * @param conf the new configuration - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static void setConfiguration(Configuration conf) throws IOException { final String defaultRule; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java index b2797871339e3..ea42d3d962326 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java @@ -435,7 +435,8 @@ protected boolean isSimpleAuthentication(Configuration conf) { * This is a recurrent problem * (that is: it keeps creeping back with JVM updates); * a fast failure is the best tactic. - * @throws NoSuchAlgorithmException + * @throws NoSuchAlgorithmException when a particular cryptographic algorithm is + * requested but is not available in the environment. */ protected void validateKeyLength() throws NoSuchAlgorithmException { @@ -1046,7 +1047,7 @@ private void failif(boolean condition, * @param conf configuration * @param argv argument list * @return an exception - * @throws Exception + * @throws Exception Exception */ public static int exec(Configuration conf, String... argv) throws Exception { try(KDiag kdiag = new KDiag()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KerberosInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KerberosInfo.java index 062dcff61e1d9..0d3f8c4a8a134 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KerberosInfo.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KerberosInfo.java @@ -31,7 +31,10 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving public @interface KerberosInfo { - /** Key for getting server's Kerberos principal name from Configuration */ + /** + * Key for getting server's Kerberos principal name from Configuration. 
+ * @return serverPrincipal + */ String serverPrincipal(); String clientPrincipal() default ""; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NullGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NullGroupsMapping.java index 9592ecc32c012..aebb50a0af4a4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NullGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NullGroupsMapping.java @@ -39,7 +39,7 @@ public void cacheGroupsAdd(List groups) { * * @param user User's name * @return set of group memberships of user - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public Set getGroupsSet(String user) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java index 9c4fb64d149c3..9cd85499f5803 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java @@ -136,6 +136,7 @@ public static URI nestURIForLocalJavaKeyStoreProvider(final URI localFile) * @param config the existing configuration with provider path * @param fileSystemClass the class which providers must be compatible * @return Configuration clone with new provider path + * @throws IOException raised on errors performing I/O. */ public static Configuration excludeIncompatibleCredentialProviders( Configuration config, Class fileSystemClass) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java index 005b2948ea2a6..c4f636e374519 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java @@ -42,14 +42,14 @@ public interface RefreshUserMappingsProtocol { /** * Refresh user to group mappings. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Idempotent public void refreshUserToGroupsMappings() throws IOException; /** * Refresh superuser proxy group list - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Idempotent public void refreshSuperUserGroupsConfiguration() throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java index a91a90ac7c901..2a8c3bf30c75f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java @@ -281,7 +281,7 @@ public int read(byte[] b, int off, int len) throws IOException { *

* Fewer bytes than requested might be skipped. The actual number of bytes * skipped is equal to n or the result of a call to - * {@link #available() available}, whichever is smaller. If + * {@link #available()}, whichever is smaller. If * n is less than zero, no bytes are skipped. * *

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java index dd6c42e1491a8..0688ec6cd1c81 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java @@ -46,7 +46,7 @@ public class SaslPropertiesResolver implements Configurable{ * Looks up the configuration to see if there is custom class specified. * Constructs the instance by passing the configuration directly to the * constructor to achieve thread safety using final fields. - * @param conf + * @param conf configuration * @return SaslPropertiesResolver */ public static SaslPropertiesResolver getInstance(Configuration conf) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java index 938eeeba96786..9878aec7253e7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java @@ -351,9 +351,9 @@ String getServerPrincipal(SaslAuth authType) throws IOException { /** * Do client side SASL authentication with server via the given IpcStreams. * - * @param ipcStreams + * @param ipcStreams ipcStreams * @return AuthMethod used to negotiate the connection - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public AuthMethod saslConnect(IpcStreams ipcStreams) throws IOException { // redefined if/when a SASL negotiation starts, can be queried if the @@ -521,7 +521,7 @@ private boolean useWrap() { * * @param in - InputStream used to make the connection * @return InputStream that may be using SASL unwrap - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public InputStream getInputStream(InputStream in) throws IOException { if (useWrap()) { @@ -537,7 +537,7 @@ public InputStream getInputStream(InputStream in) throws IOException { * * @param out - OutputStream used to make the connection * @return OutputStream that may be using wrapping - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public OutputStream getOutputStream(OutputStream out) throws IOException { if (useWrap()) { @@ -638,7 +638,11 @@ public void write(byte[] buf, int off, int len) throws IOException { } } - /** Release resources used by wrapped saslClient */ + /** + * Release resources used by wrapped saslClient. 
+   * @throws SaslException if authentication or generating response fails,
+   *                       or SASL protocol mixup
+   */
   public void dispose() throws SaslException {
     if (saslClient != null) {
       saslClient.dispose();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
index 7c3f14da21cf5..1d8fa6eee6127 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
@@ -208,7 +208,11 @@ static char[] encodePassword(byte[] password) {
         StandardCharsets.UTF_8).toCharArray();
   }
 
-  /** Splitting fully qualified Kerberos name into parts */
+  /**
+   * Split a fully qualified Kerberos name into parts.
+   * @param fullName fullName
+   * @return the name split into an array of components
+   */
   public static String[] splitKerberosName(String fullName) {
     return fullName.split("[/@]");
   }
@@ -240,17 +244,29 @@ private static AuthMethod valueOf(byte code) {
       return i < 0 || i >= values().length ? null : values()[i];
     }
 
-    /** Return the SASL mechanism name */
+    /**
+     * Return the SASL mechanism name.
+     * @return mechanismName
+     */
    public String getMechanismName() {
      return mechanismName;
    }
 
-    /** Read from in */
+    /**
+     * Read from in.
+     *
+     * @param in DataInput
+     * @throws IOException raised on errors performing I/O.
+     */
    public static AuthMethod read(DataInput in) throws IOException {
      return valueOf(in.readByte());
    }
 
-    /** Write to out */
+    /**
+     * Write to out.
+     * @param out DataOutput
+     * @throws IOException raised on errors performing I/O.
+     */
    public void write(DataOutput out) throws IOException {
      out.write(code);
    }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
index d1eab8f4e1bbd..3cb9523ee3207 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
@@ -210,7 +210,14 @@ private static Integer parseId(final String idStr) {
   /**
    * Get the list of users or groups returned by the specified command,
    * and save them in the corresponding map.
-   * @throws IOException
+   *
+   * @param map map
+   * @param mapName mapName
+   * @param command command
+   * @param staticMapping staticMapping
+   * @param regex regex
+   * @throws IOException raised on errors performing I/O.
+   * @return true if the map was updated
    */
   @VisibleForTesting
   public static boolean updateMapInternal(BiMap<Integer, String> map,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java
index eff6985471b4c..01d6f299d17d5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java
@@ -92,6 +92,7 @@ public void cacheGroupsAdd(List<String> groups) throws IOException {
    *
    * @param netgroup return users for this netgroup
    * @return list of users for a given netgroup
+   * @throws IOException raised on errors performing I/O.
*/ protected List getUsersForNetgroup(String netgroup) throws IOException { @@ -128,6 +129,7 @@ protected List getUsersForNetgroup(String netgroup) * * @param netgroup get users for this netgroup * @return string of users for a given netgroup in getent netgroups format + * @throws IOException raised on errors performing I/O. */ protected String execShellGetUserForNetgroup(final String netgroup) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java index b2efe502144cb..c735296192d20 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java @@ -589,6 +589,7 @@ public static UserGroupInformation getCurrentUser() throws IOException { * @param user The user name, or NULL if none is specified. * * @return The most appropriate UserGroupInformation + * @throws IOException raised on errors performing I/O. */ public static UserGroupInformation getBestUGI( String ticketCachePath, String user) throws IOException { @@ -609,6 +610,7 @@ public static UserGroupInformation getBestUGI( * @param ticketCache the path to the ticket cache file * * @throws IOException if the kerberos login fails + * @return UserGroupInformation */ @InterfaceAudience.Public @InterfaceStability.Evolving @@ -630,8 +632,9 @@ public static UserGroupInformation getUGIFromTicketCache( * The creator of subject is responsible for * renewing credentials. * - * @throws IOException + * @throws IOException raised on errors performing I/O. * @throws KerberosAuthException if the kerberos login fails + * @return UserGroupInformation */ public static UserGroupInformation getUGIFromSubject(Subject subject) throws IOException { @@ -686,7 +689,7 @@ public static UserGroupInformation getLoginUser() throws IOException { * remove the login method that is followed by a space from the username * e.g. "jack (auth:SIMPLE)" {@literal ->} "jack" * - * @param userName + * @param userName userName * @return userName without login method */ public static String trimLoginMethod(String userName) { @@ -1106,7 +1109,7 @@ static long getNextTgtRenewalTime(final long tgtEndTime, final long now, * file and logs them in. They become the currently logged-in user. * @param user the principal name to load from the keytab * @param path the path to the keytab file - * @throws IOException + * @throws IOException raised on errors performing I/O. * @throws KerberosAuthException if it's a kerberos login exception. */ @InterfaceAudience.Public @@ -1136,7 +1139,7 @@ static void loginUserFromKeytab(String user, * This method assumes that the user logged in by calling * {@link #loginUserFromKeytab(String, String)}. * - * @throws IOException + * @throws IOException raised on errors performing I/O. * @throws KerberosAuthException if a failure occurred in logout, * or if the user did not log in by invoking loginUserFromKeyTab() before. */ @@ -1176,7 +1179,7 @@ public void logoutUserFromKeytab() throws IOException { /** * Re-login a user from keytab if TGT is expired or is close to expiry. * - * @throws IOException + * @throws IOException raised on errors performing I/O. * @throws KerberosAuthException if it's a kerberos login exception. 
*/ public void checkTGTAndReloginFromKeytab() throws IOException { @@ -1224,7 +1227,7 @@ void fixKerberosTicketOrder() { * happened already. * The Subject field of this UserGroupInformation object is updated to have * the new credentials. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @throws KerberosAuthException on a failure */ @InterfaceAudience.Public @@ -1241,7 +1244,7 @@ public void reloginFromKeytab() throws IOException { * Subject field of this UserGroupInformation object is updated to have the * new credentials. * - * @throws IOException + * @throws IOException raised on errors performing I/O. * @throws KerberosAuthException on a failure */ @InterfaceAudience.Public @@ -1278,7 +1281,7 @@ private void reloginFromKeytab(boolean checkTGT, boolean ignoreLastLoginTime) * method assumes that login had happened already. * The Subject field of this UserGroupInformation object is updated to have * the new credentials. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @throws KerberosAuthException on a failure */ @InterfaceAudience.Public @@ -1346,6 +1349,7 @@ private void unprotectedRelogin(HadoopLoginContext login, * @param user the principal name to load from the keytab * @param path the path to the keytab file * @throws IOException if the keytab file can't be read + * @return UserGroupInformation */ public static UserGroupInformation loginUserFromKeytabAndReturnUGI(String user, @@ -1372,8 +1376,9 @@ private boolean hasSufficientTimeElapsed(long now) { } /** - * Did the login happen via keytab + * Did the login happen via keytab. * @return true or false + * @throws IOException raised on errors performing I/O. */ @InterfaceAudience.Public @InterfaceStability.Evolving @@ -1382,8 +1387,9 @@ public static boolean isLoginKeytabBased() throws IOException { } /** - * Did the login happen via ticket cache + * Did the login happen via ticket cache. * @return true or false + * @throws IOException raised on errors performing I/O. */ public static boolean isLoginTicketBased() throws IOException { return getLoginUser().isFromTicket(); @@ -1405,6 +1411,7 @@ public static UserGroupInformation createRemoteUser(String user) { * Create a user from a login name. It is intended to be used for remote * users in RPC, since it won't have any credentials. * @param user the full user principal name, must not be empty or null + * @param authMethod authMethod * @return the UserGroupInformation for the remote user. */ @InterfaceAudience.Public @@ -1474,8 +1481,8 @@ public static AuthenticationMethod valueOf(AuthMethod authMethod) { /** * Create a proxy user using username of the effective user and the ugi of the * real user. 
- * @param user - * @param realUser + * @param user user + * @param realUser realUser * @return proxyUser ugi */ @InterfaceAudience.Public @@ -1788,7 +1795,7 @@ public String toString() { /** * Sets the authentication method in the subject * - * @param authMethod + * @param authMethod authMethod */ public synchronized void setAuthenticationMethod(AuthenticationMethod authMethod) { @@ -1798,7 +1805,7 @@ void setAuthenticationMethod(AuthenticationMethod authMethod) { /** * Sets the authentication method in the subject * - * @param authMethod + * @param authMethod authMethod */ public void setAuthenticationMethod(AuthMethod authMethod) { user.setAuthenticationMethod(AuthenticationMethod.valueOf(authMethod)); @@ -1831,7 +1838,7 @@ public synchronized AuthenticationMethod getRealAuthenticationMethod() { * Returns the authentication method of a ugi. If the authentication method is * PROXY, returns the authentication method of the real user. * - * @param ugi + * @param ugi ugi * @return AuthenticationMethod */ public static AuthenticationMethod getRealAuthenticationMethod( @@ -1933,6 +1940,8 @@ public T doAs(PrivilegedExceptionAction action /** * Log current UGI and token information into specified log. * @param ugi - UGI + * @param log log + * @param caption caption */ @InterfaceAudience.LimitedPrivate({"HDFS", "KMS"}) @InterfaceStability.Unstable @@ -1950,7 +1959,8 @@ public static void logUserInfo(Logger log, String caption, /** * Log all (current, real, login) UGI and token info into specified log. * @param ugi - UGI - * @throws IOException + * @param log - log + * @throws IOException raised on errors performing I/O. */ @InterfaceAudience.LimitedPrivate({"HDFS", "KMS"}) @InterfaceStability.Unstable @@ -1968,7 +1978,7 @@ public static void logAllUserInfo(Logger log, UserGroupInformation ugi) throws /** * Log all (current, real, login) UGI and token info into UGI debug log. * @param ugi - UGI - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static void logAllUserInfo(UserGroupInformation ugi) throws IOException { @@ -2246,7 +2256,7 @@ private static String prependFileAuthority(String keytabPath) { * A test method to print out the current user's UGI. * @param args if there are two arguments, read the user from the keytab * and print it out. - * @throws Exception + * @throws Exception Exception */ public static void main(String [] args) throws Exception { System.out.println("Getting UGI for current user"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509KeystoreManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509KeystoreManager.java index 216d949de1048..dd74bea1c5a0d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509KeystoreManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509KeystoreManager.java @@ -63,8 +63,8 @@ public class ReloadingX509KeystoreManager extends X509ExtendedKeyManager { * @param location local path to the keystore file. * @param storePassword password of the keystore file. * @param keyPassword The password of the key. - * @throws IOException - * @throws GeneralSecurityException + * @throws IOException raised on errors performing I/O. 
+   * @throws GeneralSecurityException thrown if the keystore cannot be loaded
    */
   public ReloadingX509KeystoreManager(String type, String location,
       String storePassword, String keyPassword)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFetcher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFetcher.java
index d74e7bdb10272..a950557d33263 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFetcher.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFetcher.java
@@ -28,14 +28,28 @@
  * compilation units.  Resolution of fetcher impl will be done at runtime.
  */
 public interface DtFetcher {
-  /** Return a key used to identify the object/service implementation. */
+  /**
+   * Return a key used to identify the object/service implementation.
+   * @return ServiceName
+   */
   Text getServiceName();
 
-  /** Used to allow the service API to indicate whether a token is required. */
+  /**
+   * Used to allow the service API to indicate whether a token is required.
+   * @return isTokenRequired
+   */
   boolean isTokenRequired();
 
-  /** Add any number of delegation tokens to Credentials object and return
-   *  a token instance that is appropriate for aliasing, or null if none. */
+  /**
+   * Add any number of delegation tokens to Credentials object and return
+   * a token instance that is appropriate for aliasing, or null if none.
+   *
+   * @param conf configuration
+   * @param creds credentials
+   * @param renewer renewer
+   * @param url url
+   * @throws Exception Exception
+   */
   Token<?> addDelegationTokens(Configuration conf, Credentials creds,
       String renewer, String url) throws Exception;
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFileOperations.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFileOperations.java
index 2160d8b6a82a1..5d80a45f79f22 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFileOperations.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFileOperations.java
@@ -99,7 +99,7 @@ private static Path fileToPath(File f) {
    * @param format a string equal to FORMAT_PB or FORMAT_JAVA.
    * @param creds the Credentials object to be written out.
    * @param conf a Configuration object passed along.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static void doFormattedWrite(
       File f, String format, Credentials creds, Configuration conf)
@@ -118,7 +118,7 @@ public static void doFormattedWrite(
    * @param alias print only tokens matching alias (null matches all).
    * @param conf Configuration object passed along.
    * @param out print to this stream.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static void printTokenFile(
       File tokenFile, Text alias, Configuration conf, PrintStream out)
@@ -170,7 +170,7 @@ public static void printCredentials(
    * @param url pass this URL to fetcher after stripping any http/s prefix.
    * @param renewer pass this renewer to the fetcher.
    * @param conf Configuration object passed along.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
*/ public static void getTokenFile(File tokenFile, String fileFormat, Text alias, Text service, String url, String renewer, Configuration conf) @@ -225,7 +225,7 @@ public static void getTokenFile(File tokenFile, String fileFormat, * @param alias overwrite service field of fetched token with this text. * @param service only apply alias to tokens matching this service text. * @param conf Configuration object passed along. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static void aliasTokenFile(File tokenFile, String fileFormat, Text alias, Text service, Configuration conf) throws Exception { @@ -246,7 +246,7 @@ public static void aliasTokenFile(File tokenFile, String fileFormat, * @param tokenFiles list of local File objects. Last file holds the output. * @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output * @param conf Configuration object passed along. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static void appendTokenFiles( ArrayList tokenFiles, String fileFormat, Configuration conf) @@ -269,8 +269,8 @@ public static void appendTokenFiles( * @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output * @param alias remove only tokens matching alias; null matches all. * @param conf Configuration object passed along. - * @throws IOException - * @throws InterruptedException + * @throws IOException raised on errors performing I/O. + * @throws InterruptedException if the thread is interrupted. */ public static void removeTokenFromFile(boolean cancel, File tokenFile, String fileFormat, Text alias, Configuration conf) @@ -295,8 +295,8 @@ public static void removeTokenFromFile(boolean cancel, * @param fileFormat a string equal to FORMAT_PB or FORMAT_JAVA, for output * @param alias renew only tokens matching alias; null matches all. * @param conf Configuration object passed along. - * @throws IOException - * @throws InterruptedException + * @throws IOException raised on errors performing I/O. + * @throws InterruptedException if the thread is interrupted. */ public static void renewTokenFile( File tokenFile, String fileFormat, Text alias, Configuration conf) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java index bc2d1b6e11a7e..1e137cf522b3a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java @@ -109,9 +109,9 @@ private String[] maybeDoLoginFromKeytabAndPrincipal(String[] args) * Parse the command line arguments and initialize subcommand. * Also will attempt to perform Kerberos login if both -principal and -keytab * flags are passed in args array. 
- * @param args + * @param args args * @return 0 if the argument(s) were recognized, 1 otherwise - * @throws Exception + * @throws Exception Exception */ @Override protected int init(String[] args) throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java index 0141af8237b1b..1cb2b2da0da97 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java @@ -193,7 +193,7 @@ public synchronized Text getKind() { /** * Set the token kind. This is only intended to be used by services that * wrap another service's token. - * @param newKind + * @param newKind newKind */ @InterfaceAudience.Private public synchronized void setKind(Text newKind) { @@ -367,7 +367,7 @@ private static void decodeWritable(Writable obj, /** * Encode this token as a url safe string. * @return the encoded string - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public String encodeToUrlString() throws IOException { return encodeWritable(this); @@ -376,7 +376,7 @@ public String encodeToUrlString() throws IOException { /** * Decode the given url safe string into this token. * @param newValue the encoded string - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void decodeFromUrlString(String newValue) throws IOException { decodeWritable(this, newValue); @@ -481,6 +481,7 @@ private synchronized TokenRenewer getRenewer() throws IOException { /** * Is this token managed so that it can be renewed or cancelled? * @return true, if it can be renewed and cancelled. + * @throws IOException raised on errors performing I/O. */ public boolean isManaged() throws IOException { return getRenewer().isManaged(this); @@ -488,9 +489,10 @@ public boolean isManaged() throws IOException { /** * Renew this delegation token. + * @param conf configuration * @return the new expiration time - * @throws IOException - * @throws InterruptedException + * @throws IOException raised on errors performing I/O. + * @throws InterruptedException if the thread is interrupted. */ public long renew(Configuration conf ) throws IOException, InterruptedException { @@ -499,8 +501,10 @@ public long renew(Configuration conf /** * Cancel this delegation token. - * @throws IOException - * @throws InterruptedException + * + * @param conf configuration + * @throws IOException raised on errors performing I/O. + * @throws InterruptedException if the thread is interrupted. */ public void cancel(Configuration conf ) throws IOException, InterruptedException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenInfo.java index cc76824eb0e13..9234b23202eca 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenInfo.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenInfo.java @@ -31,6 +31,10 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public @interface TokenInfo { - /** The type of TokenSelector to be used */ + /** + * The type of TokenSelector to be used. 
+ * + * @return TokenSelector + */ Class> value(); } \ No newline at end of file diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java index ee66e90f7c4ee..b74a820faec53 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java @@ -82,7 +82,7 @@ protected void addService(Service service) { /** * If the passed object is an instance of {@link Service}, * add it to the list of services managed by this {@link CompositeService} - * @param object + * @param object object * @return true if a service is added, false otherwise. */ protected boolean addIfService(Object object) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceStateModel.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceStateModel.java index 9f282b9f93483..b776784535b59 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceStateModel.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceStateModel.java @@ -54,6 +54,8 @@ public class ServiceStateModel { /** * Create the service state model in the {@link Service.STATE#NOTINITED} * state. + * + * @param name input name */ public ServiceStateModel(String name) { this(name, Service.STATE.NOTINITED); @@ -62,6 +64,7 @@ public ServiceStateModel(String name) { /** * Create a service state model instance in the chosen state * @param state the starting state + * @param name input name */ public ServiceStateModel(String name, Service.STATE state) { this.state = state; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java index 92cdb5835e77d..6c34d1c35caf4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java @@ -38,7 +38,7 @@ public abstract class GetGroupsBase extends Configured implements Tool { /** * Create an instance of this tool using the given configuration. - * @param conf + * @param conf configuration */ protected GetGroupsBase(Configuration conf) { this(conf, System.out); @@ -84,7 +84,7 @@ public int run(String[] args) throws Exception { * * @param conf The configuration to use. * @return The address where the service is listening. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ protected abstract InetSocketAddress getProtocolAddress(Configuration conf) throws IOException; @@ -92,7 +92,7 @@ protected abstract InetSocketAddress getProtocolAddress(Configuration conf) /** * Get a client of the {@link GetUserMappingsProtocol}. * @return A {@link GetUserMappingsProtocol} client proxy. - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ protected GetUserMappingsProtocol getUgmProtocol() throws IOException { GetUserMappingsProtocol userGroupMappingProtocol = diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java index 3e80ac030d3ef..8b1922372ba3c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java @@ -40,7 +40,7 @@ public interface GetUserMappingsProtocol { * Get the groups which are mapped to the given user. * @param user The user to get the groups for. * @return The set of groups the user belongs to. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Idempotent public String[] getGroupsForUser(String user) throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcUtil.java index e7509885f8c8d..650c81cf5bfec 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcUtil.java @@ -109,7 +109,6 @@ public static int compose(int crcA, int crcB, long lengthB, int mod) { * {@code value}. * * @param value value. - * @return byte array. */ public static byte[] intToBytes(int value) { byte[] buf = new byte[4]; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java index 5295d532d7e35..5f3ba9c437af6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java @@ -74,7 +74,6 @@ public enum Type { * * @return the type corresponding to the id. * @param id id - * @return Type */ public static Type valueOf(int id) { if (id < 0 || id >= values().length) { @@ -187,6 +186,8 @@ public static DataChecksum newDataChecksum(byte[] bytes, int offset) * * @param in data input stream * @throws IOException raised on errors performing I/O. + * @return DataChecksum by reading HEADER_LEN + * bytes from input stream */ public static DataChecksum newDataChecksum( DataInputStream in ) throws IOException { @@ -212,6 +213,9 @@ private static Type mapByteToChecksumType(int type) /** * Writes the checksum header to the output stream out. + * + * @param out output stream + * @throws IOException raised on errors performing I/O. */ public void writeHeader( DataOutputStream out ) throws IOException { From 9a13bc01c8fb080a851439222b119e93cc1d390a Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Sun, 15 May 2022 05:25:57 -0700 Subject: [PATCH 43/53] HADOOP-18229. Fix some java doc compilation 20+ warnings. 
Change Maven Config, Skip Proto File --- hadoop-common-project/hadoop-common/pom.xml | 16 ++++++++ .../fs/permission/PermissionStatus.java | 40 ++++++++++++++++--- .../apache/hadoop/io/file/tfile/TFile.java | 3 +- .../apache/hadoop/security/Credentials.java | 1 + .../apache/hadoop/security/SaslRpcServer.java | 1 + .../hadoop/security/token/DtFetcher.java | 1 + 6 files changed, 56 insertions(+), 6 deletions(-) diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index 6e762f567c180..f4d6e61eea3ae 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -1171,6 +1171,22 @@ + + org.apache.maven.plugins + maven-javadoc-plugin + + + **/FSProtos.java + + *.proto:*.tracing:*.protobuf + + -Xmaxerrs + 1000 + -Xmaxwarns + 1000 + + + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java index 3c3693f613baf..22e43168cc867 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java @@ -39,7 +39,13 @@ public class PermissionStatus implements Writable { WritableFactories.setFactory(PermissionStatus.class, FACTORY); } - /** Create an immutable {@link PermissionStatus} object. */ + /** + * Create an immutable {@link PermissionStatus} object. + * @param user user + * @param group group + * @param permission permission + * @return PermissionStatus + */ public static PermissionStatus createImmutable( String user, String group, FsPermission permission) { return new PermissionStatus(user, group, permission) { @@ -56,20 +62,36 @@ public void readFields(DataInput in) throws IOException { private PermissionStatus() {} - /** Constructor */ + /** + * Constructor. + * + * @param user user + * @param group group + * @param permission permission + */ public PermissionStatus(String user, String group, FsPermission permission) { username = user; groupname = group; this.permission = permission; } - /** Return user name */ + /** + * Return user name. + * @return user name + */ public String getUserName() {return username;} - /** Return group name */ + /** + * Return group name. + * @return group name + */ public String getGroupName() {return groupname;} - /** Return permission */ + /** + * Return permission. + * + * @return FsPermission + */ public FsPermission getPermission() {return permission;} @Override @@ -86,6 +108,9 @@ public void write(DataOutput out) throws IOException { /** * Create and initialize a {@link PermissionStatus} from {@link DataInput}. + * @param in data input + * @throws IOException raised on errors performing I/O. + * @return PermissionStatus */ public static PermissionStatus read(DataInput in) throws IOException { PermissionStatus p = new PermissionStatus(); @@ -95,6 +120,11 @@ public static PermissionStatus read(DataInput in) throws IOException { /** * Serialize a {@link PermissionStatus} from its base components. + * @param out out + * @param username username + * @param groupname groupname + * @param permission FsPermission + * @throws IOException raised on errors performing I/O. 
*/ public static void write(DataOutput out, String username, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java index fa6b6ee67cdbb..1a6ef3e4b2824 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java @@ -549,7 +549,7 @@ public DataOutputStream prepareAppendKey(int length) throws IOException { * guarantees that the value is encoded in one chunk, and avoids * intermediate chunk buffering. * @throws IOException raised on errors performing I/O. - * + * @return DataOutputStream */ public DataOutputStream prepareAppendValue(int length) throws IOException { if (state != State.END_KEY) { @@ -1701,6 +1701,7 @@ public int getKey(BytesWritable key) throws IOException { * * @param value value * @throws IOException raised on errors performing I/O. + * @return long value */ public long getValue(BytesWritable value) throws IOException { DataInputStream dis = getValueStream(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java index 4a1087a093c28..4f51701ec5d60 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java @@ -220,6 +220,7 @@ public Map getSecretKeyMap() { * @param filename filename * @param conf configuration * @throws IOException raised on errors performing I/O. + * @return Credentials */ public static Credentials readTokenStorageFile(Path filename, Configuration conf) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java index 1d8fa6eee6127..9139f42bcabbb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java @@ -257,6 +257,7 @@ public String getMechanismName() { * * @param in DataInput * @throws IOException raised on errors performing I/O. + * @return AuthMethod */ public static AuthMethod read(DataInput in) throws IOException { return valueOf(in.readByte()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFetcher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFetcher.java index a950557d33263..e82920f14f7f3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFetcher.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFetcher.java @@ -49,6 +49,7 @@ public interface DtFetcher { * @param renewer renewer * @param url url * @throws Exception Exception + * @return DelegationTokens */ Token addDelegationTokens(Configuration conf, Credentials creds, String renewer, String url) throws Exception; From e91ccc02c7897ce323ef2425d0356e9d49ed754a Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Sun, 15 May 2022 06:46:55 -0700 Subject: [PATCH 44/53] HADOOP-18229. Fix some java doc compilation 100 warnings. 
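The pattern applied below expands single-line summaries into full javadoc with
a described tag for every parameter and return value, which is what silences
the remaining javadoc warnings. A representative sketch of the convention on a
hypothetical accessor pair (illustrative only, not taken from the diff):

  class Example {
    private int value;

    /**
     * Set the value of this Example.
     * @param value value
     */
    void set(int value) { this.value = value; }

    /**
     * Return the value of this Example.
     * @return value of this Example.
     */
    int get() { return value; }
  }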
--- .../org/apache/hadoop/io/FloatWritable.java | 10 ++- .../org/apache/hadoop/io/GenericWritable.java | 2 + .../java/org/apache/hadoop/io/IOUtils.java | 14 +++- .../org/apache/hadoop/io/InputBuffer.java | 23 +++++- .../org/apache/hadoop/io/IntWritable.java | 10 ++- .../org/apache/hadoop/io/LongWritable.java | 10 ++- .../java/org/apache/hadoop/io/MD5Hash.java | 81 +++++++++++++++---- .../java/org/apache/hadoop/io/MapFile.java | 34 ++++++-- .../apache/hadoop/io/MultipleIOException.java | 11 ++- .../org/apache/hadoop/io/NullWritable.java | 5 +- .../org/apache/hadoop/io/ObjectWritable.java | 61 +++++++++++--- .../org/apache/hadoop/io/OutputBuffer.java | 11 ++- 12 files changed, 222 insertions(+), 50 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java index 367fc946da135..88bdf13c75c70 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java @@ -33,10 +33,16 @@ public FloatWritable() {} public FloatWritable(float value) { set(value); } - /** Set the value of this FloatWritable. */ + /** + * Set the value of this FloatWritable. + * @param value value + */ public void set(float value) { this.value = value; } - /** Return the value of this FloatWritable. */ + /** + * Return the value of this FloatWritable. + * @return value + */ public float get() { return value; } @Override diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java index 7cfeed7f931d7..5abe1bcedd88f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java @@ -109,6 +109,7 @@ public void set(Writable obj) { /** * Return the wrapped instance. + * @return the wrapped instance. */ public Writable get() { return instance; @@ -145,6 +146,7 @@ public void write(DataOutput out) throws IOException { /** * Return all classes that may be wrapped. Subclasses should implement this * to return a constant array of classes. + * @return all classes that may be wrapped. */ abstract protected Class[] getTypes(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java index 121af64b01182..ef47f4c7fa47a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java @@ -59,7 +59,8 @@ public class IOUtils { * @param out OutputStream to write to * @param buffSize the size of the buffer * @param close whether or not close the InputStream and - * OutputStream at the end. The streams are closed in the finally clause. + * OutputStream at the end. The streams are closed in the finally clause. + * @throws IOException raised on errors performing I/O. 
*/ public static void copyBytes(InputStream in, OutputStream out, int buffSize, boolean close) @@ -85,7 +86,8 @@ public static void copyBytes(InputStream in, OutputStream out, * * @param in InputStrem to read from * @param out OutputStream to write to - * @param buffSize the size of the buffer + * @param buffSize the size of the buffer + * @throws IOException raised on errors performing I/O. */ public static void copyBytes(InputStream in, OutputStream out, int buffSize) throws IOException { @@ -107,7 +109,8 @@ public static void copyBytes(InputStream in, OutputStream out, int buffSize) * * @param in InputStrem to read from * @param out OutputStream to write to - * @param conf the Configuration object + * @param conf the Configuration object + * @throws IOException raised on errors performing I/O. */ public static void copyBytes(InputStream in, OutputStream out, Configuration conf) throws IOException { @@ -123,6 +126,7 @@ public static void copyBytes(InputStream in, OutputStream out, Configuration con * @param conf the Configuration object * @param close whether or not close the InputStream and * OutputStream at the end. The streams are closed in the finally clause. + * @throws IOException raised on errors performing I/O. */ public static void copyBytes(InputStream in, OutputStream out, Configuration conf, boolean close) throws IOException { @@ -181,6 +185,7 @@ public static void copyBytes(InputStream in, OutputStream out, long count, * @param off - offset within buf * @param len - amount of data to be read * @return number of bytes read + * @throws IOException raised on errors performing I/O. */ public static int wrappedReadForCompressedData(InputStream is, byte[] buf, int off, int len) throws IOException { @@ -407,6 +412,7 @@ public static List listDirectory(File dir, FilenameFilter filter) * once the sync is done.
    * Borrowed from Uwe Schindler in LUCENE-5588
    * @param fileToSync the file to fsync
+   * @throws IOException raised on errors performing I/O.
    */
   public static void fsync(File fileToSync) throws IOException {
     if (!fileToSync.exists()) {
@@ -440,7 +446,7 @@ public static void fsync(File fileToSync) throws IOException {
    * @param isDir if true, the given file is a directory (Channel should be
    *          opened for read and ignore IOExceptions, because not all file
    *          systems and operating systems allow to fsync on a directory)
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static void fsync(FileChannel channel, boolean isDir)
       throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java
index 0d084b8396f16..71ad63b69d203 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java
@@ -75,20 +75,35 @@ private InputBuffer(Buffer buffer) {
     this.buffer = buffer;
   }
 
-  /** Resets the data that the buffer reads. */
+  /**
+   * Resets the data that the buffer reads.
+   * @param input input
+   * @param length length
+   */
   public void reset(byte[] input, int length) {
     buffer.reset(input, 0, length);
   }
 
-  /** Resets the data that the buffer reads. */
+  /**
+   * Resets the data that the buffer reads.
+   * @param input input
+   * @param start start
+   * @param length length
+   */
   public void reset(byte[] input, int start, int length) {
     buffer.reset(input, start, length);
   }
 
-  /** Returns the current position in the input. */
+  /**
+   * Returns the current position in the input.
+   * @return the current position in the input.
+   */
   public int getPosition() { return buffer.getPosition(); }
 
-  /** Returns the length of the input. */
+  /**
+   * Returns the length of the input.
+   * @return length of the input.
+   */
   public int getLength() { return buffer.getLength(); }
 
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
index f656d028cb054..b3a6a2920fc26 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
@@ -36,10 +36,16 @@ public IntWritable() {}
 
   public IntWritable(int value) { set(value); }
 
-  /** Set the value of this IntWritable. */
+  /**
+   * Set the value of this IntWritable.
+   * @param value value
+   */
   public void set(int value) { this.value = value; }
 
-  /** Return the value of this IntWritable. */
+  /**
+   * Return the value of this IntWritable.
+   * @return value of this IntWritable.
+   */
   public int get() { return value; }
 
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
index b77ca6781a639..b807a9ac22b26 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
@@ -36,10 +36,16 @@ public LongWritable() {}
 
   public LongWritable(long value) { set(value); }
 
-  /** Set the value of this LongWritable. */
+  /**
+   * Set the value of this LongWritable.
+   * @param value value
+   */
   public void set(long value) { this.value = value; }
 
-  /** Return the value of this LongWritable. */
+  /**
+   * Return the value of this LongWritable.
+   * @return value of this LongWritable.
+   */
   public long get() { return value; }
 
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
index 99c17acdd43d2..804d0470386aa 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
@@ -54,12 +54,18 @@ public MD5Hash() {
     this.digest = new byte[MD5_LEN];
   }
 
-  /** Constructs an MD5Hash from a hex string. */
+  /**
+   * Constructs an MD5Hash from a hex string.
+   * @param hex hex string
+   */
   public MD5Hash(String hex) {
     setDigest(hex);
   }
 
-  /** Constructs an MD5Hash with a specified value. */
+  /**
+   * Constructs an MD5Hash with a specified value.
+   * @param digest digest
+   */
   public MD5Hash(byte[] digest) {
     if (digest.length != MD5_LEN)
       throw new IllegalArgumentException("Wrong length: " + digest.length);
@@ -72,7 +78,12 @@ public void readFields(DataInput in) throws IOException {
     in.readFully(digest);
   }
 
-  /** Constructs, reads and returns an instance. */
+  /**
+   * Constructs, reads and returns an instance.
+   * @param in in
+   * @throws IOException raised on errors performing I/O.
+   * @return MD5Hash
+   */
   public static MD5Hash read(DataInput in) throws IOException {
     MD5Hash result = new MD5Hash();
     result.readFields(in);
@@ -85,21 +96,32 @@ public void write(DataOutput out) throws IOException {
     out.write(digest);
   }
 
-  /** Copy the contents of another instance into this instance. */
+  /**
+   * Copy the contents of another instance into this instance.
+   * @param that that
+   */
   public void set(MD5Hash that) {
     System.arraycopy(that.digest, 0, this.digest, 0, MD5_LEN);
   }
 
-  /** Returns the digest bytes. */
+  /**
+   * Returns the digest bytes.
+   * @return digest
+   */
   public byte[] getDigest() { return digest; }
 
-  /** Construct a hash value for a byte array. */
+  /**
+   * Construct a hash value for a byte array.
+   * @param data data
+   * @return MD5Hash
+   */
   public static MD5Hash digest(byte[] data) {
     return digest(data, 0, data.length);
   }
 
   /**
-   * Create a thread local MD5 digester
+   * Create a thread local MD5 digester.
+   * @return MessageDigest
    */
   public static MessageDigest getDigester() {
     MessageDigest digester = DIGESTER_FACTORY.get();
@@ -107,7 +129,12 @@ public static MessageDigest getDigester() {
     return digester;
   }
 
-  /** Construct a hash value for the content from the InputStream. */
+  /**
+   * Construct a hash value for the content from the InputStream.
+   * @param in input stream
+   * @return MD5Hash
+   * @throws IOException raised on errors performing I/O.
+   */
   public static MD5Hash digest(InputStream in) throws IOException {
     final byte[] buffer = new byte[4*1024];
 
@@ -119,7 +146,13 @@ public static MD5Hash digest(InputStream in) throws IOException {
     return new MD5Hash(digester.digest());
   }
 
-  /** Construct a hash value for a byte array. */
+  /**
+   * Construct a hash value for a byte array.
+ * @param data data + * @param start start + * @param len len + * @return MD5Hash + */ public static MD5Hash digest(byte[] data, int start, int len) { byte[] digest; MessageDigest digester = getDigester(); @@ -128,7 +161,13 @@ public static MD5Hash digest(byte[] data, int start, int len) { return new MD5Hash(digest); } - /** Construct a hash value for an array of byte array. */ + /** + * Construct a hash value for an array of byte array. + * @param dataArr dataArr + * @param start start + * @param len len + * @return MD5Hash + */ public static MD5Hash digest(byte[][] dataArr, int start, int len) { byte[] digest; MessageDigest digester = getDigester(); @@ -139,17 +178,28 @@ public static MD5Hash digest(byte[][] dataArr, int start, int len) { return new MD5Hash(digest); } - /** Construct a hash value for a String. */ + /** + * Construct a hash value for a String. + * @param string string + * @return MD5Hash + */ public static MD5Hash digest(String string) { return digest(UTF8.getBytes(string)); } - /** Construct a hash value for a String. */ + /** + * Construct a hash value for a String. + * @param utf8 utf8 + * @return MD5Hash + */ public static MD5Hash digest(UTF8 utf8) { return digest(utf8.getBytes(), 0, utf8.getLength()); } - /** Construct a half-sized version of this MD5. Fits in a long **/ + /** + * Construct a half-sized version of this MD5. Fits in a long + * @return halfDigest + */ public long halfDigest() { long value = 0; for (int i = 0; i < 8; i++) @@ -226,7 +276,10 @@ public String toString() { return buf.toString(); } - /** Sets the digest value from a hex string. */ + /** + * Sets the digest value from a hex string. + * @param hex hex + */ public void setDigest(String hex) { if (hex.length() != MD5_LEN*2) throw new IllegalArgumentException("Wrong length: " + hex.length()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java index 87feb1029ea6b..f48b9d81c3a6b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java @@ -513,8 +513,15 @@ public Reader(FileSystem fs, String dirName, this(new Path(dirName), conf); } - /** Construct a map reader for the named map using the named comparator. + /** + * Construct a map reader for the named map using the named comparator. * @deprecated + * + * @param fs FileSystem + * @param dirName dirName + * @param comparator WritableComparator + * @param conf Configuration + * @throws IOException raised on errors performing I/O. */ @Deprecated public Reader(FileSystem fs, String dirName, WritableComparator comparator, @@ -867,7 +874,10 @@ public synchronized WritableComparable getClosest(WritableComparable key, return nextKey; } - /** Close the map. */ + /** + * Close the map. + * @throws IOException raised on errors performing I/O. + */ @Override public synchronized void close() throws IOException { if (!indexClosed) { @@ -878,7 +888,13 @@ public synchronized void close() throws IOException { } - /** Renames an existing map directory. */ + /** + * Renames an existing map directory. + * @param fs fs + * @param oldName oldName + * @param newName newName + * @throws IOException raised on errors performing I/O. 
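Since the fixes above touch most of the MapFile Reader/Writer surface, a small end-to-end sketch may help; note a MapFile is really a directory holding data and index files, and the directory name here is illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapFile;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;

    public class MapFileDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String dir = "/tmp/demo.map";               // a MapFile is a directory
        try (MapFile.Writer writer = new MapFile.Writer(conf, new Path(dir),
            MapFile.Writer.keyClass(IntWritable.class),
            SequenceFile.Writer.valueClass(Text.class))) {
          writer.append(new IntWritable(1), new Text("one")); // keys must ascend
          writer.append(new IntWritable(2), new Text("two"));
        }
        try (MapFile.Reader reader = new MapFile.Reader(new Path(dir), conf)) {
          Text val = new Text();
          reader.get(new IntWritable(2), val);      // random access by key
          System.out.println(val);                  // two
        }
      }
    }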
+ */ public static void rename(FileSystem fs, String oldName, String newName) throws IOException { Path oldDir = new Path(oldName); @@ -906,8 +922,9 @@ public static void delete(FileSystem fs, String name) throws IOException { * @param keyClass key class (has to be a subclass of Writable) * @param valueClass value class (has to be a subclass of Writable) * @param dryrun do not perform any changes, just report what needs to be done + * @param conf configuration * @return number of valid entries in this MapFile, or -1 if no fixing was needed - * @throws Exception + * @throws Exception Exception */ public static long fix(FileSystem fs, Path dir, Class keyClass, @@ -1007,11 +1024,12 @@ public Merger(Configuration conf) throws IOException { } /** - * Merge multiple MapFiles to one Mapfile + * Merge multiple MapFiles to one Mapfile. * - * @param inMapFiles - * @param outMapFile - * @throws IOException + * @param inMapFiles input inMapFiles + * @param deleteInputs deleteInputs + * @param outMapFile input outMapFile + * @throws IOException raised on errors performing I/O. */ public void merge(Path[] inMapFiles, boolean deleteInputs, Path outMapFile) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java index c9d7ade43064f..fc79887e26262 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java @@ -42,7 +42,11 @@ private MultipleIOException(List exceptions) { /** @return the underlying exceptions */ public List getExceptions() {return exceptions;} - /** A convenient method to create an {@link IOException}. */ + /** + * A convenient method to create an {@link IOException}. + * @param exceptions IOException List + * @return IOException + */ public static IOException createIOException(List exceptions) { if (exceptions == null || exceptions.isEmpty()) { return null; @@ -60,7 +64,10 @@ public static IOException createIOException(List exceptions) { public static class Builder { private List exceptions; - /** Add the given {@link Throwable} to the exception list. */ + /** + * Add the given {@link Throwable} to the exception list. + * @param t Throwable + */ public void add(Throwable t) { if (exceptions == null) { exceptions = new ArrayList<>(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java index 77c590fdb6344..d6e4846264f98 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java @@ -32,7 +32,10 @@ public class NullWritable implements WritableComparable { private NullWritable() {} // no public ctor - /** Returns the single instance of this class. */ + /** + * Returns the single instance of this class. + * @return the single instance of this class. 
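createIOException() is the usual way to fold the failures of a cleanup loop into a single exception. A hedged sketch (the CloseUtil helper and its method are illustrative names, not part of Hadoop):

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.io.MultipleIOException;

    public final class CloseUtil {
      // Close everything, then rethrow all collected failures as one IOException.
      public static void closeAll(List<? extends Closeable> resources)
          throws IOException {
        List<IOException> errors = new ArrayList<>();
        for (Closeable c : resources) {
          try {
            c.close();
          } catch (IOException e) {
            errors.add(e);
          }
        }
        IOException combined = MultipleIOException.createIOException(errors);
        if (combined != null) {   // null when no exception was collected
          throw combined;
        }
      }
    }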
+ */ public static NullWritable get() { return THIS; } @Override diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java index b35a32f288b4b..a7e46bab9b004 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java @@ -54,13 +54,22 @@ public ObjectWritable(Class declaredClass, Object instance) { this.instance = instance; } - /** Return the instance, or null if none. */ + /** + * Return the instance, or null if none. + * @return the instance, or null if none. + */ public Object get() { return instance; } - /** Return the class this is meant to be. */ + /** + * Return the class this is meant to be. + * @return the class this is meant to be. + */ public Class getDeclaredClass() { return declaredClass; } - /** Reset the instance. */ + /** + * Reset the instance. + * @param instance instance + */ public void set(Object instance) { this.declaredClass = instance.getClass(); this.instance = instance; @@ -120,8 +129,16 @@ public void write(DataOutput out) throws IOException { } } - /** Write a {@link Writable}, {@link String}, primitive type, or an array of - * the preceding. */ + /** + * Write a {@link Writable}, {@link String}, primitive type, or an array of + * the preceding. + * + * @param out DataOutput + * @param instance instance + * @param conf Configuration + * @param declaredClass declaredClass + * @throws IOException raised on errors performing I/O. + */ public static void writeObject(DataOutput out, Object instance, Class declaredClass, Configuration conf) throws IOException { @@ -137,6 +154,13 @@ public static void writeObject(DataOutput out, Object instance, * usages, to preserve the ability to interchange files with other clusters * that may not be running the same version of software. Sometime in ~2013 * we can consider removing this parameter and always using the compact format. + * + * @param conf configuration + * @param out dataoutput + * @param declaredClass declaredClass + * @param instance instance + * @throws IOException raised on errors performing I/O. + * */ public static void writeObject(DataOutput out, Object instance, Class declaredClass, Configuration conf, boolean allowCompactArrays) @@ -210,15 +234,30 @@ public static void writeObject(DataOutput out, Object instance, } - /** Read a {@link Writable}, {@link String}, primitive type, or an array of - * the preceding. */ + /** + * Read a {@link Writable}, {@link String}, primitive type, or an array of + * the preceding. + * + * @param conf configuration + * @param in DataInput + * @return Object + * @throws IOException raised on errors performing I/O. + */ public static Object readObject(DataInput in, Configuration conf) throws IOException { return readObject(in, null, conf); } - /** Read a {@link Writable}, {@link String}, primitive type, or an array of - * the preceding. */ + /** + * Read a {@link Writable}, {@link String}, primitive type, or an array of + * the preceding. + * + * @param in DataInput + * @param objectWritable objectWritable + * @param conf configuration + * @return Object + * @throws IOException raised on errors performing I/O. 
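The writeObject()/readObject() pair documented above carries the declared class alongside the value, so the reader needs no prior type knowledge. A minimal in-memory round-trip (the String payload is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.ObjectWritable;

    public class ObjectWritableDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        DataOutputBuffer out = new DataOutputBuffer();
        // Writes the class name, then the value itself.
        ObjectWritable.writeObject(out, "hello", String.class, conf);
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        String round = (String) ObjectWritable.readObject(in, conf);
        System.out.println(round);                  // prints hello
      }
    }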
+ */ @SuppressWarnings("unchecked") public static Object readObject(DataInput in, ObjectWritable objectWritable, Configuration conf) throws IOException { @@ -365,6 +404,10 @@ static Method getStaticProtobufMethod(Class declaredClass, String method, * Find and load the class with given name className by first finding * it in the specified conf. If the specified conf is null, * try load it directly. + * + * @param conf configuration + * @param className classname + * @return Class */ public static Class loadClass(Configuration conf, String className) { Class declaredClass = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java index 15a396dc2bf55..1a1175bba0934 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java @@ -77,12 +77,19 @@ private OutputBuffer(Buffer buffer) { this.buffer = buffer; } - /** Returns the current contents of the buffer. + /** + * Returns the current contents of the buffer. * Data is only valid to {@link #getLength()}. + * + * @return the current contents of the buffer. */ public byte[] getData() { return buffer.getData(); } - /** Returns the length of the valid data currently in the buffer. */ + /** + * Returns the length of the valid data currently in the buffer. + * @return the length of the valid data + * currently in the buffer. + */ public int getLength() { return buffer.getLength(); } /** Resets the buffer to empty. */ From 32949533f865f17cfc1668ca0f9c9e78b373eae6 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Sun, 15 May 2022 19:24:47 -0700 Subject: [PATCH 45/53] HADOOP-18229. Fix some java doc compilation 100+ warnings. --- .../org/apache/hadoop/io/GenericWritable.java | 2 +- .../org/apache/hadoop/io/IntWritable.java | 2 +- .../java/org/apache/hadoop/io/MD5Hash.java | 2 +- .../java/org/apache/hadoop/io/MapFile.java | 7 +- .../org/apache/hadoop/io/OutputBuffer.java | 9 +- .../org/apache/hadoop/io/ReadaheadPool.java | 2 +- .../org/apache/hadoop/io/SecureIOUtils.java | 18 ++ .../org/apache/hadoop/io/SequenceFile.java | 293 ++++++++++++------ .../java/org/apache/hadoop/io/SetFile.java | 17 +- 9 files changed, 255 insertions(+), 97 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java index 5abe1bcedd88f..6de927467e478 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java @@ -90,7 +90,7 @@ public abstract class GenericWritable implements Writable, Configurable { /** * Set the instance that is wrapped. * - * @param obj + * @param obj input obj. 
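GenericWritable.set() only accepts instances whose class appears in the subclass's getTypes() array. A minimal subclass sketch (MyGenericWritable is an illustrative name):

    import org.apache.hadoop.io.GenericWritable;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    public class MyGenericWritable extends GenericWritable {
      @SuppressWarnings("unchecked")
      private static final Class<? extends Writable>[] TYPES =
          new Class[] { IntWritable.class, Text.class };

      @Override
      protected Class<? extends Writable>[] getTypes() {
        return TYPES;
      }
    }

    // usage: new MyGenericWritable().set(new Text("x"));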
*/ public void set(Writable obj) { instance = obj; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java index b3a6a2920fc26..ffcf93946d06a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java @@ -38,7 +38,7 @@ public IntWritable() {} /** * Set the value of this IntWritable. - * @param value + * @param value input value. */ public void set(int value) { this.value = value; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java index 804d0470386aa..9e69483ba2876 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java @@ -56,7 +56,7 @@ public MD5Hash() { /** * Constructs an MD5Hash from a hex string. - * @param hex + * @param hex input hex. */ public MD5Hash(String hex) { setDigest(hex); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java index f48b9d81c3a6b..43b0d7acd4f2f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java @@ -904,7 +904,12 @@ public static void rename(FileSystem fs, String oldName, String newName) } } - /** Deletes the named map file. */ + /** + * Deletes the named map file. + * @param fs input fs. + * @param name input name. + * @throws IOException raised on errors performing I/O. + */ public static void delete(FileSystem fs, String name) throws IOException { Path dir = new Path(name); Path data = new Path(dir, DATA_FILE_NAME); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java index 1a1175bba0934..f80c0a71883d6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java @@ -92,13 +92,18 @@ private OutputBuffer(Buffer buffer) { */ public int getLength() { return buffer.getLength(); } - /** Resets the buffer to empty. */ + /** @return Resets the buffer to empty. */ public OutputBuffer reset() { buffer.reset(); return this; } - /** Writes bytes from a InputStream directly into the buffer. */ + /** + * Writes bytes from a InputStream directly into the buffer. + * @param in input in. + * @param length input length. + * @throws IOException raised on errors performing I/O. 
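OutputBuffer and InputBuffer are meant to be reused across records: write() fills the output buffer from a stream, and reset() points the input buffer at those bytes without copying. A short sketch (the payload is illustrative):

    import java.io.ByteArrayInputStream;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.io.InputBuffer;
    import org.apache.hadoop.io.OutputBuffer;

    public class BufferDemo {
      public static void main(String[] args) throws Exception {
        byte[] payload = "abc".getBytes(StandardCharsets.UTF_8);
        OutputBuffer out = new OutputBuffer();
        out.write(new ByteArrayInputStream(payload), payload.length);
        InputBuffer in = new InputBuffer();
        in.reset(out.getData(), out.getLength());   // no copy
        System.out.println((char) in.read());       // 'a'
        System.out.println(in.getPosition());       // 1
      }
    }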
+ */ public void write(InputStream in, int length) throws IOException { buffer.write(in, length); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java index 65e751eca417e..2a6fafce545f3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java @@ -50,7 +50,7 @@ public class ReadaheadPool { private static ReadaheadPool instance; /** - * Return the singleton instance for the current process. + * @return Return the singleton instance for the current process. */ public static ReadaheadPool getInstance() { synchronized (ReadaheadPool.class) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java index 016daf9f352c1..b7dbb0bb79e49 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java @@ -116,6 +116,12 @@ public static RandomAccessFile openForRandomRead(File f, /** * Same as openForRandomRead except that it will run even if security is off. * This is used by unit tests. + * + * @param f input f. + * @param mode input mode. + * @param expectedOwner input expectedOwner. + * @param expectedGroup input expectedGroup. + * @throws IOException raised on errors performing I/O. */ @VisibleForTesting protected static RandomAccessFile forceSecureOpenForRandomRead(File f, @@ -145,6 +151,7 @@ protected static RandomAccessFile forceSecureOpenForRandomRead(File f, * @param expectedGroup the expected group owner for the file * @throws IOException if an IO Error occurred or the user/group does not * match if security is enabled + * @return FSDataInputStream */ public static FSDataInputStream openFSDataInputStream(File file, String expectedOwner, String expectedGroup) throws IOException { @@ -157,6 +164,12 @@ public static FSDataInputStream openFSDataInputStream(File file, /** * Same as openFSDataInputStream except that it will run even if security is * off. This is used by unit tests. + * + * @param file input file. + * @param expectedOwner input expectedOwner. + * @param expectedGroup input expectedGroup. + * @throws IOException raised on errors performing I/O. + * @return FSDataInputStream */ @VisibleForTesting protected static FSDataInputStream forceSecureOpenFSDataInputStream( @@ -203,6 +216,10 @@ public static FileInputStream openForRead(File f, String expectedOwner, /** * Same as openForRead() except that it will run even if security is off. * This is used by unit tests. + * @param f input f. + * @param expectedOwner input expectedOwner. + * @param expectedGroup input expectedGroup. + * @throws IOException raised on errors performing I/O. 
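For the openForRead() contract documented above, a hedged usage sketch; the path and the expected owner/group strings are illustrative, and as noted in the javadoc the ownership check adds nothing when Hadoop security is disabled:

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.SecureIOUtils;

    public class SecureReadDemo {
      public static void main(String[] args) throws IOException {
        // Fails if the file is not owned by the expected user/group
        // (when security is enabled).
        try (FileInputStream in = SecureIOUtils.openForRead(
            new File("/var/log/app/task.log"), "hadoop", "hadoop")) {
          System.out.println(in.read());
        }
      }
    }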
*/ @VisibleForTesting protected static FileInputStream forceSecureOpenForRead(File f, String expectedOwner, @@ -251,6 +268,7 @@ private static FileOutputStream insecureCreateForWrite(File f, * * @throws AlreadyExistsException if the file already exists * @throws IOException if any other error occurred + * @return createForWrite FileOutputStream */ public static FileOutputStream createForWrite(File f, int permissions) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java index 420fe51492a70..6e8fe6d33e3a1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java @@ -269,7 +269,7 @@ static public void setDefaultCompressionType(Configuration job, * @param conf the configuration to use * @param opts the options to create the file with * @return a new Writer - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static Writer createWriter(Configuration conf, Writer.Option... opts ) throws IOException { @@ -301,7 +301,7 @@ public static Writer createWriter(Configuration conf, Writer.Option... opts * @param keyClass The 'key' type. * @param valClass The 'value' type. * @return Returns the handle to the constructed SequenceFile Writer. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)} * instead. */ @@ -323,7 +323,7 @@ public static Writer createWriter(Configuration conf, Writer.Option... opts * @param valClass The 'value' type. * @param compressionType The compression type. * @return Returns the handle to the constructed SequenceFile Writer. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)} * instead. */ @@ -348,7 +348,7 @@ public static Writer createWriter(Configuration conf, Writer.Option... opts * @param compressionType The compression type. * @param progress The Progressable object to track progress. * @return Returns the handle to the constructed SequenceFile Writer. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)} * instead. */ @@ -375,7 +375,7 @@ public static Writer createWriter(Configuration conf, Writer.Option... opts * @param compressionType The compression type. * @param codec The compression codec. * @return Returns the handle to the constructed SequenceFile Writer. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)} * instead. */ @@ -403,7 +403,7 @@ public static Writer createWriter(Configuration conf, Writer.Option... opts * @param progress The Progressable object to track progress. * @param metadata The metadata of the file. * @return Returns the handle to the constructed SequenceFile Writer. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)} * instead. */ @@ -437,7 +437,7 @@ public static Writer createWriter(Configuration conf, Writer.Option... opts * @param progress The Progressable object to track progress. 
* @param metadata The metadata of the file. * @return Returns the handle to the constructed SequenceFile Writer. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)} * instead. */ @@ -475,7 +475,7 @@ public static Writer createWriter(Configuration conf, Writer.Option... opts * @param codec The compression codec. * @param metadata The metadata of the file. * @return Returns the handle to the constructed SequenceFile Writer. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Deprecated public static Writer @@ -508,7 +508,7 @@ public static Writer createWriter(Configuration conf, Writer.Option... opts * @param createFlag gives the semantics of create: overwrite, append etc. * @param opts file creation options; see {@link CreateOpts}. * @return Returns the handle to the constructed SequenceFile Writer. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static Writer createWriter(FileContext fc, Configuration conf, Path name, @@ -532,7 +532,7 @@ public static Writer createWriter(Configuration conf, Writer.Option... opts * @param codec The compression codec. * @param progress The Progressable object to track progress. * @return Returns the handle to the constructed SequenceFile Writer. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)} * instead. */ @@ -560,7 +560,7 @@ public static Writer createWriter(Configuration conf, Writer.Option... opts * @param codec The compression codec. * @param metadata The metadata of the file. * @return Returns the handle to the constructed SequenceFile Writer. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)} * instead. */ @@ -585,7 +585,7 @@ public static Writer createWriter(Configuration conf, Writer.Option... opts * @param compressionType The compression type. * @param codec The compression codec. * @return Returns the handle to the constructed SequenceFile Writer. - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)} * instead. */ @@ -603,22 +603,26 @@ public static Writer createWriter(Configuration conf, Writer.Option... opts /** The interface to 'raw' values of SequenceFiles. */ public static interface ValueBytes { - /** Writes the uncompressed bytes to the outStream. + /** + * Writes the uncompressed bytes to the outStream. * @param outStream : Stream to write uncompressed bytes into. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void writeUncompressedBytes(DataOutputStream outStream) throws IOException; - /** Write compressed bytes to outStream. + /** + * Write compressed bytes to outStream. * Note: that it will NOT compress the bytes if they are not compressed. * @param outStream : Stream to write compressed bytes into. + * @throws IllegalArgumentException an illegal or inappropriate argument. + * @throws IOException raised on errors performing I/O. */ public void writeCompressedBytes(DataOutputStream outStream) throws IllegalArgumentException, IOException; /** - * Size of stored data. + * @return Size of stored data. 
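All of the deprecated createWriter() overloads above funnel into the option-based createWriter(Configuration, Writer.Option...) form. A minimal writer sketch (the path, key/value types, and compression choice are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.SequenceFile.Writer;
    import org.apache.hadoop.io.Text;

    public class SeqWriteDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        try (Writer writer = SequenceFile.createWriter(conf,
            Writer.file(new Path("/tmp/demo.seq")),
            Writer.keyClass(IntWritable.class),
            Writer.valueClass(Text.class),
            Writer.compression(SequenceFile.CompressionType.BLOCK))) {
          writer.append(new IntWritable(1), new Text("one"));
        }
      }
    }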
*/ public int getSize(); } @@ -1190,10 +1194,17 @@ public static Option syncInterval(int value) { codec, metadata, syncInterval); } - /** Create the named file. + /** + * Create the named file. * @deprecated Use * {@link SequenceFile#createWriter(Configuration, Writer.Option...)} * instead. + * @param fs input filesystem. + * @param conf input configuration. + * @param name input name. + * @param keyClass input keyClass. + * @param valClass input valClass. + * @throws IOException raised on errors performing I/O. */ @Deprecated public Writer(FileSystem fs, Configuration conf, Path name, @@ -1203,10 +1214,19 @@ public Writer(FileSystem fs, Configuration conf, Path name, new Metadata(), SYNC_INTERVAL); } - /** Create the named file with write-progress reporter. + /** + * Create the named file with write-progress reporter. * @deprecated Use * {@link SequenceFile#createWriter(Configuration, Writer.Option...)} * instead. + * @param fs input filesystem. + * @param conf input configuration. + * @param name input name. + * @param keyClass input keyClass. + * @param valClass input valClass. + * @param progress input progress. + * @param metadata input metadata. + * @throws IOException raised on errors performing I/O. */ @Deprecated public Writer(FileSystem fs, Configuration conf, Path name, @@ -1217,10 +1237,22 @@ public Writer(FileSystem fs, Configuration conf, Path name, null, metadata, SYNC_INTERVAL); } - /** Create the named file with write-progress reporter. + /** + * Create the named file with write-progress reporter. * @deprecated Use * {@link SequenceFile#createWriter(Configuration, Writer.Option...)} * instead. + * @param fs input filesystem. + * @param conf input configuration. + * @param name input name. + * @param keyClass input keyClass. + * @param valClass input valClass. + * @param bufferSize input bufferSize. + * @param replication input replication. + * @param blockSize input blockSize. + * @param progress input progress. + * @param metadata input metadata. + * @throws IOException raised on errors performing I/O. */ @Deprecated public Writer(FileSystem fs, Configuration conf, Path name, @@ -1321,16 +1353,19 @@ void init(Configuration config, FSDataOutputStream outStream, } } - /** Returns the class of keys in this file. */ + /** @return Returns the class of keys in this file. */ public Class getKeyClass() { return keyClass; } - /** Returns the class of values in this file. */ + /** @return Returns the class of values in this file. */ public Class getValueClass() { return valClass; } - /** Returns the compression codec of data in this file. */ + /** @return Returns the compression codec of data in this file. */ public CompressionCodec getCompressionCodec() { return codec; } - /** create a sync point */ + /** + * create a sync point. + * @throws IOException raised on errors performing I/O. + */ public void sync() throws IOException { if (sync != null && lastSyncPos != out.getPos()) { out.writeInt(SYNC_ESCAPE); // mark the start of the sync @@ -1340,8 +1375,9 @@ public void sync() throws IOException { } /** - * flush all currently written data to the file system + * flush all currently written data to the file system. * @deprecated Use {@link #hsync()} or {@link #hflush()} instead + * @throws IOException raised on errors performing I/O. */ @Deprecated public void syncFs() throws IOException { @@ -1413,13 +1449,23 @@ synchronized void checkAndWriteSync() throws IOException { } } - /** Append a key/value pair. */ + /** + * Append a key/value pair. + * @param key input Writable key. 
+ * @param val input Writable val. + * @throws IOException raised on errors performing I/O. + */ public void append(Writable key, Writable val) throws IOException { append((Object) key, (Object) val); } - /** Append a key/value pair. */ + /** + * Append a key/value pair. + * @param key input Object key. + * @param val input Object val. + * @throws IOException raised on errors performing I/O. + */ @SuppressWarnings("unchecked") public synchronized void append(Object key, Object val) throws IOException { @@ -1470,14 +1516,16 @@ public synchronized void appendRaw(byte[] keyData, int keyOffset, val.writeUncompressedBytes(out); // value } - /** Returns the current length of the output file. + /** @return Returns the current length of the output file. * *
<p>
This always returns a synchronized position. In other words, * immediately after calling {@link SequenceFile.Reader#seek(long)} with a position * returned by this method, {@link SequenceFile.Reader#next(Writable)} may be called. However * the key may be earlier in the file than key last written when this * method was called (e.g., with block-compression, it may be the first key - * in the block that was being written when this method was called). + * in the block that was being written when this method was called).
</p>
+ * + * @throws IOException raised on errors performing I/O. */ public synchronized long getLength() throws IOException { return out.getPos(); @@ -1888,7 +1936,7 @@ public Reader(Configuration conf, Option... opts) throws IOException { * @param fs The file system used to open the file. * @param file The file being read. * @param conf Configuration - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated Use Reader(Configuration, Option...) instead. */ @Deprecated @@ -1904,7 +1952,7 @@ public Reader(FileSystem fs, Path file, * @param start The starting position. * @param length The length being read. * @param conf Configuration - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated Use Reader(Configuration, Reader.Option...) instead. */ @Deprecated @@ -1949,7 +1997,7 @@ private void initialize(Path filename, FSDataInputStream in, * @param length The length being read if it is {@literal >=} 0. * Otherwise, the length is not available. * @return The opened stream. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ protected FSDataInputStream openFile(FileSystem fs, Path file, int bufferSize, long length) throws IOException { @@ -2139,12 +2187,12 @@ public synchronized void close() throws IOException { in.close(); } - /** Returns the name of the key class. */ + /** @return Returns the name of the key class. */ public String getKeyClassName() { return keyClassName; } - /** Returns the class of keys in this file. */ + /** @return Returns the class of keys in this file. */ public synchronized Class getKeyClass() { if (null == keyClass) { try { @@ -2156,12 +2204,12 @@ public synchronized Class getKeyClass() { return keyClass; } - /** Returns the name of the value class. */ + /** @return Returns the name of the value class. */ public String getValueClassName() { return valClassName; } - /** Returns the class of values in this file. */ + /** @return Returns the class of values in this file. */ public synchronized Class getValueClass() { if (null == valClass) { try { @@ -2362,7 +2410,7 @@ public synchronized void getCurrentValue(Writable val) /** * Get the 'value' corresponding to the last read 'key'. * @param val : The 'value' to be read. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public synchronized Object getCurrentValue(Object val) throws IOException { @@ -2409,7 +2457,7 @@ private Object deserializeValue(Object val) throws IOException { * value.True if another entry exists, and false at end of file. * * @param key key - * + * @throws IOException raised on errors performing I/O. */ public synchronized boolean next(Writable key) throws IOException { if (key.getClass() != getKeyClass()) @@ -2457,9 +2505,16 @@ public synchronized boolean next(Writable key) throws IOException { return true; } - /** Read the next key/value pair in the file into key and - * val. Returns true if such a pair exists and false when at - * end of file */ + /** + * Read the next key/value pair in the file into key and + * val. + * @return Returns true if such a pair exists and false when at + * end of file. + * + * @param key input key. + * @param val input val. + * @throws IOException raised on errors performing I/O. 
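The next(key, val) form documented above reuses caller-supplied instances, so a full scan allocates nothing per record. A minimal read loop (same illustrative path as the writer sketch earlier):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;

    public class SeqReadDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        try (SequenceFile.Reader reader = new SequenceFile.Reader(conf,
            SequenceFile.Reader.file(new Path("/tmp/demo.seq")))) {
          IntWritable key = new IntWritable();
          Text val = new Text();
          while (reader.next(key, val)) {           // false at end of file
            System.out.println(key.get() + "\t" + val);
          }
        }
      }
    }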
+ */ public synchronized boolean next(Writable key, Writable val) throws IOException { if (val.getClass() != getValueClass()) @@ -2543,7 +2598,7 @@ public ValueBytes createValueBytes() { * @param key - The buffer into which the key is read * @param val - The 'raw' value * @return Returns the total record length or -1 for end of file - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public synchronized int nextRaw(DataOutputBuffer key, ValueBytes val) throws IOException { @@ -2602,7 +2657,7 @@ public synchronized int nextRaw(DataOutputBuffer key, ValueBytes val) * Read 'raw' keys. * @param key - The buffer into which the key is read * @return Returns the key length or -1 for end of file - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public synchronized int nextRawKey(DataOutputBuffer key) throws IOException { @@ -2641,8 +2696,14 @@ public synchronized int nextRawKey(DataOutputBuffer key) } - /** Read the next key in the file, skipping its - * value. Return null at end of file. */ + /** + * Read the next key in the file, skipping its + * value. + * + * @param key input Object key. + * @throws IOException raised on errors performing I/O. + * @return Return null at end of file. + */ public synchronized Object next(Object key) throws IOException { if (key != null && key.getClass() != getKeyClass()) { throw new IOException("wrong key class: "+key.getClass().getName() @@ -2699,7 +2760,7 @@ private Object deserializeKey(Object key) throws IOException { * Read 'raw' values. * @param val - The 'raw' value * @return Returns the value length - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public synchronized int nextRawValue(ValueBytes val) throws IOException { @@ -2739,16 +2800,20 @@ private void handleChecksumException(ChecksumException e) } } - /** disables sync. often invoked for tmp files */ + /** disables sync. often invoked for tmp files. */ synchronized void ignoreSync() { sync = null; } - /** Set the current byte position in the input file. + /** + * Set the current byte position in the input file. * *
<p>
The position passed must be a position returned by {@link * SequenceFile.Writer#getLength()} when writing this file. To seek to an arbitrary - * position, use {@link SequenceFile.Reader#sync(long)}. + * position, use {@link SequenceFile.Reader#sync(long)}.
</p>
+ * + * @param position input position. + * @throws IOException raised on errors performing I/O. */ public synchronized void seek(long position) throws IOException { in.seek(position); @@ -2758,7 +2823,11 @@ public synchronized void seek(long position) throws IOException { } } - /** Seek to the next sync mark past a given position.*/ + /** + * Seek to the next sync mark past a given position. + * @param position position. + * @throws IOException raised on errors performing I/O. + */ public synchronized void sync(long position) throws IOException { if (position+SYNC_SIZE >= end) { seek(end); @@ -2794,10 +2863,13 @@ public synchronized void sync(long position) throws IOException { } } - /** Returns true iff the previous call to next passed a sync mark.*/ + /** @return Returns true iff the previous call to next passed a sync mark.*/ public synchronized boolean syncSeen() { return syncSeen; } - /** Return the current byte position in the input file. */ + /** + * @return Return the current byte position in the input file. + * @throws IOException raised on errors performing I/O. + */ public synchronized long getPosition() throws IOException { return in.getPos(); } @@ -2839,19 +2911,40 @@ public static class Sorter { private Progressable progressable = null; - /** Sort and merge files containing the named classes. */ + /** + * Sort and merge files containing the named classes. + * @param fs input FileSystem. + * @param keyClass input keyClass. + * @param valClass input valClass. + * @param conf input Configuration. + */ public Sorter(FileSystem fs, Class keyClass, Class valClass, Configuration conf) { this(fs, WritableComparator.get(keyClass, conf), keyClass, valClass, conf); } - /** Sort and merge using an arbitrary {@link RawComparator}. */ + /** + * Sort and merge using an arbitrary {@link RawComparator}. + * @param fs input FileSystem. + * @param comparator input RawComparator. + * @param keyClass input keyClass. + * @param valClass input valClass. + * @param conf input Configuration. + */ public Sorter(FileSystem fs, RawComparator comparator, Class keyClass, Class valClass, Configuration conf) { this(fs, comparator, keyClass, valClass, conf, new Metadata()); } - /** Sort and merge using an arbitrary {@link RawComparator}. */ + /** + * Sort and merge using an arbitrary {@link RawComparator}. + * @param fs input FileSystem. + * @param comparator input RawComparator. + * @param keyClass input keyClass. + * @param valClass input valClass. + * @param conf input Configuration. + * @param metadata input metadata. + */ @SuppressWarnings("deprecation") public Sorter(FileSystem fs, RawComparator comparator, Class keyClass, Class valClass, Configuration conf, Metadata metadata) { @@ -2880,19 +2973,28 @@ public Sorter(FileSystem fs, RawComparator comparator, Class keyClass, this.metadata = metadata; } - /** Set the number of streams to merge at once.*/ + /** + * Set the number of streams to merge at once. + * @param factor factor. + */ public void setFactor(int factor) { this.factor = factor; } - /** Get the number of streams to merge at once.*/ + /** @return Get the number of streams to merge at once.*/ public int getFactor() { return factor; } - /** Set the total amount of buffer memory, in bytes.*/ + /** + * Set the total amount of buffer memory, in bytes. + * @param memory buffer memory. 
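The seek()/getLength() contract above is easy to get wrong: seek() only accepts positions previously returned by Writer#getLength(), while sync() accepts any byte offset and advances to the next sync marker. A sketch exercising both (the path is illustrative, and the file is written without compression):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.SequenceFile.Writer;
    import org.apache.hadoop.io.Text;

    public class SeekDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path path = new Path("/tmp/seek-demo.seq");
        long pos;
        try (Writer writer = SequenceFile.createWriter(conf,
            Writer.file(path),
            Writer.keyClass(IntWritable.class),
            Writer.valueClass(Text.class))) {
          writer.append(new IntWritable(1), new Text("one"));
          pos = writer.getLength();                 // a synchronized position
          writer.append(new IntWritable(2), new Text("two"));
        }
        try (SequenceFile.Reader reader = new SequenceFile.Reader(conf,
            SequenceFile.Reader.file(path))) {
          reader.seek(pos);                         // legal: pos came from getLength()
          IntWritable key = new IntWritable();
          Text val = new Text();
          reader.next(key, val);                    // reads key 2
          // For an arbitrary offset, reader.sync(offset) skips to the next sync mark.
        }
      }
    }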
+ */ public void setMemory(int memory) { this.memory = memory; } /** Get the total amount of buffer memory, in bytes.*/ public int getMemory() { return memory; } - /** Set the progressable object in order to report progress. */ + /** + * Set the progressable object in order to report progress. + * @param progressable input Progressable. + */ public void setProgressable(Progressable progressable) { this.progressable = progressable; } @@ -2902,6 +3004,7 @@ public void setProgressable(Progressable progressable) { * @param inFiles the files to be sorted * @param outFile the sorted output file * @param deleteInput should the input files be deleted as they are read? + * @throws IOException raised on errors performing I/O. */ public void sort(Path[] inFiles, Path outFile, boolean deleteInput) throws IOException { @@ -2924,6 +3027,7 @@ public void sort(Path[] inFiles, Path outFile, * @param tempDir the directory where temp files are created during sort * @param deleteInput should the input files be deleted as they are read? * @return iterator the RawKeyValueIterator + * @throws IOException raised on errors performing I/O. */ public RawKeyValueIterator sortAndIterate(Path[] inFiles, Path tempDir, boolean deleteInput) throws IOException { @@ -2949,8 +3053,9 @@ else if (segments == 1) /** * The backwards compatible interface to sort. - * @param inFile the input file to sort - * @param outFile the sorted output file + * @param inFile the input file to sort. + * @param outFile the sorted output file. + * @throws IOException raised on errors performing I/O. */ public void sort(Path inFile, Path outFile) throws IOException { sort(new Path[]{inFile}, outFile, false); @@ -3168,27 +3273,32 @@ public void setProgressable(Progressable progressable) /** The interface to iterate over raw keys/values of SequenceFiles. */ public static interface RawKeyValueIterator { - /** Gets the current raw key + /** + * Gets the current raw key. * @return DataOutputBuffer - * @throws IOException + * @throws IOException raised on errors performing I/O. */ DataOutputBuffer getKey() throws IOException; - /** Gets the current raw value + /** + * Gets the current raw value. * @return ValueBytes - * @throws IOException + * @throws IOException raised on errors performing I/O. */ ValueBytes getValue() throws IOException; - /** Sets up the current key and value (for getKey and getValue) + /** + * Sets up the current key and value (for getKey and getValue). * @return true if there exists a key/value, false otherwise - * @throws IOException + * @throws IOException raised on errors performing I/O. */ boolean next() throws IOException; - /** closes the iterator so that the underlying streams can be closed - * @throws IOException + /** + * closes the iterator so that the underlying streams can be closed. + * @throws IOException raised on errors performing I/O. */ void close() throws IOException; - /** Gets the Progress object; this has a float (0.0 - 1.0) - * indicating the bytes processed by the iterator so far + /** + * @return Gets the Progress object; this has a float (0.0 - 1.0) + * indicating the bytes processed by the iterator so far. */ Progress getProgress(); } @@ -3198,7 +3308,7 @@ public static interface RawKeyValueIterator { * @param segments the list of SegmentDescriptors * @param tmpDir the directory to write temporary files into * @return RawKeyValueIterator - * @throws IOException + * @throws IOException raised on errors performing I/O. 
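The Sorter knobs documented above bound the merge fan-in (factor) and the in-memory sort buffer (memory). A hedged sketch of the sort() entry point (paths and sizes are illustrative; the output path must not already exist):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;

    public class SortDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        SequenceFile.Sorter sorter = new SequenceFile.Sorter(
            fs, IntWritable.class, Text.class, conf);
        sorter.setFactor(100);                      // merge 100 streams at once
        sorter.setMemory(64 * 1024 * 1024);         // 64 MB sort buffer
        sorter.sort(new Path[] { new Path("/tmp/in1.seq"),
                                 new Path("/tmp/in2.seq") },
                    new Path("/tmp/sorted.seq"),
                    false);                         // keep the inputs
      }
    }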
*/ public RawKeyValueIterator merge(List segments, Path tmpDir) @@ -3216,7 +3326,7 @@ public RawKeyValueIterator merge(List segments, * unnecessary * @param tmpDir the directory to write temporary files into * @return RawKeyValueIteratorMergeQueue - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public RawKeyValueIterator merge(Path [] inNames, boolean deleteInputs, Path tmpDir) @@ -3234,7 +3344,7 @@ public RawKeyValueIterator merge(Path [] inNames, boolean deleteInputs, * @param factor the factor that will be used as the maximum merge fan-in * @param tmpDir the directory to write temporary files into * @return RawKeyValueIteratorMergeQueue - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public RawKeyValueIterator merge(Path [] inNames, boolean deleteInputs, int factor, Path tmpDir) @@ -3260,7 +3370,7 @@ public RawKeyValueIterator merge(Path [] inNames, boolean deleteInputs, * @param deleteInputs true if the input files should be deleted when * unnecessary * @return RawKeyValueIteratorMergeQueue - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public RawKeyValueIterator merge(Path [] inNames, Path tempDir, boolean deleteInputs) @@ -3291,7 +3401,7 @@ public RawKeyValueIterator merge(Path [] inNames, Path tempDir, * @param outputFile the path of the output file * @param prog the Progressable to report status during the file write * @return Writer - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public Writer cloneFileAttributes(Path inputFile, Path outputFile, Progressable prog) throws IOException { @@ -3313,10 +3423,10 @@ public Writer cloneFileAttributes(Path inputFile, Path outputFile, /** * Writes records from RawKeyValueIterator into a file represented by the - * passed writer + * passed writer. * @param records the RawKeyValueIterator * @param writer the Writer created earlier - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void writeFile(RawKeyValueIterator records, Writer writer) throws IOException { @@ -3666,10 +3776,13 @@ public SegmentDescriptor (long segmentOffset, long segmentLength, this.segmentPathName = segmentPathName; } - /** Do the sync checks */ + /** Do the sync checks. */ public void doSync() {ignoreSync = false;} - /** Whether to delete the files when no longer needed */ + /** + * Whether to delete the files when no longer needed. + * @param preserve input boolean preserve. + */ public void preserveInput(boolean preserve) { preserveInput = preserve; } @@ -3711,9 +3824,10 @@ public int hashCode() { return 37 * 17 + (int) (segmentOffset^(segmentOffset>>>32)); } - /** Fills up the rawKey object with the key returned by the Reader + /** + * Fills up the rawKey object with the key returned by the Reader * @return true if there is a key returned; false, otherwise - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public boolean nextRawKey() throws IOException { if (in == null) { @@ -3742,18 +3856,19 @@ public boolean nextRawKey() throws IOException { return (keyLength >= 0); } - /** Fills up the passed rawValue with the value corresponding to the key - * read earlier - * @param rawValue + /** + * Fills up the passed rawValue with the value corresponding to the key + * read earlier. + * @param rawValue input ValueBytes rawValue. * @return the length of the value - * @throws IOException + * @throws IOException raised on errors performing I/O. 
*/ public int nextRawValue(ValueBytes rawValue) throws IOException { int valLength = in.nextRawValue(rawValue); return valLength; } - /** Returns the stored rawKey */ + /** @return Returns the stored rawKey */ public DataOutputBuffer getKey() { return rawKey; } @@ -3764,8 +3879,10 @@ private void close() throws IOException { this.in = null; } - /** The default cleanup. Subclasses can override this with a custom - * cleanup + /** + * The default cleanup. Subclasses can override this with a custom + * cleanup. + * @throws IOException raised on errors performing I/O. */ public void cleanup() throws IOException { close(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java index 118cce75136ed..7486eacc3eb89 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java @@ -75,12 +75,25 @@ public void append(WritableComparable key) throws IOException{ /** Provide access to an existing set file. */ public static class Reader extends MapFile.Reader { - /** Construct a set reader for the named set.*/ + /** + * Construct a set reader for the named set. + * @param fs input FileSystem. + * @param dirName input dirName. + * @param conf input Configuration. + * @throws IOException raised on errors performing I/O. + */ public Reader(FileSystem fs, String dirName, Configuration conf) throws IOException { super(fs, dirName, conf); } - /** Construct a set reader for the named set using the named comparator.*/ + /** + * Construct a set reader for the named set using the named comparator. + * @param fs input FileSystem. + * @param dirName input dirName. + * @param comparator input comparator. + * @param conf input Configuration. + * @throws IOException raised on errors performing I/O. + */ public Reader(FileSystem fs, String dirName, WritableComparator comparator, Configuration conf) throws IOException { super(new Path(dirName), conf, comparator(comparator)); From 0ba0aa3229e284f65f76bd647abb0dcd33fec433 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Sun, 15 May 2022 21:20:45 -0700 Subject: [PATCH 46/53] HADOOP-18229. Fix some java doc compilation 100+ warnings. --- .../org/apache/hadoop/io/SecureIOUtils.java | 8 +- .../org/apache/hadoop/io/SequenceFile.java | 8 +- .../java/org/apache/hadoop/io/SetFile.java | 39 ++++++++-- .../org/apache/hadoop/io/ShortWritable.java | 7 +- .../main/java/org/apache/hadoop/io/Text.java | 74 ++++++++++++++++--- .../main/java/org/apache/hadoop/io/UTF8.java | 48 +++++++++--- 6 files changed, 148 insertions(+), 36 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java index b7dbb0bb79e49..23233f8f033fe 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java @@ -90,7 +90,7 @@ public class SecureIOUtils { private final static FileSystem rawFilesystem; /** - * Open the given File for random read access, verifying the expected user/ + * @return Open the given File for random read access, verifying the expected user/ * group constraints if security is enabled. 
* * Note that this function provides no additional security checks if hadoop @@ -114,7 +114,7 @@ public static RandomAccessFile openForRandomRead(File f, } /** - * Same as openForRandomRead except that it will run even if security is off. + * @return Same as openForRandomRead except that it will run even if security is off. * This is used by unit tests. * * @param f input f. @@ -195,7 +195,7 @@ protected static FSDataInputStream forceSecureOpenFSDataInputStream( * Open the given File for read access, verifying the expected user/group * constraints if security is enabled. * - * Note that this function provides no additional checks if Hadoop + * @return Note that this function provides no additional checks if Hadoop * security is disabled, since doing the checks would be too expensive * when native libraries are not available. * @@ -214,7 +214,7 @@ public static FileInputStream openForRead(File f, String expectedOwner, } /** - * Same as openForRead() except that it will run even if security is off. + * @return Same as openForRead() except that it will run even if security is off. * This is used by unit tests. * @param f input f. * @param expectedOwner input expectedOwner. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java index 6e8fe6d33e3a1..b7a4132abce85 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java @@ -2408,7 +2408,7 @@ public synchronized void getCurrentValue(Writable val) } /** - * Get the 'value' corresponding to the last read 'key'. + * @return Get the 'value' corresponding to the last read 'key'. * @param val : The 'value' to be read. * @throws IOException raised on errors performing I/O. */ @@ -2453,7 +2453,7 @@ private Object deserializeValue(Object val) throws IOException { } /** - * Read the next key in the file into key, skipping its + * @return Read the next key in the file into key, skipping its * value.True if another entry exists, and false at end of file. * * @param key key @@ -2988,7 +2988,7 @@ public Sorter(FileSystem fs, RawComparator comparator, Class keyClass, */ public void setMemory(int memory) { this.memory = memory; } - /** Get the total amount of buffer memory, in bytes.*/ + /** @return Get the total amount of buffer memory, in bytes.*/ public int getMemory() { return memory; } /** @@ -3440,7 +3440,7 @@ public void writeFile(RawKeyValueIterator records, Writer writer) /** Merge the provided files. * @param inFiles the array of input path names * @param outFile the final output file - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public void merge(Path[] inFiles, Path outFile) throws IOException { if (fs.exists(outFile)) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java index 7486eacc3eb89..f899daaa6cde4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java @@ -39,15 +39,29 @@ protected SetFile() {} // no public ctor */ public static class Writer extends MapFile.Writer { - /** Create the named set for keys of the named class. 
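A SetFile is a MapFile whose values are all NullWritable, so membership is the only query. A minimal sketch against the constructors documented here (the directory name is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.SetFile;

    public class SetFileDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        String dir = "/tmp/demo.set";
        SetFile.Writer writer = new SetFile.Writer(conf, fs, dir,
            IntWritable.class, SequenceFile.CompressionType.NONE);
        writer.append(new IntWritable(1));          // keys must strictly ascend
        writer.append(new IntWritable(5));
        writer.close();
        SetFile.Reader reader = new SetFile.Reader(fs, dir, conf);
        System.out.println(reader.seek(new IntWritable(5)));  // true: a member
        reader.close();
      }
    }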
- * @deprecated pass a Configuration too + /** + * Create the named set for keys of the named class. + * @deprecated pass a Configuration too + * @param fs input FileSystem. + * @param dirName input dirName. + * @param keyClass input keyClass. + * @throws IOException raised on errors performing I/O. */ public Writer(FileSystem fs, String dirName, Class keyClass) throws IOException { super(new Configuration(), fs, dirName, keyClass, NullWritable.class); } - /** Create a set naming the element class and compression type. */ + /** + * Create a set naming the element class and compression type. + * + * @param conf input Configuration. + * @param fs input FileSystem. + * @param dirName input dirName. + * @param keyClass input keyClass. + * @param compress input compress. + * @throws IOException raised on errors performing I/O. + */ public Writer(Configuration conf, FileSystem fs, String dirName, Class keyClass, SequenceFile.CompressionType compress) @@ -55,7 +69,16 @@ public Writer(Configuration conf, FileSystem fs, String dirName, this(conf, fs, dirName, WritableComparator.get(keyClass, conf), compress); } - /** Create a set naming the element comparator and compression type. */ + /** + * Create a set naming the element comparator and compression type. + * + * @param conf input Configuration. + * @param fs input FileSystem. + * @param dirName input dirName. + * @param comparator input comparator. + * @param compress input compress. + * @throws IOException raised on errors performing I/O. + */ public Writer(Configuration conf, FileSystem fs, String dirName, WritableComparator comparator, SequenceFile.CompressionType compress) throws IOException { @@ -65,8 +88,12 @@ public Writer(Configuration conf, FileSystem fs, String dirName, compression(compress)); } - /** Append a key to a set. The key must be strictly greater than the - * previous key added to the set. */ + /** + * Append a key to a set. The key must be strictly greater than the + * previous key added to the set. + * @param key input key. + * @throws IOException raised on errors performing I/O. + */ public void append(WritableComparable key) throws IOException{ append(key, NullWritable.get()); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java index be09df18017b7..96e6cacae8773 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java @@ -38,12 +38,15 @@ public ShortWritable(short value) { set(value); } - /** Set the value of this ShortWritable. */ + /** + * Set the value of this ShortWritable. + * @param value input value. + */ public void set(short value) { this.value = value; } - /** Return the value of this ShortWritable. */ + /** @return Return the value of this ShortWritable. */ public short get() { return value; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java index 5ca7f3c84cab0..cb2905445910e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java @@ -90,6 +90,7 @@ public Text() { /** * Construct from a string. + * @param string input string. 
*/ public Text(String string) { set(string); @@ -97,6 +98,7 @@ public Text(String string) { /** * Construct from another text. + * @param utf8 input utf8. */ public Text(Text utf8) { set(utf8); @@ -104,13 +106,15 @@ public Text(Text utf8) { /** * Construct from a byte array. + * + * @param utf8 input utf8. */ public Text(byte[] utf8) { set(utf8); } /** - * Get a copy of the bytes that is exactly the length of the data. + * @return Get a copy of the bytes that is exactly the length of the data. * See {@link #getBytes()} for faster access to the underlying array. */ public byte[] copyBytes() { @@ -136,7 +140,7 @@ public int getLength() { } /** - * Returns the length of this text. The length is equal to the number of + * @return Returns the length of this text. The length is equal to the number of * Unicode code units in the text. */ public int getTextLength() { @@ -149,7 +153,9 @@ public int getTextLength() { /** * Returns the Unicode Scalar Value (32-bit integer value) * for the character at position. Note that this - * method avoids using the converter or doing String instantiation + * method avoids using the converter or doing String instantiation. + * + * @param position input position. * @return the Unicode scalar value at position or -1 * if the position is invalid or points to a * trailing byte @@ -172,6 +178,9 @@ public int find(String what) { * position is measured in bytes and the return value is in * terms of byte position in the buffer. The backing buffer is * not converted to a string for this operation. + * + * @param what input what. + * @param start input start. * @return byte position of the first occurrence of the search * string in the UTF-8 buffer or -1 if not found */ @@ -213,6 +222,8 @@ public int find(String what, int start) { /** * Set to contain the contents of a string. + * + * @param string input string. */ public void set(String string) { try { @@ -229,6 +240,8 @@ public void set(String string) { * Set to a utf8 byte array. If the length of utf8 is * zero, actually clear {@link #bytes} and any existing * data is lost. + * + * @param utf8 input utf8. */ public void set(byte[] utf8) { if (utf8.length == 0) { @@ -242,6 +255,7 @@ public void set(byte[] utf8) { /** * Copy a text. + * @param other other. */ public void set(Text other) { set(other.getBytes(), 0, other.getLength()); @@ -349,6 +363,8 @@ public void readFields(DataInput in, int maxLength) throws IOException { /** * Skips over one Text in the input. + * @param in input in. + * @throws IOException raised on errors performing I/O. */ public static void skip(DataInput in) throws IOException { int length = WritableUtils.readVInt(in); @@ -359,6 +375,10 @@ public static void skip(DataInput in) throws IOException { * Read a Text object whose length is already known. * This allows creating Text from a stream which uses a different serialization * format. + * + * @param in input in. + * @param len input len. + * @throws IOException raised on errors performing I/O. */ public void readWithKnownLength(DataInput in, int len) throws IOException { ensureCapacity(len); @@ -426,9 +446,13 @@ public int compare(byte[] b1, int s1, int l1, /// STATIC UTILITIES FROM HERE DOWN /** - * Converts the provided byte array to a String using the + * @return Converts the provided byte array to a String using the * UTF-8 encoding. If the input is malformed, * replace by a default value. + * + * @param utf8 input utf8. + * @throws CharacterCodingException when a character + * encoding or decoding error occurs. 
*/ public static String decode(byte[] utf8) throws CharacterCodingException { return decode(ByteBuffer.wrap(utf8), true); @@ -440,11 +464,18 @@ public static String decode(byte[] utf8, int start, int length) } /** - * Converts the provided byte array to a String using the + * @return Converts the provided byte array to a String using the * UTF-8 encoding. If replace is true, then * malformed input is replaced with the * substitution character, which is U+FFFD. Otherwise the * method throws a MalformedInputException. + * + * @param utf8 input utf8. + * @param start input start. + * @param length input length. + * @param replace input replace. + * @throws CharacterCodingException when a character + * encoding or decoding error occurs. */ public static String decode(byte[] utf8, int start, int length, boolean replace) throws CharacterCodingException { @@ -474,6 +505,8 @@ private static String decode(ByteBuffer utf8, boolean replace) * invalid chars are replaced by a default value. * @return ByteBuffer: bytes stores at ByteBuffer.array() * and length is ByteBuffer.limit() + * @throws CharacterCodingException when a character + * encoding or decoding error occurs. */ public static ByteBuffer encode(String string) @@ -487,8 +520,13 @@ public static ByteBuffer encode(String string) * malformed input is replaced with the * substitution character, which is U+FFFD. Otherwise the * method throws a MalformedInputException. + * + * @param string input string. + * @param replace input replace. * @return ByteBuffer: bytes stores at ByteBuffer.array() * and length is ByteBuffer.limit() + * @throws CharacterCodingException when a character + * encoding or decoding error occurs. */ public static ByteBuffer encode(String string, boolean replace) throws CharacterCodingException { @@ -508,13 +546,20 @@ public static ByteBuffer encode(String string, boolean replace) static final public int DEFAULT_MAX_LEN = 1024 * 1024; - /** Read a UTF8 encoded string from in + /** + * @return Read a UTF8 encoded string from in. + * @param in input in. + * @throws IOException raised on errors performing I/O. */ public static String readString(DataInput in) throws IOException { return readString(in, Integer.MAX_VALUE); } - /** Read a UTF8 encoded string with a maximum size + /** + * @return Read a UTF8 encoded string with a maximum size. + * @param in input datainput. + * @param maxLength input maxLength. + * @throws IOException raised on errors performing I/O. */ public static String readString(DataInput in, int maxLength) throws IOException { @@ -526,6 +571,10 @@ public static String readString(DataInput in, int maxLength) /** * Write a UTF8 encoded string to out. + * + * @param out input out. + * @param s input s. + * @throws IOException raised on errors performing I/O. */ public static int writeString(DataOutput out, String s) throws IOException { ByteBuffer bytes = encode(s); @@ -536,7 +585,12 @@ public static int writeString(DataOutput out, String s) throws IOException { } /** - * Write a UTF8 encoded string with a maximum size to out. + * @return Write a UTF8 encoded string with a maximum size to out. + * + * @param out input out. + * @param s input s. + * @param maxLength input maxLength. + * @throws IOException raised on errors performing I/O. 
*/ public static int writeString(DataOutput out, String s, int maxLength) throws IOException { @@ -670,9 +724,11 @@ public static void validateUTF8(byte[] utf8, int start, int len) 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5 }; /** - * Returns the next code point at the current position in + * @return Returns the next code point at the current position in * the buffer. The buffer's position will be incremented. * Any mark set on this buffer will be changed by this method! + * + * @param bytes input bytes. */ public static int bytesToCodePoint(ByteBuffer bytes) { bytes.mark(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java index f5d33a13005d7..a4bdffdcd8a79 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java @@ -63,27 +63,36 @@ public UTF8() { //set(""); } - /** Construct from a given string. */ + /** + * Construct from a given string. + * @param string input string. + */ public UTF8(String string) { set(string); } - /** Construct from a given string. */ + /** + * Construct from a given string. + * @param utf8 input utf8. + */ public UTF8(UTF8 utf8) { set(utf8); } - /** The raw bytes. */ + /** @return The raw bytes. */ public byte[] getBytes() { return bytes; } - /** The number of bytes in the encoded string. */ + /** @return The number of bytes in the encoded string. */ public int getLength() { return length; } - /** Set to contain the contents of a string. */ + /** + * Set to contain the contents of a string. + * @param string input string. + */ public void set(String string) { if (string.length() > 0xffff/3) { // maybe too long LOG.warn("truncating long string: " + string.length() @@ -108,7 +117,10 @@ public void set(String string) { } } - /** Set to contain the contents of a string. */ + /** + * Set to contain the contents of a string. + * @param other input other. + */ public void set(UTF8 other) { length = other.length; if (bytes == null || length > bytes.length) // grow buffer @@ -124,7 +136,11 @@ public void readFields(DataInput in) throws IOException { in.readFully(bytes, 0, length); } - /** Skips over one UTF8 in the input. */ + /** + * Skips over one UTF8 in the input. + * @param in datainput + * @throws IOException raised on errors performing I/O. + */ public static void skip(DataInput in) throws IOException { int length = in.readUnsignedShort(); WritableUtils.skipFully(in, length); @@ -214,8 +230,10 @@ public int compare(byte[] b1, int s1, int l1, /// These are probably not used much anymore, and might be removed... - /** Convert a string to a UTF-8 encoded byte array. + /** + * @return Convert a string to a UTF-8 encoded byte array. * @see String#getBytes(String) + * @param string input string. */ public static byte[] getBytes(String string) { byte[] result = new byte[utf8Length(string)]; @@ -231,8 +249,9 @@ public static byte[] getBytes(String string) { } /** - * Convert a UTF-8 encoded byte array back into a string. + * @return Convert a UTF-8 encoded byte array back into a string. * + * @param bytes input bytes * @throws IOException if the byte array is invalid UTF8 */ public static String fromBytes(byte[] bytes) throws IOException { @@ -243,9 +262,12 @@ public static String fromBytes(byte[] bytes) throws IOException { return buf.toString(); } - /** Read a UTF-8 encoded string. 
+ /** + * @return Read a UTF-8 encoded string. * * @see DataInput#readUTF() + * @param in DataInput. + * @throws IOException raised on errors performing I/O. */ public static String readString(DataInput in) throws IOException { int bytes = in.readUnsignedShort(); @@ -318,9 +340,13 @@ private static char lowSurrogate(int codePoint) { return (char) ((codePoint & 0x3ff) + Character.MIN_LOW_SURROGATE); } - /** Write a UTF-8 encoded string. + /** + * @return Write a UTF-8 encoded string. * * @see DataOutput#writeUTF(String) + * @param out input out. + * @param s input s. + * @throws IOException raised on errors performing I/O. */ public static int writeString(DataOutput out, String s) throws IOException { if (s.length() > 0xffff/3) { // maybe too long From d4a7d6c6cd4022c74eef33968889fb4ac0c4a662 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Sun, 15 May 2022 22:47:29 -0700 Subject: [PATCH 47/53] HADOOP-18229. Fix some java doc compilation 200+ warnings. --- .../hadoop/fs/permission/FsPermission.java | 9 ++-- .../io/compress/lz4/Lz4Decompressor.java | 2 +- .../apache/hadoop/io/nativeio/NativeIO.java | 42 +++++++++++++--- .../main/java/org/apache/hadoop/ipc/RPC.java | 32 +++++++++--- .../org/apache/hadoop/ipc/RpcClientUtil.java | 6 ++- .../apache/hadoop/ipc/RpcServerException.java | 4 +- .../apache/hadoop/ipc/VersionedProtocol.java | 1 + .../apache/hadoop/ipc/WritableRpcEngine.java | 50 +++++++++++++++++-- .../DecayRpcSchedulerDetailedMetrics.java | 7 ++- .../java/org/apache/hadoop/log/LogLevel.java | 2 + .../hadoop/log/LogThrottlingHelper.java | 8 +-- .../metrics2/lib/MutableMetricsFactory.java | 4 +- .../metrics2/lib/MutableRollingAverages.java | 4 +- .../hadoop/metrics2/lib/MutableStat.java | 2 +- .../apache/hadoop/metrics2/package-info.java | 10 ++-- .../main/java/org/apache/hadoop/net/DNS.java | 10 +++- .../apache/hadoop/net/DNSToSwitchMapping.java | 2 + .../apache/hadoop/net/DomainNameResolver.java | 13 +++-- .../java/org/apache/hadoop/net/InnerNode.java | 5 +- .../org/apache/hadoop/net/InnerNodeImpl.java | 15 ++++-- .../apache/hadoop/net/NetworkTopology.java | 24 +++++---- .../apache/hadoop/net/ScriptBasedMapping.java | 8 +-- .../net/ScriptBasedMappingWithDependency.java | 5 +- .../apache/hadoop/net/SocketInputStream.java | 8 +-- .../apache/hadoop/net/SocketOutputStream.java | 13 +++-- .../hadoop/service/AbstractService.java | 2 +- .../launcher/AbstractLaunchableService.java | 2 + .../service/launcher/ServiceLauncher.java | 7 +-- .../hadoop/service/launcher/package-info.java | 27 +++++----- .../hadoop/util/GenericOptionsParser.java | 15 +++--- 30 files changed, 237 insertions(+), 102 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java index c416e5f41a2e6..541d25d7c878b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java @@ -150,20 +150,17 @@ public FsPermission(String mode) { } /** - * Return user {@link FsAction}. - * return FsAction useraction + * @return Return user {@link FsAction}. */ public FsAction getUserAction() {return useraction;} /** - * Return group {@link FsAction}. - * return FsAction groupaction + * @return Return group {@link FsAction}. */ public FsAction getGroupAction() {return groupaction;} /** - * Return other {@link FsAction}. 
- * return FsAction otheraction + * @return Return other {@link FsAction}. */ public FsAction getOtherAction() {return otheraction;} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java index 2b62ef78b2859..719d216abaed0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java @@ -199,7 +199,7 @@ public synchronized boolean finished() { * @param off Start offset of the data * @param len Size of the buffer * @return The actual number of bytes of uncompressed data. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public synchronized int decompress(byte[] b, int off, int len) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java index ebe7f213ceeb1..d00b289d5da8c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java @@ -355,7 +355,7 @@ public boolean verifyCanMlock() { } /** - * Return true if the JNI-based native IO extensions are available. + * @return Return true if the JNI-based native IO extensions are available. */ public static boolean isAvailable() { return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded; @@ -367,7 +367,14 @@ private static void assertCodeLoaded() throws IOException { } } - /** Wrapper around open(2) */ + /** + * Wrapper around open(2) . + * @param path input path. + * @param flags input flags. + * @param mode input mode. + * @return FileDescriptor. + * @throws IOException raised on errors performing I/O. + */ public static native FileDescriptor open(String path, int flags, int mode) throws IOException; /** Wrapper around fstat(2) */ private static native Stat fstat(FileDescriptor fd) throws IOException; @@ -428,6 +435,10 @@ static void posixFadviseIfPossible(String identifier, * for this syscall for more information. On systems where this * call is not available, does nothing. * + * @param fd input fd. + * @param offset input offset. + * @param nbytes input nbytes. + * @param flags input flag. * @throws NativeIOException if there is an error with the syscall */ public static void syncFileRangeIfPossible( @@ -712,7 +723,14 @@ public static void createDirectoryWithMode(File path, int mode) private static native void createDirectoryWithMode0(String path, int mode) throws NativeIOException; - /** Wrapper around CreateFile() on Windows */ + /** + * Wrapper around CreateFile() on Windows. + * @param path input path. + * @param desiredAccess input desiredAccess. + * @param shareMode input shareMode. + * @param creationDisposition input creationDisposition. + * @throws IOException raised on errors performing I/O. 
+ */ public static native FileDescriptor createFile(String path, long desiredAccess, long shareMode, long creationDisposition) throws IOException; @@ -749,7 +767,13 @@ private static native FileDescriptor createFileWithMode0(String path, long desiredAccess, long shareMode, long creationDisposition, int mode) throws NativeIOException; - /** Wrapper around SetFilePointer() on Windows */ + /** + * @return Wrapper around SetFilePointer() on Windows. + * @param fd input fd. + * @param distanceToMove input distanceToMove. + * @param moveMethod input moveMethod. + * @throws IOException raised on errors performing I/O. + */ public static native long setFilePointer(FileDescriptor fd, long distanceToMove, long moveMethod) throws IOException; @@ -898,6 +922,7 @@ public CachedUid(String username, long timestamp) { * * @param name the full principal name containing the domain * @return name with domain removed + * @throws IOException raised on errors performing I/O. */ private static String stripDomain(String name) { int i = name.indexOf('\\'); @@ -933,6 +958,11 @@ public static String getOwner(FileDescriptor fd) throws IOException { * file opened at a given offset, i.e. other process can delete * the file the FileDescriptor is reading. Only Windows implementation * uses the native interface. + * + * @param f input f. + * @param seekOffset input seekOffset. + * @return FileDescriptor. + * @throws IOException raised on errors performing I/O. */ public static FileDescriptor getShareDeleteFileDescriptor( File f, long seekOffset) throws IOException { @@ -1045,7 +1075,7 @@ public static void renameTo(File src, File dst) * * @param src source file * @param dst hardlink location - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Deprecated public static void link(File src, File dst) throws IOException { @@ -1103,7 +1133,7 @@ private static native void link0(String src, String dst) * * @param src The source path * @param dst The destination path - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static void copyFileUnbuffered(File src, File dst) throws IOException { if (nativeLoaded && Shell.WINDOWS) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java index 3bbd82d153a09..699e0cb153cc0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java @@ -258,14 +258,14 @@ public String getInterfaceName() { } /** - * Get the client's preferred version + * @return Get the client's preferred version */ public long getClientVersion() { return clientVersion; } /** - * Get the server's agreed to version. + * @return Get the server's agreed to version. */ public long getServerVersion() { return serverVersion; @@ -803,39 +803,55 @@ public Builder setnumReaders(int numReaders) { return this; } - /** Default: -1 */ + /** + * @return Default: -1 + * @param queueSizePerHandler + * input queueSizePerHandler. + */ public Builder setQueueSizePerHandler(int queueSizePerHandler) { this.queueSizePerHandler = queueSizePerHandler; return this; } - /** Default: false */ + /** + * @return Default: false. + * @param verbose input verbose. + */ public Builder setVerbose(boolean verbose) { this.verbose = verbose; return this; } - /** Default: null */ + /** + * @return Default: null. 
+ * @param secretManager input secretManager. + */ public Builder setSecretManager( SecretManager secretManager) { this.secretManager = secretManager; return this; } - /** Default: null */ + /** + * @return Default: null. + * @param portRangeConfig input portRangeConfig. + */ public Builder setPortRangeConfig(String portRangeConfig) { this.portRangeConfig = portRangeConfig; return this; } - /** Default: null */ + /** + * @return Default: null. + * @param alignmentContext input alignmentContext. + */ public Builder setAlignmentContext(AlignmentContext alignmentContext) { this.alignmentContext = alignmentContext; return this; } /** - * Build the RPC Server. + * @return Build the RPC Server. * @throws IOException on error * @throws HadoopIllegalArgumentException when mandatory fields are not set */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java index 0ce78e54a43a0..4af35ad9270f1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java @@ -103,7 +103,7 @@ private static Map getVersionSignatureMap( * @param version The version at the client. * @param methodName Name of the method. * @return true if the method is supported, false otherwise. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static boolean isMethodSupported(Object rpcProxy, Class protocol, RPC.RpcKind rpcKind, long version, String methodName) throws IOException { @@ -200,6 +200,8 @@ private static ProtocolMetaInfoPB getProtocolMetaInfoProxy(Object proxy, * * the format we want is: * ClientNamenodeProtocol#getServerDefaults + * @param method input method. + * @return methodToTraceString. */ public static String methodToTraceString(Method method) { Class clazz = method.getDeclaringClass(); @@ -221,6 +223,8 @@ public static String methodToTraceString(Method method) { * * the format we want is: * ClientProtocol#getBlockLocations + * @param fullName input fullName. + * @return toTraceName. 
*/ public static String toTraceName(String fullName) { int lastPeriod = fullName.lastIndexOf('.'); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java index 992997ead25de..c02af842cf23b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java @@ -47,14 +47,14 @@ public RpcServerException(final String message, final Throwable cause) { } /** - * get the rpc status corresponding to this exception + * @return get the rpc status corresponding to this exception */ public RpcStatusProto getRpcStatusProto() { return RpcStatusProto.ERROR; } /** - * get the detailed rpc status corresponding to this exception + * @return get the detailed rpc status corresponding to this exception */ public RpcErrorCodeProto getRpcErrorCodeProto() { return RpcErrorCodeProto.ERROR_RPC_SERVER; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java index 4d02027a0e688..98daa84187464 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java @@ -46,6 +46,7 @@ public long getProtocolVersion(String protocol, * a list of its supported methods * @see ProtocolSignature#getProtocolSignature(VersionedProtocol, String, * long, int) for a default implementation + * @throws IOException raised on errors performing I/O. */ public ProtocolSignature getProtocolSignature(String protocol, long clientVersion, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java index d790e49f5dcf2..b76c2e09db237 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java @@ -282,9 +282,20 @@ static Client getClient(Configuration conf) { return CLIENTS.getClient(conf); } - /** Construct a client-side proxy object that implements the named protocol, + /** + * Construct a client-side proxy object that implements the named protocol, * talking to a server at the named address. - * @param */ + * @param Generics Type T + * @param protocol input protocol. + * @param clientVersion input clientVersion. + * @param addr input addr. + * @param ticket input ticket. + * @param conf input configuration. + * @param factory input factory. + * @param rpcTimeout input rpcTimeout. + * @param connectionRetryPolicy input connectionRetryPolicy. + * @throws IOException raised on errors performing I/O. + */ @Override public ProtocolProxy getProxy(Class protocol, long clientVersion, InetSocketAddress addr, UserGroupInformation ticket, @@ -295,9 +306,22 @@ public ProtocolProxy getProxy(Class protocol, long clientVersion, rpcTimeout, connectionRetryPolicy, null, null); } - /** Construct a client-side proxy object that implements the named protocol, + /** + * Construct a client-side proxy object that implements the named protocol, * talking to a server at the named address. 
- * @param */ + * @param Generics Type. + * @param protocol input protocol. + * @param clientVersion input clientVersion. + * @param addr input addr. + * @param ticket input ticket. + * @param conf input configuration. + * @param factory input factory. + * @param rpcTimeout input rpcTimeout. + * @param connectionRetryPolicy input connectionRetryPolicy. + * @param fallbackToSimpleAuth input fallbackToSimpleAuth. + * @param alignmentContext input alignmentContext. + * @return ProtocolProxy + */ @Override @SuppressWarnings("unchecked") public ProtocolProxy getProxy(Class protocol, long clientVersion, @@ -345,7 +369,8 @@ public static class Server extends RPC.Server { * @param bindAddress the address to bind on to listen for connection * @param port the port to listen for connections on * - * @deprecated Use #Server(Class, Object, Configuration, String, int) + * @deprecated Use #Server(Class, Object, Configuration, String, int) + * @throws IOException raised on errors performing I/O. */ @Deprecated public Server(Object instance, Configuration conf, String bindAddress, @@ -360,6 +385,7 @@ public Server(Object instance, Configuration conf, String bindAddress, * @param conf the configuration to use * @param bindAddress the address to bind on to listen for connection * @param port the port to listen for connections on + * @throws IOException raised on errors performing I/O. */ public Server(Class protocolClass, Object protocolImpl, Configuration conf, String bindAddress, int port) @@ -376,9 +402,13 @@ public Server(Class protocolClass, Object protocolImpl, * @param port the port to listen for connections on * @param numHandlers the number of method handler threads to run * @param verbose whether each call should be logged + * @param numReaders input numberReaders. + * @param queueSizePerHandler input queueSizePerHandler. + * @param secretManager input secretManager. * * @deprecated use Server#Server(Class, Object, * Configuration, String, int, int, int, int, boolean, SecretManager) + * @throws IOException raised on errors performing I/O. */ @Deprecated public Server(Object protocolImpl, Configuration conf, String bindAddress, @@ -401,9 +431,14 @@ public Server(Object protocolImpl, Configuration conf, String bindAddress, * @param port the port to listen for connections on * @param numHandlers the number of method handler threads to run * @param verbose whether each call should be logged + * @param secretManager input secretManager. + * @param queueSizePerHandler input queueSizePerHandler. + * @param portRangeConfig input portRangeConfig. + * @param numReaders input numReaders. * * @deprecated use Server#Server(Class, Object, * Configuration, String, int, int, int, int, boolean, SecretManager) + * @throws IOException raised on errors performing I/O. */ @Deprecated public Server(Class protocolClass, Object protocolImpl, @@ -428,6 +463,11 @@ public Server(Class protocolClass, Object protocolImpl, * @param numHandlers the number of method handler threads to run * @param verbose whether each call should be logged * @param alignmentContext provides server state info on client responses + * @param numReaders input numReaders. + * @param portRangeConfig input portRangeConfig. + * @param queueSizePerHandler input queueSizePerHandler. + * @param secretManager input secretManager. + * @throws IOException raised on errors performing I/O. 
*/ public Server(Class protocolClass, Object protocolImpl, Configuration conf, String bindAddress, int port, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/DecayRpcSchedulerDetailedMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/DecayRpcSchedulerDetailedMetrics.java index b86381706d67b..0bfe5c7d88000 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/DecayRpcSchedulerDetailedMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/DecayRpcSchedulerDetailedMetrics.java @@ -65,6 +65,7 @@ public static DecayRpcSchedulerDetailedMetrics create(String ns) { /** * Initialize the metrics for JMX with priority levels. + * @param numLevels input numLevels. */ public void init(int numLevels) { LOG.info("Initializing RPC stats for {} priority levels", numLevels); @@ -106,14 +107,16 @@ public void shutdown() { } /** - * Returns the rate name inside the metric. + * @return Returns the rate name inside the metric. + * @param priority input priority. */ public String getQueueName(int priority) { return "DecayRPCSchedulerPriority."+priority+".RpcQueueTime"; } /** - * Returns the rate name inside the metric. + * @return Returns the rate name inside the metric. + * @param priority input priority. */ public String getProcessingName(int priority) { return "DecayRPCSchedulerPriority."+priority+".RpcProcessingTime"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java index c8a88236aeb39..cb70c18a19be0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java @@ -66,6 +66,8 @@ public class LogLevel { public static final String PROTOCOL_HTTPS = "https"; /** * A command line implementation + * @param args input args. + * @throws Exception exception */ public static void main(String[] args) throws Exception { CLI cli = new CLI(new Configuration()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogThrottlingHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogThrottlingHelper.java index 622ee5405c892..af5f852143389 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogThrottlingHelper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogThrottlingHelper.java @@ -88,21 +88,22 @@ public class LogThrottlingHelper { public interface LogAction { /** - * Return the number of records encapsulated in this action; that is, the + * @return Return the number of records encapsulated in this action; that is, the * number of times {@code record} was called to produce this action, * including the current one. */ int getCount(); /** - * Return summary information for the value that was recorded at index + * @return Return summary information for the value that was recorded at index * {@code idx}. Corresponds to the ordering of values passed to * {@link #record(double...)}. + * @param idx input idx. */ SummaryStatistics getStats(int idx); /** - * If this is true, the caller should write to its log. Otherwise, the + * @return If this is true, the caller should write to its log. 
Otherwise, the * caller should take no action, and it is an error to call other methods * on this object. */ @@ -139,6 +140,7 @@ public interface LogAction { * Create a log helper without any primary recorder. * * @see #LogThrottlingHelper(long, String) + * @param minLogPeriodMs input minLogPeriodMs. */ public LogThrottlingHelper(long minLogPeriodMs) { this(minLogPeriodMs, null); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java index c7adaa5d9917f..e4886cb603e4e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java @@ -146,8 +146,10 @@ protected MetricsInfo getInfo(Class cls, Metrics annotation) { } /** - * Remove the prefix "get", if any, from the method name. Return the + * @return Remove the prefix "get", if any, from the method name. Return the * capacitalized method name." + * + * @param method input method. */ protected String getName(Method method) { String methodName = method.getName(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRollingAverages.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRollingAverages.java index aa4d4b9ca0c64..016ecdd4d1e99 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRollingAverages.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRollingAverages.java @@ -139,7 +139,7 @@ public long getSnapshotTimeStamp() { /** * Constructor for {@link MutableRollingAverages}. - * @param metricValueName + * @param metricValueName input metricValueName. */ public MutableRollingAverages(String metricValueName) { if (metricValueName == null) { @@ -285,6 +285,7 @@ public void close() throws IOException { * Retrieve a map of metric name {@literal ->} (aggregate). * Filter out entries that don't have at least minSamples. * + * @param minSamples input minSamples. * @return a map of peer DataNode Id to the average latency to that * node seen over the measurement period. */ @@ -314,6 +315,7 @@ public synchronized Map getStats(long minSamples) { /** * Use for test only. + * @param value input value. */ @VisibleForTesting public synchronized void setRecordValidityMs(long value) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java index e04b4b58ece0b..f2e072545ad28 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java @@ -179,7 +179,7 @@ public void resetMinMax() { } /** - * Return the SampleStat snapshot timestamp + * @return Return the SampleStat snapshot timestamp. 
*/ public long getSnapshotTimeStamp() { return snapshotTimeStamp; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java index 8fd3b33b3a253..196469be9dce2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java @@ -87,7 +87,7 @@ usually does not need to reference any class here.

Getting started

  [table markup elided: captions "Implementing metrics sources" and
  "Memory usage by blocksize"; column headers "Blocksize", "Compression
  memory usage", "Decompression memory usage"; one -/+ pair of table lines]
@@ -290,10 +290,10 @@ metrics system decouples the concept for context (for grouping) with the
backend that can handle multiple contexts (file, gangalia etc.):

  [table markup elided: caption "Implementing metrics sources" with headers
  "Using annotations" / "Using MetricsSource interface", and caption
  "Migration from previous system" with headers "Before" / "After";
  two -/+ pairs of table lines]
@@ -312,10 +312,10 @@ backend that can handle multiple contexts (file, gangalia etc.):
    using the context option in the sink options like the following:

  [table markup elided: two -/+ pairs of table lines; caption "Metrics2"
  with headers "Before" / "After"]
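
As a quick illustration of the annotation-based style these tables compare
(a sketch only; the class name, metric names and context value below are
invented, not taken from the patch):

    import org.apache.hadoop.metrics2.annotation.Metric;
    import org.apache.hadoop.metrics2.annotation.Metrics;
    import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
    import org.apache.hadoop.metrics2.lib.MutableCounterLong;
    import org.apache.hadoop.metrics2.lib.MutableRate;

    // A metrics source declared purely with annotations; the metrics
    // system discovers the @Metric fields via reflection.
    @Metrics(name = "MyStat", about = "example source", context = "mycontext")
    public class MyStat {
      @Metric("Total number of operations") MutableCounterLong totalOps;
      @Metric("Operation latency") MutableRate opLatency;

      public static MyStat create() {
        // registers the source with the default metrics system
        return DefaultMetricsSystem.instance().register(new MyStat());
      }
    }
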
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
index 83be2f1579f53..53bb44fb2e9ef 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
@@ -142,8 +142,12 @@ private static LinkedHashSet<InetAddress> getSubinterfaceInetAddrs(
   }
 
   /**
-   * Like {@link DNS#getIPs(String, boolean)}, but returns all
+   * @return like {@link DNS#getIPs(String, boolean)}, but with all
    * IPs associated with the given interface and its subinterfaces.
+   *
+   * @param strInterface input strInterface.
+   * @throws UnknownHostException
+   * if no IP address for the local host could be found.
    */
   public static String[] getIPs(String strInterface)
       throws UnknownHostException {
@@ -346,6 +350,8 @@ public static String[] getHosts(String strInterface)
    *            The name of the network interface to query (e.g. eth0)
    * @param nameserver
    *            The DNS host name
+   * @param tryfallbackResolution
+   *            whether to fall back to default DNS resolution if the lookup fails.
    * @return The default host names associated with IPs bound to the network
    *         interface
    * @throws UnknownHostException
@@ -385,7 +391,7 @@ public static String getDefaultHost(@Nullable String strInterface)
   }
 
   /**
-   * Returns the default (first) host name associated by the provided
+   * @return the default (first) host name associated by the provided
    * nameserver with the address bound to the specified network interface.
    *
    * @param strInterface
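
For orientation, the two methods touched above are typically used like this
(a sketch; the interface name "eth0" is an example, and "default" selects
the JVM's default resolution):

    import org.apache.hadoop.net.DNS;

    public class DnsProbe {
      public static void main(String[] args) throws Exception {
        // all IPs bound to eth0, including its subinterfaces
        for (String ip : DNS.getIPs("eth0")) {
          System.out.println("ip: " + ip);
        }
        // first host name the default nameserver associates with the
        // address bound to the default interface
        System.out.println("host: " + DNS.getDefaultHost("default", "default"));
      }
    }
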
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java
index 1e6f5f500849f..d29c6e3077df5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java
@@ -65,6 +65,8 @@ public interface DNSToSwitchMapping {
    *
    * If there is a cache on these nodes, this method will clear it, so that 
    * future accesses will see updated data.
+   *
+   * @param names input names.
    */
  public void reloadCachedMappings(List<String> names);
 }
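
A minimal implementation of this interface, for reference (a sketch: real
mappings such as ScriptBasedMapping consult a topology script and would do
real work in the reload methods):

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.net.DNSToSwitchMapping;

    // Places every host in one rack; nothing is cached, so the reload
    // methods are no-ops.
    public class SingleRackMapping implements DNSToSwitchMapping {
      @Override
      public List<String> resolve(List<String> names) {
        List<String> racks = new ArrayList<>(names.size());
        for (int i = 0; i < names.size(); i++) {
          racks.add("/default-rack");
        }
        return racks;
      }

      @Override
      public void reloadCachedMappings() { }

      @Override
      public void reloadCachedMappings(List<String> names) { }
    }
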
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DomainNameResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DomainNameResolver.java
index 4c44e9da4c063..debfe2feaa8b7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DomainNameResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DomainNameResolver.java
@@ -30,9 +30,10 @@ public interface DomainNameResolver {
    * Takes one domain name and returns its IP addresses based on the actual
    * service discovery methods.
    *
-   * @param domainName
+   * @param domainName input domainName.
    * @return all IP addresses
-   * @throws UnknownHostException
+   * @throws UnknownHostException if the IP address of the
+   * host could not be determined.
    */
   InetAddress[] getAllByDomainName(String domainName)
       throws UnknownHostException;
@@ -40,7 +41,7 @@ InetAddress[] getAllByDomainName(String domainName)
   /**
    * Reverse lookup an IP address and get the fully qualified domain name(fqdn).
    *
-   * @param address
+   * @param address input address.
    * @return fully qualified domain name
    */
   String getHostnameByIP(InetAddress address);
@@ -52,10 +53,12 @@ InetAddress[] getAllByDomainName(String domainName)
    * This function is necessary in secure environment since Kerberos uses fqdn
    * in the service principal instead of IP.
    *
-   * @param domainName
+   * @param domainName input domainName.
+   * @param useFQDN input useFQDN.
    * @return all fully qualified domain names belonging to the IPs resolved from
    * the input domainName
-   * @throws UnknownHostException
+   * @throws UnknownHostException if the IP address of the
+   * host could not be determined.
    */
    String[] getAllResolvedHostnameByDomainName(
        String domainName, boolean useFQDN) throws UnknownHostException;
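
The contract documented above can be satisfied with plain JDK resolution,
as in this sketch (Hadoop's own DNSDomainNameResolver is along these lines):

    import java.net.InetAddress;
    import java.net.UnknownHostException;
    import org.apache.hadoop.net.DomainNameResolver;

    public class JdkDomainNameResolver implements DomainNameResolver {
      @Override
      public InetAddress[] getAllByDomainName(String domainName)
          throws UnknownHostException {
        return InetAddress.getAllByName(domainName);
      }

      @Override
      public String getHostnameByIP(InetAddress address) {
        // reverse lookup; falls back to the literal IP if none is found
        return address.getCanonicalHostName();
      }

      @Override
      public String[] getAllResolvedHostnameByDomainName(
          String domainName, boolean useFQDN) throws UnknownHostException {
        InetAddress[] addrs = getAllByDomainName(domainName);
        String[] hosts = new String[addrs.length];
        for (int i = 0; i < addrs.length; i++) {
          hosts[i] = useFQDN ? addrs[i].getCanonicalHostName()
                             : addrs[i].getHostAddress();
        }
        return hosts;
      }
    }
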
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/InnerNode.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/InnerNode.java
index efd1cc07d44bd..df4a01af27bc9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/InnerNode.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/InnerNode.java
@@ -27,7 +27,10 @@
 @InterfaceStability.Unstable
 public interface InnerNode extends Node {
  interface Factory<N extends InnerNode> {
-    /** Construct an InnerNode from a path-like string */
+    /**
+     * @param path input path.
+     * @return an InnerNode constructed from the given path-like string.
+     */
     N newInnerNode(String path);
   }
 
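
For context, the factory lets topology code mint inner nodes from a
path-like string without naming a concrete class (hypothetical helper;
only newInnerNode is part of the interface above):

    import org.apache.hadoop.net.InnerNode;

    public class FactoryDemo {
      // e.g. mkNode(factory, "/dc1/rack1")
      static <N extends InnerNode> N mkNode(InnerNode.Factory<N> factory,
                                            String path) {
        return factory.newInnerNode(path);
      }
    }
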
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/InnerNodeImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/InnerNodeImpl.java
index 923515b6efe7e..1dd3105080778 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/InnerNodeImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/InnerNodeImpl.java
@@ -41,13 +41,22 @@ public InnerNodeImpl newInnerNode(String path) {
  protected final Map<String, Node> childrenMap = new HashMap<>();
   protected int numOfLeaves;
 
-  /** Construct an InnerNode from a path-like string. */
+  /**
+   * Construct an InnerNode from a path-like string.
+   * @param path input path.
+   */
   protected InnerNodeImpl(String path) {
     super(path);
   }
 
-  /** Construct an InnerNode
-   * from its name, its network location, its parent, and its level. */
+  /**
+   * Construct an InnerNode
+   * from its name, its network location, its parent, and its level.
+   * @param name input name.
+   * @param location input location.
+   * @param parent input parent.
+   * @param level input level.
+   */
   protected InnerNodeImpl(String name, String location,
       InnerNode parent, int level) {
     super(name, location, parent, level);
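
These inner nodes form the tree behind NetworkTopology, whose public API the
next file documents; a hedged usage sketch (host and rack names invented):

    import org.apache.hadoop.net.NetworkTopology;
    import org.apache.hadoop.net.NodeBase;

    public class TopologyDemo {
      public static void main(String[] args) {
        NetworkTopology tree = new NetworkTopology();
        NodeBase n1 = new NodeBase("h1", "/dc1/rack1"); // name, location
        NodeBase n2 = new NodeBase("h2", "/dc1/rack2");
        tree.add(n1);
        tree.add(n2);
        System.out.println(tree.isOnSameRack(n1, n2)); // false
        System.out.println(tree.getDistance(n1, n2));  // 4 hops via /dc1
      }
    }
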
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
index 137c940001c0c..7764ab6b42010 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
@@ -415,14 +415,16 @@ public boolean isOnSameRack(Node node1, Node node2) {
   }
   
   /**
-   * Check if network topology is aware of NodeGroup
+   * @return true if network topology is aware of NodeGroup.
    */
   public boolean isNodeGroupAware() {
     return false;
   }
   
   /** 
-   * Return false directly as not aware of NodeGroup, to be override in sub-class
+   * @return false directly, as not aware of NodeGroup; to be overridden in sub-classes.
+   * @param node1 input node1.
+   * @param node2 input node2.
    */
   public boolean isOnSameNodeGroup(Node node1, Node node2) {
     return false;
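
Subclasses that are node-group aware override both of these; for example
(sketch only; Hadoop's real implementation is NetworkTopologyWithNodeGroup):

    import org.apache.hadoop.net.NetworkTopology;
    import org.apache.hadoop.net.Node;

    public class NodeGroupTopology extends NetworkTopology {
      @Override
      public boolean isNodeGroupAware() {
        return true;
      }

      @Override
      public boolean isOnSameNodeGroup(Node node1, Node node2) {
        // same full location, e.g. "/dc1/rack1/group1", means same group
        return node1.getNetworkLocation().equals(node2.getNetworkLocation());
      }
    }
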
@@ -729,11 +731,10 @@ public String toString() {
   }
   
   /**
-   * Divide networklocation string into two parts by last separator, and get 
+   * @return Divide the network location string into two parts by the last separator, and get
    * the first part here.
    * 
-   * @param networkLocation
-   * @return
+   * @param networkLocation input networkLocation.
    */
   public static String getFirstHalf(String networkLocation) {
     int index = networkLocation.lastIndexOf(NodeBase.PATH_SEPARATOR_STR);
@@ -741,11 +742,10 @@ public static String getFirstHalf(String networkLocation) {
   }
 
   /**
-   * Divide networklocation string into two parts by last separator, and get 
+   * @return Divide the network location string into two parts by the last separator, and get
    * the second part here.
    * 
-   * @param networkLocation
-   * @return
+   * @param networkLocation input networkLocation.
    */
   public static String getLastHalf(String networkLocation) {
     int index = networkLocation.lastIndexOf(NodeBase.PATH_SEPARATOR_STR);
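
A worked example of the two halves, assuming the usual "/"-separated
location format:

    import org.apache.hadoop.net.NetworkTopology;

    public class SplitDemo {
      public static void main(String[] args) {
        // lastIndexOf("/") splits "/dc1/rack1" at its final separator:
        System.out.println(NetworkTopology.getFirstHalf("/dc1/rack1")); // /dc1
        System.out.println(NetworkTopology.getLastHalf("/dc1/rack1"));  // /rack1
      }
    }
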
@@ -897,7 +897,7 @@ public void sortByDistance(Node reader, Node[] nodes, int activeLen) {
    * or on a different rack from the reader. Sorting the nodes based on network
    * distance from the reader reduces network traffic and improves
    * performance.
-   * <p/>
+   * <p>
   * As an additional twist, we also randomize the nodes at each network
   * distance. This helps with load balancing when there is data skew.
   *
@@ -906,6 +906,7 @@ public void sortByDistance(Node reader, Node[] nodes, int activeLen) {
    * @param activeLen Number of active nodes at the front of the array
    * @param secondarySort a secondary sorting strategy which can inject into
    *     that point from outside to help sort the same distance.
+   * @param <T> Generics Type T
    */
   public <T extends Node> void sortByDistance(Node reader, T[] nodes,
       int activeLen, Consumer<List<T>> secondarySort){
@@ -918,7 +919,7 @@ public <T extends Node> void sortByDistance(Node reader, T[] nodes,
    * is not a datanode. Sorting the nodes based on network distance
    * from the reader reduces network traffic and improves
    * performance.
-   * <p/>
+   * <p>
    *
    * @param reader Node where data will be read
    * @param nodes Available replicas with the requested data
    * @param activeLen Number of active nodes at the front of the array
@@ -939,13 +940,14 @@ public void sortByDistanceUsingNetworkLocation(Node reader, Node[] nodes,
    * is not a datanode. Sorting the nodes based on network distance
    * from the reader reduces network traffic and improves
    * performance.
-   * <p/>
+   * <p>
    *
    * @param reader Node where data will be read
    * @param nodes Available replicas with the requested data
    * @param activeLen Number of active nodes at the front of the array
    * @param secondarySort a secondary sorting strategy which can inject into
    *     that point from outside to help sort the same distance.
+   * @param <T> Generics Type T
    */
  public <T extends Node> void sortByDistanceUsingNetworkLocation(Node reader,
      T[] nodes, int activeLen, Consumer<List<T>> secondarySort) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
index 4db8155ffed3b..60ae442b4f602 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
@@ -124,12 +124,13 @@ public String toString() {
   }
 
   /**
-   * {@inheritDoc}
+   * {@inheritDoc}.
   * <p>
   * This will get called in the superclass constructor, so a check is needed
   * to ensure that the raw mapping is defined before trying to relaying a null
   * configuration.
-   * @param conf
+   * </p>
+   * @param conf input Configuration.
    */
  @Override
  public void setConf(Configuration conf) {
@@ -212,8 +213,9 @@ public List<String> resolve(List<String> names) {
   /**
    * Build and execute the resolution command. The command is
    * executed in the directory specified by the system property
-   * "user.dir" if set; otherwise the current working directory is used
+   * "user.dir" if set; otherwise the current working directory is used.
    * @param args a list of arguments
+   * @param commandScriptName input commandScriptName.
    * @return null if the number of arguments is out of range,
    *     or the output of the command.
    */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
index e05fae6496a15..4c1a547baabe0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
@@ -74,12 +74,13 @@ public String toString() {
   }
 
   /**
-   * {@inheritDoc}
+   * {@inheritDoc}.
   * <p>
   * This will get called in the superclass constructor, so a check is needed
   * to ensure that the raw mapping is defined before trying to relaying a null
   * configuration.
-   * @param conf
+   * </p>
+ * @param conf input Configuration. */ @Override public void setConf(Configuration conf) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java index cfa7b01e8136a..99e646a975b22 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java @@ -67,7 +67,7 @@ int performIO(ByteBuffer buf) throws IOException { * Channel for reading, should also be a {@link SelectableChannel}. * The channel will be configured to be non-blocking. * @param timeout timeout in milliseconds. must not be negative. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public SocketInputStream(ReadableByteChannel channel, long timeout) throws IOException { @@ -86,7 +86,7 @@ public SocketInputStream(ReadableByteChannel channel, long timeout) * * @param socket should have a channel associated with it. * @param timeout timeout timeout in milliseconds. must not be negative. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public SocketInputStream(Socket socket, long timeout) throws IOException { @@ -103,7 +103,7 @@ public SocketInputStream(Socket socket, long timeout) * @see SocketInputStream#SocketInputStream(ReadableByteChannel, long) * * @param socket should have a channel associated with it. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public SocketInputStream(Socket socket) throws IOException { this(socket.getChannel(), socket.getSoTimeout()); @@ -141,7 +141,7 @@ public synchronized void close() throws IOException { } /** - * Returns underlying channel used by inputstream. + * @return Returns underlying channel used by inputstream. * This is useful in certain cases like channel for * {@link FileChannel#transferFrom(ReadableByteChannel, long, long)}. */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java index 93f4f56d78d63..3f6ea098a7200 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java @@ -72,7 +72,7 @@ int performIO(ByteBuffer buf) throws IOException { * Channel for writing, should also be a {@link SelectableChannel}. * The channel will be configured to be non-blocking. * @param timeout timeout in milliseconds. must not be negative. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public SocketOutputStream(WritableByteChannel channel, long timeout) throws IOException { @@ -91,7 +91,7 @@ public SocketOutputStream(WritableByteChannel channel, long timeout) * * @param socket should have a channel associated with it. * @param timeout timeout timeout in milliseconds. must not be negative. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public SocketOutputStream(Socket socket, long timeout) throws IOException { @@ -138,7 +138,7 @@ public synchronized void close() throws IOException { } /** - * Returns underlying channel used by this stream. + * @return Returns underlying channel used by this stream. 
* This is useful in certain cases like channel for * {@link FileChannel#transferTo(long, long, WritableByteChannel)} */ @@ -254,7 +254,12 @@ public void transferToFully(FileChannel fileCh, long position, int count, * Call * {@link #transferToFully(FileChannel, long, int, LongWritable, LongWritable) * } - * with null waitForWritableTime and transferToTime + * with null waitForWritableTime and transferToTime. + * + * @param fileCh input fileCh. + * @param position input position. + * @param count input count. + * @throws IOException raised on errors performing I/O. */ public void transferToFully(FileChannel fileCh, long position, int count) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java index 9b50e7c524270..bd85a740186b7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java @@ -241,7 +241,7 @@ public void stop() { /** * Relay to {@link #stop()} - * @throws IOException + * @throws IOException raised on errors performing I/O. */ @Override public final void close() throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/AbstractLaunchableService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/AbstractLaunchableService.java index be28c5be2d017..66f8ee430f6e1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/AbstractLaunchableService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/AbstractLaunchableService.java @@ -42,6 +42,8 @@ public abstract class AbstractLaunchableService extends AbstractService /** * Construct an instance with the given name. + * + * @param name input name. */ protected AbstractLaunchableService(String name) { super(name); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java index 7fd4657f3fd39..379f59b0802ef 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java @@ -413,7 +413,7 @@ protected List getConfigurationsToCreate() { } /** - * This creates all the configurations defined by + * @return This creates all the configurations defined by * {@link #getConfigurationsToCreate()} , ensuring that * the resources have been pushed in. * If one cannot be loaded it is logged and the operation continues @@ -648,7 +648,7 @@ protected int coreServiceLaunch(Configuration conf, } /** - * Instantiate the service defined in {@code serviceClassName}. + * @return Instantiate the service defined in {@code serviceClassName}. * * Sets the {@code configuration} field * to the the value of {@code conf}, @@ -852,6 +852,7 @@ protected void error(String message, Throwable thrown) { * The service launcher code assumes that after this method is invoked, * no other code in the same method is called. * @param exitCode code to exit + * @param message input message. 
*/ protected void exit(int exitCode, String message) { ExitUtil.terminate(exitCode, message); @@ -1003,7 +1004,7 @@ protected void verifyConfigurationFilesExist(String[] filenames) { } /** - * Build a log message for starting up and shutting down. + * @return Build a log message for starting up and shutting down. * @param classname the class of the server * @param args arguments */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/package-info.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/package-info.java index f582fa2d97875..ae7acfc1b17f6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/package-info.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/package-info.java @@ -192,9 +192,8 @@ At this point, rather than block waiting for the service to terminate (as during the {@code execute()} method takes priority over any exit codes returned by the method. This allows services to signal failures simply by raising exceptions with exit codes. -

Viewed in sequence, the workflow is:

  1. (prepare configuration files; covered later)
  2. @@ -221,7 +220,7 @@ At this point, rather than block waiting for the service to terminate (as

    For a basic service, the return code is 0 unless an exception was raised.

    For a {@link org.apache.hadoop.service.launcher.LaunchableService}, the return code is the number returned from the {@link org.apache.hadoop.service.launcher.LaunchableService#execute()} @@ -235,7 +234,7 @@ At this point, rather than block waiting for the service to terminate (as of returning error codes to signal failures and for normal Services to return any error code at all.

    Any exception which implements the {@link org.apache.hadoop.util.ExitCodeProvider} interface is considered to be a provider of the exit code: the method @@ -269,7 +268,7 @@ interface listing common exception codes. These are exception codes Note that {@link org.apache.hadoop.util.ExitUtil.ExitException} itself implements {@link org.apache.hadoop.util.ExitCodeProvider#getExitCode()}
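
    As an illustration (not part of this patch), a launched service can signal
    failure by raising such an exception. The sketch below assumes the
    (exitCode, message) constructor of
    {@link org.apache.hadoop.service.launcher.ServiceLaunchException}, uses an
    arbitrary example exit code of 64, and treats {@code probeBackend()} as a
    hypothetical helper:

      import org.apache.hadoop.service.launcher.AbstractLaunchableService;
      import org.apache.hadoop.service.launcher.ServiceLaunchException;

      public class ExampleService extends AbstractLaunchableService {

        public ExampleService() {
          super("ExampleService");
        }

        @Override
        public int execute() throws Exception {
          if (!probeBackend()) {
            // the launcher recovers 64 through ExitCodeProvider#getExitCode()
            throw new ServiceLaunchException(64, "backend probe failed");
          }
          return 0;    // becomes the process exit code
        }

        private boolean probeBackend() {
          return true; // stand-in for a real health check
        }
      }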

    If an exception does not implement {@link org.apache.hadoop.util.ExitCodeProvider#getExitCode()}, it will be wrapped in an {@link org.apache.hadoop.util.ExitUtil.ExitException} @@ -324,7 +323,7 @@ interface listing common exception codes. These are exception codes when received, attempts to stop the service in a limited period of time. It then triggers a JVM shutdown by way of {@link org.apache.hadoop.util.ExitUtil#terminate(int, String)}

    If a second signal is received, the {@link org.apache.hadoop.service.launcher.InterruptEscalator} reacts by triggering an immediate JVM halt, invoking @@ -342,7 +341,7 @@ interface listing common exception codes. These are exception codes stop the service if a shutdown request is received, so ensuring that if the JVM is exited by any thread, an attempt to shut down the service will be made.
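
    The shutdown-hook side of this behavior can be pictured with a short
    sketch; the real launcher uses its own hook and signal handling classes,
    so only {@link org.apache.hadoop.service.Service#stop()} is assumed here:

      final Service service = new ExampleService();
      Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        // best-effort stop on JVM exit; stop() may already have been
        // invoked by the launcher, so it must tolerate repeated calls
        service.stop();
      }));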

    Configuration class creation

    @@ -355,12 +354,12 @@ interface listing common exception codes. These are exception codes What the launcher does do is use reflection to try to create instances of these classes simply to force in the common resources. If the classes are not on the classpath this fact will be logged.

    Applications may consider it essential to either force load in the relevant configuration, or pass it down to the service being created, in which case further measures may be needed.

    1: Creation in an extended {@code ServiceLauncher}

    Subclass the Service launcher and override its @@ -371,9 +370,9 @@ interface listing common exception codes. These are exception codes HDFS or YARN. It does imply a dedicated script to invoke the custom {@code main()} method.
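
    A sketch of that approach, assuming the (name, classname) launcher
    constructor and the {@code launchServiceAndExit()} entry point;
    {@code MyService} is a placeholder for the application's service class:

      import java.util.Arrays;
      import java.util.List;
      import org.apache.hadoop.service.launcher.ServiceLauncher;

      public class MyLauncher extends ServiceLauncher<MyService> {

        public MyLauncher() {
          super("MyService", MyService.class.getName());
        }

        @Override
        protected List<String> getConfigurationsToCreate() {
          // force in an extra Configuration subclass by classname
          List<String> classes = super.getConfigurationsToCreate();
          classes.add("org.apache.hadoop.yarn.conf.YarnConfiguration");
          return classes;
        }

        public static void main(String[] args) {
          new MyLauncher().launchServiceAndExit(Arrays.asList(args));
        }
      }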

    2: Creation in {@code bindArgs()}

    In {@link org.apache.hadoop.service.launcher.LaunchableService#bindArgs(Configuration, List)}, a new configuration is created: @@ -390,7 +389,7 @@ interface listing common exception codes. These are exception codes instances created via the service launcher. It does imply that this is expected to be the only way that services will be launched.
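
    A minimal sketch of that pattern, assuming YarnConfiguration is the
    subclass the application wants to force in:

      @Override
      public Configuration bindArgs(Configuration config, List<String> args)
          throws Exception {
        // hand the launcher back a wrapped configuration; argument
        // parsing itself is left to the normal launcher logic
        return new YarnConfiguration(config);
      }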

    3: Creation in {@code serviceInit()}

      protected void serviceInit(Configuration conf) throws Exception {
        super.serviceInit(new YarnConfiguration(conf));
      }

      One use of this pattern is propagating information between peer services in a
      {@link org.apache.hadoop.service.CompositeService}.
      While a dangerous practice, it does happen.

    Summary: the ServiceLauncher makes a best-effort attempt to load the standard Configuration subclasses, but does not fail if they are not present. @@ -429,7 +428,7 @@ class (the one created by the If this argument is repeated multiple times, all configuration files are merged with the latest file on the command line being the last one to be applied.
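
    The merge follows normal Configuration resource-loading rules: a resource
    added later overrides values set by an earlier one. A sketch with two
    hypothetical file names:

      Configuration conf = new Configuration(false);
      conf.addResource(new Path("service-default.xml"));   // applied first
      conf.addResource(new Path("service-override.xml"));  // wins on conflicts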

    All the {@code --conf <file>} argument pairs are stripped off the argument list provided to the instantiated service; they get the merged configuration, but not the commands used to create it. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java index 7eaaeb89185e3..81e1fb5d21234 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java @@ -131,7 +131,7 @@ public class GenericOptionsParser { * Create an options parser with the given options to parse the args. * @param opts the options * @param args the command line arguments - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public GenericOptionsParser(Options opts, String[] args) throws IOException { @@ -141,7 +141,7 @@ public GenericOptionsParser(Options opts, String[] args) /** * Create an options parser to parse the args. * @param args the command line arguments - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public GenericOptionsParser(String[] args) throws IOException { @@ -157,7 +157,7 @@ public GenericOptionsParser(String[] args) * * @param conf the Configuration to modify. * @param args command-line arguments. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public GenericOptionsParser(Configuration conf, String[] args) throws IOException { @@ -174,7 +174,7 @@ public GenericOptionsParser(Configuration conf, String[] args) * @param conf the configuration to modify * @param options options built by the caller * @param args User-specified arguments - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public GenericOptionsParser(Configuration conf, Options options, String[] args) throws IOException { @@ -224,9 +224,10 @@ public boolean isParseSuccessful() { } /** - * Specify properties of each generic option. + * @return Specify properties of each generic option. * Important: as {@link OptionBuilder} is not thread safe, subclasses * must synchronize use on {@code OptionBuilder.class} + * @param opts input opts. */ @SuppressWarnings("static-access") protected Options buildGeneralOptions(Options opts) { @@ -366,9 +367,9 @@ private void processGeneralOptions(CommandLine line) throws IOException { /** * If libjars are set in the conf, parse the libjars. - * @param conf + * @param conf input Configuration. * @return libjar urls - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static URL[] getLibJars(Configuration conf) throws IOException { String jars = conf.get("tmpjars"); From 543e413077d2f57a79e05619293e4c0fbb99b7c5 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 16 May 2022 03:56:47 -0700 Subject: [PATCH 48/53] HADOOP-18229. Fix some java doc compilation 80+ warnings. 
Change Pom.xml Maven Support JDK1.8 --- hadoop-common-project/hadoop-common/pom.xml | 6 - .../apache/hadoop/io/nativeio/NativeIO.java | 6 +- .../main/java/org/apache/hadoop/ipc/RPC.java | 138 ++++++++++++------ .../org/apache/hadoop/ipc/RetryCache.java | 4 +- .../java/org/apache/hadoop/ipc/Server.java | 2 +- .../service/launcher/ServiceLauncher.java | 1 + .../apache/hadoop/util/LightWeightCache.java | 7 +- .../apache/hadoop/util/LightWeightGSet.java | 7 +- 8 files changed, 114 insertions(+), 57 deletions(-) diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index f4d6e61eea3ae..d8e2dd3542223 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -1179,12 +1179,6 @@ **/FSProtos.java *.proto:*.tracing:*.protobuf - - -Xmaxerrs - 1000 - -Xmaxwarns - 1000 - diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java index d00b289d5da8c..5cf820c50ca7c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java @@ -724,7 +724,7 @@ private static native void createDirectoryWithMode0(String path, int mode) throws NativeIOException; /** - * Wrapper around CreateFile() on Windows. + * @return Wrapper around CreateFile() on Windows. * @param path input path. * @param desiredAccess input desiredAccess. * @param shareMode input shareMode. @@ -864,7 +864,7 @@ public static boolean access(String path, AccessRight desiredAccess) } /** - * Return true if the JNI-based native IO extensions are available. + * @return Return true if the JNI-based native IO extensions are available. */ public static boolean isAvailable() { return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded; @@ -991,7 +991,7 @@ public static FileDescriptor getShareDeleteFileDescriptor( } /** - * Create the specified File for write access, ensuring that it does not exist. + * @return Create the specified File for write access, ensuring that it does not exist. * @param f the file that we want to create * @param permissions we want to have on the file (if security is enabled) * diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java index 699e0cb153cc0..8c5287b30735d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java @@ -150,6 +150,9 @@ static Class[] getProtocolInterfaces(Class protocol) { * Get the protocol name. * If the protocol class has a ProtocolAnnotation, then get the protocol * name from the annotation; otherwise the class name is the protocol name. + * + * @param protocol input protocol. + * @return protocol name. */ static public String getProtocolName(Class protocol) { if (protocol == null) { @@ -164,6 +167,9 @@ static public String getProtocolName(Class protocol) { * If the protocol class has a ProtocolAnnotation, * then get the protocol version from the annotation; * otherwise get it from the versionID field of the protocol class. + * + * @param protocol input protocol. 
+ * @return ProtocolVersion */ static public long getProtocolVersion(Class protocol) { if (protocol == null) { @@ -286,8 +292,9 @@ public RpcErrorCodeProto getRpcErrorCodeProto() { } /** - * Get a proxy connection to a remote server - * + * Get a proxy connection to a remote server. + * + * @param Generics Type T. * @param protocol protocol class * @param clientVersion client version * @param addr remote address @@ -306,8 +313,9 @@ public static T waitForProxy( /** * Get a protocol proxy that contains a proxy connection to a remote server - * and a set of methods that are supported by the server - * + * and a set of methods that are supported by the server. + * + * @param Generics Type T. * @param protocol protocol class * @param clientVersion client version * @param addr remote address @@ -324,8 +332,9 @@ public static ProtocolProxy waitForProtocolProxy(Class protocol, } /** - * Get a proxy connection to a remote server - * + * Get a proxy connection to a remote server. + * + * @param Generics Type T. * @param protocol protocol class * @param clientVersion client version * @param addr remote address @@ -344,7 +353,8 @@ public static T waitForProxy(Class protocol, long clientVersion, /** * Get a protocol proxy that contains a proxy connection to a remote server * and a set of methods that are supported by the server - * + * + * @param Generics Type T. * @param protocol protocol class * @param clientVersion client version * @param addr remote address @@ -362,8 +372,9 @@ public static ProtocolProxy waitForProtocolProxy(Class protocol, } /** - * Get a proxy connection to a remote server - * + * Get a proxy connection to a remote server. + * + * @param Generics Type T. * @param protocol protocol class * @param clientVersion client version * @param addr remote address @@ -391,6 +402,7 @@ public static T waitForProxy(Class protocol, * @param addr remote address * @param conf configuration to use * @param rpcTimeout timeout for each RPC + * @param connectionRetryPolicy input connectionRetryPolicy * @param timeout time in milliseconds before giving up * @return the proxy * @throws IOException if the far end through a RemoteException @@ -439,9 +451,17 @@ public static ProtocolProxy waitForProtocolProxy(Class protocol, } } - /** Construct a client-side proxy object that implements the named protocol, + /** + * Construct a client-side proxy object that implements the named protocol, * talking to a server at the named address. - * @param */ + * @param Generics Type T. + * @param protocol input protocol. + * @param clientVersion input clientVersion. + * @param addr input addr. + * @param conf input Configuration. + * @param factory input factory. + * @throws IOException raised on errors performing I/O. + */ public static T getProxy(Class protocol, long clientVersion, InetSocketAddress addr, Configuration conf, @@ -452,8 +472,9 @@ public static T getProxy(Class protocol, /** * Get a protocol proxy that contains a proxy connection to a remote server - * and a set of methods that are supported by the server - * + * and a set of methods that are supported by the server. 
+ * + * @param <T> Generics Type T. * @param protocol protocol class * @param clientVersion client version * @param addr remote address @@ -470,9 +491,21 @@ public static ProtocolProxy getProtocolProxy(Class protocol, return getProtocolProxy(protocol, clientVersion, addr, ugi, conf, factory); } - /** Construct a client-side proxy object that implements the named protocol, + /** + * Construct a client-side proxy object that implements the named protocol, * talking to a server at the named address. - * @param <T> */ + * + * @param <T> Generics Type T. + * @param protocol input protocol. + * @param clientVersion input clientVersion. + * @param addr input addr. + * @param ticket input ticket. + * @param conf input conf. + * @param factory input factory. + * @return the protocol proxy + * @throws IOException raised on errors performing I/O. + * + */ public static T getProxy(Class protocol, long clientVersion, InetSocketAddress addr, @@ -509,8 +542,8 @@ public static T getProxy(Class protocol, /** * Construct a client-side proxy that implements the named protocol, * talking to a server at the named address. - * @param <T> - * + * + * @param <T> Generics Type T. * @param protocol protocol * @param clientVersion client's version * @param addr server address @@ -534,8 +567,9 @@ public static T getProxy(Class protocol, /** * Get a protocol proxy that contains a proxy connection to a remote server - * and a set of methods that are supported by the server - + * and a set of methods that are supported by the server. + * + * @param <T> Generics Type T. * @param protocol protocol * @param clientVersion client's version * @param addr server address @@ -561,8 +595,9 @@ public static ProtocolProxy getProtocolProxy(Class protocol, /** * Get a protocol proxy that contains a proxy connection to a remote server - * and a set of methods that are supported by the server + * and a set of methods that are supported by the server. * + * @param <T> Generics Type T. * @param protocol protocol * @param clientVersion client's version * @param addr server address @@ -609,6 +644,7 @@ public static ProtocolProxy getProtocolProxy(Class protocol, * @param fallbackToSimpleAuth set to true or false during calls to indicate * if a secure client falls back to simple auth * @param alignmentContext state alignment context + * @param <T> Generics Type T. * @return the proxy * @throws IOException if any error occurs */ @@ -632,15 +668,15 @@ public static ProtocolProxy getProtocolProxy(Class protocol, } /** - * Construct a client-side proxy object with the default SocketFactory - * @param <T> - * - * @param protocol - * @param clientVersion - * @param addr - * @param conf + * Construct a client-side proxy object with the default SocketFactory. + * + * @param <T> Generics Type T. + * @param protocol input protocol. + * @param clientVersion input clientVersion. + * @param addr input addr. + * @param conf input Configuration. * @return a proxy instance - * @throws IOException + * @throws IOException if the thread is interrupted. */ public static T getProxy(Class protocol, long clientVersion, @@ -651,7 +687,8 @@ public static T getProxy(Class protocol, } /** - * Returns the server address for a given proxy. + * @return Returns the server address for a given proxy. + * @param proxy input proxy.
*/ public static InetSocketAddress getServerAddress(Object proxy) { return getConnectionIdForProxy(proxy).getAddress(); @@ -678,12 +715,13 @@ public static ConnectionId getConnectionIdForProxy(Object proxy) { * Get a protocol proxy that contains a proxy connection to a remote server * and a set of methods that are supported by the server * - * @param protocol - * @param clientVersion - * @param addr - * @param conf + * @param protocol input protocol. + * @param clientVersion input clientVersion. + * @param addr input addr. + * @param conf input configuration. + * @param Generics Type T. * @return a protocol proxy - * @throws IOException + * @throws IOException if the thread is interrupted. */ public static ProtocolProxy getProtocolProxy(Class protocol, long clientVersion, @@ -767,44 +805,62 @@ public Builder(Configuration conf) { this.conf = conf; } - /** Mandatory field */ + /** + * @return Mandatory field. + * @param protocol input protocol. + */ public Builder setProtocol(Class protocol) { this.protocol = protocol; return this; } - /** Mandatory field */ + /** + * @return Mandatory field. + * @param instance input instance. + */ public Builder setInstance(Object instance) { this.instance = instance; return this; } - /** Default: 0.0.0.0 */ + /** + * @return Default: 0.0.0.0. + * @param bindAddress input bindAddress. + */ public Builder setBindAddress(String bindAddress) { this.bindAddress = bindAddress; return this; } - /** Default: 0 */ + /** + * @return Default: 0. + * @param port input port. + */ public Builder setPort(int port) { this.port = port; return this; } - /** Default: 1 */ + /** + * @return Default: 1. + * @param numHandlers input numHandlers. + */ public Builder setNumHandlers(int numHandlers) { this.numHandlers = numHandlers; return this; } - /** Default: -1 */ + /** + * @return Default: -1. + * @param numReaders input numReaders. + */ public Builder setnumReaders(int numReaders) { this.numReaders = numReaders; return this; } /** - * @return Default: -1 + * @return Default: -1. * @param queueSizePerHandler * input queueSizePerHandler. */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java index c9e04ab82b615..5bcbf6ba58daf 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java @@ -49,11 +49,11 @@ public class RetryCache { private static final int MAX_CAPACITY = 16; /** - * CacheEntry is tracked using unique client ID and callId of the RPC request + * CacheEntry is tracked using unique client ID and callId of the RPC request. */ public static class CacheEntry implements LightWeightCache.Entry { /** - * Processing state of the requests + * Processing state of the requests. 
*/ private static byte INPROGRESS = 0; private static byte SUCCESS = 1; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java index c5732c68b1517..e205d53d7a9d8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java @@ -2380,7 +2380,7 @@ private void checkDataLength(int dataLength) throws IOException { * @return -1 in case of error, else num bytes read so far * @throws IOException - internal error that should not be returned to * client, typically failure to respond to client - * @throws InterruptedException + * @throws InterruptedException - if the thread is interrupted. */ public int readAndProcess() throws IOException, InterruptedException { while (!shouldClose()) { // stop if a fatal response has been sent. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java index 379f59b0802ef..2afba9b098cf8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java @@ -567,6 +567,7 @@ public ExitUtil.ExitException launchService(Configuration conf, * @throws Exception any other failure -if it implements * {@link ExitCodeProvider} then it defines the exit code for any * containing exception + * @return status code */ protected int coreServiceLaunch(Configuration conf, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightCache.java index f1897ea2368b2..0ce612ccca3e0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightCache.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightCache.java @@ -58,10 +58,13 @@ public class LightWeightCache extends LightWeightGSet { * Entries of {@link LightWeightCache}. */ public static interface Entry extends LinkedElement { - /** Set the expiration time. */ + /** + * Set the expiration time. + * @param timeNano input timeNano. + */ public void setExpirationTime(long timeNano); - /** Get the expiration time. */ + /** @return Get the expiration time. */ public long getExpirationTime(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java index 46e95234b57f7..a0eb81e9998a1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java @@ -52,10 +52,13 @@ public class LightWeightGSet implements GSet { * Elements of {@link LightWeightGSet}. */ public interface LinkedElement { - /** Set the next element. */ + /** + * Set the next element. + * @param next inputNext. + */ void setNext(LinkedElement next); - /** Get the next element. */ + /** @return Get the next element. 
*/ LinkedElement getNext(); } From 26731ba79f2adb8b687bc2fd2c2b6ffc1139060e Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 16 May 2022 07:16:31 -0700 Subject: [PATCH 49/53] HADOOP-18229. Fix some java doc compilation 150+ warnings. --- .../org/apache/hadoop/conf/Configuration.java | 14 ++-- .../hadoop/conf/ReconfigurationException.java | 6 +- .../conf/ReconfigurationTaskStatus.java | 2 +- .../org/apache/hadoop/io/SequenceFile.java | 2 +- .../java/org/apache/hadoop/io/SetFile.java | 19 ++++-- .../main/java/org/apache/hadoop/io/Text.java | 3 + .../org/apache/hadoop/io/VIntWritable.java | 7 +- .../org/apache/hadoop/io/VLongWritable.java | 7 +- .../apache/hadoop/io/VersionedWritable.java | 2 +- .../apache/hadoop/io/WritableFactories.java | 26 ++++++-- .../org/apache/hadoop/io/WritableFactory.java | 2 +- .../org/apache/hadoop/io/WritableName.java | 32 +++++++-- .../org/apache/hadoop/io/WritableUtils.java | 34 ++++++---- .../apache/hadoop/ipc/AlignmentContext.java | 2 +- .../apache/hadoop/ipc/CallQueueManager.java | 6 ++ .../java/org/apache/hadoop/ipc/Client.java | 28 +++++--- .../org/apache/hadoop/ipc/ClientCache.java | 2 + .../java/org/apache/hadoop/ipc/ClientId.java | 12 +++- .../hadoop/ipc/GenericRefreshProtocol.java | 6 +- .../org/apache/hadoop/ipc/ProtobufHelper.java | 4 +- .../apache/hadoop/ipc/ProtobufRpcEngine.java | 8 +++ .../apache/hadoop/ipc/ProtobufRpcEngine2.java | 6 ++ .../hadoop/ipc/ProtocolMetaInterface.java | 2 +- .../org/apache/hadoop/ipc/ProtocolProxy.java | 3 +- .../main/java/org/apache/hadoop/ipc/RPC.java | 12 ++-- .../hadoop/ipc/RefreshCallQueueProtocol.java | 2 +- .../apache/hadoop/ipc/RefreshRegistry.java | 1 + .../apache/hadoop/ipc/RemoteException.java | 5 +- .../org/apache/hadoop/ipc/RetryCache.java | 18 ++++- .../java/org/apache/hadoop/ipc/RpcEngine.java | 37 +++++++++-- .../org/apache/hadoop/ipc/RpcScheduler.java | 9 ++- .../java/org/apache/hadoop/ipc/Server.java | 66 ++++++++++++++----- .../org/apache/hadoop/util/TestShell.java | 2 +- 33 files changed, 296 insertions(+), 91 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java index a1ae4a7ab5f5d..9b85c8654f799 100755 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java @@ -317,7 +317,7 @@ private static boolean getRestrictParserDefault(Object resource) { private boolean loadDefaults = true; /** - * Configuration objects + * Configuration objects. */ private static final WeakHashMap REGISTRY = new WeakHashMap(); @@ -2208,7 +2208,7 @@ private static int convertToInt(String value, int defaultValue) { } /** - * Is the given value in the set of ranges + * Is the given value in the set of ranges. * @param value the value to check * @return is the value in the ranges? */ @@ -2265,7 +2265,7 @@ public Iterator iterator() { } /** - * Parse the given attribute as a set of integer ranges + * Parse the given attribute as a set of integer ranges. * @param name the attribute name * @param defaultValue the default value if it is not set * @return a new set of ranges from the configured value @@ -2798,7 +2798,7 @@ public void setClass(String name, Class theClass, Class xface) { * directory does not exist, an attempt is made to create it. * * @param dirsProp directory in which to locate the file. - * @param path file-path. 
+ * @param path file-path. * @return local file under the directory with the given path. * @throws IOException raised on errors performing I/O. */ @@ -3546,7 +3546,7 @@ private void loadProperty(Properties properties, String name, String attr, /** * Print a warning if a property with a given name already exists with a - * different value + * different value. */ private void checkForOverride(Properties properties, String name, String attr, String value) { String propertyValue = properties.getProperty(attr); @@ -3591,7 +3591,7 @@ public void writeXml(Writer out) throws IOException { * the configuration, this method throws an {@link IllegalArgumentException}. * * - * @param propertyName xml property name + * @param propertyName xml property name. * @param out the writer to write to. * @throws IOException raised on errors performing I/O. */ @@ -3942,7 +3942,7 @@ public void write(DataOutput out) throws IOException { } /** - * get keys matching the the regex + * get keys matching the the regex. * @param regex the regex to match against. * @return {@literal Map} with matching keys */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java index cf30ba5a2b9f3..e265eecd1b5d4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java @@ -59,9 +59,9 @@ public ReconfigurationException() { /** * Create a new instance of {@link ReconfigurationException}. - * @param property property name - * @param newVal new value - * @param oldVal old value + * @param property property name. + * @param newVal new value. + * @param oldVal old value. * @param cause original exception. */ public ReconfigurationException(String property, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java index 04de7aa97a34e..ca9ddb61566ef 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java @@ -42,7 +42,7 @@ public ReconfigurationTaskStatus(long startTime, long endTime, /** * Return true if * - A reconfiguration task has finished or - * - an active reconfiguration task is running + * - an active reconfiguration task is running. * @return true if startTime > 0; false if not. */ public boolean hasTask() { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java index b7a4132abce85..f21b1d81a6c9f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java @@ -3825,7 +3825,7 @@ public int hashCode() { } /** - * Fills up the rawKey object with the key returned by the Reader + * Fills up the rawKey object with the key returned by the Reader. * @return true if there is a key returned; false, otherwise * @throws IOException raised on errors performing I/O. 
*/ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java index f899daaa6cde4..de75810df0f70 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java @@ -133,15 +133,26 @@ public boolean seek(WritableComparable key) return super.seek(key); } - /** Read the next key in a set into key. Returns - * true if such a key exists and false when at the end of the set. */ + /** + * Read the next key in a set into key. + * + * @param key input key. + * @return Returns true if such a key exists + * and false when at the end of the set. + * @throws IOException raised on errors performing I/O. + */ public boolean next(WritableComparable key) throws IOException { return next(key, NullWritable.get()); } - /** Read the matching key from a set into key. - * Returns key, or null if no match exists. */ + /** + * Read the matching key from a set into key. + * + * @param key input key. + * @return Returns key, or null if no match exists. + * @throws IOException raised on errors performing I/O. + */ public WritableComparable get(WritableComparable key) throws IOException { if (seek(key)) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java index cb2905445910e..86fb1ff9a54a2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java @@ -503,6 +503,8 @@ private static String decode(ByteBuffer utf8, boolean replace) * Converts the provided String to bytes using the * UTF-8 encoding. If the input is malformed, * invalid chars are replaced by a default value. + * + * @param string input string. * @return ByteBuffer: bytes stores at ByteBuffer.array() * and length is ByteBuffer.limit() * @throws CharacterCodingException when a character @@ -575,6 +577,7 @@ public static String readString(DataInput in, int maxLength) * @param out input out. * @param s input s. * @throws IOException raised on errors performing I/O. + * @return a UTF8 encoded string to out. */ public static int writeString(DataOutput out, String s) throws IOException { ByteBuffer bytes = encode(s); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java index f537524c4b40a..7d3f680858ec3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java @@ -37,10 +37,13 @@ public VIntWritable() {} public VIntWritable(int value) { set(value); } - /** Set the value of this VIntWritable. */ + /** + * Set the value of this VIntWritable. + * @param value input value. + */ public void set(int value) { this.value = value; } - /** Return the value of this VIntWritable. */ + /** @return Return the value of this VIntWritable. 
*/ public int get() { return value; } @Override diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java index a9fac30605be6..a72a7fc2fd39a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java @@ -37,10 +37,13 @@ public VLongWritable() {} public VLongWritable(long value) { set(value); } - /** Set the value of this LongWritable. */ + /** + * Set the value of this LongWritable. + * @param value input value. + */ public void set(long value) { this.value = value; } - /** Return the value of this LongWritable. */ + /** @return Return the value of this LongWritable. */ public long get() { return value; } @Override diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java index c2db55520c918..421b8daeeae8e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java @@ -36,7 +36,7 @@ @InterfaceStability.Stable public abstract class VersionedWritable implements Writable { - /** Return the version number of the current implementation. */ + /** @return Return the version number of the current implementation. */ public abstract byte getVersion(); // javadoc from Writable diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactories.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactories.java index a8fdbfe98dfdc..9dd231e488780 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactories.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactories.java @@ -35,17 +35,31 @@ public class WritableFactories { private WritableFactories() {} // singleton - /** Define a factory for a class. */ + /** + * Define a factory for a class. + * @param c input c. + * @param factory input factory. + */ public static void setFactory(Class c, WritableFactory factory) { CLASS_TO_FACTORY.put(c, factory); } - /** Define a factory for a class. */ + /** + * Define a factory for a class. + * @param c input c. + * @return a factory for a class. + */ public static WritableFactory getFactory(Class c) { return CLASS_TO_FACTORY.get(c); } - /** Create a new instance of a class with a defined factory. */ + /** + * Create a new instance of a class with a defined factory. + * + * @param c input c. + * @param conf input configuration. + * @return a new instance of a class with a defined factory. + */ public static Writable newInstance(Class c, Configuration conf) { WritableFactory factory = WritableFactories.getFactory(c); if (factory != null) { @@ -59,7 +73,11 @@ public static Writable newInstance(Class c, Configuration co } } - /** Create a new instance of a class with a defined factory. */ + /** + * Create a new instance of a class with a defined factory. + * @param c input c. + * @return a new instance of a class with a defined factory. 
+ */ public static Writable newInstance(Class c) { return newInstance(c, null); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactory.java index bb8af974f051e..d9e9b543c7d96 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactory.java @@ -27,7 +27,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public interface WritableFactory { - /** Return a new instance. */ + /** @return Return a new instance. */ Writable newInstance(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableName.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableName.java index 43d396edad7ab..e5e74875225cc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableName.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableName.java @@ -45,19 +45,33 @@ public class WritableName { private WritableName() {} // no public ctor - /** Set the name that a class should be known as to something other than the - * class name. */ + /** + * Set the name that a class should be known as to something other than the + * class name. + * + * @param writableClass input writableClass. + * @param name input name. + */ public static synchronized void setName(Class writableClass, String name) { CLASS_TO_NAME.put(writableClass, name); NAME_TO_CLASS.put(name, writableClass); } - /** Add an alternate name for a class. */ + /** + * Add an alternate name for a class. + * @param writableClass input writableClass. + * @param name input name. + */ public static synchronized void addName(Class writableClass, String name) { NAME_TO_CLASS.put(name, writableClass); } - /** Return the name for a class. Default is {@link Class#getName()}. */ + /** + * Return the name for a class. + * Default is {@link Class#getName()}. + * @param writableClass input writableClass. + * @return name for a class. + */ public static synchronized String getName(Class writableClass) { String name = CLASS_TO_NAME.get(writableClass); if (name != null) @@ -65,7 +79,15 @@ public static synchronized String getName(Class writableClass) { return writableClass.getName(); } - /** Return the class for a name. Default is {@link Class#forName(String)}.*/ + /** + * Return the class for a name. + * Default is {@link Class#forName(String)}. + * + * @param name input name. + * @param conf input configuration. + * @return class for a name. + * @throws IOException raised on errors performing I/O. + */ public static synchronized Class getClass(String name, Configuration conf ) throws IOException { Class writableClass = NAME_TO_CLASS.get(name); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java index 2062fb6fe3705..1e0ee27e93d6b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java @@ -208,7 +208,10 @@ public static void displayByteArray(byte[] record){ /** * Make a copy of a writable object using serialization to a buffer. + * + * @param Generics Type T. 
* @param orig The object to copy + * @param conf input Configuration * @return The copied object */ public static T clone(T orig, Configuration conf) { @@ -223,10 +226,10 @@ public static T clone(T orig, Configuration conf) { } /** - * Make a copy of the writable object using serialization to a buffer + * Make a copy of the writable object using serialization to a buffer. * @param dst the object to copy from * @param src the object to copy into, which is destroyed - * @throws IOException + * @throws IOException raised on errors performing I/O. * @deprecated use ReflectionUtils.cloneInto instead. */ @Deprecated @@ -248,7 +251,7 @@ public static void cloneInto(Writable dst, Writable src) throws IOException { * * @param stream Binary output stream * @param i Integer to be serialized - * @throws java.io.IOException + * @throws IOException raised on errors performing I/O. */ public static void writeVInt(DataOutput stream, int i) throws IOException { writeVLong(stream, i); @@ -268,7 +271,7 @@ public static void writeVInt(DataOutput stream, int i) throws IOException { * * @param stream Binary output stream * @param i Long to be serialized - * @throws java.io.IOException + * @throws IOException raised on errors performing I/O. */ public static void writeVLong(DataOutput stream, long i) throws IOException { if (i >= -112 && i <= 127) { @@ -303,7 +306,7 @@ public static void writeVLong(DataOutput stream, long i) throws IOException { /** * Reads a zero-compressed encoded long from input stream and returns it. * @param stream Binary input stream - * @throws java.io.IOException + * @throws IOException raised on errors performing I/O. * @return deserialized long from stream. */ public static long readVLong(DataInput stream) throws IOException { @@ -324,7 +327,7 @@ public static long readVLong(DataInput stream) throws IOException { /** * Reads a zero-compressed encoded integer from input stream and returns it. * @param stream Binary input stream - * @throws java.io.IOException + * @throws IOException raised on errors performing I/O. * @return deserialized integer from stream. */ public static int readVInt(DataInput stream) throws IOException { @@ -342,8 +345,10 @@ public static int readVInt(DataInput stream) throws IOException { * inclusive. * * @param stream Binary input stream - * @throws java.io.IOException - * @return deserialized integer from stream + * @param lower input lower. + * @param upper input upper. + * @throws IOException raised on errors performing I/O. + * @return deserialized integer from stream. */ public static int readVIntInRange(DataInput stream, int lower, int upper) throws IOException { @@ -387,7 +392,8 @@ public static int decodeVIntSize(byte value) { } /** - * Get the encoded length if an integer is stored in a variable-length format + * Get the encoded length if an integer is stored in a variable-length format. + * @param i input i. * @return the encoded length */ public static int getVIntSize(long i) { @@ -410,7 +416,7 @@ public static int getVIntSize(long i) { * @param in DataInput to read from * @param enumType Class type of Enum * @return Enum represented by String read from DataInput - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static > T readEnum(DataInput in, Class enumType) throws IOException{ @@ -420,7 +426,7 @@ public static > T readEnum(DataInput in, Class enumType) * writes String value of enum to DataOutput. 
* @param out DataOutput stream * @param enumVal enum value - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public static void writeEnum(DataOutput out, Enum enumVal) throws IOException{ @@ -446,7 +452,11 @@ public static void skipFully(DataInput in, int len) throws IOException { } } - /** Convert writables to a byte array */ + /** + * Convert writables to a byte array. + * @param writables input writables. + * @return the byte array. + */ public static byte[] toByteArray(Writable... writables) { final DataOutputBuffer out = new DataOutputBuffer(); try { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AlignmentContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AlignmentContext.java index fbf825bcb91b1..3d309235fe891 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AlignmentContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AlignmentContext.java @@ -71,7 +71,7 @@ public interface AlignmentContext { * misaligned with the client state. * See implementation for more details. * @return state id required for the server to execute the call. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ long receiveRequestState(RpcRequestHeaderProto header, long threshold) throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java index 6cc2540c174d3..fa6f34adaf3bd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java @@ -405,6 +405,12 @@ public static int[] getDefaultQueueCapacityWeights(int priorityLevels) { /** * Replaces active queue with the newly requested one and transfers * all calls to the newQ before returning. + * + * @param schedulerClass input schedulerClass. + * @param queueClassToUse input queueClassToUse. + * @param maxSize input maxSize. + * @param ns input ns. + * @param conf input configuration. */ public synchronized void swapQueue( Class schedulerClass, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java index 49432aff11789..2fe8aca85ed9a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java @@ -112,7 +112,12 @@ protected Boolean initialValue() { return (AsyncGet) ASYNC_RPC_RESPONSE.get(); } - /** Set call id and retry count for the next call. */ + /** + * Set call id and retry count for the next call. + * @param cid input cid. + * @param rc input rc. + * @param externalHandler input externalHandler. + */ public static void setCallIdAndRetryCount(int cid, int rc, Object externalHandler) { Preconditions.checkArgument(cid != RpcConstants.INVALID_CALL_ID); @@ -1349,8 +1354,14 @@ private void cleanupCalls() { } } - /** Construct an IPC client whose values are of the given {@link Writable} - * class. */ + /** + * Construct an IPC client whose values are of the given {@link Writable} + * class. + * + * @param valueClass input valueClass. + * @param conf input configuration. + * @param factory input factory.
+ */ public Client(Class valueClass, Configuration conf, SocketFactory factory) { this.valueClass = valueClass; @@ -1372,9 +1383,9 @@ public Client(Class valueClass, Configuration conf, } /** - * Construct an IPC client with the default SocketFactory - * @param valueClass - * @param conf + * Construct an IPC client with the default SocketFactory. + * @param valueClass input valueClass. + * @param conf input Configuration. */ public Client(Class valueClass, Configuration conf) { this(valueClass, conf, NetUtils.getDefaultSocketFactory(conf)); @@ -1432,7 +1443,7 @@ public void stop() { * Make a call, passing rpcRequest, to the IPC server defined by * remoteId, returning the rpc respond. * - * @param rpcKind + * @param rpcKind - input rpcKind. * @param rpcRequest - contains serialized method and method parameters * @param remoteId - the target rpc server * @param fallbackToSimpleAuth - set to true or false during this method to @@ -1440,6 +1451,7 @@ public void stop() { * @return the rpc response * Throws exceptions if there are network problems or if the remote code * threw an exception. + * @throws IOException raised on errors performing I/O. */ public Writable call(RPC.RpcKind rpcKind, Writable rpcRequest, ConnectionId remoteId, AtomicBoolean fallbackToSimpleAuth) @@ -1760,7 +1772,7 @@ public int getMaxRetriesOnSasl() { return maxRetriesOnSasl; } - /** max connection retries on socket time outs */ + /** @return max connection retries on socket time outs */ public int getMaxRetriesOnSocketTimeouts() { return maxRetriesOnSocketTimeouts; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java index b7257c8b2a69c..c5d0183dba25d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java @@ -93,6 +93,8 @@ public synchronized Client getClient(Configuration conf, SocketFactory factory) /** * Stop a RPC client connection * A RPC client is closed only when its reference count becomes zero. + * + * @param client input client. */ public void stopClient(Client client) { if (Client.LOG.isDebugEnabled()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientId.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientId.java index 152e062392fcc..bab1de753f265 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientId.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientId.java @@ -35,7 +35,7 @@ public class ClientId { private static final int shiftWidth = 8; /** - * Return clientId as byte[] + * @return Return clientId as byte[]. */ public static byte[] getClientId() { UUID uuid = UUID.randomUUID(); @@ -45,7 +45,10 @@ public static byte[] getClientId() { return buf.array(); } - /** Convert a clientId byte[] to string */ + /** + * @return Convert a clientId byte[] to string. + * @param clientId input clientId. + */ public static String toString(byte[] clientId) { // clientId can be null or an empty array if (clientId == null || clientId.length == 0) { @@ -74,7 +77,10 @@ public static long getLsb(byte[] clientId) { return lsb; } - /** Convert from clientId string byte[] representation of clientId */ + /** + * @return Convert from clientId string byte[] representation of clientId. 
+ * @param id input id. + */ public static byte[] toBytes(String id) { if (id == null || "".equals(id)) { return new byte[0]; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/GenericRefreshProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/GenericRefreshProtocol.java index bfa055bcb0997..c12f2abc3a534 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/GenericRefreshProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/GenericRefreshProtocol.java @@ -41,7 +41,11 @@ public interface GenericRefreshProtocol { /** * Refresh the resource based on identity passed in. - * @throws IOException + * + * @param identifier input identifier. + * @param args input args + * @throws IOException raised on errors performing I/O. + * @return Collection RefreshResponse */ @Idempotent Collection refresh(String identifier, String[] args) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java index 1e110b9011313..9ed0640c8dcfa 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java @@ -85,7 +85,7 @@ public static IOException getRemoteException( /** * Get the ByteString for frequently used fixed and small set strings. * @param key string - * @return + * @return the ByteString for frequently used fixed and small set strings. */ public static ByteString getFixedByteString(Text key) { ByteString value = FIXED_BYTESTRING_CACHE.get(key); @@ -99,7 +99,7 @@ public static ByteString getFixedByteString(Text key) { /** * Get the ByteString for frequently used fixed and small set strings. * @param key string - * @return + * @return ByteString for frequently used fixed and small set strings. */ public static ByteString getFixedByteString(String key) { ByteString value = FIXED_BYTESTRING_CACHE.get(key); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java index c4457a653e35f..f5f970462102d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java @@ -144,6 +144,10 @@ protected Invoker(Class protocol, InetSocketAddress addr, /** * This constructor takes a connectionId, instead of creating a new one. + * @param protocol input protocol. + * @param connId input connId. + * @param conf input Configuration. + * @param factory input factory. */ protected Invoker(Class protocol, Client.ConnectionId connId, Configuration conf, SocketFactory factory) { @@ -423,6 +427,10 @@ public static ProtobufRpcEngineCallback registerForDeferredResponse() { * @param portRangeConfig A config parameter that can be used to restrict * the range of ports used when port is 0 (an ephemeral port) * @param alignmentContext provides server state info on client responses + * @param secretManager input secretManager + * @param queueSizePerHandler input queueSizePerHandler. + * @param numReaders input numReaders. + * @throws IOException raised on errors performing I/O. 
*/ public Server(Class protocolClass, Object protocolImpl, Configuration conf, String bindAddress, int port, int numHandlers, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java index e1ee374282897..3a8c6275820c4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java @@ -151,6 +151,11 @@ protected Invoker(Class protocol, InetSocketAddress addr, /** * This constructor takes a connectionId, instead of creating a new one. + * + * @param protocol input protocol. + * @param connId input connId. + * @param conf input Configuration. + * @param factory input factory. */ protected Invoker(Class protocol, Client.ConnectionId connId, Configuration conf, SocketFactory factory) { @@ -458,6 +463,7 @@ public static ProtobufRpcEngineCallback2 registerForDeferredResponse2() { * @param portRangeConfig A config parameter that can be used to restrict * the range of ports used when port is 0 (an ephemeral port) * @param alignmentContext provides server state info on client responses + * @throws IOException raised on errors performing I/O. */ public Server(Class protocolClass, Object protocolImpl, Configuration conf, String bindAddress, int port, int numHandlers, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInterface.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInterface.java index 29c07ac29cb87..f23c05936a356 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInterface.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInterface.java @@ -36,7 +36,7 @@ public interface ProtocolMetaInterface { * It is assumed that all method names are unique for a protocol. * @param methodName The name of the method * @return true if method is supported, otherwise false. - * @throws IOException + * @throws IOException raised on errors performing I/O. */ public boolean isMethodSupported(String methodName) throws IOException; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java index cc66958d14e08..49029f97b3d29 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java @@ -85,11 +85,12 @@ public T getProxy() { } /** - * Check if a method is supported by the server or not + * Check if a method is supported by the server or not. * * @param methodName a method's name in String format * @param parameterTypes a method's parameter types * @return true if the method is supported by the server + * @throws IOException raised on errors performing I/O. */ public synchronized boolean isMethodSupported(String methodName, Class... 
 parameterTypes)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
index 8c5287b30735d..f8034e7a16c95 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
@@ -395,8 +395,9 @@ public static <T> T waitForProxy(Class<T> protocol,
 /**
 * Get a protocol proxy that contains a proxy connection to a remote server
- * and a set of methods that are supported by the server
- *
+ * and a set of methods that are supported by the server.
+ *
+ * @param <T> Generics Type.
 * @param protocol protocol class
 * @param clientVersion client version
 * @param addr remote address
@@ -405,7 +406,7 @@ public static <T> T waitForProxy(Class<T> protocol,
 * @param connectionRetryPolicy input connectionRetryPolicy
 * @param timeout time in milliseconds before giving up
 * @return the proxy
- * @throws IOException if the far end through a RemoteException
+ * @throws IOException if the far end throws a RemoteException.
 */
 public static <T> ProtocolProxy<T> waitForProtocolProxy(Class<T> protocol,
 long clientVersion,
@@ -461,6 +462,7 @@ public static <T> ProtocolProxy<T> waitForProtocolProxy(Class<T> protocol,
 * @param conf input Configuration.
 * @param factory input factory.
 * @throws IOException raised on errors performing I/O.
+ * @return the proxy.
 */
 public static <T> T getProxy(Class<T> protocol,
 long clientVersion,
@@ -519,7 +521,8 @@ public static <T> T getProxy(Class<T> protocol,
 /**
 * Get a protocol proxy that contains a proxy connection to a remote server
 * and a set of methods that are supported by the server
- *
+ *
+ * @param <T> Generics Type T.
 * @param protocol protocol class
 * @param clientVersion client version
 * @param addr remote address
@@ -1149,6 +1152,7 @@ private void initProtocolMetaInfo(Configuration conf) {
 /**
 * Add a protocol to the existing server.
+ * @param rpcKind - input rpcKind.
 * @param protocolClass - the protocol class
 * @param protocolImpl - the impl of the protocol that will be called
 * @return the server (for convenience)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshCallQueueProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshCallQueueProtocol.java
index 553f9a00d4cca..b1aa0197040a2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshCallQueueProtocol.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshCallQueueProtocol.java
@@ -41,7 +41,7 @@ public interface RefreshCallQueueProtocol {
 /**
 * Refresh the callqueue.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 */
 @Idempotent
 void refreshCallQueue() throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java
index 95f1323410fe0..0cc0b8ba3d8b8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java
@@ -72,6 +72,7 @@ public synchronized void register(String identifier, RefreshHandler handler) {
 /**
 * Remove the registered object for a given identity.
 * @param identifier the resource to unregister
+ * @param handler input handler.
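+ *        (a sketch with illustrative names: {@code
+ *        RefreshRegistry.defaultRegistry().unregister("myResource", handler)};
+ *        the identifier and handler here are hypothetical).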
 * @return the true if removed
 */
 public synchronized boolean unregister(String identifier, RefreshHandler handler) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java
index f1142d35e72c2..da08c3d152e61 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java
@@ -124,8 +124,9 @@ private IOException instantiateException(Class<? extends IOException> cls)
 }
 /**
- * Create RemoteException from attributes
- * @param attrs may not be null
+ * Create RemoteException from attributes.
+ * @param attrs may not be null.
+ * @return RemoteException.
 */
 public static RemoteException valueOf(Attributes attrs) {
 return new RemoteException(attrs.getValue("class"),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
index 5bcbf6ba58daf..b874c4a1d9693 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
@@ -233,7 +233,7 @@ public RetryCacheMetrics getMetricsForTests() {
 }
 /**
- * This method returns cache name for metrics.
+ * @return This method returns cache name for metrics.
 */
 public String getCacheName() {
 return cacheName;
@@ -302,6 +302,9 @@ private CacheEntry waitForCompletion(CacheEntry newEntry) {
 /**
 * Add a new cache entry into the retry cache. The cache entry consists of
 * clientId and callId extracted from editlog.
+ *
+ * @param clientId input clientId.
+ * @param callId input callId.
 */
 public void addCacheEntry(byte[] clientId, int callId) {
 CacheEntry newEntry = new CacheEntry(clientId, callId, System.nanoTime()
@@ -340,7 +343,11 @@ private static CacheEntryWithPayload newEntry(Object payload,
 payload, System.nanoTime() + expirationTime);
 }
- /** Static method that provides null check for retryCache */
+ /**
+ * Static method that provides null check for retryCache.
+ * @param cache input Cache.
+ * @return CacheEntry.
+ */
 public static CacheEntry waitForCompletion(RetryCache cache) {
 if (skipRetryCache()) {
 return null;
@@ -349,7 +356,12 @@ public static CacheEntry waitForCompletion(RetryCache cache) {
 .waitForCompletion(newEntry(cache.expirationTime)) : null;
 }
- /** Static method that provides null check for retryCache */
+ /**
+ * Static method that provides null check for retryCache.
+ * @param cache input cache.
+ * @param payload input payload.
+ * @return CacheEntryWithPayload.
+ */
 public static CacheEntryWithPayload waitForCompletion(RetryCache cache,
 Object payload) {
 if (skipRetryCache()) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
index 0f5769e705028..d1564456f325d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
@@ -36,15 +36,44 @@
 @InterfaceStability.Evolving
 public interface RpcEngine {
- /** Construct a client-side proxy object.
- * @param <T> */
+ /**
+ * Construct a client-side proxy object.
+ *
+ * @param <T> Generics Type T.
+ * @param protocol input protocol.
+ * @param clientVersion input clientVersion.
+ * @param addr input addr.
+ * @param ticket input ticket.
+ * @param conf input Configuration.
+ * @param factory input factory.
+ * @param rpcTimeout input rpcTimeout.
+ * @param connectionRetryPolicy input connectionRetryPolicy.
+ * @return ProtocolProxy.
+ * @throws IOException raised on errors performing I/O.
+ */
 <T> ProtocolProxy<T> getProxy(Class<T> protocol,
 long clientVersion, InetSocketAddress addr,
 UserGroupInformation ticket, Configuration conf,
 SocketFactory factory, int rpcTimeout,
 RetryPolicy connectionRetryPolicy) throws IOException;
- /** Construct a client-side proxy object. */
+ /**
+ * Construct a client-side proxy object.
+ *
+ * @param <T> Generics Type T.
+ * @param protocol input protocol.
+ * @param clientVersion input clientVersion.
+ * @param addr input addr.
+ * @param ticket input ticket.
+ * @param conf input Configuration.
+ * @param factory input factory.
+ * @param rpcTimeout input rpcTimeout.
+ * @param connectionRetryPolicy input connectionRetryPolicy.
+ * @param fallbackToSimpleAuth input fallbackToSimpleAuth.
+ * @param alignmentContext input alignmentContext.
+ * @return ProtocolProxy.
+ * @throws IOException raised on errors performing I/O.
+ */
 <T> ProtocolProxy<T> getProxy(Class<T> protocol,
 long clientVersion, InetSocketAddress addr,
 UserGroupInformation ticket, Configuration conf,
@@ -87,7 +116,7 @@ RPC.Server getServer(Class<?> protocol, Object instance, String bindAddress,
 * @param conf, Configuration.
 * @param factory, Socket factory.
 * @return Proxy object.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
 */
 ProtocolProxy<ProtocolMetaInfoPB> getProtocolMetaInfoProxy(
 ConnectionId connId, Configuration conf, SocketFactory factory)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcScheduler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcScheduler.java
index 8c423b8e5e1bd..bffe5f2d257fc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcScheduler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcScheduler.java
@@ -26,7 +26,8 @@
 */
 public interface RpcScheduler {
 /**
- * Returns priority level greater than zero as a hint for scheduling.
+ * @param obj input obj.
+ * @return Returns priority level greater than zero as a hint for scheduling.
 */
 int getPriorityLevel(Schedulable obj);
@@ -37,6 +38,12 @@ public interface RpcScheduler {
 * implementations. It will not be called by any Hadoop code, and should not
 * be implemented by new implementations.
 *
+ * @param name input name.
+ * @param priorityLevel input priorityLevel.
+ * @param queueTime input queueTime.
+ * @param processingTime input processingTime.
+ * @throws UnsupportedOperationException
+ * the requested operation is not supported.
 * @deprecated Use
 * {@link #addResponseTime(String, Schedulable, ProcessingDetails)} instead.
 */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index e205d53d7a9d8..fd1d2840c19f6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -266,10 +266,10 @@ static class RpcKindMapValue {
 * Register a RPC kind and the class to deserialize the rpc request.
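 *
 * A sketch of such a registration, modeled on the protobuf engine's own
 * static initializer (the class names below follow that pattern and are
 * illustrative):
 * <pre>
 * static {
 *   org.apache.hadoop.ipc.Server.registerProtocolEngine(
 *       RPC.RpcKind.RPC_PROTOCOL_BUFFER,
 *       ProtobufRpcEngine2.RpcProtobufRequest.class,
 *       new ProtobufRpcEngine2.Server.ProtoBufRpcInvoker());
 * }
 * </pre>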
* * Called by static initializers of rpcKind Engines - * @param rpcKind + * @param rpcKind - input rpcKind. * @param rpcRequestWrapperClass - this class is used to deserialze the * the rpc request. - * @param rpcInvoker - use to process the calls on SS. + * @param rpcInvoker - use to process the calls on SS. */ public static void registerProtocolEngine(RPC.RpcKind rpcKind, @@ -328,7 +328,7 @@ static Class getProtocolClass(String protocolName, Configuration conf) return protocol; } - /** Returns the server instance called under or null. May be called under + /** @return Returns the server instance called under or null. May be called under * {@link #call(Writable, long)} implementations, and under {@link Writable} * methods of paramters and return values. Permits applications to access * the server context.*/ @@ -341,7 +341,7 @@ public static Server get() { */ private static final ThreadLocal CurCall = new ThreadLocal(); - /** Get the current call */ + /** @return Get the current call. */ @VisibleForTesting public static ThreadLocal getCurCall() { return CurCall; @@ -368,7 +368,8 @@ public static int getCallRetryCount() { return call != null ? call.retryCount : RpcConstants.INVALID_RETRY_COUNT; } - /** Returns the remote side ip address when invoked inside an RPC + /** + * @return Returns the remote side ip address when invoked inside an RPC * Returns null in case of an error. */ public static InetAddress getRemoteIp() { @@ -377,7 +378,7 @@ public static InetAddress getRemoteIp() { } /** - * Returns the remote side port when invoked inside an RPC + * @return Returns the remote side port when invoked inside an RPC * Returns 0 in case of an error. */ public static int getRemotePort() { @@ -412,14 +413,14 @@ public static String getAuxiliaryPortEstablishedQOP() { } /** - * Returns the clientId from the current RPC request + * @return Returns the clientId from the current RPC request. */ public static byte[] getClientId() { Call call = CurCall.get(); return call != null ? call.clientId : RpcConstants.DUMMY_CLIENT_ID; } - /** Returns remote address as a string when invoked inside an RPC. + /** @return Returns remote address as a string when invoked inside an RPC. * Returns null in case of an error. */ public static String getRemoteAddress() { @@ -441,14 +442,14 @@ public static String getProtocol() { return (call != null) ? call.getProtocol() : null; } - /** Return true if the invocation was through an RPC. + /** @return Return true if the invocation was through an RPC. */ public static boolean isRpcInvocation() { return CurCall.get() != null; } /** - * Return the priority level assigned by call queue to an RPC + * @return Return the priority level assigned by call queue to an RPC * Returns 0 in case no priority is assigned. */ public static int getPriorityLevel() { @@ -516,7 +517,7 @@ public int getNumInProcessHandler() { /** * Sets slow RPC flag. - * @param logSlowRPCFlag + * @param logSlowRPCFlag input logSlowRPCFlag. */ @VisibleForTesting protected void setLogSlowRPC(boolean logSlowRPCFlag) { @@ -707,6 +708,9 @@ Connection[] getConnections() { /** * Refresh the service authorization ACL for the service handled by this server. + * + * @param conf input Configuration. + * @param provider input PolicyProvider. 
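+ *        For example, sketched with a hypothetical provider:
+ *        {@code server.refreshServiceAcl(conf, new MyPolicyProvider())}.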
 */
 public void refreshServiceAcl(Configuration conf, PolicyProvider provider) {
 serviceAuthorizationManager.refresh(conf, provider);
@@ -715,6 +719,9 @@ public void refreshServiceAcl(Configuration conf, PolicyProvider provider) {
 /**
 * Refresh the service authorization ACL for the service handled by this server
 * using the specified Configuration.
+ *
+ * @param conf input Configuration.
+ * @param provider input provider.
 */
 @Private
 public void refreshServiceAclWithLoadedConfiguration(Configuration conf,
@@ -3198,6 +3205,18 @@ protected Server(String bindAddress, int port,
 * Class, RPC.RpcInvoker)}
 * This parameter has been retained for compatibility with existing tests
 * and usage.
+ *
+ * @param bindAddress input bindAddress.
+ * @param port input port.
+ * @param rpcRequestClass input rpcRequestClass.
+ * @param handlerCount input handlerCount.
+ * @param numReaders input numReaders.
+ * @param queueSizePerHandler input queueSizePerHandler.
+ * @param conf input Configuration.
+ * @param serverName input serverName.
+ * @param secretManager input secretManager.
+ * @param portRangeConfig input portRangeConfig.
+ * @throws IOException raised on errors performing I/O.
 */
 @SuppressWarnings("unchecked")
 protected Server(String bindAddress, int port,
@@ -3530,7 +3549,10 @@ Configuration getConf() {
 return conf;
 }
- /** Sets the socket buffer size used for responding to RPCs */
+ /**
+ * Sets the socket buffer size used for responding to RPCs.
+ * @param size input size.
+ */
 public void setSocketSendBufSize(int size) { this.socketSendBufferSize = size; }
 public void setTracer(Tracer t) {
@@ -3580,9 +3602,11 @@ public synchronized void stop() {
 this.rpcDetailedMetrics.shutdown();
 }
- /** Wait for the server to be stopped.
+ /**
+ * Wait for the server to be stopped.
 * Does not wait for all subthreads to finish.
 * See {@link #stop()}.
+ * @throws InterruptedException if the thread is interrupted.
 */
 public synchronized void join() throws InterruptedException {
 while (running) {
@@ -3619,13 +3643,25 @@ public synchronized Set<InetSocketAddress> getAuxiliaryListenerAddresses() {
 * Called for each call.
 * @deprecated Use {@link #call(RPC.RpcKind, String,
 * Writable, long)} instead
+ * @param param input param.
+ * @param receiveTime input receiveTime.
+ * @return the RPC response Writable.
+ * @throws Exception if any error occurs.
 */
 @Deprecated
 public Writable call(Writable param, long receiveTime) throws Exception {
 return call(RPC.RpcKind.RPC_BUILTIN, null, param, receiveTime);
 }
- /** Called for each call. */
+ /**
+ * Called for each call.
+ * @param rpcKind input rpcKind.
+ * @param protocol input protocol.
+ * @param param input param.
+ * @param receiveTime input receiveTime.
+ * @return the RPC response Writable.
+ * @throws Exception if any error occurs.
+ */
 public abstract Writable call(RPC.RpcKind rpcKind, String protocol,
 Writable param, long receiveTime) throws Exception;
@@ -3673,7 +3709,7 @@ public int getNumOpenConnections() {
 }
 /**
- * Get the NumOpenConnections/User.
+ * @return Get the NumOpenConnections/User.
*/ public String getNumOpenConnectionsPerUser() { ObjectMapper mapper = new ObjectMapper(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java index 6b7154b83b523..68e70ebb79a5c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java @@ -51,7 +51,7 @@ public class TestShell extends Assert { /** - * Set the timeout for every test + * Set the timeout for every test. */ @Rule public Timeout testTimeout = new Timeout(30000, TimeUnit.MILLISECONDS); From 00bc8ee84f73f770bc3ad5ac95f0e6eb78688721 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 16 May 2022 19:00:25 -0700 Subject: [PATCH 50/53] HADOOP-18229. Fix Check Style. --- .../apache/hadoop/conf/ConfigRedactor.java | 4 +- .../org/apache/hadoop/conf/Configuration.java | 24 +- .../org/apache/hadoop/conf/Configured.java | 2 +- .../apache/hadoop/conf/Reconfigurable.java | 8 +- .../hadoop/conf/ReconfigurableBase.java | 4 +- .../hadoop/conf/ReconfigurationException.java | 12 +- .../org/apache/hadoop/crypto/CryptoCodec.java | 6 +- .../hadoop/crypto/CryptoStreamUtils.java | 16 +- .../apache/hadoop/crypto/key/KeyProvider.java | 10 +- .../key/KeyProviderCryptoExtension.java | 8 +- .../hadoop/crypto/key/kms/ValueQueue.java | 6 +- .../apache/hadoop/fs/AbstractFileSystem.java | 372 +++++++++--------- .../org/apache/hadoop/fs/AvroFSInput.java | 10 +- .../hadoop/fs/BatchedRemoteIterator.java | 6 +- .../org/apache/hadoop/fs/BlockLocation.java | 86 ++-- .../org/apache/hadoop/fs/ByteBufferUtil.java | 8 +- .../apache/hadoop/fs/CachingGetSpaceUsed.java | 8 +- .../apache/hadoop/fs/ChecksumFileSystem.java | 22 +- .../java/org/apache/hadoop/fs/ChecksumFs.java | 18 +- .../hadoop/fs/CompositeCrcFileChecksum.java | 6 +- .../org/apache/hadoop/fs/ContentSummary.java | 22 +- .../java/org/apache/hadoop/fs/CreateFlag.java | 2 +- .../main/java/org/apache/hadoop/fs/DF.java | 2 +- .../hadoop/fs/DelegationTokenRenewer.java | 16 +- .../java/org/apache/hadoop/fs/FSBuilder.java | 92 +++-- .../hadoop/fs/FSDataOutputStreamBuilder.java | 41 +- .../org/apache/hadoop/fs/FSInputChecker.java | 8 +- .../org/apache/hadoop/fs/FSOutputSummer.java | 10 +- .../org/apache/hadoop/fs/FileChecksum.java | 10 +- .../org/apache/hadoop/fs/FileContext.java | 122 +++--- .../apache/hadoop/fs/FileEncryptionInfo.java | 4 +- .../java/org/apache/hadoop/fs/FileStatus.java | 20 +- .../java/org/apache/hadoop/fs/FileSystem.java | 212 +++++----- .../hadoop/fs/FileSystemLinkResolver.java | 2 +- .../java/org/apache/hadoop/fs/FileUtil.java | 73 ++-- 35 files changed, 639 insertions(+), 633 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java index 3a6e30874bc24..881a2ce811bbe 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java @@ -57,8 +57,8 @@ public ConfigRedactor(Configuration conf) { * Given a key / value pair, decides whether or not to redact and returns * either the original value or text indicating it has been redacted. 
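 *
 * A short sketch (the property name and values are illustrative only):
 * <pre>
 * ConfigRedactor redactor = new ConfigRedactor(conf);
 * String shown = redactor.redact("fs.s3a.secret.key", "my-secret");
 * // shown is the redaction marker rather than the raw secret
 * </pre>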
 *
- * @param key param key
- * @param value param value, will return if conditions permit
+ * @param key param key.
+ * @param value param value, will return if conditions permit.
 * @return Original value, or text indicating it has been redacted
 */
 public String redact(String key, String value) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 9b85c8654f799..5f720841d7689 100755
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -1908,7 +1908,7 @@ public long getTimeDuration(String name, String defaultValue,
 * @param name Property name
 * @param vStr The string value with time unit suffix to be converted.
 * @param unit Unit to convert the stored property, if it exists.
- * @return time duration in given time unit
+ * @return time duration in given time unit.
 */
 public long getTimeDurationHelper(String name, String vStr, TimeUnit unit) {
 return getTimeDurationHelper(name, vStr, unit, unit);
@@ -1923,7 +1923,7 @@ public long getTimeDurationHelper(String name, String vStr, TimeUnit unit) {
 * @param vStr The string value with time unit suffix to be converted.
 * @param defaultUnit Unit to convert the stored property, if it exists.
 * @param returnUnit Unit for the returned value.
- * @return time duration in given time unit
+ * @return time duration in given time unit.
 */
 private long getTimeDurationHelper(String name, String vStr,
 TimeUnit defaultUnit, TimeUnit returnUnit) {
@@ -2484,7 +2484,7 @@ public char[] getPasswordFromCredentialProviders(String name)
 /**
 * Fallback to clear text passwords in configuration.
- * @param name the property name
+ * @param name the property name.
 * @return clear text password or null
 */
 protected char[] getPasswordFromConfig(String name) {
@@ -2550,7 +2550,7 @@ public InetSocketAddress getSocketAddr(
 * Set the socket address for the name property as
 * a host:port.
 * @param name property name.
- * @param addr inetSocketAddress addr
+ * @param addr inetSocketAddress addr.
 */
 public void setSocketAddr(String name, InetSocketAddress addr) {
 set(name, NetUtils.getHostPortString(addr));
@@ -2728,7 +2728,7 @@ public Class<?> getClass(String name, Class<?> defaultValue) {
 * @param name the conf key name.
 * @param defaultValue default value.
 * @param xface the interface implemented by the named class.
- * @param Interface class type
+ * @param <U> Interface class type.
 * @return property value as a Class,
 * or defaultValue.
 */
@@ -2758,7 +2758,7 @@ else if (theClass != null)
 * @param name the property name.
 * @param xface the interface implemented by the classes named by
 * name.
- * @param Interface class type
+ * @param <U> Interface class type.
 * @return a List of objects implementing xface.
 */
 @SuppressWarnings("unchecked")
@@ -2831,7 +2831,7 @@ public Path getLocalPath(String dirsProp, String path)
 * directory does not exist, an attempt is made to create it.
 *
 * @param dirsProp directory in which to locate the file.
- * @param path file-path.
+ * @param path file-path.
 * @return local file under the directory with the given path.
 * @throws IOException raised on errors performing I/O.
 */
@@ -3445,7 +3445,7 @@ void parseNext() throws IOException, XMLStreamException {
 /**
 * Add tags defined in HADOOP_TAGS_SYSTEM, HADOOP_TAGS_CUSTOM.
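 *
 * A sketch of how this is typically fed (the tag values are illustrative):
 * <pre>
 * Properties props = new Properties();
 * props.setProperty("hadoop.tags.custom", "MYTAG1,MYTAG2");
 * conf.addTags(props);
 * </pre>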
- * @param prop properties
+ * @param prop properties.
 */
 public void addTags(Properties prop) {
 // Get all system tags
@@ -3906,8 +3906,8 @@ synchronized boolean getQuietMode() {
 }
 /** For debugging. List non-default properties to the terminal and exit.
- * @param args the argument to be parsed
- * @throws Exception exception
+ * @param args the argument to be parsed.
+ * @throws Exception exception.
 */
 public static void main(String[] args) throws Exception {
 new Configuration().writeXml(System.out);
@@ -3988,8 +3988,8 @@ public static void dumpDeprecatedKeys() {
 /**
 * Returns whether or not a deprecated name has been warned. If the name is not
 * deprecated then always return false
- * @param name proprties
- * @return true if name is a warned deprecation
+ * @param name the property name.
+ * @return true if name is a warned deprecation.
 */
 public static boolean hasWarnedDeprecation(String name) {
 DeprecationContext deprecations = deprecationContext.get();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
index 4889dd5311826..77a7117d19665 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
@@ -34,7 +34,7 @@ public Configured() { }
 /** Construct a Configured.
- * @param conf the Configuration object
+ * @param conf the Configuration object.
 */
 public Configured(Configuration conf) {
 setConf(conf);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
index c03193968ce2f..915faf4c237ad 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
@@ -33,8 +33,8 @@ public interface Reconfigurable extends Configurable {
 * (or null if it was not previously set). If newVal is null, set the property
 * to its default value;
 *
- * @param property property name
- * @param newVal new value
+ * @param property property name.
+ * @param newVal new value.
 * @throws ReconfigurationException if there was an error applying newVal.
 * If the property cannot be changed, throw a
 * {@link ReconfigurationException}.
@@ -48,14 +48,14 @@ void reconfigureProperty(String property, String newVal)
 * If isPropertyReconfigurable returns true for a property,
 * then changeConf should not throw an exception when changing
 * this property.
- * @param property property name
+ * @param property property name.
 * @return true if property reconfigurable; false if not.
 */
 boolean isPropertyReconfigurable(String property);
 /**
 * Return all the properties that can be changed at run time.
- * @return reconfigurable propertys
+ * @return reconfigurable properties.
 */
 Collection<String> getReconfigurableProperties();
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
index c3706dec4b7f8..1c451ca6d30b9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
@@ -79,7 +79,7 @@ public ReconfigurableBase() {
 /**
 * Construct a ReconfigurableBase with the {@link Configuration}
 * conf.
- * @param conf configuration
+ * @param conf configuration.
 */
 public ReconfigurableBase(Configuration conf) {
 super((conf == null) ? new Configuration() : conf);
@@ -92,7 +92,7 @@ public void setReconfigurationUtil(ReconfigurationUtil ru) {
 /**
 * Create a new configuration.
- * @return configuration
+ * @return configuration.
 */
 protected abstract Configuration getNewConf();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
index e265eecd1b5d4..b22af76c9eb6b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
@@ -75,9 +75,9 @@ public ReconfigurationException(String property,
 /**
 * Create a new instance of {@link ReconfigurationException}.
- * @param property property name
- * @param newVal new value
- * @param oldVal old value
+ * @param property property name.
+ * @param newVal new value.
+ * @param oldVal old value.
 */
 public ReconfigurationException(String property,
 String newVal, String oldVal) {
@@ -89,7 +89,7 @@ public ReconfigurationException(String property,
 /**
 * Get property that cannot be changed.
- * @return property info
+ * @return property info.
 */
 public String getProperty() {
 return property;
@@ -97,7 +97,7 @@ public String getProperty() {
 /**
 * Get value to which property was supposed to be changed.
- * @return new value
+ * @return new value.
 */
 public String getNewValue() {
 return newVal;
@@ -105,7 +105,7 @@ public String getNewValue() {
 /**
 * Get old value of property that cannot be changed.
- * @return old value
+ * @return old value.
 */
 public String getOldValue() {
 return oldVal;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
index 872c03f413767..e6813b96a2670 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
@@ -147,8 +147,8 @@ private static List<Class<? extends CryptoCodec>> getCodecClasses(
 /**
 * Create a {@link org.apache.hadoop.crypto.Encryptor}.
 *
- * @return Encryptor the encryptor
- * @throws GeneralSecurityException thrown if create encryptor error
+ * @return Encryptor the encryptor.
+ * @throws GeneralSecurityException thrown if create encryptor error.
 */
 public abstract Encryptor createEncryptor() throws GeneralSecurityException;
@@ -156,7 +156,7 @@ private static List<Class<? extends CryptoCodec>> getCodecClasses(
 * Create a {@link org.apache.hadoop.crypto.Decryptor}.
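 *
 * A typical decryption setup, sketched for illustration (the key and iv
 * are assumed to come from the file's encryption metadata):
 * <pre>
 * CryptoCodec codec = CryptoCodec.getInstance(conf);
 * Decryptor decryptor = codec.createDecryptor();
 * decryptor.init(key, iv);
 * </pre>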
* * @return Decryptor the decryptor - * @throws GeneralSecurityException thrown if create decryptor error + * @throws GeneralSecurityException thrown if create decryptor error. */ public abstract Decryptor createDecryptor() throws GeneralSecurityException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java index 1235d3f55fb10..dad4d20df2afd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java @@ -42,7 +42,7 @@ public class CryptoStreamUtils { /** * Forcibly free the direct buffer. * - * @param buffer buffer + * @param buffer buffer. */ public static void freeDB(ByteBuffer buffer) { if (CleanerUtil.UNMAP_SUPPORTED) { @@ -59,8 +59,8 @@ public static void freeDB(ByteBuffer buffer) { /** * Read crypto buffer size. * - * @param conf configuration - * @return hadoop.security.crypto.buffer.size + * @param conf configuration. + * @return hadoop.security.crypto.buffer.size. */ public static int getBufferSize(Configuration conf) { return conf.getInt(HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY, @@ -70,7 +70,7 @@ public static int getBufferSize(Configuration conf) { /** * AES/CTR/NoPadding or SM4/CTR/NoPadding is required. * - * @param codec crypto codec + * @param codec crypto codec. */ public static void checkCodec(CryptoCodec codec) { if (codec.getCipherSuite() != CipherSuite.AES_CTR_NOPADDING && @@ -83,9 +83,9 @@ public static void checkCodec(CryptoCodec codec) { /** * Check and floor buffer size. * - * @param codec crypto codec + * @param codec crypto codec. * @param bufferSize the size of the buffer to be used. - * @return calc buffer siez + * @return calc buffer size. */ public static int checkBufferSize(CryptoCodec codec, int bufferSize) { Preconditions.checkArgument(bufferSize >= MIN_BUFFER_SIZE, @@ -98,8 +98,8 @@ public static int checkBufferSize(CryptoCodec codec, int bufferSize) { * If input stream is {@link org.apache.hadoop.fs.Seekable}, return it's * current position, otherwise return 0; * - * @param in wrapper - * @return current position, otherwise return 0; + * @param in wrapper. + * @return current position, otherwise return 0. * @throws IOException raised on errors performing I/O. */ public static long getInputStreamOffset(InputStream in) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java index 19e620b0e84b4..4d1674bd7b883 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java @@ -466,7 +466,7 @@ public abstract KeyVersion getKeyVersion(String versionName * Get key metadata in bulk. * @param names the names of the keys to get * @throws IOException raised on errors performing I/O. - * @return Metadata Array + * @return Metadata Array. */ public Metadata[] getKeysMetadata(String... names) throws IOException { Metadata[] result = new Metadata[names.length]; @@ -479,7 +479,7 @@ public Metadata[] getKeysMetadata(String... names) throws IOException { /** * Get the key material for all versions of a specific key name. 
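 *
 * A short sketch (the provider lookup and key name are illustrative,
 * not part of this patch):
 * <pre>
 * KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
 * List&lt;KeyProvider.KeyVersion&gt; versions = provider.getKeyVersions("mykey");
 * </pre>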
* - * @param name the base name of the key + * @param name the base name of the key. * @return the list of key material * @throws IOException raised on errors performing I/O. */ @@ -540,7 +540,7 @@ private String getAlgorithm(String cipher) { * @param size length of the key. * @param algorithm algorithm to use for generating the key. * @return the generated key. - * @throws NoSuchAlgorithmException no such algorithm exception + * @throws NoSuchAlgorithmException no such algorithm exception. */ protected byte[] generateKey(int size, String algorithm) throws NoSuchAlgorithmException { @@ -562,7 +562,7 @@ protected byte[] generateKey(int size, String algorithm) * @param options the options for the new key. * @return the version name of the first version of the key. * @throws IOException raised on errors performing I/O. - * @throws NoSuchAlgorithmException no such algorithm exception + * @throws NoSuchAlgorithmException no such algorithm exception. */ public KeyVersion createKey(String name, Options options) throws NoSuchAlgorithmException, IOException { @@ -668,7 +668,7 @@ protected static String buildVersionName(String name, int version) { * Find the provider with the given key. * * @param providerList the list of providers - * @param keyName the key name we are looking for + * @param keyName the key name we are looking for. * @return the KeyProvider that has the key * @throws IOException raised on errors performing I/O. */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java index 7e85eef5cc741..d706e5ef100c0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java @@ -178,7 +178,7 @@ public interface CryptoExtension extends KeyProviderExtension.Extension { * Calls to this method allows the underlying KeyProvider to warm-up any * implementation specific caches used to store the Encrypted Keys. * @param keyNames Array of Key Names - * @throws IOException thrown if the key material could not be encrypted + * @throws IOException thrown if the key material could not be encrypted. */ public void warmUpEncryptedKeys(String... keyNames) throws IOException; @@ -476,8 +476,8 @@ public void drain(String keyName) { * This constructor is to be used by sub classes that provide * delegating/proxying functionality to the {@link KeyProviderCryptoExtension} * - * @param keyProvider key provider - * @param extension crypto extension + * @param keyProvider key provider. + * @param extension crypto extension. */ protected KeyProviderCryptoExtension(KeyProvider keyProvider, CryptoExtension extension) { @@ -560,7 +560,7 @@ public EncryptedKeyVersion reencryptEncryptedKey(EncryptedKeyVersion ekv) * Calls {@link CryptoExtension#drain(String)} for the given key name on the * underlying {@link CryptoExtension}. * - * @param keyName key name + * @param keyName key name. 
*/ public void drain(String keyName) { getExtension().drain(keyName); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java index ebe41b71f9517..65eded918d60d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java @@ -268,7 +268,7 @@ public ValueQueue(final int numValues, final float lowWaterMark, long expiry, * Initializes the Value Queues for the provided keys by calling the * fill Method with "numInitValues" values * @param keyNames Array of key Names - * @throws ExecutionException executionException + * @throws ExecutionException executionException. */ public void initializeQueuesForKeys(String... keyNames) throws ExecutionException { @@ -286,7 +286,7 @@ public void initializeQueuesForKeys(String... keyNames) * @param keyName String key name * @return E the next value in the Queue * @throws IOException raised on errors performing I/O. - * @throws ExecutionException executionException + * @throws ExecutionException executionException. */ public E getNext(String keyName) throws IOException, ExecutionException { @@ -345,7 +345,7 @@ public int getSize(String keyName) { * @param num Minimum number of values to return. * @return {@literal List} values returned * @throws IOException raised on errors performing I/O. - * @throws ExecutionException execution exception + * @throws ExecutionException execution exception. */ public List getAtMost(String keyName, int num) throws IOException, ExecutionException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java index 0ef81b60329a4..a4737c548c8fa 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java @@ -283,10 +283,10 @@ public AbstractFileSystem(final URI uri, final String supportedScheme, } /** - * Check that the Uri's scheme matches + * Check that the Uri's scheme matches. * - * @param uri name URI of the FS - * @param supportedScheme supported scheme + * @param uri name URI of the FS. + * @param supportedScheme supported scheme. */ public void checkScheme(URI uri, String supportedScheme) { String scheme = uri.getScheme(); @@ -363,7 +363,7 @@ public URI getUri() { * If the path is fully qualified URI, then its scheme and authority * matches that of this file system. Otherwise the path must be * slash-relative name. - * @param path the path + * @param path the path. * @throws InvalidPathException if the path is invalid */ public void checkPath(Path path) { @@ -432,7 +432,7 @@ public String getUriPath(final Path p) { /** * Make the path fully qualified to this file system - * @param path the path + * @param path the path. 
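+ *        For an instance bound to {@code hdfs://nn1}, for example,
+ *        {@code makeQualified(new Path("/tmp"))} returns
+ *        {@code hdfs://nn1/tmp} (names illustrative).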
 * @return the qualified path
 */
 public Path makeQualified(Path path) {
@@ -497,8 +497,8 @@ public FsServerDefaults getServerDefaults(final Path f) throws IOException {
 * through any internal symlinks or mount point
 * @param p path to be resolved
 * @return fully qualified path
- * @throws FileNotFoundException when file not find throw
- * @throws AccessControlException when accees control error throw
+ * @throws FileNotFoundException when the file is not found.
+ * @throws AccessControlException when an access control error occurs.
 * @throws IOException raised on errors performing I/O.
 * @throws UnresolvedLinkException if symbolic link on path cannot be
 * resolved internally
@@ -515,17 +515,17 @@ public Path resolvePath(final Path p) throws FileNotFoundException,
 * that the Path f must be fully qualified and the permission is absolute
 * (i.e. umask has been applied).
 *
- * @param f the path
- * @param createFlag create_flag
- * @param opts create ops
- * @throws AccessControlException access controll exception
- * @throws FileAlreadyExistsException file already exception
- * @throws FileNotFoundException file not found exception
- * @throws ParentNotDirectoryException parent not dir exception
- * @throws UnsupportedFileSystemException unsupported file system exception
- * @throws UnresolvedLinkException unresolved link exception
+ * @param f the path.
+ * @param createFlag create flag.
+ * @param opts create options.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws ParentNotDirectoryException parent not directory exception.
+ * @throws UnsupportedFileSystemException unsupported file system exception.
+ * @throws UnresolvedLinkException unresolved link exception.
 * @throws IOException raised on errors performing I/O.
- * @return output stream
+ * @return output stream.
 */
 public final FSDataOutputStream create(final Path f,
 final EnumSet<CreateFlag> createFlag, Options.CreateOpts... opts)
@@ -644,23 +644,23 @@ public final FSDataOutputStream create(final Path f,
 * {@link #create(Path, EnumSet, Options.CreateOpts...)} except that the opts
 * have been declared explicitly.
 *
- * @param f the path
- * @param flag create flag
- * @param absolutePermission absolute permission
- * @param bufferSize buffer size
- * @param replication replications
- * @param blockSize block size
- * @param progress progress
- * @param checksumOpt check sum opt
- * @param createParent create parent
- * @throws AccessControlException access control exception
- * @throws FileAlreadyExistsException file already exists exception
- * @throws FileNotFoundException file not found exception
- * @throws ParentNotDirectoryException parent not directory exception
- * @throws UnsupportedFileSystemException unsupported filesystem exception
- * @throws UnresolvedLinkException unresolved link exception
+ * @param f the path.
+ * @param flag create flag.
+ * @param absolutePermission absolute permission.
+ * @param bufferSize buffer size.
+ * @param replication replication factor.
+ * @param blockSize block size.
+ * @param progress progress.
+ * @param checksumOpt checksum option.
+ * @param createParent create parent.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws ParentNotDirectoryException parent not directory exception.
+ * @throws UnsupportedFileSystemException unsupported filesystem exception. + * @throws UnresolvedLinkException unresolved link exception. * @throws IOException raised on errors performing I/O. - * @return output stream + * @return output stream. */ public abstract FSDataOutputStream createInternal(Path f, EnumSet flag, FsPermission absolutePermission, @@ -675,13 +675,13 @@ public abstract FSDataOutputStream createInternal(Path f, * {@link FileContext#mkdir(Path, FsPermission, boolean)} except that the Path * f must be fully qualified and the permission is absolute (i.e. * umask has been applied). - * @param dir directory - * @param permission permission - * @param createParent create parent flag - * @throws AccessControlException access control exception - * @throws FileAlreadyExistsException file already exists exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception + * @param dir directory. + * @param permission permission. + * @param createParent create parent flag. + * @throws AccessControlException access control exception. + * @throws FileAlreadyExistsException file already exists exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. * @throws IOException raised on errors performing I/O. */ public abstract void mkdir(final Path dir, final FsPermission permission, @@ -694,13 +694,13 @@ public abstract void mkdir(final Path dir, final FsPermission permission, * {@link FileContext#delete(Path, boolean)} except that Path f must be for * this file system. * - * @param f the path - * @param recursive recursive flag - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception + * @param f the path. + * @param recursive recursive flag. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. * @throws IOException raised on errors performing I/O. - * @return if successfully deleted success true, not false + * @return if successfully deleted success true, not false. */ public abstract boolean delete(final Path f, final boolean recursive) throws AccessControlException, FileNotFoundException, @@ -711,12 +711,12 @@ public abstract boolean delete(final Path f, final boolean recursive) * {@link FileContext#open(Path)} except that Path f must be for this * file system. * - * @param f the path - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception + * @param f the path. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. * @throws IOException raised on errors performing I/O. - * @return input stream + * @return input stream. */ public FSDataInputStream open(final Path f) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException { @@ -728,13 +728,13 @@ public FSDataInputStream open(final Path f) throws AccessControlException, * {@link FileContext#open(Path, int)} except that Path f must be for this * file system. 
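 *
 * For example (the path and buffer size are illustrative):
 * <pre>
 * FSDataInputStream in = fs.open(new Path("/tmp/data"), 4096);
 * </pre>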
* - * @param f the path - * @param bufferSize buffer size - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception + * @param f the path. + * @param bufferSize buffer size. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. * @throws IOException raised on errors performing I/O. - * @return if successfully open success true, not false + * @return if successfully open success true, not false. */ public abstract FSDataInputStream open(final Path f, int bufferSize) throws AccessControlException, FileNotFoundException, @@ -745,13 +745,13 @@ public abstract FSDataInputStream open(final Path f, int bufferSize) * {@link FileContext#truncate(Path, long)} except that Path f must be for * this file system. * - * @param f the path - * @param newLength new length - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception + * @param f the path. + * @param newLength new length. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. * @throws IOException raised on errors performing I/O. - * @return if successfully truncate success true, not false + * @return if successfully truncate success true, not false. */ public boolean truncate(Path f, long newLength) throws AccessControlException, FileNotFoundException, @@ -765,13 +765,13 @@ public boolean truncate(Path f, long newLength) * {@link FileContext#setReplication(Path, short)} except that Path f must be * for this file system. * - * @param f the path - * @param replication replication - * @return if successfully set replication success true, not false - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception - * @throws IOException raised on errors performing I/O. + * @param f the path. + * @param replication replication. + * @return if successfully set replication success true, not false. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. + * @throws IOException raised on errors performing I/O. */ public abstract boolean setReplication(final Path f, final short replication) throws AccessControlException, @@ -782,15 +782,15 @@ public abstract boolean setReplication(final Path f, * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path * f must be for this file system. * - * @param src src - * @param dst dst - * @param options options - * @throws AccessControlException access control exception - * @throws FileAlreadyExistsException file already exists exception - * @throws FileNotFoundException file not found exception - * @throws ParentNotDirectoryException parent not directory exception - * @throws UnresolvedLinkException unresolved link exception - * @throws IOException raised on errors performing I/O. + * @param src src. + * @param dst dst. + * @param options options. + * @throws AccessControlException access control exception. 
+ * @throws FileAlreadyExistsException file already exists exception. + * @throws FileNotFoundException file not found exception. + * @throws ParentNotDirectoryException parent not directory exception. + * @throws UnresolvedLinkException unresolved link exception. + * @throws IOException raised on errors performing I/O. */ public final void rename(final Path src, final Path dst, final Options.Rename... options) throws AccessControlException, @@ -816,14 +816,14 @@ public final void rename(final Path src, final Path dst, * method and can take advantage of the default impl of the other * {@link #renameInternal(Path, Path, boolean)} * - * @param src src - * @param dst dst - * @throws AccessControlException access control exception - * @throws FileAlreadyExistsException file already exists exception - * @throws FileNotFoundException file not found exception - * @throws ParentNotDirectoryException parent not directory exception - * @throws UnresolvedLinkException unresolved link exception - * @throws IOException raised on errors performing I/O. + * @param src src. + * @param dst dst. + * @throws AccessControlException access control exception. + * @throws FileAlreadyExistsException file already exists exception. + * @throws FileNotFoundException file not found exception. + * @throws ParentNotDirectoryException parent not directory exception. + * @throws UnresolvedLinkException unresolved link exception. + * @throws IOException raised on errors performing I/O. */ public abstract void renameInternal(final Path src, final Path dst) throws AccessControlException, FileAlreadyExistsException, @@ -835,15 +835,15 @@ public abstract void renameInternal(final Path src, final Path dst) * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path * f must be for this file system. * - * @param src src - * @param dst dst - * @param overwrite overwrite flag - * @throws AccessControlException access control exception - * @throws FileAlreadyExistsException file already exists exception - * @throws FileNotFoundException file not found exception - * @throws ParentNotDirectoryException parent not directory exception - * @throws UnresolvedLinkException unresolved link exception - * @throws IOException raised on errors performing I/O. + * @param src src. + * @param dst dst. + * @param overwrite overwrite flag. + * @throws AccessControlException access control exception. + * @throws FileAlreadyExistsException file already exists exception. + * @throws FileNotFoundException file not found exception. + * @throws ParentNotDirectoryException parent not directory exception. + * @throws UnresolvedLinkException unresolved link exception. + * @throws IOException raised on errors performing I/O. */ public void renameInternal(final Path src, final Path dst, boolean overwrite) throws AccessControlException, @@ -908,11 +908,11 @@ public boolean supportsSymlinks() { * The specification of this method matches that of * {@link FileContext#createSymlink(Path, Path, boolean)}; * - * @param target target - * @param link link - * @param createParent create parent - * @throws IOException raised on errors performing I/O. - * @throws UnresolvedLinkException unresolved link exception + * @param target target. + * @param link link. + * @param createParent create parent. + * @throws IOException raised on errors performing I/O. + * @throws UnresolvedLinkException unresolved link exception. 
*/ public void createSymlink(final Path target, final Path link, final boolean createParent) throws IOException, UnresolvedLinkException { @@ -923,8 +923,8 @@ public void createSymlink(final Path target, final Path link, * Partially resolves the path. This is used during symlink resolution in * {@link FSLinkResolver}, and differs from the similarly named method * {@link FileContext#getLinkTarget(Path)}. - * @param f the path - * @return target path + * @param f the path. + * @return target path. * @throws IOException subclass implementations may throw IOException */ public Path getLinkTarget(final Path f) throws IOException { @@ -938,12 +938,12 @@ public Path getLinkTarget(final Path f) throws IOException { * {@link FileContext#setPermission(Path, FsPermission)} except that Path f * must be for this file system. * - * @param f the path - * @param permission permission - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception - * @throws IOException raised on errors performing I/O. + * @param f the path. + * @param permission permission. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. + * @throws IOException raised on errors performing I/O. */ public abstract void setPermission(final Path f, final FsPermission permission) throws AccessControlException, @@ -954,13 +954,13 @@ public abstract void setPermission(final Path f, * {@link FileContext#setOwner(Path, String, String)} except that Path f must * be for this file system. * - * @param f the path - * @param username user name - * @param groupname group name - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception - * @throws IOException raised on errors performing I/O. + * @param f the path. + * @param username username. + * @param groupname groupname. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. + * @throws IOException raised on errors performing I/O. */ public abstract void setOwner(final Path f, final String username, final String groupname) throws AccessControlException, @@ -971,13 +971,13 @@ public abstract void setOwner(final Path f, final String username, * {@link FileContext#setTimes(Path, long, long)} except that Path f must be * for this file system. * - * @param f the path - * @param mtime modify time - * @param atime access time - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception - * @throws IOException raised on errors performing I/O. + * @param f the path. + * @param mtime modify time. + * @param atime access time. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. + * @throws IOException raised on errors performing I/O. 
*/ public abstract void setTimes(final Path f, final long mtime, final long atime) throws AccessControlException, FileNotFoundException, @@ -988,12 +988,12 @@ public abstract void setTimes(final Path f, final long mtime, * {@link FileContext#getFileChecksum(Path)} except that Path f must be for * this file system. * - * @param f the path - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception - * @throws IOException raised on errors performing I/O. - * @return File Check sum + * @param f the path. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. + * @throws IOException raised on errors performing I/O. + * @return File Check sum. */ public abstract FileChecksum getFileChecksum(final Path f) throws AccessControlException, FileNotFoundException, @@ -1005,11 +1005,11 @@ public abstract FileChecksum getFileChecksum(final Path f) * except that an UnresolvedLinkException may be thrown if a symlink is * encountered in the path. * - * @param f the path - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception - * @throws IOException raised on errors performing I/O. + * @param f the path. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. + * @throws IOException raised on errors performing I/O. * @return File Status */ public abstract FileStatus getFileStatus(final Path f) @@ -1023,7 +1023,7 @@ public abstract FileStatus getFileStatus(final Path f) * synchronization is essential to guarantee consistency of read requests * particularly in HA setting. * @throws IOException raised on errors performing I/O. - * @throws UnsupportedOperationException Unsupported Operation Exception + * @throws UnsupportedOperationException Unsupported Operation Exception. */ public void msync() throws IOException, UnsupportedOperationException { throw new UnsupportedOperationException(getClass().getCanonicalName() + @@ -1036,11 +1036,11 @@ public void msync() throws IOException, UnsupportedOperationException { * except that an UnresolvedLinkException may be thrown if a symlink is * encountered in the path. * - * @param path the path - * @param mode fsaction mode - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception + * @param path the path. + * @param mode fsaction mode. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. * @throws IOException raised on errors performing I/O. */ @InterfaceAudience.LimitedPrivate({"HDFS", "Hive"}) @@ -1057,12 +1057,12 @@ public void access(Path path, FsAction mode) throws AccessControlException, * If the file system does not support symlinks then the behavior is * equivalent to {@link AbstractFileSystem#getFileStatus(Path)}. 
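[editor's note] A short sketch of the status and checksum calls documented above, via FileContext. The path is hypothetical, and not every filesystem implements checksums, so the result is checked for null:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;

public class StatusSketch {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getFileContext(new Configuration());
    Path f = new Path("/tmp/data.txt");        // hypothetical path
    FileStatus st = fc.getFileStatus(f);
    System.out.println(st.getLen() + " bytes, owner " + st.getOwner());
    FileChecksum sum = fc.getFileChecksum(f);  // may be null if unsupported
    if (sum != null) {
      System.out.println(sum.getAlgorithmName());
    }
  }
}
```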
* - * @param f the path - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnsupportedFileSystemException UnSupported File System Exception + * @param f the path. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnsupportedFileSystemException UnSupported File System Exception. * @throws IOException raised on errors performing I/O. - * @return file status + * @return file status. */ public FileStatus getFileLinkStatus(final Path f) throws AccessControlException, FileNotFoundException, @@ -1075,14 +1075,14 @@ public FileStatus getFileLinkStatus(final Path f) * {@link FileContext#getFileBlockLocations(Path, long, long)} except that * Path f must be for this file system. * - * @param f the path - * @param start start - * @param len length - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception + * @param f the path. + * @param start start. + * @param len length. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. * @throws IOException raised on errors performing I/O. - * @return BlockLocation Array + * @return BlockLocation Array. */ public abstract BlockLocation[] getFileBlockLocations(final Path f, final long start, final long len) throws AccessControlException, @@ -1093,12 +1093,12 @@ public abstract BlockLocation[] getFileBlockLocations(final Path f, * {@link FileContext#getFsStatus(Path)} except that Path f must be for this * file system. * - * @param f the path - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception + * @param f the path. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. * @throws IOException raised on errors performing I/O. - * @return Fs Status + * @return Fs Status. */ public FsStatus getFsStatus(final Path f) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException { @@ -1110,10 +1110,10 @@ public FsStatus getFsStatus(final Path f) throws AccessControlException, * The specification of this method matches that of * {@link FileContext#getFsStatus(Path)}. * - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. * @throws IOException raised on errors performing I/O. - * @return Fs Status + * @return Fs Status. */ public abstract FsStatus getFsStatus() throws AccessControlException, FileNotFoundException, IOException; @@ -1123,12 +1123,12 @@ public abstract FsStatus getFsStatus() throws AccessControlException, * {@link FileContext#listStatus(Path)} except that Path f must be for this * file system. * - * @param f path - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception + * @param f path. + * @throws AccessControlException access control exception. 
+ * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. * @throws IOException raised on errors performing I/O. - * @return FileStatus Iterator + * @return FileStatus Iterator. */ public RemoteIterator listStatusIterator(final Path f) throws AccessControlException, FileNotFoundException, @@ -1162,12 +1162,12 @@ public FileStatus next() { * refer to {@link FileSystem#getFileBlockLocations(FileStatus, long, long)} * for more details. * - * @param f the path - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception + * @param f the path. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. * @throws IOException raised on errors performing I/O. - * @return FileStatus Iterator + * @return FileStatus Iterator. */ public RemoteIterator listLocatedStatus(final Path f) throws AccessControlException, FileNotFoundException, @@ -1200,12 +1200,12 @@ public LocatedFileStatus next() throws IOException { * The specification of this method matches that of * {@link FileContext.Util#listStatus(Path)} except that Path f must be * for this file system. - * @param f the path - * @throws AccessControlException access control exception - * @throws FileNotFoundException file not found exception - * @throws UnresolvedLinkException unresolved link exception + * @param f the path. + * @throws AccessControlException access control exception. + * @throws FileNotFoundException file not found exception. + * @throws UnresolvedLinkException unresolved link exception. * @throws IOException raised on errors performing I/O. - * @return FileStatus Iterator + * @return FileStatus Iterator. */ public abstract FileStatus[] listStatus(final Path f) throws AccessControlException, FileNotFoundException, @@ -1214,7 +1214,7 @@ public abstract FileStatus[] listStatus(final Path f) /** * @return an iterator over the corrupt files under the given path * (may contain duplicates if a file has more than one corrupt block) - * @param path the path + * @param path the path. * @throws IOException raised on errors performing I/O. */ public RemoteIterator listCorruptFileBlocks(Path path) @@ -1229,8 +1229,8 @@ public RemoteIterator listCorruptFileBlocks(Path path) * {@link FileContext#setVerifyChecksum(boolean, Path)} except that Path f * must be for this file system. * - * @param verifyChecksum verify check sum flag - * @throws AccessControlException access control exception + * @param verifyChecksum verify check sum flag. + * @throws AccessControlException access control exception. * @throws IOException raised on errors performing I/O. */ public abstract void setVerifyChecksum(final boolean verifyChecksum) @@ -1472,10 +1472,10 @@ public void removeXAttr(Path path, String name) throws IOException { * The specification of this method matches that of * {@link FileContext#createSnapshot(Path, String)}. * - * @param path the path - * @param snapshotName snapshot name + * @param path the path. + * @param snapshotName snapshot name. * @throws IOException raised on errors performing I/O. - * @return path + * @return path. 
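[editor's note] The RemoteIterator returned by the listing methods above differs from java.util.Iterator in that both hasNext() and next() declare IOException, so a listing can be streamed from a remote store. A minimal sketch:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class ListingSketch {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getFileContext(new Configuration());
    // hasNext()/next() may each throw IOException mid-listing.
    RemoteIterator<FileStatus> it = fc.listStatus(new Path("/tmp"));
    while (it.hasNext()) {
      FileStatus st = it.next();
      System.out.println(st.getPath() + (st.isDirectory() ? "/" : ""));
    }
  }
}
```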
*/ public Path createSnapshot(final Path path, final String snapshotName) throws IOException { @@ -1487,9 +1487,9 @@ public Path createSnapshot(final Path path, final String snapshotName) * The specification of this method matches that of * {@link FileContext#renameSnapshot(Path, String, String)}. * - * @param path the path - * @param snapshotOldName snapshot old name - * @param snapshotNewName snapshot new name + * @param path the path. + * @param snapshotOldName snapshot old name. + * @param snapshotNewName snapshot new name. * @throws IOException raised on errors performing I/O. */ public void renameSnapshot(final Path path, final String snapshotOldName, @@ -1502,8 +1502,8 @@ public void renameSnapshot(final Path path, final String snapshotOldName, * The specification of this method matches that of * {@link FileContext#deleteSnapshot(Path, String)}. * - * @param snapshotDir snapshot dir - * @param snapshotName snapshot name + * @param snapshotDir snapshot dir. + * @param snapshotName snapshot name. * @throws IOException raised on errors performing I/O. */ public void deleteSnapshot(final Path snapshotDir, final String snapshotName) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java index d8f87b07e5d16..7518dd2f7ef74 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java @@ -39,8 +39,8 @@ public class AvroFSInput implements Closeable, SeekableInput { /** * Construct given an {@link FSDataInputStream} and its length. * - * @param in inputstream - * @param len len + * @param in inputstream. + * @param len len. */ public AvroFSInput(final FSDataInputStream in, final long len) { this.stream = in; @@ -48,9 +48,9 @@ public AvroFSInput(final FSDataInputStream in, final long len) { } /** Construct given a {@link FileContext} and a {@link Path}. - * @param fc filecontext - * @param p the path - * @throws IOException If an I/O error occurred + * @param fc filecontext. + * @param p the path. + * @throws IOException If an I/O error occurred. * */ public AvroFSInput(final FileContext fc, final Path p) throws IOException { FileStatus status = fc.getFileStatus(p); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java index 18f6b8137f85e..e693bcbfe89fc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java @@ -68,7 +68,7 @@ public BatchedRemoteIterator(K prevKey) { * * @param prevKey The key to send. * @return A list of replies. - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public abstract BatchedEntries makeRequest(K prevKey) throws IOException; @@ -103,8 +103,8 @@ public boolean hasNext() throws IOException { /** * Return the next list key associated with an element. - * @param element element - * @return K Generics Type + * @param element element. + * @return K Generics Type. 
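[editor's note] To make the makeRequest/elementToPrevKey contract above concrete, here is a toy subclass that "pages" through a range of integers, assuming the nested BatchedListEntries helper; it is purely illustrative and not part of the patch:

```java
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.BatchedRemoteIterator;

/** Iterates [0, limit); each simulated "RPC" returns the batch after prevKey. */
public class RangeIterator extends BatchedRemoteIterator<Integer, Integer> {
  private final int limit;

  public RangeIterator(int limit) {
    super(-1);            // prevKey that precedes the first element
    this.limit = limit;
  }

  @Override
  public BatchedEntries<Integer> makeRequest(Integer prevKey) throws IOException {
    List<Integer> batch = new ArrayList<>();
    for (int i = prevKey + 1; i < limit && batch.size() < 3; i++) {
      batch.add(i);
    }
    boolean hasMore =
        !batch.isEmpty() && batch.get(batch.size() - 1) < limit - 1;
    return new BatchedListEntries<>(batch, hasMore);
  }

  @Override
  public Integer elementToPrevKey(Integer element) {
    return element;       // the element itself is the resume key
  }
}
```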
*/ public abstract K elementToPrevKey(E element); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java index 657be6fc95a07..67687c1f0e04c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java @@ -85,7 +85,7 @@ public BlockLocation() { /** * Copy constructor. - * @param that blocklocation + * @param that blocklocation. */ public BlockLocation(BlockLocation that) { this.hosts = that.hosts; @@ -101,10 +101,10 @@ public BlockLocation(BlockLocation that) { /** * Constructor with host, name, offset and length. - * @param names names array - * @param hosts host array - * @param offset offset - * @param length length + * @param names names array. + * @param hosts host array. + * @param offset offset. + * @param length length. */ public BlockLocation(String[] names, String[] hosts, long offset, long length) { @@ -113,11 +113,11 @@ public BlockLocation(String[] names, String[] hosts, long offset, /** * Constructor with host, name, offset, length and corrupt flag. - * @param names names - * @param hosts hosts - * @param offset offset - * @param length length - * @param corrupt corrupt + * @param names names. + * @param hosts hosts. + * @param offset offset. + * @param length length. + * @param corrupt corrupt. */ public BlockLocation(String[] names, String[] hosts, long offset, long length, boolean corrupt) { @@ -126,11 +126,11 @@ public BlockLocation(String[] names, String[] hosts, long offset, /** * Constructor with host, name, network topology, offset and length. - * @param names names - * @param hosts hosts - * @param topologyPaths topologyPaths - * @param offset offset - * @param length length + * @param names names. + * @param hosts hosts. + * @param topologyPaths topologyPaths. + * @param offset offset. + * @param length length. */ public BlockLocation(String[] names, String[] hosts, String[] topologyPaths, long offset, long length) { @@ -140,12 +140,12 @@ public BlockLocation(String[] names, String[] hosts, String[] topologyPaths, /** * Constructor with host, name, network topology, offset, length * and corrupt flag. - * @param names names - * @param hosts hosts - * @param topologyPaths topologyPaths - * @param offset offset - * @param length length - * @param corrupt corrupt + * @param names names. + * @param hosts hosts. + * @param topologyPaths topologyPaths. + * @param offset offset. + * @param length length. + * @param corrupt corrupt. */ public BlockLocation(String[] names, String[] hosts, String[] topologyPaths, long offset, long length, boolean corrupt) { @@ -198,8 +198,8 @@ public BlockLocation(String[] names, String[] hosts, String[] cachedHosts, /** * Get the list of hosts (hostname) hosting this block. - * @return hosts array - * @throws IOException If an I/O error occurred + * @return hosts array. + * @throws IOException If an I/O error occurred. */ public String[] getHosts() throws IOException { return hosts; @@ -207,7 +207,7 @@ public String[] getHosts() throws IOException { /** * Get the list of hosts (hostname) hosting a cached replica of the block. - * @return cached hosts + * @return cached hosts. */ public String[] getCachedHosts() { return cachedHosts; @@ -215,7 +215,7 @@ public String[] getCachedHosts() { /** * Get the list of names (IP:xferPort) hosting this block. 
- * @return names array + * @return names array. * @throws IOException If an I/O error occurred. */ public String[] getNames() throws IOException { @@ -225,8 +225,8 @@ public String[] getNames() throws IOException { /** * Get the list of network topology paths for each of the hosts. * The last component of the path is the "name" (IP:xferPort). - * @return topology paths - * @throws IOException If an I/O error occurred + * @return topology paths. + * @throws IOException If an I/O error occurred. */ public String[] getTopologyPaths() throws IOException { return topologyPaths; @@ -234,7 +234,7 @@ public String[] getTopologyPaths() throws IOException { /** * Get the storageID of each replica of the block. - * @return storage ids + * @return storage ids. */ public String[] getStorageIds() { return storageIds; @@ -242,7 +242,7 @@ public String[] getStorageIds() { /** * Get the storage type of each replica of the block. - * @return storage type of each replica of the block + * @return storage type of each replica of the block. */ public StorageType[] getStorageTypes() { return storageTypes; @@ -250,7 +250,7 @@ public StorageType[] getStorageTypes() { /** * Get the start offset of file associated with this block. - * @return start offset of file associated with this block + * @return start offset of file associated with this block. */ public long getOffset() { return offset; @@ -258,7 +258,7 @@ public long getOffset() { /** * Get the length of the block. - * @return length of the block + * @return length of the block. */ public long getLength() { return length; @@ -266,7 +266,7 @@ public long getLength() { /** * Get the corrupt flag. - * @return corrupt flag + * @return corrupt flag. */ public boolean isCorrupt() { return corrupt; @@ -274,7 +274,7 @@ public boolean isCorrupt() { /** * Return true if the block is striped (erasure coded). - * @return if the block is striped true, not false + * @return if the block is striped true, not false. */ public boolean isStriped() { return false; @@ -282,7 +282,7 @@ public boolean isStriped() { /** * Set the start offset of file associated with this block. - * @param offset start offset + * @param offset start offset. */ public void setOffset(long offset) { this.offset = offset; @@ -290,7 +290,7 @@ public void setOffset(long offset) { /** * Set the length of block. - * @param length length of block + * @param length length of block. */ public void setLength(long length) { this.length = length; @@ -298,7 +298,7 @@ public void setLength(long length) { /** * Set the corrupt flag. - * @param corrupt corrupt flag + * @param corrupt corrupt flag. */ public void setCorrupt(boolean corrupt) { this.corrupt = corrupt; @@ -306,8 +306,8 @@ public void setCorrupt(boolean corrupt) { /** * Set the hosts hosting this block. - * @param hosts hosts array - * @throws IOException If an I/O error occurred + * @param hosts hosts array. + * @throws IOException If an I/O error occurred. */ public void setHosts(String[] hosts) throws IOException { if (hosts == null) { @@ -319,7 +319,7 @@ public void setHosts(String[] hosts) throws IOException { /** * Set the hosts hosting a cached replica of this block. - * @param cachedHosts cached hosts + * @param cachedHosts cached hosts. */ public void setCachedHosts(String[] cachedHosts) { if (cachedHosts == null) { @@ -331,8 +331,8 @@ public void setCachedHosts(String[] cachedHosts) { /** * Set the names (host:port) hosting this block. - * @param names names - * @throws IOException If an I/O error occurred + * @param names names. 
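[editor's note] A small sketch that exercises the BlockLocation constructor and getters documented above; the hostnames, ports, and sizes are made up:

```java
import org.apache.hadoop.fs.BlockLocation;

public class BlockLocationSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical block: two replicas, starts at byte 0, 128 MB long,
    // not corrupt.
    String[] names = {"10.0.0.1:9866", "10.0.0.2:9866"};  // IP:xferPort
    String[] hosts = {"dn1.example.com", "dn2.example.com"};
    BlockLocation loc =
        new BlockLocation(names, hosts, 0L, 128L * 1024 * 1024, false);
    System.out.println("offset=" + loc.getOffset()
        + " length=" + loc.getLength()
        + " hosts=" + String.join(",", loc.getHosts()));
  }
}
```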
+ * @throws IOException If an I/O error occurred. */ public void setNames(String[] names) throws IOException { if (names == null) { @@ -345,8 +345,8 @@ public void setNames(String[] names) throws IOException { /** * Set the network topology paths of the hosts. * - * @param topologyPaths topology paths - * @throws IOException If an I/O error occurred + * @param topologyPaths topology paths. + * @throws IOException If an I/O error occurred. */ public void setTopologyPaths(String[] topologyPaths) throws IOException { if (topologyPaths == null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java index a9790773b1707..f577649dd5fce 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java @@ -48,11 +48,11 @@ private static boolean streamHasByteBufferRead(InputStream stream) { /** * Perform a fallback read. * - * @param stream input stream - * @param bufferPool bufferPool - * @param maxLength maxLength + * @param stream input stream. + * @param bufferPool bufferPool. + * @param maxLength maxLength. * @throws IOException raised on errors performing I/O. - * @return byte buffer + * @return byte buffer. */ public static ByteBuffer fallbackRead( InputStream stream, ByteBufferPool bufferPool, int maxLength) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java index e0af53bac6f7d..d7b61346d4e3b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java @@ -54,7 +54,7 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed { * This is the constructor used by the builder. * All overriding classes should implement this. * - * @param builder builder + * @param builder builder. * @throws IOException raised on errors performing I/O. */ public CachingGetSpaceUsed(CachingGetSpaceUsed.Builder builder) @@ -144,7 +144,7 @@ public String getDirPath() { /** * Increment the cached value of used space. * - * @param value dfs used value + * @param value dfs used value. */ public void incDfsUsed(long value) { used.addAndGet(value); @@ -160,7 +160,7 @@ boolean running() { /** * How long in between runs of the background refresh. * - * @return refresh interval + * @return refresh interval. */ @VisibleForTesting public long getRefreshInterval() { @@ -171,7 +171,7 @@ public long getRefreshInterval() { * Randomize the refresh interval timing by this amount, the actual interval will be chosen * uniformly between {@code interval-jitter} and {@code interval+jitter}. * - * @return between interval-jitter and interval+jitter + * @return between interval-jitter and interval+jitter. 
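[editor's note] The jitter contract quoted above (the actual interval is chosen uniformly in [interval - jitter, interval + jitter]) can be sketched in a few lines. This illustrates the documented behavior only, not the class's internal code:

```java
import java.util.concurrent.ThreadLocalRandom;

public class JitterSketch {
  /** Pick the next refresh delay uniformly from [interval-jitter, interval+jitter]. */
  static long nextDelay(long interval, long jitter) {
    if (jitter <= 0) {
      return interval;
    }
    return interval - jitter
        + ThreadLocalRandom.current().nextLong(2 * jitter + 1);
  }

  public static void main(String[] args) {
    System.out.println(nextDelay(600_000L, 60_000L)); // 10 min +/- 1 min
  }
}
```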
 */
  @VisibleForTesting
  public long getJitter() {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
index 6d7afadd78f50..0efcdc8022f7b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
@@ -105,8 +105,8 @@ public FileSystem getRawFileSystem() {
   /**
    * Return the name of the checksum file associated with a file.
    *
-   * @param file the file path
-   * @return name of the checksum file associated with a file
+   * @param file the file path.
+   * @return name of the checksum file associated with a file.
    */
   public Path getChecksumFile(Path file) {
     return new Path(file.getParent(), "." + file.getName() + ".crc");
@@ -115,8 +115,8 @@ public Path getChecksumFile(Path file) {
   /**
    * Return true if file is a checksum file name.
    *
-   * @param file the file path
-   * @return if file is a checksum file true, not false
+   * @param file the file path.
+   * @return if file is a checksum file true, not false.
    */
   public static boolean isChecksumFile(Path file) {
     String name = file.getName();
@@ -127,9 +127,9 @@ public static boolean isChecksumFile(Path file) {
    * Return the length of the checksum file given the size of the
    * actual file.
    *
-   * @param file the file path
-   * @param fileSize file size
-   * @return checksum length
+   * @param file the file path.
+   * @param fileSize file size.
+   * @return checksum length.
    */
   public long getChecksumFileLength(Path file, long fileSize) {
     return getChecksumLength(fileSize, getBytesPerSum());
@@ -138,7 +138,7 @@ public long getChecksumFileLength(Path file, long fileSize) {
   /**
    * Return the bytes Per Checksum.
    *
-   * @return bytes per check sum
+   * @return bytes per check sum.
    */
   public int getBytesPerSum() {
     return bytesPerChecksum;
@@ -831,9 +831,9 @@ public void copyToLocalFile(boolean delSrc, Path src, Path dst)
    * Copy it from FS control to the local dst name.
    * If src and dst are directories, the copyCrc parameter
    * determines whether to copy CRC files.
-   * @param src src path
-   * @param dst dst path
-   * @param copyCrc copy csc flag
+   * @param src src path.
+   * @param dst dst path.
+   * @param copyCrc copy crc flag.
    * @throws IOException if an I/O error occurs.
    */
   @SuppressWarnings("deprecation")
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
index a12b0e9b98613..4820c5c3045d7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
@@ -73,7 +73,7 @@ public void setVerifyChecksum(boolean inVerifyChecksum) {
   /**
    * get the raw file system.
    *
-   * @return abstract file system
+   * @return abstract file system.
    */
  public AbstractFileSystem getRawFs() {
    return getMyFs();
@@ -82,8 +82,8 @@ public AbstractFileSystem getRawFs() {
  /**
   * Return the name of the checksum file associated with a file.
   *
-   * @param file the file path
-   * @return the checksum file associated with a file
+   * @param file the file path.
+   * @return the checksum file associated with a file.
   */
  public Path getChecksumFile(Path file) {
    return new Path(file.getParent(), "."
+ file.getName() + ".crc"); @@ -92,8 +92,8 @@ public Path getChecksumFile(Path file) { /** * Return true iff file is a checksum file name. * - * @param file the file path - * @return if is checksum file true,not false + * @param file the file path. + * @return if is checksum file true,not false. */ public static boolean isChecksumFile(Path file) { String name = file.getName(); @@ -104,9 +104,9 @@ public static boolean isChecksumFile(Path file) { * Return the length of the checksum file given the size of the * actual file. * - * @param file the file path - * @param fileSize file size - * @return check sum file length + * @param file the file path. + * @param fileSize file size. + * @return check sum file length. */ public long getChecksumFileLength(Path file, long fileSize) { return getChecksumLength(fileSize, getBytesPerSum()); @@ -115,7 +115,7 @@ public long getChecksumFileLength(Path file, long fileSize) { /** * Return the bytes Per Checksum. * - * @return bytes per sum + * @return bytes per sum. */ public int getBytesPerSum() { return defaultBytesPerChecksum; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java index 9c2ceb0526565..bdbc8f3a33f4b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java @@ -40,9 +40,9 @@ public class CompositeCrcFileChecksum extends FileChecksum { /** * Create a CompositeCrcFileChecksum. * - * @param crc crc - * @param crcType crcType - * @param bytesPerCrc bytesPerCrc + * @param crc crc. + * @param crcType crcType. + * @param bytesPerCrc bytesPerCrc. */ public CompositeCrcFileChecksum( int crc, DataChecksum.Type crcType, int bytesPerCrc) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java index 1050083cea78d..9f97a12fa6088 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java @@ -153,11 +153,11 @@ public ContentSummary() {} * Constructor, deprecated by ContentSummary.Builder * This constructor implicitly set spaceConsumed the same as length. * spaceConsumed and length must be set explicitly with - * ContentSummary.Builder + * ContentSummary.Builder. * - * @param length length - * @param fileCount file count - * @param directoryCount directory count + * @param length length. + * @param fileCount file count. + * @param directoryCount directory count. * */ @Deprecated public ContentSummary(long length, long fileCount, long directoryCount) { @@ -167,12 +167,12 @@ public ContentSummary(long length, long fileCount, long directoryCount) { /** * Constructor, deprecated by ContentSummary.Builder. * - * @param length length - * @param fileCount file count - * @param directoryCount directory count - * @param quota quota - * @param spaceConsumed space consumed - * @param spaceQuota space quota + * @param length length. + * @param fileCount file count. + * @param directoryCount directory count. + * @param quota quota. + * @param spaceConsumed space consumed. + * @param spaceQuota space quota. 
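[editor's note] The deprecation notes above point at ContentSummary.Builder as the replacement, with spaceConsumed now set explicitly rather than defaulting to length. A minimal sketch of the builder pattern, all figures hypothetical:

```java
import org.apache.hadoop.fs.ContentSummary;

public class SummarySketch {
  public static void main(String[] args) {
    ContentSummary cs = new ContentSummary.Builder()
        .length(1024)
        .fileCount(2)
        .directoryCount(1)
        .spaceConsumed(3072)   // e.g. 1024 bytes at replication factor 3
        .build();
    System.out.println(cs.toString(true));
  }
}
```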
   * */
  @Deprecated
  public ContentSummary(
@@ -189,7 +189,7 @@ public ContentSummary(
   /**
    * Constructor for ContentSummary.Builder.
    *
-   * @param builder builder
+   * @param builder builder.
    */
   private ContentSummary(Builder builder) {
     super(builder);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
index b197d43d8c792..ca008e536931d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
@@ -190,7 +190,7 @@ public static void validate(Object path, boolean pathExists,
    * Validate the CreateFlag for the append operation. The flag must contain
    * APPEND, and cannot contain OVERWRITE.
    *
-   * @param flag enum set flag
+   * @param flag enum set flag.
    */
   public static void validateForAppend(EnumSet flag) {
     validate(flag);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
index 3c06e97401341..c5a052f3de4be 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
@@ -104,7 +104,7 @@ public int getPercentUsed() {
   }
 
   /**
-   * @return the filesystem mount point for the indicated volume
+   * @return the filesystem mount point for the indicated volume.
    * @throws IOException raised on errors performing I/O.
    */
   public String getMount() throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
index 6244797119602..6f6e30410659c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
@@ -49,8 +49,8 @@ public interface Renewable {
     /**
      * Set delegation token.
-     * @param <T> generic type T
-     * @param token token
+     * @param <T> generic type T.
+     * @param token token.
      */
     public void setDelegationToken(Token token);
   }
@@ -179,7 +179,7 @@ public String toString() {
   /**
    * For testing purposes.
    *
-   * @return renew queue length
+   * @return renew queue length.
    */
   @VisibleForTesting
   protected int getRenewQueueLength() {
@@ -222,9 +222,9 @@ static synchronized void reset() {
   /**
    * Add a renew action to the queue.
    *
-   * @param <T> generic type T
-   * @param fs file system
-   * @return renew action
+   * @param <T> generic type T.
+   * @param fs file system.
+   * @return renew action.
    * */
   @SuppressWarnings("static-access")
   public RenewAction addRenewAction(final T fs) {
@@ -245,8 +245,8 @@ public RenewAction addRenewAction(final T
   /**
    * Remove the associated renew action from the queue
    *
-   * @param <T> generic type T
-   * @param fs file system
+   * @param <T> generic type T.
+   * @param fs file system.
    * @throws IOException raised on errors performing I/O.
*/ public void removeRenewAction( diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java index fe72e117903ba..56ef51f128db8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java @@ -37,17 +37,17 @@ public interface FSBuilder> { /** * Set optional Builder parameter. - * @param key key - * @param value value - * @return generic type B + * @param key key. + * @param value value. + * @return generic type B. */ B opt(@Nonnull String key, @Nonnull String value); /** * Set optional boolean parameter for the Builder. - * @param key key - * @param value value - * @return generic type B + * @param key key. + * @param value value. + * @return generic type B. * @see #opt(String, String) */ B opt(@Nonnull String key, boolean value); @@ -55,9 +55,9 @@ public interface FSBuilder> { /** * Set optional int parameter for the Builder. * - * @param key key - * @param value value - * @return generic type B + * @param key key. + * @param value value. + * @return generic type B. * @see #opt(String, String) */ B opt(@Nonnull String key, int value); @@ -65,9 +65,9 @@ public interface FSBuilder> { /** * Set optional float parameter for the Builder. * - * @param key key - * @param value value - * @return generic type B + * @param key key. + * @param value value. + * @return generic type B. * @see #opt(String, String) */ B opt(@Nonnull String key, float value); @@ -75,9 +75,9 @@ public interface FSBuilder> { /** * Set optional long parameter for the Builder. * - * @param key key - * @param value value - * @return generic type B + * @param key key. + * @param value value. + * @return generic type B. * @see #opt(String, String) */ B opt(@Nonnull String key, long value); @@ -85,18 +85,19 @@ public interface FSBuilder> { /** * Set optional double parameter for the Builder. * - * @param key key - * @param value value - * @return generic type B + * @param key key. + * @param value value. + * @return generic type B. * @see #opt(String, String) */ B opt(@Nonnull String key, double value); /** * Set an array of string values as optional parameter for the Builder. - * @param key key - * @param values values - * @return generic type B + * + * @param key key. + * @param values values. + * @return generic type B. * @see #opt(String, String) */ B opt(@Nonnull String key, @Nonnull String... values); @@ -107,63 +108,68 @@ public interface FSBuilder> { * If the option is not supported or unavailable, * the client should expect {@link #build()} throws IllegalArgumentException. * - * @param key key - * @param value value - * @return generic type B + * @param key key. + * @param value value. + * @return generic type B. */ B must(@Nonnull String key, @Nonnull String value); /** * Set mandatory boolean option. * - * @param key key - * @param value value - * @return generic type B + * @param key key. + * @param value value. + * @return generic type B. * @see #must(String, String) */ B must(@Nonnull String key, boolean value); /** * Set mandatory int option. - * @param key key - * @param value value - * @return generic type B + * + * @param key key. + * @param value value. + * @return generic type B. * @see #must(String, String) */ B must(@Nonnull String key, int value); /** * Set mandatory float option. 
- * @param key key - * @param value value - * @return generic type B + * + * @param key key. + * @param value value. + * @return generic type B. * @see #must(String, String) */ B must(@Nonnull String key, float value); /** * Set mandatory long option. - * @param key key - * @param value value - * @return generic type B + * + * @param key key. + * @param value value. + * @return generic type B. * @see #must(String, String) */ B must(@Nonnull String key, long value); /** * Set mandatory double option. - * @param key key - * @param value value - * @return generic type B + * + * @param key key. + * @param value value. + * @return generic type B. * @see #must(String, String) */ B must(@Nonnull String key, double value); /** * Set a string array as mandatory option. - * @param key key - * @param values values - * @return generic type B + * + * @param key key. + * @param values values. + * @return generic type B. * @see #must(String, String) */ B must(@Nonnull String key, @Nonnull String... values); @@ -175,7 +181,7 @@ public interface FSBuilder> { * @throws UnsupportedOperationException if the filesystem does not support * the specific operation. * @throws IOException on filesystem IO errors. - * @return generic type S + * @return generic type S. */ S build() throws IllegalArgumentException, UnsupportedOperationException, IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java index e7d79f2a90f10..16938a83a69c7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java @@ -123,8 +123,9 @@ public abstract class FSDataOutputStreamBuilder /** * Constructor. - * @param fileSystem file system - * @param p the path + * + * @param fileSystem file system. + * @param p the path. */ protected FSDataOutputStreamBuilder(@Nonnull FileSystem fileSystem, @Nonnull Path p) { @@ -152,8 +153,8 @@ protected FsPermission getPermission() { /** * Set permission for the file. * - * @param perm permission - * @return B Generics Type + * @param perm permission. + * @return B Generics Type. */ public B permission(@Nonnull final FsPermission perm) { checkNotNull(perm); @@ -168,8 +169,8 @@ protected int getBufferSize() { /** * Set the size of the buffer to be used. * - * @param bufSize buffer size - * @return Generics Type B + * @param bufSize buffer size. + * @return Generics Type B. */ public B bufferSize(int bufSize) { bufferSize = bufSize; @@ -183,8 +184,8 @@ protected short getReplication() { /** * Set replication factor. * - * @param replica replica - * @return Generics Type B + * @param replica replica. + * @return Generics Type B. */ public B replication(short replica) { replication = replica; @@ -198,8 +199,8 @@ protected long getBlockSize() { /** * Set block size. * - * @param blkSize block size - * @return B Generics Type + * @param blkSize block size. + * @return B Generics Type. */ public B blockSize(long blkSize) { blockSize = blkSize; @@ -209,7 +210,7 @@ public B blockSize(long blkSize) { /** * Return true to create the parent directories if they do not exist. * - * @return if create the parent directories if they do not exist true,not false + * @return if create the parent directories if they do not exist true,not false. 
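[editor's note] The opt/must split documented above is the core of FSBuilder: optional hints may be silently ignored, while a mandatory key the filesystem does not understand is specified to make build() throw IllegalArgumentException. A sketch against the createFile() builder; the option key is made up:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class BuilderOptSketch {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    // opt(): a hint the filesystem is free to ignore. Swapping it for
    // must() would require the filesystem to understand the key or fail.
    try (FSDataOutputStream out = fs.createFile(new Path("/tmp/opt-demo.txt"))
        .opt("fs.example.write.hint", "best-effort")   // hypothetical key
        .build()) {
      out.writeUTF("hello");
    }
  }
}
```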
 */
  protected boolean isRecursive() {
    return recursive;
  }
@@ -218,7 +219,7 @@
   /**
    * Create the parent directory if they do not exist.
    *
-   * @return B Generics Type
+   * @return B Generics Type.
    */
   public B recursive() {
     recursive = true;
@@ -232,8 +233,8 @@ protected Progressable getProgress() {
   /**
    * Set the facility of reporting progress.
    *
-   * @param prog progress
-   * @return B Generics Type
+   * @param prog progress.
+   * @return B Generics Type.
    */
   public B progress(@Nonnull final Progressable prog) {
     checkNotNull(prog);
@@ -248,7 +249,7 @@ protected EnumSet getFlags() {
   /**
    * Create an FSDataOutputStream at the specified path.
    *
-   * @return return Generics Type B
+   * @return return Generics Type B.
    */
   public B create() {
     flags.add(CreateFlag.CREATE);
@@ -260,8 +261,8 @@ public B create() {
    * Set it to false, an exception will be thrown when calling {@link #build()}
    * if the file exists.
    *
-   * @param overwrite overrite
-   * @return Generics Type B
+   * @param overwrite overwrite.
+   * @return Generics Type B.
    */
   public B overwrite(boolean overwrite) {
     if (overwrite) {
@@ -275,7 +276,7 @@ public B overwrite(boolean overwrite) {
   /**
    * Append to an existing file (optional operation).
    *
-   * @return Generics Type B
+   * @return Generics Type B.
    */
   public B append() {
     flags.add(CreateFlag.APPEND);
@@ -289,8 +290,8 @@ protected ChecksumOpt getChecksumOpt() {
   /**
    * Set checksum opt.
    *
-   * @param chksumOpt check sum opt
-   * @return Generics Type B
+   * @param chksumOpt check sum opt.
+   * @return Generics Type B.
    */
   public B checksumOpt(@Nonnull final ChecksumOpt chksumOpt) {
     checkNotNull(chksumOpt);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
index e367f3666c6eb..ee16ca8a2cd50 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
@@ -82,7 +82,7 @@ protected FSInputChecker( Path file, int numOfRetries) {
    * @param sum the type of Checksum engine
    * @param chunkSize maximun chunk size
    * @param checksumSize the number byte of each checksum
-   * @param verifyChecksum verify check sum
+   * @param verifyChecksum verify check sum.
    */
   protected FSInputChecker( Path file, int numOfRetries,
       boolean verifyChecksum, Checksum sum, int chunkSize, int checksumSize ) {
@@ -133,7 +133,7 @@ abstract protected int readChunk(long pos, byte[] buf, int offset, int len,
 
   /**
    * Return true if there is a need for checksum verification.
-   * @return if there is a need for checksum verification true, not false
+   * @return if there is a need for checksum verification true, not false.
    */
   protected synchronized boolean needChecksum() {
     return verifyChecksum && sum != null;
@@ -363,8 +363,8 @@ private void verifySums(final byte b[], final int off, int read)
    * This is deprecated since 0.22 since it is no longer in use
    * by this class.
    *
-   * @param checksum check sum
-   * @return crc
+   * @param checksum check sum.
+   * @return crc.
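[editor's note] Chaining the FSDataOutputStreamBuilder setters documented above gives a named-parameter replacement for the long create(...) overloads. A sketch with hypothetical values; on the local filesystem some knobs, such as replication, are simply ignored:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class CreateBuilderSketch {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    try (FSDataOutputStream out = fs.createFile(new Path("/tmp/report.csv"))
        .permission(new FsPermission((short) 0644))
        .bufferSize(8192)
        .replication((short) 2)
        .blockSize(128 * 1024 * 1024L)
        .recursive()          // create missing parent directories
        .overwrite(true)      // replace the file if it already exists
        .build()) {
      out.writeBytes("id,value\n");
    }
  }
}
```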
 */
  @Deprecated
  static public long checksum2long(byte[] checksum) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
index cf819fe1e1e41..4ef512dc257a3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
@@ -187,7 +187,7 @@ public void flush() throws IOException {
   /**
    * Return the number of valid bytes currently in the buffer.
    *
-   * @return buffer data size
+   * @return buffer data size.
    */
   protected synchronized int getBufferedDataSize() {
     return count;
@@ -230,9 +230,9 @@ private void writeChecksumChunks(byte b[], int off, int len)
   /**
    * Converts a checksum integer value to a byte stream
    *
-   * @param sum check sum
-   * @param checksumSize check sum size
-   * @return byte stream
+   * @param sum check sum.
+   * @param checksumSize check sum size.
+   * @return byte stream.
    */
   static public byte[] convertToByteStream(Checksum sum, int checksumSize) {
     return int2byte((int)sum.getValue(), new byte[checksumSize]);
@@ -252,7 +252,7 @@ static byte[] int2byte(int integer, byte[] bytes) {
   /**
    * Resets existing buffer with a new one of the specified size.
    *
-   * @param size size
+   * @param size size.
    */
   protected synchronized void setChecksumBufSize(int size) {
     this.buf = new byte[size];
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
index 679c5811f19e4..62d2e3af78671 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
@@ -31,21 +31,21 @@ public abstract class FileChecksum implements Writable {
   /**
    * The checksum algorithm name.
    *
-   * @return algorithm name
+   * @return algorithm name.
    */
   public abstract String getAlgorithmName();
 
   /**
    * The length of the checksum in bytes.
    *
-   * @return length
+   * @return length.
    */
   public abstract int getLength();
 
   /**
    * The value of the checksum in bytes.
    *
-   * @return byte array
+   * @return byte array.
    */
   public abstract byte[] getBytes();
 
@@ -56,8 +56,8 @@ public ChecksumOpt getChecksumOpt() {
   /**
    * Return true if both the algorithms and the values are the same.
    *
-   * @param other other
-   * @return if equal true, not false
+   * @param other other.
+   * @return if equal true, not false.
    */
   @Override
   public boolean equals(Object other) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index d48918f280ee7..298570bb55fe8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -366,8 +366,8 @@ public AbstractFileSystem run() throws UnsupportedFileSystemException {
    * Create a FileContext with specified FS as default using the specified
    * config.
    *
-   * @param defFS default fs
-   * @param aConf configutration
+   * @param defFS default fs.
+   * @param aConf configuration.
    * @return new FileContext with specified FS as default.
 */
  public static FileContext getFileContext(final AbstractFileSystem defFS,
@@ -378,7 +378,7 @@ public static FileContext getFileContext(final AbstractFileSystem defFS,
   /**
    * Create a FileContext for specified file system using the default config.
    *
-   * @param defaultFS default fs
+   * @param defaultFS default fs.
    * @return a FileContext with the specified AbstractFileSystem
    *         as the default FS.
    */
@@ -411,7 +411,7 @@ protected static FileContext getFileContext(
    *
    * @throws UnsupportedFileSystemException If the file system from the default
    *           configuration is not supported
-   * @return file context
+   * @return file context.
    */
   public static FileContext getFileContext()
       throws UnsupportedFileSystemException {
@@ -431,7 +431,7 @@ public static FileContext getLocalFSFileContext()
   /**
    * Create a FileContext for specified URI using the default config.
    *
-   * @param defaultFsUri defaultFsUri
+   * @param defaultFsUri defaultFsUri.
    * @return a FileContext with the specified URI as the default FS.
    *
    * @throws UnsupportedFileSystemException If the file system for
@@ -445,8 +445,8 @@ public static FileContext getFileContext(final URI defaultFsUri)
   /**
    * Create a FileContext for specified default URI using the specified config.
    *
-   * @param defaultFsUri defaultFsUri
-   * @param aConf configrution
+   * @param defaultFsUri defaultFsUri.
+   * @param aConf configuration.
    * @return new FileContext for specified uri
    * @throws UnsupportedFileSystemException If the file system with specified is
    *           not supported
@@ -477,7 +477,7 @@ public static FileContext getFileContext(final URI defaultFsUri,
    * {@link #getFileContext(URI, Configuration)} instead of this one.
    *
    *
-   * @param aConf configration
+   * @param aConf configuration.
    * @return new FileContext
    * @throws UnsupportedFileSystemException If file system in the config
    *           is not supported
@@ -555,7 +555,7 @@ public void setWorkingDirectory(final Path newWDir) throws IOException {
 
   /**
    * Gets the working directory for wd-relative names (such a "foo/bar").
-   * @return the path
+   * @return the path.
    */
   public Path getWorkingDirectory() {
     return workingDir;
@@ -602,7 +602,7 @@ public void setUMask(final FsPermission newUmask) {
    * @throws FileNotFoundException If f does not exist
    * @throws AccessControlException if access denied
    * @throws IOException If an IO Error occurred
-   * @throws UnresolvedLinkException If unresolved link occurred
+   * @throws UnresolvedLinkException If unresolved link occurred.
    *
    * Exceptions applicable to file systems accessed over RPC:
    * @throws RpcClientException If an exception occurred in the RPC client
@@ -623,7 +623,7 @@ public Path resolvePath(final Path f) throws FileNotFoundException,
    * A Fully-qualified path has scheme and authority specified and an absolute
    * path.
    * Use the default file system and working dir in this FileContext to qualify.
-   * @param path the path
+   * @param path the path.
    * @return qualified path
    */
   public Path makeQualified(final Path path) {
@@ -762,7 +762,7 @@ public FSDataOutputStream build() throws IOException {
    *
    * Client should expect {@link FSDataOutputStreamBuilder#build()} throw the
    * same exceptions as create(Path, EnumSet, CreateOpts...).
-   * @throws IOException If an I/O error occurred
+   * @throws IOException If an I/O error occurred.
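[editor's note] The getFileContext overloads above differ only in where the default filesystem comes from. A minimal sketch of the three common entry points; the namenode URI is hypothetical and the last call needs the HDFS client on the classpath:

```java
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;

public class FileContextFactories {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Default FS taken from fs.defaultFS in the configuration:
    FileContext byConf = FileContext.getFileContext(conf);
    // Explicit local FS, regardless of configuration:
    FileContext local = FileContext.getLocalFSFileContext();
    // Explicit URI (hypothetical namenode address):
    FileContext byUri =
        FileContext.getFileContext(URI.create("hdfs://namenode:8020"), conf);
    System.out.println(byConf.getWorkingDirectory());
    System.out.println(local.getWorkingDirectory());
    System.out.println(byUri.getDefaultFileSystem().getUri());
  }
}
```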
*/ public FSDataOutputStreamBuilder create(final Path f) throws IOException { @@ -837,7 +837,7 @@ public Void next(final AbstractFileSystem fs, final Path p) * RuntimeExceptions: * @throws InvalidPathException If path f is invalid * - * @return if delete success true, not false + * @return if delete success true, not false. */ public boolean delete(final Path f, final boolean recursive) throws AccessControlException, FileNotFoundException, @@ -868,7 +868,7 @@ public Boolean next(final AbstractFileSystem fs, final Path p) * @throws RpcServerException If an exception occurred in the RPC server * @throws UnexpectedServerException If server implementation throws * undeclared exception to RPC server - * @return input stream + * @return input stream. */ public FSDataInputStream open(final Path f) throws AccessControlException, FileNotFoundException, UnsupportedFileSystemException, IOException { @@ -899,7 +899,7 @@ public FSDataInputStream next(final AbstractFileSystem fs, final Path p) * @throws RpcServerException If an exception occurred in the RPC server * @throws UnexpectedServerException If server implementation throws * undeclared exception to RPC server - * @return output stream + * @return output stream. */ public FSDataInputStream open(final Path f, final int bufferSize) throws AccessControlException, FileNotFoundException, @@ -1009,7 +1009,7 @@ public Boolean next(final AbstractFileSystem fs, final Path p) * * @param src path to be renamed * @param dst new path after rename - * @param options rename options + * @param options rename options. * * @throws AccessControlException If access is denied * @throws FileAlreadyExistsException If dst already exists and @@ -1061,7 +1061,7 @@ public Void next(final AbstractFileSystem fs, final Path p) /** * Set permission of a path. - * @param f the path + * @param f the path. * @param permission - the new absolute permission (umask is not applied) * * @throws AccessControlException If access is denied @@ -1205,7 +1205,7 @@ public FileChecksum next(final AbstractFileSystem fs, final Path p) * Set the verify checksum flag for the file system denoted by the path. * This is only applicable if the * corresponding FileSystem supports checksum. By default doesn't do anything. - * @param verifyChecksum verify check sum + * @param verifyChecksum verify check sum. * @param f set the verifyChecksum for the Filesystem containing this path * * @throws AccessControlException If access is denied @@ -1260,9 +1260,9 @@ public FileStatus next(final AbstractFileSystem fs, final Path p) /** * Synchronize client metadata state. * - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. * @throws UnsupportedOperationException If file system for f is - * not supported + * not supported. */ public void msync() throws IOException, UnsupportedOperationException { defaultFS.msync(); @@ -1625,10 +1625,10 @@ public RemoteIterator next( /** * List CorruptFile Blocks. * - * @param path the path + * @param path the path. * @return an iterator over the corrupt files under the given path * (may contain duplicates if a file has more than one corrupt block) - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. 
*/ public RemoteIterator listCorruptFileBlocks(Path path) throws IOException { @@ -1752,7 +1752,7 @@ public class Util { * @throws RpcServerException If an exception occurred in the RPC server * @throws UnexpectedServerException If server implementation throws * undeclared exception to RPC server - * @return if f exists true, not false + * @return if f exists true, not false. */ public boolean exists(final Path f) throws AccessControlException, UnsupportedFileSystemException, IOException { @@ -1814,11 +1814,11 @@ public ContentSummary getContentSummary(Path f) /** * See {@link #listStatus(Path[], PathFilter)} * - * @param files files - * @throws AccessControlException If access is denied - * @throws FileNotFoundException If files does not exist - * @throws IOException If an I/O error occurred - * @return file status array + * @param files files. + * @throws AccessControlException If access is denied. + * @throws FileNotFoundException If files does not exist. + * @throws IOException If an I/O error occurred. + * @return file status array. */ public FileStatus[] listStatus(Path[] files) throws AccessControlException, FileNotFoundException, IOException { @@ -2158,17 +2158,17 @@ public FileStatus[] globStatus(final Path pathPattern, * Copy file from src to dest. See * {@link #copy(Path, Path, boolean, boolean)} * - * @param src src - * @param dst dst - * @throws AccessControlException If access is denied - * @throws FileAlreadyExistsException If file src already exists - * @throws FileNotFoundException if next file does not exist any more - * @throws ParentNotDirectoryException If parent of src is not a - * directory. + * @param src src. + * @param dst dst. + * @throws AccessControlException If access is denied. + * @throws FileAlreadyExistsException If file src already exists. + * @throws FileNotFoundException if next file does not exist any more. + * @throws ParentNotDirectoryException If parent of src is not a + * directory. * @throws UnsupportedFileSystemException If file system for - * src/dst is not supported - * @throws IOException If an I/O error occurred - * @return if success copy true, not false + * src/dst is not supported. + * @throws IOException If an I/O error occurred. + * @return if success copy true, not false. */ public boolean copy(final Path src, final Path dst) throws AccessControlException, FileAlreadyExistsException, @@ -2179,8 +2179,8 @@ public boolean copy(final Path src, final Path dst) /** * Copy from src to dst, optionally deleting src and overwriting dst. - * @param src src - * @param dst dst + * @param src src. + * @param dst dst. * @param deleteSource - delete src if true * @param overwrite overwrite dst if true; throw IOException if dst exists * and overwrite is false. @@ -2301,7 +2301,7 @@ private static void checkDependencies(Path qualSrc, Path qualDst) * Are qualSrc and qualDst of the same file system? * @param qualPath1 - fully qualified path * @param qualPath2 - fully qualified path - * @return is same fs true,not false + * @return is same fs true,not false. */ private static boolean isSameFS(Path qualPath1, Path qualPath2) { URI srcUri = qualPath1.toUri(); @@ -2325,12 +2325,12 @@ public synchronized void run() { * Resolves all symbolic links in the specified path. * Returns the new path object. * - * @param f the path - * @throws FileNotFoundException If f does not exist - * @throws UnresolvedLinkException If unresolved link occurred + * @param f the path. + * @throws FileNotFoundException If f does not exist. 
+ * @throws UnresolvedLinkException If unresolved link occurred. * @throws AccessControlException If access is denied. - * @throws IOException If an I/O error occurred - * @return resolve path + * @throws IOException If an I/O error occurred. + * @return resolve path. */ protected Path resolve(final Path f) throws FileNotFoundException, UnresolvedLinkException, AccessControlException, IOException { @@ -2348,7 +2348,7 @@ public Path next(final AbstractFileSystem fs, final Path p) * to, but not including the final path component. * @param f path to resolve * @return the new path object. - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ protected Path resolveIntermediate(final Path f) throws IOException { return new FSLinkResolver() { @@ -2367,7 +2367,7 @@ public FileStatus next(final AbstractFileSystem fs, final Path p) * @param f * Path which needs to be resolved * @return List of AbstractFileSystems accessed in the path - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ Set resolveAbstractFileSystems(final Path f) throws IOException { @@ -2428,7 +2428,7 @@ public static Map getAllStatistics() { * @param p Path for which delegations tokens are requested. * @param renewer the account name that is allowed to renew the token. * @return List of delegation tokens. - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ @InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" }) public List> getDelegationTokens( @@ -2580,7 +2580,7 @@ public AclStatus next(final AbstractFileSystem fs, final Path p) * @param path Path to modify * @param name xattr name. * @param value xattr value. - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public void setXAttr(Path path, String name, byte[] value) throws IOException { @@ -2599,7 +2599,7 @@ public void setXAttr(Path path, String name, byte[] value) * @param name xattr name. * @param value xattr value. * @param flag xattr set flag - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public void setXAttr(Path path, final String name, final byte[] value, final EnumSet flag) throws IOException { @@ -2624,7 +2624,7 @@ public Void next(final AbstractFileSystem fs, final Path p) * @param path Path to get extended attribute * @param name xattr name. * @return byte[] xattr value. - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public byte[] getXAttr(Path path, final String name) throws IOException { final Path absF = fixRelativePart(path); @@ -2647,7 +2647,7 @@ public byte[] next(final AbstractFileSystem fs, final Path p) * @param path Path to get extended attributes * @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs * of the file or directory - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public Map getXAttrs(Path path) throws IOException { final Path absF = fixRelativePart(path); @@ -2671,7 +2671,7 @@ public Map next(final AbstractFileSystem fs, final Path p) * @param names XAttr names. * @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs * of the file or directory - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. 
*/ public Map getXAttrs(Path path, final List names) throws IOException { @@ -2694,7 +2694,7 @@ public Map next(final AbstractFileSystem fs, final Path p) * * @param path Path to remove extended attribute * @param name xattr name - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public void removeXAttr(Path path, final String name) throws IOException { final Path absF = fixRelativePart(path); @@ -2718,7 +2718,7 @@ public Void next(final AbstractFileSystem fs, final Path p) * @param path Path to get extended attributes * @return List{@literal <}String{@literal >} of the XAttr names of the * file or directory - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public List listXAttrs(Path path) throws IOException { final Path absF = fixRelativePart(path); @@ -2835,7 +2835,7 @@ public Void next(final AbstractFileSystem fs, final Path p) /** * Set the source path to satisfy storage policy. * @param path The source path referring to either a directory or a file. - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public void satisfyStoragePolicy(final Path path) throws IOException { @@ -2857,7 +2857,7 @@ public Void next(final AbstractFileSystem fs, final Path p) * @param policyName the name of the target storage policy. The list * of supported Storage policies can be retrieved * via {@link #getAllStoragePolicies}. - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public void setStoragePolicy(final Path path, final String policyName) throws IOException { @@ -2875,7 +2875,7 @@ public Void next(final AbstractFileSystem fs, final Path p) /** * Unset the storage policy set for a given file or directory. * @param src file or directory path. - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public void unsetStoragePolicy(final Path src) throws IOException { final Path absF = fixRelativePart(src); @@ -2894,7 +2894,7 @@ public Void next(final AbstractFileSystem fs, final Path p) * * @param path file or directory path. * @return storage policy for give file. - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public BlockStoragePolicySpi getStoragePolicy(Path path) throws IOException { final Path absF = fixRelativePart(path); @@ -2912,7 +2912,7 @@ public BlockStoragePolicySpi next(final AbstractFileSystem fs, * Retrieve all the storage policies supported by this file system. * * @return all storage policies supported by this filesystem. - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public Collection getAllStoragePolicies() throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java index 915f73f19b5c6..f50c06cec3810 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java @@ -52,7 +52,7 @@ public class FileEncryptionInfo implements Serializable { * @param keyName name of the key used for the encryption zone * @param ezKeyVersionName name of the KeyVersion used to encrypt the * encrypted data encryption key. 
- * @param version version + * @param version version. */ public FileEncryptionInfo(final CipherSuite suite, final CryptoProtocolVersion version, final byte[] edek, @@ -136,7 +136,7 @@ public String toString() { * NOTE: * Currently this method is used by CLI for backward compatibility. * - * @return stable string + * @return stable string. */ public String toStringStable() { StringBuilder builder = new StringBuilder("{") diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java index 18e7154a7d613..fcef578b072f1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java @@ -117,16 +117,16 @@ public FileStatus(long length, boolean isdir, int block_replication, /** * Constructor for file systems on which symbolic links are not supported * - * @param length length - * @param isdir isdir - * @param block_replication block replication - * @param blocksize block size - * @param modification_time modification time - * @param access_time access_time - * @param permission permission - * @param owner owner - * @param group group - * @param path the path + * @param length length. + * @param isdir isdir. + * @param block_replication block replication. + * @param blocksize block size. + * @param modification_time modification time. + * @param access_time access_time. + * @param permission permission. + * @param owner owner. + * @param group group. + * @param path the path. */ public FileStatus(long length, boolean isdir, int block_replication, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java index 84dc9a01494de..0bc419b035380 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java @@ -281,8 +281,8 @@ public FileSystem run() throws IOException { /** * Returns the configured FileSystem implementation. * @param conf the configuration to use - * @return FileSystem - * @throws IOException If an I/O error occurred + * @return FileSystem. + * @throws IOException If an I/O error occurred. */ public static FileSystem get(Configuration conf) throws IOException { return get(getDefaultUri(conf), conf); @@ -394,7 +394,7 @@ protected URI getCanonicalUri() { * not specified and if {@link #getDefaultPort()} returns a * default port. * - * @param uri url + * @param uri url. * @return URI * @see NetUtils#getCanonicalUri(URI, int) */ @@ -459,7 +459,7 @@ public String getCanonicalServiceName() { } /** - * @return uri to string + * @return uri to string. * @deprecated call {@link #getUri()} instead. */ @Deprecated @@ -468,10 +468,10 @@ public String getCanonicalServiceName() { /** * @deprecated call {@link #get(URI, Configuration)} instead. * - * @param name name - * @param conf configuration - * @return file system - * @throws IOException If an I/O error occurred + * @param name name. + * @param conf configuration. + * @return file system. + * @throws IOException If an I/O error occurred. 
*/ @Deprecated public static FileSystem getNamed(String name, Configuration conf) @@ -527,9 +527,9 @@ public static LocalFileSystem getLocal(Configuration conf) * configuration and URI, cached and returned to the caller. * *
- * @param uri uri of the filesystem - * @param conf configrution - * @return filesystem instance + * @param uri uri of the filesystem. + * @param conf configuration. + * @return filesystem instance. * @throws IOException if the FileSystem cannot be instantiated. */ public static FileSystem get(URI uri, Configuration conf) throws IOException { @@ -559,7 +559,7 @@ public static FileSystem get(URI uri, Configuration conf) throws IOException { /** * Returns the FileSystem for this URI's scheme and authority and the * given user. Internally invokes {@link #newInstance(URI, Configuration)} - * @param uri uri of the filesystem + * @param uri uri of the filesystem. * @param conf the configuration to use * @param user to perform the get as * @return filesystem instance @@ -877,7 +877,7 @@ protected void checkPath(Path path) { * @param start offset into the given file * @param len length for which to get locations for * @throws IOException IO failure - * @return block location array + * @return block location array. */ public BlockLocation[] getFileBlockLocations(FileStatus file, long start, long len) throws IOException { @@ -918,7 +918,7 @@ public BlockLocation[] getFileBlockLocations(FileStatus file, * @param len length for which to get locations for * @throws FileNotFoundException when the path does not exist * @throws IOException IO failure - * @return block location array + * @return block location array. */ public BlockLocation[] getFileBlockLocations(Path p, long start, long len) throws IOException { @@ -981,7 +981,7 @@ public Path resolvePath(final Path p) throws IOException { * @param f the file name to open * @param bufferSize the size of the buffer to be used. * @throws IOException IO failure - * @return input stream + * @return input stream. */ public abstract FSDataInputStream open(Path f, int bufferSize) throws IOException; @@ -990,7 +990,7 @@ public abstract FSDataInputStream open(Path f, int bufferSize) * Opens an FSDataInputStream at the indicated Path. * @param f the file to open * @throws IOException IO failure - * @return input stream + * @return input stream. */ public FSDataInputStream open(Path f) throws IOException { return open(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY, @@ -1008,7 +1008,7 @@ public FSDataInputStream open(Path f) throws IOException { * @throws IOException IO failure * @throws UnsupportedOperationException If {@link #open(PathHandle, int)} * not overridden by subclass - * @return input stream + * @return input stream. */ public FSDataInputStream open(PathHandle fd) throws IOException { return open(fd, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY, @@ -1026,7 +1026,7 @@ public FSDataInputStream open(PathHandle fd) throws IOException { * not satisfied * @throws IOException IO failure * @throws UnsupportedOperationException If not overridden by subclass - * @return input stream + * @return input stream. */ public FSDataInputStream open(PathHandle fd, int bufferSize) throws IOException { @@ -1044,7 +1044,7 @@ public FSDataInputStream open(PathHandle fd, int bufferSize) * not overridden by subclass. * @throws UnsupportedOperationException If this FileSystem cannot enforce * the specified constraints. - * @return path handle + * @return path handle. */ public final PathHandle getPathHandle(FileStatus stat, HandleOpt...
opt) { * @param stat Referent in the target FileSystem * @param opt Constraints that determine the validity of the * {@link PathHandle} reference. - * @return path handle + * @return path handle. */ protected PathHandle createPathHandle(FileStatus stat, HandleOpt... opt) { throw new UnsupportedOperationException(); @@ -1071,7 +1071,7 @@ protected PathHandle createPathHandle(FileStatus stat, HandleOpt... opt) { * Files are overwritten by default. * @param f the file to create * @throws IOException IO failure - * @return output stream + * @return output stream. */ public FSDataOutputStream create(Path f) throws IOException { return create(f, true); @@ -1083,7 +1083,7 @@ public FSDataOutputStream create(Path f) throws IOException { * @param overwrite if a file with this name already exists, then if true, * the file will be overwritten, and if false an exception will be thrown. * @throws IOException IO failure - * @return output stream + * @return output stream. */ public FSDataOutputStream create(Path f, boolean overwrite) throws IOException { @@ -1101,7 +1101,7 @@ public FSDataOutputStream create(Path f, boolean overwrite) * @param f the file to create * @param progress to report progress * @throws IOException IO failure - * @return output stream + * @return output stream. */ public FSDataOutputStream create(Path f, Progressable progress) throws IOException { @@ -1118,7 +1118,7 @@ public FSDataOutputStream create(Path f, Progressable progress) * @param f the file to create * @param replication the replication factor * @throws IOException IO failure - * @return output stream + * @return output stream. */ public FSDataOutputStream create(Path f, short replication) throws IOException { @@ -1137,7 +1137,7 @@ public FSDataOutputStream create(Path f, short replication) * @param replication the replication factor * @param progress to report progress * @throws IOException IO failure - * @return output stream + * @return output stream. */ public FSDataOutputStream create(Path f, short replication, Progressable progress) throws IOException { @@ -1155,7 +1155,7 @@ public FSDataOutputStream create(Path f, short replication, * the file will be overwritten, and if false an error will be thrown. * @param bufferSize the size of the buffer to be used. * @throws IOException IO failure - * @return output stream + * @return output stream. */ public FSDataOutputStream create(Path f, boolean overwrite, @@ -1175,9 +1175,9 @@ public FSDataOutputStream create(Path f, * @param overwrite if a file with this name already exists, then if true, * the file will be overwritten, and if false an error will be thrown. * @param bufferSize the size of the buffer to be used. - * @param progress to report progress + * @param progress to report progress. * @throws IOException IO failure - * @return output stream + * @return output stream. */ public FSDataOutputStream create(Path f, boolean overwrite, @@ -1199,7 +1199,7 @@ public FSDataOutputStream create(Path f, * @param replication required block replication for the file. * @param blockSize the size of the buffer to be used. * @throws IOException IO failure - * @return output stream + * @return output stream. */ public FSDataOutputStream create(Path f, boolean overwrite, @@ -1218,9 +1218,9 @@ public FSDataOutputStream create(Path f, * @param bufferSize the size of the buffer to be used. * @param replication required block replication for the file. * @param blockSize the size of the buffer to be used. - * @param progress to report progress + * @param progress to report progress.
* @throws IOException IO failure - * @return output stream + * @return output stream. */ public FSDataOutputStream create(Path f, boolean overwrite, @@ -1247,7 +1247,7 @@ public FSDataOutputStream create(Path f, * @param progress the progress reporter * @throws IOException IO failure * @see #setPermission(Path, FsPermission) - * @return output stream + * @return output stream. */ public abstract FSDataOutputStream create(Path f, FsPermission permission, @@ -1269,7 +1269,7 @@ public abstract FSDataOutputStream create(Path f, * @param progress the progress reporter * @throws IOException IO failure * @see #setPermission(Path, FsPermission) - * @return output stream + * @return output stream. */ public FSDataOutputStream create(Path f, FsPermission permission, @@ -1296,7 +1296,7 @@ public FSDataOutputStream create(Path f, * found in conf will be used. * @throws IOException IO failure * @see #setPermission(Path, FsPermission) - * @return output stream + * @return output stream. */ public FSDataOutputStream create(Path f, FsPermission permission, @@ -1319,15 +1319,15 @@ public FSDataOutputStream create(Path f, * This a temporary method added to support the transition from FileSystem * to FileContext for user applications. * - * @param f path - * @param absolutePermission permission - * @param flag create flag - * @param bufferSize buffer size - * @param replication replication - * @param blockSize block size - * @param progress progress - * @param checksumOpt check sum opt - * @return output stream + * @param f path. + * @param absolutePermission permission. + * @param flag create flag. + * @param bufferSize buffer size. + * @param replication replication. + * @param blockSize block size. + * @param progress progress. + * @param checksumOpt check sum opt. + * @return output stream. * @throws IOException IO failure */ @Deprecated @@ -1383,10 +1383,10 @@ protected boolean primitiveMkdir(Path f, FsPermission absolutePermission) * This a temporary method added to support the transition from FileSystem * to FileContext for user applications. * - * @param f the path - * @param absolutePermission permission - * @param createParent create parent - * @throws IOException IO failure + * @param f the path. + * @param absolutePermission permission. + * @param createParent create parent. + * @throws IOException IO failure. */ @Deprecated protected void primitiveMkdir(Path f, FsPermission absolutePermission, @@ -1426,7 +1426,7 @@ protected void primitiveMkdir(Path f, FsPermission absolutePermission, * @param progress the progress reporter * @throws IOException IO failure * @see #setPermission(Path, FsPermission) - * @return output stream + * @return output stream. */ public FSDataOutputStream createNonRecursive(Path f, boolean overwrite, @@ -1450,7 +1450,7 @@ public FSDataOutputStream createNonRecursive(Path f, * @param progress the progress reporter * @throws IOException IO failure * @see #setPermission(Path, FsPermission) - * @return output stream + * @return output stream. */ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, boolean overwrite, int bufferSize, short replication, long blockSize, @@ -1474,7 +1474,7 @@ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, * @param progress the progress reporter * @throws IOException IO failure * @see #setPermission(Path, FsPermission) - * @return output stream + * @return output stream. 
*/ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, EnumSet flags, int bufferSize, short replication, long blockSize, @@ -1489,7 +1489,7 @@ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, * Important: the default implementation is not atomic * @param f path to use for create * @throws IOException IO failure - * @return if create new file success true,not false + * @return if create new file success true,not false. */ public boolean createNewFile(Path f) throws IOException { if (exists(f)) { @@ -1510,7 +1510,7 @@ public boolean createNewFile(Path f) throws IOException { * @throws IOException IO failure * @throws UnsupportedOperationException if the operation is unsupported * (default). - * @return output stream + * @return output stream. */ public FSDataOutputStream append(Path f) throws IOException { return append(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY, @@ -1525,7 +1525,7 @@ public FSDataOutputStream append(Path f) throws IOException { * @throws IOException IO failure * @throws UnsupportedOperationException if the operation is unsupported * (default). - * @return output stream + * @return output stream. */ public FSDataOutputStream append(Path f, int bufferSize) throws IOException { return append(f, bufferSize, null); @@ -1539,7 +1539,7 @@ public FSDataOutputStream append(Path f, int bufferSize) throws IOException { * @throws IOException IO failure * @throws UnsupportedOperationException if the operation is unsupported * (default). - * @return output stream + * @return output stream. */ public abstract FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException; @@ -1578,7 +1578,7 @@ public short getReplication(Path src) throws IOException { * This is the default behavior. * @param src file name * @param replication new replication - * @throws IOException an IO failure + * @throws IOException an IO failure. * @return true if successful, or the feature in unsupported; * false if replication is supported but the file does not exist, * or is a directory @@ -1624,7 +1624,7 @@ public boolean setReplication(Path src, short replication) * * @param src path to be renamed * @param dst new path after rename - * @param options rename options + * @param options rename options. * @throws FileNotFoundException src path does not exist, or the parent * path of dst does not exist. * @throws FileAlreadyExistsException dest path exists and is a file @@ -1719,9 +1719,9 @@ public boolean truncate(Path f, long newLength) throws IOException { /** * Delete a file/directory. - * @param f the path - * @throws IOException IO failure - * @return if delete success true, not false + * @param f the path. + * @throws IOException IO failure. + * @return if delete success true, not false. * @deprecated Use {@link #delete(Path, boolean)} instead. */ @Deprecated @@ -1838,7 +1838,7 @@ public boolean exists(Path f) throws IOException { * @param f path to check * @throws IOException IO failure * @deprecated Use {@link #getFileStatus(Path)} instead - * @return if f is directory true, not false + * @return if f is directory true, not false. */ @Deprecated public boolean isDirectory(Path f) throws IOException { @@ -1856,7 +1856,7 @@ public boolean isDirectory(Path f) throws IOException { * @param f path to check * @throws IOException IO failure * @deprecated Use {@link #getFileStatus(Path)} instead - * @return if f is file true, not false + * @return if f is file true, not false. 
*/ @Deprecated public boolean isFile(Path f) throws IOException { @@ -1869,7 +1869,7 @@ public boolean isFile(Path f) throws IOException { /** * The number of bytes in a file. - * @param f the path + * @param f the path. * @return the number of bytes; 0 for a directory * @deprecated Use {@link #getFileStatus(Path)} instead. * @throws FileNotFoundException if the path does not resolve @@ -1884,7 +1884,7 @@ public long getLength(Path f) throws IOException { * @param f path to use * @throws FileNotFoundException if the path does not resolve * @throws IOException IO failure - * @return content summary + * @return content summary. */ public ContentSummary getContentSummary(Path f) throws IOException { FileStatus status = getFileStatus(f); @@ -2019,9 +2019,9 @@ public boolean hasMore() { * @param f Path to list * @param token opaque iteration token returned by previous call, or null * if this is the first call. - * @return directory entries - * @throws FileNotFoundException when the path does not exist - * @throws IOException If an I/O error occurred + * @return directory entries. + * @throws FileNotFoundException when the path does not exist. + * @throws IOException If an I/O error occurred. */ @InterfaceAudience.Private protected DirectoryEntries listStatusBatch(Path f, byte[] token) throws @@ -2053,7 +2053,7 @@ private void listStatus(ArrayList results, Path f, /** * List corrupted file blocks. * - * @param path the path + * @param path the path. * @return an iterator over the corrupt files under the given path * (may contain duplicates if a file has more than one corrupt block) * @throws UnsupportedOperationException if the operation is unsupported @@ -2400,7 +2400,7 @@ public LocatedFileStatus next() throws IOException { /** Return the current user's home directory in this FileSystem. * The default implementation returns {@code "/user/$USER/"}. - * @return the path + * @return the path. */ public Path getHomeDirectory() { String username; @@ -2463,7 +2463,7 @@ public boolean mkdirs(Path f) throws IOException { * @param f path to create * @param permission to apply to f * @throws IOException IO failure - * @return if mkdir success true, not false + * @return if mkdir success true, not false. */ public abstract boolean mkdirs(Path f, FsPermission permission ) throws IOException; @@ -2511,7 +2511,7 @@ public void moveFromLocalFile(Path src, Path dst) * @param delSrc whether to delete the src * @param src path * @param dst path - * @throws IOException IO failure + * @throws IOException IO failure. */ public void copyFromLocalFile(boolean delSrc, Path src, Path dst) throws IOException { @@ -2626,7 +2626,7 @@ public void copyToLocalFile(boolean delSrc, Path src, Path dst, * @param fsOutputFile path of output file * @param tmpLocalFile path of local tmp file * @throws IOException IO failure - * @return the path + * @return the path. */ public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile) throws IOException { @@ -2674,7 +2674,7 @@ public void close() throws IOException { /** * Return the total size of all files in the filesystem. * @throws IOException IO failure - * @return the number of path used + * @return the number of path used. */ public long getUsed() throws IOException { Path path = new Path("/"); @@ -2683,9 +2683,9 @@ public long getUsed() throws IOException { /** * Return the total size of all files from a specified path. - * @param path the path + * @param path the path. 
* @throws IOException IO failure - * @return the number of path content summary + * @return the number of path content summary. */ public long getUsed(Path path) throws IOException { return getContentSummary(path).getLength(); @@ -2708,7 +2708,7 @@ public long getBlockSize(Path f) throws IOException { * Return the number of bytes that large input files should be optimally * be split into to minimize I/O time. * @deprecated use {@link #getDefaultBlockSize(Path)} instead - * @return default block size + * @return default block size. */ @Deprecated public long getDefaultBlockSize() { @@ -2761,8 +2761,8 @@ public short getDefaultReplication(Path path) { * In some FileSystem implementations such as HDFS metadata * synchronization is essential to guarantee consistency of read requests * particularly in HA setting. - * @throws IOException If an I/O error occurred - * @throws UnsupportedOperationException if the operation is unsupported + * @throws IOException If an I/O error occurred. + * @throws UnsupportedOperationException if the operation is unsupported. */ public void msync() throws IOException, UnsupportedOperationException { throw new UnsupportedOperationException(getClass().getCanonicalName() + @@ -2838,8 +2838,8 @@ static void checkAccessPermissions(FileStatus stat, FsAction mode) /** * See {@link FileContext#fixRelativePart}. - * @param p the path - * @return relative part + * @param p the path. + * @return relative part. */ protected Path fixRelativePart(Path p) { if (p.isUriPathAbsolute()) { @@ -2852,17 +2852,17 @@ protected Path fixRelativePart(Path p) { /** * See {@link FileContext#createSymlink(Path, Path, boolean)}. * - * @param target target path - * @param link link - * @param createParent create parent - * @throws AccessControlException if access is denied - * @throws FileAlreadyExistsException when the path does not exist - * @throws FileNotFoundException when the path does not exist - * @throws ParentNotDirectoryException if the parent path of dest is not - * a directory + * @param target target path. + * @param link link. + * @param createParent create parent. + * @throws AccessControlException if access is denied. + * @throws FileAlreadyExistsException when the path does not exist. + * @throws FileNotFoundException when the path does not exist. + * @throws ParentNotDirectoryException if the parent path of dest is not + * a directory. * @throws UnsupportedFileSystemException if there was no known implementation * for the scheme. - * @throws IOException see specific implementation + * @throws IOException raised on errors performing I/O. */ public void createSymlink(final Path target, final Path link, final boolean createParent) throws AccessControlException, @@ -2877,10 +2877,10 @@ public void createSymlink(final Path target, final Path link, /** * See {@link FileContext#getFileLinkStatus(Path)}. * - * @param f the path - * @throws AccessControlException if access is denied - * @throws FileNotFoundException when the path does not exist - * @throws IOException see specific implementation + * @param f the path. + * @throws AccessControlException if access is denied. + * @throws FileNotFoundException when the path does not exist. + * @throws IOException raised on errors performing I/O. * @throws UnsupportedFileSystemException if there was no known implementation * for the scheme. * @return file status @@ -2894,7 +2894,7 @@ public FileStatus getFileLinkStatus(final Path f) /** * See {@link AbstractFileSystem#supportsSymlinks()}. 
- * @return if support symlinkls true, not false + * @return if support symlinks true, not false. */ public boolean supportsSymlinks() { return false; @@ -2902,11 +2902,11 @@ public boolean supportsSymlinks() { /** * See {@link FileContext#getLinkTarget(Path)}. - * @param f the path + * @param f the path. * @throws UnsupportedOperationException if the operation is unsupported * (default outcome). - * @throws IOException IO failure - * @return the path + * @throws IOException IO failure. + * @return the path. */ public Path getLinkTarget(Path f) throws IOException { // Supporting filesystems should override this method @@ -2916,11 +2916,11 @@ public Path getLinkTarget(Path f) throws IOException { /** * See {@link AbstractFileSystem#getLinkTarget(Path)}. - * @param f the path + * @param f the path. * @throws UnsupportedOperationException if the operation is unsupported * (default outcome). - * @throws IOException IO failure - * @return the path + * @throws IOException IO failure. + * @return the path. */ protected Path resolveLink(Path f) throws IOException { // Supporting filesystems should override this method @@ -3324,7 +3324,7 @@ public void removeXAttr(Path path, String name) throws IOException { /** * Set the source path to satisfy storage policy. * @param path The source path referring to either a directory or a file. - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public void satisfyStoragePolicy(final Path path) throws IOException { throw new UnsupportedOperationException( @@ -3632,7 +3632,7 @@ FileSystem getUnique(URI uri, Configuration conf) throws IOException{ * @param conf configuration * @param key key to store/retrieve this FileSystem in the cache * @return a cached or newly instantiated FileSystem. - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ private FileSystem getInternal(URI uri, Configuration conf, Key key) throws IOException{ @@ -4127,7 +4127,7 @@ public void run() { /** * Get or create the thread-local data associated with the current thread. - * @return statistics data + * @return statistics data. */ public StatisticsData getThreadStatistics() { StatisticsData data = threadData.get(); @@ -4486,7 +4486,7 @@ public static synchronized Map getStatistics() { /** * Return the FileSystem classes that have Statistics. * @deprecated use {@link #getGlobalStorageStatistics()} - * @return statistics lists + * @return statistics lists. */ @Deprecated public static synchronized List getAllStatistics() { @@ -4495,7 +4495,7 @@ public static synchronized List getAllStatistics() { /** * Get the statistics for a particular file system. - * @param scheme scheme + * @param scheme scheme. * @param cls the class to lookup * @return a statistics object * @deprecated use {@link #getGlobalStorageStatistics()} @@ -4530,7 +4530,7 @@ public static synchronized void clearStatistics() { /** * Print all statistics for all file systems to {@code System.out} - * @throws IOException If an I/O error occurred + * @throws IOException If an I/O error occurred. */ public static synchronized void printStatistics() throws IOException { @@ -4571,7 +4571,7 @@ public StorageStatistics getStorageStatistics() { /** * Get the global storage statistics. - * @return global storage statistics + * @return global storage statistics.
*/ public static GlobalStorageStatistics getGlobalStorageStatistics() { return GlobalStorageStatistics.INSTANCE; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java index e2011e279a5e6..593495a1daa88 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java @@ -39,7 +39,7 @@ public abstract class FileSystemLinkResolver { * @param p Path on which to perform an operation * @return Generic type returned by operation * @throws IOException raised on errors performing I/O. - * @throws UnresolvedLinkException unresolved link exception + * @throws UnresolvedLinkException unresolved link exception. */ abstract public T doCall(final Path p) throws IOException, UnresolvedLinkException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java index 308d008dfff0b..2af0a7b9e742f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java @@ -162,8 +162,8 @@ public static void fullyDeleteOnExit(final File file) { * (3) If dir is a normal file, it is deleted. * (4) If dir is a normal directory, then dir and all its contents recursively * are deleted. - * @param dir dir - * @return fully delete status + * @param dir dir. + * @return fully delete status. */ public static boolean fullyDelete(final File dir) { return fullyDelete(dir, false); @@ -260,8 +260,8 @@ private static boolean deleteImpl(final File f, final boolean doLog) { * If dir is a symlink to a directory, all the contents of the actual * directory pointed to by dir will be deleted. * - * @param dir dir - * @return fullyDeleteContents Status + * @param dir dir. + * @return fullyDeleteContents Status. */ public static boolean fullyDeleteContents(final File dir) { return fullyDeleteContents(dir, false); @@ -273,10 +273,10 @@ public static boolean fullyDeleteContents(final File dir) { * If dir is a symlink to a directory, all the contents of the actual * directory pointed to by dir will be deleted. * - * @param dir dir + * @param dir dir. * @param tryGrantPermissions if 'true', try grant +rwx permissions to this * and all the underlying directories before trying to delete their contents. - * @return fully delete contents status + * @return fully delete contents status. */ public static boolean fullyDeleteContents(final File dir, final boolean tryGrantPermissions) { if (tryGrantPermissions) { @@ -353,13 +353,13 @@ private static void checkDependencies(FileSystem srcFS, /** * Copy files between FileSystems. - * @param srcFS src fs - * @param src src - * @param dstFS dst fs - * @param dst dst - * @param deleteSource delete source - * @param conf configuration - * @return if copy success true, not false + * @param srcFS src fs. + * @param src src. + * @param dstFS dst fs. + * @param dst dst. + * @param deleteSource delete source. + * @param conf configuration. + * @return if copy success true, not false. * @throws IOException raised on errors performing I/O. 
*/ public static boolean copy(FileSystem srcFS, Path src, @@ -412,13 +412,13 @@ public static boolean copy(FileSystem srcFS, Path[] srcs, /** * Copy files between FileSystems. * - * @param srcFS srcFs - * @param src src - * @param dstFS dstFs - * @param dst dst - * @param deleteSource delete source - * @param overwrite overwrite - * @param conf configuration + * @param srcFS srcFs. + * @param src src. + * @param dstFS dstFs. + * @param dst dst. + * @param deleteSource delete source. + * @param overwrite overwrite. + * @param conf configuration. * @throws IOException raised on errors performing I/O. * @return true if the operation succeeded. */ @@ -505,14 +505,13 @@ public static boolean copy(FileSystem srcFS, FileStatus srcStatus, /** * Copy local files to a FileSystem. * - * @param src src - * @param dstFS dstFs - * @param dst dst - * @param deleteSource delete source - * @param conf configuration + * @param src src. + * @param dstFS dstFs. + * @param dst dst. + * @param deleteSource delete source. + * @param conf configuration. * @throws IOException raised on errors performing I/O. * @return true if the operation succeeded. - * */ public static boolean copy(File src, FileSystem dstFS, Path dst, @@ -559,11 +558,11 @@ public static boolean copy(File src, /** * Copy FileSystem files to local files. * - * @param srcFS srcFs - * @param src src - * @param dst dst - * @param deleteSource delete source - * @param conf configuration + * @param srcFS srcFs. + * @param src src. + * @param dst dst. + * @param deleteSource delete source. + * @param conf configuration. * @throws IOException raised on errors performing I/O. * @return true if the operation succeeded. */ @@ -1010,7 +1009,7 @@ public static void unTar(InputStream inputStream, File untarDir, * * @param inFile The tar file as input. * @param untarDir The untar directory where to untar the tar file. - * @throws IOException an exception occurred + * @throws IOException an exception occurred. */ public static void unTar(File inFile, File untarDir) throws IOException { if (!untarDir.mkdirs()) { @@ -1284,7 +1283,7 @@ public static int symLink(String target, String linkname) throws IOException{ * @param perm the permission string * @return the exit code from the command * @throws IOException raised on errors performing I/O. - * @throws InterruptedException command interrupted + * @throws InterruptedException command interrupted. */ public static int chmod(String filename, String perm ) throws IOException, InterruptedException { @@ -1341,7 +1340,7 @@ public static void setOwner(File file, String username, * Platform independent implementation for {@link File#setReadable(boolean)} * File#setReadable does not work as expected on Windows. * @param f input file - * @param readable readable + * @param readable readable. * @return true on success, false otherwise */ public static boolean setReadable(File f, boolean readable) { @@ -1362,7 +1361,7 @@ public static boolean setReadable(File f, boolean readable) { * Platform independent implementation for {@link File#setWritable(boolean)} * File#setWritable does not work as expected on Windows. * @param f input file - * @param writable writable + * @param writable writable. * @return true on success, false otherwise */ public static boolean setWritable(File f, boolean writable) { @@ -1386,7 +1385,7 @@ public static boolean setWritable(File f, boolean writable) { * behavior on Windows as on Unix platforms. Creating, deleting or renaming * a file within that folder will still succeed on Windows. 
* @param f input file - * @param executable executable + * @param executable executable. * @return true on success, false otherwise */ public static boolean setExecutable(File f, boolean executable) { @@ -1770,7 +1769,7 @@ public static List getJarsInDirectory(String path) { * wildcard path to return all jars from the directory to use in a classpath. * * @param path the path to the directory. The path may include the wildcard. - * @param useLocal use local + * @param useLocal use local. * @return the list of jars as URLs, or an empty list if there are no jars, or * the directory does not exist */ From 32433ade07dd21ee306066bac04961685433e624 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 16 May 2022 20:23:17 -0700 Subject: [PATCH 51/53] HADOOP-18229. Fix Check Style. --- .../apache/hadoop/fs/FilterFileSystem.java | 2 +- .../java/org/apache/hadoop/fs/FsStatus.java | 12 +- .../org/apache/hadoop/fs/GlobExpander.java | 2 +- .../hadoop/fs/GlobalStorageStatistics.java | 2 +- .../java/org/apache/hadoop/fs/HardLink.java | 4 +- .../apache/hadoop/fs/LocalDirAllocator.java | 6 +- .../org/apache/hadoop/fs/LocalFileSystem.java | 4 +- .../fs/MD5MD5CRC32CastagnoliFileChecksum.java | 6 +- .../hadoop/fs/MD5MD5CRC32FileChecksum.java | 8 +- .../fs/MD5MD5CRC32GzipFileChecksum.java | 6 +- .../hadoop/fs/MultipartUploaderBuilder.java | 34 ++-- .../java/org/apache/hadoop/fs/Options.java | 6 +- .../java/org/apache/hadoop/fs/QuotaUsage.java | 30 +-- .../apache/hadoop/fs/RawLocalFileSystem.java | 4 +- .../java/org/apache/hadoop/fs/Seekable.java | 6 +- .../main/java/org/apache/hadoop/fs/Stat.java | 2 +- .../apache/hadoop/fs/StorageStatistics.java | 6 +- .../main/java/org/apache/hadoop/fs/Trash.java | 10 +- .../org/apache/hadoop/fs/TrashPolicy.java | 6 +- .../java/org/apache/hadoop/fs/XAttrCodec.java | 2 +- .../hadoop/fs/impl/AbstractFSBuilderImpl.java | 4 +- .../FutureDataInputStreamBuilderImpl.java | 6 +- .../hadoop/fs/impl/FutureIOSupport.java | 4 +- .../fs/impl/MultipartUploaderBuilderImpl.java | 4 +- .../hadoop/fs/permission/AclStatus.java | 6 +- .../apache/hadoop/fs/permission/FsAction.java | 14 +- .../hadoop/fs/permission/FsCreateModes.java | 10 +- .../hadoop/fs/permission/FsPermission.java | 38 ++-- .../fs/permission/PermissionStatus.java | 33 ++-- .../org/apache/hadoop/fs/shell/Command.java | 6 +- .../fs/shell/CommandWithDestination.java | 2 +- .../hadoop/fs/shell/find/BaseExpression.java | 6 +- .../hadoop/fs/shell/find/Expression.java | 8 +- .../hadoop/fs/shell/find/FindOptions.java | 2 +- .../apache/hadoop/fs/shell/find/Result.java | 10 +- .../fs/statistics/IOStatisticsSnapshot.java | 4 +- .../fs/statistics/IOStatisticsSupport.java | 2 +- .../hadoop/fs/statistics/MeanStatistic.java | 2 +- .../fs/store/audit/AuditingFunctions.java | 4 +- .../apache/hadoop/fs/viewfs/ConfigUtil.java | 54 +++--- .../org/apache/hadoop/fs/viewfs/FsGetter.java | 12 +- .../apache/hadoop/fs/viewfs/InodeTree.java | 44 ++--- .../hadoop/fs/viewfs/ViewFileSystem.java | 10 +- .../viewfs/ViewFileSystemOverloadScheme.java | 10 +- .../hadoop/fs/viewfs/ViewFileSystemUtil.java | 4 +- .../hadoop/ha/ActiveStandbyElector.java | 17 +- .../java/org/apache/hadoop/ha/HAAdmin.java | 4 +- .../apache/hadoop/ha/HAServiceProtocol.java | 8 +- .../org/apache/hadoop/ha/HAServiceTarget.java | 10 +- .../org/apache/hadoop/ha/HealthMonitor.java | 2 +- .../hadoop/ha/ZKFailoverController.java | 2 +- .../org/apache/hadoop/http/HttpServer2.java | 40 ++-- .../apache/hadoop/io/AbstractMapWritable.java | 12 +- .../java/org/apache/hadoop/io/ArrayFile.java | 42 
++--- .../hadoop/io/ArrayPrimitiveWritable.java | 2 +- .../apache/hadoop/io/BinaryComparable.java | 12 +- .../org/apache/hadoop/io/BooleanWritable.java | 6 +- .../io/BoundedByteArrayOutputStream.java | 6 +- .../org/apache/hadoop/io/ByteWritable.java | 2 +- .../org/apache/hadoop/io/BytesWritable.java | 4 +- .../apache/hadoop/io/CompressedWritable.java | 4 +- .../org/apache/hadoop/io/DataInputBuffer.java | 12 +- .../apache/hadoop/io/DataOutputBuffer.java | 16 +- .../org/apache/hadoop/io/EnumSetWritable.java | 12 +- .../org/apache/hadoop/io/FloatWritable.java | 4 +- .../java/org/apache/hadoop/io/IOUtils.java | 4 +- .../org/apache/hadoop/io/InputBuffer.java | 10 +- .../org/apache/hadoop/io/LongWritable.java | 2 +- .../java/org/apache/hadoop/io/MD5Hash.java | 50 ++--- .../java/org/apache/hadoop/io/MapFile.java | 178 +++++++++--------- .../apache/hadoop/io/MultipleIOException.java | 6 +- 71 files changed, 461 insertions(+), 463 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java index 331e2ab9830c8..cdbe51e330701 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java @@ -304,7 +304,7 @@ public Path getHomeDirectory() { * Set the current working directory for the given file system. All relative * paths will be resolved relative to it. * - * @param newDir new dir + * @param newDir new dir. */ @Override public void setWorkingDirectory(Path newDir) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java index fd69dc7615bbd..c4bc341bf4f7c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java @@ -38,9 +38,9 @@ public class FsStatus implements Writable { /** * Construct a FsStatus object, using the specified statistics. * - * @param capacity capacity - * @param used used - * @param remaining remaining + * @param capacity capacity. + * @param used used. + * @param remaining remaining. */ public FsStatus(long capacity, long used, long remaining) { this.capacity = capacity; @@ -50,7 +50,7 @@ public FsStatus(long capacity, long used, long remaining) { /** * Return the capacity in bytes of the file system. - * @return capacity + * @return capacity. */ public long getCapacity() { return capacity; @@ -58,7 +58,7 @@ public long getCapacity() { /** * Return the number of bytes used on the file system. - * @return used + * @return used. */ public long getUsed() { return used; @@ -66,7 +66,7 @@ public long getUsed() { /** * Return the number of remaining bytes on the file system. - * @return remaining + * @return remaining. 
*/ public long getRemaining() { return remaining; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java index efa10b1805895..c87444c6c87f7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java @@ -56,7 +56,7 @@ public StringWithOffset(String string, int offset) { * {a,b}/{c/\d} - {a,b}/c/d *
* - * @param filePattern file pattern + * @param filePattern file pattern. * @return expanded file patterns * @throws IOException raised on errors performing I/O. */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java index 9509f8436ab03..d94339034447a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java @@ -105,7 +105,7 @@ public synchronized void reset() { * Get an iterator that we can use to iterate throw all the global storage * statistics objects. * - * @return StorageStatistics Iterator + * @return StorageStatistics Iterator. */ synchronized public Iterator iterator() { Entry first = map.firstEntry(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java index 6cc8f9ef1b5f7..1624c5d395aec 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java @@ -207,9 +207,9 @@ public static void createHardLinkMult(File parentDir, String[] fileBaseNames, /** * Retrieves the number of links to the specified file. * - * @param fileName file name + * @param fileName file name. * @throws IOException raised on errors performing I/O. - * @return link count + * @return link count. */ public static int getLinkCount(File fileName) throws IOException { if (fileName == null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java index e9a011154a449..f6c9d3c7cb0dd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java @@ -80,7 +80,7 @@ public class LocalDirAllocator { /** * Create an allocator object. - * @param contextCfgItemName contextCfgItemName + * @param contextCfgItemName contextCfgItemName. */ public LocalDirAllocator(String contextCfgItemName) { this.contextCfgItemName = contextCfgItemName; @@ -216,7 +216,7 @@ public File createTmpFileForWrite(String pathStr, long size, /** * Method to check whether a context is valid. - * @param contextCfgItemName contextCfgItemName + * @param contextCfgItemName contextCfgItemName. * @return true/false */ public static boolean isContextValid(String contextCfgItemName) { @@ -228,7 +228,7 @@ public static boolean isContextValid(String contextCfgItemName) { /** * Removes the context from the context config items. * - * @param contextCfgItemName contextCfgItemName + * @param contextCfgItemName contextCfgItemName. 
*/ @Deprecated @InterfaceAudience.LimitedPrivate({"MapReduce"}) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java index 38cefaa663155..590cbd9a49ece 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java @@ -73,8 +73,8 @@ public LocalFileSystem(FileSystem rawLocalFileSystem) { /** * Convert a path to a File. - * @param path the path - * @return file + * @param path the path. + * @return file. */ public File pathToFile(Path path) { return ((RawLocalFileSystem)fs).pathToFile(path); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java index bff8eed214c56..354e4a6b4657d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java @@ -31,9 +31,9 @@ public MD5MD5CRC32CastagnoliFileChecksum() { /** * Create a MD5FileChecksum. * - * @param bytesPerCRC bytesPerCRC - * @param crcPerBlock crcPerBlock - * @param md5 md5 + * @param bytesPerCRC bytesPerCRC. + * @param crcPerBlock crcPerBlock. + * @param md5 md5. */ public MD5MD5CRC32CastagnoliFileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) { super(bytesPerCRC, crcPerBlock, md5); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java index 604f71c8f7c1e..c5ac381f78238 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java @@ -47,9 +47,9 @@ public MD5MD5CRC32FileChecksum() { /** * Create a MD5FileChecksum. * - * @param bytesPerCRC bytesPerCRC - * @param crcPerBlock crcPerBlock - * @param md5 md5 + * @param bytesPerCRC bytesPerCRC. + * @param crcPerBlock crcPerBlock. + * @param md5 md5. */ public MD5MD5CRC32FileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) { this.bytesPerCRC = bytesPerCRC; @@ -84,7 +84,7 @@ public byte[] getBytes() { /** * returns the CRC type. - * @return data check sum type + * @return data check sum type. */ public DataChecksum.Type getCrcType() { // default to the one that is understood by all releases. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java index a23baf4e11b43..f7996c8623717 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java @@ -31,9 +31,9 @@ public MD5MD5CRC32GzipFileChecksum() { /** * Create a MD5FileChecksum. * - * @param bytesPerCRC bytesPerCRC - * @param crcPerBlock crcPerBlock - * @param md5 md5 + * @param bytesPerCRC bytesPerCRC. + * @param crcPerBlock crcPerBlock. + * @param md5 md5. 
*/ public MD5MD5CRC32GzipFileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) { super(bytesPerCRC, crcPerBlock, md5); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java index 7c24f6695d0e8..e7b0865063ee5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java @@ -25,43 +25,43 @@ /** * Builder interface for Multipart readers. - * @param MultipartUploader Generic Type - * @param MultipartUploaderBuilder Generic Type + * @param MultipartUploader Generic Type. + * @param MultipartUploaderBuilder Generic Type. */ public interface MultipartUploaderBuilder> extends FSBuilder { /** * Set permission for the file. - * @param perm permission - * @return B Generics Type + * @param perm permission. + * @return B Generics Type. */ B permission(@Nonnull FsPermission perm); /** * Set the size of the buffer to be used. - * @param bufSize buffer size - * @return B Generics Type + * @param bufSize buffer size. + * @return B Generics Type. */ B bufferSize(int bufSize); /** * Set replication factor. - * @param replica replica - * @return B Generics Type + * @param replica replica. + * @return B Generics Type. */ B replication(short replica); /** * Set block size. - * @param blkSize blkSize - * @return B Generics Type + * @param blkSize blkSize. + * @return B Generics Type. */ B blockSize(long blkSize); /** * Create an FSDataOutputStream at the specified path. - * @return B Generics Type + * @return B Generics Type. */ B create(); @@ -69,21 +69,21 @@ public interface MultipartUploaderBuilder types) { @@ -368,7 +368,7 @@ protected String getTypesQuotaUsage(boolean hOption, /** * return the header of with the StorageTypes. * - * @param storageTypes storage types + * @param storageTypes storage types. * @return storage header string */ public static String getStorageTypeHeader(List storageTypes) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java index f3fc09c7afbb0..468b37a885d23 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java @@ -103,8 +103,8 @@ private Path makeAbsolute(Path f) { /** * Convert a path to a File. * - * @param path the path - * @return file + * @param path the path. + * @return file. */ public File pathToFile(Path path) { checkPath(path); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java index 59f0c66b2dc7f..f7546d58e6084 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java @@ -33,7 +33,7 @@ public interface Seekable { * The next read() will be from that location. Can't * seek past the end of the file. * - * @param pos offset from the start of the file + * @param pos offset from the start of the file. * @throws IOException raised on errors performing I/O. 
 */
  void seek(long pos) throws IOException;
@@ -41,7 +41,7 @@ public interface Seekable {
   /**
    * Return the current offset from the start of the file
    *
-   * @return offset from the start of the file
+   * @return offset from the start of the file.
    * @throws IOException raised on errors performing I/O.
    */
  long getPos() throws IOException;
@@ -50,7 +50,7 @@ public interface Seekable {
    * Seeks a different copy of the data.  Returns true if
    * found a new source, false otherwise.
    *
-   * @param targetPos target position
+   * @param targetPos target position.
    * @return true if found a new source, false otherwise.
    * @throws IOException raised on errors performing I/O.
    */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
index 562bfddc84ced..72a45309b17c9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
@@ -74,7 +74,7 @@ public FileStatus getFileStatus() throws IOException {
   /**
    * Whether Stat is supported on the current platform.
-   * @return if is available true, not false
+   * @return true if Stat is available on the current platform, false otherwise.
    */
   public static boolean isAvailable() {
     if (Shell.LINUX || Shell.FREEBSD || Shell.MAC) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
index 1122e5fbf1162..b4a86ab781280 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
@@ -147,14 +147,14 @@ public String getScheme() {
    * The values returned will depend on the type of FileSystem or FileContext
    * object. The values do not necessarily reflect a snapshot in time.
    *
-   * @return LongStatistic Iterator
+   * @return an iterator over the LongStatistic entries.
    */
   public abstract Iterator<LongStatistic> getLongStatistics();
   /**
    * Get the value of a statistic.
    *
-   * @param key key
+   * @param key key.
    * @return null if the statistic is not being tracked or is not a
    * long statistic. The value of the statistic, otherwise.
    */
@@ -163,7 +163,7 @@ public String getScheme() {
   /**
    * Return true if a statistic is being tracked.
    *
-   * @param key key
+   * @param key key.
    * @return True only if the statistic is being tracked.
    */
   public abstract boolean isTracked(String key);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
index f34b40f992e5d..a58a1a3cb8eb1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
@@ -101,7 +101,7 @@ public static boolean moveToAppropriateTrash(FileSystem fs, Path p,
   /**
    * Returns whether the trash is enabled for this filesystem.
    *
-   * @return return if isEnabled true,not false
+   * @return true if trash is enabled, false otherwise.
    */
   public boolean isEnabled() {
     return trashPolicy.isEnabled();
@@ -109,7 +109,7 @@ public boolean isEnabled() {
   /** Move a file or directory to the current trash directory.
    *
-   * @param path the path
+   * @param path the path.
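For reference, a minimal sketch of iterating the per-FileSystem statistics documented above (this assumes a configured default filesystem; the statistic names emitted vary by FileSystem implementation):

    import java.util.Iterator;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.StorageStatistics;
    import org.apache.hadoop.fs.StorageStatistics.LongStatistic;

    public class StorageStatisticsSketch {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        fs.listStatus(new Path("/"));  // perform some I/O so counters are non-zero
        StorageStatistics stats = fs.getStorageStatistics();
        Iterator<LongStatistic> it = stats.getLongStatistics();
        while (it.hasNext()) {
          LongStatistic s = it.next();
          System.out.println(s.getName() + " = " + s.getValue());
        }
      }
    }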
* @return false if the item is already in the trash or trash is disabled * @throws IOException raised on errors performing I/O. */ @@ -146,7 +146,7 @@ public void expungeImmediately() throws IOException { * get the current working directory. * * @throws IOException on raised on errors performing I/O. - * @return Trash Dir + * @return Trash Dir. */ Path getCurrentTrashDir() throws IOException { return trashPolicy.getCurrentTrashDir(); @@ -155,7 +155,7 @@ Path getCurrentTrashDir() throws IOException { /** * get the configured trash policy. * - * @return TrashPolicy + * @return TrashPolicy. */ TrashPolicy getTrashPolicy() { return trashPolicy; @@ -166,7 +166,7 @@ TrashPolicy getTrashPolicy() { * users, intended to be run by the superuser. * * @throws IOException on raised on errors performing I/O. - * @return Runnable + * @return Runnable. */ public Runnable getEmptier() throws IOException { return trashPolicy.getEmptier(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java index e4c7f4035248d..35e51f9e1cfb1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java @@ -67,7 +67,7 @@ public void initialize(Configuration conf, FileSystem fs) { /** * Move a file or directory to the current trash directory. - * @param path the path + * @param path the path. * @return false if the item is already in the trash or trash is disabled * @throws IOException raised on errors performing I/O. */ @@ -102,7 +102,7 @@ public void initialize(Configuration conf, FileSystem fs) { * It returns the trash location correctly for the path specified no matter * the path is in encryption zone or not. * - * @return the path + * @return the path. */ public abstract Path getCurrentTrashDir(); @@ -122,7 +122,7 @@ public Path getCurrentTrashDir(Path path) throws IOException { * users, intended to be run by the superuser. * * @throws IOException raised on errors performing I/O. - * @return Runnable + * @return Runnable. */ public abstract Runnable getEmptier() throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java index de1a5322e1ee0..df878d998706c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java @@ -102,7 +102,7 @@ public static byte[] decodeValue(String value) throws IOException { * while strings encoded as hexadecimal and base64 are prefixed with * 0x and 0s, respectively. * @param value byte[] value - * @param encoding encoding + * @param encoding encoding. * @return String string representation of value * @throws IOException raised on errors performing I/O. 
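The Trash and TrashPolicy methods above compose as follows; a minimal sketch against the local filesystem (the file path is hypothetical, and fs.trash.interval must be non-zero or isEnabled() returns false):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.Trash;

    public class TrashSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.setLong("fs.trash.interval", 60);  // minutes until checkpoints are deleted
        FileSystem fs = FileSystem.getLocal(conf);
        Path p = new Path("/tmp/trash-sketch.txt");  // hypothetical path
        fs.create(p).close();
        Trash trash = new Trash(fs, conf);
        if (trash.isEnabled()) {
          // false if the item is already in the trash or trash is disabled
          System.out.println("moved: " + trash.moveToTrash(p));
        }
      }
    }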
*/ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java index 44380904b3f26..4256522b2a372 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java @@ -340,14 +340,14 @@ public Configuration getOptions() { /** * Get all the keys that are set as mandatory keys. - * @return mandatory keys + * @return mandatory keys. */ public Set getMandatoryKeys() { return Collections.unmodifiableSet(mandatoryKeys); } /** * Get all the keys that are set as optional keys. - * @return optional keys + * @return optional keys. */ public Set getOptionalKeys() { return Collections.unmodifiableSet(optionalKeys); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java index cbeb06a60c0eb..833c21ec1a67f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java @@ -127,8 +127,8 @@ protected int getBufferSize() { /** * Set the size of the buffer to be used. * - * @param bufSize buffer size - * @return FutureDataInputStreamBuilder + * @param bufSize buffer size. + * @return FutureDataInputStreamBuilder. */ public FutureDataInputStreamBuilder bufferSize(int bufSize) { bufferSize = bufSize; @@ -141,7 +141,7 @@ public FutureDataInputStreamBuilder bufferSize(int bufSize) { * the actual builder: it allows for subclasses to do things after * construction. * - * @return FutureDataInputStreamBuilder + * @return FutureDataInputStreamBuilder. */ public FutureDataInputStreamBuilder builder() { return getThisBuilder(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java index 6b1fea7351a26..0a080426c2b24 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java @@ -75,8 +75,8 @@ public static T awaitFuture(final Future future) * See {@link FutureIO#awaitFuture(Future, long, TimeUnit)}. * @param future future to evaluate * @param type of the result. - * @param timeout timeout - * @param unit unit + * @param timeout timeout. + * @param unit unit. * @return the result, if all went well. 
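The awaitFuture call documented above (FutureIOSupport delegates to FutureIO, as the javadoc cross-references) pairs with the openFile() builder; a minimal sketch, assuming a configured default filesystem and a hypothetical path:

    import java.util.concurrent.CompletableFuture;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.util.functional.FutureIO;

    public class OpenFileSketch {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        CompletableFuture<FSDataInputStream> future =
            fs.openFile(new Path("/data/example.txt")).build();  // hypothetical path
        // awaitFuture blocks, unwrapping ExecutionException into the inner IOException
        try (FSDataInputStream in = FutureIO.awaitFuture(future)) {
          System.out.println("first byte: " + in.read());
        }
      }
    }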
* @throws InterruptedIOException future was interrupted * @throws IOException if something went wrong diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java index c704cb116c5d6..665bcc6a95660 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java @@ -89,8 +89,8 @@ protected MultipartUploaderBuilderImpl(@Nonnull FileContext fc, /** * Constructor. * - * @param fileSystem fileSystem - * @param p path + * @param fileSystem fileSystem. + * @param p path. */ protected MultipartUploaderBuilderImpl(@Nonnull FileSystem fileSystem, @Nonnull Path p) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java index ab273b305543b..260ee7e570c9b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java @@ -185,8 +185,8 @@ public Builder stickyBit(boolean stickyBit) { /** * Sets the permission for the file. - * @param permission permission - * @return Builder + * @param permission permission. + * @return Builder. */ public Builder setPermission(FsPermission permission) { this.permission = permission; @@ -225,7 +225,7 @@ private AclStatus(String owner, String group, boolean stickyBit, /** * Get the effective permission for the AclEntry * @param entry AclEntry to get the effective action - * @return FsAction + * @return FsAction. */ public FsAction getEffectivePermission(AclEntry entry) { return getEffectivePermission(entry, permission); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java index 7e328d2c31450..746e0e1e238f4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java @@ -48,8 +48,8 @@ private FsAction(String s) { /** * Return true if this action implies that action. - * @param that FsAction that - * @return if implies true,not false + * @param that FsAction that. + * @return if implies true,not false. */ public boolean implies(FsAction that) { if (that != null) { @@ -60,23 +60,23 @@ public boolean implies(FsAction that) { /** * AND operation. - * @param that FsAction that - * @return FsAction + * @param that FsAction that. + * @return FsAction. */ public FsAction and(FsAction that) { return vals[ordinal() & that.ordinal()]; } /** * OR operation. - * @param that FsAction that - * @return FsAction + * @param that FsAction that. + * @return FsAction. */ public FsAction or(FsAction that) { return vals[ordinal() | that.ordinal()]; } /** * NOT operation. - * @return FsAction + * @return FsAction. 
*/ public FsAction not() { return vals[7 - ordinal()]; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java index a684fd33f94d4..ff3b4f6d65a49 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java @@ -35,8 +35,8 @@ public final class FsCreateModes extends FsPermission { /** * Create from unmasked mode and umask. * - * @param mode mode - * @param umask umask + * @param mode mode. + * @param umask umask. * @return If the mode is already * an FsCreateModes object, return it. */ @@ -51,9 +51,9 @@ public static FsPermission applyUMask(FsPermission mode, /** * Create from masked and unmasked modes. * - * @param masked masked - * @param unmasked unmasked - * @return FsCreateModes + * @param masked masked. + * @param unmasked unmasked. + * @return FsCreateModes. */ public static FsCreateModes create(FsPermission masked, FsPermission unmasked) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java index 541d25d7c878b..33fed1d303990 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java @@ -58,8 +58,8 @@ public class FsPermission implements Writable, Serializable, /** * Create an immutable {@link FsPermission} object. - * @param permission permission - * @return FsPermission + * @param permission permission. + * @return FsPermission. */ public static FsPermission createImmutable(short permission) { return new ImmutableFsPermission(permission); @@ -89,7 +89,7 @@ public FsPermission(FsAction u, FsAction g, FsAction o, boolean sb) { /** * Construct by the given mode. - * @param mode mode + * @param mode mode. * @see #toShort() */ public FsPermission(short mode) { fromShort(mode); } @@ -190,7 +190,7 @@ public void readFields(DataInput in) throws IOException { /** * Get masked permission if exists. - * @return masked + * @return masked. */ public FsPermission getMasked() { return null; @@ -198,7 +198,7 @@ public FsPermission getMasked() { /** * Get unmasked permission if exists. - * @return unmasked + * @return unmasked. */ public FsPermission getUnmasked() { return null; @@ -207,9 +207,9 @@ public FsPermission getUnmasked() { /** * Create and initialize a {@link FsPermission} from {@link DataInput}. * - * @param in data input + * @param in data input. * @throws IOException raised on errors performing I/O. - * @return FsPermission + * @return FsPermission. */ public static FsPermission read(DataInput in) throws IOException { FsPermission p = new FsPermission(); @@ -219,7 +219,7 @@ public static FsPermission read(DataInput in) throws IOException { /** * Encode the object to a short. - * @return object to a short + * @return object to a short. */ public short toShort() { int s = (stickyBit ? 1 << 9 : 0) | @@ -319,8 +319,8 @@ public FsPermission applyUMask(FsPermission umask) { * * Octal umask, the specified bits are set in the file mode creation mask. * - * @param conf configuration - * @return FsPermission UMask + * @param conf configuration. 
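The FsAction algebra documented above (implies, AND, OR, NOT) behaves like three permission bits; a minimal sketch:

    import org.apache.hadoop.fs.permission.FsAction;

    public class FsActionSketch {
      public static void main(String[] args) {
        FsAction rw = FsAction.READ_WRITE;
        System.out.println(rw.implies(FsAction.READ));  // true
        System.out.println(rw.and(FsAction.READ));      // READ
        System.out.println(rw.or(FsAction.EXECUTE));    // ALL
        System.out.println(rw.not());                   // EXECUTE
      }
    }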
+ * @return FsPermission UMask. */ public static FsPermission getUMask(Configuration conf) { int umask = DEFAULT_UMASK; @@ -369,7 +369,7 @@ public boolean getAclBit() { * Returns true if the file is encrypted or directory is in an encryption zone. * * @return if the file is encrypted or directory - * is in an encryption zone true, not false + * is in an encryption zone true, not false. * * @deprecated Get encryption bit from the * {@link org.apache.hadoop.fs.FileStatus} object. @@ -383,7 +383,7 @@ public boolean getEncryptedBit() { * Returns true if the file or directory is erasure coded. * * @return if the file or directory is - * erasure coded true, not false + * erasure coded true, not false. * @deprecated Get ec bit from the {@link org.apache.hadoop.fs.FileStatus} * object. */ @@ -394,8 +394,8 @@ public boolean getErasureCodedBit() { /** * Set the user file creation mask (umask) - * @param conf configuration - * @param umask umask + * @param conf configuration. + * @param umask umask. */ public static void setUMask(Configuration conf, FsPermission umask) { conf.set(UMASK_LABEL, String.format("%1$03o", umask.toShort())); @@ -411,7 +411,7 @@ public static void setUMask(Configuration conf, FsPermission umask) { * {@link FsPermission#getFileDefault()} for file. * This method is kept for compatibility. * - * @return Default FsPermission + * @return Default FsPermission. */ public static FsPermission getDefault() { return new FsPermission((short)00777); @@ -420,7 +420,7 @@ public static FsPermission getDefault() { /** * Get the default permission for directory. * - * @return DirDefault FsPermission + * @return DirDefault FsPermission. */ public static FsPermission getDirDefault() { return new FsPermission((short)00777); @@ -429,7 +429,7 @@ public static FsPermission getDirDefault() { /** * Get the default permission for file. * - * @return FileDefault FsPermission + * @return FileDefault FsPermission. */ public static FsPermission getFileDefault() { return new FsPermission((short)00666); @@ -438,7 +438,7 @@ public static FsPermission getFileDefault() { /** * Get the default permission for cache pools. * - * @return CachePoolDefault FsPermission + * @return CachePoolDefault FsPermission. */ public static FsPermission getCachePoolDefault() { return new FsPermission((short)00755); @@ -447,7 +447,7 @@ public static FsPermission getCachePoolDefault() { /** * Create a FsPermission from a Unix symbolic permission string * @param unixSymbolicPermission e.g. "-rw-rw-rw-" - * @return FsPermission + * @return FsPermission. */ public static FsPermission valueOf(String unixSymbolicPermission) { if (unixSymbolicPermission == null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java index 22e43168cc867..be4beb506a63f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java @@ -41,10 +41,10 @@ public class PermissionStatus implements Writable { /** * Create an immutable {@link PermissionStatus} object. - * @param user user - * @param group group - * @param permission permission - * @return PermissionStatus + * @param user user. + * @param group group. + * @param permission permission. + * @return PermissionStatus. 
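Likewise for FsPermission, the umask application and symbolic parsing documented above can be checked with a minimal sketch (the umask is 022 unless fs.permissions.umask-mode overrides it):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class FsPermissionSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        FsPermission umask = FsPermission.getUMask(conf);  // 022 by default
        // 0666 masked by 022 gives 0644
        FsPermission file = FsPermission.getFileDefault().applyUMask(umask);
        FsPermission parsed = FsPermission.valueOf("-rw-r--r--");  // symbolic 0644
        System.out.println(file + " equals " + parsed + ": " + file.equals(parsed));
      }
    }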
*/ public static PermissionStatus createImmutable( String user, String group, FsPermission permission) { @@ -65,9 +65,9 @@ private PermissionStatus() {} /** * Constructor. * - * @param user user - * @param group group - * @param permission permission + * @param user user. + * @param group group. + * @param permission permission. */ public PermissionStatus(String user, String group, FsPermission permission) { username = user; @@ -77,20 +77,19 @@ public PermissionStatus(String user, String group, FsPermission permission) { /** * Return user name. - * @return user name + * @return user name. */ public String getUserName() {return username;} /** * Return group name. - * @return group name + * @return group name. */ public String getGroupName() {return groupname;} /** * Return permission. - * - * @return FsPermission + * @return FsPermission. */ public FsPermission getPermission() {return permission;} @@ -108,9 +107,9 @@ public void write(DataOutput out) throws IOException { /** * Create and initialize a {@link PermissionStatus} from {@link DataInput}. - * @param in data input + * @param in data input. * @throws IOException raised on errors performing I/O. - * @return PermissionStatus + * @return PermissionStatus. */ public static PermissionStatus read(DataInput in) throws IOException { PermissionStatus p = new PermissionStatus(); @@ -120,10 +119,10 @@ public static PermissionStatus read(DataInput in) throws IOException { /** * Serialize a {@link PermissionStatus} from its base components. - * @param out out - * @param username username - * @param groupname groupname - * @param permission FsPermission + * @param out out. + * @param username username. + * @param groupname groupname. + * @param permission FsPermission. * @throws IOException raised on errors performing I/O. */ public static void write(DataOutput out, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java index 038fa43069b97..7858238ee71fd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java @@ -80,7 +80,7 @@ protected Command() { /** * Constructor. * - * @param conf configuration + * @param conf configuration. */ protected Command(Configuration conf) { super(conf); @@ -142,7 +142,7 @@ public int runAll() { /** * sets the command factory for later use. - * @param factory factory + * @param factory factory. */ public void setCommandFactory(CommandFactory factory) { this.commandFactory = factory; @@ -151,7 +151,7 @@ public void setCommandFactory(CommandFactory factory) { /** * retrieves the command factory. * - * @return command factory + * @return command factory. 
*/ protected CommandFactory getCommandFactory() { return this.commandFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java index c698f12fc865c..69a418c1925eb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java @@ -120,7 +120,7 @@ protected void setDirectWrite(boolean flag) { * file will be preserved as far as target {@link FileSystem} * implementation allows. * - * @param preserve preserve + * @param preserve preserve. */ protected void setPreserve(boolean preserve) { if (preserve) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java index 542f3e9134993..cd9bbe2bc884e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java @@ -40,7 +40,7 @@ public abstract class BaseExpression implements Expression, Configurable { /** * Sets the usage text for this {@link Expression} . - * @param usage usage array + * @param usage usage array. */ protected void setUsage(String[] usage) { this.usage = usage; @@ -48,7 +48,7 @@ protected void setUsage(String[] usage) { /** * Sets the help text for this {@link Expression} . - * @param help help + * @param help help. */ protected void setHelp(String[] help) { this.help = help; @@ -100,7 +100,7 @@ public void finish() throws IOException { /** * Return the options to be used by this expression. - * @return options + * @return options. */ protected FindOptions getOptions() { return (this.options == null) ? new FindOptions() : this.options; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java index 3a4265c0ab7a5..353fe685cc9cd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java @@ -30,7 +30,7 @@ public interface Expression { /** * Set the options for this expression, called once before processing any * items. - * @param options options + * @param options options. * @throws IOException raised on errors performing I/O. */ public void setOptions(FindOptions options) throws IOException; @@ -79,13 +79,13 @@ public interface Expression { /** * Indicates whether this expression performs an action, i.e. provides output * back to the user. - * @return if is action true, not false + * @return if is action true, not false. */ public boolean isAction(); /** * Identifies the expression as an operator rather than a primary. - * @return if is operator true, not false + * @return if is operator true, not false. */ public boolean isOperator(); @@ -93,7 +93,7 @@ public interface Expression { * Returns the precedence of this expression * (only applicable to operators). * - * @return precedence + * @return precedence. 
*/ public int getPrecedence(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java index e3f24835f800f..c605186230590 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java @@ -264,7 +264,7 @@ public void setConfiguration(Configuration configuration) { /** * Return the {@link Configuration} return configuration {@link Configuration} - * @return configuration + * @return configuration. */ public Configuration getConfiguration() { return this.configuration; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java index a7dee3a97439b..a242681acd030 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java @@ -38,14 +38,14 @@ private Result(boolean success, boolean recurse) { /** * Should further directories be descended. * @return if is pass true,not false. - * */ + */ public boolean isDescend() { return this.descend; } /** * Should processing continue. - * @return if is pass true,not false + * @return if is pass true,not false. */ public boolean isPass() { return this.success; @@ -53,8 +53,8 @@ public boolean isPass() { /** * Returns the combination of this and another result. - * @param other other - * @return result + * @param other other. + * @return result. */ public Result combine(Result other) { return new Result(this.isPass() && other.isPass(), this.isDescend() @@ -63,7 +63,7 @@ public Result combine(Result other) { /** * Negate this result. - * @return Result + * @return Result. */ public Result negate() { return new Result(!this.isPass(), this.isDescend()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java index 4a84d47de77db..88606eb4b3055 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java @@ -238,7 +238,7 @@ public static JsonSerialization serializer() { /** * Serialize by converting each map to a TreeMap, and saving that * to the stream. - * @param s ObjectOutputStream + * @param s ObjectOutputStream. * @throws IOException raised on errors performing I/O. */ private synchronized void writeObject(ObjectOutputStream s) @@ -256,7 +256,7 @@ private synchronized void writeObject(ObjectOutputStream s) * Deserialize by loading each TreeMap, and building concurrent * hash maps from them. * - * @param s ObjectInputStream + * @param s ObjectInputStream. * @throws IOException raised on errors performing I/O. 
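The Result combinators above short-circuit the way find(1) tests do; a minimal sketch using the PASS/FAIL constants (assuming they are accessible from the caller's package):

    import org.apache.hadoop.fs.shell.find.Result;

    public class ResultSketch {
      public static void main(String[] args) {
        Result combined = Result.PASS.combine(Result.FAIL);
        System.out.println(combined.isPass());     // false: both operands must pass
        System.out.println(combined.isDescend());  // true: both operands still descend
        System.out.println(Result.FAIL.negate().isPass());  // true
      }
    }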
* @throws ClassNotFoundException class not found exception */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java index 90448471c5d04..bb4d9a44587a2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java @@ -71,7 +71,7 @@ private IOStatisticsSupport() { * Returns null if the source isn't of the write type * or the return value of * {@link IOStatisticsSource#getIOStatistics()} was null. - * @param source source + * @param source source. * @return an IOStatistics instance or null */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java index d330b0dc5a337..369db49654382 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java @@ -207,7 +207,7 @@ public synchronized double mean() { /** * Add another MeanStatistic. * @param other other value - * @return mean statistic + * @return mean statistic. */ public synchronized MeanStatistic add(final MeanStatistic other) { if (other.isEmpty()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java index 21ae5606f101a..9ad727f5a2df2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java @@ -86,8 +86,8 @@ public static InvocationRaisingIOE withinAuditSpan( * activates and deactivates the span around the inner one. * @param auditSpan audit span * @param operation operation - * @param Generics Type T - * @param Generics Type R + * @param Generics Type T. + * @param Generics Type R. * @return a new invocation. */ public static FunctionRaisingIOE withinAuditSpan( diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java index 1faf215e50553..c9ee5e232d944 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java @@ -48,7 +48,7 @@ public static String getConfigViewFsPrefix() { /** * Add a link to the config for the specified mount table * @param conf - add the link to this conf - * @param mountTableName mountTable + * @param mountTableName mountTable. * @param src - the src path name * @param target - the target URI link */ @@ -72,9 +72,9 @@ public static void addLink(final Configuration conf, final String src, /** * Add a LinkMergeSlash to the config for the specified mount table. * - * @param conf configuration - * @param mountTableName mountTable - * @param target target + * @param conf configuration. + * @param mountTableName mountTable. 
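MeanStatistic.add() aggregates sample counts and sums rather than averaging the two means; a minimal sketch (the two-argument constructor taking samples and sum is assumed here):

    import org.apache.hadoop.fs.statistics.MeanStatistic;

    public class MeanStatisticSketch {
      public static void main(String[] args) {
        MeanStatistic a = new MeanStatistic(2, 10);  // 2 samples, sum 10, mean 5.0
        MeanStatistic b = new MeanStatistic(3, 30);  // 3 samples, sum 30, mean 10.0
        a.add(b);                                    // now 5 samples, sum 40
        System.out.println(a.mean());                // 8.0, not (5.0 + 10.0) / 2
      }
    }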
+ * @param target target. */ public static void addLinkMergeSlash(Configuration conf, final String mountTableName, final URI target) { @@ -85,8 +85,8 @@ public static void addLinkMergeSlash(Configuration conf, /** * Add a LinkMergeSlash to the config for the default mount table. * - * @param conf configuration - * @param target targets + * @param conf configuration. + * @param target targets. */ public static void addLinkMergeSlash(Configuration conf, final URI target) { addLinkMergeSlash(conf, getDefaultMountTableName(conf), target); @@ -95,9 +95,9 @@ public static void addLinkMergeSlash(Configuration conf, final URI target) { /** * Add a LinkFallback to the config for the specified mount table. * - * @param conf configuration - * @param mountTableName mountTable - * @param target targets + * @param conf configuration. + * @param mountTableName mountTable. + * @param target targets. */ public static void addLinkFallback(Configuration conf, final String mountTableName, final URI target) { @@ -108,8 +108,8 @@ public static void addLinkFallback(Configuration conf, /** * Add a LinkFallback to the config for the default mount table. * - * @param conf configuration - * @param target targets + * @param conf configuration. + * @param target targets. */ public static void addLinkFallback(Configuration conf, final URI target) { addLinkFallback(conf, getDefaultMountTableName(conf), target); @@ -118,9 +118,9 @@ public static void addLinkFallback(Configuration conf, final URI target) { /** * Add a LinkMerge to the config for the specified mount table. * - * @param conf configuration - * @param mountTableName mountTable - * @param targets targets + * @param conf configuration. + * @param mountTableName mountTable. + * @param targets targets. */ public static void addLinkMerge(Configuration conf, final String mountTableName, final URI[] targets) { @@ -131,8 +131,8 @@ public static void addLinkMerge(Configuration conf, /** * Add a LinkMerge to the config for the default mount table. * - * @param conf configuration - * @param targets targets array + * @param conf configuration. + * @param targets targets array. */ public static void addLinkMerge(Configuration conf, final URI[] targets) { addLinkMerge(conf, getDefaultMountTableName(conf), targets); @@ -141,11 +141,11 @@ public static void addLinkMerge(Configuration conf, final URI[] targets) { /** * Add nfly link to configuration for the given mount table. * - * @param conf configuration - * @param mountTableName mount table - * @param src src - * @param settings settings - * @param targets targets + * @param conf configuration. + * @param mountTableName mount table. + * @param src src. + * @param settings settings. + * @param targets targets. */ public static void addLinkNfly(Configuration conf, String mountTableName, String src, String settings, final String targets) { @@ -158,11 +158,11 @@ public static void addLinkNfly(Configuration conf, String mountTableName, /** * Add nfly link to configuration for the given mount table. * - * @param conf configuration - * @param mountTableName mount table - * @param src src - * @param settings settings - * @param targets targets + * @param conf configuration. + * @param mountTableName mount table. + * @param src src. + * @param settings settings. + * @param targets targets. */ public static void addLinkNfly(Configuration conf, String mountTableName, String src, String settings, final URI ... 
 targets) {
@@ -215,7 +215,7 @@ public static void setHomeDirConf(final Configuration conf,
    * Add config variable for homedir the specified mount table
    * @param conf - add to this conf
    * @param homedir - the home dir path starting with slash
-   * @param mountTableName - the mount table
+   * @param mountTableName - the mount table.
    */
   public static void setHomeDirConf(final Configuration conf,
       final String mountTableName, final String homedir) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
index b2986a1c28434..f723f238e199e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
@@ -34,10 +34,10 @@ public class FsGetter {
   /**
    * Gets new file system instance of given uri.
-   * @param uri uri
-   * @param conf configuration
+   * @param uri uri.
+   * @param conf configuration.
    * @throws IOException raised on errors performing I/O.
-   * @return file system
+   * @return file system.
    */
   public FileSystem getNewInstance(URI uri, Configuration conf)
       throws IOException {
@@ -47,10 +47,10 @@ public FileSystem getNewInstance(URI uri, Configuration conf)
   /**
    * Gets file system instance of given uri.
    *
-   * @param uri uri
-   * @param conf configuration
+   * @param uri uri.
+   * @param conf configuration.
    * @throws IOException raised on errors performing I/O.
-   * @return FileSystem
+   * @return FileSystem.
    */
   public FileSystem get(URI uri, Configuration conf) throws IOException {
     return FileSystem.get(uri, conf);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
index db1719e992926..5360d55e10644 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
@@ -365,7 +365,7 @@ public static class INodeLink<T> extends INode<T> {
    * Get the target of the link. If a merge link then it returned
    * as "," separated URI list.
    *
-   * @return the path
+   * @return the path.
    */
   public Path getTargetLink() {
     StringBuilder result = new StringBuilder(targetDirLinkList[0].toString());
@@ -502,7 +502,7 @@ private void createLink(final String src, final String target,
   /**
    * The user of this class must subclass and implement the following
    * 3 abstract methods.
-   * @return Function
+   * @return a function that initializes and returns the target file system.
    */
   protected abstract Function<URI, T> initAndGetTargetFs();
@@ -595,19 +595,19 @@ Configuration getConfig() {
   /**
    * Create Inode Tree from the specified mount-table specified in Config.
    *
-   * @param config - the mount table keys are prefixed with
-   *       FsConstants.CONFIG_VIEWFS_PREFIX
-   * @param viewName - the name of the mount table
-   *       if null use defaultMT name
-   * @param theUri theUri
-   * @param initingUriAsFallbackOnNoMounts initingUriAsFallbackOnNoMounts
+   * @param config the mount table keys are prefixed with
+   *               FsConstants.CONFIG_VIEWFS_PREFIX.
+   * @param viewName the name of the mount table
+   *                 if null use defaultMT name.
+   * @param theUri theUri.
+   * @param initingUriAsFallbackOnNoMounts initingUriAsFallbackOnNoMounts.
   * @throws UnsupportedFileSystemException file system for uri is
-   not found.
-   * @throws URISyntaxException if the URI does not have an authority
-   * it is badly formed.
-   * @throws FileAlreadyExistsException there is a file at the path specified
-   * or is discovered on one of its ancestors.
-   * @throws IOException raised on errors performing I/O.
+   * not found.
+   * @throws URISyntaxException if the URI does not have an authority
+   * it is badly formed.
+   * @throws FileAlreadyExistsException there is a file at the path specified
+   * or is discovered on one of its ancestors.
+   * @throws IOException raised on errors performing I/O.
    */
   protected InodeTree(final Configuration config, final String viewName,
       final URI theUri, boolean initingUriAsFallbackOnNoMounts)
@@ -881,7 +881,7 @@ boolean isLastInternalDirLink() {
   /**
    * Resolve the pathname p relative to root InodeDir.
    * @param p - input path
-   * @param resolveLastComponent resolveLastComponent
+   * @param resolveLastComponent resolveLastComponent.
    * @return ResolveResult which allows further resolution of the remaining path
    * @throws IOException raised on errors performing I/O.
    */
@@ -1010,9 +1010,9 @@ private Path getRemainingPath(String[] path, int startIndex) {
    * resolveLastComponent: true
    * then return value is s3://hadoop.apache.com/_hadoop
    *
-   * @param srcPath srcPath
-   * @param resolveLastComponent resolveLastComponent
-   * @return ResolveResult
+   * @param srcPath srcPath.
+   * @param resolveLastComponent resolveLastComponent.
+   * @return ResolveResult.
    */
   protected ResolveResult tryResolveInRegexMountpoint(final String srcPath,
       final boolean resolveLastComponent) {
@@ -1039,10 +1039,10 @@ protected ResolveResult tryResolveInRegexMountpoint(final String srcPath,
    * targetOfResolvedPathStr: /targetTestRoot/hadoop-user1
    * remainingPath: /hadoop_dir1
    *
-   * @param resultKind resultKind
-   * @param resolvedPathStr resolvedPathStr
-   * @param targetOfResolvedPathStr targetOfResolvedPathStr
-   * @param remainingPath remainingPath
+   * @param resultKind resultKind.
+   * @param resolvedPathStr resolvedPathStr.
+   * @param targetOfResolvedPathStr targetOfResolvedPathStr.
+   * @param remainingPath remainingPath.
    * @return targetFileSystem or null on exceptions.
    */
   protected ResolveResult buildResolveResultForRegexMountPoint(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
index d2a9bb667f893..da3955b125e84 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
@@ -108,7 +108,7 @@ static AccessControlException readOnlyMountTable(final String operation,
   /**
    * Gets file system creator instance.
    *
-   * @return fs getter
+   * @return fs getter.
    */
   protected FsGetter fsGetter() {
     return new FsGetter();
@@ -384,10 +384,10 @@ protected FileSystem getTargetFileSystem(final String settings,
   }
   /**
-   * Convenience Constructor for apps to call directly
+   * Convenience Constructor for apps to call directly.
    * @param theUri which must be that of ViewFileSystem
-   * @param conf
-   * @throws IOException
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
*/ ViewFileSystem(final URI theUri, final Configuration conf) throws IOException { @@ -397,7 +397,7 @@ protected FileSystem getTargetFileSystem(final String settings, /** * Convenience Constructor for apps to call directly. - * @param conf configuration + * @param conf configuration. * @throws IOException raised on errors performing I/O. */ public ViewFileSystem(final Configuration conf) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java index 99c626be3a214..1c25a9536e121 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java @@ -140,7 +140,7 @@ public boolean supportAutoAddingFallbackOnNoMounts() { /** * Sets whether to add fallback automatically when no mount points found. * - * @param addAutoFallbackOnNoMounts addAutoFallbackOnNoMounts + * @param addAutoFallbackOnNoMounts addAutoFallbackOnNoMounts. */ public void setSupportAutoAddingFallbackOnNoMounts( boolean addAutoFallbackOnNoMounts) { @@ -323,7 +323,7 @@ private T newInstance(Class theClass, URI uri, Configuration conf) { * @param path - fs uri path * @param conf - configuration * @throws IOException raised on errors performing I/O. - * @return file system + * @return file system. */ public FileSystem getRawFileSystem(Path path, Configuration conf) throws IOException { @@ -343,9 +343,9 @@ public FileSystem getRawFileSystem(Path path, Configuration conf) * Gets the mount path info, which contains the target file system and * remaining path to pass to the target file system. * - * @param path the path - * @param conf configuration - * @return mount path info + * @param path the path. + * @param conf configuration. + * @return mount path info. * @throws IOException raised on errors performing I/O. */ public MountPathInfo getMountPathInfo(Path path, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java index 1f05076f47397..c9c6767097b87 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java @@ -44,7 +44,7 @@ private ViewFileSystemUtil() { /** * Check if the FileSystem is a ViewFileSystem. * - * @param fileSystem file system + * @param fileSystem file system. * @return true if the fileSystem is ViewFileSystem */ public static boolean isViewFileSystem(final FileSystem fileSystem) { @@ -54,7 +54,7 @@ public static boolean isViewFileSystem(final FileSystem fileSystem) { /** * Check if the FileSystem is a ViewFileSystemOverloadScheme. * - * @param fileSystem file system + * @param fileSystem file system. 
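Putting the ConfigUtil and ViewFileSystem pieces above together, a minimal sketch of a one-link mount table (the hdfs://nn1:8020 target is hypothetical; with the lazy target initialization this patch documents, it is not contacted at view construction):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FsConstants;
    import org.apache.hadoop.fs.viewfs.ConfigUtil;
    import org.apache.hadoop.fs.viewfs.ViewFileSystemUtil;

    public class ViewFsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // link /data in the default mount table to a (hypothetical) HDFS target
        ConfigUtil.addLink(conf, "/data", new URI("hdfs://nn1:8020/shared/data"));
        FileSystem viewFs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
        System.out.println(ViewFileSystemUtil.isViewFileSystem(viewFs));  // true
      }
    }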
* @return true if the fileSystem is ViewFileSystemOverloadScheme */ public static boolean isViewFileSystemOverloadScheme( diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java index 2236c9cdf4195..edd15af534a76 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java @@ -92,7 +92,7 @@ public interface ActiveStandbyElectorCallback { * Callback implementations are expected to manage their own * timeouts (e.g. when making an RPC to a remote node). * - * @throws ServiceFailedException Service Failed Exception + * @throws ServiceFailedException Service Failed Exception. */ void becomeActive() throws ServiceFailedException; @@ -122,7 +122,7 @@ public interface ActiveStandbyElectorCallback { * errors or Zookeeper persistent unavailability) then notifyFatalError is * called to notify the app about it. * - * @param errorMessage error message + * @param errorMessage error message. */ void notifyFatalError(String errorMessage); @@ -208,13 +208,12 @@ enum State { * ZK connection * @param app * reference to callback interface object - * @param maxRetryNum maxRetryNum - * @throws IOException - * raised on errors performing I/O. + * @param maxRetryNum maxRetryNum. + * @throws IOException raised on errors performing I/O. * @throws HadoopIllegalArgumentException - * if valid data is not supplied. + * if valid data is not supplied. * @throws KeeperException - * other zookeeper operation errors. + * other zookeeper operation errors. */ public ActiveStandbyElector(String zookeeperHostPorts, int zookeeperSessionTimeout, String parentZnodeName, List acl, @@ -458,7 +457,7 @@ public static class ActiveNotFoundException extends Exception { * @throws KeeperException * other zookeeper operation errors * @throws InterruptedException - * interrupted exception + * interrupted exception. * @throws IOException * when ZooKeeper connection could not be established */ @@ -806,7 +805,7 @@ private void reJoinElection(int sleepTime) { * This is non-static, and separated out, so that unit tests * can override the behavior not to sleep. * - * @param sleepMs sleep ms + * @param sleepMs sleep ms. */ @VisibleForTesting protected void sleepFor(int sleepMs) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java index d557e587652ae..9eeaacd76bca5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java @@ -327,8 +327,8 @@ private int getServiceState(final CommandLine cmd) * Return the serviceId as is, we are assuming it was * given as a service address of form {@literal <}host:ipcport{@literal >}. * - * @param serviceId serviceId - * @return service addr + * @param serviceId serviceId. + * @return service addr. 
*/ protected String getServiceAddr(String serviceId) { return serviceId; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java index 66604cc39134c..56c848617ffbc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java @@ -119,7 +119,7 @@ public void monitorHealth() throws HealthCheckFailedException, * Request service to transition to active state. No operation, if the * service is already in active state. * - * @param reqInfo reqInfo + * @param reqInfo reqInfo. * @throws ServiceFailedException * if transition from standby to active fails. * @throws AccessControlException @@ -137,7 +137,7 @@ public void transitionToActive(StateChangeRequestInfo reqInfo) * Request service to transition to standby state. No operation, if the * service is already in standby state. * - * @param reqInfo reqInfo + * @param reqInfo reqInfo. * @throws ServiceFailedException * if transition from active to standby fails. * @throws AccessControlException @@ -155,7 +155,7 @@ public void transitionToStandby(StateChangeRequestInfo reqInfo) * Request service to transition to observer state. No operation, if the * service is already in observer state. * - * @param reqInfo reqInfo + * @param reqInfo reqInfo. * @throws ServiceFailedException * if transition from standby to observer fails. * @throws AccessControlException @@ -179,7 +179,7 @@ void transitionToObserver(StateChangeRequestInfo reqInfo) * @throws IOException * if other errors happen * @see HAServiceStatus - * @return HAServiceStatus + * @return HAServiceStatus. */ @Idempotent public HAServiceStatus getServiceStatus() throws AccessControlException, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java index 324c5f2225c19..288a9dcbe0e53 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java @@ -93,8 +93,8 @@ public abstract void checkFencingConfigured() /** * @return a proxy to connect to the target HA Service. - * @param timeoutMs timeout in milliseconds - * @param conf Configuration + * @param timeoutMs timeout in milliseconds. + * @param conf Configuration. * @throws IOException raised on errors performing I/O. */ public HAServiceProtocol getProxy(Configuration conf, int timeoutMs) @@ -118,7 +118,7 @@ public HAServiceProtocol.HAServiceState getTransitionTargetHAStatus() { * returned proxy defaults to using {@link #getAddress()}, which means this * method's behavior is identical to {@link #getProxy(Configuration, int)}. * - * @param conf configuration + * @param conf configuration. * @param timeoutMs timeout in milliseconds * @return a proxy to connect to the target HA service for health monitoring * @throws IOException if there is an error @@ -157,8 +157,8 @@ private HAServiceProtocol getProxyForAddress(Configuration conf, /** * @return a proxy to the ZKFC which is associated with this HA service. - * @param conf configuration - * @param timeoutMs timeout in milliseconds + * @param conf configuration. + * @param timeoutMs timeout in milliseconds. 
* @throws IOException raised on errors performing I/O. */ public ZKFCProtocol getZKFCProxy(Configuration conf, int timeoutMs) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java index f0d1f29b7f95c..d222d52e37349 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java @@ -186,7 +186,7 @@ private void tryConnect() { * Connect to the service to be monitored. Stubbed out for easier testing. * * @throws IOException raised on errors performing I/O. - * @return HAServiceProtocol + * @return HAServiceProtocol. */ protected HAServiceProtocol createProxy() throws IOException { return targetToMonitor.getHealthMonitorProxy(conf, rpcTimeout, rpcConnectRetries); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java index 13e55ccfb3a16..d24d5630c5917 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java @@ -154,7 +154,7 @@ protected abstract void checkRpcAdminAccess() * nameservices can run on the same ZK quorum without having to manually * configure them to separate subdirectories. * - * @return ScopeInsideParentNode + * @return ScopeInsideParentNode. */ protected abstract String getScopeInsideParentNode(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java index 3bf3b590cb9fd..2928f88598207 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java @@ -270,7 +270,7 @@ public Builder setName(String name){ * specifies the binding address, and the port specifies the * listening port. Unspecified or zero port means that the server * can listen to any port. - * @return Builder + * @return Builder. */ public Builder addEndpoint(URI endpoint) { endpoints.add(endpoint); @@ -282,8 +282,8 @@ public Builder addEndpoint(URI endpoint) { * _HOST field in Kerberos principals. The hostname of the first listener * will be used if the name is unspecified. * - * @param hostName hostName - * @return Builder + * @param hostName hostName. + * @return Builder. */ public Builder hostName(String hostName) { this.hostName = hostName; @@ -313,8 +313,8 @@ public Builder keyPassword(String password) { * Specify whether the server should authorize the client in SSL * connections. * - * @param value value - * @return Builder + * @param value value. + * @return Builder. */ public Builder needsClientAuth(boolean value) { this.needsClientAuth = value; @@ -340,8 +340,8 @@ public Builder setConf(Configuration conf) { * Specify the SSL configuration to load. This API provides an alternative * to keyStore/keyPassword/trustStore. * - * @param sslCnf sslCnf - * @return Builder + * @param sslCnf sslCnf. + * @return Builder. 
*/ public Builder setSSLConf(Configuration sslCnf) { this.sslConf = sslCnf; @@ -909,9 +909,9 @@ private static FilterInitializer[] getFilterInitializers(Configuration conf) { /** * Add default apps. * - * @param parent contexthandlercollection + * @param parent contexthandlercollection. * @param appDir The application directory - * @param conf configuration + * @param conf configuration. * @throws IOException raised on errors performing I/O. */ protected void addDefaultApps(ContextHandlerCollection parent, @@ -1194,11 +1194,11 @@ public void addGlobalFilter(String name, String classname, /** * Define a filter for a context and set up default url mappings. * - * @param ctx ctx - * @param name name - * @param classname classname - * @param parameters parameters - * @param urls urls + * @param ctx ctx. + * @param name name. + * @param classname classname. + * @param parameters parameters. + * @param urls urls. */ public static void defineFilter(ServletContextHandler ctx, String name, String classname, Map parameters, String[] urls) { @@ -1309,7 +1309,7 @@ public int getPort() { /** * Get the address that corresponds to a particular connector. * - * @param index index + * @param index index. * @return the corresponding address for the connector, or null if there's no * such connector or the connector is not bounded or was closed. */ @@ -1330,8 +1330,8 @@ public InetSocketAddress getConnectorAddress(int index) { /** * Set the min, max number of worker threads (simultaneous connections). * - * @param min min - * @param max max + * @param min min. + * @param max max. */ public void setThreads(int min, int max) { QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool(); @@ -1536,7 +1536,7 @@ void openListeners() throws Exception { /** * stop the server. * - * @throws Exception exception + * @throws Exception exception. */ public void stop() throws Exception { MultiException exception = null; @@ -1659,8 +1659,8 @@ public static boolean isInstrumentationAccessAllowed( * Does the user sending the HttpServletRequest has the administrator ACLs? If * it isn't the case, response will be modified to send an error to the user. * - * @param servletContext servletContext - * @param request request + * @param servletContext servletContext. + * @param request request. * @param response used to send the error response if user does not have admin access. * @return true if admin-authorized, false otherwise * @throws IOException raised on errors performing I/O. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java index 616d5ebccf05a..8cf82f425090d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java @@ -86,7 +86,7 @@ private synchronized void addToMap(Class clazz, byte id) { /** * Add a Class to the maps if it is not already present. - * @param clazz clazz + * @param clazz clazz. */ protected synchronized void addToMap(Class clazz) { if (classToIdMap.containsKey(clazz)) { @@ -102,8 +102,8 @@ protected synchronized void addToMap(Class clazz) { /** * the Class class for the specified id. - * @param id id - * @return the Class class for the specified id + * @param id id. + * @return the Class class for the specified id. 
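The HttpServer2.Builder calls above chain in the usual way; a minimal sketch (the class is annotated LimitedPrivate, so this is illustrative rather than a supported public API):

    import java.net.URI;
    import org.apache.hadoop.http.HttpServer2;

    public class HttpServerSketch {
      public static void main(String[] args) throws Exception {
        HttpServer2 server = new HttpServer2.Builder()
            .setName("sketch")
            .addEndpoint(URI.create("http://localhost:0"))  // port 0: pick a free port
            .setFindPort(true)
            .build();
        server.start();
        System.out.println("bound to " + server.getConnectorAddress(0));
        server.stop();
      }
    }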
*/ protected Class getClass(byte id) { return idToClassMap.get(id); @@ -111,8 +111,8 @@ protected Class getClass(byte id) { /** * get id. - * @return the id for the specified Class - * @param clazz clazz + * @return the id for the specified Class. + * @param clazz clazz. */ protected byte getId(Class clazz) { return classToIdMap.containsKey(clazz) ? classToIdMap.get(clazz) : -1; @@ -120,7 +120,7 @@ protected byte getId(Class clazz) { /** * Used by child copy constructors. - * @param other other + * @param other other. */ protected synchronized void copy(Writable other) { if (other != null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java index ce0075aedcc14..313caa6360827 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java @@ -41,10 +41,10 @@ public static class Writer extends MapFile.Writer { /** * Create the named file for values of the named class. * - * @param conf configuration - * @param fs file system - * @param file file - * @param valClass valClass + * @param conf configuration. + * @param fs file system. + * @param file file. + * @param valClass valClass. * @throws IOException raised on errors performing I/O. */ public Writer(Configuration conf, FileSystem fs, @@ -57,12 +57,12 @@ public Writer(Configuration conf, FileSystem fs, /** * Create the named file for values of the named class. * - * @param conf configuration - * @param fs file system - * @param file file - * @param valClass valClass - * @param compress compress - * @param progress progress + * @param conf configuration. + * @param fs file system. + * @param file file. + * @param valClass valClass. + * @param compress compress. + * @param progress progress. * @throws IOException raised on errors performing I/O. */ public Writer(Configuration conf, FileSystem fs, @@ -78,7 +78,7 @@ public Writer(Configuration conf, FileSystem fs, /** * Append a value to the file. - * @param value value + * @param value value. * @throws IOException raised on errors performing I/O. */ public synchronized void append(Writable value) throws IOException { @@ -93,9 +93,9 @@ public static class Reader extends MapFile.Reader { /** * Construct an array reader for the named file. - * @param fs FileSystem - * @param file file - * @param conf configuration + * @param fs FileSystem. + * @param file file. + * @param conf configuration. * @throws IOException raised on errors performing I/O. */ public Reader(FileSystem fs, String file, @@ -106,7 +106,7 @@ public Reader(FileSystem fs, String file, /** * Positions the reader before its nth value. * - * @param n n key + * @param n n key. * @throws IOException raised on errors performing I/O. */ public synchronized void seek(long n) throws IOException { @@ -117,9 +117,9 @@ public synchronized void seek(long n) throws IOException { /** * Read and return the next value in the file. * - * @param value value + * @param value value. * @throws IOException raised on errors performing I/O. - * @return Writable + * @return Writable. */ public synchronized Writable next(Writable value) throws IOException { return next(key, value) ? value : null; @@ -130,7 +130,7 @@ public synchronized Writable next(Writable value) throws IOException { * #seek(long)}, {@link #next(Writable)}, or {@link * #get(long,Writable)}. 
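[Editor's note] The ArrayFile constructors annotated above come straight from these hunks; a minimal write/read sketch under that API (the path is hypothetical):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.ArrayFile;
import org.apache.hadoop.io.LongWritable;

public class ArrayFileSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    String file = "/tmp/demo.array";   // hypothetical location

    ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs, file, LongWritable.class);
    for (long i = 0; i < 10; i++) {
      writer.append(new LongWritable(i * i));  // the nth appended value is addressed by index n
    }
    writer.close();

    ArrayFile.Reader reader = new ArrayFile.Reader(fs, file, conf);
    LongWritable value = new LongWritable();
    reader.get(7, value);                      // random access to the nth value
    System.out.println(value.get());           // prints 49
    reader.close();
  }
}
```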
* - * @return key key + * @return key key. * @throws IOException raised on errors performing I/O. */ public synchronized long key() throws IOException { @@ -139,10 +139,10 @@ public synchronized long key() throws IOException { /** * Return the nth value in the file. - * @param n n key - * @param value value + * @param n n key. + * @param value value. * @throws IOException raised on errors performing I/O. - * @return writable + * @return writable. */ public synchronized Writable get(long n, Writable value) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java index adafe0412bc83..ce7813e7483a6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java @@ -108,7 +108,7 @@ public ArrayPrimitiveWritable() { * Construct an instance of known type but no value yet * for use with type-specific wrapper classes. * - * @param componentType componentType + * @param componentType componentType. */ public ArrayPrimitiveWritable(Class componentType) { checkPrimitive(componentType); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java index 24ad68fab0176..a78ff8b6c583e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java @@ -32,14 +32,14 @@ public abstract class BinaryComparable implements Comparable { /** * Return n st bytes 0..n-1 from {#getBytes()} are valid. * - * @return length + * @return length. */ public abstract int getLength(); /** * Return representative byte array for this instance. * - * @return getBytes + * @return getBytes. */ public abstract byte[] getBytes(); @@ -58,10 +58,10 @@ public int compareTo(BinaryComparable other) { /** * Compare bytes from {#getBytes()} to those provided. * - * @param other other - * @param off off - * @param len len - * @return compareBytes + * @param other other. + * @param off off. + * @param len len. + * @return compareBytes. */ public int compareTo(byte[] other, int off, int len) { return WritableComparator.compareBytes(getBytes(), 0, getLength(), diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java index a779254fdc277..789b866255b01 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java @@ -36,7 +36,7 @@ public class BooleanWritable implements WritableComparable { public BooleanWritable() {}; /** - * @param value value + * @param value value. */ public BooleanWritable(boolean value) { set(value); @@ -44,7 +44,7 @@ public BooleanWritable(boolean value) { /** * Set the value of the BooleanWritable. - * @param value value + * @param value value. */ public void set(boolean value) { this.value = value; @@ -52,7 +52,7 @@ public void set(boolean value) { /** * Returns the value of the BooleanWritable. 
- * @return the value of the BooleanWritable + * @return the value of the BooleanWritable. */ public boolean get() { return value; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java index 470e61ed1a302..542721f318d0a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java @@ -116,7 +116,7 @@ public void reset() { /** * Return the current limit. - * @return limit + * @return limit. */ public int getLimit() { return limit; @@ -125,7 +125,7 @@ public int getLimit() { /** * Returns the underlying buffer. * Data is only valid to {@link #size()}. - * @return the underlying buffer + * @return the underlying buffer. */ public byte[] getBuffer() { return buffer; @@ -135,7 +135,7 @@ public byte[] getBuffer() { * Returns the length of the valid data * currently in the buffer. * - * @return the length of the valid data + * @return the length of the valid data. */ public int size() { return currentPointer - startOffset; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java index 86374fc4b8fa0..c4b88f4b5c98b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java @@ -41,7 +41,7 @@ public ByteWritable() {} /** * Return the value of this ByteWritable. - * @return value bytes + * @return value bytes. */ public byte get() { return value; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java index 2e753d489979d..80a23f86ce80b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java @@ -78,7 +78,7 @@ public BytesWritable(byte[] bytes, int length) { * Get a copy of the bytes that is exactly the length of the data. * See {@link #getBytes()} for faster access to the underlying array. * - * @return copyBytes + * @return copyBytes. */ public byte[] copyBytes() { return Arrays.copyOf(bytes, size); @@ -115,7 +115,7 @@ public int getLength() { /** * Get the current size of the buffer. * @deprecated Use {@link #getLength()} instead. - * @return current size of the buffer + * @return current size of the buffer. */ @Deprecated public int getSize() { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java index 1f303a8888a04..c0315ab828c3b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java @@ -69,7 +69,7 @@ protected void ensureInflated() { /** * Subclasses implement this instead of {@link #readFields(DataInput)}. - * @param in data input + * @param in data input. * @throws IOException raised on errors performing I/O. 
*/ protected abstract void readFieldsCompressed(DataInput in) @@ -94,7 +94,7 @@ public final void write(DataOutput out) throws IOException { /** * Subclasses implement this instead of {@link #write(DataOutput)}. * - * @param out data output + * @param out data output. * @throws IOException raised on errors performing I/O. */ protected abstract void writeCompressed(DataOutput out) throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java index e707d4a83fca3..85e905d870096 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java @@ -143,8 +143,8 @@ private DataInputBuffer(Buffer buffer) { /** * Resets the data that the buffer reads. * - * @param input input - * @param length length + * @param input input. + * @param length length. */ public void reset(byte[] input, int length) { buffer.reset(input, 0, length); @@ -153,9 +153,9 @@ public void reset(byte[] input, int length) { /** * Resets the data that the buffer reads. * - * @param input input - * @param start start - * @param length length + * @param input input. + * @param start start. + * @param length length. */ public void reset(byte[] input, int start, int length) { buffer.reset(input, start, length); @@ -168,7 +168,7 @@ public byte[] getData() { /** * Returns the current position in the input. * - * @return position + * @return position. */ public int getPosition() { return buffer.getPosition(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java index fec36488b96b1..4c1fa41e149c4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java @@ -103,19 +103,19 @@ private DataOutputBuffer(Buffer buffer) { * Returns the current contents of the buffer. * Data is only valid to {@link #getLength()}. * - * @return data byte + * @return data byte. */ public byte[] getData() { return buffer.getData(); } /** * Returns the length of the valid data currently in the buffer. - * @return length + * @return length. */ public int getLength() { return buffer.getLength(); } /** * Resets the buffer to empty. - * @return DataOutputBuffer + * @return DataOutputBuffer. */ public DataOutputBuffer reset() { this.written = 0; @@ -125,8 +125,8 @@ public DataOutputBuffer reset() { /** * Writes bytes from a DataInput directly into the buffer. - * @param in data input - * @param length length + * @param in data input. + * @param length length. * @throws IOException raised on errors performing I/O. */ public void write(DataInput in, int length) throws IOException { @@ -135,7 +135,7 @@ public void write(DataInput in, int length) throws IOException { /** * Write to a file stream. - * @param out OutputStream + * @param out OutputStream. * @throws IOException raised on errors performing I/O. */ public void writeTo(OutputStream out) throws IOException { @@ -147,8 +147,8 @@ public void writeTo(OutputStream out) throws IOException { * be used to overwrite existing data in the buffer, i.e., buffer#count cannot * be increased, and DataOutputStream#written cannot be increased. 
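[Editor's note] The CompressedWritable hunks above document the two abstract hooks a subclass implements instead of write(DataOutput)/readFields(DataInput). A hypothetical subclass sketch, with the lazy-inflation call the base class provides:

```java
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.CompressedWritable;

// Hypothetical subclass: the base class compresses what writeCompressed()
// emits and defers readFieldsCompressed() until a field is first needed.
public class CompressedRecord extends CompressedWritable {
  private String payload = "";

  @Override
  protected void writeCompressed(DataOutput out) throws IOException {
    out.writeUTF(payload); // body is deflated by CompressedWritable.write()
  }

  @Override
  protected void readFieldsCompressed(DataInput in) throws IOException {
    payload = in.readUTF(); // invoked lazily via ensureInflated()
  }

  public String getPayload() {
    ensureInflated();       // decompress on demand, as documented above
    return payload;
  }
}
```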
* - * @param v v - * @param offset offset + * @param v v. + * @param offset offset. * @throws IOException raised on errors performing I/O. */ public void writeInt(int v, int offset) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java index 7482b0304e54d..4b1dc7513d054 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java @@ -64,8 +64,8 @@ public boolean add(E e) { * the argument value's size is bigger than zero, the argument * elementType is not be used. * - * @param value enumSet value - * @param elementType elementType + * @param value enumSet value. + * @param elementType elementType. */ public EnumSetWritable(EnumSet value, Class elementType) { set(value, elementType); @@ -75,7 +75,7 @@ public EnumSetWritable(EnumSet value, Class elementType) { * Construct a new EnumSetWritable. Argument value should not be null * or empty. * - * @param value enumSet value + * @param value enumSet value. */ public EnumSetWritable(EnumSet value) { this(value, null); @@ -88,8 +88,8 @@ public EnumSetWritable(EnumSet value) { * null. If the argument value's size is bigger than zero, the * argument elementType is not be used. * - * @param value enumSet Value - * @param elementType elementType + * @param value enumSet Value. + * @param elementType elementType. */ public void set(EnumSet value, Class elementType) { if ((value == null || value.size() == 0) @@ -108,7 +108,7 @@ public void set(EnumSet value, Class elementType) { /** * Return the value of this EnumSetWritable. - * @return EnumSet + * @return EnumSet. */ public EnumSet get() { return value; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java index 88bdf13c75c70..864bb8752f5c4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java @@ -35,13 +35,13 @@ public FloatWritable() {} /** * Set the value of this FloatWritable. - * @param value value + * @param value value. */ public void set(float value) { this.value = value; } /** * Return the value of this FloatWritable. - * @return value + * @return value. */ public float get() { return value; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java index ef47f4c7fa47a..f0a9b0b6952f2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java @@ -86,7 +86,7 @@ public static void copyBytes(InputStream in, OutputStream out, * * @param in InputStrem to read from * @param out OutputStream to write to - * @param buffSize the size of the buffer + * @param buffSize the size of the buffer. * @throws IOException raised on errors performing I/O. 
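[Editor's note] A round-trip sketch tying together the EnumSetWritable rules documented above (a non-null elementType is required only when the set can be empty) with the DataOutputBuffer/DataInputBuffer reset semantics from the earlier hunks; the Flag enum is hypothetical:

```java
import java.util.EnumSet;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.EnumSetWritable;

public class EnumSetWritableSketch {
  enum Flag { CREATE, OVERWRITE, APPEND }   // hypothetical element type

  public static void main(String[] args) throws Exception {
    // Non-empty set: the element type can be inferred, so no Class argument.
    EnumSetWritable<Flag> out = new EnumSetWritable<>(EnumSet.of(Flag.CREATE, Flag.APPEND));
    DataOutputBuffer outBuf = new DataOutputBuffer();
    out.write(outBuf);

    // Empty set: legal only because elementType is supplied, per the javadoc above.
    EnumSetWritable<Flag> in = new EnumSetWritable<>(EnumSet.noneOf(Flag.class), Flag.class);
    in.setConf(new Configuration());
    DataInputBuffer inBuf = new DataInputBuffer();
    inBuf.reset(outBuf.getData(), outBuf.getLength()); // only [0, getLength()) is valid
    in.readFields(inBuf);
    System.out.println(in.get()); // [CREATE, APPEND]
  }
}
```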
*/ public static void copyBytes(InputStream in, OutputStream out, int buffSize) @@ -109,7 +109,7 @@ public static void copyBytes(InputStream in, OutputStream out, int buffSize) * * @param in InputStrem to read from * @param out OutputStream to write to - * @param conf the Configuration object + * @param conf the Configuration object. * @throws IOException raised on errors performing I/O. */ public static void copyBytes(InputStream in, OutputStream out, Configuration conf) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java index 71ad63b69d203..686b359f57d32 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java @@ -77,8 +77,8 @@ private InputBuffer(Buffer buffer) { /** * Resets the data that the buffer reads. - * @param input input - * @param length length + * @param input input. + * @param length length. */ public void reset(byte[] input, int length) { buffer.reset(input, 0, length); @@ -86,9 +86,9 @@ public void reset(byte[] input, int length) { /** * Resets the data that the buffer reads. - * @param input input - * @param start start - * @param length length + * @param input input. + * @param start start. + * @param length length. */ public void reset(byte[] input, int start, int length) { buffer.reset(input, start, length); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java index b807a9ac22b26..9262af87bc2e1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java @@ -38,7 +38,7 @@ public LongWritable() {} /** * Set the value of this LongWritable. - * @param value value + * @param value value. */ public void set(long value) { this.value = value; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java index 9e69483ba2876..edfcf6e1e7754 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java @@ -64,7 +64,7 @@ public MD5Hash(String hex) { /** * Constructs an MD5Hash with a specified value. - * @param digest digest + * @param digest digest. */ public MD5Hash(byte[] digest) { if (digest.length != MD5_LEN) @@ -80,9 +80,9 @@ public void readFields(DataInput in) throws IOException { /** * Constructs, reads and returns an instance. - * @param in in + * @param in in. * @throws IOException raised on errors performing I/O. - * @return MD5Hash + * @return MD5Hash. */ public static MD5Hash read(DataInput in) throws IOException { MD5Hash result = new MD5Hash(); @@ -98,7 +98,7 @@ public void write(DataOutput out) throws IOException { /** * Copy the contents of another instance into this instance. - * @param that that + * @param that that. */ public void set(MD5Hash that) { System.arraycopy(that.digest, 0, this.digest, 0, MD5_LEN); @@ -106,14 +106,14 @@ public void set(MD5Hash that) { /** * Returns the digest bytes. - * @return digest + * @return digest. 
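[Editor's note] The MD5Hash factory methods being annotated here are all static; a small sketch of the whole-array and sub-range digests plus the long-folding helper (input bytes are illustrative):

```java
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.MD5Hash;

public class Md5HashSketch {
  public static void main(String[] args) {
    byte[] data = "hello".getBytes(StandardCharsets.UTF_8);
    MD5Hash whole = MD5Hash.digest(data);          // hash the full array
    MD5Hash part = MD5Hash.digest(data, 0, 4);     // hash only bytes [0, 4)
    System.out.println(whole);                     // hex form via toString()
    System.out.println(whole.halfDigest());        // half-sized digest folded into a long
    System.out.println(whole.equals(part));        // false: different inputs
  }
}
```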
*/ public byte[] getDigest() { return digest; } /** * Construct a hash value for a byte array. - * @param data data - * @return MD5Hash + * @param data data. + * @return MD5Hash. */ public static MD5Hash digest(byte[] data) { return digest(data, 0, data.length); @@ -121,7 +121,7 @@ public static MD5Hash digest(byte[] data) { /** * Create a thread local MD5 digester. - * @return MessageDigest + * @return MessageDigest. */ public static MessageDigest getDigester() { MessageDigest digester = DIGESTER_FACTORY.get(); @@ -131,8 +131,8 @@ public static MessageDigest getDigester() { /** * Construct a hash value for the content from the InputStream. - * @param in input stream - * @return MD5Hash MD5Hash + * @param in input stream. + * @return MD5Hash. * @throws IOException raised on errors performing I/O. */ public static MD5Hash digest(InputStream in) throws IOException { @@ -148,10 +148,10 @@ public static MD5Hash digest(InputStream in) throws IOException { /** * Construct a hash value for a byte array. - * @param data data - * @param start start - * @param len len - * @return MD5Hash + * @param data data. + * @param start start. + * @param len len. + * @return MD5Hash. */ public static MD5Hash digest(byte[] data, int start, int len) { byte[] digest; @@ -163,10 +163,10 @@ public static MD5Hash digest(byte[] data, int start, int len) { /** * Construct a hash value for an array of byte array. - * @param dataArr dataArr - * @param start start - * @param len len - * @return MD5Hash + * @param dataArr dataArr. + * @param start start. + * @param len len. + * @return MD5Hash. */ public static MD5Hash digest(byte[][] dataArr, int start, int len) { byte[] digest; @@ -180,8 +180,8 @@ public static MD5Hash digest(byte[][] dataArr, int start, int len) { /** * Construct a hash value for a String. - * @param string string - * @return MD5Hash + * @param string string. + * @return MD5Hash. */ public static MD5Hash digest(String string) { return digest(UTF8.getBytes(string)); @@ -189,16 +189,16 @@ public static MD5Hash digest(String string) { /** * Construct a hash value for a String. - * @param utf8 utf8 - * @return MD5Hash + * @param utf8 utf8. + * @return MD5Hash. */ public static MD5Hash digest(UTF8 utf8) { return digest(utf8.getBytes(), 0, utf8.getLength()); } /** - * Construct a half-sized version of this MD5. Fits in a long - * @return halfDigest + * Construct a half-sized version of this MD5. Fits in a long. + * @return halfDigest. */ public long halfDigest() { long value = 0; @@ -278,7 +278,7 @@ public String toString() { /** * Sets the digest value from a hex string. - * @param hex hex + * @param hex hex. */ public void setDigest(String hex) { if (hex.length() != MD5_LEN*2) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java index 43b0d7acd4f2f..7b3cd78e3ccf3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java @@ -102,11 +102,11 @@ public static class Writer implements java.io.Closeable { * Create the named map for keys of the named class. * @deprecated Use Writer(Configuration, Path, Option...) instead. * - * @param conf configuration - * @param fs filesystem - * @param dirName dirName - * @param keyClass keyClass - * @param valClass valClass + * @param conf configuration. + * @param fs filesystem. + * @param dirName dirName. 
+ * @param keyClass keyClass. + * @param valClass valClass. * @throws IOException raised on errors performing I/O. */ @Deprecated @@ -120,13 +120,13 @@ public Writer(Configuration conf, FileSystem fs, String dirName, * Create the named map for keys of the named class. * @deprecated Use Writer(Configuration, Path, Option...) instead. * - * @param conf configuration - * @param fs fs - * @param dirName dirName - * @param keyClass keyClass - * @param valClass valClass - * @param compress compress - * @param progress progress + * @param conf configuration. + * @param fs fs. + * @param dirName dirName. + * @param keyClass keyClass. + * @param valClass valClass. + * @param compress compress. + * @param progress progress. * @throws IOException raised on errors performing I/O. */ @Deprecated @@ -142,14 +142,14 @@ public Writer(Configuration conf, FileSystem fs, String dirName, * Create the named map for keys of the named class. * @deprecated Use Writer(Configuration, Path, Option...) instead. * - * @param conf configuration - * @param fs FileSystem - * @param dirName dirName - * @param keyClass keyClass - * @param valClass valClass - * @param compress compress - * @param codec codec - * @param progress progress + * @param conf configuration. + * @param fs FileSystem. + * @param dirName dirName. + * @param keyClass keyClass. + * @param valClass valClass. + * @param compress compress. + * @param codec codec. + * @param progress progress. * @throws IOException raised on errors performing I/O. */ @Deprecated @@ -164,12 +164,12 @@ public Writer(Configuration conf, FileSystem fs, String dirName, /** * Create the named map for keys of the named class. * @deprecated Use Writer(Configuration, Path, Option...) instead. - * @param conf configuration - * @param fs fs - * @param dirName dirName - * @param keyClass keyClass - * @param valClass valClass - * @param compress compress + * @param conf configuration. + * @param fs fs. + * @param dirName dirName. + * @param keyClass keyClass. + * @param valClass valClass. + * @param compress compress. * @throws IOException raised on errors performing I/O. */ @Deprecated @@ -182,11 +182,11 @@ public Writer(Configuration conf, FileSystem fs, String dirName, /** Create the named map using the named key comparator. * @deprecated Use Writer(Configuration, Path, Option...) instead. - * @param conf configuration - * @param fs fs - * @param dirName dirName - * @param comparator comparator - * @param valClass valClass + * @param conf configuration. + * @param fs fs. + * @param dirName dirName. + * @param comparator comparator. + * @param valClass valClass. * @throws IOException raised on errors performing I/O. */ @Deprecated @@ -198,12 +198,12 @@ public Writer(Configuration conf, FileSystem fs, String dirName, } /** Create the named map using the named key comparator. - * @param conf configuration - * @param fs filesystem - * @param dirName dirName - * @param comparator comparator - * @param valClass valClass - * @param compress compress + * @param conf configuration. + * @param fs filesystem. + * @param dirName dirName. + * @param comparator comparator. + * @param valClass valClass. + * @param compress compress. * @throws IOException raised on errors performing I/O. * @deprecated Use Writer(Configuration, Path, Option...) instead. */ @@ -219,13 +219,13 @@ public Writer(Configuration conf, FileSystem fs, String dirName, * Create the named map using the named key comparator. * @deprecated Use Writer(Configuration, Path, Option...)} instead. 
* - * @param conf configuration - * @param fs filesystem - * @param dirName dirName - * @param comparator comparator - * @param valClass valClass - * @param compress CompressionType - * @param progress progress + * @param conf configuration. + * @param fs filesystem. + * @param dirName dirName. + * @param comparator comparator. + * @param valClass valClass. + * @param compress CompressionType. + * @param progress progress. * @throws IOException raised on errors performing I/O. */ @Deprecated @@ -242,14 +242,14 @@ public Writer(Configuration conf, FileSystem fs, String dirName, * Create the named map using the named key comparator. * @deprecated Use Writer(Configuration, Path, Option...) instead. * - * @param conf configuration - * @param fs FileSystem - * @param dirName dirName - * @param comparator comparator - * @param valClass valClass - * @param compress CompressionType - * @param codec codec - * @param progress progress + * @param conf configuration. + * @param fs FileSystem. + * @param dirName dirName. + * @param comparator comparator. + * @param valClass valClass. + * @param compress CompressionType. + * @param codec codec. + * @param progress progress. * @throws IOException raised on errors performing I/O. */ @Deprecated @@ -366,7 +366,7 @@ public Writer(Configuration conf, * Sets the index interval. * @see #getIndexInterval() * - * @param interval interval + * @param interval interval. */ public void setIndexInterval(int interval) { indexInterval = interval; } @@ -374,8 +374,8 @@ public Writer(Configuration conf, * Sets the index interval and stores it in conf. * @see #getIndexInterval() * - * @param conf configuration - * @param interval interval + * @param conf configuration. + * @param interval interval. */ public static void setIndexInterval(Configuration conf, int interval) { conf.setInt(INDEX_INTERVAL, interval); @@ -392,8 +392,8 @@ public synchronized void close() throws IOException { * Append a key/value pair to the map. The key must be greater or equal * to the previous key added to the map. * - * @param key key - * @param val value + * @param key key. + * @param val value. * @throws IOException raised on errors performing I/O. */ public synchronized void append(WritableComparable key, Writable val) @@ -460,14 +460,14 @@ public static class Reader implements java.io.Closeable { /** * Returns the class of keys in this file. * - * @return keyClass + * @return keyClass. */ public Class getKeyClass() { return data.getKeyClass(); } /** * Returns the class of values in this file. * - * @return Value Class + * @return Value Class. */ public Class getValueClass() { return data.getValueClass(); } @@ -502,9 +502,9 @@ public Reader(Path dir, Configuration conf, * Construct a map reader for the named map. * @deprecated * - * @param fs FileSystem - * @param dirName dirName - * @param conf configuration + * @param fs FileSystem. + * @param dirName dirName. + * @param conf configuration. * @throws IOException raised on errors performing I/O. */ @Deprecated @@ -517,10 +517,10 @@ public Reader(FileSystem fs, String dirName, * Construct a map reader for the named map using the named comparator. * @deprecated * - * @param fs FileSystem - * @param dirName dirName - * @param comparator WritableComparator - * @param conf Configuration + * @param fs FileSystem. + * @param dirName dirName. + * @param comparator WritableComparator. + * @param conf Configuration. * @throws IOException raised on errors performing I/O. 
*/ @Deprecated @@ -559,11 +559,11 @@ protected synchronized void open(Path dir, * Override this method to specialize the type of * {@link SequenceFile.Reader} returned. * - * @param dataFile data file - * @param conf configuration - * @param options options + * @param dataFile data file. + * @param conf configuration. + * @param options options. * @throws IOException raised on errors performing I/O. - * @return SequenceFile.Reader + * @return SequenceFile.Reader. */ protected SequenceFile.Reader createDataFileReader(Path dataFile, Configuration conf, @@ -644,7 +644,7 @@ public synchronized void reset() throws IOException { * file is empty. * * @throws IOException raised on errors performing I/O. - * @return WritableComparable + * @return WritableComparable. */ public synchronized WritableComparable midKey() throws IOException { @@ -685,7 +685,7 @@ public synchronized void finalKey(WritableComparable key) * first entry after the named key. Returns true iff the named key exists * in this map. * - * @param key key + * @param key key. * @throws IOException raised on errors performing I/O. * @return if the named key exists in this map true, not false. */ @@ -803,9 +803,9 @@ else if (cmp > 0) * val. Returns true if such a pair exists and false when at * the end of the map. * - * @param key WritableComparable - * @param val Writable - * @return if such a pair exists true,not false + * @param key WritableComparable. + * @param val Writable. + * @return if such a pair exists true,not false. * @throws IOException raised on errors performing I/O. */ public synchronized boolean next(WritableComparable key, Writable val) @@ -815,9 +815,9 @@ public synchronized boolean next(WritableComparable key, Writable val) /** * Return the value for the named key, or null if none exists. - * @param key key - * @param val val - * @return Writable if such a pair exists true,not false + * @param key key. + * @param val val. + * @return Writable if such a pair exists true,not false. * @throws IOException raised on errors performing I/O. */ public synchronized Writable get(WritableComparable key, Writable val) @@ -834,9 +834,9 @@ public synchronized Writable get(WritableComparable key, Writable val) * Returns key or if it does not exist, at the first entry * after the named key. * - * @param key - key that we're trying to find - * @param val - data value if key is found - * @return - the key that was the closest match or null if eof. + * @param key key that we're trying to find. + * @param val data value if key is found. + * @return the key that was the closest match or null if eof. * @throws IOException raised on errors performing I/O. */ public synchronized WritableComparable getClosest(WritableComparable key, @@ -890,9 +890,9 @@ public synchronized void close() throws IOException { /** * Renames an existing map directory. - * @param fs fs - * @param oldName oldName - * @param newName newName + * @param fs fs. + * @param oldName oldName. + * @param newName newName. * @throws IOException raised on errors performing I/O. */ public static void rename(FileSystem fs, String oldName, String newName) @@ -927,9 +927,9 @@ public static void delete(FileSystem fs, String name) throws IOException { * @param keyClass key class (has to be a subclass of Writable) * @param valueClass value class (has to be a subclass of Writable) * @param dryrun do not perform any changes, just report what needs to be done - * @param conf configuration + * @param conf configuration. 
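[Editor's note] Since most Writer constructors above are deprecated in favor of the Option-based form, a sketch of the non-deprecated path, plus the get/getClosest lookups whose javadoc this patch touches (directory path is hypothetical; keys must be appended in sorted order, per append above):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.Text;

public class MapFileSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path dir = new Path("/tmp/demo.map");   // hypothetical map directory

    try (MapFile.Writer writer = new MapFile.Writer(conf, dir,
        MapFile.Writer.keyClass(IntWritable.class),
        MapFile.Writer.valueClass(Text.class))) {
      for (int i = 0; i < 100; i += 2) {                    // even keys only
        writer.append(new IntWritable(i), new Text("value-" + i));
      }
    }

    try (MapFile.Reader reader = new MapFile.Reader(dir, conf)) {
      Text val = new Text();
      reader.get(new IntWritable(42), val);                 // exact lookup: "value-42"
      IntWritable closest = (IntWritable) reader.getClosest(
          new IntWritable(43), val);                        // first key >= 43, i.e. 44
      System.out.println(val + " closest=" + closest);
    }
  }
}
```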
* @return number of valid entries in this MapFile, or -1 if no fixing was needed - * @throws Exception Exception + * @throws Exception Exception. */ public static long fix(FileSystem fs, Path dir, Class keyClass, @@ -1031,9 +1031,9 @@ public Merger(Configuration conf) throws IOException { /** * Merge multiple MapFiles to one Mapfile. * - * @param inMapFiles input inMapFiles - * @param deleteInputs deleteInputs - * @param outMapFile input outMapFile + * @param inMapFiles input inMapFiles. + * @param deleteInputs deleteInputs. + * @param outMapFile input outMapFile. * @throws IOException raised on errors performing I/O. */ public void merge(Path[] inMapFiles, boolean deleteInputs, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java index fc79887e26262..452965b7c8220 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java @@ -44,8 +44,8 @@ private MultipleIOException(List exceptions) { /** * A convenient method to create an {@link IOException}. - * @param exceptions IOException List - * @return IOException + * @param exceptions IOException List. + * @return IOException. */ public static IOException createIOException(List exceptions) { if (exceptions == null || exceptions.isEmpty()) { @@ -66,7 +66,7 @@ public static class Builder { /** * Add the given {@link Throwable} to the exception list. - * @param t Throwable + * @param t Throwable. */ public void add(Throwable t) { if (exceptions == null) { From 207ee6cd5b7fc38b362b79b4422e2a5c2a5cb431 Mon Sep 17 00:00:00 2001 From: slfan1989 Date: Mon, 16 May 2022 21:42:24 -0700 Subject: [PATCH 52/53] HADOOP-18229. Fix Check Style. 
--- .../org/apache/hadoop/io/ObjectWritable.java | 38 ++++++------ .../org/apache/hadoop/io/RawComparator.java | 2 +- .../org/apache/hadoop/io/SecureIOUtils.java | 6 +- .../org/apache/hadoop/io/SequenceFile.java | 10 ++-- .../main/java/org/apache/hadoop/io/UTF8.java | 4 +- .../java/org/apache/hadoop/io/Writable.java | 4 +- .../apache/hadoop/io/WritableComparator.java | 60 +++++++++---------- .../org/apache/hadoop/io/WritableUtils.java | 2 +- .../apache/hadoop/io/compress/CodecPool.java | 4 +- .../io/compress/CompressionCodecFactory.java | 6 +- .../io/compress/CompressionInputStream.java | 4 +- .../io/compress/CompressionOutputStream.java | 2 +- .../apache/hadoop/io/compress/Compressor.java | 4 +- .../compress/SplittableCompressionCodec.java | 2 +- .../io/compress/bzip2/Bzip2Compressor.java | 2 +- .../io/compress/bzip2/Bzip2Decompressor.java | 4 +- .../io/compress/bzip2/CBZip2InputStream.java | 6 +- .../io/compress/bzip2/CBZip2OutputStream.java | 10 ++-- .../io/compress/zlib/ZlibCompressor.java | 2 +- .../io/compress/zlib/ZlibDecompressor.java | 4 +- .../hadoop/io/compress/zlib/ZlibFactory.java | 2 +- .../io/compress/zstd/ZStandardCompressor.java | 4 +- .../compress/zstd/ZStandardDecompressor.java | 2 +- .../hadoop/io/erasurecode/CodecUtil.java | 4 +- .../io/erasurecode/ErasureCodeNative.java | 4 +- .../io/erasurecode/coder/ErasureCoder.java | 2 +- .../erasurecode/coder/ErasureCodingStep.java | 4 +- .../io/erasurecode/coder/ErasureDecoder.java | 10 ++-- .../coder/ErasureDecodingStep.java | 6 +- .../io/erasurecode/coder/ErasureEncoder.java | 2 +- .../coder/ErasureEncodingStep.java | 6 +- .../coder/HHErasureCodingStep.java | 4 +- .../coder/HHXORErasureDecodingStep.java | 4 +- .../coder/HHXORErasureEncodingStep.java | 4 +- .../erasurecode/coder/XORErasureDecoder.java | 2 +- .../io/erasurecode/coder/util/HHUtil.java | 2 +- .../io/erasurecode/grouper/BlockGrouper.java | 6 +- .../erasurecode/rawcoder/util/DumpUtil.java | 12 ++-- .../io/erasurecode/rawcoder/util/GF256.java | 10 ++-- .../rawcoder/util/GaloisField.java | 46 +++++++------- .../io/erasurecode/rawcoder/util/RSUtil.java | 24 ++++---- .../hadoop/io/file/tfile/ByteArray.java | 2 +- .../apache/hadoop/io/file/tfile/TFile.java | 16 ++--- .../apache/hadoop/io/file/tfile/Utils.java | 4 +- .../hadoop/io/retry/AsyncCallHandler.java | 6 +- .../apache/hadoop/io/retry/RetryPolicies.java | 58 +++++++++--------- .../apache/hadoop/io/retry/RetryProxy.java | 10 ++-- .../apache/hadoop/io/retry/RetryUtils.java | 4 +- .../hadoop/io/serializer/Deserializer.java | 6 +- .../io/serializer/DeserializerComparator.java | 2 +- .../JavaSerializationComparator.java | 2 +- .../hadoop/io/serializer/Serialization.java | 10 ++-- .../io/serializer/SerializationFactory.java | 2 +- .../hadoop/io/serializer/Serializer.java | 6 +- .../io/serializer/avro/AvroSerialization.java | 12 ++-- .../hadoop/ipc/GenericRefreshProtocol.java | 4 +- .../apache/hadoop/ipc/ProtobufRpcEngine.java | 2 +- .../main/java/org/apache/hadoop/ipc/RPC.java | 28 ++++----- .../org/apache/hadoop/ipc/RetryCache.java | 2 +- .../java/org/apache/hadoop/ipc/RpcEngine.java | 4 +- .../apache/hadoop/ipc/RpcServerException.java | 4 +- .../java/org/apache/hadoop/ipc/Server.java | 2 +- .../apache/hadoop/ipc/WritableRpcEngine.java | 2 +- .../java/org/apache/hadoop/log/LogLevel.java | 2 +- .../apache/hadoop/metrics2/MetricsSystem.java | 6 +- .../hadoop/metrics2/MetricsSystemMXBean.java | 10 ++-- .../metrics2/sink/PrometheusMetricsSink.java | 6 +- .../sink/ganglia/AbstractGangliaSink.java | 4 +- 
.../apache/hadoop/metrics2/util/MBeans.java | 8 +-- .../hadoop/metrics2/util/SampleQuantiles.java | 2 +- .../java/org/apache/hadoop/net/NetUtils.java | 48 +++++++-------- 71 files changed, 305 insertions(+), 305 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java index a7e46bab9b004..29c06a01ad6e3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java @@ -68,7 +68,7 @@ public ObjectWritable(Class declaredClass, Object instance) { /** * Reset the instance. - * @param instance instance + * @param instance instance. */ public void set(Object instance) { this.declaredClass = instance.getClass(); @@ -133,10 +133,10 @@ public void write(DataOutput out) throws IOException { * Write a {@link Writable}, {@link String}, primitive type, or an array of * the preceding. * - * @param out DataOutput - * @param instance instance - * @param conf Configuration - * @param declaredClass declaredClass + * @param out DataOutput. + * @param instance instance. + * @param conf Configuration. + * @param declaredClass declaredClass. * @throws IOException raised on errors performing I/O. */ public static void writeObject(DataOutput out, Object instance, @@ -155,10 +155,10 @@ public static void writeObject(DataOutput out, Object instance, * that may not be running the same version of software. Sometime in ~2013 * we can consider removing this parameter and always using the compact format. * - * @param conf configuration - * @param out dataoutput - * @param declaredClass declaredClass - * @param instance instance + * @param conf configuration. + * @param out dataoutput. + * @param declaredClass declaredClass. + * @param instance instance. * @throws IOException raised on errors performing I/O. * */ @@ -238,9 +238,9 @@ public static void writeObject(DataOutput out, Object instance, * Read a {@link Writable}, {@link String}, primitive type, or an array of * the preceding. * - * @param conf configuration - * @param in DataInput - * @return Object + * @param conf configuration. + * @param in DataInput. + * @return Object. * @throws IOException raised on errors performing I/O. */ public static Object readObject(DataInput in, Configuration conf) @@ -252,10 +252,10 @@ public static Object readObject(DataInput in, Configuration conf) * Read a {@link Writable}, {@link String}, primitive type, or an array of * the preceding. * - * @param in DataInput - * @param objectWritable objectWritable - * @param conf configuration - * @return Object + * @param in DataInput. + * @param objectWritable objectWritable. + * @param conf configuration. + * @return Object. * @throws IOException raised on errors performing I/O. */ @SuppressWarnings("unchecked") @@ -405,9 +405,9 @@ static Method getStaticProtobufMethod(Class declaredClass, String method, * it in the specified conf. If the specified conf is null, * try load it directly. * - * @param conf configuration - * @param className classname - * @return Class + * @param conf configuration. + * @param className classname. + * @return Class. 
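[Editor's note] A round-trip sketch of the ObjectWritable.writeObject/readObject pair documented above; the declared class travels with the instance, which is why readObject can decode without being told the type:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;

public class ObjectWritableSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    DataOutputBuffer out = new DataOutputBuffer();
    // declaredClass is serialized alongside the value itself.
    ObjectWritable.writeObject(out, new Text("hi"), Text.class, conf);

    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    Object o = ObjectWritable.readObject(in, conf);
    System.out.println(o.getClass().getName() + ": " + o); // org.apache.hadoop.io.Text: hi
  }
}
```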
*/ public static Class loadClass(Configuration conf, String className) { Class declaredClass = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java index a15e2346ae9f0..354dda964e92b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java @@ -29,7 +29,7 @@ * A {@link Comparator} that operates directly on byte representations of * objects. *
- * @param <T> generic type + * @param <T> generic type. * @see DeserializerComparator */ @InterfaceAudience.Public diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java index 23233f8f033fe..cddddcc6c9a45 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java @@ -151,7 +151,7 @@ protected static RandomAccessFile forceSecureOpenForRandomRead(File f, * @param expectedGroup the expected group owner for the file * @throws IOException if an IO Error occurred or the user/group does not * match if security is enabled - * @return FSDataInputStream + * @return FSDataInputStream. */ public static FSDataInputStream openFSDataInputStream(File file, String expectedOwner, String expectedGroup) throws IOException { @@ -169,7 +169,7 @@ public static FSDataInputStream openFSDataInputStream(File file, * @param expectedOwner input expectedOwner. * @param expectedGroup input expectedGroup. * @throws IOException raised on errors performing I/O. - * @return FSDataInputStream + * @return FSDataInputStream. */ @VisibleForTesting protected static FSDataInputStream forceSecureOpenFSDataInputStream( @@ -268,7 +268,7 @@ private static FileOutputStream insecureCreateForWrite(File f, * * @throws AlreadyExistsException if the file already exists * @throws IOException if any other error occurred - * @return createForWrite FileOutputStream + * @return createForWrite FileOutputStream. */ public static FileOutputStream createForWrite(File f, int permissions) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java index f21b1d81a6c9f..a0b45814f1c77 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java @@ -2223,19 +2223,19 @@ public synchronized Class getValueClass() { /** * Returns true if values are compressed. - * @return if values are compressed true, not false + * @return if values are compressed true, not false. */ public boolean isCompressed() { return decompress; } /** * Returns true if records are block-compressed. - * @return if records are block-compressed true, not false + * @return if records are block-compressed true, not false. */ public boolean isBlockCompressed() { return blockCompressed; } /** * Returns the compression codec of data in this file. - * @return CompressionCodec + * @return CompressionCodec. */ public CompressionCodec getCompressionCodec() { return codec; } @@ -2261,7 +2261,7 @@ public CompressionType getCompressionType() { /** * Returns the metadata object of the file. - * @return metadata + * @return metadata. */ public Metadata getMetadata() { return this.metadata; } @@ -2456,7 +2456,7 @@ private Object deserializeValue(Object val) throws IOException { * @return Read the next key in the file into key, skipping its * value.True if another entry exists, and false at end of file. * - * @param key key + * @param key key. * @throws IOException raised on errors performing I/O.
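[Editor's note] The SequenceFile.Reader accessors annotated above (isCompressed, isBlockCompressed, getCompressionCodec) plus the next() iteration contract in one runnable sketch; the path is hypothetical and the Option-based constructors are used:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class SequenceFileSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path path = new Path("/tmp/demo.seq");   // hypothetical location

    try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
        SequenceFile.Writer.file(path),
        SequenceFile.Writer.keyClass(IntWritable.class),
        SequenceFile.Writer.valueClass(Text.class))) {
      writer.append(new IntWritable(1), new Text("one"));
      writer.append(new IntWritable(2), new Text("two"));
    }

    try (SequenceFile.Reader reader = new SequenceFile.Reader(conf,
        SequenceFile.Reader.file(path))) {
      System.out.println("compressed=" + reader.isCompressed()
          + ", blockCompressed=" + reader.isBlockCompressed());
      IntWritable key = new IntWritable();
      Text val = new Text();
      while (reader.next(key, val)) {        // false at end of file
        System.out.println(key + "\t" + val);
      }
    }
  }
}
```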
*/ public synchronized boolean next(Writable key) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java index a4bdffdcd8a79..fdee830e6fea8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java @@ -138,7 +138,7 @@ public void readFields(DataInput in) throws IOException { /** * Skips over one UTF8 in the input. - * @param in datainput + * @param in datainput. * @throws IOException raised on errors performing I/O. */ public static void skip(DataInput in) throws IOException { @@ -251,7 +251,7 @@ public static byte[] getBytes(String string) { /** * @return Convert a UTF-8 encoded byte array back into a string. * - * @param bytes input bytes + * @param bytes input bytes. * @throws IOException if the byte array is invalid UTF8 */ public static String fromBytes(byte[] bytes) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java index 3b3f8cf78e1bc..56b46d554fee6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java @@ -71,7 +71,7 @@ public interface Writable { * Serialize the fields of this object to out. * * @param out DataOuput to serialize this object into. - * @throws IOException any other problem for write + * @throws IOException any other problem for write. */ void write(DataOutput out) throws IOException; @@ -82,7 +82,7 @@ public interface Writable { * existing object where possible.
* * @param in DataInput to deseriablize this object from. - * @throws IOException any other problem for readFields + * @throws IOException any other problem for readFields. */ void readFields(DataInput in) throws IOException; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java index 53f81e34db8c6..05d4e3c5c533f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java @@ -49,8 +49,8 @@ public class WritableComparator implements RawComparator, Configurable { /** * For backwards compatibility. * - * @param c WritableComparable Type - * @return WritableComparator + * @param c WritableComparable Type. + * @return WritableComparator. */ public static WritableComparator get(Class c) { return get(c, null); @@ -58,9 +58,9 @@ public static WritableComparator get(Class c) { /** * Get a comparator for a {@link WritableComparable} implementation. - * @param c class - * @param conf configuration - * @return WritableComparator + * @param c class. + * @param conf configuration. + * @return WritableComparator. */ public static WritableComparator get( Class c, Configuration conf) { @@ -109,8 +109,8 @@ private static void forceInit(Class cls) { * Register an optimized comparator for a {@link WritableComparable} * implementation. Comparators registered with this method must be * thread-safe. - * @param c class - * @param comparator WritableComparator + * @param c class. + * @param comparator WritableComparator. */ public static void define(Class c, WritableComparator comparator) { comparators.put(c, comparator); @@ -127,7 +127,7 @@ protected WritableComparator() { /** * Construct for a {@link WritableComparable} implementation. - * @param keyClass WritableComparable Class + * @param keyClass WritableComparable Class. */ protected WritableComparator(Class keyClass) { this(keyClass, null, false); @@ -219,13 +219,13 @@ public int compare(Object a, Object b) { /** * Lexicographic order of binary data. - * @param b1 b1 - * @param s1 s1 - * @param l1 l1 - * @param b2 b2 - * @param s2 s2 - * @param l2 l2 - * @return compare bytes + * @param b1 b1. + * @param s1 s1. + * @param l1 l1. + * @param b2 b2. + * @param s2 s2. + * @param l2 l2. + * @return compare bytes. */ public static int compareBytes(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { @@ -234,10 +234,10 @@ public static int compareBytes(byte[] b1, int s1, int l1, /** * Compute hash for binary data. - * @param bytes bytes - * @param offset offset - * @param length length - * @return hash for binary data + * @param bytes bytes. + * @param offset offset. + * @param length length. + * @return hash for binary data. */ public static int hashBytes(byte[] bytes, int offset, int length) { int hash = 1; @@ -248,8 +248,8 @@ public static int hashBytes(byte[] bytes, int offset, int length) { /** * Compute hash for binary data. - * @param bytes bytes - * @param length length + * @param bytes bytes. + * @param length length. * @return hash for binary data. */ public static int hashBytes(byte[] bytes, int length) { @@ -258,8 +258,8 @@ public static int hashBytes(byte[] bytes, int length) { /** * Parse an unsigned short from a byte array. - * @param bytes bytes - * @param start start + * @param bytes bytes. + * @param start start. 
* @return unsigned short from a byte array */ public static int readUnsignedShort(byte[] bytes, int start) { @@ -269,8 +269,8 @@ public static int readUnsignedShort(byte[] bytes, int start) { /** * Parse an integer from a byte array. - * @param bytes bytes - * @param start start + * @param bytes bytes. + * @param start start. * @return integer from a byte array */ public static int readInt(byte[] bytes, int start) { @@ -283,8 +283,8 @@ public static int readInt(byte[] bytes, int start) { /** * Parse a float from a byte array. - * @param bytes bytes - * @param start start + * @param bytes bytes. + * @param start start. * @return float from a byte array */ public static float readFloat(byte[] bytes, int start) { @@ -304,9 +304,9 @@ public static long readLong(byte[] bytes, int start) { /** * Parse a double from a byte array. - * @param bytes bytes - * @param start start - * @return double from a byte array + * @param bytes bytes. + * @param start start. + * @return double from a byte array. */ public static double readDouble(byte[] bytes, int start) { return Double.longBitsToDouble(readLong(bytes, start)); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java index 1e0ee27e93d6b..187398de0ec86 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java @@ -211,7 +211,7 @@ public static void displayByteArray(byte[] record){ * * @param Generics Type T. * @param orig The object to copy - * @param conf input Configuration + * @param conf input Configuration. * @return The copied object */ public static T clone(T orig, Configuration conf) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java index 50a98778655a6..69e8c99a1f4da 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java @@ -237,7 +237,7 @@ public static void returnDecompressor(Decompressor decompressor) { * Return the number of leased {@link Compressor}s for this * {@link CompressionCodec}. * - * @param codec codec + * @param codec codec. * @return the number of leased. */ public static int getLeasedCompressorsCount(CompressionCodec codec) { @@ -249,7 +249,7 @@ public static int getLeasedCompressorsCount(CompressionCodec codec) { * Return the number of leased {@link Decompressor}s for this * {@link CompressionCodec}. * - * @param codec codec + * @param codec codec. 
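[Editor's note] The WritableComparator static helpers above operate on raw serialized bytes; a small sketch of the big-endian parsers and the lexicographic comparison (the buffer contents are illustrative):

```java
import org.apache.hadoop.io.WritableComparator;

public class RawBytesSketch {
  public static void main(String[] args) {
    // 4-byte big-endian int (42) followed by the IEEE-754 bits of float pi.
    byte[] buf = new byte[] {0, 0, 0, 42, 0x40, 0x49, 0x0F, (byte) 0xDB};

    int i = WritableComparator.readInt(buf, 0);       // 42
    float f = WritableComparator.readFloat(buf, 4);   // ~3.1415927
    int cmp = WritableComparator.compareBytes(
        buf, 0, 4, buf, 4, 4);                        // lexicographic: negative here

    System.out.println(i + " " + f + " cmp=" + cmp);
  }
}
```

Raw-byte comparison is the whole point of the class: sort order can be decided without deserializing either operand.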
* @return the number of leased */ public static int getLeasedDecompressorsCount(CompressionCodec codec) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java index 6291d083e83fd..8e920a2e64c27 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java @@ -172,7 +172,7 @@ public static void setCodecClasses(Configuration conf, * Find the codecs specified in the config value io.compression.codecs * and register them. Defaults to gzip and deflate. * - * @param conf configuration + * @param conf configuration. */ public CompressionCodecFactory(Configuration conf) { codecs = new TreeMap(); @@ -295,8 +295,8 @@ public static String removeSuffix(String filename, String suffix) { /** * A little test program. - * @param args arguments - * @throws Exception exception + * @param args arguments. + * @throws Exception exception. */ public static void main(String[] args) throws Exception { Configuration conf = new Configuration(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java index 017c89a327a5c..5bfec01ec945d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java @@ -120,7 +120,7 @@ public long getPos() throws IOException { /** * This method is current not supported. * - * @throws UnsupportedOperationException Unsupported Operation Exception + * @throws UnsupportedOperationException Unsupported Operation Exception. */ @Override @@ -131,7 +131,7 @@ public void seek(long pos) throws UnsupportedOperationException { /** * This method is current not supported. * - * @throws UnsupportedOperationException Unsupported Operation Exception + * @throws UnsupportedOperationException Unsupported Operation Exception. */ @Override public boolean seekToNewSource(long targetPos) throws UnsupportedOperationException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java index aebcffa8117c7..2e412dcd58fce 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java @@ -48,7 +48,7 @@ public abstract class CompressionOutputStream extends OutputStream /** * Create a compression output stream that writes * the compressed bytes to the given stream. - * @param out out + * @param out out. 
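[Editor's note] A sketch of the lease/return cycle behind the CodecPool counters above, combined with CompressionCodecFactory's suffix lookup; the file name is hypothetical and gzip is resolved from the default codec list mentioned in the factory javadoc:

```java
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Compressor;

public class CodecPoolSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    CompressionCodec codec = factory.getCodec(new Path("app.log.gz")); // matched by suffix

    Compressor compressor = CodecPool.getCompressor(codec);  // lease from the pool
    try {
      ByteArrayOutputStream bytes = new ByteArrayOutputStream();
      try (OutputStream out = codec.createOutputStream(bytes, compressor)) {
        out.write("hello hello hello".getBytes(StandardCharsets.UTF_8));
      }
      System.out.println("compressed to " + bytes.size() + " bytes");
    } finally {
      CodecPool.returnCompressor(compressor);                // release the lease
    }
  }
}
```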
*/ protected CompressionOutputStream(OutputStream out) { this.out = out; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java index 8ecd3eb4cb7ef..7e2a6e679f43c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java @@ -65,13 +65,13 @@ public interface Compressor { /** * Return number of uncompressed bytes input so far. - * @return bytes read + * @return bytes read. */ public long getBytesRead(); /** * Return number of compressed bytes output so far. - * @return bytes written + * @return bytes written. */ public long getBytesWritten(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java index e5a04f2e7e88f..f2e28774a46db 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java @@ -61,7 +61,7 @@ public enum READ_MODE {CONTINUOUS, BYBLOCK}; * Create a stream as dictated by the readMode. This method is used when * the codecs wants the ability to work with the underlying stream positions. * - * @param decompressor decompressor + * @param decompressor decompressor. * @param seekableIn The seekable input stream (seeks in compressed data) * @param start The start offset into the compressed stream. May be changed * by the underlying codec. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java index f753af1b9fee9..9d1d85332489a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java @@ -67,7 +67,7 @@ public Bzip2Compressor() { /** * Creates a new compressor, taking settings from the configuration. - * @param conf configuration + * @param conf configuration. */ public Bzip2Compressor(Configuration conf) { this(Bzip2Factory.getBlockSize(conf), diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java index afa963e6b5da9..acd806b9b300a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java @@ -50,8 +50,8 @@ public class Bzip2Decompressor implements Decompressor { /** * Creates a new decompressor. - * @param conserveMemory conserveMemory - * @param directBufferSize directBufferSize + * @param conserveMemory conserveMemory. + * @param directBufferSize directBufferSize. 
*/ public Bzip2Decompressor(boolean conserveMemory, int directBufferSize) { this.conserveMemory = conserveMemory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java index 0c1f1802025b0..187fe481588c8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java @@ -152,7 +152,7 @@ public enum STATE { * This method reports the processed bytes so far. Please note that this * statistic is only updated on block boundaries and only when the stream is * initiated in BYBLOCK mode. - * @return ProcessedByteCount + * @return ProcessedByteCount. */ public long getProcessedByteCount() { return reportedBytesReadFromCompressedStream; @@ -283,8 +283,8 @@ private void makeMaps() { * the magic. Thus callers have to skip the first two bytes. Otherwise this * constructor will throw an exception. *

- * @param in in - * @param readMode READ_MODE + * @param in in. + * @param readMode READ_MODE. * @throws IOException * if the stream content is malformed or an I/O error occurs. * @throws NullPointerException diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java index dde473fd1feb6..39c3638b0f497 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java @@ -210,10 +210,10 @@ public class CBZip2OutputStream extends OutputStream implements BZip2Constants { /** * This method is accessible by subclasses for historical purposes. If you * don't know what it does then you don't need it. - * @param len len - * @param freq freq - * @param alphaSize alphaSize - * @param maxLen maxLen + * @param len len. + * @param freq freq. + * @param alphaSize alphaSize. + * @param maxLen maxLen. */ protected static void hbMakeCodeLengths(char[] len, int[] freq, int alphaSize, int maxLen) { @@ -850,7 +850,7 @@ private void endCompression() throws IOException { /** * Returns the blocksize parameter specified at construction time. - * @return blocksize + * @return blocksize. */ public final int getBlockSize() { return this.blockSize100k; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java index a3ce3ab076581..89e05fc6d07be 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java @@ -240,7 +240,7 @@ public ZlibCompressor() { /** * Creates a new compressor, taking settings from the configuration. - * @param conf configuration + * @param conf configuration. */ public ZlibCompressor(Configuration conf) { this(ZlibFactory.getCompressionLevel(conf), diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java index 5f749748f30ec..c2615548d23ee 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java @@ -101,8 +101,8 @@ static boolean isNativeZlibLoaded() { /** * Creates a new decompressor. - * @param header header - * @param directBufferSize directBufferSize + * @param header header. + * @param directBufferSize directBufferSize. 
*/ public ZlibDecompressor(CompressionHeader header, int directBufferSize) { this.header = header; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java index f4bae38dc457e..c2de494457ccd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java @@ -66,7 +66,7 @@ public static void loadNativeZLib() { /** * Set the flag whether to use native library. Used for testing non-native * libraries - * @param isLoaded isLoaded + * @param isLoaded isLoaded. */ @VisibleForTesting public static void setNativeZlibLoaded(final boolean isLoaded) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java index dfef01044d2c9..a77b59640cdaf 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java @@ -84,8 +84,8 @@ public static int getRecommendedBufferSize() { /** * Creates a new compressor with the default compression level. * Compressed data will be generated in ZStandard format. - * @param level level - * @param bufferSize bufferSize + * @param level level. + * @param bufferSize bufferSize. */ public ZStandardCompressor(int level, int bufferSize) { this(level, bufferSize, bufferSize); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java index c9ef509c6dce2..792547a62faea 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java @@ -73,7 +73,7 @@ public ZStandardDecompressor() { /** * Creates a new decompressor. - * @param bufferSize bufferSize + * @param bufferSize bufferSize. */ public ZStandardDecompressor(int bufferSize) { this.directBufferSize = bufferSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java index d302932fa8fd5..f89a0d9812d7c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java @@ -83,7 +83,7 @@ private CodecUtil() { } /** * Create encoder corresponding to given codec. * @param options Erasure codec options - * @param conf configuration + * @param conf configuration. * @return erasure encoder */ public static ErasureEncoder createEncoder(Configuration conf, @@ -101,7 +101,7 @@ public static ErasureEncoder createEncoder(Configuration conf, /** * Create decoder corresponding to given codec. * @param options Erasure codec options - * @param conf configuration + * @param conf configuration. 
* @return erasure decoder
  */
 public static ErasureDecoder createDecoder(Configuration conf,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java
index b931a68bddbe8..83a3151282096 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java
@@ -61,7 +61,7 @@ private ErasureCodeNative() {}
 
 /**
  * Are native libraries loaded?
- * @return if is native code loaded true,not false
+ * @return true if native code is loaded, false otherwise.
  */
 public static boolean isNativeCodeLoaded() {
 return LOADING_FAILURE_REASON == null;
@@ -83,7 +83,7 @@ public static void checkNativeCodeLoaded() {
 
 /**
  * Get the native library name that's available or supported.
- * @return library name
+ * @return library name.
  */
 public static native String getLibraryName();
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java
index 4ce8b9c663d7e..ab1775538bd5f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java
@@ -68,7 +68,7 @@ public interface ErasureCoder extends Configurable {
  *
  * @param blockGroup the erasure coding block group containing all necessary
  *        information for codec calculation
- * @return ErasureCodingStep
+ * @return ErasureCodingStep.
  */
 ErasureCodingStep calculateCoding(ECBlockGroup blockGroup);
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java
index 87b528c1a484f..333647c982b9f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java
@@ -46,8 +46,8 @@ public interface ErasureCodingStep {
 /**
  * Perform encoding or decoding given the input chunks, and generated results
  * will be written to the output chunks.
- * @param inputChunks inputChunks
- * @param outputChunks outputChunks
+ * @param inputChunks inputChunks.
+ * @param outputChunks outputChunks.
  * @throws IOException raised on errors performing I/O.
  */
 void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java
index 5a06ee883bb7d..30020b9959f0b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java
@@ -65,7 +65,7 @@ public ErasureCoderOptions getOptions() {
 /**
  * We have all the data blocks and parity blocks as input blocks for
  * recovering by default.
It's codec specific - * @param blockGroup blockGroup + * @param blockGroup blockGroup. * @return input blocks */ protected ECBlock[] getInputBlocks(ECBlockGroup blockGroup) { @@ -83,7 +83,7 @@ protected ECBlock[] getInputBlocks(ECBlockGroup blockGroup) { /** * Which blocks were erased ? - * @param blockGroup blockGroup + * @param blockGroup blockGroup. * @return output blocks to recover */ protected ECBlock[] getOutputBlocks(ECBlockGroup blockGroup) { @@ -118,7 +118,7 @@ public void release() { /** * Perform decoding against a block blockGroup. - * @param blockGroup blockGroup + * @param blockGroup blockGroup. * @return decoding step for caller to do the real work */ protected abstract ErasureCodingStep prepareDecodingStep( @@ -126,7 +126,7 @@ protected abstract ErasureCodingStep prepareDecodingStep( /** * Get the number of erased blocks in the block group. - * @param blockGroup blockGroup + * @param blockGroup blockGroup. * @return number of erased blocks */ protected int getNumErasedBlocks(ECBlockGroup blockGroup) { @@ -153,7 +153,7 @@ protected static int getNumErasedBlocks(ECBlock[] inputBlocks) { /** * Get indexes of erased blocks from inputBlocks - * @param inputBlocks inputBlocks + * @param inputBlocks inputBlocks. * @return indexes of erased blocks from inputBlocks */ protected int[] getErasedIndexes(ECBlock[] inputBlocks) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java index c5927c9cdf59c..20a396d313678 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java @@ -37,10 +37,10 @@ public class ErasureDecodingStep implements ErasureCodingStep { /** * The constructor with all the necessary info. - * @param inputBlocks inputBlocks + * @param inputBlocks inputBlocks. * @param erasedIndexes the indexes of erased blocks in inputBlocks array - * @param outputBlocks outputBlocks - * @param rawDecoder rawDecoder + * @param outputBlocks outputBlocks. + * @param rawDecoder rawDecoder. */ public ErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes, ECBlock[] outputBlocks, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java index 3102d6f2c9533..cca272f69a28d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java @@ -83,7 +83,7 @@ public void release() { /** * Perform encoding against a block group. - * @param blockGroup blockGroup + * @param blockGroup blockGroup. 
* @return encoding step for caller to do the real work */ protected abstract ErasureCodingStep prepareEncodingStep( diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java index 854017c6bad59..9e696d2c58477 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java @@ -36,9 +36,9 @@ public class ErasureEncodingStep implements ErasureCodingStep { /** * The constructor with all the necessary info. - * @param inputBlocks inputBlocks - * @param outputBlocks outputBlocks - * @param rawEncoder rawEncoder + * @param inputBlocks inputBlocks. + * @param outputBlocks outputBlocks. + * @param rawEncoder rawEncoder. */ public ErasureEncodingStep(ECBlock[] inputBlocks, ECBlock[] outputBlocks, RawErasureEncoder rawEncoder) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java index a568499ec897d..46f0a76da17df 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java @@ -38,8 +38,8 @@ public abstract class HHErasureCodingStep /** * Constructor given input blocks and output blocks. * - * @param inputBlocks inputBlocks - * @param outputBlocks outputBlocks + * @param inputBlocks inputBlocks. + * @param outputBlocks outputBlocks. */ public HHErasureCodingStep(ECBlock[] inputBlocks, ECBlock[] outputBlocks) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java index 6f8ab521b1a33..4d594f476dfd9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java @@ -43,9 +43,9 @@ public class HHXORErasureDecodingStep extends HHErasureCodingStep { /** * The constructor with all the necessary info. - * @param inputBlocks inputBlocks + * @param inputBlocks inputBlocks. * @param erasedIndexes the indexes of erased blocks in inputBlocks array - * @param outputBlocks outputBlocks + * @param outputBlocks outputBlocks. 
* @param rawDecoder underlying RS decoder for hitchhiker decoding * @param rawEncoder underlying XOR encoder for hitchhiker decoding */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java index 5d5e60508f24a..f571e932b6a85 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java @@ -40,8 +40,8 @@ public class HHXORErasureEncodingStep extends HHErasureCodingStep { /** * The constructor with all the necessary info. * - * @param inputBlocks inputBlocks - * @param outputBlocks outputBlocks + * @param inputBlocks inputBlocks. + * @param outputBlocks outputBlocks. * @param rsRawEncoder underlying RS encoder for hitchhiker encoding * @param xorRawEncoder underlying XOR encoder for hitchhiker encoding */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java index 16c7417446088..9aae5e43c86f7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java @@ -53,7 +53,7 @@ protected ErasureCodingStep prepareDecodingStep( /** * Which blocks were erased ? For XOR it's simple we only allow and return one * erased block, either data or parity. - * @param blockGroup blockGroup + * @param blockGroup blockGroup. * @return output blocks to recover */ @Override diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java index 2fbac7a3457a7..7f771c9677da8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java @@ -203,7 +203,7 @@ public static ByteBuffer getPiggyBackForDecode(ByteBuffer[][] inputs, /** * Find the valid input from all the inputs. * - * @param Generics Type T + * @param Generics Type T. * @param inputs input buffers to look for valid input * @return the first valid input */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java index 1a7757cbc16da..0407d16120819 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java @@ -33,7 +33,7 @@ public class BlockGrouper { /** * Set EC schema. - * @param schema schema + * @param schema schema. */ public void setSchema(ECSchema schema) { this.schema = schema; @@ -41,7 +41,7 @@ public void setSchema(ECSchema schema) { /** * Get EC schema. - * @return ECSchema + * @return ECSchema. 
*/ protected ECSchema getSchema() { return schema; @@ -67,7 +67,7 @@ public int getRequiredNumParityBlocks() { * Calculating and organizing BlockGroup, to be called by ECManager * @param dataBlocks Data blocks to compute parity blocks against * @param parityBlocks To be computed parity blocks - * @return ECBlockGroup + * @return ECBlockGroup. */ public ECBlockGroup makeBlockGroup(ECBlock[] dataBlocks, ECBlock[] parityBlocks) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java index b4220bd8dd61b..90e57201c545b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java @@ -37,9 +37,9 @@ private DumpUtil() { * Convert bytes into format like 0x02 02 00 80. * If limit is negative or too large, then all bytes will be converted. * - * @param bytes bytes - * @param limit limit - * @return bytesToHex + * @param bytes bytes. + * @param limit limit. + * @return bytesToHex. */ public static String bytesToHex(byte[] bytes, int limit) { if (limit <= 0 || limit > bytes.length) { @@ -74,8 +74,8 @@ public static void dumpMatrix(byte[] matrix, /** * Print data in hex format in an array of chunks. - * @param header header - * @param chunks chunks + * @param header header. + * @param chunks chunks. */ public static void dumpChunks(String header, ECChunk[] chunks) { System.out.println(); @@ -88,7 +88,7 @@ public static void dumpChunks(String header, ECChunk[] chunks) { /** * Print data in hex format in a chunk. - * @param chunk chunk + * @param chunk chunk. */ public static void dumpChunk(ECChunk chunk) { String str; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GF256.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GF256.java index 36ef5abcd0f0f..b48a23f8b7085 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GF256.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GF256.java @@ -196,8 +196,8 @@ public static byte gfInv(byte a) { * * Ported from Intel ISA-L library. * - * @param inMatrix inMatrix - * @param outMatrix outMatrix + * @param inMatrix inMatrix. + * @param outMatrix outMatrix. * @param n n */ public static void gfInvertMatrix(byte[] inMatrix, byte[] outMatrix, int n) { @@ -268,9 +268,9 @@ public static void gfInvertMatrix(byte[] inMatrix, byte[] outMatrix, int n) { * gftbl(A) = {A{00}, A{01}, A{02}, ... , A{0f} }, {A{00}, A{10}, A{20}, * ... , A{f0} } -- from ISA-L implementation. * - * @param c c - * @param tbl tbl - * @param offset offset + * @param c c. + * @param tbl tbl. + * @param offset offset. */ public static void gfVectMulInit(byte c, byte[] tbl, int offset) { byte c2 = (byte) ((c << 1) ^ ((c & 0x80) != 0 ? 
0x1d : 0)); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java index 2ca9e7b8261eb..6d22ff0f62eb3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java @@ -97,7 +97,7 @@ private GaloisField(int fieldSize, int primitivePolynomial) { * * @param fieldSize size of the field * @param primitivePolynomial a primitive polynomial corresponds to the size - * @return GaloisField + * @return GaloisField. */ public static GaloisField getInstance(int fieldSize, int primitivePolynomial) { @@ -116,7 +116,7 @@ public static GaloisField getInstance(int fieldSize, /** * Get the object performs Galois field arithmetic with default setting. - * @return GaloisField + * @return GaloisField. */ public static GaloisField getInstance() { return getInstance(DEFAULT_FIELD_SIZE, DEFAULT_PRIMITIVE_POLYNOMIAL); @@ -240,11 +240,11 @@ public void solveVandermondeSystem(int[] x, int[] y, int len) { /** * A "bulk" version to the solving of Vandermonde System. * - * @param x input x - * @param y input y - * @param outputOffsets input outputOffsets - * @param len input len - * @param dataLen input dataLen + * @param x input x. + * @param y input y. + * @param outputOffsets input outputOffsets. + * @param len input len. + * @param dataLen input dataLen. */ public void solveVandermondeSystem(int[] x, byte[][] y, int[] outputOffsets, int len, int dataLen) { @@ -278,9 +278,9 @@ public void solveVandermondeSystem(int[] x, byte[][] y, int[] outputOffsets, /** * A "bulk" version of the solveVandermondeSystem, using ByteBuffer. * - * @param x input x - * @param y input y - * @param len input len + * @param x input x. + * @param y input y. + * @param len input len. */ public void solveVandermondeSystem(int[] x, ByteBuffer[] y, int len) { ByteBuffer p; @@ -425,10 +425,10 @@ public void substitute(byte[][] p, byte[] q, int x) { * Tends to be 2X faster than the "int" substitute in a loop. * * @param p input polynomial - * @param offsets input offset - * @param len input len + * @param offsets input offset. + * @param len input len. * @param q store the return result - * @param offset input offset + * @param offset input offset. * @param x input field */ public void substitute(byte[][] p, int[] offsets, @@ -452,7 +452,7 @@ public void substitute(byte[][] p, int[] offsets, * @param p input polynomial * @param q store the return result * @param x input field - * @param len input len + * @param len input len. */ public void substitute(ByteBuffer[] p, int len, ByteBuffer q, int x) { int y = 1, iIdx, oIdx; @@ -473,8 +473,8 @@ public void substitute(ByteBuffer[] p, int len, ByteBuffer q, int x) { * The "bulk" version of the remainder. * Warning: This function will modify the "dividend" inputs. * - * @param divisor divisor - * @param dividend dividend + * @param divisor divisor. + * @param dividend dividend. */ public void remainder(byte[][] dividend, int[] divisor) { for (int i = dividend.length - divisor.length; i >= 0; i--) { @@ -493,10 +493,10 @@ public void remainder(byte[][] dividend, int[] divisor) { * The "bulk" version of the remainder. * Warning: This function will modify the "dividend" inputs. 
* - * @param dividend dividend - * @param offsets offsets - * @param len len - * @param divisor divisor + * @param dividend dividend. + * @param offsets offsets. + * @param len len. + * @param divisor divisor. */ public void remainder(byte[][] dividend, int[] offsets, int len, int[] divisor) { @@ -519,8 +519,8 @@ public void remainder(byte[][] dividend, int[] offsets, * The "bulk" version of the remainder, using ByteBuffer. * Warning: This function will modify the "dividend" inputs. * - * @param dividend dividend - * @param divisor divisor + * @param dividend dividend. + * @param divisor divisor. */ public void remainder(ByteBuffer[] dividend, int[] divisor) { int idx1, idx2; @@ -544,7 +544,7 @@ public void remainder(ByteBuffer[] dividend, int[] divisor) { * Perform Gaussian elimination on the given matrix. This matrix has to be a * fat matrix (number of rows > number of columns). * - * @param matrix matrix + * @param matrix matrix. */ public void gaussianElimination(int[][] matrix) { assert(matrix != null && matrix.length > 0 && matrix[0].length > 0 diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java index e2abbbbdb788b..b1fdc82a11628 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java @@ -60,9 +60,9 @@ public static void initTables(int k, int rows, byte[] codingMatrix, /** * Ported from Intel ISA-L library. * - * @param k k - * @param a a - * @param m m + * @param k k. + * @param a a. + * @param m m. */ public static void genCauchyMatrix(byte[] a, int m, int k) { // Identity matrix in high position @@ -87,12 +87,12 @@ public static void genCauchyMatrix(byte[] a, int m, int k) { * The algorithm is ported from Intel ISA-L library for compatible. It * leverages Java auto-vectorization support for performance. * - * @param gfTables gfTables - * @param dataLen dataLen - * @param inputs inputs - * @param inputOffsets inputOffsets - * @param outputs outputs - * @param outputOffsets outputOffsets + * @param gfTables gfTables. + * @param dataLen dataLen. + * @param inputs inputs. + * @param inputOffsets inputOffsets. + * @param outputs outputs. + * @param outputOffsets outputOffsets. */ public static void encodeData(byte[] gfTables, int dataLen, byte[][] inputs, int[] inputOffsets, byte[][] outputs, @@ -145,9 +145,9 @@ public static void encodeData(byte[] gfTables, int dataLen, byte[][] inputs, /** * See above. Try to use the byte[] version when possible. * - * @param gfTables gfTables - * @param inputs inputs - * @param outputs outputs + * @param gfTables gfTables. + * @param inputs inputs. + * @param outputs outputs. 
*/ public static void encodeData(byte[] gfTables, ByteBuffer[] inputs, ByteBuffer[] outputs) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/ByteArray.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/ByteArray.java index 054cd514566f9..964fb04c1b976 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/ByteArray.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/ByteArray.java @@ -35,7 +35,7 @@ public final class ByteArray implements RawComparable { /** * Constructing a ByteArray from a {@link BytesWritable}. * - * @param other other + * @param other other. */ public ByteArray(BytesWritable other) { this(other.getBytes(), 0, other.getLength()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java index 1a6ef3e4b2824..aeacc16a78f9b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java @@ -549,7 +549,7 @@ public DataOutputStream prepareAppendKey(int length) throws IOException { * guarantees that the value is encoded in one chunk, and avoids * intermediate chunk buffering. * @throws IOException raised on errors performing I/O. - * @return DataOutputStream + * @return DataOutputStream. */ public DataOutputStream prepareAppendValue(int length) throws IOException { if (state != State.END_KEY) { @@ -796,7 +796,7 @@ public boolean equals(Object obj) { * The length of TFile. This is required because we have no easy * way of knowing the actual size of the input file through the * File input stream. - * @param conf configuration + * @param conf configuration. * @throws IOException raised on errors performing I/O. */ public Reader(FSDataInputStream fsdis, long fileLength, Configuration conf) @@ -1685,7 +1685,7 @@ public void get(BytesWritable key, BytesWritable value) * @param key * BytesWritable to hold the key. * @throws IOException raised on errors performing I/O. - * @return the key into BytesWritable + * @return the key into BytesWritable. */ public int getKey(BytesWritable key) throws IOException { key.setSize(getKeyLength()); @@ -1699,9 +1699,9 @@ public int getKey(BytesWritable key) throws IOException { * directly uses the buffer inside BytesWritable for storing the value. * The call does not require the value length to be known. * - * @param value value + * @param value value. * @throws IOException raised on errors performing I/O. - * @return long value + * @return long value. */ public long getValue(BytesWritable value) throws IOException { DataInputStream dis = getValueStream(); @@ -1831,7 +1831,7 @@ public int getValueLength() { * {@link #getValue(byte[])}, {@link #getValue(byte[], int)}, * {@link #getValueStream}. * - * @param buf buf + * @param buf buf. * @return the length of the value. Does not require * isValueLengthKnown() to be true. * @throws IOException raised on errors performing I/O. @@ -1850,8 +1850,8 @@ public int getValue(byte[] buf) throws IOException { * exception: {@link #getValue(byte[])}, {@link #getValue(byte[], int)}, * {@link #getValueStream}. * - * @param buf buf - * @param offset offset + * @param buf buf. + * @param offset offset. * @return the length of the value. 
Does not require * isValueLengthKnown() to be true. * @throws IOException raised on errors performing I/O. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java index c622e828e4c7d..714dc5a12acd2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java @@ -249,8 +249,8 @@ public static long readVLong(DataInput in) throws IOException { /** * Write a String as a VInt n, followed by n Bytes as in Text format. * - * @param out out - * @param s s + * @param out out. + * @param s s. * @throws IOException raised on errors performing I/O. */ public static void writeString(DataOutput out, String s) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java index 8a1e6fe235ac7..3ebbcd912dc71 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java @@ -51,8 +51,8 @@ public class AsyncCallHandler { /** * @return the async return value from {@link AsyncCallHandler}. - * @param T - * @param R + * @param T. + * @param R. */ @InterfaceStability.Unstable @SuppressWarnings("unchecked") @@ -68,7 +68,7 @@ public static AsyncGet getAsyncReturn() { /** * For the lower rpc layers to set the async return value. - * @param asyncReturn asyncReturn + * @param asyncReturn asyncReturn. */ @InterfaceStability.Unstable public static void setLowerLayerAsyncReturn( diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java index 394ccba22a46e..0b66347f1f90f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java @@ -76,9 +76,9 @@ public class RetryPolicies { * Keep trying forever with a fixed time between attempts. *

* - * @param sleepTime sleepTime - * @param timeUnit timeUnit - * @return RetryPolicy + * @param sleepTime sleepTime. + * @param timeUnit timeUnit. + * @return RetryPolicy. */ public static final RetryPolicy retryForeverWithFixedSleep(long sleepTime, TimeUnit timeUnit) { @@ -92,10 +92,10 @@ public static final RetryPolicy retryForeverWithFixedSleep(long sleepTime, * and then fail by re-throwing the exception. *

* - * @param maxRetries maxRetries - * @param sleepTime sleepTime - * @param timeUnit timeUnit - * @return RetryPolicy + * @param maxRetries maxRetries. + * @param sleepTime sleepTime. + * @param timeUnit timeUnit. + * @return RetryPolicy. */ public static final RetryPolicy retryUpToMaximumCountWithFixedSleep(int maxRetries, long sleepTime, TimeUnit timeUnit) { return new RetryUpToMaximumCountWithFixedSleep(maxRetries, sleepTime, timeUnit); @@ -107,10 +107,10 @@ public static final RetryPolicy retryUpToMaximumCountWithFixedSleep(int maxRetri * and then fail by re-throwing the exception. *

* - * @param timeUnit timeUnit - * @param sleepTime sleepTime - * @param maxTime maxTime - * @return RetryPolicy + * @param timeUnit timeUnit. + * @param sleepTime sleepTime. + * @param maxTime maxTime. + * @return RetryPolicy. */ public static final RetryPolicy retryUpToMaximumTimeWithFixedSleep(long maxTime, long sleepTime, TimeUnit timeUnit) { return new RetryUpToMaximumTimeWithFixedSleep(maxTime, sleepTime, timeUnit); @@ -123,10 +123,10 @@ public static final RetryPolicy retryUpToMaximumTimeWithFixedSleep(long maxTime, * The time between attempts is sleepTime mutliplied by the number of tries so far. *

* - * @param sleepTime sleepTime - * @param maxRetries maxRetries - * @param timeUnit timeUnit - * @return RetryPolicy + * @param sleepTime sleepTime. + * @param maxRetries maxRetries. + * @param timeUnit timeUnit. + * @return RetryPolicy. */ public static final RetryPolicy retryUpToMaximumCountWithProportionalSleep(int maxRetries, long sleepTime, TimeUnit timeUnit) { return new RetryUpToMaximumCountWithProportionalSleep(maxRetries, sleepTime, timeUnit); @@ -141,10 +141,10 @@ public static final RetryPolicy retryUpToMaximumCountWithProportionalSleep(int m *

* * - * @param timeUnit timeUnit - * @param maxRetries maxRetries - * @param sleepTime sleepTime - * @return RetryPolicy + * @param timeUnit timeUnit. + * @param maxRetries maxRetries. + * @param sleepTime sleepTime. + * @return RetryPolicy. */ public static final RetryPolicy exponentialBackoffRetry( int maxRetries, long sleepTime, TimeUnit timeUnit) { @@ -156,9 +156,9 @@ public static final RetryPolicy exponentialBackoffRetry( * Set a default policy with some explicit handlers for specific exceptions. *

 *
- * @param exceptionToPolicyMap exceptionToPolicyMap
- * @param defaultPolicy defaultPolicy
- * @return RetryPolicy
+ * @param exceptionToPolicyMap exceptionToPolicyMap.
+ * @param defaultPolicy defaultPolicy.
+ * @return RetryPolicy.
  */
 public static final RetryPolicy retryByException(RetryPolicy defaultPolicy,
     Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap) {
@@ -171,9 +171,9 @@ public static final RetryPolicy retryByException(RetryPolicy defaultPolicy,
  * Set a default policy with some explicit handlers for specific exceptions.
  *

* - * @param defaultPolicy defaultPolicy - * @param exceptionToPolicyMap exceptionToPolicyMap - * @return RetryPolicy + * @param defaultPolicy defaultPolicy. + * @param exceptionToPolicyMap exceptionToPolicyMap. + * @return RetryPolicy. */ public static final RetryPolicy retryByRemoteException( RetryPolicy defaultPolicy, @@ -183,9 +183,9 @@ public static final RetryPolicy retryByRemoteException( /** * A retry policy for exceptions other than RemoteException. - * @param defaultPolicy defaultPolicy - * @param exceptionToPolicyMap exceptionToPolicyMap - * @return RetryPolicy + * @param defaultPolicy defaultPolicy. + * @param exceptionToPolicyMap exceptionToPolicyMap. + * @return RetryPolicy. */ public static final RetryPolicy retryOtherThanRemoteException( RetryPolicy defaultPolicy, @@ -473,7 +473,7 @@ public String toString() { * where t_i and n_i are the i-th pair of sleep time and number of retries. * Note that the white spaces in the string are ignored. * - * @param s input string + * @param s input string. * @return the parsed object, or null if the parsing fails. */ public static MultipleLinearRandomRetry parseCommaSeparatedString(String s) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java index a8bc50c702adb..eaff5bbd528d8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java @@ -34,7 +34,7 @@ public class RetryProxy { * @param iface the interface that the retry will implement * @param implementation the instance whose methods should be retried * @param retryPolicy the policy for retrying method call failures - * @param T + * @param T. * @return the retry proxy */ public static Object create(Class iface, T implementation, @@ -52,7 +52,7 @@ public static Object create(Class iface, T implementation, * @param iface the interface that the retry will implement * @param proxyProvider provides implementation instances whose methods should be retried * @param retryPolicy the policy for retrying or failing over method call failures - * @param T + * @param T. * @return the retry proxy */ public static Object create(Class iface, @@ -71,7 +71,7 @@ public static Object create(Class iface, * {@link RetryPolicies#TRY_ONCE_THEN_FAIL} is used. * * @param iface the interface that the retry will implement - * @param T + * @param T. * @param implementation the instance whose methods should be retried * @param methodNameToPolicyMap a map of method names to retry policies * @return the retry proxy @@ -93,8 +93,8 @@ public static Object create(Class iface, T implementation, * @param iface the interface that the retry will implement * @param proxyProvider provides implementation instances whose methods should be retried * @param methodNameToPolicyMap map of method names to retry policies - * @param defaultPolicy defaultPolicy - * @param T + * @param defaultPolicy defaultPolicy. + * @param T. 
* @return the retry proxy */ public static Object create(Class iface, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java index 5a40cf0f08cc2..d2fb070ee2c25 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java @@ -48,7 +48,7 @@ public class RetryUtils { * - non-IOException. * * - * @param conf configuration + * @param conf configuration. * @param retryPolicyEnabledKey conf property key for enabling retry * @param defaultRetryPolicyEnabled default retryPolicyEnabledKey conf value * @param retryPolicySpecKey conf property key for retry policy spec @@ -168,7 +168,7 @@ public String toString() { * Retry policy spec: * N pairs of sleep-time and number-of-retries "s1,n1,s2,n2,..." * - * @param conf configuration + * @param conf configuration. * @param retryPolicyEnabledKey conf property key for enabling retry * @param defaultRetryPolicyEnabled default retryPolicyEnabledKey conf value * @param retryPolicySpecKey conf property key for retry policy spec diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java index 87a29565ba606..4bdd60d90c382 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java @@ -35,14 +35,14 @@ * other producers may read from the input between calls to * {@link #deserialize(Object)}. *

- * @param <T> generic type
+ * @param <T> generic type.
  */
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public interface Deserializer<T> {
   /**
    * <p>Prepare the deserializer for reading.</p>
- * @param in input stream
+ * @param in input stream.
  * @throws IOException raised on errors performing I/O.
  */
 void open(InputStream in) throws IOException;
@@ -55,7 +55,7 @@ public interface Deserializer<T> {
  * stream. Otherwise, if the object <code>t</code> is null a new
  * deserialized object will be created.
  * <p>
- * @param t t + * @param t t. * @return the deserialized object * @throws IOException raised on errors performing I/O. */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java index b60d310f0b64e..29c04f66d4370 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java @@ -37,7 +37,7 @@ * implementation of {@link RawComparator} that operates directly * on byte representations. *

- * @param <T> generic type
+ * @param <T> generic type.
  */
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java
index ac8dbbeaa277e..d53f7ab75c503 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java
@@ -31,7 +31,7 @@
  * {@link Deserializer} to deserialize objects that are then compared via
  * their {@link Comparable} interfaces.
  *

- * @param <T> generic type
+ * @param <T> generic type.
  * @see JavaSerialization
  */
 @InterfaceAudience.Public
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java
index 12a9eeb2f3b78..0793dc1ca0184 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java
@@ -25,7 +25,7 @@
 *

* Encapsulates a {@link Serializer}/{@link Deserializer} pair. *

- * @param <T> generic type
+ * @param <T> generic type.
  */
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
@@ -35,20 +35,20 @@ public interface Serialization<T> {
  * Allows clients to test whether this {@link Serialization}
  * supports the given class.
  *
- * @param c class
- * @return if accept true,not false
+ * @param c class.
+ * @return true if this serialization accepts the given class, false otherwise.
  */
 boolean accept(Class<?> c);
 
 /**
  * @return a {@link Serializer} for the given class.
- * @param c class
+ * @param c class.
  */
 Serializer<T> getSerializer(Class<T> c);
 
 /**
  * @return a {@link Deserializer} for the given class.
- * @param c class
+ * @param c class.
  */
 Deserializer<T> getDeserializer(Class<T> c);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
index 0b166ddc1f282..b531ae85233e8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
@@ -53,7 +53,7 @@ public class SerializationFactory extends Configured {
  * classnames.
  *

* - * @param conf configuration + * @param conf configuration. */ public SerializationFactory(Configuration conf) { super(conf); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java index 50a433c52b138..c44b3678fc3be 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java @@ -35,21 +35,21 @@ * other producers may write to the output between calls to * {@link #serialize(Object)}. *

- * @param <T> generic type
+ * @param <T> generic type.
  */
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public interface Serializer<T> {
   /**
    * <p>Prepare the serializer for writing.</p>
- * @param out output stream
+ * @param out output stream.
  * @throws IOException raised on errors performing I/O.
  */
 void open(OutputStream out) throws IOException;
 
 /**
  * <p>Serialize <code>t</code> to the underlying output stream.</p>
- * @param t t + * @param t t. * @throws IOException raised on errors performing I/O. */ void serialize(T t) throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java index 7280e3f44e4dc..2327fd2d55a2e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java @@ -61,24 +61,24 @@ public Serializer getSerializer(Class c) { /** * Return an Avro Schema instance for the given class. - * @param t Generics Type T - * @return schema + * @param t Generics Type T. + * @return schema. */ @InterfaceAudience.Private public abstract Schema getSchema(T t); /** * Create and return Avro DatumWriter for the given class. - * @param clazz clazz - * @return DatumWriter + * @param clazz clazz. + * @return DatumWriter. */ @InterfaceAudience.Private public abstract DatumWriter getWriter(Class clazz); /** * Create and return Avro DatumReader for the given class. - * @param clazz clazz - * @return DatumReader + * @param clazz clazz. + * @return DatumReader. */ @InterfaceAudience.Private public abstract DatumReader getReader(Class clazz); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/GenericRefreshProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/GenericRefreshProtocol.java index c12f2abc3a534..10e661a3095cd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/GenericRefreshProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/GenericRefreshProtocol.java @@ -43,9 +43,9 @@ public interface GenericRefreshProtocol { * Refresh the resource based on identity passed in. * * @param identifier input identifier. - * @param args input args + * @param args input args. * @throws IOException raised on errors performing I/O. - * @return Collection RefreshResponse + * @return Collection RefreshResponse. */ @Idempotent Collection refresh(String identifier, String[] args) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java index f5f970462102d..e53f57b1fc9dd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java @@ -427,7 +427,7 @@ public static ProtobufRpcEngineCallback registerForDeferredResponse() { * @param portRangeConfig A config parameter that can be used to restrict * the range of ports used when port is 0 (an ephemeral port) * @param alignmentContext provides server state info on client responses - * @param secretManager input secretManager + * @param secretManager input secretManager. * @param queueSizePerHandler input queueSizePerHandler. * @param numReaders input numReaders. * @throws IOException raised on errors performing I/O. 
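For context on the contract documented in the serializer hunks above: a Serializer is opened against a stream, handed objects, and closed, and a Deserializer mirrors that on the read side. The round trip below is an illustrative sketch only, not part of the patch; it assumes the default WritableSerialization is registered, and the Text key type and in-memory streams are arbitrary choices.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;

public class SerializationRoundTrip {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Resolves a Serialization that accepts Text (WritableSerialization by default).
    SerializationFactory factory = new SerializationFactory(conf);

    Serializer<Text> serializer = factory.getSerializer(Text.class);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    serializer.open(out);                    // prepare the serializer for writing
    serializer.serialize(new Text("abc"));   // write one object
    serializer.close();

    Deserializer<Text> deserializer = factory.getDeserializer(Text.class);
    deserializer.open(new ByteArrayInputStream(out.toByteArray()));
    Text t = deserializer.deserialize(null); // null => a new deserialized object is created
    deserializer.close();
    System.out.println(t);                   // prints: abc
  }
}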
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
index f8034e7a16c95..818305b316984 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
@@ -169,7 +169,7 @@ static public String getProtocolName(Class<?> protocol) {
  * otherwise get it from the versionID field of the protocol class.
  *
  * @param protocol input protocol.
- * @return ProtocolVersion
+ * @return ProtocolVersion.
  */
 static public long getProtocolVersion(Class<?> protocol) {
 if (protocol == null) {
@@ -264,7 +264,7 @@ public String getInterfaceName() {
 }
 
 /**
- * @return Get the client's preferred version
+ * @return Get the client's preferred version.
  */
 public long getClientVersion() {
 return clientVersion;
@@ -403,7 +403,7 @@ public static <T> T waitForProxy(Class<T> protocol,
  * @param addr remote address
  * @param conf configuration to use
  * @param rpcTimeout timeout for each RPC
- * @param connectionRetryPolicy input connectionRetryPolicy
+ * @param connectionRetryPolicy input connectionRetryPolicy.
  * @param timeout time in milliseconds before giving up
  * @return the proxy
  * @throws IOException if the far end through a RemoteException.
@@ -476,7 +476,7 @@ public static <T> T getProxy(Class<T> protocol,
  * Get a protocol proxy that contains a proxy connection to a remote server
  * and a set of methods that are supported by the server.
  *
- * @param <T> Generics Type T
+ * @param <T> Generics Type T.
  * @param protocol protocol class
  * @param clientVersion client version
  * @param addr remote address
@@ -497,14 +497,14 @@ public static <T> ProtocolProxy<T> getProtocolProxy(Class<T> protocol,
  * Construct a client-side proxy object that implements the named protocol,
  * talking to a server at the named address.
  *
- * @param <T> Generics Type T
+ * @param <T> Generics Type T.
  * @param protocol input protocol.
- * @param clientVersion input clientVersion
- * @param addr input addr
- * @param ticket input tocket
- * @param conf input conf
- * @param factory input factory
- * @return the protocol proxy
+ * @param clientVersion input clientVersion.
+ * @param addr input addr.
+ * @param ticket input ticket.
+ * @param conf input conf.
+ * @param factory input factory.
+ * @return the protocol proxy.
  * @throws IOException raised on errors performing I/O.
  *
  */
@@ -572,7 +572,7 @@ public static <T> T getProxy(Class<T> protocol,
  * Get a protocol proxy that contains a proxy connection to a remote server
  * and a set of methods that are supported by the server.
  *
- * @param <T> Generics Type T
+ * @param <T> Generics Type T.
  * @param protocol protocol
  * @param clientVersion client's version
  * @param addr server address
@@ -600,7 +600,7 @@ public static <T> ProtocolProxy<T> getProtocolProxy(Class<T> protocol,
  * Get a protocol proxy that contains a proxy connection to a remote server
  * and a set of methods that are supported by the server.
  *
- * @param <T> Generics Type T
+ * @param <T> Generics Type T.
  * @param protocol protocol
  * @param clientVersion client's version
  * @param addr server address
@@ -647,7 +647,7 @@ public static <T> ProtocolProxy<T> getProtocolProxy(Class<T> protocol,
  * @param fallbackToSimpleAuth set to true or false during calls to indicate
  *        if a secure client falls back to simple auth
  * @param alignmentContext state alignment context
- * @param <T> Generics Type T
+ * @param <T> Generics Type T.
   * @return the proxy
    * @throws IOException if any error occurs
    */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
index b874c4a1d9693..3d64a84bfb46f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
@@ -346,7 +346,7 @@ private static CacheEntryWithPayload newEntry(Object payload,
   /**
    * Static method that provides null check for retryCache.
    * @param cache input Cache.
-   * @return CacheEntry
+   * @return CacheEntry.
    */
   public static CacheEntry waitForCompletion(RetryCache cache) {
     if (skipRetryCache()) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
index d1564456f325d..afc9d035b097c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
@@ -49,7 +49,7 @@ public interface RpcEngine {
    * @param rpcTimeout input rpcTimeout.
    * @param connectionRetryPolicy input connectionRetryPolicy.
    * @throws IOException raised on errors performing I/O.
-   * @return ProtocolProxy
+   * @return ProtocolProxy.
    */
   <T> ProtocolProxy<T> getProxy(Class<T> protocol,
                   long clientVersion, InetSocketAddress addr,
@@ -72,7 +72,7 @@ <T> ProtocolProxy<T> getProxy(Class<T> protocol,
    * @param fallbackToSimpleAuth input fallbackToSimpleAuth.
    * @param alignmentContext input alignmentContext.
    * @throws IOException raised on errors performing I/O.
-   * @return ProtocolProxy
+   * @return ProtocolProxy.
    */
   <T> ProtocolProxy<T> getProxy(Class<T> protocol, long clientVersion,
                   InetSocketAddress addr,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java
index c02af842cf23b..ce4aac54b6cd2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java
@@ -47,14 +47,14 @@ public RpcServerException(final String message, final Throwable cause) {
   }
 
   /**
-   * @return get the rpc status corresponding to this exception
+   * @return the rpc status corresponding to this exception.
    */
   public RpcStatusProto getRpcStatusProto() {
     return RpcStatusProto.ERROR;
   }
 
   /**
-   * @return get the detailed rpc status corresponding to this exception
+   * @return the detailed rpc status corresponding to this exception.
    */
   public RpcErrorCodeProto getRpcErrorCodeProto() {
     return RpcErrorCodeProto.ERROR_RPC_SERVER;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index fd1d2840c19f6..90f730d38836d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -3659,7 +3659,7 @@ public Writable call(Writable param, long receiveTime) throws Exception {
    * @param protocol input protocol.
    * @param param input param.
    * @param receiveTime input receiveTime.
-   * @return Call
+   * @return Call.
   * @throws Exception raised on errors performing I/O.
    */
   public abstract Writable call(RPC.RpcKind rpcKind, String protocol,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
index b76c2e09db237..21181f860d98a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
@@ -320,7 +320,7 @@ public <T> ProtocolProxy<T> getProxy(Class<T> protocol, long clientVersion,
    * @param connectionRetryPolicy input connectionRetryPolicy.
    * @param fallbackToSimpleAuth input fallbackToSimpleAuth.
    * @param alignmentContext input alignmentContext.
-   * @return ProtocolProxy
+   * @return ProtocolProxy.
    */
   @Override
   @SuppressWarnings("unchecked")
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
index cb70c18a19be0..e2ad16fce2c57 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
@@ -67,7 +67,7 @@ public class LogLevel {
   /**
    * A command line implementation
    * @param args input args.
-   * @throws Exception exception
+   * @throws Exception exception.
    */
   public static void main(String[] args) throws Exception {
     CLI cli = new CLI(new Configuration());
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
index e4693ed775e2d..fef8c4b7e4ba9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
@@ -50,7 +50,7 @@ public abstract class MetricsSystem implements MetricsSystemMXBean {
    * the annotations of the source object.)
    * @param desc the description of the source (or null. See above.)
    * @return the source object
-   * @exception MetricsException Metrics Exception
+   * @exception MetricsException Metrics Exception.
    */
   public abstract <T> T register(String name, String desc, T source);
 
@@ -65,7 +65,7 @@ public abstract class MetricsSystem implements MetricsSystemMXBean {
    * @param <T> the actual type of the source object
    * @param source object to register
    * @return the source object
-   * @exception MetricsException Metrics Exception
+   * @exception MetricsException Metrics Exception.
    */
   public <T> T register(T source) {
     return register(null, null, source);
@@ -85,7 +85,7 @@ public <T> T register(T source) {
   * @param name of the sink. Must be unique.
    * @param desc the description of the sink
    * @return the sink
-   * @exception MetricsException Metrics Exception
+   * @exception MetricsException Metrics Exception.
*/ public abstract T register(String name, String desc, T sink); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java index f0fd7689b8604..8656da6f316c0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java @@ -29,19 +29,19 @@ public interface MetricsSystemMXBean { /** * Start the metrics system - * @throws MetricsException Metrics Exception + * @throws MetricsException Metrics Exception. */ public void start(); /** * Stop the metrics system - * @throws MetricsException Metrics Exception + * @throws MetricsException Metrics Exception. */ public void stop(); /** * Start metrics MBeans - * @throws MetricsException Metrics Exception + * @throws MetricsException Metrics Exception. */ public void startMetricsMBeans(); @@ -49,7 +49,7 @@ public interface MetricsSystemMXBean { * Stop metrics MBeans. * Note, it doesn't stop the metrics system control MBean, * i.e this interface. - * @throws MetricsException Metrics Exception + * @throws MetricsException Metrics Exception. */ public void stopMetricsMBeans(); @@ -57,7 +57,7 @@ public interface MetricsSystemMXBean { * @return the current config * Avoided getConfig, as it'll turn into a "Config" attribute, * which doesn't support multiple line values in jconsole. - * @throws MetricsException Metrics Exception + * @throws MetricsException Metrics Exception. */ public String currentConfig(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/PrometheusMetricsSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/PrometheusMetricsSink.java index 19c77cfd4e896..9024203700ee1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/PrometheusMetricsSink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/PrometheusMetricsSink.java @@ -85,9 +85,9 @@ public void putMetrics(MetricsRecord metricsRecord) { * Convert CamelCase based names to lower-case names where the separator * is the underscore, to follow prometheus naming conventions. * - * @param metricName metricName - * @param recordName recordName - * @return prometheusName + * @param metricName metricName. + * @param recordName recordName. + * @return prometheusName. */ public String prometheusName(String recordName, String metricName) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java index 5c5fe97f42610..d3d794fa74a91 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java @@ -212,7 +212,7 @@ private void loadGangliaConf(GangliaConfType gtype) { /** * Lookup GangliaConf from cache. If not found, return default values * - * @param metricName metricName + * @param metricName metricName. 
   * @return looked up GangliaConf
    */
   protected GangliaConf getGangliaConfForMetric(String metricName) {
@@ -253,7 +253,7 @@ private void pad() {
 
   /**
    * Puts an integer into the buffer as 4 bytes, big-endian.
-   * @param i i
+   * @param i i.
    */
   protected void xdr_int(int i) {
     buffer[offset++] = (byte) ((i >> 24) & 0xff);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java
index 7de287ad5dfbd..58081449ee74f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java
@@ -63,8 +63,8 @@ private MBeans() {
    * Where the {@literal <serviceName> and <nameName>} are the supplied
    * parameters.
    *
-   * @param serviceName serviceName
-   * @param nameName nameName
+   * @param serviceName serviceName.
+   * @param nameName nameName.
    * @param theMbean - the MBean to register
    * @return the named used to register the MBean
    */
@@ -79,8 +79,8 @@ static public ObjectName register(String serviceName, String nameName,
    * Where the {@literal <serviceName> and <nameName>} are the supplied
    * parameters.
    *
-   * @param serviceName serviceName
-   * @param nameName nameName
+   * @param serviceName serviceName.
+   * @param nameName nameName.
    * @param properties - Key value pairs to define additional JMX ObjectName
    *                   properties.
    * @param theMbean - the MBean to register
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java
index e39bd4d5db68c..46a9d35f9d242 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java
@@ -108,7 +108,7 @@ private double allowableError(int rank) {
   /**
    * Add a new value from the stream.
    *
-   * @param v v
+   * @param v v.
    */
   synchronized public void insert(long v) {
     buffer[bufferCount] = v;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
index fd9cd7c946b1f..c49706d66f27d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
@@ -134,7 +134,7 @@ public static SocketFactory getDefaultSocketFactory(Configuration conf) {
    * given proxy URI corresponds to an absence of configuration parameter,
    * returns null. If the URI is malformed raises an exception.
    *
-   * @param conf configuration
+   * @param conf configuration.
    * @param propValue the property which is the class name of the
    * SocketFactory to instantiate; assumed non null and non empty.
    * @return a socket factory as defined in the property value.
@@ -156,8 +156,8 @@ public static SocketFactory getSocketFactoryFromProperty(
    * {@literal <host>:<port>}
    * {@literal <fs>://<host>:<port>/<path>}
    *
-   * @param target target
-   * @return socket addr
+   * @param target target.
+   * @return socket addr.
   */
  public static InetSocketAddress createSocketAddr(String target) {
    return createSocketAddr(target, -1);
  }
@@ -169,9 +169,9 @@ public static InetSocketAddress createSocketAddr(String target) {
    * {@literal <host>:<port>}
    * {@literal <fs>://<host>:<port>/<path>}
    *
-   * @param target target
-   * @param defaultPort default port
-   * @return socket addr
+   * @param target target.
+   * @param defaultPort default port.
+   * @return socket addr.
    */
   public static InetSocketAddress createSocketAddr(String target,
                                                    int defaultPort) {
@@ -191,7 +191,7 @@ public static InetSocketAddress createSocketAddr(String target,
    * @param configName the name of the configuration from which
    *                   target was loaded. This is used in the
    *                   exception message in the case that parsing fails.
-   * @return socket addr
+   * @return socket addr.
    */
   public static InetSocketAddress createSocketAddr(String target,
                                                    int defaultPort,
@@ -371,8 +371,8 @@ private static String canonicalizeHost(String host) {
    * daemons, one can set up mappings from those hostnames to "localhost".
    * {@link NetUtils#getStaticResolution(String)} can be used to query for
    * the actual hostname.
-   * @param host the hostname or IP use to instantiate the object
-   * @param resolvedName resolved name
+   * @param host the hostname or IP used to instantiate the object.
+   * @param resolvedName resolved name.
    */
   public static void addStaticResolution(String host, String resolvedName) {
     synchronized (hostToResolved) {
@@ -384,7 +384,7 @@ public static void addStaticResolution(String host, String resolvedName) {
    * Retrieves the resolved name for the passed host. The resolved name must
    * have been set earlier using
    * {@link NetUtils#addStaticResolution(String, String)}
-   * @param host the hostname or IP use to instantiate the object
+   * @param host the hostname or IP used to instantiate the object.
    * @return the resolution
    */
   public static String getStaticResolution(String host) {
@@ -420,7 +420,7 @@ public static List<String> getAllStaticResolutions() {
    * the server binds to "0.0.0.0". This returns "hostname:port" of the server,
    * or "127.0.0.1:port" when the getListenerAddress() returns "0.0.0.0:port".
    *
-   * @param server server
+   * @param server server.
    * @return socket address that a client can use to connect to the server.
    */
   public static InetSocketAddress getConnectAddress(Server server) {
@@ -449,7 +449,7 @@ public static InetSocketAddress getConnectAddress(InetSocketAddress addr) {
   /**
    * Same as getInputStream(socket, socket.getSoTimeout()).
    *
-   * @param socket socket
+   * @param socket socket.
    * @throws IOException raised on errors performing I/O.
    * @return SocketInputWrapper for reading from the socket.
    * @see #getInputStream(Socket, long)
@@ -474,7 +474,7 @@ public static SocketInputWrapper getInputStream(Socket socket)
    *
    * @see Socket#getChannel()
    *
-   * @param socket socket
+   * @param socket socket.
    * @param timeout timeout in milliseconds. zero for waiting as
    *                long as necessary.
    * @return SocketInputWrapper for reading from the socket.
@@ -506,7 +506,7 @@ public static SocketInputWrapper getInputStream(Socket socket, long timeout)
    *
    * @see #getOutputStream(Socket, long)
    *
-   * @param socket socket
+   * @param socket socket.
    * @return OutputStream for writing to the socket.
    * @throws IOException raised on errors performing I/O.
    */
@@ -528,7 +528,7 @@ public static OutputStream getOutputStream(Socket socket)
    *
    * @see Socket#getChannel()
    *
-   * @param socket socket
+   * @param socket socket.
    * @param timeout timeout in milliseconds. This may not always apply. zero
    *                for waiting as long as necessary.
   * @return OutputStream for writing to the socket.
@@ -553,7 +553,7 @@ public static OutputStream getOutputStream(Socket socket, long timeout)
    *
    * @see java.net.Socket#connect(java.net.SocketAddress, int)
    *
-   * @param socket socket
+   * @param socket socket.
    * @param address the remote address
    * @param timeout timeout in milliseconds
    * @throws IOException raised on errors performing I/O.
@@ -568,7 +568,7 @@ public static void connect(Socket socket,
    * Like {@link NetUtils#connect(Socket, SocketAddress, int)} but
    * also takes a local address and port to bind the socket to.
    *
-   * @param socket socket
+   * @param socket socket.
    * @param endpoint the remote address
    * @param localAddr the local address to bind the socket to
    * @param timeout timeout in milliseconds
@@ -658,7 +658,7 @@ public static List<String> normalizeHostNames(Collection<String> names) {
    * Performs a sanity check on the list of hostnames/IPs to verify they at least
    * appear to be valid.
    * @param names - List of hostnames/IPs
-   * @throws UnknownHostException Unknown Host Exception
+   * @throws UnknownHostException Unknown Host Exception.
    */
   public static void verifyHostnames(String[] names) throws UnknownHostException {
     for (String name: names) {
@@ -750,8 +750,8 @@ public static String getHostname() {
   /**
    * Compose a "host:port" string from the address.
    *
-   * @param addr address
-   * @return hort port string
+   * @param addr address.
+   * @return host:port string.
    */
   public static String getHostPortString(InetSocketAddress addr) {
     return addr.getHostName() + ":" + addr.getPort();
@@ -987,7 +987,7 @@ private static String quoteHost(final String hostname) {
   /**
    * isValidSubnet.
-   * @param subnet subnet
+   * @param subnet subnet.
    * @return true if the given string is a subnet specified
    *         using CIDR notation, false otherwise
    */
@@ -1023,7 +1023,7 @@ private static void addMatchingAddrs(NetworkInterface nif,
    * @param returnSubinterfaces
    *            whether to return IPs associated with subinterfaces
    * @throws IllegalArgumentException if subnet is invalid
-   * @return ips
+   * @return ips.
    */
   public static List<InetAddress> getIPs(String subnet,
       boolean returnSubinterfaces) {
@@ -1103,8 +1103,8 @@ public static Set<Integer> getFreeSocketPorts(int numOfPorts) {
    * Return an @{@link InetAddress} to bind to. If bindWildCardAddress is true
    * than returns null.
    *
-   * @param localAddr local addr
-   * @param bindWildCardAddress bind wildcard address
+   * @param localAddr local addr.
+   * @param bindWildCardAddress bind wildcard address.
    * @return InetAddress
    */
  public static InetAddress bindToLocalAddress(InetAddress localAddr, boolean

From 0b4146b40321baa3815f928489e1b017b387c13f Mon Sep 17 00:00:00 2001
From: slfan1989
Date: Mon, 16 May 2022 22:48:46 -0700
Subject: [PATCH 53/53] HADOOP-18229. Fix Check Style.
--- .../apache/hadoop/net/NetworkTopology.java | 4 +- .../apache/hadoop/net/unix/DomainSocket.java | 14 ++-- .../apache/hadoop/security/Credentials.java | 24 +++---- .../org/apache/hadoop/security/Groups.java | 4 +- .../hadoop/security/HadoopKerberosName.java | 2 +- .../org/apache/hadoop/security/KDiag.java | 2 +- .../apache/hadoop/security/KerberosInfo.java | 2 +- .../security/SaslPropertiesResolver.java | 2 +- .../apache/hadoop/security/SaslRpcClient.java | 2 +- .../apache/hadoop/security/SaslRpcServer.java | 12 ++-- .../apache/hadoop/security/SecurityUtil.java | 20 +++--- .../hadoop/security/ShellBasedIdMapping.java | 12 ++-- .../hadoop/security/UserGroupInformation.java | 26 ++++---- .../security/alias/CredentialProvider.java | 2 +- .../security/alias/CredentialShell.java | 4 +- .../security/authorize/AccessControlList.java | 2 +- .../authorize/ImpersonationProvider.java | 4 +- .../hadoop/security/authorize/ProxyUsers.java | 12 ++-- .../ssl/ReloadingX509KeystoreManager.java | 2 +- .../security/token/DelegationTokenIssuer.java | 16 ++--- .../hadoop/security/token/DtFetcher.java | 16 ++--- .../hadoop/security/token/DtUtilShell.java | 4 +- .../apache/hadoop/security/token/Token.java | 6 +- .../hadoop/security/token/TokenRenewer.java | 14 ++-- .../AbstractDelegationTokenSecretManager.java | 55 ++++++++-------- .../web/DelegationTokenAuthenticatedURL.java | 10 +-- .../DelegationTokenAuthenticationFilter.java | 2 +- .../web/DelegationTokenAuthenticator.java | 12 ++-- .../hadoop/service/CompositeService.java | 2 +- .../hadoop/service/ServiceStateModel.java | 4 +- .../service/launcher/ServiceLauncher.java | 2 +- .../org/apache/hadoop/tools/CommandShell.java | 6 +- .../apache/hadoop/tools/GetGroupsBase.java | 2 +- .../org/apache/hadoop/tools/TableListing.java | 8 +-- .../apache/hadoop/util/AsyncDiskService.java | 6 +- .../BlockingThreadPoolExecutorService.java | 2 +- .../org/apache/hadoop/util/CrcComposer.java | 28 ++++---- .../java/org/apache/hadoop/util/CrcUtil.java | 34 +++++----- .../java/org/apache/hadoop/util/Daemon.java | 6 +- .../org/apache/hadoop/util/DataChecksum.java | 40 ++++++------ .../apache/hadoop/util/DirectBufferPool.java | 4 +- .../org/apache/hadoop/util/DiskChecker.java | 12 ++-- .../hadoop/util/DiskValidatorFactory.java | 4 +- .../org/apache/hadoop/util/GcTimeMonitor.java | 26 ++++---- .../org/apache/hadoop/util/GenericsUtil.java | 8 +-- .../java/org/apache/hadoop/util/IPList.java | 2 +- .../org/apache/hadoop/util/IdGenerator.java | 2 +- .../apache/hadoop/util/IdentityHashStore.java | 14 ++-- .../apache/hadoop/util/IndexedSortable.java | 10 +-- .../org/apache/hadoop/util/IndexedSorter.java | 14 ++-- .../apache/hadoop/util/InstrumentedLock.java | 2 +- .../hadoop/util/IntrusiveCollection.java | 30 ++++----- .../apache/hadoop/util/JsonSerialization.java | 4 +- .../apache/hadoop/util/JvmPauseMonitor.java | 4 +- .../apache/hadoop/util/LightWeightGSet.java | 12 ++-- .../hadoop/util/LightWeightResizableGSet.java | 2 +- .../java/org/apache/hadoop/util/Lists.java | 38 +++++------ .../org/apache/hadoop/util/MachineList.java | 6 +- .../apache/hadoop/util/NativeCodeLoader.java | 2 +- .../hadoop/util/NativeLibraryChecker.java | 4 +- .../apache/hadoop/util/PrintJarMainClass.java | 2 +- .../org/apache/hadoop/util/PriorityQueue.java | 16 ++--- .../org/apache/hadoop/util/ProgramDriver.java | 2 +- .../java/org/apache/hadoop/util/Progress.java | 26 ++++---- .../org/apache/hadoop/util/ProtoUtil.java | 8 +-- .../org/apache/hadoop/util/QuickSort.java | 4 +- 
 .../apache/hadoop/util/ReflectionUtils.java   | 14 ++--
 .../java/org/apache/hadoop/util/RunJar.java   |  6 +-
 .../apache/hadoop/util/SequentialNumber.java  |  8 +--
 .../org/apache/hadoop/util/ServletUtil.java   | 16 ++---
 .../java/org/apache/hadoop/util/Sets.java     | 64 +++++++++----------
 .../java/org/apache/hadoop/util/Shell.java    | 50 +++++++--------
 .../apache/hadoop/util/StringInterner.java    |  4 +-
 .../org/apache/hadoop/util/StringUtils.java   | 56 ++++++++--------
 .../java/org/apache/hadoop/util/Time.java     |  2 +-
 .../java/org/apache/hadoop/util/Tool.java     |  2 +-
 .../org/apache/hadoop/util/ToolRunner.java    |  6 +-
 .../java/org/apache/hadoop/util/XMLUtils.java |  2 +-
 .../java/org/apache/hadoop/util/ZKUtil.java   |  2 +-
 .../org/apache/hadoop/util/bloom/Key.java     |  4 +-
 .../hadoop/util/concurrent/AsyncGet.java      |  6 +-
 .../hadoop/util/curator/ZKCuratorManager.java | 14 ++--
 .../functional/CommonCallableSupplier.java    |  6 +-
 .../util/functional/RemoteIterators.java      |  6 +-
 84 files changed, 475 insertions(+), 478 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
index 7764ab6b42010..6644b3911b844 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
@@ -415,7 +415,7 @@ public boolean isOnSameRack(Node node1, Node node2) {
   }
 
   /**
-   * @return Check if network topology is aware of NodeGroup
+   * @return Check if network topology is aware of NodeGroup.
    */
   public boolean isNodeGroupAware() {
     return false;
@@ -947,7 +947,7 @@ public void sortByDistanceUsingNetworkLocation(Node reader, Node[] nodes,
    * @param activeLen Number of active nodes at the front of the array
    * @param secondarySort a secondary sorting strategy which can inject into
    *     that point from outside to help sort the same distance.
-   * @param <T> Generics Type T
+   * @param <T> Generics Type T.
    */
   public <T extends Node> void sortByDistanceUsingNetworkLocation(Node reader,
      T[] nodes, int activeLen, Consumer<List<T>> secondarySort) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java
index 325dbfe888e94..73fff0313a58c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java
@@ -107,7 +107,7 @@ native static void validateSocketPathSecurity0(String path,
   /**
    * Return true only if UNIX domain sockets are available.
    *
-   * @return loadingFailureReason
+   * @return loadingFailureReason.
    */
   public static String getLoadingFailureReason() {
     return loadingFailureReason;
@@ -416,7 +416,7 @@ private native static void sendFileDescriptors0(int fd,
    * one byte.
    * @param offset The offset in the jbuf array to start at.
    * @param length Length of the jbuf array to use.
-   * @throws IOException raised on errors performing I/O.
+   * @throws IOException raised on errors performing I/O.
   */
  public void sendFileDescriptors(FileDescriptor descriptors[],
      byte jbuf[], int offset, int length) throws IOException {
@@ -438,11 +438,11 @@ private static native int receiveFileDescriptors0(int fd,
    * Receive some FileDescriptor objects from the process on the other side of
    * this socket, and wrap them in FileInputStream objects.
    *
-   * @param streams input stream
-   * @param buf input buf
-   * @param offset input offset
-   * @param length input length
-   * @return wrap them in FileInputStream objects
+   * @param streams input stream.
+   * @param buf input buf.
+   * @param offset input offset.
+   * @param length input length.
+   * @return wrap them in FileInputStream objects.
    * @throws IOException raised on errors performing I/O.
    */
   public int recvFileInputStreams(FileInputStream[] streams, byte buf[],
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
index 4f51701ec5d60..ef309cb2247fd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
@@ -148,7 +148,7 @@ public Collection<Token<? extends TokenIdentifier>> getAllTokens() {
   /**
    * Returns an unmodifiable version of the full map of aliases to Tokens.
    *
-   * @return TokenMap
+   * @return TokenMap.
    */
   public Map<Text, Token<? extends TokenIdentifier>> getTokenMap() {
     return Collections.unmodifiableMap(tokenMap);
@@ -197,7 +197,7 @@ public void removeSecretKey(Text alias) {
   /**
    * Return all the secret key entries in the in-memory map.
    *
-   * @return Text List
+   * @return Text List.
    */
   public List<Text> getAllSecretKeys() {
     List<Text> list = new java.util.ArrayList<Text>();
@@ -209,7 +209,7 @@ public List<Text> getAllSecretKeys() {
   /**
    * Returns an unmodifiable version of the full map of aliases to secret keys.
    *
-   * @return SecretKeyMap
+   * @return SecretKeyMap.
    */
   public Map<Text, byte[]> getSecretKeyMap() {
     return Collections.unmodifiableMap(secretKeysMap);
@@ -217,10 +217,10 @@ public Map<Text, byte[]> getSecretKeyMap() {
 
   /**
    * Convenience method for reading a token storage file and loading its Tokens.
-   * @param filename filename
-   * @param conf configuration
+   * @param filename filename.
+   * @param conf configuration.
    * @throws IOException raised on errors performing I/O.
-   * @return Credentials
+   * @return Credentials.
    */
   public static Credentials readTokenStorageFile(Path filename,
                                                  Configuration conf)
@@ -242,10 +242,10 @@ public static Credentials readTokenStorageFile(Path filename,
 
   /**
    * Convenience method for reading a token storage file and loading its Tokens.
-   * @param filename filename
-   * @param conf configuration
+   * @param filename filename.
+   * @param conf configuration.
    * @throws IOException raised on errors performing I/O.
-   * @return Token
+   * @return Credentials.
    */
   public static Credentials readTokenStorageFile(File filename,
                                                  Configuration conf)
@@ -267,7 +267,7 @@ public static Credentials readTokenStorageFile(File filename,
   /**
    * Convenience method for reading a token from a DataInputStream.
    *
-   * @param in DataInputStream
+   * @param in DataInputStream.
    * @throws IOException raised on errors performing I/O.
    */
   public void readTokenStorageStream(DataInputStream in) throws IOException {
@@ -348,7 +348,7 @@ public void writeTokenStorageFile(Path filename, Configuration conf,
 
   /**
    * Stores all the keys to DataOutput.
-   * @param out DataOutput
+   * @param out DataOutput.
    * @throws IOException raised on errors performing I/O.
*/ @Override @@ -414,7 +414,7 @@ void readProto(DataInput in) throws IOException { /** * Loads all the keys. - * @param in DataInput + * @param in DataInput. * @throws IOException raised on errors performing I/O. */ @Override diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java index 23992ac76192b..1b3adc14283ee 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java @@ -465,7 +465,7 @@ public static Groups getUserToGroupsMappingService() { /** * Get the groups being used to map user-to-groups. - * @param conf configuration + * @param conf configuration. * @return the groups being used to map user-to-groups. */ public static synchronized Groups getUserToGroupsMappingService( @@ -482,7 +482,7 @@ public static synchronized Groups getUserToGroupsMappingService( /** * Create new groups used to map user-to-groups with loaded configuration. - * @param conf configuration + * @param conf configuration. * @return the groups being used to map user-to-groups. */ @Private diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java index 228670b425890..b66f8444528a4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java @@ -45,7 +45,7 @@ public class HadoopKerberosName extends KerberosName { /** * Create a name from the full Kerberos principal name. - * @param name name + * @param name name. */ public HadoopKerberosName(String name) { super(name); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java index ea42d3d962326..ee6a127f0e24f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java @@ -1047,7 +1047,7 @@ private void failif(boolean condition, * @param conf configuration * @param argv argument list * @return an exception - * @throws Exception Exception + * @throws Exception Exception. */ public static int exec(Configuration conf, String... argv) throws Exception { try(KDiag kdiag = new KDiag()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KerberosInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KerberosInfo.java index 0d3f8c4a8a134..e79492adf94e9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KerberosInfo.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KerberosInfo.java @@ -33,7 +33,7 @@ public @interface KerberosInfo { /** * Key for getting server's Kerberos principal name from Configuration. - * @return serverPrincipal + * @return serverPrincipal. 
*/ String serverPrincipal(); String clientPrincipal() default ""; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java index 0688ec6cd1c81..25cc4a8144f05 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java @@ -46,7 +46,7 @@ public class SaslPropertiesResolver implements Configurable{ * Looks up the configuration to see if there is custom class specified. * Constructs the instance by passing the configuration directly to the * constructor to achieve thread safety using final fields. - * @param conf configuration + * @param conf configuration. * @return SaslPropertiesResolver */ public static SaslPropertiesResolver getInstance(Configuration conf) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java index 9878aec7253e7..e5d62389abab7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java @@ -351,7 +351,7 @@ String getServerPrincipal(SaslAuth authType) throws IOException { /** * Do client side SASL authentication with server via the given IpcStreams. * - * @param ipcStreams ipcStreams + * @param ipcStreams ipcStreams. * @return AuthMethod used to negotiate the connection * @throws IOException raised on errors performing I/O. */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java index 9139f42bcabbb..b61b6cc18414d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java @@ -210,8 +210,8 @@ static char[] encodePassword(byte[] password) { /** * Splitting fully qualified Kerberos name into parts. - * @param fullName fullName - * @return splitKerberosName + * @param fullName fullName. + * @return splitKerberosName. */ public static String[] splitKerberosName(String fullName) { return fullName.split("[/@]"); @@ -246,7 +246,7 @@ private static AuthMethod valueOf(byte code) { /** * Return the SASL mechanism name. - * @return mechanismName + * @return mechanismName. */ public String getMechanismName() { return mechanismName; @@ -255,9 +255,9 @@ public String getMechanismName() { /** * Read from in. * - * @param in DataInput + * @param in DataInput. * @throws IOException raised on errors performing I/O. - * @return AuthMethod + * @return AuthMethod. */ public static AuthMethod read(DataInput in) throws IOException { return valueOf(in.readByte()); @@ -265,7 +265,7 @@ public static AuthMethod read(DataInput in) throws IOException { /** * Write to out. - * @param out DataOutput + * @param out DataOutput. * @throws IOException raised on errors performing I/O. 
   */
  public void write(DataOutput out) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index 277a037a53742..2b9822a3d4817 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -115,9 +115,9 @@ private static void setConfigurationInternal(Configuration conf) {
   }
 
   /**
-   * For use only by tests and initialization
+   * For use only by tests and initialization.
    *
-   * @param flag flag
+   * @param flag flag.
    */
   @InterfaceAudience.Private
   @VisibleForTesting
@@ -490,9 +490,9 @@ public static Text buildTokenService(URI uri) {
    * user cannot be determined, this will log a FATAL error and exit
    * the whole JVM.
    *
-   * @param action action
-   * @param <T> generic type T
-   * @return generic type T
+   * @param action action.
+   * @param <T> generic type T.
+   * @return generic type T.
    */
   public static <T> T doAsLoginUserOrFatal(PrivilegedAction<T> action) {
     if (UserGroupInformation.isSecurityEnabled()) {
@@ -515,7 +515,7 @@ public static <T> T doAsLoginUserOrFatal(PrivilegedAction<T> action) {
    * InterruptedException is thrown, it is converted to an IOException.
    *
    * @param action the action to perform
-   * @param <T> Generics Type T
+   * @param <T> Generics Type T.
    * @return the result of the action
    * @throws IOException in the event of error
    */
@@ -529,7 +529,7 @@ public static <T> T doAsLoginUser(PrivilegedExceptionAction<T> action)
    * InterruptedException is thrown, it is converted to an IOException.
    *
    * @param action the action to perform
-   * @param <T> generic type T
+   * @param <T> generic type T.
    * @return the result of the action
    * @throws IOException in the event of error
    */
@@ -754,12 +754,12 @@ public static boolean isPrivilegedPort(final int port) {
   /**
    * Utility method to fetch ZK auth info from the configuration.
    *
-   * @param conf configuration
-   * @param configKey config key
+   * @param conf configuration.
+   * @param configKey config key.
    * @throws java.io.IOException if the Zookeeper ACLs configuration file
    *                             cannot be read
    * @throws ZKUtil.BadAuthFormatException if the auth format is invalid
-   * @return ZKAuthInfo List
+   * @return ZKAuthInfo List.
    */
   public static List<ZKAuthInfo> getZKAuthInfos(Configuration conf,
       String configKey) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
index 3cb9523ee3207..c28471a3bdad9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
@@ -211,13 +211,13 @@ private static Integer parseId(final String idStr) {
    * Get the list of users or groups returned by the specified command,
    * and save them in the corresponding map.
    *
-   * @param map map
-   * @param mapName mapName
-   * @param command command
-   * @param staticMapping staticMapping
-   * @param regex regex
+   * @param map map.
+   * @param mapName mapName.
+   * @param command command.
+   * @param staticMapping staticMapping.
+   * @param regex regex.
    * @throws IOException raised on errors performing I/O.
-   * @return updateMapInternal
+   * @return true if the map was updated successfully.
   */
  @VisibleForTesting
  public static boolean updateMapInternal(BiMap<Integer, String> map,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
index c735296192d20..9671d8da38fd3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
@@ -610,7 +610,7 @@ public static UserGroupInformation getBestUGI(
    * @param ticketCache the path to the ticket cache file
    *
    * @throws IOException if the kerberos login fails
-   * @return UserGroupInformation
+   * @return UserGroupInformation.
    */
   @InterfaceAudience.Public
   @InterfaceStability.Evolving
@@ -689,7 +689,7 @@ public static UserGroupInformation getLoginUser() throws IOException {
    * remove the login method that is followed by a space from the username
    * e.g. "jack (auth:SIMPLE)" {@literal ->} "jack"
    *
-   * @param userName userName
+   * @param userName userName.
    * @return userName without login method
    */
   public static String trimLoginMethod(String userName) {
@@ -1349,7 +1349,7 @@ private void unprotectedRelogin(HadoopLoginContext login,
    * @param user the principal name to load from the keytab
    * @param path the path to the keytab file
    * @throws IOException if the keytab file can't be read
-   * @return UserGroupInformation
+   * @return UserGroupInformation.
    */
   public static
   UserGroupInformation loginUserFromKeytabAndReturnUGI(String user,
@@ -1411,7 +1411,7 @@ public static UserGroupInformation createRemoteUser(String user) {
    * Create a user from a login name. It is intended to be used for remote
    * users in RPC, since it won't have any credentials.
    * @param user the full user principal name, must not be empty or null
-   * @param authMethod authMethod
+   * @param authMethod authMethod.
    * @return the UserGroupInformation for the remote user.
    */
   @InterfaceAudience.Public
@@ -1481,8 +1481,8 @@ public static AuthenticationMethod valueOf(AuthMethod authMethod) {
   /**
    * Create a proxy user using username of the effective user and the ugi of the
    * real user.
-   * @param user user
-   * @param realUser realUser
+   * @param user user.
+   * @param realUser realUser.
    * @return proxyUser ugi
    */
   @InterfaceAudience.Public
@@ -1795,7 +1795,7 @@ public String toString() {
   /**
    * Sets the authentication method in the subject
    *
-   * @param authMethod authMethod
+   * @param authMethod authMethod.
    */
   public synchronized
   void setAuthenticationMethod(AuthenticationMethod authMethod) {
@@ -1805,7 +1805,7 @@ void setAuthenticationMethod(AuthenticationMethod authMethod) {
   /**
    * Sets the authentication method in the subject
    *
-   * @param authMethod authMethod
+   * @param authMethod authMethod.
    */
   public void setAuthenticationMethod(AuthMethod authMethod) {
     user.setAuthenticationMethod(AuthenticationMethod.valueOf(authMethod));
@@ -1838,7 +1838,7 @@ public synchronized AuthenticationMethod getRealAuthenticationMethod() {
    * Returns the authentication method of a ugi. If the authentication method is
    * PROXY, returns the authentication method of the real user.
    *
-   * @param ugi ugi
+   * @param ugi ugi.
    * @return AuthenticationMethod
    */
   public static AuthenticationMethod getRealAuthenticationMethod(
@@ -1940,8 +1940,8 @@ public <T> T doAs(PrivilegedExceptionAction<T> action
   /**
    * Log current UGI and token information into specified log.
* @param ugi - UGI - * @param log log - * @param caption caption + * @param log log. + * @param caption caption. */ @InterfaceAudience.LimitedPrivate({"HDFS", "KMS"}) @InterfaceStability.Unstable @@ -1959,7 +1959,7 @@ public static void logUserInfo(Logger log, String caption, /** * Log all (current, real, login) UGI and token info into specified log. * @param ugi - UGI - * @param log - log + * @param log - log. * @throws IOException raised on errors performing I/O. */ @InterfaceAudience.LimitedPrivate({"HDFS", "KMS"}) @@ -2256,7 +2256,7 @@ private static String prependFileAuthority(String keytabPath) { * A test method to print out the current user's UGI. * @param args if there are two arguments, read the user from the keytab * and print it out. - * @throws Exception Exception + * @throws Exception Exception. */ public static void main(String [] args) throws Exception { System.out.println("Getting UGI for current user"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java index d93e0e609b1ae..2779194d85e00 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java @@ -116,7 +116,7 @@ public abstract CredentialEntry getCredentialEntry(String alias) * @param name the alias of the credential * @param credential the credential value for the alias. * @throws IOException raised on errors performing I/O. - * @return CredentialEntry + * @return CredentialEntry. */ public abstract CredentialEntry createCredentialEntry(String name, char[] credential) throws IOException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java index c998bd51a5c38..66df17a181e54 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java @@ -70,7 +70,7 @@ public class CredentialShell extends CommandShell { * % hadoop credential check alias [-provider providerPath] * % hadoop credential delete alias [-provider providerPath] [-f] * - * @param args args + * @param args args. * @return 0 if the argument(s) were recognized, 1 otherwise * @throws IOException raised on errors performing I/O. */ @@ -523,7 +523,7 @@ public void format(String message) { * * @param args * Command line arguments - * @throws Exception exception + * @throws Exception exception. 
*/ public static void main(String[] args) throws Exception { int res = ToolRunner.run(new Configuration(), new CredentialShell(), args); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java index 8453f4f59c6e7..39dc29a79e1f6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java @@ -296,7 +296,7 @@ else if (!users.isEmpty()) { /** * Returns the access control list as a String that can be used for building a * new instance by sending it to the constructor of {@link AccessControlList}. - * @return acl string + * @return acl string. */ public String getAclString() { StringBuilder sb = new StringBuilder(INITIAL_CAPACITY); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java index df022c38076bf..129e1e4dad26a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java @@ -46,7 +46,7 @@ public interface ImpersonationProvider extends Configurable { * be preferred to avoid possibly re-resolving the ip address. * @param user ugi of the effective or proxy user which contains a real user. * @param remoteAddress the ip address of client. - * @throws AuthorizationException Authorization Exception + * @throws AuthorizationException Authorization Exception. */ default void authorize(UserGroupInformation user, String remoteAddress) throws AuthorizationException { @@ -62,7 +62,7 @@ default void authorize(UserGroupInformation user, String remoteAddress) * * @param user ugi of the effective or proxy user which contains a real user * @param remoteAddress the ip address of client - * @throws AuthorizationException Authorization Exception + * @throws AuthorizationException Authorization Exception. */ void authorize(UserGroupInformation user, InetAddress remoteAddress) throws AuthorizationException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java index dede4c925ae0d..cc80708f1854e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java @@ -94,7 +94,7 @@ public static void refreshSuperUserGroupsConfiguration(Configuration conf) { * * @param user ugi of the effective or proxy user which contains a real user * @param remoteAddress the ip address of client - * @throws AuthorizationException Authorization Exception + * @throws AuthorizationException Authorization Exception. 
*/ public static void authorize(UserGroupInformation user, String remoteAddress) throws AuthorizationException { @@ -106,7 +106,7 @@ public static void authorize(UserGroupInformation user, * * @param user ugi of the effective or proxy user which contains a real user * @param remoteAddress the inet address of client - * @throws AuthorizationException Authorization Exception + * @throws AuthorizationException Authorization Exception. */ public static void authorize(UserGroupInformation user, InetAddress remoteAddress) throws AuthorizationException { @@ -125,10 +125,10 @@ private static ImpersonationProvider getSip() { /** * This function is kept to provide backward compatibility. - * @param user user - * @param remoteAddress remote address - * @param conf configuration - * @throws AuthorizationException Authorization Exception + * @param user user. + * @param remoteAddress remote address. + * @param conf configuration. + * @throws AuthorizationException Authorization Exception. * @deprecated use {@link #authorize(UserGroupInformation, String)} instead. */ @Deprecated diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509KeystoreManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509KeystoreManager.java index dd74bea1c5a0d..429304ef64c1c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509KeystoreManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509KeystoreManager.java @@ -64,7 +64,7 @@ public class ReloadingX509KeystoreManager extends X509ExtendedKeyManager { * @param storePassword password of the keystore file. * @param keyPassword The password of the key. * @throws IOException raised on errors performing I/O. - * @throws GeneralSecurityException thrown if create encryptor error + * @throws GeneralSecurityException thrown if create encryptor error. */ public ReloadingX509KeystoreManager(String type, String location, String storePassword, String keyPassword) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java index ad41107e4adc7..77e74a271fc0e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DelegationTokenIssuer.java @@ -39,15 +39,15 @@ public interface DelegationTokenIssuer { * The service name used as the alias for the token in the credential * token map. addDelegationTokens will use this to determine if * a token exists, and if not, add a new token with this alias. - * @return the token + * @return the token. */ String getCanonicalServiceName(); /** * Unconditionally get a new token with the optional renewer. Returning * null indicates the service does not issue tokens. - * @param renewer renewer - * @return the token + * @param renewer renewer. + * @return the token. * @throws IOException raised on errors performing I/O. */ Token getDelegationToken(String renewer) throws IOException; @@ -55,7 +55,7 @@ public interface DelegationTokenIssuer { /** * Issuers may need tokens from additional services. * - * @return delegation token issuer + * @return delegation token issuer. * @throws IOException raised on errors performing I/O. 
*/ default DelegationTokenIssuer[] getAdditionalTokenIssuers() @@ -89,10 +89,10 @@ default Token[] addDelegationTokens( /** * NEVER call this method directly. * - * @param issuer issuer - * @param renewer renewer - * @param credentials cache in which to add new delegation tokens - * @param tokens list of new delegation tokens + * @param issuer issuer. + * @param renewer renewer. + * @param credentials cache in which to add new delegation tokens. + * @param tokens list of new delegation tokens. * @throws IOException raised on errors performing I/O. */ @InterfaceAudience.Private diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFetcher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFetcher.java index e82920f14f7f3..4b22df2043e8c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFetcher.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFetcher.java @@ -30,13 +30,13 @@ public interface DtFetcher { /** * Return a key used to identify the object/service implementation. - * @return ServiceName + * @return ServiceName. */ Text getServiceName(); /** * Used to allow the service API to indicate whether a token is required. - * @return isTokenRequired + * @return isTokenRequired. */ boolean isTokenRequired(); @@ -44,12 +44,12 @@ public interface DtFetcher { * Add any number of delegation tokens to Credentials object and return * a token instance that is appropriate for aliasing, or null if none. * - * @param conf configuration - * @param creds credentials - * @param renewer renewer - * @param url url - * @throws Exception Exception - * @return DelegationTokens + * @param conf configuration. + * @param creds credentials. + * @param renewer renewer. + * @param url url. + * @throws Exception Exception. + * @return DelegationTokens. */ Token addDelegationTokens(Configuration conf, Credentials creds, String renewer, String url) throws Exception; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java index 1e137cf522b3a..9e34ebf4a2a58 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java @@ -109,9 +109,9 @@ private String[] maybeDoLoginFromKeytabAndPrincipal(String[] args) * Parse the command line arguments and initialize subcommand. * Also will attempt to perform Kerberos login if both -principal and -keytab * flags are passed in args array. - * @param args args + * @param args args. * @return 0 if the argument(s) were recognized, 1 otherwise - * @throws Exception Exception + * @throws Exception Exception. */ @Override protected int init(String[] args) throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java index 1cb2b2da0da97..33314060a5563 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java @@ -193,7 +193,7 @@ public synchronized Text getKind() { /** * Set the token kind. 
This is only intended to be used by services that * wrap another service's token. - * @param newKind newKind + * @param newKind newKind. */ @InterfaceAudience.Private public synchronized void setKind(Text newKind) { @@ -489,7 +489,7 @@ public boolean isManaged() throws IOException { /** * Renew this delegation token. - * @param conf configuration + * @param conf configuration. * @return the new expiration time * @throws IOException raised on errors performing I/O. * @throws InterruptedException if the thread is interrupted. @@ -502,7 +502,7 @@ public long renew(Configuration conf /** * Cancel this delegation token. * - * @param conf configuration + * @param conf configuration. * @throws IOException raised on errors performing I/O. * @throws InterruptedException if the thread is interrupted. */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java index 032978aad3b6d..eba4bf6daa42f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenRenewer.java @@ -51,11 +51,11 @@ public abstract class TokenRenewer { /** * Renew the given token. * - * @param token the token being checked - * @param conf configuration + * @param token the token being checked. + * @param conf configuration. * - * @return the new expiration time - * @throws IOException raised on errors performing I/O. + * @return the new expiration time. + * @throws IOException raised on errors performing I/O. * @throws InterruptedException thrown when a thread is waiting, sleeping, * or otherwise occupied, and the thread is interrupted, * either before or during the activity. @@ -67,10 +67,10 @@ public abstract long renew(Token token, /** * Cancel the given token. * - * @param token the token being checked - * @param conf configuration + * @param token the token being checked. + * @param conf configuration. * - * @throws IOException raised on errors performing I/O. + * @throws IOException raised on errors performing I/O. * @throws InterruptedException thrown when a thread is waiting, sleeping, * or otherwise occupied, and the thread is interrupted, * either before or during the activity. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java index 063d0a8687b8e..baf3a7ff069b9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java @@ -190,7 +190,7 @@ public long getCurrentTokensSize() { * Add a previously used master key to cache (when NN restarts), * should be called before activate(). * - * @param key delegation key + * @param key delegation key. * @throws IOException raised on errors performing I/O. */ public synchronized void addKey(DelegationKey key) throws IOException { @@ -244,7 +244,7 @@ protected void updateStoredToken(TokenIdent ident, long renewDate) throws IOExce * For subclasses externalizing the storage, for example Zookeeper * based implementations. 
* - * @return currentId + * @return currentId. */ protected synchronized int getCurrentKeyId() { return currentId; @@ -254,7 +254,7 @@ protected synchronized int getCurrentKeyId() { * For subclasses externalizing the storage, for example Zookeeper * based implementations. * - * @return currentId + * @return currentId. */ protected synchronized int incrementCurrentKeyId() { return ++currentId; @@ -264,7 +264,7 @@ protected synchronized int incrementCurrentKeyId() { * For subclasses externalizing the storage, for example Zookeeper * based implementations. * - * @param keyId keyId + * @param keyId keyId. */ protected synchronized void setCurrentKeyId(int keyId) { currentId = keyId; @@ -274,7 +274,7 @@ protected synchronized void setCurrentKeyId(int keyId) { * For subclasses externalizing the storage, for example Zookeeper * based implementations. * - * @return delegationTokenSequenceNumber + * @return delegationTokenSequenceNumber. */ protected synchronized int getDelegationTokenSeqNum() { return delegationTokenSequenceNumber; @@ -284,7 +284,7 @@ protected synchronized int getDelegationTokenSeqNum() { * For subclasses externalizing the storage, for example Zookeeper * based implementations. * - * @return delegationTokenSequenceNumber + * @return delegationTokenSequenceNumber. */ protected synchronized int incrementDelegationTokenSeqNum() { return ++delegationTokenSequenceNumber; @@ -294,7 +294,7 @@ protected synchronized int incrementDelegationTokenSeqNum() { * For subclasses externalizing the storage, for example Zookeeper * based implementations. * - * @param seqNum seqNum + * @param seqNum seqNum. */ protected synchronized void setDelegationTokenSeqNum(int seqNum) { delegationTokenSequenceNumber = seqNum; @@ -304,8 +304,8 @@ protected synchronized void setDelegationTokenSeqNum(int seqNum) { * For subclasses externalizing the storage, for example Zookeeper * based implementations. * - * @param keyId keyId - * @return DelegationKey + * @param keyId keyId. + * @return DelegationKey. */ protected DelegationKey getDelegationKey(int keyId) { return allKeys.get(keyId); @@ -315,7 +315,7 @@ protected DelegationKey getDelegationKey(int keyId) { * For subclasses externalizing the storage, for example Zookeeper * based implementations. * - * @param key DelegationKey + * @param key DelegationKey. * @throws IOException raised on errors performing I/O. */ protected void storeDelegationKey(DelegationKey key) throws IOException { @@ -327,7 +327,7 @@ protected void storeDelegationKey(DelegationKey key) throws IOException { * For subclasses externalizing the storage, for example Zookeeper * based implementations. * - * @param key DelegationKey + * @param key DelegationKey. * @throws IOException raised on errors performing I/O. */ protected void updateDelegationKey(DelegationKey key) throws IOException { @@ -338,8 +338,8 @@ protected void updateDelegationKey(DelegationKey key) throws IOException { * For subclasses externalizing the storage, for example Zookeeper * based implementations * - * @param ident ident - * @return DelegationTokenInformation + * @param ident ident. + * @return DelegationTokenInformation. */ protected DelegationTokenInformation getTokenInfo(TokenIdent ident) { return currentTokens.get(ident); @@ -349,8 +349,8 @@ protected DelegationTokenInformation getTokenInfo(TokenIdent ident) { * For subclasses externalizing the storage, for example Zookeeper * based implementations. * - * @param ident ident - * @param tokenInfo tokenInfo + * @param ident ident. + * @param tokenInfo tokenInfo. 
* @throws IOException raised on errors performing I/O. */ protected void storeToken(TokenIdent ident, @@ -364,8 +364,8 @@ protected void storeToken(TokenIdent ident, * For subclasses externalizing the storage, for example Zookeeper * based implementations. * - * @param ident ident - * @param tokenInfo tokenInfo + * @param ident ident. + * @param tokenInfo tokenInfo. * @throws IOException raised on errors performing I/O. */ protected void updateToken(TokenIdent ident, @@ -502,9 +502,9 @@ protected synchronized byte[] createPassword(TokenIdent identifier) { * if the token is expired. Note that this method should be called with * acquiring the secret manager's monitor. * - * @param identifier identifier - * @throws InvalidToken invalid token exception - * @return DelegationTokenInformation + * @param identifier identifier. + * @throws InvalidToken invalid token exception. + * @return DelegationTokenInformation. */ protected DelegationTokenInformation checkToken(TokenIdent identifier) throws InvalidToken { @@ -548,7 +548,7 @@ public synchronized String getTokenTrackingId(TokenIdent identifier) { * Verifies that the given identifier and password are valid and match. * @param identifier Token identifier. * @param password Password in the token. - * @throws InvalidToken InvalidToken + * @throws InvalidToken InvalidToken. */ public synchronized void verifyToken(TokenIdent identifier, byte[] password) throws InvalidToken { @@ -623,8 +623,8 @@ public synchronized long renewToken(Token token, /** * Cancel a token by removing it from cache. * - * @param token token - * @param canceller canceller + * @param token token. + * @param canceller canceller. * @return Identifier of the canceled token * @throws InvalidToken for invalid token * @throws AccessControlException if the user isn't allowed to cancel @@ -689,23 +689,20 @@ public DelegationTokenInformation(long renewDate, byte[] password, this.trackingId = trackingId; } /** - * returns renew date. - * @return renew date + * @return returns renew date. */ public long getRenewDate() { return renewDate; } /** - * returns password. - * @return password + * @return returns password. */ byte[] getPassword() { return password; } /** - * returns tracking id. - * @return tracking id + * @return returns tracking id. */ public String getTrackingId() { return trackingId; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java index 2815f56818501..31eef61c8f57c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java @@ -337,9 +337,9 @@ public HttpURLConnection openConnection(URL url, Token token, String doAs) /** * Select a delegation token from all tokens in credentials, based on url. * - * @param url url - * @param creds credentials - * @return token + * @param url url. + * @param creds credentials. + * @return token. */ @InterfaceAudience.Private public org.apache.hadoop.security.token.Token @@ -411,7 +411,7 @@ public HttpURLConnection openConnection(URL url, Token token, String doAs) * @param token the authentication token with the Delegation Token to renew. * @throws IOException if an IO error occurred. 
* @throws AuthenticationException if an authentication exception occurred. - * @return delegation token long value + * @return delegation token long value. */ public long renewDelegationToken(URL url, Token token) throws IOException, AuthenticationException { @@ -428,7 +428,7 @@ public long renewDelegationToken(URL url, Token token) * @param doAsUser the user to do as, which will be the token owner. * @throws IOException if an IO error occurred. * @throws AuthenticationException if an authentication exception occurred. - * @return delegation token long value + * @return delegation token long value. */ public long renewDelegationToken(URL url, Token token, String doAsUser) throws IOException, AuthenticationException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java index 3de8d3ab91377..571e54c5f907c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java @@ -125,7 +125,7 @@ protected Properties getConfiguration(String configPrefix, * Set AUTH_TYPE property to the name of the corresponding authentication * handler class based on the input properties. * @param props input properties. - * @throws ServletException servlet exception + * @throws ServletException servlet exception. */ protected void setAuthHandlerClass(Properties props) throws ServletException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java index 2694df5a97485..1b2b6ca1ef693 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java @@ -163,7 +163,7 @@ public void authenticate(URL url, AuthenticatedURL.Token token) * @param renewer the renewer user. * @throws IOException if an IO error occurred. * @throws AuthenticationException if an authentication exception occurred. - * @return abstract delegation token identifier + * @return abstract delegation token identifier. */ public Token getDelegationToken(URL url, AuthenticatedURL.Token token, String renewer) @@ -183,7 +183,7 @@ public Token getDelegationToken(URL url, * @param doAsUser the user to do as, which will be the token owner. * @throws IOException if an IO error occurred. * @throws AuthenticationException if an authentication exception occurred. - * @return abstract delegation token identifier + * @return abstract delegation token identifier. */ public Token getDelegationToken(URL url, AuthenticatedURL.Token token, String renewer, String doAsUser) @@ -212,7 +212,7 @@ public Token getDelegationToken(URL url, * @param dToken abstract delegation token identifier. * @throws IOException if an IO error occurred. * @throws AuthenticationException if an authentication exception occurred. 
- * @return delegation token long value + * @return delegation token long value. */ public long renewDelegationToken(URL url, AuthenticatedURL.Token token, @@ -232,7 +232,7 @@ public long renewDelegationToken(URL url, * @param dToken abstract delegation token identifier. * @throws IOException if an IO error occurred. * @throws AuthenticationException if an authentication exception occurred. - * @return delegation token long value + * @return delegation token long value. */ public long renewDelegationToken(URL url, AuthenticatedURL.Token token, @@ -251,7 +251,7 @@ public long renewDelegationToken(URL url, * @param url the URL to cancel the delegation token from. Only HTTP/S URLs * are supported. * @param token the authentication token with the Delegation Token to cancel. - * @param dToken abstract delegation token identifier + * @param dToken abstract delegation token identifier. * @throws IOException if an IO error occurred. */ public void cancelDelegationToken(URL url, @@ -268,7 +268,7 @@ public void cancelDelegationToken(URL url, * @param url the URL to cancel the delegation token from. Only HTTP/S URLs * are supported. * @param token the authentication token with the Delegation Token to cancel. - * @param dToken abstract delegation token identifier + * @param dToken abstract delegation token identifier. * @param doAsUser the user to do as, which will be the token owner. * @throws IOException if an IO error occurred. */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java index b74a820faec53..b08f9952d863e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java @@ -82,7 +82,7 @@ protected void addService(Service service) { /** * If the passed object is an instance of {@link Service}, * add it to the list of services managed by this {@link CompositeService} - * @param object object + * @param object object. * @return true if a service is added, false otherwise. */ protected boolean addIfService(Object object) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceStateModel.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceStateModel.java index b776784535b59..c075cbb89a8a6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceStateModel.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceStateModel.java @@ -55,7 +55,7 @@ public class ServiceStateModel { * Create the service state model in the {@link Service.STATE#NOTINITED} * state. * - * @param name input name + * @param name input name. */ public ServiceStateModel(String name) { this(name, Service.STATE.NOTINITED); @@ -64,7 +64,7 @@ public ServiceStateModel(String name) { /** * Create a service state model instance in the chosen state * @param state the starting state - * @param name input name + * @param name input name. 
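+ * <p>For example, using the {@link Service.STATE#NOTINITED} constant
+ * referenced above (names are illustrative):</p>
+ * <pre>
+ * ServiceStateModel model =
+ *     new ServiceStateModel("my-service", Service.STATE.NOTINITED);
+ * </pre>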
*/ public ServiceStateModel(String name, Service.STATE state) { this.state = state; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java index 2afba9b098cf8..cef69ebd4b789 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java @@ -567,7 +567,7 @@ public ExitUtil.ExitException launchService(Configuration conf, * @throws Exception any other failure -if it implements * {@link ExitCodeProvider} then it defines the exit code for any * containing exception - * @return status code + * @return status code. */ protected int coreServiceLaunch(Configuration conf, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/CommandShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/CommandShell.java index 4e5f0fa4054b7..25f79a63a233c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/CommandShell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/CommandShell.java @@ -36,7 +36,7 @@ public abstract class CommandShell extends Configured implements Tool { /** * Return usage string for the command including any summary of subcommands. - * @return command usage + * @return command usage. */ public abstract String getCommandUsage(); @@ -85,9 +85,9 @@ public int run(String[] args) throws Exception { /** * Parse the command line arguments and initialize subcommand instance. - * @param args arguments + * @param args arguments. * @return 0 if the argument(s) were recognized, 1 otherwise - * @throws Exception init exception + * @throws Exception init exception. */ protected abstract int init(String[] args) throws Exception; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java index 6c34d1c35caf4..548409ea58e09 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java @@ -38,7 +38,7 @@ public abstract class GetGroupsBase extends Configured implements Tool { /** * Create an instance of this tool using the given configuration. - * @param conf configuration + * @param conf configuration. */ protected GetGroupsBase(Configuration conf) { this(conf, System.out); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/TableListing.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/TableListing.java index b4264b8a2af14..d1d933c737d2b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/TableListing.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/TableListing.java @@ -167,7 +167,7 @@ public Builder hideHeaders() { /** * Whether to show column headers in table output. This is the default. * - * @return Builder + * @return Builder. */ public Builder showHeaders() { this.showHeader = true; @@ -178,8 +178,8 @@ public Builder showHeaders() { * Set the maximum width of a row in the TableListing. 
Must have one or * more wrappable fields for this to take effect. * - * @param width width - * @return Builder + * @param width width. + * @return Builder. */ public Builder wrapWidth(int width) { this.wrapWidth = width; @@ -189,7 +189,7 @@ public Builder wrapWidth(int width) { /** * Create a new TableListing. * - * @return TableListing + * @return TableListing. */ public TableListing build() { return new TableListing(columns.toArray(new Column[0]), showHeader, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java index 52a33e87bf934..a3bf4faf0a980 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java @@ -95,8 +95,8 @@ public Thread newThread(Runnable r) { /** * Execute the task sometime in the future, using ThreadPools. * - * @param root root - * @param task task + * @param root root. + * @param task task. */ public synchronized void execute(String root, Runnable task) { ThreadPoolExecutor executor = executors.get(root); @@ -149,7 +149,7 @@ public synchronized boolean awaitTermination(long milliseconds) /** * Shut down all ThreadPools immediately. * - * @return Runnable List + * @return Runnable List. */ public synchronized List shutdownNow() { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java index 824c035c3135d..5c90e4bd2d601 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java @@ -117,7 +117,7 @@ private BlockingThreadPoolExecutorService(int permitCount, * @param keepAliveTime time until threads are cleaned up in {@code unit} * @param unit time unit * @param prefixName prefix of name for threads - * @return BlockingThreadPoolExecutorService + * @return BlockingThreadPoolExecutorService. */ public static BlockingThreadPoolExecutorService newInstance( int activeTasks, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcComposer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcComposer.java index 4037bd64e7fa1..5bf773cef3836 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcComposer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcComposer.java @@ -52,8 +52,8 @@ public class CrcComposer { * Returns a CrcComposer which will collapse all ingested CRCs into a single * value. * - * @param type type - * @param bytesPerCrcHint bytesPerCrcHint + * @param type type. + * @param bytesPerCrcHint bytesPerCrcHint. * @throws IOException raised on errors performing I/O. * @return a CrcComposer which will collapse all ingested CRCs into a single value. */ @@ -73,10 +73,10 @@ public static CrcComposer newCrcComposer( * a stripeLength greater than the total underlying data size is equivalent * to using a non-striped CrcComposer. 
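+ * <p>A usage sketch, assuming the {@code DataChecksum.Type} overload and
+ * illustrative sizes (512 bytes per CRC, 64 KB stripes):</p>
+ * <pre>
+ * CrcComposer composer = CrcComposer.newStripedCrcComposer(
+ *     DataChecksum.Type.CRC32C, 512, 64 * 1024);
+ * </pre>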
 *
- * @param type type
- * @param bytesPerCrcHint bytesPerCrcHint
- * @param stripeLength stripeLength
- * @return a CrcComposer which will collapse CRCs for every combined
+ * @param type type.
+ * @param bytesPerCrcHint bytesPerCrcHint.
+ * @param stripeLength stripeLength.
+ * @return a CrcComposer which will collapse CRCs for every combined
 * underlying data size which aligns with the specified stripe boundary.
 * @throws IOException raised on errors performing I/O.
 */
@@ -114,10 +114,10 @@ public static CrcComposer newStripedCrcComposer(
 * each CRC expected to correspond to exactly {@code bytesPerCrc} underlying
 * data bytes.
 *
- * @param crcBuffer crcBuffer
- * @param offset offset
+ * @param crcBuffer crcBuffer.
+ * @param offset offset.
 * @param length must be a multiple of the expected byte-size of a CRC.
- * @param bytesPerCrc bytesPerCrc
+ * @param bytesPerCrc bytesPerCrc.
 * @throws IOException raised on errors performing I/O.
 */
 public void update(
@@ -142,9 +142,9 @@ public void update(
 * out of {@code checksumIn}, with each CRC expected to correspond to exactly
 * {@code bytesPerCrc} underlying data bytes.
 *
- * @param checksumIn checksumIn
- * @param numChecksumsToRead numChecksumsToRead
- * @param bytesPerCrc bytesPerCrc
+ * @param checksumIn checksumIn.
+ * @param numChecksumsToRead numChecksumsToRead.
+ * @param bytesPerCrc bytesPerCrc.
 * @throws IOException raised on errors performing I/O.
 */
 public void update(
@@ -160,8 +160,8 @@ public void update(
 * Updates with a single additional CRC which corresponds to an underlying
 * data size of {@code bytesPerCrc}.
 *
- * @param crcB crcB
- * @param bytesPerCrc bytesPerCrc
+ * @param crcB crcB.
+ * @param bytesPerCrc bytesPerCrc.
 * @throws IOException raised on errors performing I/O.
 */
 public void update(int crcB, long bytesPerCrc) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcUtil.java
index 650c81cf5bfec..c8183b042fb1a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CrcUtil.java
@@ -45,9 +45,9 @@ private CrcUtil() {
 * in "reversed" (little-endian) format such that {@code mod & 1} represents
 * x^31 and has an implicit term x^32.
 *
- * @param lengthBytes lengthBytes
- * @param mod mod
- * @return monomial
+ * @param lengthBytes lengthBytes.
+ * @param mod mod.
+ * @return monomial.
 */
 public static int getMonomial(long lengthBytes, int mod) {
 if (lengthBytes == 0) {
@@ -79,11 +79,11 @@ public static int getMonomial(long lengthBytes, int mod) {
 /**
 * composeWithMonomial.
 *
- * @param crcA crcA
- * @param crcB crcB
+ * @param crcA crcA.
+ * @param crcB crcB.
 * @param monomial Precomputed x^(lengthBInBytes * 8) mod {@code mod}
- * @param mod mod
- * @return compose with monomial
+ * @param mod mod.
+ * @return compose with monomial.
 */
 public static int composeWithMonomial(
 int crcA, int crcB, int monomial, int mod) {
@@ -93,10 +93,10 @@ public static int composeWithMonomial(
 /**
 * compose.
 *
- * @param crcA crcA
- * @param crcB crcB
+ * @param crcA crcA.
+ * @param crcB crcB.
 * @param lengthB length of content corresponding to {@code crcB}, in bytes.
- * @param mod mod
+ * @param mod mod.
 * @return compose result.
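+ * <p>Illustrative only; {@code crcA}, {@code crcB}, {@code lengthOfB} and
+ * {@code polynomial} are assumed to be supplied by the caller:</p>
+ * <pre>
+ * // CRC of A concatenated with B, given the two CRCs computed separately.
+ * int crcAB = CrcUtil.compose(crcA, crcB, lengthOfB, polynomial);
+ * </pre>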
*/ public static int compose(int crcA, int crcB, long lengthB, int mod) { @@ -129,9 +129,9 @@ public static byte[] intToBytes(int value) { * starting at {@code offset}. buf.length must be greater than or * equal to offset + 4. * - * @param buf buf size - * @param offset offset - * @param value value + * @param buf buf size. + * @param offset offset. + * @param value value. * @throws IOException raised on errors performing I/O. */ public static void writeInt(byte[] buf, int offset, int value) @@ -151,9 +151,9 @@ public static void writeInt(byte[] buf, int offset, int value) * Reads 4-byte big-endian int value from {@code buf} starting at * {@code offset}. buf.length must be greater than or equal to offset + 4. * - * @param offset offset - * @param buf buf - * @return int + * @param offset offset. + * @param buf buf. + * @return int. * @throws IOException raised on errors performing I/O. */ public static int readInt(byte[] buf, int offset) @@ -194,7 +194,7 @@ public static String toSingleCrcString(final byte[] bytes) * expecting it to be divisible by CRC byte size, and returns a list of * hex formatted values. * - * @param bytes bytes + * @param bytes bytes. * @throws IOException raised on errors performing I/O. * @return a list of hex formatted values. */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Daemon.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Daemon.java index bdbe4823db2b9..f735b82e4289b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Daemon.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Daemon.java @@ -54,7 +54,7 @@ public Daemon() { /** * Construct a daemon thread. - * @param runnable runnable + * @param runnable runnable. */ public Daemon(Runnable runnable) { super(runnable); @@ -64,8 +64,8 @@ public Daemon(Runnable runnable) { /** * Construct a daemon thread to be part of a specified thread group. - * @param group thread group - * @param runnable runnable + * @param group thread group. + * @param runnable runnable. */ public Daemon(ThreadGroup group, Runnable runnable) { super(group, runnable); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java index 5f3ba9c437af6..1c37d5944c6f2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java @@ -73,7 +73,7 @@ public enum Type { * the type corresponding to the id. * * @return the type corresponding to the id. - * @param id id + * @param id id. */ public static Type valueOf(int id) { if (id < 0 || id >= values().length) { @@ -88,7 +88,7 @@ public static Type valueOf(int id) { * Create a Crc32 Checksum object. The implementation of the Crc32 algorithm * is chosen depending on the platform. * - * @return Checksum + * @return Checksum. */ public static Checksum newCrc32() { return new CRC32(); @@ -152,8 +152,8 @@ public static DataChecksum newDataChecksum(Type type, int bytesPerChecksum ) { /** * Creates a DataChecksum from HEADER_LEN bytes from arr[offset]. * - * @param bytes bytes - * @param offset offset + * @param bytes bytes. + * @param offset offset. * @return DataChecksum of the type in the array or null in case of an error. * @throws IOException raised on errors performing I/O. 
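+ * <p>Sketch: the header bytes would typically come from {@code getHeader()}
+ * or a stream written by {@code writeHeader}:</p>
+ * <pre>
+ * byte[] header = checksum.getHeader();
+ * DataChecksum restored = DataChecksum.newDataChecksum(header, 0);
+ * </pre>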
*/ @@ -184,10 +184,10 @@ public static DataChecksum newDataChecksum(byte[] bytes, int offset) * This constructs a DataChecksum by reading HEADER_LEN bytes from input * stream in. * - * @param in data input stream + * @param in data input stream. * @throws IOException raised on errors performing I/O. * @return DataChecksum by reading HEADER_LEN - * bytes from input stream + * bytes from input stream. */ public static DataChecksum newDataChecksum( DataInputStream in ) throws IOException { @@ -214,7 +214,7 @@ private static Type mapByteToChecksumType(int type) /** * Writes the checksum header to the output stream out. * - * @param out output stream + * @param out output stream. * @throws IOException raised on errors performing I/O. */ public void writeHeader( DataOutputStream out ) @@ -238,8 +238,8 @@ public byte[] getHeader() { * Writes the current checksum to the stream. * If reset is true, then resets the checksum. * - * @param out out - * @param reset reset + * @param out out. + * @param reset reset. * @return number of bytes written. Will be equal to getChecksumSize(); * @throws IOException raised on errors performing I/O. */ @@ -266,9 +266,9 @@ public int writeValue( DataOutputStream out, boolean reset ) * Writes the current checksum to a buffer. * If reset is true, then resets the checksum. * - * @param buf buf - * @param offset offset - * @param reset reset + * @param buf buf. + * @param offset offset. + * @param reset reset. * @return number of bytes written. Will be equal to getChecksumSize(); * @throws IOException raised on errors performing I/O. */ @@ -298,8 +298,8 @@ public int writeValue( byte[] buf, int offset, boolean reset ) /** * Compares the checksum located at buf[offset] with the current checksum. * - * @param buf buf - * @param offset offset + * @param buf buf. + * @param offset offset. * @return true if the checksum matches and false otherwise. */ public boolean compare( byte buf[], int offset ) { @@ -339,7 +339,7 @@ public int getChecksumSize() { /** * the required checksum size given the data length. - * @param dataSize data size + * @param dataSize data size. * @return the required checksum size given the data length. */ public int getChecksumSize(int dataSize) { @@ -567,11 +567,11 @@ public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) { * Implementation of chunked calculation specifically on byte arrays. This * is to avoid the copy when dealing with ByteBuffers that have array backing. * - * @param data data - * @param dataOffset dataOffset - * @param dataLength dataLength - * @param sums sums - * @param sumsOffset sumsOffset + * @param data data. + * @param dataOffset dataOffset. + * @param dataLength dataLength. + * @param sums sums. + * @param sumsOffset sumsOffset. */ public void calculateChunkedSums( byte[] data, int dataOffset, int dataLength, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DirectBufferPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DirectBufferPool.java index 8611884c7e7e5..8808e1067284a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DirectBufferPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DirectBufferPool.java @@ -54,8 +54,8 @@ public class DirectBufferPool { * If a pooled buffer is available, returns that. Otherwise * allocates a new one. * - * @param size size - * @return ByteBuffer + * @param size size. + * @return ByteBuffer. 
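+ * <p>Typical borrow/return cycle (sketch):</p>
+ * <pre>
+ * DirectBufferPool pool = new DirectBufferPool();
+ * ByteBuffer buf = pool.getBuffer(4096);
+ * try {
+ *   // ... fill and drain buf ...
+ * } finally {
+ *   pool.returnBuffer(buf); // allows the buffer to be reused later
+ * }
+ * </pre>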
*/ public ByteBuffer getBuffer(int size) { Queue> list = buffersBySize.get(size); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java index 5cb9845c588f4..9fc18ca3bbf92 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java @@ -70,8 +70,8 @@ public DiskOutOfSpaceException(String msg) { * Create the directory if it doesn't exist and check that dir is readable, * writable and executable * - * @param dir dir - * @throws DiskErrorException disk problem + * @param dir dir. + * @throws DiskErrorException disk problem. */ public static void checkDir(File dir) throws DiskErrorException { checkDirInternal(dir); @@ -82,8 +82,8 @@ public static void checkDir(File dir) throws DiskErrorException { * readable, writable and executable. Perform some disk IO to * ensure that the disk is usable for writes. * - * @param dir dir - * @throws DiskErrorException disk problem + * @param dir dir. + * @throws DiskErrorException disk problem. */ public static void checkDirWithDiskIo(File dir) throws DiskErrorException { @@ -107,7 +107,7 @@ private static void checkDirInternal(File dir) * @param localFS local filesystem * @param dir directory * @param expected permission - * @throws DiskErrorException disk problem + * @throws DiskErrorException disk problem. * @throws IOException raised on errors performing I/O. */ public static void checkDir(LocalFileSystem localFS, Path dir, @@ -125,7 +125,7 @@ public static void checkDir(LocalFileSystem localFS, Path dir, * @param localFS local filesystem * @param dir directory * @param expected permission - * @throws DiskErrorException disk problem + * @throws DiskErrorException disk problem. * @throws IOException raised on errors performing I/O. */ public static void checkDirWithDiskIo(LocalFileSystem localFS, Path dir, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskValidatorFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskValidatorFactory.java index a4f80354dde06..97d0c812667a2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskValidatorFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskValidatorFactory.java @@ -40,7 +40,7 @@ private DiskValidatorFactory() { /** * Returns a {@link DiskValidator} instance corresponding to the passed clazz. * @param clazz a class extends {@link DiskValidator} - * @return disk validator + * @return disk validator. */ public static DiskValidator getInstance(Class clazz) { @@ -67,7 +67,7 @@ private DiskValidatorFactory() { * or "read-write" for {@link ReadWriteDiskValidator}. * @param diskValidator canonical class name, for example, "basic" * @throws DiskErrorException if the class cannot be located - * @return disk validator + * @return disk validator. 
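+ * <p>For example ({@code /data/dir1} is an arbitrary path):</p>
+ * <pre>
+ * DiskValidator validator = DiskValidatorFactory.getInstance("basic");
+ * validator.checkStatus(new File("/data/dir1"));
+ * </pre>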
*/ @SuppressWarnings("unchecked") public static DiskValidator getInstance(String diskValidator) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GcTimeMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GcTimeMonitor.java index fa969b57a3051..95d0d4d290ccd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GcTimeMonitor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GcTimeMonitor.java @@ -54,8 +54,8 @@ public static class Builder { /** * Set observation window size in milliseconds. - * @param value value - * @return window size in milliseconds + * @param value value. + * @return window size in milliseconds. */ public Builder observationWindowMs(long value) { this.observationWindowMs = value; @@ -64,8 +64,8 @@ public Builder observationWindowMs(long value) { /** * Set sleep interval in milliseconds. - * @param value value - * @return IntervalMs + * @param value value. + * @return IntervalMs. */ public Builder sleepIntervalMs(long value) { this.sleepIntervalMs = value; @@ -74,8 +74,8 @@ public Builder sleepIntervalMs(long value) { /** * Set the max GC time percentage that triggers the alert handler. - * @param value value - * @return max GC time percentage + * @param value value. + * @return max GC time percentage. */ public Builder maxGcTimePercentage(int value) { this.maxGcTimePercentage = value; @@ -84,8 +84,8 @@ public Builder maxGcTimePercentage(int value) { /** * Set the GC alert handler. - * @param value value - * @return GC alert handler + * @param value value. + * @return GC alert handler. */ public Builder gcTimeAlertHandler(GcTimeAlertHandler value) { this.handler = value; @@ -240,7 +240,7 @@ public static class GcData implements Cloneable { /** * Returns the absolute timestamp when this measurement was taken. - * @return timestamp + * @return timestamp. */ public long getTimestamp() { return timestamp; @@ -248,7 +248,7 @@ public long getTimestamp() { /** * Returns the time since the start of the associated GcTimeMonitor. - * @return GcMonitorRunTime + * @return GcMonitorRunTime. */ public long getGcMonitorRunTime() { return gcMonitorRunTime; @@ -256,7 +256,7 @@ public long getGcMonitorRunTime() { /** * Returns accumulated GC time since this JVM started. - * @return AccumulatedGcTime + * @return AccumulatedGcTime. */ public long getAccumulatedGcTime() { return totalGcTime; @@ -264,7 +264,7 @@ public long getAccumulatedGcTime() { /** * Returns the accumulated number of GC pauses since this JVM started. - * @return AccumulatedGcCount + * @return AccumulatedGcCount. */ public long getAccumulatedGcCount() { return totalGcCount; @@ -274,7 +274,7 @@ public long getAccumulatedGcCount() { * Returns the percentage (0..100) of time that the JVM spent in GC pauses * within the observation window of the associated GcTimeMonitor. * - * @return GcTimePercentage + * @return GcTimePercentage. */ public int getGcTimePercentage() { return gcTimePercentage; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericsUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericsUtil.java index e52ff015531c1..2d35b15bc5900 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericsUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericsUtil.java @@ -51,8 +51,8 @@ public static Class getClass(T t) { * T[]. 
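+ * <p>Sketch:</p>
+ * <pre>
+ * List&lt;String&gt; names = Arrays.asList("a", "b", "c");
+ * String[] array = GenericsUtil.toArray(String.class, names);
+ * </pre>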
* @param c the Class object of the items in the list * @param list the list to convert - * @param Generics Type T - * @return T Array + * @param Generics Type T. + * @return T Array. */ public static T[] toArray(Class c, List list) { @@ -69,10 +69,10 @@ public static T[] toArray(Class c, List list) * Converts the given List<T> to a an array of * T[]. * @param list the list to convert - * @param Generics Type T + * @param Generics Type T. * @throws ArrayIndexOutOfBoundsException if the list is empty. * Use {@link #toArray(Class, List)} if the list may be empty. - * @return T Array + * @return T Array. */ public static T[] toArray(List list) { return toArray(getClass(list.get(0)), list); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IPList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IPList.java index e940e08b2ff8a..71cdcf11656ae 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IPList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IPList.java @@ -26,7 +26,7 @@ public interface IPList { /** * returns true if the ipAddress is in the IPList. - * @param ipAddress ipAddress + * @param ipAddress ipAddress. * @return boolean value indicating whether the ipAddress is in the IPList */ public abstract boolean isIn(String ipAddress); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdGenerator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdGenerator.java index 0d348bca83a17..49ae8a8605e98 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdGenerator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdGenerator.java @@ -28,7 +28,7 @@ public interface IdGenerator { /** * Increment and then return the next value. - * @return long value + * @return long value. */ public long nextValue(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdentityHashStore.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdentityHashStore.java index b7961200f7d74..25e79f651b944 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdentityHashStore.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IdentityHashStore.java @@ -112,8 +112,8 @@ private void putInternal(Object k, Object v) { * In other words, you can insert the same key multiple times and it will * lead to multiple entries. * - * @param k Generics Type k - * @param v Generics Type v + * @param k Generics Type k. + * @param v Generics Type v. */ public void put(K k, V v) { Preconditions.checkNotNull(k); @@ -146,8 +146,8 @@ private int getElementIndex(K k) { /** * Retrieve a value associated with a given key. * - * @param k Generics Type k - * @return Generics Type V + * @param k Generics Type k. + * @return Generics Type V. */ public V get(K k) { int index = getElementIndex(k); @@ -161,8 +161,8 @@ public V get(K k) { * Retrieve a value associated with a given key, and delete the * relevant entry. * - * @param k Generics Type k - * @return Generics Type V + * @param k Generics Type k. + * @return Generics Type V. */ public V remove(K k) { int index = getElementIndex(k); @@ -195,7 +195,7 @@ public interface Visitor { /** * Visit all key, value pairs in the IdentityHashStore. * - * @param visitor visitor + * @param visitor visitor. 
 */
 public void visitAll(Visitor visitor) {
 int length = buffer == null ? 0 : buffer.length;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSortable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSortable.java
index 369f54da4a28a..99472e18f1daf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSortable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSortable.java
@@ -32,17 +32,17 @@ public interface IndexedSortable {
 * Compare items at the given addresses consistent with the semantics of
 * {@link java.util.Comparator#compare(Object, Object)}.
 *
- * @param i(int)
- * @param j(int)
- * @return compare result
+ * @param i i(int).
+ * @param j j(int).
+ * @return compare result.
 */
 int compare(int i, int j);

 /**
 * Swap items at the given addresses.
 *
- * @param i i(int)
- * @param j j(int)
+ * @param i i(int).
+ * @param j j(int).
 */
 void swap(int i, int j);
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSorter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSorter.java
index ebfd3841dc4b6..252efe3e4de44 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSorter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IndexedSorter.java
@@ -39,9 +39,9 @@ public interface IndexedSorter {
 * @see IndexedSortable#compare
 * @see IndexedSortable#swap
 *
- * @param r r
- * @param l l
- * @param s s
+ * @param r r.
+ * @param l l.
+ * @param s s.
 */
 void sort(IndexedSortable s, int l, int r);

 /**
 * Same as {@link #sort(IndexedSortable,int,int)}, but indicate progress
 * periodically.
 * @see #sort(IndexedSortable,int,int)
- * @param s s
- * @param l l
- * @param r r
- * @param rep rep
+ * @param s s.
+ * @param l l.
+ * @param r r.
+ * @param rep rep.
 */
 void sort(IndexedSortable s, int l, int r, Progressable rep);

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedLock.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedLock.java
index e314c4b738de0..35d127af425c2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedLock.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedLock.java
@@ -185,7 +185,7 @@ protected void startLockTiming() {
 *
 * @param acquireTime - timestamp just after acquiring the lock.
 * @param releaseTime - timestamp just before releasing the lock.
- * @param checkLockHeld checkLockHeld
+ * @param checkLockHeld checkLockHeld.
 */
 protected void check(long acquireTime, long releaseTime,
 boolean checkLockHeld) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java
index 9c152147b9efa..c27a42d763ed1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java
@@ -50,9 +50,9 @@ public interface Element {
 * Insert this element into the list. This is the first thing that will
 * be called on the element.
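+ * <p>Implementations are expected to keep the {@code prev}/{@code next}
+ * pointers on the element object itself; that embedded state is what makes
+ * the collection "intrusive".</p>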
* - * @param list list - * @param prev prev - * @param next next + * @param list list. + * @param prev prev. + * @param next next. */ void insertInternal(IntrusiveCollection list, Element prev, Element next); @@ -60,16 +60,16 @@ void insertInternal(IntrusiveCollection list, /** * Set the prev pointer of an element already in the list. * - * @param list list - * @param prev prev + * @param list list. + * @param prev prev. */ void setPrev(IntrusiveCollection list, Element prev); /** * Set the next pointer of an element already in the list. * - * @param list list - * @param next next + * @param list list. + * @param next next. */ void setNext(IntrusiveCollection list, Element next); @@ -77,30 +77,30 @@ void insertInternal(IntrusiveCollection list, * Remove an element from the list. This is the last thing that will be * called on an element. * - * @param list list + * @param list list. */ void removeInternal(IntrusiveCollection list); /** * Get the prev pointer of an element. * - * @param list list - * @return Element + * @param list list. + * @return Element. */ Element getPrev(IntrusiveCollection list); /** * Get the next pointer of an element. * - * @param list list - * @return Element + * @param list list. + * @return Element. */ Element getNext(IntrusiveCollection list); /** * Returns true if this element is in the provided list. * - * @param list list + * @param list list. * @return if this element is in the provided list true, not false. */ boolean isInList(IntrusiveCollection list); @@ -281,7 +281,7 @@ public T[] toArray(T[] array) { * Add an element to the end of the list. * * @param elem The new element to add. - * @return add result + * @return add result. */ @Override public boolean add(E elem) { @@ -303,7 +303,7 @@ public boolean add(E elem) { * Add an element to the front of the list. * * @param elem The new element to add. - * @return if addFirst success true, not false + * @return if addFirst success true, not false. */ public boolean addFirst(Element elem) { if (elem == null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java index c57085eae160a..0bba79fd77f14 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java @@ -288,7 +288,7 @@ public T load(FileSystem fs, Path path, @Nullable FileStatus status) * @param path path * @param overwrite should any existing file be overwritten * @param instance instance - * @throws IOException IO exception + * @throws IOException IO exception. */ public void save(FileSystem fs, Path path, T instance, boolean overwrite) throws @@ -325,7 +325,7 @@ public byte[] toBytes(T instance) throws IOException { * @param bytes byte array * @throws IOException IO problems * @throws EOFException not enough data - * @return byte array + * @return byte array. 
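+ * <p>Round-trip sketch; {@code MyRecord} is a placeholder POJO and the
+ * constructor arguments shown are assumptions:</p>
+ * <pre>
+ * JsonSerialization&lt;MyRecord&gt; serDeser =
+ *     new JsonSerialization&lt;&gt;(MyRecord.class, false, false);
+ * byte[] data = serDeser.toBytes(record);
+ * MyRecord copy = serDeser.fromBytes(data);
+ * </pre>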
*/ public T fromBytes(byte[] bytes) throws IOException { return fromJson(new String(bytes, 0, bytes.length, UTF_8)); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java index dc8672a4ef543..382266a99401f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java @@ -215,8 +215,8 @@ public void run() { * with a 1GB heap will very quickly go into "GC hell" and result in * log messages about the GC pauses. * - * @param args args - * @throws Exception Exception + * @param args args. + * @throws Exception Exception. */ @SuppressWarnings("resource") public static void main(String []args) throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java index a0eb81e9998a1..5151cdec5cd76 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java @@ -180,8 +180,8 @@ public E put(final E element) { * Remove the element corresponding to the key, * given key.hashCode() == index. * - * @param key key - * @param index index + * @param key key. + * @param index index. * @return If such element exists, return it. * Otherwise, return null. */ @@ -278,7 +278,7 @@ public String toString() { /** * Print detailed information of this object. * - * @param out out + * @param out out. */ public void printDetails(final PrintStream out) { out.print(this + ", entries = ["); @@ -367,9 +367,9 @@ public void setTrackModification(boolean trackModification) { * Then, we choose capacity = 2^e/(size of reference), * unless it is outside the close interval [1, 2^30]. * - * @param mapName mapName - * @param percentage percentage - * @return compute capacity + * @param mapName mapName. + * @param percentage percentage. + * @return compute capacity. */ public static int computeCapacity(double percentage, String mapName) { return computeCapacity(Runtime.getRuntime().maxMemory(), percentage, diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightResizableGSet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightResizableGSet.java index 9658e3ea887a5..051e2680bc306 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightResizableGSet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightResizableGSet.java @@ -117,7 +117,7 @@ public synchronized void getIterator(Consumer> consumer) { /** * Resize the internal table to given capacity. * - * @param cap capacity + * @param cap capacity. 
*/ @SuppressWarnings("unchecked") protected synchronized void resize(int cap) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Lists.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Lists.java index 9f86a24791700..a9d0756e8e381 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Lists.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Lists.java @@ -45,8 +45,8 @@ private Lists() { /** * Creates a mutable, empty {@code ArrayList} instance. * - * @param Generics Type E - * @return ArrayList Generics Type E + * @param Generics Type E. + * @return ArrayList Generics Type E. */ public static ArrayList newArrayList() { return new ArrayList<>(); @@ -63,9 +63,9 @@ public static ArrayList newArrayList() { * {@code (...))}, or for creating an empty list then calling * {@link Collections#addAll}. * - * @param Generics Type E - * @param elements elements - * @return ArrayList Generics Type E + * @param Generics Type E. + * @param elements elements. + * @return ArrayList Generics Type E. */ @SafeVarargs public static ArrayList newArrayList(E... elements) { @@ -84,9 +84,9 @@ public static ArrayList newArrayList(E... elements) { * given elements; a very thin shortcut for creating an empty list then * calling Iterables#addAll. * - * @param Generics Type E - * @param elements elements - * @return ArrayList Generics Type E + * @param Generics Type E. + * @param elements elements. + * @return ArrayList Generics Type E. */ public static ArrayList newArrayList(Iterable elements) { if (elements == null) { @@ -102,9 +102,9 @@ public static ArrayList newArrayList(Iterable elements) { * given elements; a very thin shortcut for creating an empty list * and then calling Iterators#addAll. * - * @param Generics Type E - * @param elements elements - * @return ArrayList Generics Type E + * @param Generics Type E. + * @param elements elements. + * @return ArrayList Generics Type E. */ public static ArrayList newArrayList(Iterator elements) { ArrayList list = newArrayList(); @@ -117,7 +117,7 @@ public static ArrayList newArrayList(Iterator elements) { * specified initial size; * simply delegates to {@link ArrayList#ArrayList(int)}. * - * @param Generics Type E + * @param Generics Type E. * @param initialArraySize the exact size of the initial backing array for * the returned array list * ({@code ArrayList} documentation calls this value the "capacity"). @@ -143,7 +143,7 @@ public static ArrayList newArrayListWithCapacity( * estimated number of elements. * @throws IllegalArgumentException if {@code estimatedSize} is negative. * - * @param Generics Type E + * @param Generics Type E. */ public static ArrayList newArrayListWithExpectedSize( int estimatedSize) { @@ -160,8 +160,8 @@ public static ArrayList newArrayListWithExpectedSize( * spent a lot of time benchmarking your specific needs, use one of those * instead.

* - * @param Generics Type E - * @return Generics Type E List + * @param Generics Type E. + * @return Generics Type E List. */ public static LinkedList newLinkedList() { return new LinkedList<>(); @@ -178,9 +178,9 @@ public static LinkedList newLinkedList() { * situations. Unless you have spent a lot of time benchmarking your * specific needs, use one of those instead.

* - * @param elements elements - * @param Generics Type E - * @return Generics Type E List + * @param elements elements. + * @param Generics Type E. + * @return Generics Type E List. */ public static LinkedList newLinkedList( Iterable elements) { @@ -263,7 +263,7 @@ private static boolean addAll(Collection addTo, * @param originalList original big list. * @param pageSize desired size of each sublist ( last one * may be smaller) - * @param Generics Type + * @param Generics Type. * @return a list of sub lists. */ public static List> partition(List originalList, int pageSize) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java index 04fd8c47d0757..68c5c3ca47ff9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java @@ -89,7 +89,7 @@ public MachineList(Collection hostEntries) { /** * Accepts a collection of ip/cidr/host addresses * - * @param hostEntries hostEntries + * @param hostEntries hostEntries. * @param addressFactory addressFactory to convert host to InetAddress */ public MachineList(Collection hostEntries, @@ -139,7 +139,7 @@ public MachineList(Collection hostEntries, * {@link #includes(InetAddress)} should be preferred * to avoid possibly re-resolving the ip address. * - * @param ipAddress ipAddress + * @param ipAddress ipAddress. * @return true if ipAddress is part of the list */ public boolean includes(String ipAddress) { @@ -161,7 +161,7 @@ public boolean includes(String ipAddress) { /** * Accepts an inet address and return true if address is in the list. - * @param address address + * @param address address. * @return true if address is part of the list */ public boolean includes(InetAddress address) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java index 9aa3dcc6a8e0a..b5550f58ae218 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java @@ -77,7 +77,7 @@ public static boolean isNativeCodeLoaded() { /** * Returns true only if this build was compiled with support for ISA-L. * - * @return if this build was compiled with support for ISA-L true, not false + * @return if this build was compiled with support for ISA-L true, not false. */ public static native boolean buildSupportsIsal(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java index a53e31db61c13..9843a9d4057dd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java @@ -39,8 +39,8 @@ public class NativeLibraryChecker { LoggerFactory.getLogger(NativeLibraryChecker.class); /** - * A tool to test native library availability, - * @param args args + * A tool to test native library availability. + * @param args args. 
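+ * <p>This is the class behind the {@code hadoop checknative [-a|-h]}
+ * shell command.</p>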
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
index 04fd8c47d0757..68c5c3ca47ff9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
@@ -89,7 +89,7 @@ public MachineList(Collection<String> hostEntries) {
 /**
  * Accepts a collection of ip/cidr/host addresses
  *
- * @param hostEntries hostEntries
+ * @param hostEntries hostEntries.
  * @param addressFactory addressFactory to convert host to InetAddress
  */
 public MachineList(Collection<String> hostEntries,
@@ -139,7 +139,7 @@ public MachineList(Collection<String> hostEntries,
  * {@link #includes(InetAddress)} should be preferred
  * to avoid possibly re-resolving the ip address.
  *
- * @param ipAddress ipAddress
+ * @param ipAddress ipAddress.
  * @return true if ipAddress is part of the list
  */
 public boolean includes(String ipAddress) {
@@ -161,7 +161,7 @@ public boolean includes(String ipAddress) {
 
 /**
  * Accepts an inet address and return true if address is in the list.
- * @param address address
+ * @param address address.
  * @return true if address is part of the list
  */
 public boolean includes(InetAddress address) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
index 9aa3dcc6a8e0a..b5550f58ae218 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
@@ -77,7 +77,7 @@ public static boolean isNativeCodeLoaded() {
 /**
  * Returns true only if this build was compiled with support for ISA-L.
  *
- * @return if this build was compiled with support for ISA-L true, not false
+ * @return if this build was compiled with support for ISA-L true, not false.
  */
 public static native boolean buildSupportsIsal();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java
index a53e31db61c13..9843a9d4057dd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java
@@ -39,8 +39,8 @@ public class NativeLibraryChecker {
       LoggerFactory.getLogger(NativeLibraryChecker.class);
 
 /**
- * A tool to test native library availability,
- * @param args args
+ * A tool to test native library availability.
+ * @param args args.
  */
 public static void main(String[] args) {
   String usage = "NativeLibraryChecker [-a|-h]\n"
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java
index 99c1a206a5807..f7822e3f788d0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java
@@ -31,7 +31,7 @@ public class PrintJarMainClass {
 
 /**
- * @param args args
+ * @param args args.
  */
 public static void main(String[] args) {
   try (JarFile jar_file = new JarFile(args[0])) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PriorityQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PriorityQueue.java
index 23b9c45c10855..d149d5d811914 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PriorityQueue.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PriorityQueue.java
@@ -34,15 +34,15 @@ public abstract class PriorityQueue<T> {
 /**
  * Determines the ordering of objects in this priority queue. Subclasses must define this one method.
- * @param a object a
- * @param b object b
+ * @param a object a.
+ * @param b object b.
  * @return if a less than b true, not false
  */
 protected abstract boolean lessThan(Object a, Object b);
 
 /**
  * Subclass constructors must call this.
- * @param maxSize max size
+ * @param maxSize max size.
  */
 @SuppressWarnings("unchecked")
 protected final void initialize(int maxSize) {
@@ -56,7 +56,7 @@ protected final void initialize(int maxSize) {
  * Adds an Object to a PriorityQueue in log(size) time.
  * If one tries to add more objects than maxSize from initialize
  * a RuntimeException (ArrayIndexOutOfBound) is thrown.
- * @param element element
+ * @param element element.
  */
 public final void put(T element) {
   size++;
@@ -67,7 +67,7 @@ public final void put(T element) {
 /**
  * Adds element to the PriorityQueue in log(size) time if either
  * the PriorityQueue is not full, or not lessThan(element, top()).
- * @param element element
+ * @param element element.
  * @return true if element is added, false otherwise.
  */
 public boolean insert(T element){
@@ -87,7 +87,7 @@ else if (size > 0 && !lessThan(element, top())){
 /**
  * Returns the least element of the PriorityQueue in constant time.
  *
- * @return T Generics Type T
+ * @return T Generics Type T.
  */
 public final T top() {
   if (size > 0)
@@ -99,7 +99,7 @@
 /**
  * Removes and returns the least element of the PriorityQueue in log(size) time.
- * @return T Generics Type T
+ * @return T Generics Type T.
  */
 public final T pop() {
   if (size > 0) {
@@ -128,7 +128,7 @@ public final void adjustTop() {
 /**
  * Returns the number of elements currently stored in the PriorityQueue.
  *
- * @return size
+ * @return size.
  */
 public final int size() {
   return size;
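
A minimal sketch of the lessThan()/initialize() contract documented above
(IntMinQueue is a hypothetical subclass, not part of Hadoop):

    class IntMinQueue extends PriorityQueue<Integer> {
      IntMinQueue(int maxSize) {
        initialize(maxSize);                  // subclasses must call this once
      }
      @Override
      protected boolean lessThan(Object a, Object b) {
        return (Integer) a < (Integer) b;     // min-heap ordering
      }
    }
    // put()/insert() add in log(size) time; top()/pop() return the least element.
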
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProgramDriver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProgramDriver.java
index 7851e62139487..e49cf57705344 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProgramDriver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProgramDriver.java
@@ -148,7 +148,7 @@ public int run(String[] args)
 /**
  * API compatible with Hadoop 1.x.
  *
- * @param argv argv
+ * @param argv argv.
  * @throws Throwable Anything thrown
  * by the example program's main
  */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java
index f35afc90c19c1..a839c04e99e63 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java
@@ -55,8 +55,8 @@ public Progress() {}
 /**
  * Adds a named node to the tree.
- * @param status status
- * @return Progress
+ * @param status status.
+ * @return Progress.
  */
 public Progress addPhase(String status) {
   Progress phase = addPhase();
@@ -66,7 +66,7 @@
 /**
  * Adds a node to the tree. Gives equal weightage to all phases.
- * @return Progress
+ * @return Progress.
  */
 public synchronized Progress addPhase() {
   Progress phase = addNewPhase();
@@ -87,9 +87,9 @@ private synchronized Progress addNewPhase() {
 /**
  * Adds a named node with a specified progress weightage to the tree.
  *
- * @param status status
- * @param weightage weightage
- * @return Progress
+ * @param status status.
+ * @param weightage weightage.
+ * @return Progress.
  */
 public Progress addPhase(String status, float weightage) {
   Progress phase = addPhase(weightage);
@@ -101,8 +101,8 @@
 /**
  * Adds a node with a specified progress weightage to the tree.
  *
- * @param weightage weightage
- * @return Progress
+ * @param weightage weightage.
+ * @return Progress.
  */
 public synchronized Progress addPhase(float weightage) {
   Progress phase = new Progress();
@@ -125,7 +125,7 @@ public synchronized Progress addPhase(float weightage) {
 /**
  * Adds n nodes to the tree. Gives equal weightage to all phases.
  *
- * @param n n
+ * @param n n.
  */
 public synchronized void addPhases(int n) {
   for (int i = 0; i < n; i++) {
@@ -160,7 +160,7 @@ public synchronized void startNextPhase() {
 /**
  * Returns the current sub-node executing.
- * @return Progress
+ * @return Progress.
  */
 public synchronized Progress phase() {
   return phases.get(currentPhase);
@@ -185,7 +185,7 @@ public void complete() {
 /**
  * Called during execution on a leaf node to set its progress.
- * @param progress progress
+ * @param progress progress.
  */
 public synchronized void set(float progress) {
   if (Float.isNaN(progress)) {
@@ -218,7 +218,7 @@ else if (progress == Float.POSITIVE_INFINITY) {
 /**
  * Returns the overall progress of the root.
- * @return progress
+ * @return progress.
  */
 // this method probably does not need to be synchronized as getInternal() is
 // synchronized and the node's parent never changes. Still, it doesn't hurt.
@@ -234,7 +234,7 @@ public synchronized float get() {
  * Returns progress in this node. get() would give overall progress of the
  * root node(not just given current node).
  *
- * @return progress
+ * @return progress.
  */
 public synchronized float getProgress() {
   return getInternal();
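
A rough sketch of how the Progress phase tree documented above is driven
(illustrative; assumes the two phase weights sum to 1):

    Progress root = new Progress();
    Progress copy = root.addPhase("copy", 0.75f);   // weighted phase
    Progress sort = root.addPhase("sort", 0.25f);
    copy.set(0.5f);                                 // leaf progress in [0, 1]
    float overall = root.get();                     // roughly 0.75 * 0.5 = 0.375
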
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
index 506a16c3b10bf..883c19c5e7750 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
@@ -83,10 +83,10 @@ public static int readRawVarint32(DataInput in) throws IOException {
  * as the old connection context as was done for writable where
  * the effective and real users are set based on the auth method.
  *
- * @param protocol protocol
- * @param ugi ugi
- * @param authMethod authMethod
- * @return IpcConnectionContextProto
+ * @param protocol protocol.
+ * @param ugi ugi.
+ * @param authMethod authMethod.
+ * @return IpcConnectionContextProto.
  */
 public static IpcConnectionContextProto makeIpcConnectionContext(
     final String protocol,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java
index 0097eaa6b5756..f3f8b839a0500 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java
@@ -41,8 +41,8 @@ private static void fix(IndexedSortable s, int p, int r) {
  * Deepest recursion before giving up and doing a heapsort.
  * Returns 2 * ceil(log(n)).
  *
- * @param x x
- * @return MaxDepth
+ * @param x x.
+ * @return MaxDepth.
  */
 protected static int getMaxDepth(int x) {
   if (x <= 0)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
index 2de99fce3c542..2438b714ffcd3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
@@ -120,7 +120,7 @@ private static void setJobConf(Object theObject, Configuration conf) {
  *
  * @param theClass class of which an object is created
  * @param conf Configuration
- * @param <T> Generics Type T
+ * @param <T> Generics Type T.
  * @return a new object
  */
 @SuppressWarnings("unchecked")
 public static <T> T newInstance(Class<T> theClass, Configuration conf) {
@@ -134,7 +134,7 @@
  * @param conf Configuration
  * @param argTypes the types of the arguments
  * @param values the values of the arguments
- * @param <T> Generics Type
+ * @param <T> Generics Type.
  * @return a new object
  */
 @SuppressWarnings("unchecked")
@@ -286,7 +286,7 @@ public static void logThreadInfo(Logger log,
  * Return the correctly-typed {@link Class} of the given object.
  *
  * @param o object whose correctly-typed Class is to be obtained
- * @param <T> Generics Type T
+ * @param <T> Generics Type T.
  * @return the correctly typed Class of the given object.
  */
 @SuppressWarnings("unchecked")
@@ -338,8 +338,8 @@ private static SerializationFactory getFactory(Configuration conf) {
  * Make a copy of the writable object using serialization to a buffer.
  * @param src the object to copy from
  * @param dst the object to copy into, which is destroyed
- * @param <T> Generics Type
- * @param conf configuration
+ * @param <T> Generics Type.
+ * @param conf configuration.
  * @return dst param (the copy)
  * @throws IOException raised on errors performing I/O.
  */
@@ -399,8 +399,8 @@ public int compare(Field a, Field b) {
  * Gets all the declared methods of a class including methods declared in
  * superclasses.
  *
- * @param clazz clazz
- * @return Method List
+ * @param clazz clazz.
+ * @return Method List.
  */
 public static List<Method> getDeclaredMethodsIncludingInherited(Class<?> clazz) {
   List<Method> methods = new ArrayList<Method>();
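
A sketch of the reflective construction documented above (illustrative;
MyTool is a hypothetical class with a no-arg constructor):

    Configuration conf = new Configuration();
    // Instantiates via the no-arg constructor; if the class is Configurable,
    // setConf(conf) is applied before the instance is returned.
    MyTool tool = ReflectionUtils.newInstance(MyTool.class, conf);
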
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
index fc3f3780414d0..c28e69f54611e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
@@ -156,7 +156,7 @@ public static void unJar(InputStream inputStream, File toDir,
  * @param inputStream the jar stream to unpack
  * @param toDir the destination directory into which to unpack the jar
  * @param unpackRegex the pattern to match jar entries against
- * @param name name
+ * @param name name.
  *
  * @throws IOException if an I/O error has occurred or toDir
  * cannot be created and does not already exist
@@ -234,8 +234,8 @@ private static void ensureDirectory(File dir) throws IOException {
 /** Run a Hadoop job jar. If the main class is not in the jar's manifest,
  * then it must be provided on the command line.
  *
- * @param args args
- * @throws Throwable error
+ * @param args args.
+ * @throws Throwable error.
  */
 public static void main(String[] args) throws Throwable {
   new RunJar().run(args);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SequentialNumber.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SequentialNumber.java
index 0298b4e32f5d2..c3c04493d8be5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SequentialNumber.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SequentialNumber.java
@@ -32,7 +32,7 @@ public abstract class SequentialNumber implements IdGenerator {
 /**
  * Create a new instance with the given initial value.
- * @param initialValue initialValue
+ * @param initialValue initialValue.
  */
 protected SequentialNumber(final long initialValue) {
   currentValue = new AtomicLong(initialValue);
@@ -45,7 +45,7 @@ public long getCurrentValue() {
 /**
  * Set current value.
- * @param value value
+ * @param value value.
  */
 public void setCurrentValue(long value) {
   currentValue.set(value);
@@ -71,9 +71,9 @@ public long nextValue() {
 /**
  * Skip to the new value.
- * @param newValue newValue
+ * @param newValue newValue.
  * @throws IllegalStateException
- * Cannot skip to less than the current value
+ * Cannot skip to less than the current value.
  */
 public void skipTo(long newValue) throws IllegalStateException {
   for(;;) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java
index 9e88f53b6ff79..10cac5f49f7e3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java
@@ -32,10 +32,10 @@ public class ServletUtil {
 /**
  * Initial HTML header.
  *
- * @param response response
- * @param title title
+ * @param response response.
+ * @param title title.
  * @throws IOException raised on errors performing I/O.
- * @return PrintWriter
+ * @return PrintWriter.
  */
 public static PrintWriter initHTML(ServletResponse response, String title
     ) throws IOException {
@@ -53,9 +53,9 @@ public static PrintWriter initHTML(ServletResponse response, String title
  * Get a parameter from a ServletRequest.
  * Return null if the parameter contains only white spaces.
  *
- * @param request request
- * @param name name
- * @return get a parameter from a ServletRequest
+ * @param request request.
+ * @param name name.
+ * @return get a parameter from a ServletRequest.
  */
 public static String getParameter(ServletRequest request, String name) {
   String s = request.getParameter(name);
@@ -69,8 +69,8 @@ public static String getParameter(ServletRequest request, String name) {
 /**
  * parseLongParam.
  *
- * @param request request
- * @param param param
+ * @param request request.
+ * @param param param.
  * @return a long value as passed in the given parameter, throwing
  * an exception if it is not present or if it is not a valid number.
  * @throws IOException raised on errors performing I/O.
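
A minimal sketch of the SequentialNumber contract documented above
(illustrative; the class is abstract, so an anonymous subclass is used):

    SequentialNumber ids = new SequentialNumber(1000L) { };
    long next = ids.nextValue();   // 1001
    ids.skipTo(2000L);             // fine: moves forward
    ids.skipTo(1500L);             // IllegalStateException: cannot move backwards
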
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Sets.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Sets.java
index afd02a47a3bc0..8867900d0b692 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Sets.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Sets.java
@@ -57,7 +57,7 @@ private Sets() {
  * instead, at the cost of increased memory footprint, to get
  * deterministic iteration behavior.
  *
- * @param <E> Generics Type E
+ * @param <E> Generics Type E.
  * @return a new, empty {@code TreeSet}
  */
 public static <E> HashSet<E> newHashSet() {
@@ -93,8 +93,8 @@ public static <E> TreeSet<E> newTreeSet() {
  * {@code newHashSet(}{@link Arrays#asList}{@code (...))}, or for creating an
  * empty set then calling {@link Collections#addAll}.
  *
- * @param <E> Generics Type E
- * @param elements the elements that the set should contain
+ * @param <E> Generics Type E.
+ * @param elements the elements that the set should contain.
  * @return a new, empty thread-safe {@code Set}
  */
 @SafeVarargs
 public static <E> HashSet<E> newHashSet(E... elements) {
@@ -116,9 +116,9 @@ public static <E> HashSet<E> newHashSet(E... elements) {
  * <p>Note: if {@code E} is an {@link Enum} type, use
  * newEnumSet(Iterable, Class) instead.
  *
- * @param <E> Generics Type E
- * @param elements the elements that the set should contain
- * @return a new, empty thread-safe {@code Set}
+ * @param <E> Generics Type E.
+ * @param elements the elements that the set should contain.
+ * @return a new, empty thread-safe {@code Set}.
  */
 public static <E> HashSet<E> newHashSet(Iterable<? extends E> elements) {
   return (elements instanceof Collection)
@@ -147,7 +147,7 @@ public static <E> HashSet<E> newHashSet(Iterable<? extends E> elements) {
  * then calling Iterables#addAll. This method is not very useful and will
  * likely be deprecated in the future.
  *
- * @param <E> Generics Type E
+ * @param <E> Generics Type E.
  * @param elements the elements that the set should contain
  * @return a new {@code TreeSet} containing those elements (minus duplicates)
  */
@@ -184,9 +184,9 @@ private static <E> boolean addAll(TreeSet<E> addTo,
  * <p>Overall, this method is not very useful and will likely be deprecated
  * in the future.
  *
- * @param <E> Generics Type E
- * @param elements elements
- * @return a new, empty thread-safe {@code Set}
+ * @param <E> Generics Type E.
+ * @param elements elements.
+ * @return a new, empty thread-safe {@code Set}.
  */
 public static <E> HashSet<E> newHashSet(Iterator<? extends E> elements) {
   HashSet<E> set = newHashSet();
@@ -205,7 +205,7 @@ public static <E> HashSet<E> newHashSet(Iterator<? extends E> elements) {
  *
  * @param expectedSize the number of elements you expect to add to the
  * returned set
- * @param <E> Generics Type E
+ * @param <E> Generics Type E.
  * @return a new, empty hash set with enough capacity to hold
  * {@code expectedSize} elements without resizing
  * @throws IllegalArgumentException if {@code expectedSize} is negative
  */
@@ -242,10 +242,10 @@ private static <E> boolean addAll(Collection<E> addTo,
  * on different equivalence relations (as {@code HashSet}, {@code TreeSet},
  * and the keySet of an {@code IdentityHashMap} all are).
  *
- * @param set1 set1
- * @param set2 set2
- * @param <E> Generics Type E
- * @return a new, empty thread-safe {@code Set}
+ * @param set1 set1.
+ * @param set2 set2.
+ * @param <E> Generics Type E.
+ * @return a new, empty thread-safe {@code Set}.
  */
 public static <E> Set<E> intersection(final Set<E> set1,
     final Set<E> set2) {
@@ -270,10 +270,10 @@ public static <E> Set<E> intersection(final Set<E> set1,
  * {@link TreeSet}, and the {@link Map#keySet} of an
  * {@code IdentityHashMap} all are).
  *
- * @param set1 set1
- * @param set2 set2
- * @param <E> Generics Type E
- * @return a new, empty thread-safe {@code Set}
+ * @param set1 set1.
+ * @param set2 set2.
+ * @param <E> Generics Type E.
+ * @return a new, empty thread-safe {@code Set}.
  */
 public static <E> Set<E> union(
     final Set<E> set1, final Set<E> set2) {
@@ -301,10 +301,10 @@
  * strict order requirement, recommended method is
  * {@link #differenceInTreeSets(Set, Set)}.
  *
- * @param set1 set1
- * @param set2 set2
- * @param <E> Generics Type E
- * @return a new, empty thread-safe {@code Set}
+ * @param set1 set1.
+ * @param set2 set2.
+ * @param <E> Generics Type E.
+ * @return a new, empty thread-safe {@code Set}.
  */
 public static <E> Set<E> difference(
     final Set<E> set1, final Set<E> set2) {
@@ -331,10 +331,10 @@
  * This method is used to find difference for TreeSets. For HashSets,
  * recommended method is {@link #difference(Set, Set)}.
  *
- * @param <E> Generics Type E
- * @param set1 set1
- * @param set2 set2
- * @return a new, empty thread-safe {@code Set}
+ * @param <E> Generics Type E.
+ * @param set1 set1.
+ * @param set2 set2.
+ * @return a new, empty thread-safe {@code Set}.
  */
 public static <E> Set<E> differenceInTreeSets(
     final Set<E> set1, final Set<E> set2) {
@@ -359,10 +359,10 @@
  * on different equivalence relations (as {@code HashSet}, {@code TreeSet},
  * and the keySet of an {@code IdentityHashMap} all are).
  *
- * @param set1 set1
- * @param set2 set2
- * @param <E> Generics Type E
- * @return a new, empty thread-safe {@code Set}
+ * @param set1 set1.
+ * @param set2 set2.
+ * @param <E> Generics Type E.
+ * @return a new, empty thread-safe {@code Set}.
  */
 public static <E> Set<E> symmetricDifference(
     final Set<E> set1, final Set<E> set2) {
@@ -388,7 +388,7 @@
  * <p>Unlike {@code HashSet}, this class does NOT allow {@code null} to be
  * used as an element. The set is serializable.
  *
- * @param <E> Generics Type
+ * @param <E> Generics Type.
  * @return a new, empty thread-safe {@code Set}
  */
 public static <E> Set<E> newConcurrentHashSet() {
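
The set-algebra helpers documented above, in one illustrative sketch:

    Set<Integer> s1 = Sets.newHashSet(1, 2, 3);
    Set<Integer> s2 = Sets.newHashSet(3, 4);
    Sets.union(s1, s2);                // {1, 2, 3, 4}
    Sets.intersection(s1, s2);         // {3}
    Sets.difference(s1, s2);           // {1, 2}
    Sets.symmetricDifference(s1, s2);  // {1, 2, 4}
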
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
index d49de10c61cd2..bd6bcb08d9c6d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
@@ -209,7 +209,7 @@ private static OSType getOSType() {
 /**
  * a Unix command to get the current user's groups list.
  *
- * @return group command array
+ * @return group command array.
  */
 public static String[] getGroupsCommand() {
   return (WINDOWS)? new String[]{"cmd", "/c", "groups"}
@@ -222,8 +222,8 @@
  * first and finally get the groups list which includes the primary group.
  * i.e. the user's primary group will be included twice.
  *
- * @param user user
- * @return groups for user command
+ * @param user user.
+ * @return groups for user command.
  */
 public static String[] getGroupsForUserCommand(final String user) {
   //'groups username' command return is inconsistent across different unixes
@@ -244,8 +244,8 @@
  * i.e. the user's primary group will be included twice.
  * This command does not support Windows and will only return group names.
  *
- * @param user user
- * @return groups id for user command
+ * @param user user.
+ * @return groups id for user command.
  */
 public static String[] getGroupsIDForUserCommand(final String user) {
   //'groups username' command return is inconsistent across different unixes
@@ -262,8 +262,8 @@
 /**
  * A command to get a given netgroup's user list.
  *
- * @param netgroup net group
- * @return users for net group command
+ * @param netgroup net group.
+ * @return users for net group command.
  */
 public static String[] getUsersForNetgroupCommand(final String netgroup) {
   //'groups username' command return is non-consistent across different unixes
@@ -273,7 +273,7 @@
 /**
  * Return a command to get permission information.
  *
- * @return permission command
+ * @return permission command.
  */
 public static String[] getGetPermissionCommand() {
   return (WINDOWS) ? new String[] { getWinUtilsPath(), "ls", "-F" }
@@ -283,9 +283,9 @@
 /**
  * Return a command to set permission.
  *
- * @param perm permission
- * @param recursive recursive
- * @return set permission command
+ * @param perm permission.
+ * @param recursive recursive.
+ * @return set permission command.
  */
 public static String[] getSetPermissionCommand(String perm,
     boolean recursive) {
   if (recursive) {
@@ -319,8 +319,8 @@
 /**
  * Return a command to set owner.
  *
- * @param owner owner
- * @return set owner command
+ * @param owner owner.
+ * @return set owner command.
  */
 public static String[] getSetOwnerCommand(String owner) {
   return (WINDOWS) ?
@@ -331,9 +331,9 @@
 /**
  * Return a command to create symbolic links.
  *
- * @param target target
- * @param link link
- * @return symlink command
+ * @param target target.
+ * @param link link.
+ * @return symlink command.
  */
 public static String[] getSymlinkCommand(String target, String link) {
   return WINDOWS ?
@@ -344,8 +344,8 @@
 /**
  * Return a command to read the target of the a symbolic link.
  *
- * @param link link
- * @return read link command
+ * @param link link.
+ * @return read link command.
  */
 public static String[] getReadlinkCommand(String link) {
   return WINDOWS ?
@@ -365,9 +365,9 @@ public static String[] getCheckProcessIsAliveCommand(String pid) {
 /**
  * Return a command to send a signal to a given pid.
  *
- * @param code code
- * @param pid pid
- * @return signal kill command
+ * @param code code.
+ * @param pid pid.
+ * @return signal kill command.
  */
 public static String[] getSignalKillCommand(int code, String pid) {
   // Code == 0 means check alive
@@ -398,7 +398,7 @@
 /**
  * Return a regular expression string that match environment variables.
  *
- * @return environment variable regex
+ * @return environment variable regex.
  */
 public static String getEnvironmentVariableRegex() {
   return (WINDOWS)
@@ -1112,14 +1112,14 @@ private static void joinThread(Thread t) {
 /**
  * return an array containing the command name and its parameters.
  *
- * @return exec string array
+ * @return exec string array.
  */
 protected abstract String[] getExecString();
 
 /**
  * Parse the execution result.
  *
- * @param lines lines
+ * @param lines lines.
  * @throws IOException raised on errors performing I/O.
  *
  */
 protected abstract void parseExecResult(BufferedReader lines)
@@ -1438,7 +1438,7 @@ public static void destroyAllShellProcesses() {
 /**
  * Static method to return a Set of all Shell objects.
  *
- * @return all shells set
+ * @return all shells set.
  */
 public static Set<Shell> getAllShells() {
   synchronized (CHILD_SHELLS) {
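
A rough sketch of how the command builders documented above are typically
consumed, via Shell.ShellCommandExecutor (illustrative; exception handling
elided):

    String[] cmd = Shell.getGroupsForUserCommand("hdfs");
    Shell.ShellCommandExecutor exec = new Shell.ShellCommandExecutor(cmd);
    exec.execute();                    // may throw IOException
    String groups = exec.getOutput();  // raw output of the groups lookup
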
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java
index 040adc3ae61ff..2cbaa2ac1f1a6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java
@@ -75,8 +75,8 @@ public static String weakIntern(String sample) {
  * Interns all the strings in the given array in place,
  * returning the same array.
  *
- * @param strings strings
- * @return internStringsInArray
+ * @param strings strings.
+ * @return internStringsInArray.
  */
 public static String[] internStringsInArray(String[] strings) {
   for (int i = 0; i < strings.length; i++) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
index 234e9b3b675a8..b620ba73222ad 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
@@ -122,9 +122,9 @@ public static String humanReadableInt(long number) {
 /**
  * The same as String.format(Locale.ENGLISH, format, objects).
- * @param format format
- * @param objects objects
- * @return format string
+ * @param format format.
+ * @param objects objects.
+ * @return format string.
  */
 public static String format(final String format, final Object... objects) {
   return String.format(Locale.ENGLISH, format, objects);
@@ -161,7 +161,7 @@ public static String arrayToString(String[] strs) {
 /**
  * Given an array of bytes it will convert the bytes to a hex string
  * representation of the bytes
- * @param bytes bytes
+ * @param bytes bytes.
  * @param start start index, inclusively
  * @param end end index, exclusively
  * @return hex string representation of the byte array
  */
@@ -179,8 +179,8 @@ public static String byteToHexString(byte[] bytes, int start, int end) {
 /**
  * Same as byteToHexString(bytes, 0, bytes.length).
- * @param bytes bytes
- * @return byteToHexString
+ * @param bytes bytes.
+ * @return byteToHexString.
  */
 public static String byteToHexString(byte bytes[]) {
   return byteToHexString(bytes, 0, bytes.length);
@@ -213,8 +213,8 @@ public static byte[] hexStringToByte(String hex) {
 }
 /**
  * uriToString.
- * @param uris uris
- * @return uriToString
+ * @param uris uris.
+ * @return uriToString.
  */
 public static String uriToString(URI[] uris){
   if (uris == null) {
@@ -253,8 +253,8 @@ public static URI[] stringToURI(String[] str){
 /**
  * stringToPath.
- * @param str str
- * @return path array
+ * @param str str.
+ * @return path array.
  */
 public static Path[] stringToPath(String[] str){
   if (str == null) {
@@ -288,7 +288,7 @@ public static String formatTimeDiff(long finishTime, long startTime){
  * String in the format Xhrs, Ymins, Z sec.
  *
  * @param timeDiff The time difference to format
- * @return formatTime String
+ * @return formatTime String.
  */
 public static String formatTime(long timeDiff){
   StringBuilder buf = new StringBuilder();
@@ -319,7 +319,7 @@ public static String formatTime(long timeDiff){
  * more than 100 hours ,it is displayed as 99hrs, 59mins, 59sec.
  *
  * @param timeDiff The time difference to format
- * @return format time sortable
+ * @return format time sortable.
  */
 public static String formatTimeSortable(long timeDiff) {
   StringBuilder buf = new StringBuilder();
@@ -578,7 +578,7 @@ public static String[] split(
  * @param escapeChar character used to escape
  * @param start from where to search
  * @param split used to pass back the extracted string
- * @return index
+ * @return index.
  */
 public static int findNext(String str, char separator, char escapeChar,
     int start, StringBuilder split) {
@@ -633,10 +633,10 @@ private static boolean hasChar(char[] chars, char character) {
 /**
  * escapeString.
  *
- * @param str str
- * @param escapeChar escapeChar
+ * @param str str.
+ * @param escapeChar escapeChar.
  * @param charsToEscape array of characters to be escaped
- * @return escapeString
+ * @return escapeString.
  */
 public static String escapeString(String str, char escapeChar,
     char[] charsToEscape) {
@@ -680,10 +680,10 @@ public static String unEscapeString(
 /**
  * unEscapeString.
- * @param str str
- * @param escapeChar escapeChar
+ * @param str str.
+ * @param escapeChar escapeChar.
  * @param charsToEscape array of characters to unescape
- * @return escape string
+ * @return escape string.
  */
 public static String unEscapeString(String str, char escapeChar,
     char[] charsToEscape) {
@@ -834,8 +834,8 @@ private TraditionalBinaryPrefix(int bitShift) {
 /**
  * The TraditionalBinaryPrefix object corresponding to the symbol.
  *
- * @param symbol symbol
- * @return traditional binary prefix object
+ * @param symbol symbol.
+ * @return traditional binary prefix object.
  */
 public static TraditionalBinaryPrefix valueOf(char symbol) {
   symbol = Character.toUpperCase(symbol);
@@ -935,7 +935,7 @@ public static String long2String(long n, String unit, int decimalPlaces) {
 /**
  * Escapes HTML Special characters present in the string.
- * @param string param string
+ * @param string param string.
  * @return HTML Escaped String representation
  */
 public static String escapeHTML(String string) {
@@ -972,7 +972,7 @@ public static String escapeHTML(String string) {
 /**
  * a byte description of the given long interger value.
  *
- * @param len len
+ * @param len len.
  * @return a byte description of the given long interger value.
  */
 public static String byteDesc(long len) {
@@ -982,8 +982,8 @@ public static String byteDesc(long len) {
 /**
  * limitDecimalTo2.
  *
- * @param d double param
- * @return string value ("%.2f")
+ * @param d double param.
+ * @return string value ("%.2f").
  * @deprecated use StringUtils.format("%.2f", d).
  */
 @Deprecated
 public static String limitDecimalTo2(double d) {
@@ -996,7 +996,7 @@
  *
  * @param separator Separator to join with.
  * @param strings Strings to join.
- * @return join string
+ * @return join string.
  */
 public static String join(CharSequence separator, Iterable<?> strings) {
   Iterator<?> i = strings.iterator();
@@ -1092,8 +1092,8 @@ public static String replaceTokens(String template, Pattern pattern,
 /**
  * Get stack trace for a given thread.
- * @param t thread
- * @return stack trace string
+ * @param t thread.
+ * @return stack trace string.
  */
 public static String getStackTrace(Thread t) {
   final StackTraceElement[] stackTrace = t.getStackTrace();
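
A sketch of the TraditionalBinaryPrefix round trip documented above
(illustrative; the rendered string is approximate):

    // string2long accepts the traditional binary suffixes: "128k" -> 128 * 1024.
    long bytes = StringUtils.TraditionalBinaryPrefix.string2long("128k");
    // byteDesc renders it back in human-readable form (roughly "128 KB").
    String human = StringUtils.byteDesc(bytes);
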
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Time.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Time.java
index 4c6db79f64f1c..f0ce85bbac873 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Time.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Time.java
@@ -82,7 +82,7 @@ public static long monotonicNowNanos() {
 /**
  * Convert time in millisecond to human readable format.
  *
- * @param millis millisecond
+ * @param millis millisecond.
  * @return a human readable string for the input time
  */
 public static String formatTime(long millis) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
index b526861f45741..63c275a1b06bd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
@@ -82,7 +82,7 @@ public interface Tool extends Configurable {
  *
  * @param args command specific arguments.
  * @return exit code.
- * @throws Exception command exception
+ * @throws Exception command exception.
  */
 int run(String [] args) throws Exception;
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java
index bfb0401583f3b..b2b57f233a7b2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java
@@ -57,7 +57,7 @@ public class ToolRunner {
  * @param tool Tool to run.
  * @param args command-line arguments to the tool.
  * @return exit code of the {@link Tool#run(String[])} method.
- * @throws Exception Exception
+ * @throws Exception Exception.
  */
 public static int run(Configuration conf, Tool tool, String[] args)
     throws Exception{
@@ -90,7 +90,7 @@ public static int run(Configuration conf, Tool tool, String[] args)
  * @param tool Tool to run.
  * @param args command-line arguments to the tool.
  * @return exit code of the {@link Tool#run(String[])} method.
- * @throws Exception exception
+ * @throws Exception exception.
  */
 public static int run(Tool tool, String[] args)
     throws Exception{
@@ -111,7 +111,7 @@ public static void printGenericCommandUsage(PrintStream out) {
  * Print out a prompt to the user, and return true if the user
  * responds with "y" or "yes". (case insensitive).
  *
- * @param prompt prompt
+ * @param prompt prompt.
  * @throws IOException raised on errors performing I/O.
  * @return if the user
  * responds with "y" or "yes". (case insensitive) true,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/XMLUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/XMLUtils.java
index 928c15452bcf8..e2b9e414ad33b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/XMLUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/XMLUtils.java
@@ -40,7 +40,7 @@ public class XMLUtils {
  * @param xml input xml data
  * @param out output
  * @throws TransformerConfigurationException synopsis signals a problem
- * creating a transformer object
+ * creating a transformer object.
  * @throws TransformerException this is used for throwing processor
  * exceptions before the processing has started.
  */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
index 5b642e8a8292a..17d8233342743 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
@@ -88,7 +88,7 @@ public static int removeSpecificPerms(int perms, int remove) {
  * Parse comma separated list of ACL entries to secure generated nodes, e.g.
  * sasl:hdfs/host1@MY.DOMAIN:cdrwa,sasl:hdfs/host2@MY.DOMAIN:cdrwa
  *
- * @param aclString aclString
+ * @param aclString aclString.
  * @return ACL list
  * @throws BadAclFormatException if an ACL is invalid
  */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java
index be97b55bbc25b..e2299365f8e22 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java
@@ -100,8 +100,8 @@ public Key(byte[] value, double weight) {
 }
 
 /**
- * @param value value
- * @param weight weight
+ * @param value value.
+ * @param weight weight.
  */
 public void set(byte[] value, double weight) {
   if (value == null) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
index fce21dab9413c..35ec9115a5f4c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGet.java
@@ -54,9 +54,9 @@ R get(long timeout, TimeUnit unit)
 class Util {
 /**
  * Use {@link #get(long, TimeUnit)} timeout parameters to wait.
- * @param obj object
- * @param timeout timeout
- * @param unit unit
+ * @param obj object.
+ * @param timeout timeout.
+ * @param unit unit.
  * @throws InterruptedException if the thread is interrupted.
  */
 public static void wait(Object obj, long timeout, TimeUnit unit)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
index 54f0fb2a74604..7d35977e5b00d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
@@ -84,10 +84,10 @@ public void close() {
 /**
  * Utility method to fetch the ZK ACLs from the configuration.
  *
- * @param conf configuration
+ * @param conf configuration.
  * @throws java.io.IOException if the Zookeeper ACLs configuration file
  * cannot be read
- * @return acl list
+ * @return acl list.
  */
 public static List<ACL> getZKAcls(Configuration conf) throws IOException {
   // Parse authentication from configuration.
@@ -106,11 +106,11 @@ public static List<ACL> getZKAcls(Configuration conf) throws IOException {
 /**
  * Utility method to fetch ZK auth info from the configuration.
  *
- * @param conf configuration
+ * @param conf configuration.
  * @throws java.io.IOException if the Zookeeper ACLs configuration file
  * cannot be read
  * @throws ZKUtil.BadAuthFormatException if the auth format is invalid
- * @return ZKAuthInfo List
+ * @return ZKAuthInfo List.
  */
 public static List<ZKUtil.ZKAuthInfo> getZKAuths(Configuration conf)
     throws IOException {
@@ -192,7 +192,7 @@ public byte[] getData(final String path) throws Exception {
 /**
  * Get the data in a ZNode.
  * @param path Path of the ZNode.
- * @param stat stat
+ * @param stat stat.
  * @return The data in the ZNode.
  * @throws Exception If it cannot contact Zookeeper.
  */
@@ -371,8 +371,8 @@ public void safeCreate(String path, byte[] data, List<ACL> acl,
  * Deletes the path. Checks for existence of path as well.
  *
  * @param path Path to be deleted.
- * @param fencingNodePath fencingNodePath
- * @param fencingACL fencingACL
+ * @param fencingNodePath fencingNodePath.
+ * @param fencingACL fencingACL.
  * @throws Exception if any problem occurs while performing deletion.
  */
 public void safeDelete(final String path, List<ACL> fencingACL,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java
index 89c4568a56075..67299ef96aec6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/CommonCallableSupplier.java
@@ -91,7 +91,7 @@ public static <T> CompletableFuture<T> submit(final Executor executor,
  * return immediately.
  *
  * @param futures list of futures.
- * @param <T> Generics Type T
+ * @param <T> Generics Type T.
  * @throws IOException if one of the called futures raised an IOE.
  * @throws RuntimeException if one of the futures raised one.
  */
@@ -108,7 +108,7 @@ public static <T> void waitForCompletion(
 /**
  * Wait for a single of future to complete, extracting IOEs afterwards.
  *
- * @param <T> Generics Type T
+ * @param <T> Generics Type T.
  * @param future future to wait for.
  * @throws IOException if one of the called futures raised an IOE.
  * @throws RuntimeException if one of the futures raised one.
  */
@@ -128,7 +128,7 @@ public static <T> void waitForCompletion(final CompletableFuture<T> future)
 /**
  * Wait for a single of future to complete, ignoring exceptions raised.
  * @param future future to wait for.
- * @param <T> Generics Type T
+ * @param <T> Generics Type T.
  */
 public static <T> void waitForCompletionIgnoringExceptions(
     @Nullable final CompletableFuture<T> future) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java
index bc4c91ae9c078..d2f7742d3d988 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java
@@ -99,7 +99,7 @@ public static <T> RemoteIterator<T> remoteIteratorFromSingleton(
 /**
  * Create a remote iterator from a java.util.Iterator.
  * @param <T> type
- * @param iterator iterator
+ * @param iterator iterator.
  * @return a remote iterator
  */
 public static <T> RemoteIterator<T> remoteIteratorFromIterator(
@@ -111,7 +111,7 @@
 /**
  * Create a remote iterator from a java.util.Iterable -e.g. a list
  * or other collection.
  * @param <T> type
- * @param iterable iterable
+ * @param iterable iterable.
  * @return a remote iterator
  */
 public static <T> RemoteIterator<T> remoteIteratorFromIterable(
@@ -122,7 +122,7 @@
 /**
  * Create a remote iterator from an array.
  * @param <T> type
- * @param array array
+ * @param array array.
  * @return a remote iterator
  */
 public static <T> RemoteIterator<T> remoteIteratorFromArray(T[] array) {
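
Finally, an illustrative sketch of the RemoteIterators factory methods
documented above (RemoteIterator calls may throw IOException):

    RemoteIterator<String> it =
        RemoteIterators.remoteIteratorFromArray(new String[] {"a", "b", "c"});
    while (it.hasNext()) {
      System.out.println(it.next());
    }
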