From 8aca0ee7f653563a8b434e18bcdc8e5cb9096f24 Mon Sep 17 00:00:00 2001
From: fanshilun
Date: Sat, 8 Feb 2025 19:21:14 +0800
Subject: [PATCH 1/5] MAPREDUCE-7421. [JDK17] Upgrade Junit 4 to 5 in hadoop-mapreduce-client-jobclient Part1.

---
 .../hadoop/conf/TestNoDefaultsJobConf.java | 13 +-
 .../java/org/apache/hadoop/fs/DFSCIOTest.java | 2 +-
 .../java/org/apache/hadoop/fs/TestDFSIO.java | 29 ++--
 .../org/apache/hadoop/fs/TestFileSystem.java | 12 +-
 .../java/org/apache/hadoop/fs/TestJHLA.java | 10 +-
 .../org/apache/hadoop/fs/slive/TestSlive.java | 10 +-
 .../org/apache/hadoop/hdfs/TestNNBench.java | 47 ++++---
 .../io/TestSequenceFileMergeProgress.java | 4 +-
 .../hadoop/ipc/TestMRCJCSocketFactory.java | 18 +--
 .../mapred/ClusterMapReduceTestCase.java | 8 +-
 .../apache/hadoop/mapred/HadoopTestCase.java | 8 +-
 .../org/apache/hadoop/mapred/MRCaching.java | 8 +-
 .../hadoop/mapred/NotificationTestCase.java | 16 +--
 .../apache/hadoop/mapred/TestBadRecords.java | 12 +-
 .../hadoop/mapred/TestClientRedirect.java | 14 +-
 .../mapred/TestClientServiceDelegate.java | 82 +++++------
 .../mapred/TestClusterMapReduceTestCase.java | 14 +-
 .../org/apache/hadoop/mapred/TestCollect.java | 2 +-
 .../mapred/TestCombineFileInputFormat.java | 6 +-
 .../mapred/TestCombineOutputCollector.java | 2 +-
 .../TestCombineSequenceFileInputFormat.java | 20 +--
 .../mapred/TestCombineTextInputFormat.java | 29 ++--
 .../mapred/TestCommandLineJobSubmission.java | 12 +-
 .../apache/hadoop/mapred/TestComparators.java | 14 +-
 .../TestConcatenatedCompressedInput.java | 86 ++++++------
 .../hadoop/mapred/TestFieldSelection.java | 4 +-
 .../mapred/TestFileInputFormatPathFilter.java | 12 +-
 .../hadoop/mapred/TestFileOutputFormat.java | 4 +-
 .../mapred/TestFixedLengthInputFormat.java | 71 +++++-----
 .../hadoop/mapred/TestGetSplitHosts.java | 4 +-
 .../org/apache/hadoop/mapred/TestIFile.java | 4 +-
 .../hadoop/mapred/TestIFileStreams.java | 10 +-
 .../apache/hadoop/mapred/TestInputPath.java | 4 +-
 .../hadoop/mapred/TestJavaSerialization.java | 20 +--
 .../apache/hadoop/mapred/TestJobCleanup.java | 40 +++---
 .../apache/hadoop/mapred/TestJobClients.java | 12 +-
 .../apache/hadoop/mapred/TestJobCounters.java | 36 ++---
 .../org/apache/hadoop/mapred/TestJobName.java | 10 +-
 .../hadoop/mapred/TestJobSysDirWithDFS.java | 8 +-
 .../mapred/TestKeyValueTextInputFormat.java | 36 ++---
 .../apache/hadoop/mapred/TestLazyOutput.java | 4 +-
 .../mapred/TestLineRecordReaderJobs.java | 4 +-
 .../hadoop/mapred/TestLocalJobSubmission.java | 26 ++--
 .../mapred/TestMRCJCFileInputFormat.java | 22 +--
 .../mapred/TestMRCJCFileOutputCommitter.java | 26 ++--
 .../hadoop/mapred/TestMRCJCJobClient.java | 4 +-
 .../hadoop/mapred/TestMRCJCJobConf.java | 4 +-
 .../mapred/TestMROpportunisticMaps.java | 4 +-
 .../mapred/TestMRTimelineEventHandling.java | 128 +++++++++---------
 .../hadoop/mapred/TestMapOutputType.java | 12 +-
 .../apache/hadoop/mapred/TestMapProgress.java | 8 +-
 .../org/apache/hadoop/mapred/TestMapRed.java | 34 ++---
 .../org/apache/hadoop/mapred/TestMerge.java | 4 +-
 .../hadoop/mapred/TestMiniMRBringup.java | 6 +-
 .../hadoop/mapred/TestMiniMRChildTask.java | 46 +++----
 .../hadoop/mapred/TestMiniMRClasspath.java | 8 +-
 .../mapred/TestMiniMRClientCluster.java | 72 +++++-----
 .../hadoop/mapred/TestMiniMRDFSCaching.java | 8 +-
 .../TestMiniMRWithDFSWithDistinctUsers.java | 14 +-
 .../mapred/TestMultiFileInputFormat.java | 6 +-
 .../hadoop/mapred/TestMultiFileSplit.java | 6 +-
 .../mapred/TestMultipleLevelCaching.java | 20 +--
 .../mapred/TestMultipleTextOutputFormat.java | 4 +-
.../hadoop/mapred/TestNetworkedJob.java | 36 +++-- .../mapred/TestOldCombinerGrouping.java | 22 +-- .../mapred/TestQueueConfigurationParser.java | 8 +- .../apache/hadoop/mapred/TestReduceFetch.java | 16 +-- .../mapred/TestReduceFetchFromPartialMem.java | 26 ++-- .../apache/hadoop/mapred/TestReduceTask.java | 6 +- .../apache/hadoop/mapred/TestReporter.java | 37 +++-- .../mapred/TestResourceMgrDelegate.java | 24 ++-- .../TestSequenceFileAsBinaryInputFormat.java | 20 +-- .../TestSequenceFileAsBinaryOutputFormat.java | 48 +++---- .../TestSequenceFileAsTextInputFormat.java | 12 +- .../mapred/TestSequenceFileInputFilter.java | 4 +- .../mapred/TestSequenceFileInputFormat.java | 10 +- .../hadoop/mapred/TestSortedRanges.java | 4 +- .../TestSpecialCharactersInOutputPath.java | 8 +- .../mapred/TestStatisticsCollector.java | 8 +- .../apache/hadoop/mapred/TestTaskCommit.java | 32 ++--- .../mapred/TestTaskPerformanceSplits.java | 36 ++--- .../apache/hadoop/mapred/TestTaskStatus.java | 88 ++++++------ .../hadoop/mapred/TestTextInputFormat.java | 106 ++++++++------- .../hadoop/mapred/TestTextOutputFormat.java | 6 +- .../mapred/TestUserDefinedCounters.java | 6 +- .../org/apache/hadoop/mapred/TestUtils.java | 4 +- .../hadoop/mapred/TestWritableJobConf.java | 4 +- .../apache/hadoop/mapred/TestYARNRunner.java | 104 +++++++------- .../mapred/jobcontrol/TestJobControl.java | 33 +++-- .../jobcontrol/TestLocalJobControl.java | 6 +- .../hadoop/mapred/join/TestDatamerge.java | 37 +++-- .../hadoop/mapred/join/TestTupleWritable.java | 42 +++--- .../TestWrappedRecordReaderClassloader.java | 18 +-- .../apache/hadoop/mapred/lib/TestChain.java | 2 +- .../hadoop/mapred/lib/TestChainMapReduce.java | 12 +- .../mapred/lib/TestDelegatingInputFormat.java | 8 +- .../lib/TestKeyFieldBasedComparator.java | 16 +-- .../lib/TestKeyFieldBasedPartitioner.java | 12 +- .../mapred/lib/TestLineInputFormat.java | 22 +-- .../hadoop/mapred/lib/TestMultipleInputs.java | 4 +- .../mapred/lib/TestMultipleOutputs.java | 40 +++--- .../lib/TestMultithreadedMapRunner.java | 8 +- .../mapred/lib/aggregate/TestAggregates.java | 11 +- .../mapred/lib/db/TestConstructQuery.java | 9 +- .../mapred/pipes/TestPipeApplication.java | 67 +++++---- .../apache/hadoop/mapred/pipes/TestPipes.java | 80 ++++++----- .../pipes/TestPipesNonJavaInputFormat.java | 12 +- 107 files changed, 1191 insertions(+), 1146 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java index e2d75ab268a19..4e63121dd6970 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java @@ -29,7 +29,7 @@ import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.mapred.TextOutputFormat; import org.apache.hadoop.mapred.Utils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.BufferedReader; import java.io.IOException; @@ -39,8 +39,7 @@ import java.io.OutputStreamWriter; import java.io.Writer; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.*; /** * This testcase 
tests that a JobConf without default values submits jobs @@ -56,10 +55,10 @@ public TestNoDefaultsJobConf() throws IOException { @Test public void testNoDefaults() throws Exception { JobConf configuration = new JobConf(); - assertTrue(configuration.get("hadoop.tmp.dir", null) != null); + assertNotNull(configuration.get("hadoop.tmp.dir", null)); configuration = new JobConf(false); - assertTrue(configuration.get("hadoop.tmp.dir", null) == null); + assertNull(configuration.get("hadoop.tmp.dir", null)); Path inDir = new Path("testing/jobconf/input"); @@ -96,8 +95,8 @@ public void testNoDefaults() throws Exception { JobClient.runJob(conf); Path[] outputFiles = FileUtil.stat2Paths( - getFileSystem().listStatus(outDir, - new Utils.OutputFileUtils.OutputFilesFilter())); + getFileSystem().listStatus(outDir, + new Utils.OutputFileUtils.OutputFilesFilter())); if (outputFiles.length > 0) { InputStream is = getFileSystem().open(outputFiles[0]); BufferedReader reader = new BufferedReader(new InputStreamReader(is)); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java index d718556a05437..b96a64d56041e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java @@ -35,7 +35,7 @@ import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.mapred.*; import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java index 6ee143dcf4127..9b9fdacbe577a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java @@ -66,9 +66,10 @@ import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -226,7 +227,7 @@ private static Path getDataDir(Configuration conf) { private static MiniDFSCluster cluster; private static TestDFSIO bench; - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { bench = new TestDFSIO(); bench.getConf().setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1); @@ -241,7 +242,7 @@ public static void beforeClass() throws Exception { testWrite(); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { if(cluster == null) return; @@ -256,14 +257,16 @@ public static void testWrite() throws Exception { bench.analyzeResult(fs, TestType.TEST_TYPE_WRITE, 
execTime); } - @Test (timeout = 10000) + @Test + @Timeout(value = 10) public void testRead() throws Exception { FileSystem fs = cluster.getFileSystem(); long execTime = bench.readTest(fs); bench.analyzeResult(fs, TestType.TEST_TYPE_READ, execTime); } - @Test (timeout = 10000) + @Test + @Timeout(value = 10) public void testReadRandom() throws Exception { FileSystem fs = cluster.getFileSystem(); bench.getConf().setLong("test.io.skip.size", 0); @@ -271,7 +274,8 @@ public void testReadRandom() throws Exception { bench.analyzeResult(fs, TestType.TEST_TYPE_READ_RANDOM, execTime); } - @Test (timeout = 10000) + @Test + @Timeout(value = 10) public void testReadBackward() throws Exception { FileSystem fs = cluster.getFileSystem(); bench.getConf().setLong("test.io.skip.size", -DEFAULT_BUFFER_SIZE); @@ -279,7 +283,8 @@ public void testReadBackward() throws Exception { bench.analyzeResult(fs, TestType.TEST_TYPE_READ_BACKWARD, execTime); } - @Test (timeout = 10000) + @Test + @Timeout(value = 10) public void testReadSkip() throws Exception { FileSystem fs = cluster.getFileSystem(); bench.getConf().setLong("test.io.skip.size", 1); @@ -287,14 +292,16 @@ public void testReadSkip() throws Exception { bench.analyzeResult(fs, TestType.TEST_TYPE_READ_SKIP, execTime); } - @Test (timeout = 10000) + @Test + @Timeout(value = 10) public void testAppend() throws Exception { FileSystem fs = cluster.getFileSystem(); long execTime = bench.appendTest(fs); bench.analyzeResult(fs, TestType.TEST_TYPE_APPEND, execTime); } - @Test (timeout = 60000) + @Test + @Timeout(value = 60) public void testTruncate() throws Exception { FileSystem fs = cluster.getFileSystem(); bench.createControlFile(fs, DEFAULT_NR_BYTES / 2, DEFAULT_NR_FILES); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java index 075ab9a3f3ca7..2d69f4819dcbf 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java @@ -47,16 +47,16 @@ import org.apache.hadoop.mapred.lib.LongSumReducer; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.fail; public class TestFileSystem { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java index 
9334a8a0dfaf6..019b358031774 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java @@ -23,9 +23,9 @@ import java.io.OutputStreamWriter; import java.io.File; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,7 +40,7 @@ public class TestJHLA { private String historyLog = System.getProperty("test.build.data", "build/test/data") + "/history/test.log"; - @Before + @BeforeEach public void setUp() throws Exception { File logFile = new File(historyLog); if(!logFile.getParentFile().exists()) @@ -121,7 +121,7 @@ public void setUp() throws Exception { writer.close(); } - @After + @AfterEach public void tearDown() throws Exception { File logFile = new File(historyLog); if(!logFile.delete()) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java index 218cae8bf7bea..61b4f718f4ccf 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java @@ -19,8 +19,8 @@ package org.apache.hadoop.fs.slive; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.DataInputStream; import java.io.File; @@ -40,8 +40,8 @@ import org.apache.hadoop.fs.slive.DataVerifier.VerifyOutput; import org.apache.hadoop.fs.slive.DataWriter.GenerateOutput; import org.apache.hadoop.util.ToolRunner; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -194,7 +194,7 @@ private ConfigExtractor getTestConfig(boolean sleep) throws Exception { return extractor; } - @Before + @BeforeEach public void ensureDeleted() throws Exception { rDelete(getTestFile()); rDelete(getTestDir()); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java index 0273613f9fb2f..15e92a0385ecc 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hdfs; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; @@ -31,8 +31,9 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.Time; import org.apache.hadoop.util.ToolRunner; -import org.junit.After; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestNNBench extends HadoopTestCase { private static final String BASE_DIR = @@ -45,39 +46,42 @@ public TestNNBench() throws IOException { super(LOCAL_MR, LOCAL_FS, 1, 1); } - @After + @AfterEach public void tearDown() throws Exception { getFileSystem().delete(new Path(BASE_DIR), true); getFileSystem().delete(new Path(NNBench.DEFAULT_RES_FILE_NAME), true); super.tearDown(); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testNNBenchCreateReadAndDelete() throws Exception { runNNBench(createJobConf(), "create_write"); Path path = new Path(BASE_DIR + "/data/file_0_0"); - assertTrue("create_write should create the file", - getFileSystem().exists(path)); + assertTrue( + getFileSystem().exists(path), "create_write should create the file"); runNNBench(createJobConf(), "open_read"); runNNBench(createJobConf(), "delete"); - assertFalse("Delete operation should delete the file", - getFileSystem().exists(path)); + assertFalse( + getFileSystem().exists(path), "Delete operation should delete the file"); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testNNBenchCreateAndRename() throws Exception { runNNBench(createJobConf(), "create_write"); Path path = new Path(BASE_DIR + "/data/file_0_0"); - assertTrue("create_write should create the file", - getFileSystem().exists(path)); + assertTrue( + getFileSystem().exists(path), "create_write should create the file"); runNNBench(createJobConf(), "rename"); Path renamedPath = new Path(BASE_DIR + "/data/file_0_r_0"); - assertFalse("Rename should rename the file", getFileSystem().exists(path)); - assertTrue("Rename should rename the file", - getFileSystem().exists(renamedPath)); + assertFalse(getFileSystem().exists(path), "Rename should rename the file"); + assertTrue( + getFileSystem().exists(renamedPath), "Rename should rename the file"); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testNNBenchCreateControlFilesWithPool() throws Exception { runNNBench(createJobConf(), "create_write", BASE_DIR, "5"); Path path = new Path(BASE_DIR, CONTROL_DIR_NAME); @@ -86,7 +90,8 @@ public void testNNBenchCreateControlFilesWithPool() throws Exception { assertEquals(5, fileStatuses.length); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testNNBenchCrossCluster() throws Exception { MiniDFSCluster dfsCluster = new MiniDFSCluster.Builder(new JobConf()) .numDataNodes(1).build(); @@ -96,8 +101,8 @@ public void testNNBenchCrossCluster() throws Exception { runNNBench(createJobConf(), "create_write", baseDir); Path path = new Path(BASE_DIR + "/data/file_0_0"); - assertTrue("create_write should create the file", - dfsCluster.getFileSystem().exists(path)); + assertTrue( + dfsCluster.getFileSystem().exists(path), "create_write should create the file"); dfsCluster.shutdown(); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java index 4f1318c044103..b4e424bac9516 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java @@ -28,8 +28,8 @@ import org.apache.hadoop.mapred.*; import org.slf4j.Logger; -import org.junit.Test; -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestSequenceFileMergeProgress { private static final Logger LOG = FileInputFormat.LOG; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java index 123947a39a302..4766f42b4458c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java @@ -34,8 +34,8 @@ import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster; import org.apache.hadoop.net.StandardSocketFactory; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; /** * This class checks that RPCs can use specialized socket factories. @@ -56,13 +56,13 @@ public void testSocketFactory() throws IOException { // Get a reference to its DFS directly FileSystem fs = cluster.getFileSystem(); - Assert.assertTrue(fs instanceof DistributedFileSystem); + Assertions.assertTrue(fs instanceof DistributedFileSystem); DistributedFileSystem directDfs = (DistributedFileSystem) fs; Configuration cconf = getCustomSocketConfigs(nameNodePort); fs = FileSystem.get(cconf); - Assert.assertTrue(fs instanceof DistributedFileSystem); + Assertions.assertTrue(fs instanceof DistributedFileSystem); DistributedFileSystem dfs = (DistributedFileSystem) fs; JobClient client = null; @@ -72,12 +72,12 @@ public void testSocketFactory() throws IOException { // could we test Client-DataNode connections? 
Path filePath = new Path("/dir"); - Assert.assertFalse(directDfs.exists(filePath)); - Assert.assertFalse(dfs.exists(filePath)); + Assertions.assertFalse(directDfs.exists(filePath)); + Assertions.assertFalse(dfs.exists(filePath)); directDfs.mkdirs(filePath); - Assert.assertTrue(directDfs.exists(filePath)); - Assert.assertTrue(dfs.exists(filePath)); + Assertions.assertTrue(directDfs.exists(filePath)); + Assertions.assertTrue(dfs.exists(filePath)); // This will test RPC to a Resource Manager fs = FileSystem.get(sconf); @@ -95,7 +95,7 @@ public void testSocketFactory() throws IOException { client = new JobClient(jconf); JobStatus[] jobs = client.jobsToComplete(); - Assert.assertTrue(jobs.length == 0); + Assertions.assertTrue(jobs.length == 0); } finally { closeClient(client); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java index f16b8a0f18fc2..b3e1998803ae5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java @@ -22,8 +22,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import org.junit.Before; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import java.io.File; import java.io.IOException; @@ -64,7 +64,7 @@ protected static void setupClassBase(Class testClass) throws Exception { * * @throws Exception */ - @Before + @BeforeEach public void setUp() throws Exception { startCluster(true, null); } @@ -125,7 +125,7 @@ protected void stopCluster() throws Exception { * * @throws Exception */ - @After + @AfterEach public void tearDown() throws Exception { stopCluster(); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java index 277c0fd4b0ac5..ef7b2de903ac0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java @@ -21,8 +21,8 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.mapreduce.MRConfig; -import org.junit.After; -import org.junit.Before; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import java.io.IOException; @@ -139,7 +139,7 @@ public boolean isLocalFS() { * * @throws Exception */ - @Before + @BeforeEach public void setUp() throws Exception { if (localFS) { fileSystem = FileSystem.getLocal(new JobConf()); @@ -163,7 +163,7 @@ public void setUp() throws Exception { * * @throws Exception */ - @After + @AfterEach public void tearDown() throws Exception { try { if (mrCluster != null) { diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java index a71550bce8a44..307ac53521058 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java @@ -32,7 +32,7 @@ import java.net.URI; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; public class MRCaching { static String testStr = "This is a test file " + "used for testing caching " @@ -299,13 +299,13 @@ private static void validateCacheFileSizes(Configuration job, String configValues = job.get(configKey, ""); System.out.println(configKey + " -> " + configValues); String[] realSizes = StringUtils.getStrings(configValues); - Assert.assertEquals("Number of files for "+ configKey, - expectedSizes.length, realSizes.length); + Assertions.assertEquals( + expectedSizes.length, realSizes.length, "Number of files for "+ configKey); for (int i=0; i < expectedSizes.length; ++i) { long actual = Long.valueOf(realSizes[i]); long expected = expectedSizes[i]; - Assert.assertEquals("File "+ i +" for "+ configKey, expected, actual); + Assertions.assertEquals(expected, actual, "File "+ i +" for "+ configKey); } } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java index 8acd015ab0987..26feba37a0672 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java @@ -35,11 +35,11 @@ import java.io.IOException; import java.io.DataOutputStream; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertEquals; -import org.junit.Before; -import org.junit.After; -import org.junit.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; /** @@ -132,7 +132,7 @@ protected void verifyQuery(String query, String expected) return; } failureCounter++; - assertTrue("The request (" + query + ") does not contain " + expected, false); + assertTrue(false, "The request (" + query + ") does not contain " + expected); } } @@ -149,13 +149,13 @@ protected JobConf createJobConf() { return conf; } - @Before + @BeforeEach public void setUp() throws Exception { super.setUp(); startHttpServer(); } - @After + @AfterEach public void tearDown() throws Exception { stopHttpServer(); NotificationServlet.counter = 0; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java 
index 1b39583bd729f..a0bc9dc1e7d35 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java @@ -38,15 +38,15 @@ import org.apache.hadoop.mapreduce.TaskCounter; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.BeforeClass; +import org.junit.jupiter.api.BeforeAll; import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNotNull; @Ignore public class TestBadRecords extends ClusterMapReduceTestCase { @@ -61,7 +61,7 @@ public class TestBadRecords extends ClusterMapReduceTestCase { private List input; - @BeforeClass + @BeforeAll public static void setupClass() throws Exception { setupClassBase(TestBadRecords.class); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java index f034f34742a6e..cc4988534a91e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java @@ -146,8 +146,8 @@ import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -200,7 +200,7 @@ public void testRedirect() throws Exception { org.apache.hadoop.mapreduce.Counters counters = cluster.getJob(jobID).getCounters(); validateCounters(counters); - Assert.assertTrue(amContact); + Assertions.assertTrue(amContact); LOG.info("Sleeping for 5 seconds before stop for" + " the client socket to not get EOF immediately.."); @@ -218,7 +218,7 @@ public void testRedirect() throws Exception { // Same client //results are returned from fake (not started job) counters = cluster.getJob(jobID).getCounters(); - Assert.assertEquals(0, counters.countCounters()); + Assertions.assertEquals(0, counters.countCounters()); Job job = cluster.getJob(jobID); org.apache.hadoop.mapreduce.TaskID taskId = new org.apache.hadoop.mapreduce.TaskID(jobID, TaskType.MAP, 0); @@ -242,7 +242,7 @@ public void testRedirect() throws Exception { counters = cluster.getJob(jobID).getCounters(); validateCounters(counters); - Assert.assertTrue(amContact); + Assertions.assertTrue(amContact); // Stop the AM. It is not even restarting. So it should be treated as // completed. 
@@ -251,7 +251,7 @@ public void testRedirect() throws Exception { // Same client counters = cluster.getJob(jobID).getCounters(); validateCounters(counters); - Assert.assertTrue(hsContact); + Assertions.assertTrue(hsContact); rmService.stop(); historyService.stop(); @@ -267,7 +267,7 @@ private void validateCounters(org.apache.hadoop.mapreduce.Counters counters) { LOG.info("Counter is " + itc.next().getDisplayName()); } } - Assert.assertEquals(1, counters.countCounters()); + Assertions.assertEquals(1, counters.countCounters()); } class RMService extends AbstractService implements ApplicationClientProtocol { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java index ef460e8d74d47..49956382a5e28 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java @@ -58,8 +58,8 @@ import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.Records; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; @@ -94,7 +94,7 @@ public void testUnknownAppInRM() throws Exception { historyServerProxy, getRMDelegate()); JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertNotNull(jobStatus); + Assertions.assertNotNull(jobStatus); } @Test @@ -113,9 +113,9 @@ public void testRemoteExceptionFromHistoryServer() throws Exception { try { clientServiceDelegate.getJobStatus(oldJobId); - Assert.fail("Invoke should throw exception after retries."); + Assertions.fail("Invoke should throw exception after retries."); } catch (IOException e) { - Assert.assertTrue(e.getMessage().contains( + Assertions.assertTrue(e.getMessage().contains( "Job ID doesnot Exist")); } } @@ -136,7 +136,7 @@ public void testRetriesOnConnectionFailure() throws Exception { historyServerProxy, rm); JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertNotNull(jobStatus); + Assertions.assertNotNull(jobStatus); verify(historyServerProxy, times(3)).getJobReport( any(GetJobReportRequest.class)); } @@ -175,9 +175,9 @@ MRClientProtocol instantiateAMProxy( JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertNotNull(jobStatus); + Assertions.assertNotNull(jobStatus); // assert maxClientRetry is not decremented. 
- Assert.assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, + Assertions.assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES), clientServiceDelegate .getMaxClientRetry()); verify(amProxy, times(5)).getJobReport(any(GetJobReportRequest.class)); @@ -213,14 +213,14 @@ MRClientProtocol instantiateAMProxy( try { clientServiceDelegate.getJobStatus(oldJobId); - Assert.fail("Exception should be thrown upon AuthorizationException"); + Assertions.fail("Exception should be thrown upon AuthorizationException"); } catch (IOException e) { - Assert.assertEquals(AuthorizationException.class.getName() + ": Denied", + Assertions.assertEquals(AuthorizationException.class.getName() + ": Denied", e.getMessage()); } // assert maxClientRetry is not decremented. - Assert.assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, + Assertions.assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES), clientServiceDelegate .getMaxClientRetry()); verify(amProxy, times(1)).getJobReport(any(GetJobReportRequest.class)); @@ -232,8 +232,8 @@ public void testHistoryServerNotConfigured() throws Exception { ClientServiceDelegate clientServiceDelegate = getClientServiceDelegate( null, getRMDelegate()); JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertEquals("N/A", jobStatus.getUsername()); - Assert.assertEquals(JobStatus.State.PREP, jobStatus.getState()); + Assertions.assertEquals("N/A", jobStatus.getUsername()); + Assertions.assertEquals(JobStatus.State.PREP, jobStatus.getState()); //RM has app report and job History Server is not configured ResourceMgrDelegate rm = mock(ResourceMgrDelegate.class); @@ -243,8 +243,8 @@ public void testHistoryServerNotConfigured() throws Exception { clientServiceDelegate = getClientServiceDelegate(null, rm); jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertEquals(applicationReport.getUser(), jobStatus.getUsername()); - Assert.assertEquals(JobStatus.State.SUCCEEDED, jobStatus.getState()); + Assertions.assertEquals(applicationReport.getUser(), jobStatus.getUsername()); + Assertions.assertEquals(JobStatus.State.SUCCEEDED, jobStatus.getState()); } @Test @@ -259,11 +259,11 @@ public void testJobReportFromHistoryServer() throws Exception { historyServerProxy, rm); JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertNotNull(jobStatus); - Assert.assertEquals("TestJobFilePath", jobStatus.getJobFile()); - Assert.assertEquals("http://TestTrackingUrl", jobStatus.getTrackingUrl()); - Assert.assertEquals(1.0f, jobStatus.getMapProgress(), 0.0f); - Assert.assertEquals(1.0f, jobStatus.getReduceProgress(), 0.0f); + Assertions.assertNotNull(jobStatus); + Assertions.assertEquals("TestJobFilePath", jobStatus.getJobFile()); + Assertions.assertEquals("http://TestTrackingUrl", jobStatus.getTrackingUrl()); + Assertions.assertEquals(1.0f, jobStatus.getMapProgress(), 0.0f); + Assertions.assertEquals(1.0f, jobStatus.getReduceProgress(), 0.0f); } @Test @@ -278,8 +278,8 @@ public void testCountersFromHistoryServer() throws Exception { historyServerProxy, rm); Counters counters = TypeConverter.toYarn(clientServiceDelegate.getJobCounters(oldJobId)); - Assert.assertNotNull(counters); - Assert.assertEquals(1001, counters.getCounterGroup("dummyCounters").getCounter("dummyCounter").getValue()); + Assertions.assertNotNull(counters); + Assertions.assertEquals(1001, 
counters.getCounterGroup("dummyCounters").getCounter("dummyCounter").getValue()); } @Test @@ -338,16 +338,16 @@ public void testReconnectOnAMRestart() throws IOException { clientServiceDelegate).instantiateAMProxy(any(InetSocketAddress.class)); JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertNotNull(jobStatus); - Assert.assertEquals("jobName-firstGen", jobStatus.getJobName()); + Assertions.assertNotNull(jobStatus); + Assertions.assertEquals("jobName-firstGen", jobStatus.getJobName()); jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertNotNull(jobStatus); - Assert.assertEquals("jobName-secondGen", jobStatus.getJobName()); + Assertions.assertNotNull(jobStatus); + Assertions.assertEquals("jobName-secondGen", jobStatus.getJobName()); jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertNotNull(jobStatus); - Assert.assertEquals("jobName-secondGen", jobStatus.getJobName()); + Assertions.assertNotNull(jobStatus); + Assertions.assertEquals("jobName-secondGen", jobStatus.getJobName()); verify(clientServiceDelegate, times(2)).instantiateAMProxy( any(InetSocketAddress.class)); @@ -379,31 +379,31 @@ public void testAMAccessDisabled() throws IOException { historyServerProxy, rmDelegate)); JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertNotNull(jobStatus); - Assert.assertEquals("N/A", jobStatus.getJobName()); + Assertions.assertNotNull(jobStatus); + Assertions.assertEquals("N/A", jobStatus.getJobName()); verify(clientServiceDelegate, times(0)).instantiateAMProxy( any(InetSocketAddress.class)); // Should not reach AM even for second and third times too. jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertNotNull(jobStatus); - Assert.assertEquals("N/A", jobStatus.getJobName()); + Assertions.assertNotNull(jobStatus); + Assertions.assertEquals("N/A", jobStatus.getJobName()); verify(clientServiceDelegate, times(0)).instantiateAMProxy( any(InetSocketAddress.class)); jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertNotNull(jobStatus); - Assert.assertEquals("N/A", jobStatus.getJobName()); + Assertions.assertNotNull(jobStatus); + Assertions.assertEquals("N/A", jobStatus.getJobName()); verify(clientServiceDelegate, times(0)).instantiateAMProxy( any(InetSocketAddress.class)); // The third time around, app is completed, so should go to JHS JobStatus jobStatus1 = clientServiceDelegate.getJobStatus(oldJobId); - Assert.assertNotNull(jobStatus1); - Assert.assertEquals("TestJobFilePath", jobStatus1.getJobFile()); - Assert.assertEquals("http://TestTrackingUrl", jobStatus1.getTrackingUrl()); - Assert.assertEquals(1.0f, jobStatus1.getMapProgress(), 0.0f); - Assert.assertEquals(1.0f, jobStatus1.getReduceProgress(), 0.0f); + Assertions.assertNotNull(jobStatus1); + Assertions.assertEquals("TestJobFilePath", jobStatus1.getJobFile()); + Assertions.assertEquals("http://TestTrackingUrl", jobStatus1.getTrackingUrl()); + Assertions.assertEquals(1.0f, jobStatus1.getMapProgress(), 0.0f); + Assertions.assertEquals(1.0f, jobStatus1.getReduceProgress(), 0.0f); verify(clientServiceDelegate, times(0)).instantiateAMProxy( any(InetSocketAddress.class)); @@ -451,7 +451,7 @@ public void testRMDownRestoreForJobStatusBeforeGetAMReport() JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); verify(rmDelegate, times(3)).getApplicationReport( any(ApplicationId.class)); - Assert.assertNotNull(jobStatus); + Assertions.assertNotNull(jobStatus); } catch (YarnException e) { throw new 
IOException(e); } @@ -476,7 +476,7 @@ private void testRMDownForJobStatusBeforeGetAMReport(Configuration conf, conf, rmDelegate, oldJobId, historyServerProxy); try { clientServiceDelegate.getJobStatus(oldJobId); - Assert.fail("It should throw exception after retries"); + Assertions.fail("It should throw exception after retries"); } catch (IOException e) { System.out.println("fail to get job status,and e=" + e.toString()); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java index b4e8de2723c57..b55433d948d06 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java @@ -30,16 +30,16 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertFalse; public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase { - @BeforeClass + @BeforeAll public static void setupClass() throws Exception { setupClassBase(TestClusterMapReduceTestCase.class); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java index 595d09cc2a0fd..83ea506bcd073 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCollect.java @@ -21,7 +21,7 @@ import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.UtilsForTests.RandomInputFormat; import org.apache.hadoop.mapreduce.MRConfig; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.*; import java.util.*; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java index 4ed9eb2ccd7fc..7bbf654a4d3fd 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java @@ 
-26,11 +26,11 @@ import org.apache.hadoop.mapred.lib.CombineFileInputFormat; import org.apache.hadoop.mapred.lib.CombineFileSplit; import org.apache.hadoop.mapred.lib.CombineFileRecordReader; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; public class TestCombineFileInputFormat { private static final Logger LOG = @@ -79,6 +79,6 @@ public RecordReader getRecordReader(InputSplit split, JobConf job, Reporter repo LOG.info("Trying to getSplits with splits = " + SIZE_SPLITS); InputSplit[] splits = format.getSplits(job, SIZE_SPLITS); LOG.info("Got getSplits = " + splits.length); - assertEquals("splits == " + SIZE_SPLITS, SIZE_SPLITS, splits.length); + assertEquals(SIZE_SPLITS, splits.length, "splits == " + SIZE_SPLITS); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineOutputCollector.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineOutputCollector.java index 83ad0e54c5fe9..d137c001400c9 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineOutputCollector.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineOutputCollector.java @@ -32,7 +32,7 @@ import org.apache.hadoop.mapred.Task.CombineOutputCollector; import org.apache.hadoop.mapred.Task.TaskReporter; import org.apache.hadoop.mapreduce.MRJobConfig; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestCombineOutputCollector { private CombineOutputCollector coc; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java index 4f1d6bae4f1d5..2636b2ca6a84f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java @@ -18,8 +18,8 @@ package org.apache.hadoop.mapred; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.io.IOException; import java.util.BitSet; @@ -33,7 +33,8 @@ import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.mapred.lib.CombineFileSplit; import org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -57,7 +58,8 @@ public class TestCombineSequenceFileInputFormat { System.getProperty("test.build.data", "/tmp"), "TestCombineSequenceFileInputFormat")); - @Test(timeout=10000) + @Test + @Timeout(value = 10) public void testFormat() throws Exception { JobConf job = new JobConf(conf); @@ -92,10 
+94,10 @@ public void testFormat() throws Exception { // we should have a single split as the length is comfortably smaller than // the block size - assertEquals("We got more than one splits!", 1, splits.length); + assertEquals(1, splits.length, "We got more than one splits!"); InputSplit split = splits[0]; - assertEquals("It should be CombineFileSplit", - CombineFileSplit.class, split.getClass()); + assertEquals( + CombineFileSplit.class, split.getClass(), "It should be CombineFileSplit"); // check each split BitSet bits = new BitSet(length); @@ -103,13 +105,13 @@ public void testFormat() throws Exception { format.getRecordReader(split, job, reporter); try { while (reader.next(key, value)) { - assertFalse("Key in multiple partitions.", bits.get(key.get())); + assertFalse(bits.get(key.get()), "Key in multiple partitions."); bits.set(key.get()); } } finally { reader.close(); } - assertEquals("Some keys in no partition.", length, bits.cardinality()); + assertEquals(length, bits.cardinality(), "Some keys in no partition."); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java index 394630cc5955c..c40a2dbaa93f7 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java @@ -18,9 +18,9 @@ package org.apache.hadoop.mapred; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.io.OutputStream; @@ -40,7 +40,8 @@ import org.apache.hadoop.mapred.lib.CombineFileSplit; import org.apache.hadoop.mapred.lib.CombineTextInputFormat; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -67,7 +68,8 @@ public class TestCombineTextInputFormat { // A reporter that does nothing private static final Reporter voidReporter = Reporter.NULL; - @Test(timeout=10000) + @Test + @Timeout(value = 10) public void testFormat() throws Exception { JobConf job = new JobConf(defaultConf); @@ -96,10 +98,10 @@ public void testFormat() throws Exception { // we should have a single split as the length is comfortably smaller than // the block size - assertEquals("We got more than one splits!", 1, splits.length); + assertEquals(1, splits.length, "We got more than one splits!"); InputSplit split = splits[0]; - assertEquals("It should be CombineFileSplit", - CombineFileSplit.class, split.getClass()); + assertEquals( + CombineFileSplit.class, split.getClass(), "It should be CombineFileSplit"); // check the split BitSet bits = new BitSet(length); @@ -115,7 +117,7 @@ public void testFormat() throws Exception { LOG.warn("conflict with " + v + " at position "+reader.getPos()); } - assertFalse("Key in multiple partitions.", bits.get(v)); + assertFalse(bits.get(v), "Key in 
multiple partitions."); bits.set(v); count++; } @@ -123,7 +125,7 @@ public void testFormat() throws Exception { } finally { reader.close(); } - assertEquals("Some keys in no partition.", length, bits.cardinality()); + assertEquals(length, bits.cardinality(), "Some keys in no partition."); } } @@ -206,7 +208,8 @@ private static List readSplit(InputFormat format, /** * Test using the gzip codec for reading */ - @Test(timeout=10000) + @Test + @Timeout(value = 10) public void testGzip() throws IOException { JobConf job = new JobConf(defaultConf); CompressionCodec gzip = new GzipCodec(); @@ -219,9 +222,9 @@ public void testGzip() throws IOException { FileInputFormat.setInputPaths(job, workDir); CombineTextInputFormat format = new CombineTextInputFormat(); InputSplit[] splits = format.getSplits(job, 100); - assertEquals("compressed splits == 1", 1, splits.length); + assertEquals(1, splits.length, "compressed splits == 1"); List results = readSplit(format, splits[0], job); - assertEquals("splits[0] length", 8, results.size()); + assertEquals(8, results.size(), "splits[0] length"); final String[] firstList = {"the quick", "brown", "fox jumped", "over", " the lazy", " dog"}; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java index 7cf5e71e1a57f..aa693d75c48ec 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java @@ -27,8 +27,8 @@ import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.junit.Ignore; -import org.junit.Test; -import static org.junit.Assert.assertTrue; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * check for the job submission options of @@ -59,7 +59,7 @@ public void testJobShell() throws Exception { stream.close(); mr = new MiniMRCluster(2, fs.getUri().toString(), 1); File thisbuildDir = new File(buildDir, "jobCommand"); - assertTrue("create build dir", thisbuildDir.mkdirs()); + assertTrue(thisbuildDir.mkdirs(), "create build dir"); File f = new File(thisbuildDir, "files_tmp"); FileOutputStream fstream = new FileOutputStream(f); fstream.write("somestrings".getBytes()); @@ -120,13 +120,13 @@ public void testJobShell() throws Exception { JobConf jobConf = mr.createJobConf(); //before running the job, verify that libjar is not in client classpath - assertTrue("libjar not in client classpath", loadLibJar(jobConf)==null); + assertTrue(loadLibJar(jobConf)==null, "libjar not in client classpath"); int ret = ToolRunner.run(jobConf, new testshell.ExternalMapReduce(), args); //after running the job, verify that libjar is in the client classpath - assertTrue("libjar added to client classpath", loadLibJar(jobConf)!=null); + assertTrue(loadLibJar(jobConf)!=null, "libjar added to client classpath"); - assertTrue("not failed ", ret != -1); + assertTrue(ret != -1, "not failed "); f.delete(); thisbuildDir.delete(); } finally { diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestComparators.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestComparators.java index f83dbe2857821..e68491c9598cd 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestComparators.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestComparators.java @@ -35,11 +35,11 @@ import org.apache.hadoop.io.WritableComparator; import org.apache.hadoop.mapreduce.MRConfig; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** @@ -312,7 +312,7 @@ public boolean equals (IntWritable v1, IntWritable v2) { } } - @Before + @BeforeEach public void configure() throws Exception { Path testdir = new Path(TEST_DIR.getAbsolutePath()); Path inDir = new Path(testdir, "in"); @@ -355,7 +355,7 @@ public void configure() throws Exception { jc = new JobClient(conf); } - @After + @AfterEach public void cleanup() { FileUtil.fullyDelete(TEST_DIR); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java index 80a9502774eac..02be9b9a50283 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java @@ -29,8 +29,8 @@ import org.apache.hadoop.io.compress.zlib.ZlibFactory; import org.apache.hadoop.util.LineReader; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.After; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,7 +43,7 @@ import java.util.List; import java.util.zip.Inflater; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; /** * Test class for concatenated {@link CompressionInputStream}. 
@@ -80,7 +80,7 @@ public class TestConcatenatedCompressedInput { } } - @After + @AfterEach public void after() { ZlibFactory.loadNativeZLib(); } @@ -203,7 +203,7 @@ public void testGzip() throws IOException { format.configure(jobConf); InputSplit[] splits = format.getSplits(jobConf, 100); - assertEquals("compressed splits == 2", 2, splits.length); + assertEquals(2, splits.length, "compressed splits == 2"); FileSplit tmp = (FileSplit) splits[0]; if (tmp.getPath().getName().equals("part2.txt.gz")) { splits[0] = splits[1]; @@ -211,12 +211,12 @@ public void testGzip() throws IOException { } List results = readSplit(format, splits[0], jobConf); - assertEquals("splits[0] num lines", 6, results.size()); + assertEquals(6, results.size(), "splits[0] num lines"); assertEquals("splits[0][5]", "member #3", results.get(5).toString()); results = readSplit(format, splits[1], jobConf); - assertEquals("splits[1] num lines", 2, results.size()); + assertEquals(2, results.size(), "splits[1] num lines"); assertEquals("splits[1][0]", "this is a test", results.get(0).toString()); assertEquals("splits[1][1]", "of gzip", @@ -243,43 +243,43 @@ public void testPrototypeInflaterGzip() throws IOException { localFs.copyFromLocalFile(fnLocal, fnHDFS); final FileInputStream in = new FileInputStream(fnLocal.toString()); - assertEquals("concat bytes available", 148, in.available()); + assertEquals(148, in.available(), "concat bytes available"); // should wrap all of this header-reading stuff in a running-CRC wrapper // (did so in BuiltInGzipDecompressor; see below) byte[] compressedBuf = new byte[256]; int numBytesRead = in.read(compressedBuf, 0, 10); - assertEquals("header bytes read", 10, numBytesRead); - assertEquals("1st byte", 0x1f, compressedBuf[0] & 0xff); - assertEquals("2nd byte", 0x8b, compressedBuf[1] & 0xff); - assertEquals("3rd byte (compression method)", 8, compressedBuf[2] & 0xff); + assertEquals(10, numBytesRead, "header bytes read"); + assertEquals(0x1f, compressedBuf[0] & 0xff, "1st byte"); + assertEquals(0x8b, compressedBuf[1] & 0xff, "2nd byte"); + assertEquals(8, compressedBuf[2] & 0xff, "3rd byte (compression method)"); byte flags = (byte)(compressedBuf[3] & 0xff); if ((flags & 0x04) != 0) { // FEXTRA numBytesRead = in.read(compressedBuf, 0, 2); - assertEquals("XLEN bytes read", 2, numBytesRead); + assertEquals(2, numBytesRead, "XLEN bytes read"); int xlen = ((compressedBuf[1] << 8) | compressedBuf[0]) & 0xffff; in.skip(xlen); } if ((flags & 0x08) != 0) { // FNAME while ((numBytesRead = in.read()) != 0) { - assertFalse("unexpected end-of-file while reading filename", - numBytesRead == -1); + assertFalse( + numBytesRead == -1, "unexpected end-of-file while reading filename"); } } if ((flags & 0x10) != 0) { // FCOMMENT while ((numBytesRead = in.read()) != 0) { - assertFalse("unexpected end-of-file while reading comment", - numBytesRead == -1); + assertFalse( + numBytesRead == -1, "unexpected end-of-file while reading comment"); } } if ((flags & 0xe0) != 0) { // reserved - assertTrue("reserved bits are set??", (flags & 0xe0) == 0); + assertTrue((flags & 0xe0) == 0, "reserved bits are set??"); } if ((flags & 0x02) != 0) { // FHCRC numBytesRead = in.read(compressedBuf, 0, 2); - assertEquals("CRC16 bytes read", 2, numBytesRead); + assertEquals(2, numBytesRead, "CRC16 bytes read"); int crc16 = ((compressedBuf[1] << 8) | compressedBuf[0]) & 0xffff; } @@ -320,9 +320,9 @@ public void testBuiltInGzipDecompressor() throws IOException { localFs.delete(workDir, true); // Don't use native libs for this test 
ZlibFactory.setNativeZlibLoaded(false); - assertEquals("[non-native (Java) codec]", - org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.class, - gzip.getDecompressorType()); + assertEquals( + org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.class, + gzip.getDecompressorType(), "[non-native (Java) codec]"); System.out.println(COLOR_BR_YELLOW + "testBuiltInGzipDecompressor() using" + " non-native (Java Inflater) Decompressor (" + gzip.getDecompressorType() + ")" + COLOR_NORMAL); @@ -347,8 +347,8 @@ public void testBuiltInGzipDecompressor() throws IOException { // here's first pair of DecompressorStreams: final FileInputStream in1 = new FileInputStream(fnLocal1.toString()); final FileInputStream in2 = new FileInputStream(fnLocal2.toString()); - assertEquals("concat bytes available", 2734, in1.available()); - assertEquals("concat bytes available", 3413, in2.available()); // w/hdr CRC + assertEquals(2734, in1.available(), "concat bytes available"); + assertEquals(3413, in2.available(), "concat bytes available"); // w/hdr CRC CompressionInputStream cin2 = gzip.createInputStream(in2); LineReader in = new LineReader(cin2); @@ -360,10 +360,10 @@ public void testBuiltInGzipDecompressor() throws IOException { totalBytes += numBytes; } in.close(); - assertEquals("total uncompressed bytes in concatenated test file", - 5346, totalBytes); - assertEquals("total uncompressed lines in concatenated test file", - 84, lineNum); + assertEquals( + 5346, totalBytes, "total uncompressed bytes in concatenated test file"); + assertEquals( + 84, lineNum, "total uncompressed lines in concatenated test file"); ZlibFactory.loadNativeZLib(); // test GzipZlibDecompressor (native), just to be sure @@ -442,7 +442,7 @@ private static void doSingleGzipBufferSize(JobConf jConf) throws IOException { // here's Nth pair of DecompressorStreams: InputSplit[] splits = format.getSplits(jConf, 100); - assertEquals("compressed splits == 2", 2, splits.length); + assertEquals(2, splits.length, "compressed splits == 2"); FileSplit tmp = (FileSplit) splits[0]; if (tmp.getPath() .getName().equals("testdata/testCompressThenConcat.txt.gz")) { @@ -452,7 +452,7 @@ private static void doSingleGzipBufferSize(JobConf jConf) throws IOException { } List results = readSplit(format, splits[0], jConf); - assertEquals("splits[0] length (num lines)", 84, results.size()); + assertEquals(84, results.size(), "splits[0] length (num lines)"); assertEquals("splits[0][0]", "Call me Ishmael. Some years ago--never mind how long precisely--having", results.get(0).toString()); @@ -461,7 +461,7 @@ private static void doSingleGzipBufferSize(JobConf jConf) throws IOException { results.get(42).toString()); results = readSplit(format, splits[1], jConf); - assertEquals("splits[1] length (num lines)", 84, results.size()); + assertEquals(84, results.size(), "splits[1] length (num lines)"); assertEquals("splits[1][0]", "Call me Ishmael. 
Some years ago--never mind how long precisely--having", results.get(0).toString()); @@ -501,7 +501,7 @@ public void testBzip2() throws IOException { // [135 splits for a 208-byte file and a 62-byte file(!)] InputSplit[] splits = format.getSplits(jobConf, 100); - assertEquals("compressed splits == 2", 2, splits.length); + assertEquals(2, splits.length, "compressed splits == 2"); FileSplit tmp = (FileSplit) splits[0]; if (tmp.getPath().getName().equals("part2.txt.bz2")) { splits[0] = splits[1]; @@ -509,12 +509,12 @@ public void testBzip2() throws IOException { } List results = readSplit(format, splits[0], jobConf); - assertEquals("splits[0] num lines", 6, results.size()); + assertEquals(6, results.size(), "splits[0] num lines"); assertEquals("splits[0][5]", "member #3", results.get(5).toString()); results = readSplit(format, splits[1], jobConf); - assertEquals("splits[1] num lines", 2, results.size()); + assertEquals(2, results.size(), "splits[1] num lines"); assertEquals("splits[1][0]", "this is a test", results.get(0).toString()); assertEquals("splits[1][1]", "of bzip2", @@ -555,8 +555,8 @@ public void testMoreBzip2() throws IOException { // here's first pair of BlockDecompressorStreams: final FileInputStream in1 = new FileInputStream(fnLocal1.toString()); final FileInputStream in2 = new FileInputStream(fnLocal2.toString()); - assertEquals("concat bytes available", 2567, in1.available()); - assertEquals("concat bytes available", 3056, in2.available()); + assertEquals(2567, in1.available(), "concat bytes available"); + assertEquals(3056, in2.available(), "concat bytes available"); CompressionInputStream cin2 = bzip2.createInputStream(in2); LineReader in = new LineReader(cin2); @@ -568,10 +568,10 @@ public void testMoreBzip2() throws IOException { totalBytes += numBytes; } in.close(); - assertEquals("total uncompressed bytes in concatenated test file", - 5346, totalBytes); - assertEquals("total uncompressed lines in concatenated test file", - 84, lineNum); + assertEquals( + 5346, totalBytes, "total uncompressed bytes in concatenated test file"); + assertEquals( + 84, lineNum, "total uncompressed lines in concatenated test file"); // test CBZip2InputStream with lots of different input-buffer sizes doMultipleBzip2BufferSizes(jobConf); @@ -646,7 +646,7 @@ private static void doSingleBzip2BufferSize(JobConf jConf) // here's Nth pair of DecompressorStreams: InputSplit[] splits = format.getSplits(jConf, 100); - assertEquals("compressed splits == 2", 2, splits.length); + assertEquals(2, splits.length, "compressed splits == 2"); FileSplit tmp = (FileSplit) splits[0]; if (tmp.getPath() .getName().equals("testdata/testCompressThenConcat.txt.gz")) { @@ -657,7 +657,7 @@ private static void doSingleBzip2BufferSize(JobConf jConf) // testConcatThenCompress (single) List results = readSplit(format, splits[0], jConf); - assertEquals("splits[0] length (num lines)", 84, results.size()); + assertEquals(84, results.size(), "splits[0] length (num lines)"); assertEquals("splits[0][0]", "Call me Ishmael. Some years ago--never mind how long precisely--having", results.get(0).toString()); @@ -667,7 +667,7 @@ private static void doSingleBzip2BufferSize(JobConf jConf) // testCompressThenConcat (multi) results = readSplit(format, splits[1], jConf); - assertEquals("splits[1] length (num lines)", 84, results.size()); + assertEquals(84, results.size(), "splits[1] length (num lines)"); assertEquals("splits[1][0]", "Call me Ishmael. 
Some years ago--never mind how long precisely--having", results.get(0).toString()); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java index f0b1df3eac18a..4e81896c95dc7 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java @@ -23,8 +23,8 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil; import org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper; import org.apache.hadoop.mapreduce.lib.fieldsel.TestMRFieldSelection; -import org.junit.Test; -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.nio.charset.StandardCharsets; import java.text.NumberFormat; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java index d87f6fd91a988..40609e64976cc 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileInputFormatPathFilter.java @@ -21,10 +21,10 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.io.Writer; @@ -57,12 +57,12 @@ public RecordReader getRecordReader(InputSplit split, JobConf job, new Path(new Path(System.getProperty("test.build.data", "."), "data"), "TestFileInputFormatPathFilter"); - @Before + @BeforeEach public void setUp() throws Exception { tearDown(); localFs.mkdirs(workDir); } - @After + @AfterEach public void tearDown() throws Exception { if (localFs.exists(workDir)) { localFs.delete(workDir, true); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileOutputFormat.java index 314123567fa8d..26ec18b9de34b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFileOutputFormat.java @@ -30,8 +30,8 @@ import java.io.OutputStream; import java.util.Iterator; -import org.junit.Test; 
-import static org.junit.Assert.assertTrue; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestFileOutputFormat extends HadoopTestCase { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java index 1ae17584a6d8b..b688d2cdff764 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java @@ -33,12 +33,13 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.compress.*; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; public class TestFixedLengthInputFormat { @@ -54,7 +55,7 @@ public class TestFixedLengthInputFormat { private static char[] chars; private static Random charRand; - @BeforeClass + @BeforeAll public static void onlyOnce() { try { defaultConf = new Configuration(); @@ -77,7 +78,8 @@ public static void onlyOnce() { * 20 random tests of various record, file, and split sizes. All tests have * uncompressed file as input. */ - @Test (timeout=500000) + @Test + @Timeout(value = 500) public void testFormat() throws IOException { runRandomTests(null); } @@ -86,7 +88,8 @@ public void testFormat() throws IOException { * 20 random tests of various record, file, and split sizes. All tests have * compressed file as input. */ - @Test (timeout=500000) + @Test + @Timeout(value = 500) public void testFormatCompressedIn() throws IOException { runRandomTests(new GzipCodec()); } @@ -94,7 +97,8 @@ public void testFormatCompressedIn() throws IOException { /** * Test with no record length set. 
*/ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testNoRecordLength() throws IOException { localFs.delete(workDir, true); Path file = new Path(workDir, "testFormat.txt"); @@ -115,13 +119,14 @@ public void testNoRecordLength() throws IOException { LOG.info("Exception message:" + ioe.getMessage()); } } - assertTrue("Exception for not setting record length:", exceptionThrown); + assertTrue(exceptionThrown, "Exception for not setting record length:"); } /** * Test with record length set to 0 */ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testZeroRecordLength() throws IOException { localFs.delete(workDir, true); Path file = new Path(workDir, "testFormat.txt"); @@ -143,13 +148,14 @@ public void testZeroRecordLength() throws IOException { LOG.info("Exception message:" + ioe.getMessage()); } } - assertTrue("Exception for zero record length:", exceptionThrown); + assertTrue(exceptionThrown, "Exception for zero record length:"); } /** * Test with record length set to a negative value */ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testNegativeRecordLength() throws IOException { localFs.delete(workDir, true); Path file = new Path(workDir, "testFormat.txt"); @@ -171,13 +177,14 @@ public void testNegativeRecordLength() throws IOException { LOG.info("Exception message:" + ioe.getMessage()); } } - assertTrue("Exception for negative record length:", exceptionThrown); + assertTrue(exceptionThrown, "Exception for negative record length:"); } /** * Test with partial record at the end of a compressed input file. */ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testPartialRecordCompressedIn() throws IOException { CompressionCodec gzip = new GzipCodec(); runPartialRecordTest(gzip); @@ -186,7 +193,8 @@ public void testPartialRecordCompressedIn() throws IOException { /** * Test with partial record at the end of an uncompressed input file. */ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testPartialRecordUncompressedIn() throws IOException { runPartialRecordTest(null); } @@ -194,7 +202,8 @@ public void testPartialRecordUncompressedIn() throws IOException { /** * Test using the gzip codec with two input files. 
*/ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testGzipWithTwoInputs() throws IOException { CompressionCodec gzip = new GzipCodec(); localFs.delete(workDir, true); @@ -210,17 +219,17 @@ public void testGzipWithTwoInputs() throws IOException { writeFile(localFs, new Path(workDir, "part2.txt.gz"), gzip, "ten nine eightsevensix five four threetwo one "); InputSplit[] splits = format.getSplits(job, 100); - assertEquals("compressed splits == 2", 2, splits.length); + assertEquals(2, splits.length, "compressed splits == 2"); FileSplit tmp = (FileSplit) splits[0]; if (tmp.getPath().getName().equals("part2.txt.gz")) { splits[0] = splits[1]; splits[1] = tmp; } List results = readSplit(format, splits[0], job); - assertEquals("splits[0] length", 10, results.size()); + assertEquals(10, results.size(), "splits[0] length"); assertEquals("splits[0][5]", "six ", results.get(5)); results = readSplit(format, splits[1], job); - assertEquals("splits[1] length", 10, results.size()); + assertEquals(10, results.size(), "splits[1] length"); assertEquals("splits[1][0]", "ten ", results.get(0)); assertEquals("splits[1][1]", "nine ", results.get(1)); } @@ -329,26 +338,26 @@ private void runRandomTests(CompressionCodec codec) throws IOException { RecordReader reader = format.getRecordReader(split, job, voidReporter); Class clazz = reader.getClass(); - assertEquals("RecordReader class should be FixedLengthRecordReader:", - FixedLengthRecordReader.class, clazz); + assertEquals( + FixedLengthRecordReader.class, clazz, "RecordReader class should be FixedLengthRecordReader:"); // Plow through the records in this split while (reader.next(key, value)) { - assertEquals("Checking key", (long)(recordNumber*recordLength), - key.get()); + assertEquals((long)(recordNumber*recordLength), + key.get(), "Checking key"); String valueString = new String(value.getBytes(), 0, value.getLength()); - assertEquals("Checking record length:", recordLength, - value.getLength()); + assertEquals(recordLength, + value.getLength(), "Checking record length:"); - assertTrue("Checking for more records than expected:", - recordNumber < totalRecords); + assertTrue( + recordNumber < totalRecords, "Checking for more records than expected:"); String origRecord = recordList.get(recordNumber); - assertEquals("Checking record content:", origRecord, valueString); + assertEquals(origRecord, valueString, "Checking record content:"); recordNumber++; } reader.close(); } - assertEquals("Total original records should be total read records:", - recordList.size(), recordNumber); + assertEquals( + recordList.size(), recordNumber, "Total original records should be total read records:"); } } @@ -403,7 +412,7 @@ private void runPartialRecordTest(CompressionCodec codec) throws IOException { "one two threefour five six seveneightnine ten"); InputSplit[] splits = format.getSplits(job, 100); if (codec != null) { - assertEquals("compressed splits == 1", 1, splits.length); + assertEquals(1, splits.length, "compressed splits == 1"); } boolean exceptionThrown = false; for (InputSplit split : splits) { @@ -414,7 +423,7 @@ private void runPartialRecordTest(CompressionCodec codec) throws IOException { LOG.info("Exception message:" + ioe.getMessage()); } } - assertTrue("Exception for partial record:", exceptionThrown); + assertTrue(exceptionThrown, "Exception for partial record:"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java index 3d1c2e71bffdb..614775058de12 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestGetSplitHosts.java @@ -20,8 +20,8 @@ import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.net.NetworkTopology; -import org.junit.Test; -import static org.junit.Assert.assertTrue; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestGetSplitHosts { @Test diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java index a314fc1f578a6..5446dbbb24c43 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java @@ -27,8 +27,8 @@ import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.io.compress.GzipCodec; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; public class TestIFile { @Test diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java index a815b28295204..fc5650b4c905b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFileStreams.java @@ -21,15 +21,15 @@ import org.apache.hadoop.fs.ChecksumException; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.io.OutputStream; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestIFileStreams { @Test @@ -75,7 +75,7 @@ public void testBadIFileStream() throws Exception { } ifis.close(); } catch (ChecksumException e) { - assertEquals("Unexpected bad checksum", DLEN - 1, i); + assertEquals(DLEN - 1, i, "Unexpected bad checksum"); return; } fail("Did not detect bad data in checksum"); @@ -99,7 +99,7 @@ public void testBadLength() throws Exception { } ifis.close(); } catch (ChecksumException e) { - assertEquals("Checksum before close", i, DLEN - 8); + assertEquals(i, DLEN - 8, "Checksum before close"); return; } fail("Did not detect bad data in checksum"); diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java index 0c20c335d89ab..81c641a277430 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestInputPath.java @@ -21,8 +21,8 @@ import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.StringUtils; -import org.junit.Test; -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestInputPath { @Test diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java index 371a07c17ceb8..0eadfe08740df 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java @@ -35,9 +35,9 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.io.serializer.JavaSerializationComparator; import org.apache.hadoop.mapreduce.MRConfig; -import org.junit.Test; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestJavaSerialization { @@ -58,8 +58,8 @@ public void map(LongWritable key, Text value, StringTokenizer st = new StringTokenizer(value.toString()); while (st.hasMoreTokens()) { String token = st.nextToken(); - assertTrue("Invalid token; expected 'a' or 'b', got " + token, - token.equals("a") || token.equals("b")); + assertTrue( + token.equals("a") || token.equals("b"), "Invalid token; expected 'a' or 'b', got " + token); output.collect(token, 1L); } } @@ -124,9 +124,9 @@ public void testMapReduceJob() throws Exception { String inputFileContents = FileUtils.readFileToString(new File(INPUT_FILE.toUri().getPath())); - assertTrue("Input file contents not as expected; contents are '" - + inputFileContents + "', expected \"b a\n\" ", - inputFileContents.equals("b a\n")); + assertTrue( + inputFileContents.equals("b a\n"), "Input file contents not as expected; contents are '" + + inputFileContents + "', expected \"b a\n\" "); JobClient.runJob(conf); @@ -142,8 +142,8 @@ public void testMapReduceJob() throws Exception { assertEquals("Unexpected output; received output '" + reduceOutput + "'", "b\t1", lines[1]); assertEquals( - "Reduce output has extra lines; output is '" + reduceOutput + "'", 2, - lines.length); + 2, + lines.length, "Reduce output has extra lines; output is '" + reduceOutput + "'"); } } diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java index 13f2301b9f788..dde9a87bc69b6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java @@ -30,13 +30,13 @@ import org.apache.hadoop.mapred.lib.IdentityReducer; import org.apache.hadoop.mapreduce.JobCounter; import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.slf4j.LoggerFactory; import org.slf4j.Logger; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; /** * A JUnit test to test Map-Reduce job cleanup. @@ -57,7 +57,7 @@ public class TestJobCleanup { private static final Logger LOG = LoggerFactory.getLogger(TestJobCleanup.class); - @BeforeClass + @BeforeAll public static void setUp() throws IOException { JobConf conf = new JobConf(); fileSys = FileSystem.get(conf); @@ -82,7 +82,7 @@ public static void setUp() throws IOException { fileSys.mkdirs(emptyInDir); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { if (fileSys != null) { // fileSys.delete(new Path(TEST_ROOT_DIR), true); @@ -169,14 +169,14 @@ private void testSuccessfulJob(String filename, LOG.info("Job finished : " + job.isComplete()); Path testFile = new Path(outDir, filename); - assertTrue("Done file \"" + testFile + "\" missing for job " + id, - fileSys.exists(testFile)); + assertTrue( + fileSys.exists(testFile), "Done file \"" + testFile + "\" missing for job " + id); // check if the files from the missing set exists for (String ex : exclude) { Path file = new Path(outDir, ex); - assertFalse("File " + file + " should not be present for successful job " - + id, fileSys.exists(file)); + assertFalse(fileSys.exists(file), "File " + file + " should not be present for successful job " + + id); } } @@ -196,19 +196,19 @@ private void testFailedJob(String fileName, RunningJob job = jobClient.submitJob(jc); JobID id = job.getID(); job.waitForCompletion(); - assertEquals("Job did not fail", JobStatus.FAILED, job.getJobState()); + assertEquals(JobStatus.FAILED, job.getJobState(), "Job did not fail"); if (fileName != null) { Path testFile = new Path(outDir, fileName); - assertTrue("File " + testFile + " missing for failed job " + id, - fileSys.exists(testFile)); + assertTrue( + fileSys.exists(testFile), "File " + testFile + " missing for failed job " + id); } // check if the files from the missing set exists for (String ex : exclude) { Path file = new Path(outDir, ex); - assertFalse("File " + file + " should not be present for failed job " - + id, fileSys.exists(file)); + assertFalse(fileSys.exists(file), "File " + file + " should not be present for failed job " + + id); } } @@ -242,19 +242,19 @@ private void testKilledJob(String fileName, job.killJob(); // kill the job job.waitForCompletion(); // wait for the job to complete - assertEquals("Job was not killed", JobStatus.KILLED, job.getJobState()); + 
assertEquals(JobStatus.KILLED, job.getJobState(), "Job was not killed"); if (fileName != null) { Path testFile = new Path(outDir, fileName); - assertTrue("File " + testFile + " missing for job " + id, - fileSys.exists(testFile)); + assertTrue( + fileSys.exists(testFile), "File " + testFile + " missing for job " + id); } // check if the files from the missing set exists for (String ex : exclude) { Path file = new Path(outDir, ex); - assertFalse("File " + file + " should not be present for killed job " - + id, fileSys.exists(file)); + assertFalse(fileSys.exists(file), "File " + file + " should not be present for killed job " + + id); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobClients.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobClients.java index 770ddd2c79d14..a6c846992f41e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobClients.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobClients.java @@ -19,9 +19,9 @@ package org.apache.hadoop.mapred; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.mockito.ArgumentMatchers.isA; import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.mock; @@ -40,8 +40,8 @@ import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.TaskReport; import org.apache.hadoop.mapreduce.TaskType; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; @SuppressWarnings("deprecation") public class TestJobClients { @@ -189,7 +189,7 @@ public void testShowJob() throws Exception { client.displayJobList(new JobStatus[] {mockJobStatus}, new PrintWriter(out)); String commandLineOutput = out.toString(); System.out.println(commandLineOutput); - Assert.assertTrue(commandLineOutput.contains("Total jobs:1")); + Assertions.assertTrue(commandLineOutput.contains("Total jobs:1")); verify(mockJobStatus, atLeastOnce()).getJobID(); verify(mockJobStatus).getState(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCounters.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCounters.java index 18ef64d15e79f..bb034d62a85c0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCounters.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCounters.java @@ -18,9 +18,9 @@ package org.apache.hadoop.mapred; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -46,9 +46,9 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormatCounter; import org.apache.hadoop.yarn.util.ResourceCalculatorProcessTree; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; /** * This is an wordcount application that tests the count of records @@ -179,7 +179,7 @@ private static long getFileSize(Path path) throws IOException { return len; } - @BeforeClass + @BeforeAll public static void initPaths() throws IOException { final Configuration conf = new Configuration(); final Path TEST_ROOT_DIR = @@ -207,7 +207,7 @@ public static void initPaths() throws IOException { createWordsFile(inFiles[2], conf); } - @AfterClass + @AfterAll public static void cleanup() throws IOException { //clean up the input and output files final Configuration conf = new Configuration(); @@ -528,7 +528,7 @@ public void map(WritableComparable key, Writable val, OutputCollector output, Reporter reporter) throws IOException { - assertNotNull("Mapper not configured!", loader); + assertNotNull(loader, "Mapper not configured!"); // load the memory loader.load(); @@ -557,7 +557,7 @@ public void reduce(WritableComparable key, Iterator val, OutputCollector output, Reporter reporter) throws IOException { - assertNotNull("Reducer not configured!", loader); + assertNotNull(loader, "Reducer not configured!"); // load the memory loader.load(); @@ -582,10 +582,10 @@ private long getTaskCounterUsage (JobClient client, JobID id, int numReports, reports = client.getReduceTaskReports(id); } - assertNotNull("No reports found for task type '" + type.name() - + "' in job " + id, reports); + assertNotNull(reports, "No reports found for task type '" + type.name() + + "' in job " + id); // make sure that the total number of reports match the expected - assertEquals("Mismatch in task id", numReports, reports.length); + assertEquals(numReports, reports.length, "Mismatch in task id"); Counters counters = reports[taskId].getCounters(); @@ -632,7 +632,7 @@ private static RunningJob runHeapUsageTestJob(JobConf conf, Path testRootDir, RunningJob job = client.submitJob(jobConf); job.waitForCompletion(); JobID jobID = job.getID(); - assertTrue("Job " + jobID + " failed!", job.isSuccessful()); + assertTrue(job.isSuccessful(), "Job " + jobID + " failed!"); return job; } @@ -708,11 +708,11 @@ public void testHeapUsageCounter() throws Exception { System.out.println("Job2 (high memory job) reduce task heap usage: " + highMemJobReduceHeapUsage); - assertTrue("Incorrect map heap usage reported by the map task", - lowMemJobMapHeapUsage < highMemJobMapHeapUsage); + assertTrue( + lowMemJobMapHeapUsage < highMemJobMapHeapUsage, "Incorrect map heap usage reported by the map task"); - assertTrue("Incorrect reduce heap usage reported by the reduce task", - lowMemJobReduceHeapUsage < highMemJobReduceHeapUsage); + assertTrue( + lowMemJobReduceHeapUsage < highMemJobReduceHeapUsage, "Incorrect reduce heap usage reported by the reduce task"); } finally { // shutdown the mr cluster mrCluster.shutdown(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java index f50089af4a1e1..7284330ab5e49 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java @@ -30,14 +30,14 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.lib.IdentityMapper; -import org.junit.BeforeClass; -import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; public class TestJobName extends ClusterMapReduceTestCase { - @BeforeClass + @BeforeAll public static void setupClass() throws Exception { setupClassBase(TestJobName.class); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java index 9a5ca075e3fec..0d1281997dd61 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java @@ -28,10 +28,10 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.MapReduceTestUtil; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; -import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertFalse; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java index 301cadb08be1c..a89f398578145 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java @@ -26,13 +26,13 @@ import org.apache.hadoop.io.compress.*; import org.apache.hadoop.util.LineReader; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static java.nio.charset.StandardCharsets.UTF_8; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertFalse; public class TestKeyValueTextInputFormat { private static final Logger LOG = @@ -102,14 +102,14 @@ public void testFormat() throws Exception { RecordReader reader = format.getRecordReader(splits[j], job, reporter); Class readerClass = reader.getClass(); - assertEquals("reader class is KeyValueLineRecordReader.", KeyValueLineRecordReader.class, readerClass); + assertEquals(KeyValueLineRecordReader.class, readerClass, "reader class is KeyValueLineRecordReader."); Text key = reader.createKey(); Class keyClass = key.getClass(); Text value = reader.createValue(); Class valueClass = value.getClass(); - assertEquals("Key class is Text.", Text.class, keyClass); - assertEquals("Value class is Text.", Text.class, valueClass); + assertEquals(Text.class, keyClass, "Key class is Text."); + assertEquals(Text.class, valueClass, "Value class is Text."); try { int count = 0; while (reader.next(key, value)) { @@ -120,7 +120,7 @@ public void testFormat() throws Exception { " in split " + j + " at position "+reader.getPos()); } - assertFalse("Key in multiple partitions.", bits.get(v)); + assertFalse(bits.get(v), "Key in multiple partitions."); bits.set(v); count++; } @@ -129,7 +129,7 @@ public void testFormat() throws Exception { reader.close(); } } - assertEquals("Some keys in no partition.", length, bits.cardinality()); + assertEquals(length, bits.cardinality(), "Some keys in no partition."); } } @@ -163,18 +163,18 @@ public void testNewLines() throws Exception { in = makeStream("a\nbb\n\nccc\rdddd\r\neeeee"); Text out = new Text(); in.readLine(out); - assertEquals("line1 length", 1, out.getLength()); + assertEquals(1, out.getLength(), "line1 length"); in.readLine(out); - assertEquals("line2 length", 2, out.getLength()); + assertEquals(2, out.getLength(), "line2 length"); in.readLine(out); - assertEquals("line3 length", 0, out.getLength()); + assertEquals(0, out.getLength(), "line3 length"); in.readLine(out); - assertEquals("line4 length", 3, out.getLength()); + assertEquals(3, out.getLength(), "line4 length"); in.readLine(out); - assertEquals("line5 length", 4, out.getLength()); + assertEquals(4, out.getLength(), "line5 length"); in.readLine(out); - assertEquals("line5 length", 5, out.getLength()); - assertEquals("end of file", 0, in.readLine(out)); + assertEquals(5, out.getLength(), "line6 length"); + assertEquals(0, in.readLine(out), "end of file"); } finally { if (in != null) { in.close(); @@ -236,17 +236,17 @@ public void testGzip() throws IOException { KeyValueTextInputFormat format = new KeyValueTextInputFormat(); format.configure(job); InputSplit[] splits = format.getSplits(job, 100); - assertEquals("compressed splits == 2", 2, splits.length); + assertEquals(2, splits.length, "compressed splits == 2"); FileSplit tmp = (FileSplit) splits[0]; if (tmp.getPath().getName().equals("part2.txt.gz")) { splits[0] = splits[1]; splits[1] = tmp; } List results = readSplit(format, splits[0], job); - assertEquals("splits[0] length", 6, results.size()); + assertEquals(6, results.size(), "splits[0] length"); assertEquals("splits[0][5]", " dog", results.get(5).toString()); results = readSplit(format, splits[1], job); - assertEquals("splits[1] length", 2, results.size()); + assertEquals(2, results.size(), "splits[1] length"); assertEquals("splits[1][0]", "this is a test", results.get(0).toString()); assertEquals("splits[1][1]", "of gzip", diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java index ef559d95081db..49e81084ed1ac 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLazyOutput.java @@ -33,8 +33,8 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.lib.LazyOutputFormat; -import org.junit.Test; -import static org.junit.Assert.assertTrue; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * A JUnit test to test the Map-Reduce framework's feature to create part diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLineRecordReaderJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLineRecordReaderJobs.java index c25b2b7c2629e..8c084a58d51d8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLineRecordReaderJobs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLineRecordReaderJobs.java @@ -17,7 +17,7 @@ package org.apache.hadoop.mapred; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.io.OutputStreamWriter; @@ -28,7 +28,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.lib.IdentityMapper; import org.apache.hadoop.mapred.lib.IdentityReducer; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestLineRecordReaderJobs { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java index c8b6c894d0c4b..680bceeafee3c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java @@ -37,16 +37,16 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.ToolRunner; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.junit.rules.TestName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; /** * check for the job submission options of @@ -64,14 +64,14 @@ public class TestLocalJobSubmission { private Path jarPath; private 
Configuration config; - @BeforeClass + @BeforeAll public static void setupClass() throws Exception { // setup the test root directory testRootDir = GenericTestUtils.setupTestRootDir(TestLocalJobSubmission.class); } - @Before + @BeforeEach public void setup() throws IOException { unitTestDir = new File(testRootDir, unitTestName.getMethodName()); unitTestDir.mkdirs(); @@ -120,7 +120,7 @@ private void testLocalJobLibjarsOption(Configuration conf) LOG.error("Job failed with {}", e.getLocalizedMessage(), e); fail("Job failed"); } - assertEquals("dist job res is not 0:", 0, res); + assertEquals(0, res, "dist job res is not 0:"); } /** @@ -140,13 +140,13 @@ public void testLocalJobEncryptedIntermediateData() throws IOException { (SpillCallBackPathsFinder) IntermediateEncryptedStream .setSpillCBInjector(new SpillCallBackPathsFinder()); res = ToolRunner.run(config, new SleepJob(), args); - Assert.assertTrue("No spill occurred", - spillInjector.getEncryptedSpilledFiles().size() > 0); + Assertions.assertTrue( + spillInjector.getEncryptedSpilledFiles().size() > 0, "No spill occurred"); } catch (Exception e) { LOG.error("Job failed with {}", e.getLocalizedMessage(), e); fail("Job failed"); } - assertEquals("dist job res is not 0:", 0, res); + assertEquals(0, res, "dist job res is not 0:"); } /** @@ -188,7 +188,7 @@ public void testLocalJobFilesOption() throws IOException { LOG.error("Job failed with {}", e.getLocalizedMessage(), e); fail("Job failed"); } - assertEquals("dist job res is not 0:", 0, res); + assertEquals(0, res, "dist job res is not 0:"); } /** @@ -209,7 +209,7 @@ public void testLocalJobArchivesOption() throws IOException { LOG.error("Job failed with {}" + e.getLocalizedMessage(), e); fail("Job failed"); } - assertEquals("dist job res is not 0:", 0, res); + assertEquals(0, res, "dist job res is not 0:"); } private Path makeJar(Path p) throws IOException { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java index bbfabe83542ce..844708b62d1c8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java @@ -26,16 +26,16 @@ import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.io.Text; -import org.junit.After; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; import java.io.DataOutputStream; import java.io.IOException; import java.util.concurrent.TimeoutException; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -94,8 +94,8 @@ public void testLocality() throws Exception { blockLocs[0].equals(splitLocs[1]))); } - assertEquals("Expected value of " + FileInputFormat.NUM_INPUT_FILES, - 1, 
job.getLong(FileInputFormat.NUM_INPUT_FILES, 0)); + assertEquals( + 1, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0), "Expected value of " + FileInputFormat.NUM_INPUT_FILES); } private void createInputs(FileSystem fs, Path inDir, String fileName) @@ -135,8 +135,8 @@ public void testNumInputs() throws Exception { inFormat.configure(job); InputSplit[] splits = inFormat.getSplits(job, 1); - assertEquals("Expected value of " + FileInputFormat.NUM_INPUT_FILES, - numFiles, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0)); + assertEquals( + numFiles, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0), "Expected value of " + FileInputFormat.NUM_INPUT_FILES); } final Path root = new Path("/TestFileInputFormat"); @@ -191,8 +191,8 @@ public void testMultiLevelInput() throws Exception { } catch (Exception e) { exceptionThrown = true; } - assertTrue("Exception should be thrown by default for scanning a " - + "directory with directories inside.", exceptionThrown); + assertTrue(exceptionThrown, "Exception should be thrown by default for scanning a " + + "directory with directories inside."); // Enable multi-level/recursive inputs job.setBoolean(FileInputFormat.INPUT_DIR_RECURSIVE, true); @@ -314,7 +314,7 @@ static void writeFile(Configuration conf, Path name, DFSTestUtil.waitReplication(fileSys, name, replication); } - @After + @AfterEach public void tearDown() throws Exception { if (dfs != null) { dfs.shutdown(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java index 18d7010a4966a..7814642b55b2f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java @@ -26,18 +26,18 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.JobStatus; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; import java.io.File; import java.io.IOException; import java.net.URI; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestMRCJCFileOutputCommitter { private static Path outDir = new Path(GenericTestUtils.getTempPath("output")); @@ -145,14 +145,14 @@ public void testAbort() throws IOException { committer.abortTask(tContext); File expectedFile = new File(new Path(committer .getTaskAttemptPath(tContext), file).toString()); - assertFalse("task temp dir still exists", expectedFile.exists()); + assertFalse(expectedFile.exists(), "task temp dir still exists"); committer.abortJob(jContext, JobStatus.State.FAILED); expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME) .toString()); - 
assertFalse("job temp dir "+expectedFile+" still exists", expectedFile.exists()); - assertEquals("Output directory not empty", 0, new File(outDir.toString()) - .listFiles().length); + assertFalse(expectedFile.exists(), "job temp dir "+expectedFile+" still exists"); + assertEquals(0, new File(outDir.toString()) + .listFiles().length, "Output directory not empty"); } public static class FakeFileSystem extends RawLocalFileSystem { @@ -210,7 +210,7 @@ public void testFailAbort() throws IOException { assertNotNull(th); assertTrue(th instanceof IOException); assertTrue(th.getMessage().contains("fake delete failed")); - assertTrue(expectedFile + " does not exists", expectedFile.exists()); + assertTrue(expectedFile.exists(), expectedFile + " does not exists"); th = null; try { @@ -221,10 +221,10 @@ public void testFailAbort() throws IOException { assertNotNull(th); assertTrue(th instanceof IOException); assertTrue(th.getMessage().contains("fake delete failed")); - assertTrue("job temp dir does not exists", jobTmpDir.exists()); + assertTrue(jobTmpDir.exists(), "job temp dir does not exists"); } - @After + @AfterEach public void teardown() { FileUtil.fullyDelete(new File(outDir.toString())); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java index 9a2c744d8c656..ebab173fe7002 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java @@ -30,12 +30,12 @@ import org.apache.hadoop.mapreduce.tools.CLI; import org.apache.hadoop.util.Tool; -import org.junit.BeforeClass; +import org.junit.jupiter.api.BeforeAll; import org.junit.Ignore; @Ignore public class TestMRCJCJobClient extends TestMRJobClient { - @BeforeClass + @BeforeAll public static void setupClass() throws Exception { setupClassBase(TestMRCJCJobClient.class); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java index e412383a98601..1c59b5e32a9de 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java @@ -18,7 +18,7 @@ package org.apache.hadoop.mapred; import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.File; import java.net.URLClassLoader; import java.net.URL; @@ -30,7 +30,7 @@ import org.apache.hadoop.util.ClassUtil; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; @Ignore public class TestMRCJCJobConf { private static final String JAR_RELATIVE_PATH = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMROpportunisticMaps.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMROpportunisticMaps.java index c2a966302cf66..2cd81586789b0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMROpportunisticMaps.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMROpportunisticMaps.java @@ -28,14 +28,14 @@ import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.util.MRJobConfUtil; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; /** * Simple MapReduce to test ability of the MRAppMaster to request and use diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java index 32e847886721b..b7dfc04115418 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java @@ -18,8 +18,8 @@ package org.apache.hadoop.mapred; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.BufferedReader; import java.io.File; @@ -56,8 +56,8 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineWriterImpl; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter; import org.apache.hadoop.yarn.util.timeline.TimelineUtils; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -85,8 +85,8 @@ public void testTimelineServiceStartInMiniCluster() throws Exception { cluster.start(); //verify that the timeline service is not started. - Assert.assertNull("Timeline Service should not have been started", - cluster.getApplicationHistoryServer()); + Assertions.assertNull( + cluster.getApplicationHistoryServer(), "Timeline Service should not have been started"); } finally { if(cluster != null) { @@ -103,8 +103,8 @@ public void testTimelineServiceStartInMiniCluster() throws Exception { cluster.start(); //verify that the timeline service is not started. 
- Assert.assertNull("Timeline Service should not have been started", - cluster.getApplicationHistoryServer()); + Assertions.assertNull( + cluster.getApplicationHistoryServer(), "Timeline Service should not have been started"); } finally { if(cluster != null) { @@ -135,33 +135,33 @@ public void testMRTimelineEventHandling() throws Exception { Path outDir = new Path(localPathRoot, "output"); RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir); - Assert.assertEquals(JobStatus.SUCCEEDED, + Assertions.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue()); TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null); - Assert.assertEquals(1, entities.getEntities().size()); + Assertions.assertEquals(1, entities.getEntities().size()); TimelineEntity tEntity = entities.getEntities().get(0); - Assert.assertEquals(job.getID().toString(), tEntity.getEntityId()); - Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType()); - Assert.assertEquals(EventType.AM_STARTED.toString(), + Assertions.assertEquals(job.getID().toString(), tEntity.getEntityId()); + Assertions.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType()); + Assertions.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(tEntity.getEvents().size() - 1) .getEventType()); - Assert.assertEquals(EventType.JOB_FINISHED.toString(), + Assertions.assertEquals(EventType.JOB_FINISHED.toString(), tEntity.getEvents().get(0).getEventType()); job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir); - Assert.assertEquals(JobStatus.FAILED, + Assertions.assertEquals(JobStatus.FAILED, job.getJobStatus().getState().getValue()); entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null); - Assert.assertEquals(2, entities.getEntities().size()); + Assertions.assertEquals(2, entities.getEntities().size()); tEntity = entities.getEntities().get(0); - Assert.assertEquals(job.getID().toString(), tEntity.getEntityId()); - Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType()); - Assert.assertEquals(EventType.AM_STARTED.toString(), + Assertions.assertEquals(job.getID().toString(), tEntity.getEntityId()); + Assertions.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType()); + Assertions.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(tEntity.getEvents().size() - 1) .getEventType()); - Assert.assertEquals(EventType.JOB_FAILED.toString(), + Assertions.assertEquals(EventType.JOB_FAILED.toString(), tEntity.getEvents().get(0).getEventType()); } finally { if (cluster != null) { @@ -221,7 +221,7 @@ public void testMRNewTimelineServiceEventHandling() throws Exception { UtilsForTests.createConfigValue(101 * 1024)); RunningJob job = UtilsForTests.runJobSucceed(successConf, inDir, outDir); - Assert.assertEquals(JobStatus.SUCCEEDED, + Assertions.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue()); YarnClient yarnClient = YarnClient.createYarnClient(); @@ -232,7 +232,7 @@ public void testMRNewTimelineServiceEventHandling() throws Exception { ApplicationId firstAppId = null; List apps = yarnClient.getApplications(appStates); - Assert.assertEquals(apps.size(), 1); + Assertions.assertEquals(apps.size(), 1); ApplicationReport appReport = apps.get(0); firstAppId = appReport.getApplicationId(); UtilsForTests.waitForAppFinished(job, cluster); @@ -240,11 +240,11 @@ public void testMRNewTimelineServiceEventHandling() throws Exception { LOG.info("Run 2nd job which should be 
failed."); job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir); - Assert.assertEquals(JobStatus.FAILED, + Assertions.assertEquals(JobStatus.FAILED, job.getJobStatus().getState().getValue()); apps = yarnClient.getApplications(appStates); - Assert.assertEquals(apps.size(), 2); + Assertions.assertEquals(apps.size(), 2); appReport = apps.get(0).getApplicationId().equals(firstAppId) ? apps.get(0) : apps.get(1); @@ -270,7 +270,7 @@ private void checkNewTimelineEvent(ApplicationId appId, File tmpRootFolder = new File(tmpRoot); - Assert.assertTrue(tmpRootFolder.isDirectory()); + Assertions.assertTrue(tmpRootFolder.isDirectory()); String basePath = tmpRoot + YarnConfiguration.DEFAULT_RM_CLUSTER_ID + File.separator + UserGroupInformation.getCurrentUser().getShortUserName() + @@ -283,9 +283,9 @@ private void checkNewTimelineEvent(ApplicationId appId, basePath + File.separator + "MAPREDUCE_JOB" + File.separator; File entityFolder = new File(outputDirJob); - Assert.assertTrue("Job output directory: " + outputDirJob + - " does not exist.", - entityFolder.isDirectory()); + Assertions.assertTrue( + entityFolder.isDirectory(), "Job output directory: " + outputDirJob + + " does not exist."); // check for job event file String jobEventFileName = appId.toString().replaceAll("application", "job") @@ -293,9 +293,9 @@ private void checkNewTimelineEvent(ApplicationId appId, String jobEventFilePath = outputDirJob + jobEventFileName; File jobEventFile = new File(jobEventFilePath); - Assert.assertTrue("jobEventFilePath: " + jobEventFilePath + - " does not exist.", - jobEventFile.exists()); + Assertions.assertTrue( + jobEventFile.exists(), "jobEventFilePath: " + jobEventFilePath + + " does not exist."); verifyEntity(jobEventFile, EventType.JOB_FINISHED.name(), true, false, null, false); Set cfgsToCheck = Sets.newHashSet("dummy_conf1", "dummy_conf2", @@ -306,10 +306,10 @@ private void checkNewTimelineEvent(ApplicationId appId, String outputAppDir = basePath + File.separator + "YARN_APPLICATION" + File.separator; entityFolder = new File(outputAppDir); - Assert.assertTrue( - "Job output directory: " + outputAppDir + - " does not exist.", - entityFolder.isDirectory()); + Assertions.assertTrue( + + entityFolder.isDirectory(), "Job output directory: " + outputAppDir + + " does not exist."); // check for job event file String appEventFileName = appId.toString() @@ -317,10 +317,10 @@ private void checkNewTimelineEvent(ApplicationId appId, String appEventFilePath = outputAppDir + appEventFileName; File appEventFile = new File(appEventFilePath); - Assert.assertTrue( - "appEventFilePath: " + appEventFilePath + - " does not exist.", - appEventFile.exists()); + Assertions.assertTrue( + + appEventFile.exists(), "appEventFilePath: " + appEventFilePath + + " does not exist."); verifyEntity(appEventFile, null, true, false, null, false); verifyEntity(appEventFile, null, false, true, cfgsToCheck, false); @@ -328,9 +328,9 @@ private void checkNewTimelineEvent(ApplicationId appId, String outputDirTask = basePath + File.separator + "MAPREDUCE_TASK" + File.separator; File taskFolder = new File(outputDirTask); - Assert.assertTrue("Task output directory: " + outputDirTask + - " does not exist.", - taskFolder.isDirectory()); + Assertions.assertTrue( + taskFolder.isDirectory(), "Task output directory: " + outputDirTask + + " does not exist."); String taskEventFileName = appId.toString().replaceAll("application", "task") + @@ -339,9 +339,9 @@ private void checkNewTimelineEvent(ApplicationId appId, String taskEventFilePath = 
outputDirTask + taskEventFileName; File taskEventFile = new File(taskEventFilePath); - Assert.assertTrue("taskEventFileName: " + taskEventFilePath + - " does not exist.", - taskEventFile.exists()); + Assertions.assertTrue( + taskEventFile.exists(), "taskEventFileName: " + taskEventFilePath + + " does not exist."); verifyEntity(taskEventFile, EventType.TASK_FINISHED.name(), true, false, null, true); @@ -349,8 +349,8 @@ private void checkNewTimelineEvent(ApplicationId appId, String outputDirTaskAttempt = basePath + File.separator + "MAPREDUCE_TASK_ATTEMPT" + File.separator; File taskAttemptFolder = new File(outputDirTaskAttempt); - Assert.assertTrue("TaskAttempt output directory: " + outputDirTaskAttempt + - " does not exist.", taskAttemptFolder.isDirectory()); + Assertions.assertTrue(taskAttemptFolder.isDirectory(), "TaskAttempt output directory: " + outputDirTaskAttempt + + " does not exist."); String taskAttemptEventFileName = appId.toString().replaceAll( "application", "attempt") + "_m_000000_0" + @@ -359,8 +359,8 @@ private void checkNewTimelineEvent(ApplicationId appId, String taskAttemptEventFilePath = outputDirTaskAttempt + taskAttemptEventFileName; File taskAttemptEventFile = new File(taskAttemptEventFilePath); - Assert.assertTrue("taskAttemptEventFileName: " + taskAttemptEventFilePath + - " does not exist.", taskAttemptEventFile.exists()); + Assertions.assertTrue(taskAttemptEventFile.exists(), "taskAttemptEventFileName: " + taskAttemptEventFilePath + + " does not exist."); verifyEntity(taskAttemptEventFile, EventType.MAP_ATTEMPT_FINISHED.name(), true, false, null, true); } @@ -397,14 +397,14 @@ private void verifyEntity(File entityFile, String eventId, LOG.info("strLine.trim()= " + strLine.trim()); if (checkIdPrefix) { - Assert.assertTrue("Entity ID prefix expected to be > 0", - entity.getIdPrefix() > 0); + Assertions.assertTrue( + entity.getIdPrefix() > 0, "Entity ID prefix expected to be > 0"); if (idPrefix == -1) { idPrefix = entity.getIdPrefix(); } else { - Assert.assertEquals("Entity ID prefix should be same across " + - "each publish of same entity", - idPrefix, entity.getIdPrefix()); + Assertions.assertEquals( + idPrefix, entity.getIdPrefix(), "Entity ID prefix should be same across " + + "each publish of same entity"); } } if (eventId == null) { @@ -492,21 +492,21 @@ public void testMapreduceJobTimelineServiceEnabled() RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir); - Assert.assertEquals(JobStatus.SUCCEEDED, + Assertions.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue()); TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null); - Assert.assertEquals(0, entities.getEntities().size()); + Assertions.assertEquals(0, entities.getEntities().size()); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true); job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir); - Assert.assertEquals(JobStatus.SUCCEEDED, + Assertions.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue()); entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null); - Assert.assertEquals(1, entities.getEntities().size()); + Assertions.assertEquals(1, entities.getEntities().size()); TimelineEntity tEntity = entities.getEntities().get(0); - Assert.assertEquals(job.getID().toString(), tEntity.getEntityId()); + Assertions.assertEquals(job.getID().toString(), tEntity.getEntityId()); } finally { if (cluster != null) { 
cluster.stop(); @@ -532,21 +532,21 @@ public void testMapreduceJobTimelineServiceEnabled() conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false); RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir); - Assert.assertEquals(JobStatus.SUCCEEDED, + Assertions.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue()); TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null); - Assert.assertEquals(0, entities.getEntities().size()); + Assertions.assertEquals(0, entities.getEntities().size()); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true); job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir); - Assert.assertEquals(JobStatus.SUCCEEDED, + Assertions.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue()); entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null); - Assert.assertEquals(1, entities.getEntities().size()); + Assertions.assertEquals(1, entities.getEntities().size()); TimelineEntity tEntity = entities.getEntities().get(0); - Assert.assertEquals(job.getID().toString(), tEntity.getEntityId()); + Assertions.assertEquals(job.getID().toString(), tEntity.getEntityId()); } finally { if (cluster != null) { cluster.stop(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapOutputType.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapOutputType.java index e3860fd2e2592..3d9522958321c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapOutputType.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapOutputType.java @@ -31,11 +31,11 @@ import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapreduce.MRConfig; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; /** @@ -90,7 +90,7 @@ public void close() { } } - @Before + @BeforeEach public void configure() throws Exception { Path testdir = new Path(TEST_DIR.getAbsolutePath()); Path inDir = new Path(testdir, "in"); @@ -124,7 +124,7 @@ public void configure() throws Exception { jc = new JobClient(conf); } - @After + @AfterEach public void cleanup() { FileUtil.fullyDelete(TEST_DIR); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java index f70a552327f45..9f157b6b97fe4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java @@ -36,11 +36,11 @@ import org.apache.hadoop.mapreduce.split.JobSplitWriter; import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader; 
import org.apache.hadoop.util.ReflectionUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Validates map phase progress. @@ -193,8 +193,8 @@ public void setProgress(float progress) { return; } // validate map task progress when the map task is in map phase - assertTrue("Map progress is not the expected value.", - Math.abs(mapTaskProgress - ((float)recordNum/3)) < 0.001); + assertTrue( + Math.abs(mapTaskProgress - ((float)recordNum/3)) < 0.001, "Map progress is not the expected value."); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java index a63dbec6d9f78..bea1c8e6ab438 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java @@ -48,11 +48,11 @@ import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import org.junit.After; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; /********************************************************** * MapredLoadTest generates a bunch of work that exercises @@ -254,7 +254,7 @@ public void close() { private static int counts = 100; private static Random r = new Random(); - @After + @AfterEach public void cleanup() { FileUtil.fullyDelete(TEST_DIR); } @@ -309,12 +309,12 @@ public void reduce(WritableComparable key, Iterator values, mapOutputFile.setConf(conf); Path input = mapOutputFile.getInputFile(0); FileSystem fs = FileSystem.get(conf); - assertTrue("reduce input exists " + input, fs.exists(input)); + assertTrue(fs.exists(input), "reduce input exists " + input); SequenceFile.Reader rdr = new SequenceFile.Reader(fs, input, conf); - assertEquals("is reduce input compressed " + input, + assertEquals( compressInput, - rdr.isCompressed()); + rdr.isCompressed(), "is reduce input compressed " + input); rdr.close(); } } @@ -372,10 +372,10 @@ public void testNullKeys() throws Exception { new Path(testdir, "nullout/part-00000"), conf); m = "AAAAAAAAAAAAAA"; for (int i = 1; r.next(NullWritable.get(), t); ++i) { - assertTrue("Unexpected value: " + t, values.remove(t.toString())); + assertTrue(values.remove(t.toString()), "Unexpected value: " + t); m = m.replace((char)('A' + i - 1), (char)('A' + i)); } - assertTrue("Missing values: " + values.toString(), values.isEmpty()); + assertTrue(values.isEmpty(), "Missing values: " + values.toString()); } private void checkCompression(boolean compressMapOutputs, @@ -415,16 +415,16 @@ private void checkCompression(boolean compressMapOutputs, f.writeBytes("Is this done, yet?\n"); f.close(); RunningJob rj = JobClient.runJob(conf); - assertTrue("job was complete", rj.isComplete()); - assertTrue("job was successful", rj.isSuccessful()); + 
assertTrue(rj.isComplete(), "job was complete"); + assertTrue(rj.isSuccessful(), "job was successful"); Path output = new Path(outDir, Task.getOutputName(0)); - assertTrue("reduce output exists " + output, fs.exists(output)); + assertTrue(fs.exists(output), "reduce output exists " + output); SequenceFile.Reader rdr = new SequenceFile.Reader(fs, output, conf); - assertEquals("is reduce output compressed " + output, + assertEquals( redCompression != CompressionType.NONE, - rdr.isCompressed()); + rdr.isCompressed(), "is reduce output compressed " + output); rdr.close(); } finally { fs.delete(testdir, true); @@ -663,7 +663,7 @@ public void launch() throws Exception { } finally { bw.close(); } - assertTrue("testMapRed failed", success); + assertTrue(success, "testMapRed failed"); fs.delete(testdir, true); } @@ -778,7 +778,7 @@ public void runJob(int items) { JobClient.runJob(conf); } catch (Exception e) { - assertTrue("Threw exception:" + e,false); + assertTrue(false, "Threw exception: " + e); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java index b8a16e146e9e1..a17119ba73597 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java @@ -44,8 +44,8 @@ import org.apache.hadoop.io.serializer.Serializer; import org.apache.hadoop.mapred.Task.TaskReporter; -import org.junit.Test; -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; @SuppressWarnings(value={"unchecked", "deprecation"}) /** diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java index fc49fa569024e..35d92cd0fad7b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster; @@ -50,7 +50,7 @@ public void testMiniMRYarnClusterWithoutJHS() throws IOException { mr = new MiniMRYarnCluster("testMiniMRYarnClusterWithoutJHS"); mr.init(conf); mr.start(); - Assert.assertEquals(null, mr.getHistoryServer()); + Assertions.assertEquals(null, mr.getHistoryServer()); } finally { if (mr != null) { mr.stop(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java index
28376ebc89c7b..2b50eb6c62230 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java @@ -18,10 +18,10 @@ package org.apache.hadoop.mapred; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.DataOutputStream; import java.io.File; @@ -42,9 +42,9 @@ import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster; import org.apache.hadoop.util.Shell; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -170,19 +170,19 @@ public void configure(JobConf job) { boolean oldConfigs = job.getBoolean(OLD_CONFIGS, false); if (oldConfigs) { String javaOpts = job.get(JobConf.MAPRED_TASK_JAVA_OPTS); - assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!", - javaOpts); + assertNotNull( + javaOpts, JobConf.MAPRED_TASK_JAVA_OPTS + " is null!"); assertThat(javaOpts) .withFailMessage(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " + javaOpts) .isEqualTo(TASK_OPTS_VAL); } else { String mapJavaOpts = job.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS); - assertNotNull(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!", - mapJavaOpts); - assertEquals(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " has value of: " + - mapJavaOpts, - mapJavaOpts, MAP_OPTS_VAL); + assertNotNull( + mapJavaOpts, JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!"); + assertEquals( + mapJavaOpts, MAP_OPTS_VAL, JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " has value of: " + + mapJavaOpts); } // check if X=y works for an already existing parameter @@ -193,8 +193,8 @@ public void configure(JobConf job) { checkEnv("NEW_PATH", File.pathSeparator + "/tmp", "noappend"); String jobLocalDir = job.get(MRJobConfig.JOB_LOCAL_DIR); - assertNotNull(MRJobConfig.JOB_LOCAL_DIR + " is null", - jobLocalDir); + assertNotNull( + jobLocalDir, MRJobConfig.JOB_LOCAL_DIR + " is null"); } public void map(WritableComparable key, Writable value, @@ -214,16 +214,16 @@ public void configure(JobConf job) { boolean oldConfigs = job.getBoolean(OLD_CONFIGS, false); if (oldConfigs) { String javaOpts = job.get(JobConf.MAPRED_TASK_JAVA_OPTS); - assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!", - javaOpts); + assertNotNull( + javaOpts, JobConf.MAPRED_TASK_JAVA_OPTS + " is null!"); assertThat(javaOpts) .withFailMessage(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " + javaOpts) .isEqualTo(TASK_OPTS_VAL); } else { String reduceJavaOpts = job.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS); - assertNotNull(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!", - reduceJavaOpts); + assertNotNull( + reduceJavaOpts, JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!"); assertThat(reduceJavaOpts) .withFailMessage(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " has value of: " + 
reduceJavaOpts) @@ -247,7 +247,7 @@ public void reduce(WritableComparable key, Iterator values, } - @BeforeClass + @BeforeAll public static void setup() throws IOException { // create configuration, dfs, file system and mapred cluster dfs = new MiniDFSCluster.Builder(conf).build(); @@ -272,7 +272,7 @@ public static void setup() throws IOException { localFs.setPermission(APP_JAR, new FsPermission("700")); } - @AfterClass + @AfterAll public static void tearDown() { // close file system and shut down dfs and mapred cluster try { @@ -378,7 +378,7 @@ private void runTestTaskEnv(JobConf config, Path inDir, Path outDir, job.setMaxMapAttempts(1); // speed up failures job.waitForCompletion(true); boolean succeeded = job.waitForCompletion(true); - assertTrue("The environment checker job failed.", succeeded); + assertTrue(succeeded, "The environment checker job failed."); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java index 71f04078b6c12..6d08b15ef9dc3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java @@ -32,8 +32,8 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; /** * A JUnit test to test Mini Map-Reduce Cluster with multiple directories @@ -175,7 +175,7 @@ public void testClassPath() throws IOException { String result; result = launchWordCount(fileSys.getUri(), jobConf, "The quick brown fox\nhas many silly\n" + "red fox sox\n", 3, 1); - Assert.assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" + Assertions.assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" + "quick\t1\nred\t1\nsilly\t1\nsox\t1\n", result); } finally { @@ -208,7 +208,7 @@ public void testExternalWritable() result = launchExternal(fileSys.getUri(), jobConf, "Dennis was here!\nDennis again!", 3, 1); - Assert.assertEquals("Dennis again!\t1\nDennis was here!\t1\n", result); + Assertions.assertEquals("Dennis again!\t1\nDennis was here!\t1\n", result); } finally { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClientCluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClientCluster.java index 7630d18798eae..9204e4f7052fe 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClientCluster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClientCluster.java @@ -18,8 +18,8 @@ package org.apache.hadoop.mapred; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; 
import java.io.IOException; import java.util.StringTokenizer; @@ -34,9 +34,9 @@ import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; /** * Basic testing for the MiniMRClientCluster. This test shows an example class @@ -54,7 +54,7 @@ public class TestMiniMRClientCluster { private class InternalClass { } - @BeforeClass + @BeforeAll public static void setup() throws IOException { final Configuration conf = new Configuration(); final Path TEST_ROOT_DIR = new Path(System.getProperty("test.build.data", @@ -81,7 +81,7 @@ public static void setup() throws IOException { InternalClass.class, 1, new Configuration()); } - @AfterClass + @AfterAll public static void cleanup() throws IOException { // clean up the input and output files final Configuration conf = new Configuration(); @@ -128,27 +128,27 @@ public void testRestart() throws Exception { String mrHistWebAppAddress2 = mrCluster.getConfig().get( JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS); - assertEquals("Address before restart: " + rmAddress1 - + " is different from new address: " + rmAddress2, rmAddress1, - rmAddress2); - assertEquals("Address before restart: " + rmAdminAddress1 - + " is different from new address: " + rmAdminAddress2, - rmAdminAddress1, rmAdminAddress2); - assertEquals("Address before restart: " + rmSchedAddress1 - + " is different from new address: " + rmSchedAddress2, - rmSchedAddress1, rmSchedAddress2); - assertEquals("Address before restart: " + rmRstrackerAddress1 - + " is different from new address: " + rmRstrackerAddress2, - rmRstrackerAddress1, rmRstrackerAddress2); - assertEquals("Address before restart: " + rmWebAppAddress1 - + " is different from new address: " + rmWebAppAddress2, - rmWebAppAddress1, rmWebAppAddress2); - assertEquals("Address before restart: " + mrHistAddress1 - + " is different from new address: " + mrHistAddress2, mrHistAddress1, - mrHistAddress2); - assertEquals("Address before restart: " + mrHistWebAppAddress1 - + " is different from new address: " + mrHistWebAppAddress2, - mrHistWebAppAddress1, mrHistWebAppAddress2); + assertEquals(rmAddress1, + rmAddress2, "Address before restart: " + rmAddress1 + + " is different from new address: " + rmAddress2); + assertEquals( + rmAdminAddress1, rmAdminAddress2, "Address before restart: " + rmAdminAddress1 + + " is different from new address: " + rmAdminAddress2); + assertEquals( + rmSchedAddress1, rmSchedAddress2, "Address before restart: " + rmSchedAddress1 + + " is different from new address: " + rmSchedAddress2); + assertEquals( + rmRstrackerAddress1, rmRstrackerAddress2, "Address before restart: " + rmRstrackerAddress1 + + " is different from new address: " + rmRstrackerAddress2); + assertEquals( + rmWebAppAddress1, rmWebAppAddress2, "Address before restart: " + rmWebAppAddress1 + + " is different from new address: " + rmWebAppAddress2); + assertEquals(mrHistAddress1, + mrHistAddress2, "Address before restart: " + mrHistAddress1 + + " is different from new address: " + mrHistAddress2); + assertEquals( + mrHistWebAppAddress1, mrHistWebAppAddress2, "Address before restart: " + mrHistWebAppAddress1 + + " is different from new address: " + mrHistWebAppAddress2); } @@ -165,14 +165,14 @@ public void testJob() throws Exception { private void
validateCounters(Counters counters, long mapInputRecords, long mapOutputRecords, long reduceInputGroups, long reduceOutputRecords) { - assertEquals("MapInputRecords", mapInputRecords, counters.findCounter( - "MyCounterGroup", "MAP_INPUT_RECORDS").getValue()); - assertEquals("MapOutputRecords", mapOutputRecords, counters.findCounter( - "MyCounterGroup", "MAP_OUTPUT_RECORDS").getValue()); - assertEquals("ReduceInputGroups", reduceInputGroups, counters.findCounter( - "MyCounterGroup", "REDUCE_INPUT_GROUPS").getValue()); - assertEquals("ReduceOutputRecords", reduceOutputRecords, counters - .findCounter("MyCounterGroup", "REDUCE_OUTPUT_RECORDS").getValue()); + assertEquals(mapInputRecords, counters.findCounter( + "MyCounterGroup", "MAP_INPUT_RECORDS").getValue(), "MapInputRecords"); + assertEquals(mapOutputRecords, counters.findCounter( + "MyCounterGroup", "MAP_OUTPUT_RECORDS").getValue(), "MapOutputRecords"); + assertEquals(reduceInputGroups, counters.findCounter( + "MyCounterGroup", "REDUCE_INPUT_GROUPS").getValue(), "ReduceInputGroups"); + assertEquals(reduceOutputRecords, counters + .findCounter("MyCounterGroup", "REDUCE_OUTPUT_RECORDS").getValue(), "ReduceOutputRecords"); } private static void createFile(Path inFile, Configuration conf) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java index 3f64f7a35b961..73344f7df49a2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java @@ -22,8 +22,8 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.mapred.MRCaching.TestResult; import org.junit.Ignore; -import org.junit.Test; -import static org.junit.Assert.assertTrue; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; @@ -52,7 +52,7 @@ public void testWithDFS() throws IOException { mr.createJobConf(), "The quick brown fox\nhas many silly\n" + "red fox sox\n"); - assertTrue("Archives not matching", ret.isOutputOk); + assertTrue(ret.isOutputOk, "Archives not matching"); // launch MR cache with symlinks ret = MRCaching.launchMRCache("/testing/wc/input", "/testing/wc/output", @@ -60,7 +60,7 @@ public void testWithDFS() throws IOException { mr.createJobConf(), "The quick brown fox\nhas many silly\n" + "red fox sox\n"); - assertTrue("Archives not matching", ret.isOutputOk); + assertTrue(ret.isOutputOk, "Archives not matching"); } finally { if (fileSys != null) { fileSys.close(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRWithDFSWithDistinctUsers.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRWithDFSWithDistinctUsers.java index 221fd3c15c6aa..e245acb0eb417 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRWithDFSWithDistinctUsers.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRWithDFSWithDistinctUsers.java @@ -28,10 +28,10 @@ import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; import org.apache.hadoop.security.UserGroupInformation; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; /** * A JUnit test to test Mini Map-Reduce Cluster with Mini-DFS. @@ -70,10 +70,10 @@ public RunningJob run() throws IOException { }); rj.waitForCompletion(); - Assert.assertEquals("SUCCEEDED", JobStatus.getJobRunState(rj.getJobState())); + Assertions.assertEquals("SUCCEEDED", JobStatus.getJobRunState(rj.getJobState())); } - @Before + @BeforeEach public void setUp() throws Exception { dfs = new MiniDFSCluster.Builder(conf).numDataNodes(4).build(); @@ -98,7 +98,7 @@ public FileSystem run() throws IOException { 1, null, null, MR_UGI, mrConf); } - @After + @AfterEach public void tearDown() throws Exception { if (mr != null) { mr.shutdown();} if (dfs != null) { dfs.shutdown(); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java index 3a02f4cbe1fff..e9c2a23a00fd5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileInputFormat.java @@ -25,12 +25,12 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.Text; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; public class TestMultiFileInputFormat { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java index d57be9d537a73..b6bb719101bb0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java @@ -28,11 +28,11 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java index 7e8dfef03f134..116ce72b4c2cd 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java @@ -28,11 +28,11 @@ import org.apache.hadoop.mapreduce.JobCounter; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * This test checks whether the task caches are created and used properly. @@ -158,14 +158,14 @@ static void launchJobAndTestCounters(String jobName, MiniMRCluster mr, } RunningJob job = launchJob(jobConf, in, out, numMaps, jobName); Counters counters = job.getCounters(); - assertEquals("Number of local maps", - counters.getCounter(JobCounter.OTHER_LOCAL_MAPS), otherLocalMaps); - assertEquals("Number of Data-local maps", - counters.getCounter(JobCounter.DATA_LOCAL_MAPS), - dataLocalMaps); - assertEquals("Number of Rack-local maps", - counters.getCounter(JobCounter.RACK_LOCAL_MAPS), - rackLocalMaps); + assertEquals( + counters.getCounter(JobCounter.OTHER_LOCAL_MAPS), otherLocalMaps, "Number of local maps"); + assertEquals( + counters.getCounter(JobCounter.DATA_LOCAL_MAPS), + dataLocalMaps, "Number of Data-local maps"); + assertEquals( + counters.getCounter(JobCounter.RACK_LOCAL_MAPS), + rackLocalMaps, "Number of Rack-local maps"); mr.waitUntilIdle(); mr.shutdown(); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java index 114b6054d3fe4..61f6239502d94 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java @@ -22,13 +22,13 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.lib.MultipleTextOutputFormat; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.File; import java.io.IOException; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; public class TestMultipleTextOutputFormat { private static JobConf defaultConf = new JobConf(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java index ed8ed61ff9b16..fa653697f1b5c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java @@ -19,7 +19,7 @@ package org.apache.hadoop.mapred; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -47,7 +47,8 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestNetworkedJob { private static String TEST_ROOT_DIR = new File(System.getProperty( @@ -56,7 +57,8 @@ public class TestNetworkedJob { private static Path inFile = new Path(testDir, "in"); private static Path outDir = new Path(testDir, "out"); - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testGetNullCounters() throws Exception { //mock creation Job mockJob = mock(Job.class); @@ -68,7 +70,8 @@ public void testGetNullCounters() throws Exception { verify(mockJob).getCounters(); } - @Test (timeout=500000) + @Test + @Timeout(value = 500) public void testGetJobStatus() throws IOException, InterruptedException, ClassNotFoundException { MiniMRClientCluster mr = null; @@ -101,11 +104,11 @@ public void testGetJobStatus() throws IOException, InterruptedException, // The following asserts read JobStatus twice and ensure the returned // JobStatus objects correspond to the same Job. - assertEquals("Expected matching JobIDs", jobId, client.getJob(jobId) - .getJobStatus().getJobID()); - assertEquals("Expected matching startTimes", rj.getJobStatus() + assertEquals(jobId, client.getJob(jobId) + .getJobStatus().getJobID(), "Expected matching JobIDs"); + assertEquals(rj.getJobStatus() .getStartTime(), client.getJob(jobId).getJobStatus() - .getStartTime()); + .getStartTime(), "Expected matching startTimes"); } finally { if (fileSys != null) { fileSys.delete(testDir, true); @@ -120,7 +123,8 @@ public void testGetJobStatus() throws IOException, InterruptedException, * @throws Exception */ @SuppressWarnings( "deprecation" ) - @Test (timeout=500000) + @Test + @Timeout(value = 500) public void testNetworkedJob() throws Exception { // mock creation MiniMRClientCluster mr = null; @@ -252,10 +256,10 @@ public void testNetworkedJob() throws Exception { // test JobClient // The following asserts read JobStatus twice and ensure the returned // JobStatus objects correspond to the same Job. 
- assertEquals("Expected matching JobIDs", jobId, client.getJob(jobId) - .getJobStatus().getJobID()); - assertEquals("Expected matching startTimes", rj.getJobStatus() - .getStartTime(), client.getJob(jobId).getJobStatus().getStartTime()); + assertEquals(jobId, client.getJob(jobId) + .getJobStatus().getJobID(), "Expected matching JobIDs"); + assertEquals(rj.getJobStatus() + .getStartTime(), client.getJob(jobId).getJobStatus().getStartTime(), "Expected matching startTimes"); } finally { if (fileSys != null) { fileSys.delete(testDir, true); @@ -271,7 +275,8 @@ public void testNetworkedJob() throws Exception { * * @throws IOException */ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testBlackListInfo() throws IOException { BlackListInfo info = new BlackListInfo(); info.setBlackListReport("blackListInfo"); @@ -293,7 +298,8 @@ public void testBlackListInfo() throws IOException { * test run from command line JobQueueClient * @throws Exception */ - @Test (timeout=500000) + @Test + @Timeout(value = 500) public void testJobQueueClient() throws Exception { MiniMRClientCluster mr = null; FileSystem fileSys = null; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java index 1f6395dfb7892..fd464ef0778d2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java @@ -18,8 +18,8 @@ package org.apache.hadoop.mapred; -import org.junit.After; -import org.junit.Assert; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; @@ -28,7 +28,7 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.BufferedReader; import java.io.File; @@ -119,7 +119,7 @@ public int compare(Text o1, Text o2) { } - @After + @AfterEach public void cleanup() { FileUtil.fullyDelete(testRootDir); } @@ -169,30 +169,30 @@ public void testCombiner() throws Exception { long combinerOutputRecords = counters.getGroup( "org.apache.hadoop.mapreduce.TaskCounter"). 
getCounter("COMBINE_OUTPUT_RECORDS"); - Assert.assertTrue(combinerInputRecords > 0); - Assert.assertTrue(combinerInputRecords > combinerOutputRecords); + Assertions.assertTrue(combinerInputRecords > 0); + Assertions.assertTrue(combinerInputRecords > combinerOutputRecords); BufferedReader br = new BufferedReader(new FileReader( new File(out, "part-00000"))); Set output = new HashSet(); String line = br.readLine(); - Assert.assertNotNull(line); + Assertions.assertNotNull(line); output.add(line.substring(0, 1) + line.substring(4, 5)); line = br.readLine(); - Assert.assertNotNull(line); + Assertions.assertNotNull(line); output.add(line.substring(0, 1) + line.substring(4, 5)); line = br.readLine(); - Assert.assertNull(line); + Assertions.assertNull(line); br.close(); Set expected = new HashSet(); expected.add("A2"); expected.add("B5"); - Assert.assertEquals(expected, output); + Assertions.assertEquals(expected, output); } else { - Assert.fail("Job failed"); + Assertions.fail("Job failed"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java index bdfe0f5dc6982..df2ca9ec2f79d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java @@ -32,9 +32,10 @@ import org.w3c.dom.Document; import org.w3c.dom.Element; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestQueueConfigurationParser { /** @@ -42,7 +43,8 @@ public class TestQueueConfigurationParser { * @throws ParserConfigurationException * @throws Exception */ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testQueueConfigurationParser() throws ParserConfigurationException, Exception { JobQueueInfo info = new JobQueueInfo("root", "rootInfo"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java index 767459f88b4df..be1410eb1e969 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java @@ -19,10 +19,10 @@ package org.apache.hadoop.mapred; import org.apache.hadoop.mapreduce.TaskCounter; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestReduceFetch extends TestReduceFetchFromPartialMem { @@ -44,10 +44,10 @@ public void testReduceFromDisk() throws Exception { Counters c = runJob(job); final long spill = 
c.findCounter(TaskCounter.SPILLED_RECORDS).getCounter(); final long out = c.findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getCounter(); - assertTrue("Expected all records spilled during reduce (" + spill + ")", - spill >= 2 * out); // all records spill at map, reduce - assertTrue("Expected intermediate merges (" + spill + ")", - spill >= 2 * out + (out / MAP_TASKS)); // some records hit twice + assertTrue( + spill >= 2 * out, "Expected all records spilled during reduce (" + spill + ")"); // all records spill at map, reduce + assertTrue( + spill >= 2 * out + (out / MAP_TASKS), "Expected intermediate merges (" + spill + ")"); // some records hit twice } /** @@ -65,6 +65,6 @@ public void testReduceFromMem() throws Exception { Counters c = runJob(job); final long spill = c.findCounter(TaskCounter.SPILLED_RECORDS).getCounter(); final long out = c.findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getCounter(); - assertEquals("Spilled records: " + spill, out, spill); // no reduce spill + assertEquals(out, spill, "Spilled records: " + spill); // no reduce spill } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java index 1b99ce0c0aa1d..18fae4c61659e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java @@ -27,9 +27,9 @@ import org.apache.hadoop.io.WritableComparator; import org.apache.hadoop.mapreduce.TaskCounter; import org.apache.hadoop.mapreduce.task.reduce.Fetcher; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.DataInput; import java.io.DataOutput; @@ -39,16 +39,16 @@ import java.util.Iterator; import static org.apache.hadoop.mapreduce.task.reduce.Fetcher.SHUFFLE_ERR_GRP_NAME; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; public class TestReduceFetchFromPartialMem { protected static MiniMRCluster mrCluster = null; protected static MiniDFSCluster dfsCluster = null; - @Before + @BeforeEach public void setUp() throws Exception { Configuration conf = new Configuration(); dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); @@ -56,7 +56,7 @@ public void setUp() throws Exception { dfsCluster.getFileSystem().getUri().toString(), 1); } - @After + @AfterEach public void tearDown() throws Exception { if (dfsCluster != null) { dfsCluster.shutdown(); } if (mrCluster != null) { mrCluster.shutdown(); } @@ -87,8 +87,8 @@ public void testReduceFromPartialMem() throws Exception { Counters c = runJob(job); final long out = c.findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getCounter(); final long spill = c.findCounter(TaskCounter.SPILLED_RECORDS).getCounter(); - assertTrue("Expected some records not spilled during reduce" + spill + ")", - spill < 
2 * out); // spilled map records, some records at the reduce + assertTrue(spill < 2 * out, + "Expected some records not spilled during reduce (" + spill + ")"); // spilled map records, some records at the reduce long shuffleIoErrors = c.getGroup(SHUFFLE_ERR_GRP_NAME).getCounter(Fetcher.ShuffleErrors.IO_ERROR.toString()); assertEquals(0, shuffleIoErrors); @@ -226,8 +226,8 @@ public void reduce(Text key, Iterator<Text> values, out.collect(key, val); ++nRec; } - assertEquals("Bad rec count for " + key, recCheck, nRec - preRec); - assertEquals("Bad rec group for " + key, vcCheck, vc); + assertEquals(recCheck, nRec - preRec, "Bad rec count for " + key); + assertEquals(vcCheck, vc, "Bad rec group for " + key); } @Override @@ -235,7 +235,7 @@ public void close() throws IOException { assertEquals(4095, nKey); assertEquals(nMaps - 1, aKey); assertEquals(nMaps - 1, bKey); - assertEquals("Bad record count", nMaps * (4096 + 2), nRec); + assertEquals(nMaps * (4096 + 2), nRec, "Bad record count"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java index 69546a6cba24e..f904ca05a865a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceTask.java @@ -26,12 +26,12 @@ import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.util.Progressable; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * This test exercises the ValueIterator. diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java index 48df092e377c8..6e3a10a3ab8f6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java @@ -31,11 +31,11 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; /** * Tests the old mapred APIs with {@link Reporter#getProgress()}.
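The TestReduceFetchFromPartialMem and TestReporter hunks above show the lifecycle half of this migration: org.junit's Before/After become org.junit.jupiter.api's BeforeEach/AfterEach, and BeforeClass/AfterClass become BeforeAll/AfterAll. As a minimal sketch of the full mapping (the class and method names below are invented for illustration and are not part of this patch):

    import org.junit.jupiter.api.AfterAll;
    import org.junit.jupiter.api.AfterEach;
    import org.junit.jupiter.api.BeforeAll;
    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.Test;

    class LifecycleMappingExample {
      @BeforeAll   // was @BeforeClass; JUnit 5 still requires this to be static
      static void setupClass() { }

      @BeforeEach  // was @Before; runs before every test method
      void setUp() { }

      @Test
      void testSomething() { }

      @AfterEach   // was @After; runs after every test method
      void tearDown() { }

      @AfterAll    // was @AfterClass; must also be static
      static void cleanupClass() { }
    }

Because @BeforeAll and @AfterAll keep JUnit 4's static requirement (unless a class opts into per-class test-instance lifecycle), TestReporter's static setup() and cleanup() methods migrate with no signature change.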
@@ -48,14 +48,14 @@ public class TestReporter { private static FileSystem fs = null; - @BeforeClass + @BeforeAll public static void setup() throws Exception { fs = FileSystem.getLocal(new Configuration()); fs.delete(testRootTempDir, true); fs.mkdirs(testRootTempDir); } - @AfterClass + @AfterAll public static void cleanup() throws Exception { fs.delete(testRootTempDir, true); } @@ -92,16 +92,16 @@ public void map(LongWritable key, Text value, float mapProgress = ((float)++numRecords)/INPUT_LINES; // calculate the attempt progress based on the progress range float attemptProgress = progressRange * mapProgress; - assertEquals("Invalid progress in map", - attemptProgress, reporter.getProgress(), 0f); + assertEquals(attemptProgress, reporter.getProgress(), 0f, + "Invalid progress in map"); output.collect(new Text(value.toString() + numRecords), value); } @Override public void close() throws IOException { super.close(); - assertEquals("Invalid progress in map cleanup", - progressRange, reporter.getProgress(), 0f); + assertEquals(progressRange, reporter.getProgress(), 0f, + "Invalid progress in map cleanup"); } } @@ -147,7 +147,7 @@ public void testReporterProgressForMapOnlyJob() throws IOException { 1, 0, INPUT); job.waitForCompletion(); - assertTrue("Job failed", job.isSuccessful()); + assertTrue(job.isSuccessful(), "Job failed"); } /** @@ -175,18 +175,17 @@ public void reduce(Text key, Iterator values, throws IOException { float reducePhaseProgress = ((float)++recordCount)/INPUT_LINES; float weightedReducePhaseProgress = - reducePhaseProgress * REDUCE_PROGRESS_RANGE; - assertEquals("Invalid progress in reduce", - SHUFFLE_PROGRESS_RANGE + weightedReducePhaseProgress, - reporter.getProgress(), 0.02f); + reducePhaseProgress * REDUCE_PROGRESS_RANGE; + assertEquals(SHUFFLE_PROGRESS_RANGE + weightedReducePhaseProgress, + reporter.getProgress(), 0.02f, "Invalid progress in reduce"); this.reporter = reporter; } @Override public void close() throws IOException { super.close(); - assertEquals("Invalid progress in reduce cleanup", - 1.0f, reporter.getProgress(), 0f); + assertEquals(1.0f, reporter.getProgress(), 0f, + "Invalid progress in reduce cleanup"); } } @@ -210,7 +209,7 @@ public void testReporterProgressForMRJob() throws IOException { 1, 1, INPUT); job.waitForCompletion(); - assertTrue("Job failed", job.isSuccessful()); + assertTrue(job.isSuccessful(), "Job failed"); } @Test @@ -244,7 +243,7 @@ public void testStatusLimit() throws IOException, InterruptedException, job.waitForCompletion(true); - assertTrue("Job failed", job.isSuccessful()); + assertTrue(job.isSuccessful(), "Job failed"); } } \ No newline at end of file diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java index d6754f06f49e4..14b0abaa88f8d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java @@ -22,7 +22,7 @@ import java.util.ArrayList; import java.util.List; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; import org.apache.hadoop.mapreduce.JobStatus; import 
org.apache.hadoop.mapreduce.JobStatus.State; @@ -41,7 +41,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.Records; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; @@ -69,7 +69,7 @@ public void testGetRootQueues() throws IOException, InterruptedException { new YarnConfiguration()) { @Override protected void serviceStart() throws Exception { - Assert.assertTrue(this.client instanceof YarnClientImpl); + Assertions.assertTrue(this.client instanceof YarnClientImpl); ((YarnClientImpl) this.client).setRMClient(applicationsManager); } }; @@ -84,10 +84,10 @@ protected void serviceStart() throws Exception { throw new IOException(e); } - Assert.assertTrue("Children of root queue not requested", - argument.getValue().getIncludeChildQueues()); - Assert.assertTrue("Request wasn't to recurse through children", - argument.getValue().getRecursive()); + Assertions.assertTrue( + argument.getValue().getIncludeChildQueues(), "Children of root queue not requested"); + Assertions.assertTrue( + argument.getValue().getRecursive(), "Request wasn't to recurse through children"); } @Test @@ -113,16 +113,16 @@ public void tesAllJobs() throws Exception { new YarnConfiguration()) { @Override protected void serviceStart() throws Exception { - Assert.assertTrue(this.client instanceof YarnClientImpl); + Assertions.assertTrue(this.client instanceof YarnClientImpl); ((YarnClientImpl) this.client).setRMClient(applicationsManager); } }; JobStatus[] allJobs = resourceMgrDelegate.getAllJobs(); - Assert.assertEquals(State.FAILED, allJobs[0].getState()); - Assert.assertEquals(State.SUCCEEDED, allJobs[1].getState()); - Assert.assertEquals(State.KILLED, allJobs[2].getState()); - Assert.assertEquals(State.FAILED, allJobs[3].getState()); + Assertions.assertEquals(State.FAILED, allJobs[0].getState()); + Assertions.assertEquals(State.SUCCEEDED, allJobs[1].getState()); + Assertions.assertEquals(State.KILLED, allJobs[2].getState()); + Assertions.assertEquals(State.FAILED, allJobs[3].getState()); } private ApplicationReport getApplicationReport( diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java index 45788cff03972..bb73bf3199618 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java @@ -25,13 +25,13 @@ import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Text; import org.slf4j.Logger; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.util.Random; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestSequenceFileAsBinaryInputFormat { private static final Logger LOG = FileInputFormat.LOG; @@ -89,20 +89,20 @@ public void testBinary() throws IOException { 
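Every assertion rewrite in this patch, including the TestResourceMgrDelegate and testBinary() hunks around this point, follows one mechanical rule: org.junit.Assert takes the optional failure message as the first argument, while org.junit.jupiter.api.Assertions takes it as the last. A before/after sketch with invented values, not code from the patch:

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    class MessageOrderExample {
      void check(long expected, long actual, boolean recursive) {
        // JUnit 4: assertEquals("Queue count mismatch", expected, actual);
        assertEquals(expected, actual, "Queue count mismatch");
        // JUnit 4: assertTrue("Request wasn't recursive", recursive);
        assertTrue(recursive, "Request wasn't recursive");
      }
    }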
buf.reset(bval.getBytes(), bval.getLength()); cmpval.readFields(buf); assertTrue( - "Keys don't match: " + "*" + cmpkey.toString() + ":" + - tkey.toString() + "*", - cmpkey.toString().equals(tkey.toString())); + cmpkey.toString().equals(tkey.toString()), + "Keys don't match: " + "*" + cmpkey.toString() + ":" + + tkey.toString() + "*"); assertTrue( - "Vals don't match: " + "*" + cmpval.toString() + ":" + - tval.toString() + "*", - cmpval.toString().equals(tval.toString())); + cmpval.toString().equals(tval.toString()), + "Vals don't match: " + "*" + cmpval.toString() + ":" + + tval.toString() + "*"); ++count; } } finally { reader.close(); } } - assertEquals("Some records not found", RECORDS, count); + assertEquals(RECORDS, count, "Some records not found"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java index 92b15131bb205..d99dfcafebb00 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java @@ -30,13 +30,13 @@ import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.SequenceFile.CompressionType; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; public class TestSequenceFileAsBinaryOutputFormat { private static final Logger LOG = @@ -123,9 +123,9 @@ public void testBinary() throws IOException { sourceInt = r.nextInt(); sourceDouble = r.nextDouble(); assertEquals( - "Keys don't match: " + "*" + iwritable.get() + ":" + - sourceInt + "*", - sourceInt, iwritable.get()); + sourceInt, iwritable.get(), + "Keys don't match: " + "*" + iwritable.get() + ":" + + sourceInt + "*"); assertThat(dwritable.get()).withFailMessage( "Vals don't match: " + "*" + dwritable.get() + ":" + sourceDouble + "*") @@ -136,7 +136,7 @@ public void testBinary() throws IOException { reader.close(); } } - assertEquals("Some records not found", RECORDS, count); + assertEquals(RECORDS, count, "Some records not found"); } @Test @@ -149,29 +149,29 @@ public void testSequenceOutputClassDefaultsToMapRedOutputClass() job.setOutputKeyClass(FloatWritable.class); job.setOutputValueClass(BooleanWritable.class); - assertEquals("SequenceFileOutputKeyClass should default to ouputKeyClass", - FloatWritable.class, - SequenceFileAsBinaryOutputFormat.getSequenceFileOutputKeyClass( - job)); - assertEquals("SequenceFileOutputValueClass should default to " - + "ouputValueClass", - BooleanWritable.class, - SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass( - job)); + assertEquals( + FloatWritable.class, + SequenceFileAsBinaryOutputFormat.getSequenceFileOutputKeyClass(job), + "SequenceFileOutputKeyClass should default to outputKeyClass"); + assertEquals( + BooleanWritable.class,
+ SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job), + "SequenceFileOutputValueClass should default to " + + "outputValueClass"); SequenceFileAsBinaryOutputFormat.setSequenceFileOutputKeyClass(job, IntWritable.class ); SequenceFileAsBinaryOutputFormat.setSequenceFileOutputValueClass(job, DoubleWritable.class ); - assertEquals("SequenceFileOutputKeyClass not updated", - IntWritable.class, - SequenceFileAsBinaryOutputFormat.getSequenceFileOutputKeyClass( - job)); - assertEquals("SequenceFileOutputValueClass not updated", - DoubleWritable.class, - SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass( - job)); + assertEquals( + IntWritable.class, + SequenceFileAsBinaryOutputFormat.getSequenceFileOutputKeyClass(job), + "SequenceFileOutputKeyClass not updated"); + assertEquals( + DoubleWritable.class, + SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job), + "SequenceFileOutputValueClass not updated"); } @Test diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java index aa62caedb1178..193da0b1b85c0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java @@ -26,13 +26,13 @@ import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Text; import org.slf4j.Logger; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.BitSet; import java.util.Random; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; public class TestSequenceFileAsTextInputFormat { private static final Logger LOG = FileInputFormat.LOG; @@ -94,7 +94,7 @@ public void testFormat() throws Exception { RecordReader<Text, Text> reader = format.getRecordReader(splits[j], job, reporter); Class readerClass = reader.getClass(); - assertEquals("reader class is SequenceFileAsTextRecordReader.", SequenceFileAsTextRecordReader.class, readerClass); + assertEquals(SequenceFileAsTextRecordReader.class, readerClass, "reader class is SequenceFileAsTextRecordReader."); Text value = reader.createValue(); Text key = reader.createKey(); try { @@ -105,7 +105,7 @@ public void testFormat() throws Exception { // LOG.info("@"+reader.getPos()); // } int keyInt = Integer.parseInt(key.toString()); - assertFalse("Key in multiple partitions.", bits.get(keyInt)); + assertFalse(bits.get(keyInt), "Key in multiple partitions."); bits.set(keyInt); count++; } @@ -114,7 +114,7 @@ public void testFormat() throws Exception { reader.close(); } } - assertEquals("Some keys in no partition.", length, bits.cardinality()); + assertEquals(length, bits.cardinality(), "Some keys in no partition."); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java index b3a0b8b6cd723..f21a88bd9466b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java @@ -25,13 +25,13 @@ import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Text; import org.slf4j.Logger; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.util.Random; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestSequenceFileInputFilter { private static final Logger LOG = FileInputFormat.LOG; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java index 3af1647d6ca40..30be72155d140 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java @@ -25,13 +25,13 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.SequenceFile; import org.slf4j.Logger; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.BitSet; import java.util.Random; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; public class TestSequenceFileInputFormat { private static final Logger LOG = FileInputFormat.LOG; @@ -102,7 +102,7 @@ public void testFormat() throws Exception { // LOG.info("splits["+j+"]="+splits[j]+" : " + key.get()); // LOG.info("@"+reader.getPos()); // } - assertFalse("Key in multiple partitions.", bits.get(key.get())); + assertFalse(bits.get(key.get()), "Key in multiple partitions."); bits.set(key.get()); count++; } @@ -111,7 +111,7 @@ public void testFormat() throws Exception { reader.close(); } } - assertEquals("Some keys in no partition.", length, bits.cardinality()); + assertEquals(length, bits.cardinality(), "Some keys in no partition."); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java index ffa42e4f4e811..cb1f5cd57fe72 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java @@ -18,13 +18,13 @@ package org.apache.hadoop.mapred; import 
org.apache.hadoop.mapred.SortedRanges.Range; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Iterator; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestSortedRanges { private static final Logger LOG = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java index 5b4a14131fd81..859a9d17db8ea 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java @@ -30,12 +30,12 @@ import org.apache.hadoop.mapred.lib.IdentityMapper; import org.apache.hadoop.mapred.lib.IdentityReducer; import org.apache.hadoop.util.Progressable; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** * A JUnit test to test that jobs' output filenames are not HTML-encoded (cf HADOOP-1795). @@ -85,7 +85,7 @@ public static boolean launchJob(URI fileSys, try { assertTrue(runningJob.isComplete()); assertTrue(runningJob.isSuccessful()); - assertTrue("Output folder not found!", fs.exists(new Path("/testing/output/" + OUTPUT_FILENAME))); + assertTrue(fs.exists(new Path("/testing/output/" + OUTPUT_FILENAME)), "Output folder not found!"); } catch (NullPointerException npe) { // This NPE should no more happens fail("A NPE should not have happened."); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java index d9b94e43268a6..81fec8056943f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java @@ -21,12 +21,12 @@ import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow; import org.apache.hadoop.mapred.StatisticsCollector.Stat; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; public class TestStatisticsCollector { diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java index a534cfaff0bda..3dace91babf78 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java @@ -25,16 +25,16 @@ import org.apache.hadoop.mapred.SortedRanges.Range; import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.mapreduce.checkpoint.TaskCheckpointID; -import org.junit.After; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; import java.io.File; import java.io.IOException; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; @@ -86,7 +86,7 @@ public TestTaskCommit() throws IOException { super(LOCAL_MR, LOCAL_FS, 1, 1); } - @After + @AfterEach public void tearDown() throws Exception { super.tearDown(); FileUtil.fullyDelete(new File(rootDir.toString())); @@ -250,43 +250,43 @@ public void testTaskCleanupDoesNotCommit() throws Exception { task.setTaskCleanupTask(); MyUmbilical umbilical = new MyUmbilical(); task.run(job, umbilical); - assertTrue("Task did not succeed", umbilical.taskDone); + assertTrue(umbilical.taskDone, "Task did not succeed"); } @Test public void testCommitRequiredForMapTask() throws Exception { Task testTask = createDummyTask(TaskType.MAP); - assertTrue("MapTask should need commit", testTask.isCommitRequired()); + assertTrue(testTask.isCommitRequired(), "MapTask should need commit"); } @Test public void testCommitRequiredForReduceTask() throws Exception { Task testTask = createDummyTask(TaskType.REDUCE); - assertTrue("ReduceTask should need commit", testTask.isCommitRequired()); + assertTrue(testTask.isCommitRequired(), "ReduceTask should need commit"); } @Test public void testCommitNotRequiredForJobSetup() throws Exception { Task testTask = createDummyTask(TaskType.MAP); testTask.setJobSetupTask(); - assertFalse("Job setup task should not need commit", - testTask.isCommitRequired()); + assertFalse( + testTask.isCommitRequired(), "Job setup task should not need commit"); } @Test public void testCommitNotRequiredForJobCleanup() throws Exception { Task testTask = createDummyTask(TaskType.MAP); testTask.setJobCleanupTask(); - assertFalse("Job cleanup task should not need commit", - testTask.isCommitRequired()); + assertFalse( + testTask.isCommitRequired(), "Job cleanup task should not need commit"); } @Test public void testCommitNotRequiredForTaskCleanup() throws Exception { Task testTask = createDummyTask(TaskType.REDUCE); testTask.setTaskCleanupTask(); - assertFalse("Task cleanup task should not need commit", - testTask.isCommitRequired()); + assertFalse( + testTask.isCommitRequired(), "Task cleanup task should not need commit"); } private Task createDummyTask(TaskType type) throws IOException, ClassNotFoundException, diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskPerformanceSplits.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskPerformanceSplits.java index 1260b148c18e1..61e0ed7fc1065 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskPerformanceSplits.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskPerformanceSplits.java @@ -18,8 +18,8 @@ package org.apache.hadoop.mapred; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; public class TestTaskPerformanceSplits { @Test @@ -40,15 +40,15 @@ public void testPeriodStatsets() { System.err.println("segment i = " + results[i]); } - assertEquals("Bad interpolation in cumulative segment 0", 200, results[0]); - assertEquals("Bad interpolation in cumulative segment 1", 200, results[1]); - assertEquals("Bad interpolation in cumulative segment 2", 200, results[2]); - assertEquals("Bad interpolation in cumulative segment 3", 300, results[3]); - assertEquals("Bad interpolation in cumulative segment 4", 400, results[4]); - assertEquals("Bad interpolation in cumulative segment 5", 2200, results[5]); + assertEquals(200, results[0], "Bad interpolation in cumulative segment 0"); + assertEquals(200, results[1], "Bad interpolation in cumulative segment 1"); + assertEquals(200, results[2], "Bad interpolation in cumulative segment 2"); + assertEquals(300, results[3], "Bad interpolation in cumulative segment 3"); + assertEquals(400, results[4], "Bad interpolation in cumulative segment 4"); + assertEquals(2200, results[5], "Bad interpolation in cumulative segment 5"); // these are rounded down - assertEquals("Bad interpolation in cumulative segment 6", 2200, results[6]); - assertEquals("Bad interpolation in cumulative segment 7", 2201, results[7]); + assertEquals(2200, results[6], "Bad interpolation in cumulative segment 6"); + assertEquals(2201, results[7], "Bad interpolation in cumulative segment 7"); status.extend(0.0D, 0); status.extend(1.0D/16.0D, 300); // + 75 for bucket 0 @@ -59,13 +59,13 @@ public void testPeriodStatsets() { results = status.getValues(); - assertEquals("Bad interpolation in status segment 0", 275, results[0]); - assertEquals("Bad interpolation in status segment 1", 750, results[1]); - assertEquals("Bad interpolation in status segment 2", 1500, results[2]); - assertEquals("Bad interpolation in status segment 3", 2175, results[3]); - assertEquals("Bad interpolation in status segment 4", 2100, results[4]); - assertEquals("Bad interpolation in status segment 5", 1900, results[5]); - assertEquals("Bad interpolation in status segment 6", 1700, results[6]); - assertEquals("Bad interpolation in status segment 7", 1500, results[7]); + assertEquals(275, results[0], "Bad interpolation in status segment 0"); + assertEquals(750, results[1], "Bad interpolation in status segment 1"); + assertEquals(1500, results[2], "Bad interpolation in status segment 2"); + assertEquals(2175, results[3], "Bad interpolation in status segment 3"); + assertEquals(2100, results[4], "Bad interpolation in status segment 4"); + assertEquals(1900, results[5], "Bad interpolation in status segment 5"); + assertEquals(1700, results[6], "Bad 
interpolation in status segment 6"); + assertEquals(1500, results[7], "Bad interpolation in status segment 7"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskStatus.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskStatus.java index 94299b6363d7c..e519372b473e3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskStatus.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskStatus.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.mapred; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; public class TestTaskStatus { @@ -52,25 +52,25 @@ private void checkTaskStatues(boolean isMap) { // first try to set the finish time before // start time is set. status.setFinishTime(currentTime); - assertEquals("Finish time of the task status set without start time", 0, - status.getFinishTime()); + assertEquals(0 +, status.getFinishTime(), "Finish time of the task status set without start time"); // Now set the start time to right time. status.setStartTime(currentTime); - assertEquals("Start time of the task status not set correctly.", - currentTime, status.getStartTime()); + assertEquals( + currentTime, status.getStartTime(), "Start time of the task status not set correctly."); // try setting wrong start time to task status. long wrongTime = -1; status.setStartTime(wrongTime); assertEquals( - "Start time of the task status is set to wrong negative value", - currentTime, status.getStartTime()); + + currentTime, status.getStartTime(), "Start time of the task status is set to wrong negative value"); // finally try setting wrong finish time i.e. negative value. 
status.setFinishTime(wrongTime); - assertEquals("Finish time of task status is set to wrong negative value", - 0, status.getFinishTime()); + assertEquals( + 0, status.getFinishTime(), "Finish time of task status is set to wrong negative value"); status.setFinishTime(currentTime); - assertEquals("Finish time of the task status not set correctly.", - currentTime, status.getFinishTime()); + assertEquals( + currentTime, status.getFinishTime(), "Finish time of the task status not set correctly."); // test with null task-diagnostics TaskStatus ts = ((TaskStatus)status.clone()); @@ -117,19 +117,19 @@ public boolean getIsMap() { return false; } }; - assertEquals("Small diagnostic info test failed", - status.getDiagnosticInfo(), test); - assertEquals("Small state string test failed", status.getStateString(), - test); + assertEquals( + status.getDiagnosticInfo(), test, "Small diagnostic info test failed"); + assertEquals(status.getStateString(), + test, "Small state string test failed"); // now append some small string and check String newDInfo = test.concat(test); status.setDiagnosticInfo(test); status.setStateString(newDInfo); - assertEquals("Small diagnostic info append failed", - newDInfo, status.getDiagnosticInfo()); - assertEquals("Small state-string append failed", - newDInfo, status.getStateString()); + assertEquals( + newDInfo, status.getDiagnosticInfo(), "Small diagnostic info append failed"); + assertEquals( + newDInfo, status.getStateString(), "Small state-string append failed"); // update the status with small state strings TaskStatus newStatus = (TaskStatus)status.clone(); @@ -138,47 +138,47 @@ public boolean getIsMap() { status.statusUpdate(newStatus); newDInfo = newDInfo.concat(newStatus.getDiagnosticInfo()); - assertEquals("Status-update on diagnostic-info failed", - newDInfo, status.getDiagnosticInfo()); - assertEquals("Status-update on state-string failed", - newSInfo, status.getStateString()); + assertEquals( + newDInfo, status.getDiagnosticInfo(), "Status-update on diagnostic-info failed"); + assertEquals( + newSInfo, status.getStateString(), "Status-update on state-string failed"); newSInfo = "hi2"; status.statusUpdate(0, newSInfo, null); - assertEquals("Status-update on state-string failed", - newSInfo, status.getStateString()); + assertEquals( + newSInfo, status.getStateString(), "Status-update on state-string failed"); newSInfo = "hi3"; status.statusUpdate(null, 0, newSInfo, null, 0); - assertEquals("Status-update on state-string failed", - newSInfo, status.getStateString()); + assertEquals( + newSInfo, status.getStateString(), "Status-update on state-string failed"); // now append each with large string String large = "hihihihihihihihihihi"; // 20 chars status.setDiagnosticInfo(large); status.setStateString(large); - assertEquals("Large diagnostic info append test failed", - maxSize, status.getDiagnosticInfo().length()); - assertEquals("Large state-string append test failed", - maxSize, status.getStateString().length()); + assertEquals( + maxSize, status.getDiagnosticInfo().length(), "Large diagnostic info append test failed"); + assertEquals( + maxSize, status.getStateString().length(), "Large state-string append test failed"); // update a large status with large strings newStatus.setDiagnosticInfo(large + "0"); newStatus.setStateString(large + "1"); status.statusUpdate(newStatus); - assertEquals("Status-update on diagnostic info failed", - maxSize, status.getDiagnosticInfo().length()); - assertEquals("Status-update on state-string failed", - maxSize, 
status.getStateString().length()); + assertEquals( + maxSize, status.getDiagnosticInfo().length(), "Status-update on diagnostic info failed"); + assertEquals( + maxSize, status.getStateString().length(), "Status-update on state-string failed"); status.statusUpdate(0, large + "2", null); - assertEquals("Status-update on state-string failed", - maxSize, status.getStateString().length()); + assertEquals( + maxSize, status.getStateString().length(), "Status-update on state-string failed"); status.statusUpdate(null, 0, large + "3", null, 0); - assertEquals("Status-update on state-string failed", - maxSize, status.getStateString().length()); + assertEquals( + maxSize, status.getStateString().length(), "Status-update on state-string failed"); // test passing large string in constructor status = new TaskStatus(null, 0, 0, null, large, large, null, null, @@ -197,9 +197,9 @@ public boolean getIsMap() { return false; } }; - assertEquals("Large diagnostic info test failed", - maxSize, status.getDiagnosticInfo().length()); - assertEquals("Large state-string test failed", - maxSize, status.getStateString().length()); + assertEquals( + maxSize, status.getDiagnosticInfo().length(), "Large diagnostic info test failed"); + assertEquals( + maxSize, status.getStateString().length(), "Large state-string test failed"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java index 29a370de7c333..ef2a4e0319b09 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java @@ -38,12 +38,13 @@ import org.apache.hadoop.io.compress.*; import org.apache.hadoop.util.LineReader; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static java.nio.charset.StandardCharsets.UTF_8; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; public class TestTextInputFormat { private static final Logger LOG = @@ -66,7 +67,8 @@ public class TestTextInputFormat { System.getProperty("test.build.data", "/tmp"), "TestTextInputFormat")); - @Test (timeout=500000) + @Test + @Timeout(value = 500) public void testFormat() throws Exception { JobConf job = new JobConf(defaultConf); Path file = new Path(workDir, "test.txt"); @@ -110,9 +112,9 @@ public void testFormat() throws Exception { LOG.debug("splitting: got = " + splits.length); if (length == 0) { - assertEquals("Files of length 0 are not returned from FileInputFormat.getSplits().", - 1, splits.length); - assertEquals("Empty file length == 0", 0, splits[0].getLength()); + assertEquals( + 1, splits.length, "Files of length 0 are not returned from FileInputFormat.getSplits()."); + assertEquals(0, splits[0].getLength(), "Empty file length == 0"); } // check each split @@ -131,7 +133,7 @@ public void testFormat() throws Exception { " in split " + j + " at position "+reader.getPos()); } - assertFalse("Key in multiple partitions.", bits.get(v)); + assertFalse(bits.get(v), "Key in multiple partitions."); 
+ assertFalse(bits.get(v), "Key in multiple partitions.");
bits.set(v); count++; } @@ -140,13 +142,14 @@ public void testFormat() throws Exception { reader.close(); } } - assertEquals("Some keys in no partition.", length, bits.cardinality()); + assertEquals(length, bits.cardinality(), "Some keys in no partition."); } } } - @Test (timeout=900000) + @Test + @Timeout(value = 900) public void testSplitableCodecs() throws IOException { JobConf conf = new JobConf(defaultConf); int seed = new Random().nextInt(); @@ -195,7 +198,8 @@ public void testSplitableCodecs() throws IOException { } // Test a corner case when position of stream is right after BZip2 marker - @Test (timeout=900000) + @Test + @Timeout(value = 900) public void testSplitableCodecs2() throws IOException { JobConf conf = new JobConf(defaultConf); // Create the codec @@ -253,7 +257,7 @@ public void testSplitableCodecs2() throws IOException { LOG.warn("conflict with " + v + " in split " + j + " at position " + reader.getPos()); } - assertFalse("Key in multiple partitions.", bits.get(v)); + assertFalse(bits.get(v), "Key in multiple partitions."); bits.set(v); counter++; } @@ -266,7 +270,7 @@ public void testSplitableCodecs2() throws IOException { reader.close(); } } - assertEquals("Some keys in no partition.", length, bits.cardinality()); + assertEquals(length, bits.cardinality(), "Some keys in no partition."); } } @@ -314,7 +318,7 @@ private void verifyPartitions(int length, int numSplits, Path file, " in split " + j + " at position "+reader.getPos()); } - assertFalse("Key in multiple partitions.", bits.get(v)); + assertFalse(bits.get(v), "Key in multiple partitions."); bits.set(v); counter++; } @@ -327,7 +331,7 @@ private void verifyPartitions(int length, int numSplits, Path file, reader.close(); } } - assertEquals("Some keys in no partition.", length, bits.cardinality()); + assertEquals(length, bits.cardinality(), "Some keys in no partition."); } private static LineReader makeStream(String str) throws IOException { @@ -337,7 +341,8 @@ private static LineReader makeStream(String str, int bufsz) throws IOException { return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), bufsz); } - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testUTF8() throws Exception { LineReader in = makeStream("abcd\u20acbdcd\u20ac"); Text line = new Text(); @@ -356,7 +361,8 @@ public void testUTF8() throws Exception { * * @throws Exception */ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testNewLines() throws Exception { final String STR = "a\nbb\n\nccc\rdddd\r\r\r\n\r\neeeee"; final int STRLENBYTES = STR.getBytes().length; @@ -365,25 +371,25 @@ public void testNewLines() throws Exception { LineReader in = makeStream(STR, bufsz); int c = 0; c += in.readLine(out); //"a"\n - assertEquals("line1 length, bufsz:"+bufsz, 1, out.getLength()); + assertEquals(1, out.getLength(), "line1 length, bufsz:"+bufsz); c += in.readLine(out); //"bb"\n - assertEquals("line2 length, bufsz:"+bufsz, 2, out.getLength()); + assertEquals(2, out.getLength(), "line2 length, bufsz:"+bufsz); c += in.readLine(out); //""\n - assertEquals("line3 length, bufsz:"+bufsz, 0, out.getLength()); + assertEquals(0, out.getLength(), "line3 length, bufsz:"+bufsz); c += in.readLine(out); //"ccc"\r - assertEquals("line4 length, bufsz:"+bufsz, 3, out.getLength()); + assertEquals(3, out.getLength(), "line4 length, bufsz:"+bufsz); c += in.readLine(out); //dddd\r - assertEquals("line5 length, bufsz:"+bufsz, 4, out.getLength()); + assertEquals(4, out.getLength(), "line5 length, bufsz:"+bufsz); c += in.readLine(out); 
//""\r - assertEquals("line6 length, bufsz:"+bufsz, 0, out.getLength()); + assertEquals(0, out.getLength(), "line6 length, bufsz:"+bufsz); c += in.readLine(out); //""\r\n - assertEquals("line7 length, bufsz:"+bufsz, 0, out.getLength()); + assertEquals(0, out.getLength(), "line7 length, bufsz:"+bufsz); c += in.readLine(out); //""\r\n - assertEquals("line8 length, bufsz:"+bufsz, 0, out.getLength()); + assertEquals(0, out.getLength(), "line8 length, bufsz:"+bufsz); c += in.readLine(out); //"eeeee"EOF - assertEquals("line9 length, bufsz:"+bufsz, 5, out.getLength()); - assertEquals("end of file, bufsz: "+bufsz, 0, in.readLine(out)); - assertEquals("total bytes, bufsz: "+bufsz, c, STRLENBYTES); + assertEquals(5, out.getLength(), "line9 length, bufsz:"+bufsz); + assertEquals(0, in.readLine(out), "end of file, bufsz: "+bufsz); + assertEquals(c, STRLENBYTES, "total bytes, bufsz: "+bufsz); } } @@ -396,7 +402,8 @@ public void testNewLines() throws Exception { * * @throws Exception */ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testMaxLineLength() throws Exception { final String STR = "a\nbb\n\nccc\rdddd\r\neeeee"; final int STRLENBYTES = STR.getBytes().length; @@ -405,23 +412,24 @@ public void testMaxLineLength() throws Exception { LineReader in = makeStream(STR, bufsz); int c = 0; c += in.readLine(out, 1); - assertEquals("line1 length, bufsz: "+bufsz, 1, out.getLength()); + assertEquals(1, out.getLength(), "line1 length, bufsz: "+bufsz); c += in.readLine(out, 1); - assertEquals("line2 length, bufsz: "+bufsz, 1, out.getLength()); + assertEquals(1, out.getLength(), "line2 length, bufsz: "+bufsz); c += in.readLine(out, 1); - assertEquals("line3 length, bufsz: "+bufsz, 0, out.getLength()); + assertEquals(0, out.getLength(), "line3 length, bufsz: "+bufsz); c += in.readLine(out, 3); - assertEquals("line4 length, bufsz: "+bufsz, 3, out.getLength()); + assertEquals(3, out.getLength(), "line4 length, bufsz: "+bufsz); c += in.readLine(out, 10); - assertEquals("line5 length, bufsz: "+bufsz, 4, out.getLength()); + assertEquals(4, out.getLength(), "line5 length, bufsz: "+bufsz); c += in.readLine(out, 8); - assertEquals("line5 length, bufsz: "+bufsz, 5, out.getLength()); - assertEquals("end of file, bufsz: " +bufsz, 0, in.readLine(out)); - assertEquals("total bytes, bufsz: "+bufsz, c, STRLENBYTES); + assertEquals(5, out.getLength(), "line5 length, bufsz: "+bufsz); + assertEquals(0, in.readLine(out), "end of file, bufsz: " +bufsz); + assertEquals(c, STRLENBYTES, "total bytes, bufsz: "+bufsz); } } - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testMRMaxLine() throws Exception { final int MAXPOS = 1024 * 1024; final int MAXLINE = 10 * 1024; @@ -436,7 +444,7 @@ public int read() { } @Override public int read(byte[] b) { - assertTrue("Read too many bytes from the stream", position < MAXPOSBUF); + assertTrue(position < MAXPOSBUF, "Read too many bytes from the stream"); Arrays.fill(b, (byte) 0); position += b.length; return b.length; @@ -454,10 +462,10 @@ public void reset() { conf.setInt("io.file.buffer.size", BUF); // used by LRR // test another constructor LineRecordReader lrr = new LineRecordReader(infNull, 0, MAXPOS, conf); - assertFalse("Read a line from null", lrr.next(key, val)); + assertFalse(lrr.next(key, val), "Read a line from null"); infNull.reset(); lrr = new LineRecordReader(infNull, 0L, MAXLINE, MAXPOS); - assertFalse("Read a line from null", lrr.next(key, val)); + assertFalse(lrr.next(key, val), "Read a line from null"); } @@ -496,7 +504,8 @@ private static List 
readSplit(TextInputFormat format, /** * Test using the gzip codec for reading */ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testGzip() throws IOException { JobConf job = new JobConf(defaultConf); CompressionCodec gzip = new GzipCodec(); @@ -510,17 +519,17 @@ public void testGzip() throws IOException { TextInputFormat format = new TextInputFormat(); format.configure(job); InputSplit[] splits = format.getSplits(job, 100); - assertEquals("compressed splits == 2", 2, splits.length); + assertEquals(2, splits.length, "compressed splits == 2"); FileSplit tmp = (FileSplit) splits[0]; if (tmp.getPath().getName().equals("part2.txt.gz")) { splits[0] = splits[1]; splits[1] = tmp; } List results = readSplit(format, splits[0], job); - assertEquals("splits[0] length", 6, results.size()); + assertEquals(6, results.size(), "splits[0] length"); assertEquals("splits[0][5]", " dog", results.get(5).toString()); results = readSplit(format, splits[1], job); - assertEquals("splits[1] length", 2, results.size()); + assertEquals(2, results.size(), "splits[1] length"); assertEquals("splits[1][0]", "this is a test", results.get(0).toString()); assertEquals("splits[1][1]", "of gzip", @@ -530,7 +539,8 @@ public void testGzip() throws IOException { /** * Test using the gzip codec and an empty input file */ - @Test (timeout=5000) + @Test + @Timeout(value = 5) public void testGzipEmpty() throws IOException { JobConf job = new JobConf(defaultConf); CompressionCodec gzip = new GzipCodec(); @@ -541,10 +551,10 @@ public void testGzipEmpty() throws IOException { TextInputFormat format = new TextInputFormat(); format.configure(job); InputSplit[] splits = format.getSplits(job, 100); - assertEquals("Compressed files of length 0 are not returned from FileInputFormat.getSplits().", - 1, splits.length); + assertEquals( + 1, splits.length, "Compressed files of length 0 are not returned from FileInputFormat.getSplits()."); List results = readSplit(format, splits[0], job); - assertEquals("Compressed empty file length == 0", 0, results.size()); + assertEquals(0, results.size(), "Compressed empty file length == 0"); } private static String unquote(String in) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java index 1a090386bbd31..694aceebb59de 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextOutputFormat.java @@ -22,10 +22,10 @@ import java.io.FileInputStream; import java.io.IOException; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java index 2d67edc581aaa..60b7799f666b6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUserDefinedCounters.java @@ -24,7 +24,7 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.lib.IdentityMapper; import org.apache.hadoop.mapred.lib.IdentityReducer; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.BufferedReader; import java.io.File; @@ -35,8 +35,8 @@ import java.io.OutputStreamWriter; import java.io.Writer; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestUserDefinedCounters { private static String TEST_ROOT_DIR = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUtils.java index 0c4370487ad55..804bc06d5302b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUtils.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUtils.java @@ -20,8 +20,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; public class TestUtils { private static final Path[] LOG_PATHS = new Path[] { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java index 82c68db30c551..e90fcc287dd0a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestWritableJobConf.java @@ -25,13 +25,13 @@ import org.apache.hadoop.io.serializer.SerializationFactory; import org.apache.hadoop.io.serializer.Serializer; import org.apache.hadoop.util.GenericsUtil; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.HashMap; import java.util.Iterator; import java.util.Map; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestWritableJobConf { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java index 0bdc72121799b..73f5b97d62a34 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java @@ -20,11 +20,11 @@ import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doReturn; @@ -117,11 +117,12 @@ import org.apache.log4j.SimpleLayout; import org.apache.log4j.WriterAppender; import org.apache.log4j.spi.LoggingEvent; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.slf4j.Logger; @@ -179,12 +180,12 @@ private List getLogEvents() { private ClientServiceDelegate clientDelegate; private static final String failString = "Rejected job"; - @BeforeClass + @BeforeAll public static void setupBeforeClass() { ResourceUtils.resetResourceTypes(new Configuration()); } - @Before + @BeforeEach public void setUp() throws Exception { resourceMgrDelegate = mock(ResourceMgrDelegate.class); conf = new YarnConfiguration(); @@ -213,13 +214,14 @@ public ApplicationSubmissionContext answer(InvocationOnMock invocation) testWorkDir.mkdirs(); } - @After + @AfterEach public void cleanup() { FileUtil.fullyDelete(testWorkDir); ResourceUtils.resetResourceTypes(new Configuration()); } - @Test(timeout=20000) + @Test + @Timeout(value = 20) public void testJobKill() throws Exception { clientDelegate = mock(ClientServiceDelegate.class); when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new @@ -255,7 +257,8 @@ public ClientServiceDelegate answer(InvocationOnMock invocation) verify(clientDelegate).killJob(jobId); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testJobKillTimeout() throws Exception { long timeToWaitBeforeHardKill = 10000 + MRJobConfig.DEFAULT_MR_AM_HARD_KILL_TIMEOUT_MS; @@ -276,12 +279,13 @@ public ClientServiceDelegate answer(InvocationOnMock invocation) State.RUNNING, JobPriority.HIGH, "tmp", "tmp", "tmp", "tmp")); long startTimeMillis = System.currentTimeMillis(); yarnRunner.killJob(jobId); - assertTrue("killJob should have waited at least " + timeToWaitBeforeHardKill - + " ms.", System.currentTimeMillis() - startTimeMillis - >= timeToWaitBeforeHardKill); + assertTrue(System.currentTimeMillis() - startTimeMillis + >= timeToWaitBeforeHardKill, "killJob should have waited at least " + timeToWaitBeforeHardKill + + " ms."); } - @Test(timeout=20000) + @Test + 
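// JUnit 5 drops the timeout attribute from @Test; the separate + // org.junit.jupiter.api.Timeout annotation replaces it, and its default unit is + // seconds rather than milliseconds, so the old @Test(timeout=20000) becomes + // @Timeout(value = 20) here. + 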
@Timeout(value = 20) public void testJobSubmissionFailure() throws Exception { when(resourceMgrDelegate.submitApplication(any(ApplicationSubmissionContext.class))). thenReturn(appId); @@ -303,7 +307,8 @@ public void testJobSubmissionFailure() throws Exception { } } - @Test(timeout=20000) + @Test + @Timeout(value = 20) public void testResourceMgrDelegate() throws Exception { /* we not want a mock of resource mgr delegate */ final ApplicationClientProtocol clientRMProtocol = mock(ApplicationClientProtocol.class); @@ -371,7 +376,8 @@ protected void serviceStart() throws Exception { verify(clientRMProtocol).getQueueUserAcls(any(GetQueueUserAclsInfoRequest.class)); } - @Test(timeout=20000) + @Test + @Timeout(value = 20) public void testGetHSDelegationToken() throws Exception { try { Configuration conf = new Configuration(); @@ -452,7 +458,8 @@ public void testGetHSDelegationToken() throws Exception { } } - @Test(timeout=20000) + @Test + @Timeout(value = 20) public void testHistoryServerToken() throws Exception { //Set the master principal in the config conf.set(YarnConfiguration.RM_PRINCIPAL,"foo@LOCAL"); @@ -495,7 +502,8 @@ public Void run() throws Exception { }); } - @Test(timeout=20000) + @Test + @Timeout(value = 20) public void testAMAdminCommandOpts() throws Exception { JobConf jobConf = new JobConf(); @@ -519,8 +527,8 @@ public void testAMAdminCommandOpts() throws Exception { for(String command : commands) { if(command != null) { - assertFalse("Profiler should be disabled by default", - command.contains(PROFILE_PARAMS)); + assertFalse( + command.contains(PROFILE_PARAMS), "Profiler should be disabled by default"); adminPos = command.indexOf("-Djava.net.preferIPv4Stack=true"); if(adminPos >= 0) adminIndex = index; @@ -536,20 +544,21 @@ public void testAMAdminCommandOpts() throws Exception { } // Check java.io.tmpdir opts are set in the commands - assertTrue("java.io.tmpdir is not set for AM", tmpDirPos > 0); + assertTrue(tmpDirPos > 0, "java.io.tmpdir is not set for AM"); // Check both admin java opts and user java opts are in the commands - assertTrue("AM admin command opts not in the commands.", adminPos > 0); - assertTrue("AM user command opts not in the commands.", userPos > 0); + assertTrue(adminPos > 0, "AM admin command opts not in the commands."); + assertTrue(userPos > 0, "AM user command opts not in the commands."); // Check the admin java opts is before user java opts in the commands if(adminIndex == userIndex) { - assertTrue("AM admin command opts is after user command opts.", adminPos < userPos); + assertTrue(adminPos < userPos, "AM admin command opts is after user command opts."); } else { - assertTrue("AM admin command opts is after user command opts.", adminIndex < userIndex); + assertTrue(adminIndex < userIndex, "AM admin command opts is after user command opts."); } } - @Test(timeout=20000) + @Test + @Timeout(value = 20) public void testWarnCommandOpts() throws Exception { org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(YARNRunner.class); @@ -583,7 +592,8 @@ public void testWarnCommandOpts() throws Exception { "using yarn.app.mapreduce.am.env config settings.")); } - @Test(timeout=20000) + @Test + @Timeout(value = 20) public void testAMProfiler() throws Exception { JobConf jobConf = new JobConf(); @@ -837,7 +847,7 @@ private void testAMStandardEnv(boolean customLibPath, ContainerLaunchContext clc = appSubCtx.getAMContainerSpec(); Map env = clc.getEnvironment(); String libPath = env.get(pathKey); - assertNotNull(pathKey + " not set", libPath); + 
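// org.junit.Assert took the optional failure message as the first argument + // (message, expected, actual); org.junit.jupiter.api.Assertions takes it last + // (expected, actual, message), which is why every migrated assertion below + // reorders its arguments. + 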
assertNotNull(libPath, pathKey + " not set"); String cps = jobConf.getBoolean( MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, MRConfig.DEFAULT_MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM) @@ -852,12 +862,12 @@ private void testAMStandardEnv(boolean customLibPath, MRJobConfig.DEFAULT_MR_AM_ADMIN_USER_ENV.substring( pathKey.length() + 1); } - assertEquals("Bad AM " + pathKey + " setting", expectedLibPath, libPath); + assertEquals(expectedLibPath, libPath, "Bad AM " + pathKey + " setting"); // make sure SHELL is set String shell = env.get(Environment.SHELL.name()); - assertNotNull("SHELL not set", shell); - assertEquals("Bad SHELL setting", USER_SHELL, shell); + assertNotNull(shell, "SHELL not set"); + assertEquals(USER_SHELL, shell, "Bad SHELL setting"); } @Test @@ -929,13 +939,13 @@ public void testSendJobConf() throws IOException { Configuration confSent = BuilderUtils.parseTokensConf(submissionContext); // configs that match regex should be included - Assert.assertEquals("123.0.0.1", + Assertions.assertEquals("123.0.0.1", confSent.get("dfs.namenode.rpc-address.mycluster2.nn1")); - Assert.assertEquals("123.0.0.2", + Assertions.assertEquals("123.0.0.2", confSent.get("dfs.namenode.rpc-address.mycluster2.nn2")); // configs that aren't matching regex should not be included - Assert.assertTrue(confSent.get("hadoop.tmp.dir") == null || !confSent + Assertions.assertTrue(confSent.get("hadoop.tmp.dir") == null || !confSent .get("hadoop.tmp.dir").equals("testconfdir")); UserGroupInformation.reset(); } @@ -957,15 +967,15 @@ public void testCustomAMRMResourceType() throws Exception { List resourceRequests = submissionContext.getAMContainerResourceRequests(); - Assert.assertEquals(1, resourceRequests.size()); + Assertions.assertEquals(1, resourceRequests.size()); ResourceRequest resourceRequest = resourceRequests.get(0); ResourceInformation resourceInformation = resourceRequest.getCapability() .getResourceInformation(CUSTOM_RESOURCE_NAME); - Assert.assertEquals("Expecting the default unit (G)", - "G", resourceInformation.getUnits()); + Assertions.assertEquals("G", resourceInformation.getUnits(), + "Expecting the default unit (G)"); - Assert.assertEquals(5L, resourceInformation.getValue()); - Assert.assertEquals(3, resourceRequest.getCapability().getVirtualCores()); + Assertions.assertEquals(5L, resourceInformation.getValue()); + Assertions.assertEquals(3, resourceRequest.getCapability().getVirtualCores()); } @Test @@ -983,11 +993,11 @@ public void testAMRMemoryRequest() throws Exception { List resourceRequests = submissionContext.getAMContainerResourceRequests(); - Assert.assertEquals(1, resourceRequests.size()); + Assertions.assertEquals(1, resourceRequests.size()); ResourceRequest resourceRequest = resourceRequests.get(0); long memorySize = resourceRequest.getCapability().getMemorySize(); - Assert.assertEquals(3072, memorySize); + Assertions.assertEquals(3072, memorySize); } } @@ -1012,11 +1022,11 @@ public void testAMRMemoryRequestOverriding() throws Exception { List resourceRequests = submissionContext.getAMContainerResourceRequests(); - Assert.assertEquals(1, resourceRequests.size()); + Assertions.assertEquals(1, resourceRequests.size()); ResourceRequest resourceRequest = resourceRequests.get(0); long memorySize = resourceRequest.getCapability().getMemorySize(); - Assert.assertEquals(3072, memorySize); + Assertions.assertEquals(3072, memorySize); assertTrue(testAppender.getLogEvents().stream().anyMatch( e -> e.getLevel() == Level.WARN && ("Configuration " + "yarn.app.mapreduce.am.resource." 
+ memoryName + "=3Gi is " + diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java index 96954d5dcc0c9..46d407588e574 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java @@ -24,7 +24,7 @@ import java.util.ArrayList; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -32,7 +32,8 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.JobID; import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; /** * This class performs unit test for Job/JobControl classes. @@ -198,14 +199,15 @@ public static void doJobControlTest() throws Exception { } @SuppressWarnings("deprecation") - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testJobState() throws Exception { Job job_1 = getCopyJob(); JobControl jc = new JobControl("Test"); jc.addJob(job_1); - Assert.assertEquals(Job.WAITING, job_1.getState()); + Assertions.assertEquals(Job.WAITING, job_1.getState()); job_1.setState(Job.SUCCESS); - Assert.assertEquals(Job.WAITING, job_1.getState()); + Assertions.assertEquals(Job.WAITING, job_1.getState()); org.apache.hadoop.mapreduce.Job mockjob = mock(org.apache.hadoop.mapreduce.Job.class); @@ -213,20 +215,21 @@ public void testJobState() throws Exception { new org.apache.hadoop.mapreduce.JobID("test", 0); when(mockjob.getJobID()).thenReturn(jid); job_1.setJob(mockjob); - Assert.assertEquals("job_test_0000", job_1.getMapredJobID()); + Assertions.assertEquals("job_test_0000", job_1.getMapredJobID()); job_1.setMapredJobID("job_test_0001"); - Assert.assertEquals("job_test_0000", job_1.getMapredJobID()); + Assertions.assertEquals("job_test_0000", job_1.getMapredJobID()); jc.stop(); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testAddingDependingJob() throws Exception { Job job_1 = getCopyJob(); ArrayList dependingJobs = new ArrayList(); JobControl jc = new JobControl("Test"); jc.addJob(job_1); - Assert.assertEquals(Job.WAITING, job_1.getState()); - Assert.assertTrue(job_1.addDependingJob(new Job(job_1.getJobConf(), + Assertions.assertEquals(Job.WAITING, job_1.getState()); + Assertions.assertTrue(job_1.addDependingJob(new Job(job_1.getJobConf(), dependingJobs))); } @@ -253,23 +256,25 @@ public Job getCopyJob() throws Exception { return job_1; } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testJobControl() throws Exception { doJobControlTest(); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testGetAssignedJobId() throws Exception { JobConf jc = new JobConf(); Job j = new Job(jc); //Just make sure no exception is thrown - Assert.assertNull(j.getAssignedJobID()); + Assertions.assertNull(j.getAssignedJobID()); org.apache.hadoop.mapreduce.Job mockjob = mock(org.apache.hadoop.mapreduce.Job.class); org.apache.hadoop.mapreduce.JobID jid = new 
org.apache.hadoop.mapreduce.JobID("test",0); when(mockjob.getJobID()).thenReturn(jid); j.setJob(mockjob); JobID expected = new JobID("test",0); - Assert.assertEquals(expected, j.getAssignedJobID()); + Assertions.assertEquals(expected, j.getAssignedJobID()); verify(mockjob).getJobID(); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestLocalJobControl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestLocalJobControl.java index 3cd9c24db2dcd..92a5868a56b9a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestLocalJobControl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestLocalJobControl.java @@ -25,11 +25,11 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.HadoopTestCase; import org.apache.hadoop.mapred.JobConf; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * HadoopTestCase that tests the local job runner. @@ -132,7 +132,7 @@ public void testLocalJobControlDataCopy() throws Exception { } } - assertEquals("Some jobs failed", 0, theControl.getFailedJobs().size()); + assertEquals(0, theControl.getFailedJobs().size(), "Some jobs failed"); theControl.stop(); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java index a3066765ec008..1ab06bb2a0728 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java @@ -49,23 +49,22 @@ import org.apache.hadoop.mapred.lib.IdentityMapper; import org.apache.hadoop.mapred.lib.IdentityReducer; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertFalse; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; public class TestDatamerge { private static MiniDFSCluster cluster = null; - @Before + @BeforeEach public void setUp() throws Exception { Configuration conf = new Configuration(); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); } - @After + @AfterEach public void tearDown() throws Exception { if (cluster != null) { cluster.shutdown(); @@ -131,7 +130,7 @@ private static abstract class SimpleCheckerBase public void close() { } public void configure(JobConf job) { srcs = job.getInt("testdatamerge.sources", 0); - assertTrue("Invalid src count: " + srcs, srcs > 0); + assertTrue(srcs > 0, "Invalid src count: " + srcs); } public abstract void map(IntWritable 
key, V val, OutputCollector out, Reporter reporter) @@ -143,7 +142,7 @@ public void reduce(IntWritable key, Iterator values, while (values.hasNext()) { seen += values.next().get(); } - assertTrue("Bad count for " + key.get(), verify(key.get(), seen)); + assertTrue(verify(key.get(), seen), "Bad count for " + key.get()); } public abstract boolean verify(int key, int occ); } @@ -155,10 +154,10 @@ public void map(IntWritable key, TupleWritable val, throws IOException { int k = key.get(); final String kvstr = "Unexpected tuple: " + stringify(key, val); - assertTrue(kvstr, 0 == k % (srcs * srcs)); + assertEquals(0, k % (srcs * srcs), kvstr); for (int i = 0; i < val.size(); ++i) { final int vali = ((IntWritable)val.get(i)).get(); - assertTrue(kvstr, (vali - i) * srcs == 10 * k); + assertEquals((vali - i) * srcs, 10 * k, kvstr); } out.collect(key, one); } @@ -177,18 +176,18 @@ public void map(IntWritable key, TupleWritable val, final String kvstr = "Unexpected tuple: " + stringify(key, val); if (0 == k % (srcs * srcs)) { for (int i = 0; i < val.size(); ++i) { - assertTrue(kvstr, val.get(i) instanceof IntWritable); + assertInstanceOf(IntWritable.class, val.get(i), kvstr); final int vali = ((IntWritable)val.get(i)).get(); - assertTrue(kvstr, (vali - i) * srcs == 10 * k); + assertEquals((vali - i) * srcs, 10 * k, kvstr); } } else { for (int i = 0; i < val.size(); ++i) { if (i == k % srcs) { - assertTrue(kvstr, val.get(i) instanceof IntWritable); + assertInstanceOf(IntWritable.class, val.get(i), kvstr); final int vali = ((IntWritable)val.get(i)).get(); - assertTrue(kvstr, srcs * (vali - i) == 10 * (k - i)); + assertEquals(srcs * (vali - i), 10 * (k - i), kvstr); } else { - assertTrue(kvstr, !val.has(i)); + assertFalse(val.has(i), kvstr); } } } @@ -210,10 +209,10 @@ public void map(IntWritable key, IntWritable val, final int vali = val.get(); final String kvstr = "Unexpected tuple: " + stringify(key, val); if (0 == k % (srcs * srcs)) { - assertTrue(kvstr, vali == k * 10 / srcs + srcs - 1); + assertEquals(vali, k * 10 / srcs + srcs - 1, kvstr); } else { final int i = k % srcs; - assertTrue(kvstr, srcs * (vali - i) == 10 * (k - i)); + assertEquals(srcs * (vali - i), 10 * (k - i), kvstr); } out.collect(key, one); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java index 56871550dc9ae..6c742004be53a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java @@ -34,10 +34,10 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableUtils; -import org.junit.Test; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; public class TestTupleWritable { @@ -97,7 +97,7 @@ private int verifIter(Writable[] writs, TupleWritable t, int i) { i = 
verifIter(writs, ((TupleWritable)w), i); continue; } - assertTrue("Bad value", w.equals(writs[i++])); + assertEquals(w, writs[i++], "Bad value"); } return i; } @@ -140,7 +140,7 @@ public void testNestedIterable() throws Exception { new IntWritable(r.nextInt()) }; TupleWritable sTuple = makeTuple(writs); - assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0)); + assertEquals(writs.length, verifIter(writs, sTuple, 0), "Bad count"); } @Test @@ -164,7 +164,7 @@ public void testWritable() throws Exception { ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); TupleWritable dTuple = new TupleWritable(); dTuple.readFields(new DataInputStream(in)); - assertTrue("Failed to write/read tuple", sTuple.equals(dTuple)); + assertEquals(sTuple, dTuple, "Failed to write/read tuple"); } @Test @@ -183,8 +183,8 @@ public void testWideWritable() throws Exception { ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); TupleWritable dTuple = new TupleWritable(); dTuple.readFields(new DataInputStream(in)); - assertTrue("Failed to write/read tuple", sTuple.equals(dTuple)); - assertEquals("All tuple data has not been read from the stream",-1,in.read()); + assertEquals(sTuple, dTuple, "Failed to write/read tuple"); + assertEquals(-1, in.read(), "All tuple data has not been read from the stream"); } @Test @@ -201,8 +201,8 @@ public void testWideWritable2() throws Exception { ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); TupleWritable dTuple = new TupleWritable(); dTuple.readFields(new DataInputStream(in)); - assertTrue("Failed to write/read tuple", sTuple.equals(dTuple)); - assertEquals("All tuple data has not been read from the stream",-1,in.read()); + assertEquals(sTuple, dTuple, "Failed to write/read tuple"); + assertEquals(-1, in.read(), "All tuple data has not been read from the stream"); } /** @@ -225,8 +225,8 @@ public void testSparseWideWritable() throws Exception { ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); TupleWritable dTuple = new TupleWritable(); dTuple.readFields(new DataInputStream(in)); - assertTrue("Failed to write/read tuple", sTuple.equals(dTuple)); - assertEquals("All tuple data has not been read from the stream",-1,in.read()); + assertEquals(sTuple, dTuple, "Failed to write/read tuple"); + assertEquals(-1, in.read(), "All tuple data has not been read from the stream"); } @Test public void testWideTuple() throws Exception { @@ -244,7 +244,7 @@ public void testWideTuple() throws Exception { assertTrue(has); } else { - assertFalse("Tuple position is incorrectly labelled as set: " + pos, has); + assertFalse(has, "Tuple position is incorrectly labelled as set: " + pos); } } } @@ -264,7 +264,7 @@ public void testWideTuple2() throws Exception { assertTrue(has); } else { - assertFalse("Tuple position is incorrectly labelled as set: " + pos, has); + assertFalse(has, "Tuple position is incorrectly labelled as set: " + pos); } } } @@ -288,7 +288,7 @@ public void testWideTupleBoundary() throws Exception { assertTrue(has); } else { - assertFalse("Tuple position is incorrectly labelled as set: " + pos, has); + assertFalse(has, "Tuple position is incorrectly labelled as set: " + pos); } } } @@ -311,8 +311,8 @@ public void testPreVersion21Compatibility() throws Exception { ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); TupleWritable dTuple = new TupleWritable(); dTuple.readFields(new DataInputStream(in)); - assertTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", 
oldTuple.isCompatible(dTuple)); - assertEquals("All tuple data has not been read from the stream",-1,in.read()); + assertTrue(oldTuple.isCompatible(dTuple), "Tuple writable is unable to read pre-0.21 versions of TupleWritable"); + assertEquals(-1, in.read(), "All tuple data has not been read from the stream"); } @Test public void testPreVersion21CompatibilityEmptyTuple() throws Exception { @@ -324,8 +324,8 @@ public void testPreVersion21CompatibilityEmptyTuple() throws Exception { ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); TupleWritable dTuple = new TupleWritable(); dTuple.readFields(new DataInputStream(in)); - assertTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.isCompatible(dTuple)); - assertEquals("All tuple data has not been read from the stream",-1,in.read()); + assertTrue(oldTuple.isCompatible(dTuple), "Tuple writable is unable to read pre-0.21 versions of TupleWritable"); + assertEquals(-1, in.read(), "All tuple data has not been read from the stream"); } /** @@ -335,7 +335,7 @@ public void testPreVersion21CompatibilityEmptyTuple() throws Exception { private static class PreVersion21TupleWritable { private Writable[] values; - private long written = 0L; + private long written; private PreVersion21TupleWritable(Writable[] vals) { written = 0L; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java index 785898d33ede0..e8102654afcc7 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java @@ -33,8 +33,10 @@ import org.apache.hadoop.mapred.RecordReader; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.Test; -import static org.junit.Assert.assertTrue; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestWrappedRecordReaderClassloader { /** @@ -47,7 +49,7 @@ public void testClassLoader() throws Exception { JobConf job = new JobConf(); Fake_ClassLoader classLoader = new Fake_ClassLoader(); job.setClassLoader(classLoader); - assertTrue(job.getClassLoader() instanceof Fake_ClassLoader); + assertInstanceOf(Fake_ClassLoader.class, job.getClassLoader()); FileSystem fs = FileSystem.get(job); Path testdir = fs.makeQualified(new Path( @@ -58,7 +60,7 @@ public void testClassLoader() throws Exception { job.set("mapreduce.join.expr", CompositeInputFormat.compose("outer", IF_ClassLoaderChecker.class, src)); - CompositeInputFormat inputFormat = new CompositeInputFormat(); + CompositeInputFormat inputFormat = new CompositeInputFormat<>(); inputFormat.getRecordReader(inputFormat.getSplits(job, 1)[0], job, Reporter.NULL); } @@ -113,7 +115,7 @@ public InputSplit[] getSplits(JobConf conf, int splits) { public RecordReader getRecordReader(InputSplit ignored, JobConf job, Reporter reporter) { - return new RR_ClassLoaderChecker(job); + return new RR_ClassLoaderChecker<>(job); } } @@ 
-123,9 +125,9 @@ public static class RR_ClassLoaderChecker implements RecordReader { @SuppressWarnings("unchecked") public RR_ClassLoaderChecker(JobConf job) { - assertTrue("The class loader has not been inherited from " - + CompositeRecordReader.class.getSimpleName(), - job.getClassLoader() instanceof Fake_ClassLoader); + assertInstanceOf(Fake_ClassLoader.class, job.getClassLoader(), + "The class loader has not been inherited from " + + CompositeRecordReader.class.getSimpleName()); keyclass = (Class) job.getClass("test.fakeif.keyclass", NullWritable.class, WritableComparable.class); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChain.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChain.java index ddefcd09d8080..8fe481b0427e1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChain.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChain.java @@ -19,7 +19,7 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.Reducer; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java index 0933ecef941fe..a5f34ed517445 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChainMapReduce.java @@ -33,16 +33,16 @@ import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.mapred.TextOutputFormat; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.DataOutputStream; import java.io.IOException; import java.util.Iterator; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; public class TestChainMapReduce extends HadoopTestCase { @@ -108,7 +108,7 @@ public void testChain() throws Exception { fs.delete(outDir, true); if (!fs.mkdirs(inDir)) { - throw new IOException("Mkdirs failed to create " + inDir.toString()); + throw new IOException("Mkdirs failed to create " + inDir); } DataOutputStream file = fs.create(new Path(inDir, "part-0")); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java index 
b916026272e3f..5d84d83dfadfb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java @@ -30,9 +30,9 @@ import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.TextInputFormat; -import org.junit.Test; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; public class TestDelegatingInputFormat { @Test @@ -66,7 +66,7 @@ public void testSplitting() throws Exception { int[] bins = new int[3]; for (InputSplit split : splits) { - assertTrue(split instanceof TaggedInputSplit); + assertInstanceOf(TaggedInputSplit.class, split); final TaggedInputSplit tis = (TaggedInputSplit) split; int index = -1; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java index 3f31546c789d5..55a3e3d88d75a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java @@ -34,10 +34,10 @@ import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.mapred.TextOutputFormat; import org.apache.hadoop.mapred.Utils; -import org.junit.After; -import org.junit.Test; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.BufferedReader; import java.io.File; @@ -89,10 +89,10 @@ public void configure(String keySpec, int expect) throws Exception { conf.setMapperClass(InverseMapper.class); conf.setReducerClass(IdentityReducer.class); if (!fs.mkdirs(testdir)) { - throw new IOException("Mkdirs failed to create " + testdir.toString()); + throw new IOException("Mkdirs failed to create " + testdir); } if (!fs.mkdirs(inDir)) { - throw new IOException("Mkdirs failed to create " + inDir.toString()); + throw new IOException("Mkdirs failed to create " + inDir); } // set up input data in 2 files Path inFile = new Path(inDir, "part0"); @@ -133,7 +133,7 @@ public void configure(String keySpec, int expect) throws Exception { } } - @After + @AfterEach public void cleanup() { FileUtil.fullyDelete(TEST_DIR); } @@ -161,7 +161,7 @@ public void testBasicUnixComparator() throws Exception { byte[] line2_bytes = line2.getBytes(); public void localTestWithoutMRJob(String keySpec, int expect) throws Exception { - KeyFieldBasedComparator keyFieldCmp = new KeyFieldBasedComparator(); + KeyFieldBasedComparator keyFieldCmp = new KeyFieldBasedComparator<>(); localConf.setKeyFieldComparatorOptions(keySpec); keyFieldCmp.configure(localConf); int result = keyFieldCmp.compare(line1_bytes, 0, line1_bytes.length, diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedPartitioner.java index 02b0507742a71..f05855479f4e2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedPartitioner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedPartitioner.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.mapred.lib; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobConf; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestKeyFieldBasedPartitioner { @@ -31,18 +31,16 @@ public class TestKeyFieldBasedPartitioner { @Test public void testEmptyKey() throws Exception { KeyFieldBasedPartitioner kfbp = - new KeyFieldBasedPartitioner(); + new KeyFieldBasedPartitioner<>(); JobConf conf = new JobConf(); conf.setInt("num.key.fields.for.partition", 10); kfbp.configure(conf); - assertEquals("Empty key should map to 0th partition", - 0, kfbp.getPartition(new Text(), new Text(), 10)); + assertEquals(0, kfbp.getPartition(new Text(), new Text(), 10), "Empty key should map to 0th partition"); } @Test public void testMultiConfigure() { - KeyFieldBasedPartitioner kfbp = - new KeyFieldBasedPartitioner(); + KeyFieldBasedPartitioner kfbp = new KeyFieldBasedPartitioner<>(); JobConf conf = new JobConf(); conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k1,1"); kfbp.setConf(conf); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java index 388de0fb88d14..ea62de0958650 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java @@ -24,8 +24,8 @@ import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; -import org.junit.Test; -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestLineInputFormat { private static int MAX_LENGTH = 200; @@ -84,21 +84,21 @@ void checkFormat(JobConf job, int expectedN) throws IOException{ InputSplit[] splits = format.getSplits(job, ignoredNumSplits); // check all splits except last one - int count = 0; + int count; for (int j = 0; j < splits.length -1; j++) { - assertEquals("There are no split locations", 0, - splits[j].getLocations().length); + assertEquals(0, + splits[j].getLocations().length, "There are no split locations"); RecordReader reader = format.getRecordReader(splits[j], job, voidReporter); Class readerClass = reader.getClass(); - assertEquals("reader class is LineRecordReader.", - LineRecordReader.class, readerClass); + 
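// The (expected, actual) order is unchanged by the migration; only the message + // moves to the end. Where a test merely checks a runtime type, this patch + // prefers Jupiter's assertInstanceOf, as in TestDelegatingInputFormat above. + 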
assertEquals( + LineRecordReader.class, readerClass, "reader class is LineRecordReader."); LongWritable key = reader.createKey(); Class keyClass = key.getClass(); - assertEquals("Key class is LongWritable.", LongWritable.class, keyClass); + assertEquals(LongWritable.class, keyClass, "Key class is LongWritable."); Text value = reader.createValue(); Class valueClass = value.getClass(); - assertEquals("Value class is Text.", Text.class, valueClass); + assertEquals(Text.class, valueClass, "Value class is Text."); try { count = 0; @@ -108,8 +108,8 @@ void checkFormat(JobConf job, int expectedN) throws IOException{ } finally { reader.close(); } - assertEquals("number of lines in split is " + expectedN , - expectedN, count); + assertEquals( + expectedN, count, "number of lines in split is " + expectedN); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java index 115a6f70d0820..e388b0c6c84b0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleInputs.java @@ -25,12 +25,12 @@ import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.TextInputFormat; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.util.Map; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * @see TestDelegatingInputFormat diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java index 8829a093b13bd..489e8258d5947 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java @@ -39,9 +39,9 @@ import org.apache.hadoop.mapred.SequenceFileOutputFormat; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.mapred.TextOutputFormat; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.BufferedReader; import java.io.DataOutputStream; @@ -51,9 +51,7 @@ import java.util.Iterator; import java.util.Map; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -77,16 +75,18 @@ public void testWithCounters() throws Exception { } @SuppressWarnings("unchecked") - @Test(expected = IOException.class) + @Test public void 
testParallelCloseIOException() throws IOException { - RecordWriter writer = mock(RecordWriter.class); - Map recordWriters = mock(Map.class); - when(recordWriters.values()).thenReturn(Arrays.asList(writer, writer)); - doThrow(new IOException("test IO exception")).when(writer).close(null); - JobConf conf = createJobConf(); - MultipleOutputs mos = new MultipleOutputs(conf); - mos.setRecordWriters(recordWriters); - mos.close(); + assertThrows(IOException.class, () -> { + RecordWriter writer = mock(RecordWriter.class); + Map recordWriters = mock(Map.class); + when(recordWriters.values()).thenReturn(Arrays.asList(writer, writer)); + doThrow(new IOException("test IO exception")).when(writer).close(null); + JobConf conf = createJobConf(); + MultipleOutputs mos = new MultipleOutputs(conf); + mos.setRecordWriters(recordWriters); + mos.close(); + }); } private static final Path ROOT_DIR = new Path("testing/mo"); @@ -103,7 +103,7 @@ private Path getDir(Path dir) { return dir; } - @Before + @BeforeEach public void setUp() throws Exception { super.setUp(); Path rootDir = getDir(ROOT_DIR); @@ -117,7 +117,7 @@ public void setUp() throws Exception { } } - @After + @AfterEach public void tearDown() throws Exception { Path rootDir = getDir(ROOT_DIR); @@ -202,7 +202,7 @@ protected void _testMOWithJavaSerialization(boolean withCounters) throws Excepti count++; } reader.close(); - assertFalse(count == 0); + assertNotEquals(0, count); Counters.Group counters = job.getCounters().getGroup(MultipleOutputs.class.getName()); @@ -290,7 +290,7 @@ protected void _testMultipleOutputs(boolean withCounters) throws Exception { count++; } reader.close(); - assertFalse(count == 0); + assertNotEquals(0, count); // assert SequenceOutputFormat files correctness SequenceFile.Reader seqReader = @@ -308,7 +308,7 @@ protected void _testMultipleOutputs(boolean withCounters) throws Exception { count++; } seqReader.close(); - assertFalse(count == 0); + assertNotEquals(0, count); Counters.Group counters = job.getCounters().getGroup(MultipleOutputs.class.getName()); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java index 1059d29ee911d..93be3b5b3cd8c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java @@ -35,14 +35,14 @@ import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.mapred.TextOutputFormat; import org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.DataOutputStream; import java.io.IOException; import java.util.Iterator; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestMultithreadedMapRunner extends HadoopTestCase { @@ -83,7 +83,7 @@ private void run(boolean ioEx, boolean rtEx) throws Exception { fs.delete(outDir, true); if (!fs.mkdirs(inDir)) { - throw new IOException("Mkdirs failed to create " + 
inDir.toString()); + throw new IOException("Mkdirs failed to create " + inDir); } { DataOutputStream file = fs.create(new Path(inDir, "part-0")); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java index 845139bf35b7a..b69a542fe14d5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java @@ -20,14 +20,11 @@ import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; -import org.apache.hadoop.mapred.lib.*; import org.apache.hadoop.mapreduce.MapReduceTestUtil; -import org.junit.Test; -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; -import java.io.*; import java.nio.charset.StandardCharsets; -import java.util.*; import java.text.NumberFormat; public class TestAggregates { @@ -81,7 +78,7 @@ public static void launch() throws Exception { fileOut.close(); System.out.println("inputData:"); - System.out.println(inputData.toString()); + System.out.println(inputData); JobConf job = new JobConf(conf, TestAggregates.class); FileInputFormat.setInputPaths(job, INPUT_DIR); job.setInputFormat(TextInputFormat.class); @@ -114,7 +111,7 @@ public static void launch() throws Exception { Path outPath = new Path(OUTPUT_DIR, "part-00000"); String outdata = MapReduceTestUtil.readOutput(outPath,job); System.out.println("full out data:"); - System.out.println(outdata.toString()); + System.out.println(outdata); outdata = outdata.substring(0, expectedOutput.toString().length()); assertEquals(expectedOutput.toString(),outdata); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java index 203da4e0b7c6a..cd3c9cc4a1159 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java @@ -21,9 +21,9 @@ import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapred.JobConf; -import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; public class TestConstructQuery { private String[] fieldNames = new String[] { "id", "name", "value" }; @@ -31,8 +31,7 @@ public class TestConstructQuery { private String expected = "INSERT INTO hadoop_output (id,name,value) VALUES (?,?,?);"; private String nullExpected = "INSERT INTO hadoop_output VALUES (?,?,?);"; - private DBOutputFormat format - = new DBOutputFormat(); + private 
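// the diamond operator (<>) on the constructor call lets javac infer the type + // arguments from the declaration, so the right-hand side no longer repeats them. + 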
DBOutputFormat format = new DBOutputFormat<>(); @Test public void testConstructQuery() { String actual = format.constructQuery("hadoop_output", fieldNames); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java index 79c0bc12222c5..cc07217ce1ac2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java @@ -68,10 +68,10 @@ import org.apache.hadoop.util.ExitUtil; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.yarn.security.AMRMTokenIdentifier; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; public class TestPipeApplication { private static File workSpace = new File("target", @@ -82,7 +82,7 @@ public class TestPipeApplication { /** * test PipesMapRunner test the transfer data from reader * - * @throws Exception + * @throws Exception The exception thrown during unit testing. */ @Test public void testRunner() throws Exception { @@ -97,26 +97,25 @@ public void testRunner() throws Exception { conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName); - CombineOutputCollector output = new CombineOutputCollector( - new Counters.Counter(), new Progress()); + CombineOutputCollector output = new CombineOutputCollector<>( + new Counters.Counter(), new Progress()); FileSystem fs = new RawLocalFileSystem(); fs.initialize(FsConstants.LOCAL_FS_URI, conf); - Writer wr = new Writer(conf, fs.create( - new Path(workSpace + File.separator + "outfile")), IntWritable.class, - Text.class, null, null, true); + Writer wr = new Writer<>(conf, fs.create( + new Path(workSpace + File.separator + "outfile")), IntWritable.class, + Text.class, null, null, true); output.setWriter(wr); // stub for client File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub"); conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath()); // token for authorization - Token token = new Token( - "user".getBytes(), "password".getBytes(), new Text("kind"), new Text( - "service")); + Token token = new Token<>( + "user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service")); TokenCache.setJobToken(token, conf.getCredentials()); conf.setBoolean(MRJobConfig.SKIP_RECORDS, true); TestTaskReporter reporter = new TestTaskReporter(); - PipesMapRunner runner = new PipesMapRunner(); + PipesMapRunner runner = new PipesMapRunner<>(); initStdOut(conf); @@ -153,7 +152,7 @@ public void testRunner() throws Exception { * test org.apache.hadoop.mapred.pipes.Application * test a internal functions: MessageType.REGISTER_COUNTER, INCREMENT_COUNTER, STATUS, PROGRESS... * - * @throws Throwable + * @throws Throwable The exception thrown during unit testing. 
*/ @Test @@ -174,16 +173,15 @@ public void testApplication() throws Throwable { conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath()); // token for authorization - Token token = new Token( - "user".getBytes(), "password".getBytes(), new Text("kind"), new Text( - "service")); + Token token = new Token<>( + "user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service")); TokenCache.setJobToken(token, conf.getCredentials()); FakeCollector output = new FakeCollector(new Counters.Counter(), new Progress()); FileSystem fs = new RawLocalFileSystem(); fs.initialize(FsConstants.LOCAL_FS_URI, conf); - Writer wr = new Writer(conf, fs.create( + Writer wr = new Writer<>(conf, fs.create( new Path(workSpace.getAbsolutePath() + File.separator + "outfile")), IntWritable.class, Text.class, null, null, true); output.setWriter(wr); @@ -191,8 +189,8 @@ public void testApplication() throws Throwable { initStdOut(conf); - Application, Writable, IntWritable, Text> application = new Application, Writable, IntWritable, Text>( - conf, rReader, output, reporter, IntWritable.class, Text.class); + Application, Writable, IntWritable, Text> application = new Application<>( + conf, rReader, output, reporter, IntWritable.class, Text.class); application.getDownlink().flush(); application.getDownlink().mapItem(new IntWritable(3), new Text("txt")); @@ -245,7 +243,7 @@ public void testApplication() throws Throwable { /** * test org.apache.hadoop.mapred.pipes.Submitter * - * @throws Exception + * @throws Exception The exception thrown during unit testing. */ @Test public void testSubmitter() throws Exception { @@ -264,7 +262,7 @@ public void testSubmitter() throws Exception { Submitter.setKeepCommandFile(conf, false); Submitter.setIsJavaRecordReader(conf, false); Submitter.setIsJavaRecordWriter(conf, false); - PipesPartitioner partitioner = new PipesPartitioner(); + PipesPartitioner partitioner = new PipesPartitioner<>(); partitioner.configure(conf); Submitter.setJavaPartitioner(conf, partitioner.getClass()); @@ -284,7 +282,7 @@ public void testSubmitter() throws Exception { } catch (ExitUtil.ExitException e) { // System.exit prohibited! output message test assertTrue(out.toString().contains("")); - assertTrue(out.toString(), out.toString().contains("pipes")); + assertTrue(out.toString().contains("pipes"), out.toString()); assertTrue(out.toString().contains("[-input ] // Input directory")); assertTrue(out.toString() .contains("[-output ] // Output directory")); @@ -343,7 +341,7 @@ public void testSubmitter() throws Exception { String[] args = new String[22]; File input = new File(workSpace + File.separator + "input"); if (!input.exists()) { - Assert.assertTrue(input.createNewFile()); + Assertions.assertTrue(input.createNewFile()); } File outPut = new File(workSpace + File.separator + "output"); FileUtil.fullyDelete(outPut); @@ -388,7 +386,7 @@ public void testSubmitter() throws Exception { * test org.apache.hadoop.mapred.pipes.PipesReducer * test the transfer of data: key and value * - * @throws Exception + * @throws Exception The exception thrown during unit testing. 
*/ @Test public void testPipesReduser() throws Exception { @@ -396,25 +394,24 @@ public void testPipesReduser() throws Exception { File[] psw = cleanTokenPasswordFile(); JobConf conf = new JobConf(); try { - Token token = new Token( - "user".getBytes(), "password".getBytes(), new Text("kind"), new Text( - "service")); + Token token = new Token<>( + "user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service")); TokenCache.setJobToken(token, conf.getCredentials()); File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeReducerStub"); conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath()); - PipesReducer reducer = new PipesReducer(); + PipesReducer reducer = new PipesReducer<>(); reducer.configure(conf); BooleanWritable bw = new BooleanWritable(true); conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName); initStdOut(conf); conf.setBoolean(MRJobConfig.SKIP_RECORDS, true); - CombineOutputCollector output = new CombineOutputCollector( - new Counters.Counter(), new Progress()); + CombineOutputCollector output = new CombineOutputCollector<>( + new Counters.Counter(), new Progress()); Reporter reporter = new TestTaskReporter(); - List texts = new ArrayList(); + List texts = new ArrayList<>(); texts.add(new Text("first")); texts.add(new Text("second")); texts.add(new Text("third")); @@ -447,7 +444,7 @@ public void testPipesReduser() throws Exception { @Test public void testPipesPartitioner() { - PipesPartitioner partitioner = new PipesPartitioner(); + PipesPartitioner partitioner = new PipesPartitioner<>(); JobConf configuration = new JobConf(); Submitter.getJavaPartitioner(configuration); partitioner.configure(new JobConf()); @@ -569,7 +566,7 @@ private String readFile(File file) throws Exception { ByteArrayOutputStream out = new ByteArrayOutputStream(); InputStream is = new FileInputStream(file); byte[] buffer = new byte[1024]; - int counter = 0; + int counter; while ((counter = is.read(buffer)) >= 0) { out.write(buffer, 0, counter); } @@ -813,7 +810,7 @@ public void close() throws IOException { private class FakeCollector extends CombineOutputCollector { - final private Map collect = new HashMap(); + final private Map collect = new HashMap<>(); public FakeCollector(Counter outCounter, Progressable progressable) { super(outCounter, progressable); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java index f7ef958a443d8..48fbac70b1a6f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java @@ -41,16 +41,14 @@ import org.apache.hadoop.mapreduce.MapReduceTestUtil; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.ToolRunner; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.*; -@Ignore +@Disabled public class TestPipes { private static final 
Logger LOG = LoggerFactory.getLogger(TestPipes.class); @@ -67,8 +65,9 @@ public class TestPipes { static void cleanup(FileSystem fs, Path p) throws IOException { fs.delete(p, true); - assertFalse("output not cleaned up", fs.exists(p)); + assertFalse(fs.exists(p), "output not cleaned up"); } + @Test public void testPipes() throws IOException { if (System.getProperty("compile.c++") == null) { @@ -84,16 +83,16 @@ public void testPipes() throws IOException { Configuration conf = new Configuration(); dfs = new MiniDFSCluster.Builder(conf).numDataNodes(numWorkers).build(); mr = new MiniMRCluster(numWorkers, - dfs.getFileSystem().getUri().toString(), 1); + dfs.getFileSystem().getUri().toString(), 1); writeInputFile(dfs.getFileSystem(), inputPath); runProgram(mr, dfs, wordCountSimple, - inputPath, outputPath, 3, 2, twoSplitOutput, null); + inputPath, outputPath, 3, 2, twoSplitOutput, null); cleanup(dfs.getFileSystem(), outputPath); runProgram(mr, dfs, wordCountSimple, - inputPath, outputPath, 3, 0, noSortOutput, null); + inputPath, outputPath, 3, 0, noSortOutput, null); cleanup(dfs.getFileSystem(), outputPath); runProgram(mr, dfs, wordCountPart, - inputPath, outputPath, 3, 2, fixedPartitionOutput, null); + inputPath, outputPath, 3, 2, fixedPartitionOutput, null); runNonPipedProgram(mr, dfs, wordCountNoPipes, null); mr.waitUntilIdle(); } finally { @@ -152,15 +151,14 @@ static void writeInputFile(FileSystem fs, Path dir) throws IOException { } static void runProgram(MiniMRCluster mr, MiniDFSCluster dfs, - Path program, Path inputPath, Path outputPath, - int numMaps, int numReduces, String[] expectedResults, - JobConf conf - ) throws IOException { + Path program, Path inputPath, Path outputPath, + int numMaps, int numReduces, String[] expectedResults, + JobConf conf) throws IOException { Path wordExec = new Path("testing/bin/application"); - JobConf job = null; - if(conf == null) { + JobConf job; + if (conf == null) { job = mr.createJobConf(); - }else { + } else { job = new JobConf(conf); } job.setNumMapTasks(numMaps); @@ -174,7 +172,7 @@ static void runProgram(MiniMRCluster mr, MiniDFSCluster dfs, Submitter.setIsJavaRecordWriter(job, true); FileInputFormat.setInputPaths(job, inputPath); FileOutputFormat.setOutputPath(job, outputPath); - RunningJob rJob = null; + RunningJob rJob; if (numReduces == 0) { rJob = Submitter.jobSubmit(job); @@ -188,7 +186,7 @@ static void runProgram(MiniMRCluster mr, MiniDFSCluster dfs, } else { rJob = Submitter.runJob(job); } - assertTrue("pipes job failed", rJob.isSuccessful()); + assertTrue(rJob.isSuccessful(), "pipes job failed"); Counters counters = rJob.getCounters(); Counters.Group wordCountCounters = counters.getGroup("WORDCOUNT"); @@ -197,20 +195,18 @@ static void runProgram(MiniMRCluster mr, MiniDFSCluster dfs, System.out.println(c); ++numCounters; } - assertTrue("No counters found!", (numCounters > 0)); + assertTrue((numCounters > 0), "No counters found!"); } - List results = new ArrayList(); + List results = new ArrayList<>(); for (Path p:FileUtil.stat2Paths(dfs.getFileSystem().listStatus(outputPath, - new Utils.OutputFileUtils - .OutputFilesFilter()))) { + new Utils.OutputFileUtils.OutputFilesFilter()))) { results.add(MapReduceTestUtil.readOutput(p, job)); } - assertEquals("number of reduces is wrong", - expectedResults.length, results.size()); + assertEquals(expectedResults.length, results.size(), "number of reduces is wrong"); for(int i=0; i < results.size(); i++) { - assertEquals("pipes program " + program + " output " + i + " wrong", - expectedResults[i], 
results.get(i)); + assertEquals(expectedResults[i], results.get(i), + "pipes program " + program + " output " + i + " wrong"); } } @@ -220,10 +216,10 @@ static void runProgram(MiniMRCluster mr, MiniDFSCluster dfs, * @param mr The mini mr cluster * @param dfs the dfs cluster * @param program the program to run - * @throws IOException + * @throws IOException The I/O exception thrown during unit testing. */ static void runNonPipedProgram(MiniMRCluster mr, MiniDFSCluster dfs, - Path program, JobConf conf) throws IOException { + Path program, JobConf conf) throws IOException { JobConf job; if(conf == null) { job = mr.createJobConf(); @@ -233,8 +229,7 @@ static void runNonPipedProgram(MiniMRCluster mr, MiniDFSCluster dfs, job.setInputFormat(WordCountInputFormat.class); FileSystem local = FileSystem.getLocal(job); - Path testDir = new Path("file:" + System.getProperty("test.build.data"), - "pipes"); + Path testDir = new Path("file:" + System.getProperty("test.build.data"), "pipes"); Path inDir = new Path(testDir, "input"); nonPipedOutDir = new Path(testDir, "output"); Path wordExec = new Path("testing/bin/application"); @@ -265,20 +260,21 @@ static void runNonPipedProgram(MiniMRCluster mr, MiniDFSCluster dfs, job.writeXml(out); out.close(); System.err.println("About to run: Submitter -conf " + jobXml + - " -input " + inDir + " -output " + nonPipedOutDir + - " -program " + - dfs.getFileSystem().makeQualified(wordExec)); + " -input " + inDir + " -output " + nonPipedOutDir + + " -program " + + dfs.getFileSystem().makeQualified(wordExec)); + try { int ret = ToolRunner.run(new Submitter(), - new String[]{"-conf", jobXml.toString(), - "-input", inDir.toString(), - "-output", nonPipedOutDir.toString(), - "-program", - dfs.getFileSystem().makeQualified(wordExec).toString(), - "-reduces", "2"}); + new String[]{"-conf", jobXml.toString(), + "-input", inDir.toString(), + "-output", nonPipedOutDir.toString(), + "-program", + dfs.getFileSystem().makeQualified(wordExec).toString(), + "-reduces", "2"}); assertEquals(0, ret); } catch (Exception e) { - assertTrue("got exception: " + StringUtils.stringifyException(e), false); + fail("got exception: " + StringUtils.stringifyException(e)); } } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipesNonJavaInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipesNonJavaInputFormat.java index ce1f4f579046f..46affccc482fc 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipesNonJavaInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipesNonJavaInputFormat.java @@ -29,9 +29,9 @@ import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.pipes.TestPipeApplication.FakeSplit; import org.apache.hadoop.util.StringUtils; -import org.junit.Assert; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; @@ -57,16 +57,16 @@ public void testFormat() throws IOException { // input and output files File input1 = new File(workSpace + File.separator + "input1"); if (!input1.getParentFile().exists()) { - 
Assert.assertTrue(input1.getParentFile().mkdirs()); + Assertions.assertTrue(input1.getParentFile().mkdirs()); } if (!input1.exists()) { - Assert.assertTrue(input1.createNewFile()); + Assertions.assertTrue(input1.createNewFile()); } File input2 = new File(workSpace + File.separator + "input2"); if (!input2.exists()) { - Assert.assertTrue(input2.createNewFile()); + Assertions.assertTrue(input2.createNewFile()); } // set data for splits conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR, From bd685ee681b5ee6fd3854587231c491c0ada7ece Mon Sep 17 00:00:00 2001 From: fanshilun Date: Sat, 8 Feb 2025 22:47:53 +0800 Subject: [PATCH 2/5] MAPREDUCE-7421. [JDK17] Upgrade Junit 4 to 5 in hadoop-mapreduce-client-jobclient Part1. --- .../hadoop/conf/TestNoDefaultsJobConf.java | 9 +- .../java/org/apache/hadoop/fs/DFSCIOTest.java | 4 +- .../org/apache/hadoop/hdfs/TestNNBench.java | 17 +- .../hadoop/ipc/TestMRCJCSocketFactory.java | 18 +- .../org/apache/hadoop/mapred/MRCaching.java | 7 +- .../apache/hadoop/mapred/TestBadRecords.java | 5 +- .../hadoop/mapred/TestClientRedirect.java | 14 +- .../mapred/TestClientServiceDelegate.java | 201 ++++++++++-------- .../mapred/TestCombineFileInputFormat.java | 2 +- .../TestCombineSequenceFileInputFormat.java | 4 +- .../mapred/TestCombineTextInputFormat.java | 8 +- .../mapred/TestCommandLineJobSubmission.java | 4 +- .../TestConcatenatedCompressedInput.java | 85 +++----- .../mapred/TestFixedLengthInputFormat.java | 29 +-- .../org/apache/hadoop/mapred/TestIFile.java | 3 +- .../hadoop/mapred/TestJavaSerialization.java | 21 +- .../apache/hadoop/mapred/TestJobCleanup.java | 23 +- .../apache/hadoop/mapred/TestJobClients.java | 6 +- .../apache/hadoop/mapred/TestJobCounters.java | 14 +- .../hadoop/mapred/TestLocalJobSubmission.java | 18 +- .../mapred/TestMRCJCFileInputFormat.java | 8 +- .../hadoop/mapred/TestMRCJCJobClient.java | 5 +- .../hadoop/mapred/TestMRCJCJobConf.java | 8 +- .../mapred/TestMROpportunisticMaps.java | 2 +- .../mapred/TestMRTimelineEventHandling.java | 121 +++++------ .../apache/hadoop/mapred/TestMapProgress.java | 4 +- .../org/apache/hadoop/mapred/TestMapRed.java | 10 +- .../hadoop/mapred/TestMiniMRBringup.java | 5 +- .../hadoop/mapred/TestMiniMRChildTask.java | 23 +- .../hadoop/mapred/TestMiniMRClasspath.java | 7 +- .../mapred/TestMiniMRClientCluster.java | 43 ++-- .../hadoop/mapred/TestMiniMRDFSCaching.java | 4 +- .../TestMiniMRWithDFSWithDistinctUsers.java | 5 +- .../mapred/TestMultipleLevelCaching.java | 18 +- .../hadoop/mapred/TestNetworkedJob.java | 5 +- .../mapred/TestOldCombinerGrouping.java | 21 +- .../mapred/TestQueueConfigurationParser.java | 3 +- .../apache/hadoop/mapred/TestReduceFetch.java | 8 +- .../mapred/TestReduceFetchFromPartialMem.java | 5 +- .../apache/hadoop/mapred/TestReporter.java | 3 +- .../mapred/TestResourceMgrDelegate.java | 70 +++--- .../TestSequenceFileAsBinaryInputFormat.java | 12 +- .../TestSequenceFileAsBinaryOutputFormat.java | 35 ++- .../TestSequenceFileAsTextInputFormat.java | 3 +- .../TestSpecialCharactersInOutputPath.java | 3 +- .../apache/hadoop/mapred/TestTaskCommit.java | 12 +- .../mapred/TestTaskPerformanceSplits.java | 2 +- .../apache/hadoop/mapred/TestTaskStatus.java | 87 ++++---- .../hadoop/mapred/TestTextInputFormat.java | 31 +-- .../org/apache/hadoop/mapred/TestUtils.java | 3 +- .../apache/hadoop/mapred/TestYARNRunner.java | 32 +-- .../mapred/jobcontrol/TestJobControl.java | 22 +- .../hadoop/mapred/join/TestDatamerge.java | 8 +- .../hadoop/mapred/join/TestTupleWritable.java | 12 +- 
.../TestWrappedRecordReaderClassloader.java | 14 +- .../mapred/lib/TestDelegatingInputFormat.java | 5 +- .../lib/TestKeyFieldBasedComparator.java | 6 +- .../lib/TestKeyFieldBasedPartitioner.java | 8 +- .../mapred/lib/TestLineInputFormat.java | 12 +- .../mapred/lib/TestMultipleOutputs.java | 5 +- .../lib/TestMultithreadedMapRunner.java | 2 +- .../mapred/lib/aggregate/TestAggregates.java | 7 +- .../mapred/lib/db/TestConstructQuery.java | 3 +- .../mapred/pipes/TestPipeApplication.java | 63 +++--- .../apache/hadoop/mapred/pipes/TestPipes.java | 61 +++--- .../pipes/TestPipesNonJavaInputFormat.java | 14 +- 66 files changed, 690 insertions(+), 642 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java index 4e63121dd6970..ffe4850bf836d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java @@ -39,7 +39,10 @@ import java.io.OutputStreamWriter; import java.io.Writer; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * This testcase tests that a JobConf without default values submits jobs @@ -95,8 +98,8 @@ public void testNoDefaults() throws Exception { JobClient.runJob(conf); Path[] outputFiles = FileUtil.stat2Paths( - getFileSystem().listStatus(outDir, - new Utils.OutputFileUtils.OutputFilesFilter())); + getFileSystem().listStatus(outDir, + new Utils.OutputFileUtils.OutputFilesFilter())); if (outputFiles.length > 0) { InputStream is = getFileSystem().open(outputFiles[0]); BufferedReader reader = new BufferedReader(new InputStreamReader(is)); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java index b96a64d56041e..b2ab0bc8bf60c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java @@ -34,7 +34,7 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.mapred.*; -import org.junit.Ignore; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,7 +66,7 @@ *
<li>standard i/o rate deviation</li>
  • * */ -@Ignore +@Disabled public class DFSCIOTest { // Constants private static final Logger LOG = LoggerFactory.getLogger(DFSCIOTest.class); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java index 15e92a0385ecc..621ecc021c717 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java @@ -58,12 +58,11 @@ public void tearDown() throws Exception { public void testNNBenchCreateReadAndDelete() throws Exception { runNNBench(createJobConf(), "create_write"); Path path = new Path(BASE_DIR + "/data/file_0_0"); - assertTrue( - getFileSystem().exists(path), "create_write should create the file"); + assertTrue(getFileSystem().exists(path), "create_write should create the file"); runNNBench(createJobConf(), "open_read"); runNNBench(createJobConf(), "delete"); - assertFalse( - getFileSystem().exists(path), "Delete operation should delete the file"); + assertFalse(getFileSystem().exists(path), + "Delete operation should delete the file"); } @Test @@ -71,13 +70,11 @@ public void testNNBenchCreateReadAndDelete() throws Exception { public void testNNBenchCreateAndRename() throws Exception { runNNBench(createJobConf(), "create_write"); Path path = new Path(BASE_DIR + "/data/file_0_0"); - assertTrue( - getFileSystem().exists(path), "create_write should create the file"); + assertTrue(getFileSystem().exists(path), "create_write should create the file"); runNNBench(createJobConf(), "rename"); Path renamedPath = new Path(BASE_DIR + "/data/file_0_r_0"); assertFalse(getFileSystem().exists(path), "Rename should rename the file"); - assertTrue( - getFileSystem().exists(renamedPath), "Rename should rename the file"); + assertTrue(getFileSystem().exists(renamedPath), "Rename should rename the file"); } @Test @@ -101,8 +98,8 @@ public void testNNBenchCrossCluster() throws Exception { runNNBench(createJobConf(), "create_write", baseDir); Path path = new Path(BASE_DIR + "/data/file_0_0"); - assertTrue( - dfsCluster.getFileSystem().exists(path), "create_write should create the file"); + assertTrue(dfsCluster.getFileSystem().exists(path), + "create_write should create the file"); dfsCluster.shutdown(); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java index 4766f42b4458c..8899da38248be 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java @@ -34,9 +34,11 @@ import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster; import org.apache.hadoop.net.StandardSocketFactory; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import static 
org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * This class checks that RPCs can use specialized socket factories. */ @@ -56,13 +58,13 @@ public void testSocketFactory() throws IOException { // Get a reference to its DFS directly FileSystem fs = cluster.getFileSystem(); - Assertions.assertTrue(fs instanceof DistributedFileSystem); + assertTrue(fs instanceof DistributedFileSystem); DistributedFileSystem directDfs = (DistributedFileSystem) fs; Configuration cconf = getCustomSocketConfigs(nameNodePort); fs = FileSystem.get(cconf); - Assertions.assertTrue(fs instanceof DistributedFileSystem); + assertTrue(fs instanceof DistributedFileSystem); DistributedFileSystem dfs = (DistributedFileSystem) fs; JobClient client = null; @@ -72,12 +74,12 @@ public void testSocketFactory() throws IOException { // could we test Client-DataNode connections? Path filePath = new Path("/dir"); - Assertions.assertFalse(directDfs.exists(filePath)); - Assertions.assertFalse(dfs.exists(filePath)); + assertFalse(directDfs.exists(filePath)); + assertFalse(dfs.exists(filePath)); directDfs.mkdirs(filePath); - Assertions.assertTrue(directDfs.exists(filePath)); - Assertions.assertTrue(dfs.exists(filePath)); + assertTrue(directDfs.exists(filePath)); + assertTrue(dfs.exists(filePath)); // This will test RPC to a Resource Manager fs = FileSystem.get(sconf); @@ -95,7 +97,7 @@ public void testSocketFactory() throws IOException { client = new JobClient(jconf); JobStatus[] jobs = client.jobsToComplete(); - Assertions.assertTrue(jobs.length == 0); + assertTrue(jobs.length == 0); } finally { closeClient(client); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java index 307ac53521058..5d458b0d9d88f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java @@ -32,7 +32,7 @@ import java.net.URI; -import org.junit.jupiter.api.Assertions; +import static org.junit.jupiter.api.Assertions.assertEquals; public class MRCaching { static String testStr = "This is a test file " + "used for testing caching " @@ -299,13 +299,12 @@ private static void validateCacheFileSizes(Configuration job, String configValues = job.get(configKey, ""); System.out.println(configKey + " -> " + configValues); String[] realSizes = StringUtils.getStrings(configValues); - Assertions.assertEquals( - expectedSizes.length, realSizes.length, "Number of files for "+ configKey); + assertEquals(expectedSizes.length, realSizes.length, "Number of files for "+ configKey); for (int i=0; i < expectedSizes.length; ++i) { long actual = Long.valueOf(realSizes[i]); long expected = expectedSizes[i]; - Assertions.assertEquals(expected, actual, "File "+ i +" for "+ configKey); + assertEquals(expected, actual, "File "+ i +" for "+ configKey); } } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java index 
a0bc9dc1e7d35..b39f9d8d2ef01 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java @@ -39,7 +39,7 @@ import org.apache.hadoop.util.ReflectionUtils; import org.junit.jupiter.api.BeforeAll; -import org.junit.Ignore; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,7 +47,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertNotNull; -@Ignore + +@Disabled public class TestBadRecords extends ClusterMapReduceTestCase { private static final Logger LOG = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java index cc4988534a91e..2c2066bac7529 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java @@ -146,11 +146,13 @@ import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class TestClientRedirect { static { @@ -200,7 +202,7 @@ public void testRedirect() throws Exception { org.apache.hadoop.mapreduce.Counters counters = cluster.getJob(jobID).getCounters(); validateCounters(counters); - Assertions.assertTrue(amContact); + assertTrue(amContact); LOG.info("Sleeping for 5 seconds before stop for" + " the client socket to not get EOF immediately.."); @@ -218,7 +220,7 @@ public void testRedirect() throws Exception { // Same client //results are returned from fake (not started job) counters = cluster.getJob(jobID).getCounters(); - Assertions.assertEquals(0, counters.countCounters()); + assertEquals(0, counters.countCounters()); Job job = cluster.getJob(jobID); org.apache.hadoop.mapreduce.TaskID taskId = new org.apache.hadoop.mapreduce.TaskID(jobID, TaskType.MAP, 0); @@ -242,7 +244,7 @@ public void testRedirect() throws Exception { counters = cluster.getJob(jobID).getCounters(); validateCounters(counters); - Assertions.assertTrue(amContact); + assertTrue(amContact); // Stop the AM. It is not even restarting. So it should be treated as // completed. 
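The hunks above for TestMRCJCSocketFactory, MRCaching, TestBadRecords and TestClientRedirect all apply the same mechanical conversion: @Ignore becomes @Disabled, qualified Assert/Assertions calls become statically imported Jupiter assertions, and the optional failure message moves from the first argument to the last. A minimal, self-contained sketch of the target style; the class name and values are illustrative, not code from this patch:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

// Illustrative only: the JUnit 5 conventions this patch converges on.
class MigrationStyleExample {

  @Test
  void messageMovesToLastArgument() {
    int counters = 1;
    // JUnit 4: assertEquals("wrong counter count", 1, counters);
    // JUnit 5: the optional message is the trailing parameter.
    assertEquals(1, counters, "wrong counter count");
    assertTrue(counters > 0, "expected at least one counter");
  }

  // @Disabled replaces JUnit 4's @Ignore on skipped tests and classes.
  @Disabled
  @Test
  void skippedForNow() {
  }
}
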
@@ -251,7 +253,7 @@ public void testRedirect() throws Exception { // Same client counters = cluster.getJob(jobID).getCounters(); validateCounters(counters); - Assertions.assertTrue(hsContact); + assertTrue(hsContact); rmService.stop(); historyService.stop(); @@ -267,7 +269,7 @@ private void validateCounters(org.apache.hadoop.mapreduce.Counters counters) { LOG.info("Counter is " + itc.next().getDisplayName()); } } - Assertions.assertEquals(1, counters.countCounters()); + assertEquals(1, counters.countCounters()); } class RMService extends AbstractService implements ApplicationClientProtocol { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java index 49956382a5e28..9ba9d64c91487 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java @@ -58,35 +58,36 @@ import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.Records; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** * Tests for ClientServiceDelegate.java */ - -@RunWith(value = Parameterized.class) public class TestClientServiceDelegate { private JobID oldJobId = JobID.forName("job_1315895242400_2"); private org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = TypeConverter .toYarn(oldJobId); private boolean isAMReachableFromClient; - public TestClientServiceDelegate(boolean isAMReachableFromClient) { - this.isAMReachableFromClient = isAMReachableFromClient; + public void initTestClientServiceDelegate(boolean pIsAMReachableFromClient) { + this.isAMReachableFromClient = pIsAMReachableFromClient; } - - @Parameters + public static Collection data() { Object[][] data = new Object[][] { { true }, { false } }; return Arrays.asList(data); } - @Test - public void testUnknownAppInRM() throws Exception { + @MethodSource("data") + @ParameterizedTest + public void testUnknownAppInRM(boolean pIsAMReachableFromClient) throws Exception { + initTestClientServiceDelegate(pIsAMReachableFromClient); MRClientProtocol historyServerProxy = mock(MRClientProtocol.class); when(historyServerProxy.getJobReport(getJobReportRequest())).thenReturn( getJobReportResponse()); @@ -94,12 +95,14 @@ public void testUnknownAppInRM() throws Exception { historyServerProxy, getRMDelegate()); JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertNotNull(jobStatus); + assertNotNull(jobStatus); } - @Test - public void testRemoteExceptionFromHistoryServer() throws Exception { - + @MethodSource("data") + 
@ParameterizedTest + public void testRemoteExceptionFromHistoryServer(boolean pIsAMReachableFromClient) + throws Exception { + initTestClientServiceDelegate(pIsAMReachableFromClient); MRClientProtocol historyServerProxy = mock(MRClientProtocol.class); when(historyServerProxy.getJobReport(getJobReportRequest())).thenThrow( new IOException("Job ID doesnot Exist")); @@ -113,16 +116,18 @@ public void testRemoteExceptionFromHistoryServer() throws Exception { try { clientServiceDelegate.getJobStatus(oldJobId); - Assertions.fail("Invoke should throw exception after retries."); + fail("Invoke should throw exception after retries."); } catch (IOException e) { - Assertions.assertTrue(e.getMessage().contains( + assertTrue(e.getMessage().contains( "Job ID doesnot Exist")); } } - @Test - public void testRetriesOnConnectionFailure() throws Exception { - + @MethodSource("data") + @ParameterizedTest + public void testRetriesOnConnectionFailure(boolean pIsAMReachableFromClient) + throws Exception { + initTestClientServiceDelegate(pIsAMReachableFromClient); MRClientProtocol historyServerProxy = mock(MRClientProtocol.class); when(historyServerProxy.getJobReport(getJobReportRequest())).thenThrow( new RuntimeException("1")).thenThrow(new RuntimeException("2")) @@ -136,13 +141,16 @@ public void testRetriesOnConnectionFailure() throws Exception { historyServerProxy, rm); JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertNotNull(jobStatus); + assertNotNull(jobStatus); verify(historyServerProxy, times(3)).getJobReport( any(GetJobReportRequest.class)); } - @Test - public void testRetriesOnAMConnectionFailures() throws Exception { + @MethodSource("data") + @ParameterizedTest + public void testRetriesOnAMConnectionFailures(boolean pIsAMReachableFromClient) + throws Exception { + initTestClientServiceDelegate(pIsAMReachableFromClient); if (!isAMReachableFromClient) { return; } @@ -175,16 +183,19 @@ MRClientProtocol instantiateAMProxy( JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertNotNull(jobStatus); + assertNotNull(jobStatus); // assert maxClientRetry is not decremented. - Assertions.assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, + assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES), clientServiceDelegate .getMaxClientRetry()); verify(amProxy, times(5)).getJobReport(any(GetJobReportRequest.class)); } - @Test - public void testNoRetryOnAMAuthorizationException() throws Exception { + @MethodSource("data") + @ParameterizedTest + public void testNoRetryOnAMAuthorizationException(boolean pIsAMReachableFromClient) + throws Exception { + initTestClientServiceDelegate(pIsAMReachableFromClient); if (!isAMReachableFromClient) { return; } @@ -213,27 +224,30 @@ MRClientProtocol instantiateAMProxy( try { clientServiceDelegate.getJobStatus(oldJobId); - Assertions.fail("Exception should be thrown upon AuthorizationException"); + fail("Exception should be thrown upon AuthorizationException"); } catch (IOException e) { - Assertions.assertEquals(AuthorizationException.class.getName() + ": Denied", + assertEquals(AuthorizationException.class.getName() + ": Denied", e.getMessage()); } // assert maxClientRetry is not decremented. 
- Assertions.assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, + assertEquals(conf.getInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES), clientServiceDelegate .getMaxClientRetry()); verify(amProxy, times(1)).getJobReport(any(GetJobReportRequest.class)); } - @Test - public void testHistoryServerNotConfigured() throws Exception { + @MethodSource("data") + @ParameterizedTest + public void testHistoryServerNotConfigured( + boolean pIsAMReachableFromClient) throws Exception { + initTestClientServiceDelegate(pIsAMReachableFromClient); //RM doesn't have app report and job History Server is not configured ClientServiceDelegate clientServiceDelegate = getClientServiceDelegate( null, getRMDelegate()); JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertEquals("N/A", jobStatus.getUsername()); - Assertions.assertEquals(JobStatus.State.PREP, jobStatus.getState()); + assertEquals("N/A", jobStatus.getUsername()); + assertEquals(JobStatus.State.PREP, jobStatus.getState()); //RM has app report and job History Server is not configured ResourceMgrDelegate rm = mock(ResourceMgrDelegate.class); @@ -243,12 +257,15 @@ public void testHistoryServerNotConfigured() throws Exception { clientServiceDelegate = getClientServiceDelegate(null, rm); jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertEquals(applicationReport.getUser(), jobStatus.getUsername()); - Assertions.assertEquals(JobStatus.State.SUCCEEDED, jobStatus.getState()); + assertEquals(applicationReport.getUser(), jobStatus.getUsername()); + assertEquals(JobStatus.State.SUCCEEDED, jobStatus.getState()); } - - @Test - public void testJobReportFromHistoryServer() throws Exception { + + @MethodSource("data") + @ParameterizedTest + public void testJobReportFromHistoryServer( + boolean pIsAMReachableFromClient) throws Exception { + initTestClientServiceDelegate(pIsAMReachableFromClient); MRClientProtocol historyServerProxy = mock(MRClientProtocol.class); when(historyServerProxy.getJobReport(getJobReportRequest())).thenReturn( getJobReportResponseFromHistoryServer()); @@ -259,15 +276,18 @@ public void testJobReportFromHistoryServer() throws Exception { historyServerProxy, rm); JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertNotNull(jobStatus); - Assertions.assertEquals("TestJobFilePath", jobStatus.getJobFile()); - Assertions.assertEquals("http://TestTrackingUrl", jobStatus.getTrackingUrl()); - Assertions.assertEquals(1.0f, jobStatus.getMapProgress(), 0.0f); - Assertions.assertEquals(1.0f, jobStatus.getReduceProgress(), 0.0f); + assertNotNull(jobStatus); + assertEquals("TestJobFilePath", jobStatus.getJobFile()); + assertEquals("http://TestTrackingUrl", jobStatus.getTrackingUrl()); + assertEquals(1.0f, jobStatus.getMapProgress(), 0.0f); + assertEquals(1.0f, jobStatus.getReduceProgress(), 0.0f); } - - @Test - public void testCountersFromHistoryServer() throws Exception { + + @MethodSource("data") + @ParameterizedTest + public void testCountersFromHistoryServer( + boolean pIsAMReachableFromClient) throws Exception { + initTestClientServiceDelegate(pIsAMReachableFromClient); MRClientProtocol historyServerProxy = mock(MRClientProtocol.class); when(historyServerProxy.getCounters(getCountersRequest())).thenReturn( getCountersResponseFromHistoryServer()); @@ -278,12 +298,15 @@ public void testCountersFromHistoryServer() throws Exception { historyServerProxy, rm); Counters counters = 
TypeConverter.toYarn(clientServiceDelegate.getJobCounters(oldJobId)); - Assertions.assertNotNull(counters); - Assertions.assertEquals(1001, counters.getCounterGroup("dummyCounters").getCounter("dummyCounter").getValue()); + assertNotNull(counters); + assertEquals(1001, counters.getCounterGroup("dummyCounters").getCounter("dummyCounter").getValue()); } - @Test - public void testReconnectOnAMRestart() throws IOException { + @MethodSource("data") + @ParameterizedTest + public void testReconnectOnAMRestart( + boolean pIsAMReachableFromClient) throws IOException { + initTestClientServiceDelegate(pIsAMReachableFromClient); //test not applicable when AM not reachable //as instantiateAMProxy is not called at all if(!isAMReachableFromClient) { @@ -338,23 +361,26 @@ public void testReconnectOnAMRestart() throws IOException { clientServiceDelegate).instantiateAMProxy(any(InetSocketAddress.class)); JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertNotNull(jobStatus); - Assertions.assertEquals("jobName-firstGen", jobStatus.getJobName()); + assertNotNull(jobStatus); + assertEquals("jobName-firstGen", jobStatus.getJobName()); jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertNotNull(jobStatus); - Assertions.assertEquals("jobName-secondGen", jobStatus.getJobName()); + assertNotNull(jobStatus); + assertEquals("jobName-secondGen", jobStatus.getJobName()); jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertNotNull(jobStatus); - Assertions.assertEquals("jobName-secondGen", jobStatus.getJobName()); + assertNotNull(jobStatus); + assertEquals("jobName-secondGen", jobStatus.getJobName()); verify(clientServiceDelegate, times(2)).instantiateAMProxy( any(InetSocketAddress.class)); } - - @Test - public void testAMAccessDisabled() throws IOException { + + @MethodSource("data") + @ParameterizedTest + public void testAMAccessDisabled( + boolean pIsAMReachableFromClient) throws IOException { + initTestClientServiceDelegate(pIsAMReachableFromClient); //test only applicable when AM not reachable if(isAMReachableFromClient) { return; @@ -379,56 +405,65 @@ public void testAMAccessDisabled() throws IOException { historyServerProxy, rmDelegate)); JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertNotNull(jobStatus); - Assertions.assertEquals("N/A", jobStatus.getJobName()); + assertNotNull(jobStatus); + assertEquals("N/A", jobStatus.getJobName()); verify(clientServiceDelegate, times(0)).instantiateAMProxy( any(InetSocketAddress.class)); // Should not reach AM even for second and third times too. 
jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertNotNull(jobStatus); - Assertions.assertEquals("N/A", jobStatus.getJobName()); + assertNotNull(jobStatus); + assertEquals("N/A", jobStatus.getJobName()); verify(clientServiceDelegate, times(0)).instantiateAMProxy( any(InetSocketAddress.class)); jobStatus = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertNotNull(jobStatus); - Assertions.assertEquals("N/A", jobStatus.getJobName()); + assertNotNull(jobStatus); + assertEquals("N/A", jobStatus.getJobName()); verify(clientServiceDelegate, times(0)).instantiateAMProxy( any(InetSocketAddress.class)); // The third time around, app is completed, so should go to JHS JobStatus jobStatus1 = clientServiceDelegate.getJobStatus(oldJobId); - Assertions.assertNotNull(jobStatus1); - Assertions.assertEquals("TestJobFilePath", jobStatus1.getJobFile()); - Assertions.assertEquals("http://TestTrackingUrl", jobStatus1.getTrackingUrl()); - Assertions.assertEquals(1.0f, jobStatus1.getMapProgress(), 0.0f); - Assertions.assertEquals(1.0f, jobStatus1.getReduceProgress(), 0.0f); + assertNotNull(jobStatus1); + assertEquals("TestJobFilePath", jobStatus1.getJobFile()); + assertEquals("http://TestTrackingUrl", jobStatus1.getTrackingUrl()); + assertEquals(1.0f, jobStatus1.getMapProgress(), 0.0f); + assertEquals(1.0f, jobStatus1.getReduceProgress(), 0.0f); verify(clientServiceDelegate, times(0)).instantiateAMProxy( any(InetSocketAddress.class)); } - - @Test - public void testRMDownForJobStatusBeforeGetAMReport() throws IOException { + + @MethodSource("data") + @ParameterizedTest + public void testRMDownForJobStatusBeforeGetAMReport( + boolean pIsAMReachableFromClient) throws IOException { + initTestClientServiceDelegate(pIsAMReachableFromClient); Configuration conf = new YarnConfiguration(); testRMDownForJobStatusBeforeGetAMReport(conf, MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES); } - @Test - public void testRMDownForJobStatusBeforeGetAMReportWithRetryTimes() + @MethodSource("data") + @ParameterizedTest + public void testRMDownForJobStatusBeforeGetAMReportWithRetryTimes( + boolean pIsAMReachableFromClient) throws IOException { + initTestClientServiceDelegate(pIsAMReachableFromClient); Configuration conf = new YarnConfiguration(); conf.setInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, 2); testRMDownForJobStatusBeforeGetAMReport(conf, conf.getInt( MRJobConfig.MR_CLIENT_MAX_RETRIES, MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES)); } - - @Test - public void testRMDownRestoreForJobStatusBeforeGetAMReport() + + @MethodSource("data") + @ParameterizedTest + public void testRMDownRestoreForJobStatusBeforeGetAMReport( + boolean pIsAMReachableFromClient) throws IOException { + initTestClientServiceDelegate(pIsAMReachableFromClient); Configuration conf = new YarnConfiguration(); conf.setInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, 3); @@ -451,7 +486,7 @@ public void testRMDownRestoreForJobStatusBeforeGetAMReport() JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId); verify(rmDelegate, times(3)).getApplicationReport( any(ApplicationId.class)); - Assertions.assertNotNull(jobStatus); + assertNotNull(jobStatus); } catch (YarnException e) { throw new IOException(e); } @@ -476,7 +511,7 @@ private void testRMDownForJobStatusBeforeGetAMReport(Configuration conf, conf, rmDelegate, oldJobId, historyServerProxy); try { clientServiceDelegate.getJobStatus(oldJobId); - Assertions.fail("It should throw exception after retries"); + fail("It should throw exception after retries"); } catch (IOException e) { 
System.out.println("fail to get job status,and e=" + e.toString()); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java index 7bbf654a4d3fd..b45cfe2cd0858 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java @@ -30,7 +30,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestCombineFileInputFormat { private static final Logger LOG = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java index 2636b2ca6a84f..c3b41b6be89d4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java @@ -96,8 +96,8 @@ public void testFormat() throws Exception { // the block size assertEquals(1, splits.length, "We got more than one splits!"); InputSplit split = splits[0]; - assertEquals( - CombineFileSplit.class, split.getClass(), "It should be CombineFileSplit"); + assertEquals(CombineFileSplit.class, split.getClass(), + "It should be CombineFileSplit"); // check each split BitSet bits = new BitSet(length); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java index c40a2dbaa93f7..c7abfb1968129 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java @@ -100,8 +100,8 @@ public void testFormat() throws Exception { // the block size assertEquals(1, splits.length, "We got more than one splits!"); InputSplit split = splits[0]; - assertEquals( - CombineFileSplit.class, split.getClass(), "It should be CombineFileSplit"); + assertEquals(CombineFileSplit.class, split.getClass(), + "It should be CombineFileSplit"); // check the split BitSet bits = new BitSet(length); @@ -242,11 +242,11 @@ public void testGzip() throws IOException { private static void testResults(List results, String[] first, String[] second) { for (int i = 0; i < first.length; i++) { - assertEquals("splits[0]["+i+"]", first[i], results.get(i).toString()); + assertEquals(first[i], 
results.get(i).toString(), "splits[0][" + i + "]"); } for (int i = 0; i < second.length; i++) { int j = i + first.length; - assertEquals("splits[0]["+j+"]", second[i], results.get(j).toString()); + assertEquals(second[i], results.get(j).toString(), "splits[0][" + j + "]"); } } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java index aa693d75c48ec..b80527d1159cc 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java @@ -26,7 +26,7 @@ import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.junit.Ignore; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -34,7 +34,7 @@ * check for the job submission options of * -libjars -files -archives */ -@Ignore +@Disabled public class TestCommandLineJobSubmission { // Input output paths for this.. // these are all dummy and does not test diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java index 02be9b9a50283..a6663d310b5db 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java @@ -43,7 +43,9 @@ import java.util.List; import java.util.zip.Inflater; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test class for concatenated {@link CompressionInputStream}. 
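The TestClientServiceDelegate diff earlier in this patch is the one structural (rather than purely mechanical) conversion: JUnit 4's @RunWith(Parameterized.class) plus a parameterized constructor becomes JUnit 5's @ParameterizedTest driven by a @MethodSource factory, with the old constructor body moved into an init method that every test invokes first. A hedged, self-contained sketch of that idiom; the class, method names and flag are illustrative, not code from this patch:

import java.util.Arrays;
import java.util.Collection;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import static org.junit.jupiter.api.Assertions.assertEquals;

// Illustrative only: mirrors the Parameterized-runner conversion above.
class ParameterizedStyleExample {

  private boolean reachable;

  // Replaces the JUnit 4 @Parameters factory; one Object[] per invocation.
  public static Collection<Object[]> data() {
    return Arrays.asList(new Object[][]{{true}, {false}});
  }

  // Replaces the JUnit 4 constructor; called explicitly by each test.
  private void initExample(boolean pReachable) {
    this.reachable = pReachable;
  }

  @ParameterizedTest
  @MethodSource("data")
  void runsOncePerDataEntry(boolean pReachable) {
    initExample(pReachable);
    // Runs twice, once for each row returned by data().
    assertEquals(pReachable, reachable, "init should store the flag");
  }
}
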
@@ -212,15 +214,12 @@ public void testGzip() throws IOException { List results = readSplit(format, splits[0], jobConf); assertEquals(6, results.size(), "splits[0] num lines"); - assertEquals("splits[0][5]", "member #3", - results.get(5).toString()); + assertEquals("member #3", results.get(5).toString(), "splits[0][5]"); results = readSplit(format, splits[1], jobConf); assertEquals(2, results.size(), "splits[1] num lines"); - assertEquals("splits[1][0]", "this is a test", - results.get(0).toString()); - assertEquals("splits[1][1]", "of gzip", - results.get(1).toString()); + assertEquals("this is a test", results.get(0).toString(), "splits[1][0]"); + assertEquals("of gzip", results.get(1).toString(), "splits[1][1]"); } /** @@ -264,14 +263,12 @@ public void testPrototypeInflaterGzip() throws IOException { } if ((flags & 0x08) != 0) { // FNAME while ((numBytesRead = in.read()) != 0) { - assertFalse( - numBytesRead == -1, "unexpected end-of-file while reading filename"); + assertFalse(numBytesRead == -1, "unexpected end-of-file while reading filename"); } } if ((flags & 0x10) != 0) { // FCOMMENT while ((numBytesRead = in.read()) != 0) { - assertFalse( - numBytesRead == -1, "unexpected end-of-file while reading comment"); + assertFalse(numBytesRead == -1, "unexpected end-of-file while reading comment"); } } if ((flags & 0xe0) != 0) { // reserved @@ -320,9 +317,8 @@ public void testBuiltInGzipDecompressor() throws IOException { localFs.delete(workDir, true); // Don't use native libs for this test ZlibFactory.setNativeZlibLoaded(false); - assertEquals( - org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.class -, gzip.getDecompressorType(), "[non-native (Java) codec]"); + assertEquals(org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.class, + gzip.getDecompressorType(), "[non-native (Java) codec]"); System.out.println(COLOR_BR_YELLOW + "testBuiltInGzipDecompressor() using" + " non-native (Java Inflater) Decompressor (" + gzip.getDecompressorType() + ")" + COLOR_NORMAL); @@ -360,10 +356,10 @@ public void testBuiltInGzipDecompressor() throws IOException { totalBytes += numBytes; } in.close(); - assertEquals( - 5346, totalBytes, "total uncompressed bytes in concatenated test file"); - assertEquals( - 84, lineNum, "total uncompressed lines in concatenated test file"); + assertEquals(5346, totalBytes, + "total uncompressed bytes in concatenated test file"); + assertEquals(84, lineNum, + "total uncompressed lines in concatenated test file"); ZlibFactory.loadNativeZLib(); // test GzipZlibDecompressor (native), just to be sure @@ -453,21 +449,17 @@ private static void doSingleGzipBufferSize(JobConf jConf) throws IOException { List results = readSplit(format, splits[0], jConf); assertEquals(84, results.size(), "splits[0] length (num lines)"); - assertEquals("splits[0][0]", - "Call me Ishmael. Some years ago--never mind how long precisely--having", - results.get(0).toString()); - assertEquals("splits[0][42]", - "Tell me, does the magnetic virtue of the needles of the compasses of", - results.get(42).toString()); + assertEquals("Call me Ishmael. Some years ago--never mind how long precisely--having", + results.get(0).toString(), "splits[0][0]"); + assertEquals("Tell me, does the magnetic virtue of the needles of the compasses of", + results.get(42).toString(), "splits[0][42]"); results = readSplit(format, splits[1], jConf); assertEquals(84, results.size(), "splits[1] length (num lines)"); - assertEquals("splits[1][0]", - "Call me Ishmael. 
Some years ago--never mind how long precisely--having", - results.get(0).toString()); - assertEquals("splits[1][42]", - "Tell me, does the magnetic virtue of the needles of the compasses of", - results.get(42).toString()); + assertEquals("Call me Ishmael. Some years ago--never mind how long precisely--having", + results.get(0).toString(), "splits[1][0]"); + assertEquals("Tell me, does the magnetic virtue of the needles of the compasses of", + results.get(42).toString(), "splits[1][42]"); } /** @@ -510,15 +502,12 @@ public void testBzip2() throws IOException { List results = readSplit(format, splits[0], jobConf); assertEquals(6, results.size(), "splits[0] num lines"); - assertEquals("splits[0][5]", "member #3", - results.get(5).toString()); + assertEquals("member #3", results.get(5).toString(), "splits[0][5]"); results = readSplit(format, splits[1], jobConf); assertEquals(2, results.size(), "splits[1] num lines"); - assertEquals("splits[1][0]", "this is a test", - results.get(0).toString()); - assertEquals("splits[1][1]", "of bzip2", - results.get(1).toString()); + assertEquals("this is a test", results.get(0).toString(), "splits[1][0]"); + assertEquals("of bzip2", results.get(1).toString(), "splits[1][1]"); } /** @@ -658,22 +647,18 @@ private static void doSingleBzip2BufferSize(JobConf jConf) // testConcatThenCompress (single) List results = readSplit(format, splits[0], jConf); assertEquals(84, results.size(), "splits[0] length (num lines)"); - assertEquals("splits[0][0]", - "Call me Ishmael. Some years ago--never mind how long precisely--having", - results.get(0).toString()); - assertEquals("splits[0][42]", - "Tell me, does the magnetic virtue of the needles of the compasses of", - results.get(42).toString()); + assertEquals("Call me Ishmael. Some years ago--never mind how long precisely--having", + results.get(0).toString(), "splits[0][0]"); + assertEquals("Tell me, does the magnetic virtue of the needles of the compasses of", + results.get(42).toString(), "splits[0][42]"); // testCompressThenConcat (multi) results = readSplit(format, splits[1], jConf); assertEquals(84, results.size(), "splits[1] length (num lines)"); - assertEquals("splits[1][0]", - "Call me Ishmael. Some years ago--never mind how long precisely--having", - results.get(0).toString()); - assertEquals("splits[1][42]", - "Tell me, does the magnetic virtue of the needles of the compasses of", - results.get(42).toString()); + assertEquals("Call me Ishmael. 
Some years ago--never mind how long precisely--having", + results.get(0).toString(), "splits[1][0]"); + assertEquals("Tell me, does the magnetic virtue of the needles of the compasses of", + results.get(42).toString(), "splits[1][42]"); } private static String unquote(String in) { @@ -705,7 +690,7 @@ private static String unquote(String in) { * @param args * @throws Exception */ - public static void main(String[] args) throws Exception { + /*public static void main(String[] args) throws Exception { for(String arg: args) { System.out.println("Working on " + arg); LineReader reader = makeStream(unquote(arg)); @@ -717,5 +702,5 @@ public static void main(String[] args) throws Exception { } reader.close(); } - } + }*/ } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java index b688d2cdff764..d70b2a8d4094d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java @@ -39,7 +39,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestFixedLengthInputFormat { @@ -227,11 +228,11 @@ public void testGzipWithTwoInputs() throws IOException { } List results = readSplit(format, splits[0], job); assertEquals(10, results.size(), "splits[0] length"); - assertEquals("splits[0][5]", "six ", results.get(5)); + assertEquals("six ", results.get(5), "splits[0][5]"); results = readSplit(format, splits[1], job); assertEquals(10, results.size(), "splits[1] length"); - assertEquals("splits[1][0]", "ten ", results.get(0)); - assertEquals("splits[1][1]", "nine ", results.get(1)); + assertEquals("ten ", results.get(0), "splits[1][0]"); + assertEquals("nine ", results.get(1), "splits[1][1]"); } // Create a file containing fixed length records with random data @@ -338,26 +339,26 @@ private void runRandomTests(CompressionCodec codec) throws IOException { RecordReader reader = format.getRecordReader(split, job, voidReporter); Class clazz = reader.getClass(); - assertEquals( - FixedLengthRecordReader.class, clazz, "RecordReader class should be FixedLengthRecordReader:"); + assertEquals(FixedLengthRecordReader.class, clazz, + "RecordReader class should be FixedLengthRecordReader:"); // Plow through the records in this split while (reader.next(key, value)) { - assertEquals((long)(recordNumber*recordLength) -, key.get(), "Checking key"); + assertEquals((long)(recordNumber*recordLength), + key.get(), "Checking key"); String valueString = new String(value.getBytes(), 0, value.getLength()); - assertEquals(recordLength -, value.getLength(), "Checking record length:"); - assertTrue( - recordNumber < totalRecords, "Checking for more records than expected:"); + assertEquals(recordLength, + value.getLength(), "Checking record length:"); + assertTrue(recordNumber < totalRecords, + "Checking for more records than expected:"); String origRecord = recordList.get(recordNumber); assertEquals(origRecord, valueString, 
"Checking record content:"); recordNumber++; } reader.close(); } - assertEquals( - recordList.size(), recordNumber, "Total original records should be total read records:"); + assertEquals(recordList.size(), recordNumber, + "Total original records should be total read records:"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java index 5446dbbb24c43..55502f01a95f5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestIFile.java @@ -28,7 +28,8 @@ import org.apache.hadoop.io.compress.GzipCodec; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + public class TestIFile { @Test diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java index 0eadfe08740df..532020d98981c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java @@ -58,8 +58,8 @@ public void map(LongWritable key, Text value, StringTokenizer st = new StringTokenizer(value.toString()); while (st.hasMoreTokens()) { String token = st.nextToken(); - assertTrue( - token.equals("a") || token.equals("b"), "Invalid token; expected 'a' or 'b', got " + token); + assertTrue(token.equals("a") || token.equals("b"), + "Invalid token; expected 'a' or 'b', got " + token); output.collect(token, 1L); } } @@ -124,8 +124,8 @@ public void testMapReduceJob() throws Exception { String inputFileContents = FileUtils.readFileToString(new File(INPUT_FILE.toUri().getPath())); - assertTrue( - inputFileContents.equals("b a\n"), "Input file contents not as expected; contents are '" + assertTrue(inputFileContents.equals("b a\n"), + "Input file contents not as expected; contents are '" + inputFileContents + "', expected \"b a\n\" "); JobClient.runJob(conf); @@ -137,13 +137,12 @@ public void testMapReduceJob() throws Exception { try (InputStream is = fs.open(outputFiles[0])) { String reduceOutput = org.apache.commons.io.IOUtils.toString(is, StandardCharsets.UTF_8); String[] lines = reduceOutput.split("\n"); - assertEquals("Unexpected output; received output '" + reduceOutput + "'", - "a\t1", lines[0]); - assertEquals("Unexpected output; received output '" + reduceOutput + "'", - "b\t1", lines[1]); - assertEquals( - 2 -, lines.length, "Reduce output has extra lines; output is '" + reduceOutput + "'"); + assertEquals("a\t1", lines[0], + "Unexpected output; received output '" + reduceOutput + "'"); + assertEquals("b\t1", lines[1], + "Unexpected output; received output '" + reduceOutput + "'"); + assertEquals(2, lines.length, + "Reduce output has extra lines; output is '" + reduceOutput + "'"); } } diff 
--git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java index dde9a87bc69b6..7ec53cceab59c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java @@ -36,7 +36,9 @@ import org.slf4j.LoggerFactory; import org.slf4j.Logger; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * A JUnit test to test Map-Reduce job cleanup. @@ -169,13 +171,14 @@ private void testSuccessfulJob(String filename, LOG.info("Job finished : " + job.isComplete()); Path testFile = new Path(outDir, filename); - assertTrue( - fileSys.exists(testFile), "Done file \"" + testFile + "\" missing for job " + id); + assertTrue(fileSys.exists(testFile), + "Done file \"" + testFile + "\" missing for job " + id); // check if the files from the missing set exists for (String ex : exclude) { Path file = new Path(outDir, ex); - assertFalse(fileSys.exists(file), "File " + file + " should not be present for successful job " + assertFalse(fileSys.exists(file), + "File " + file + " should not be present for successful job " + id); } } @@ -207,8 +210,8 @@ private void testFailedJob(String fileName, // check if the files from the missing set exists for (String ex : exclude) { Path file = new Path(outDir, ex); - assertFalse(fileSys.exists(file), "File " + file + " should not be present for failed job " - + id); + assertFalse(fileSys.exists(file), + "File " + file + " should not be present for failed job " + id); } } @@ -246,15 +249,15 @@ private void testKilledJob(String fileName, if (fileName != null) { Path testFile = new Path(outDir, fileName); - assertTrue( - fileSys.exists(testFile), "File " + testFile + " missing for job " + id); + assertTrue(fileSys.exists(testFile), + "File " + testFile + " missing for job " + id); } // check if the files from the missing set exists for (String ex : exclude) { Path file = new Path(outDir, ex); - assertFalse(fileSys.exists(file), "File " + file + " should not be present for killed job " - + id); + assertFalse(fileSys.exists(file), + "File " + file + " should not be present for killed job " + id); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobClients.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobClients.java index a6c846992f41e..8c9d421378720 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobClients.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobClients.java @@ -20,8 +20,9 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; import static 
org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.isA; import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.mock; @@ -40,7 +41,6 @@ import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.TaskReport; import org.apache.hadoop.mapreduce.TaskType; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @SuppressWarnings("deprecation") @@ -189,7 +189,7 @@ public void testShowJob() throws Exception { client.displayJobList(new JobStatus[] {mockJobStatus}, new PrintWriter(out)); String commandLineOutput = out.toString(); System.out.println(commandLineOutput); - Assertions.assertTrue(commandLineOutput.contains("Total jobs:1")); + assertTrue(commandLineOutput.contains("Total jobs:1")); verify(mockJobStatus, atLeastOnce()).getJobID(); verify(mockJobStatus).getState(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCounters.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCounters.java index bb034d62a85c0..aad7367329559 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCounters.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCounters.java @@ -581,9 +581,9 @@ private long getTaskCounterUsage (JobClient client, JobID id, int numReports, } else if (TaskType.REDUCE.equals(type)) { reports = client.getReduceTaskReports(id); } - - assertNotNull(reports, "No reports found for task type '" + type.name() - + "' in job " + id); + + assertNotNull(reports, "No reports found for task type '" + type.name() + + "' in job " + id); // make sure that the total number of reports match the expected assertEquals(numReports, reports.length, "Mismatch in task id"); @@ -708,11 +708,11 @@ public void testHeapUsageCounter() throws Exception { System.out.println("Job2 (high memory job) reduce task heap usage: " + highMemJobReduceHeapUsage); - assertTrue( - lowMemJobMapHeapUsage < highMemJobMapHeapUsage, "Incorrect map heap usage reported by the map task"); + assertTrue(lowMemJobMapHeapUsage < highMemJobMapHeapUsage, + "Incorrect map heap usage reported by the map task"); - assertTrue( - lowMemJobReduceHeapUsage < highMemJobReduceHeapUsage, "Incorrect reduce heap usage reported by the reduce task"); + assertTrue(lowMemJobReduceHeapUsage < highMemJobReduceHeapUsage, + "Incorrect reduce heap usage reported by the reduce task"); } finally { // shutdown the mr cluster mrCluster.shutdown(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java index 680bceeafee3c..65cc591a0f17d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestLocalJobSubmission.java @@ -37,16 +37,16 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.ToolRunner; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeAll; -import org.junit.Rule; import org.junit.jupiter.api.Test; -import org.junit.rules.TestName; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** * check for the job submission options of @@ -58,8 +58,6 @@ public class TestLocalJobSubmission { private static File testRootDir; - @Rule - public TestName unitTestName = new TestName(); private File unitTestDir; private Path jarPath; private Configuration config; @@ -72,8 +70,8 @@ public static void setupClass() throws Exception { } @BeforeEach - public void setup() throws IOException { - unitTestDir = new File(testRootDir, unitTestName.getMethodName()); + public void setup(TestInfo testInfo) throws IOException { + unitTestDir = new File(testRootDir, testInfo.getDisplayName()); unitTestDir.mkdirs(); config = createConfig(); jarPath = makeJar(new Path(unitTestDir.getAbsolutePath(), "test.jar")); @@ -140,8 +138,8 @@ public void testLocalJobEncryptedIntermediateData() throws IOException { (SpillCallBackPathsFinder) IntermediateEncryptedStream .setSpillCBInjector(new SpillCallBackPathsFinder()); res = ToolRunner.run(config, new SleepJob(), args); - Assertions.assertTrue( - spillInjector.getEncryptedSpilledFiles().size() > 0, "No spill occurred"); + assertTrue(spillInjector.getEncryptedSpilledFiles().size() > 0, + "No spill occurred"); } catch (Exception e) { LOG.error("Job failed with {}", e.getLocalizedMessage(), e); fail("Job failed"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java index 844708b62d1c8..e2e7dc20476e4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java @@ -94,8 +94,8 @@ public void testLocality() throws Exception { blockLocs[0].equals(splitLocs[1]))); } - assertEquals( - 1, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0), "Expected value of " + FileInputFormat.NUM_INPUT_FILES); + assertEquals(1, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0), + "Expected value of " + FileInputFormat.NUM_INPUT_FILES); } private void createInputs(FileSystem fs, Path inDir, String fileName) @@ -135,8 +135,8 @@ public void testNumInputs() throws Exception { inFormat.configure(job); InputSplit[] splits = inFormat.getSplits(job, 1); - assertEquals( - numFiles, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0), "Expected value of " + FileInputFormat.NUM_INPUT_FILES); + assertEquals(numFiles, job.getLong(FileInputFormat.NUM_INPUT_FILES, 0), + "Expected value of " + 
FileInputFormat.NUM_INPUT_FILES); } final Path root = new Path("/TestFileInputFormat"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java index ebab173fe7002..36afd43fb082c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java @@ -31,8 +31,9 @@ import org.apache.hadoop.util.Tool; import org.junit.jupiter.api.BeforeAll; -import org.junit.Ignore; -@Ignore +import org.junit.jupiter.api.Disabled; + +@Disabled public class TestMRCJCJobClient extends TestMRJobClient { @BeforeAll diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java index 1c59b5e32a9de..c4b5c42b64a7b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobConf.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.mapred; -import org.junit.Ignore; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import java.io.File; import java.net.URLClassLoader; @@ -29,9 +29,11 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.util.ClassUtil; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.*; -@Ignore +@Disabled public class TestMRCJCJobConf { private static final String JAR_RELATIVE_PATH = "build/test/mapred/testjar/testjob.jar"; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMROpportunisticMaps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMROpportunisticMaps.java index 2cd81586789b0..7bd3c241bbd29 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMROpportunisticMaps.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMROpportunisticMaps.java @@ -35,7 +35,7 @@ import java.io.OutputStreamWriter; import java.io.Writer; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * Simple MapReduce to test ability of the MRAppMaster to request and use diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java index b7dfc04115418..323d429ff5f88 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java @@ -18,6 +18,8 @@ package org.apache.hadoop.mapred; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; @@ -56,7 +58,6 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineWriterImpl; import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter; import org.apache.hadoop.yarn.util.timeline.TimelineUtils; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -85,8 +86,8 @@ public void testTimelineServiceStartInMiniCluster() throws Exception { cluster.start(); //verify that the timeline service is not started. - Assertions.assertNull( - cluster.getApplicationHistoryServer(), "Timeline Service should not have been started"); + assertNull(cluster.getApplicationHistoryServer(), + "Timeline Service should not have been started"); } finally { if(cluster != null) { @@ -103,8 +104,8 @@ public void testTimelineServiceStartInMiniCluster() throws Exception { cluster.start(); //verify that the timeline service is not started. - Assertions.assertNull( - cluster.getApplicationHistoryServer(), "Timeline Service should not have been started"); + assertNull(cluster.getApplicationHistoryServer(), + "Timeline Service should not have been started"); } finally { if(cluster != null) { @@ -135,33 +136,31 @@ public void testMRTimelineEventHandling() throws Exception { Path outDir = new Path(localPathRoot, "output"); RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir); - Assertions.assertEquals(JobStatus.SUCCEEDED, - job.getJobStatus().getState().getValue()); + assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue()); TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, - null, null, null, null, null, null, null); - Assertions.assertEquals(1, entities.getEntities().size()); + null, null, null, null, null, null, null); + assertEquals(1, entities.getEntities().size()); TimelineEntity tEntity = entities.getEntities().get(0); - Assertions.assertEquals(job.getID().toString(), tEntity.getEntityId()); - Assertions.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType()); - Assertions.assertEquals(EventType.AM_STARTED.toString(), - tEntity.getEvents().get(tEntity.getEvents().size() - 1) - .getEventType()); - Assertions.assertEquals(EventType.JOB_FINISHED.toString(), - tEntity.getEvents().get(0).getEventType()); + assertEquals(job.getID().toString(), tEntity.getEntityId()); + assertEquals("MAPREDUCE_JOB", tEntity.getEntityType()); + assertEquals(EventType.AM_STARTED.toString(), + tEntity.getEvents().get(tEntity.getEvents().size() - 1) + .getEventType()); + assertEquals(EventType.JOB_FINISHED.toString(), + tEntity.getEvents().get(0).getEventType()); job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir); - 
Assertions.assertEquals(JobStatus.FAILED, - job.getJobStatus().getState().getValue()); + assertEquals(JobStatus.FAILED, job.getJobStatus().getState().getValue()); entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null); - Assertions.assertEquals(2, entities.getEntities().size()); + assertEquals(2, entities.getEntities().size()); tEntity = entities.getEntities().get(0); - Assertions.assertEquals(job.getID().toString(), tEntity.getEntityId()); - Assertions.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType()); - Assertions.assertEquals(EventType.AM_STARTED.toString(), - tEntity.getEvents().get(tEntity.getEvents().size() - 1) - .getEventType()); - Assertions.assertEquals(EventType.JOB_FAILED.toString(), + assertEquals(job.getID().toString(), tEntity.getEntityId()); + assertEquals("MAPREDUCE_JOB", tEntity.getEntityType()); + assertEquals(EventType.AM_STARTED.toString(), + tEntity.getEvents().get(tEntity.getEvents().size() - 1) + .getEventType()); + assertEquals(EventType.JOB_FAILED.toString(), tEntity.getEvents().get(0).getEventType()); } finally { if (cluster != null) { @@ -221,7 +220,7 @@ public void testMRNewTimelineServiceEventHandling() throws Exception { UtilsForTests.createConfigValue(101 * 1024)); RunningJob job = UtilsForTests.runJobSucceed(successConf, inDir, outDir); - Assertions.assertEquals(JobStatus.SUCCEEDED, + assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue()); YarnClient yarnClient = YarnClient.createYarnClient(); @@ -232,7 +231,7 @@ public void testMRNewTimelineServiceEventHandling() throws Exception { ApplicationId firstAppId = null; List apps = yarnClient.getApplications(appStates); - Assertions.assertEquals(apps.size(), 1); + assertEquals(apps.size(), 1); ApplicationReport appReport = apps.get(0); firstAppId = appReport.getApplicationId(); UtilsForTests.waitForAppFinished(job, cluster); @@ -240,11 +239,11 @@ public void testMRNewTimelineServiceEventHandling() throws Exception { LOG.info("Run 2nd job which should be failed."); job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir); - Assertions.assertEquals(JobStatus.FAILED, + assertEquals(JobStatus.FAILED, job.getJobStatus().getState().getValue()); apps = yarnClient.getApplications(appStates); - Assertions.assertEquals(apps.size(), 2); + assertEquals(apps.size(), 2); appReport = apps.get(0).getApplicationId().equals(firstAppId) ? 
apps.get(0) : apps.get(1); @@ -270,7 +269,7 @@ private void checkNewTimelineEvent(ApplicationId appId, File tmpRootFolder = new File(tmpRoot); - Assertions.assertTrue(tmpRootFolder.isDirectory()); + assertTrue(tmpRootFolder.isDirectory()); String basePath = tmpRoot + YarnConfiguration.DEFAULT_RM_CLUSTER_ID + File.separator + UserGroupInformation.getCurrentUser().getShortUserName() + @@ -283,9 +282,8 @@ private void checkNewTimelineEvent(ApplicationId appId, basePath + File.separator + "MAPREDUCE_JOB" + File.separator; File entityFolder = new File(outputDirJob); - Assertions.assertTrue( - entityFolder.isDirectory(), "Job output directory: " + outputDirJob + - " does not exist."); + assertTrue(entityFolder.isDirectory(), + "Job output directory: " + outputDirJob + " does not exist."); // check for job event file String jobEventFileName = appId.toString().replaceAll("application", "job") @@ -293,9 +291,8 @@ private void checkNewTimelineEvent(ApplicationId appId, String jobEventFilePath = outputDirJob + jobEventFileName; File jobEventFile = new File(jobEventFilePath); - Assertions.assertTrue( - jobEventFile.exists(), "jobEventFilePath: " + jobEventFilePath + - " does not exist."); + assertTrue(jobEventFile.exists(), + "jobEventFilePath: " + jobEventFilePath + " does not exist."); verifyEntity(jobEventFile, EventType.JOB_FINISHED.name(), true, false, null, false); Set cfgsToCheck = Sets.newHashSet("dummy_conf1", "dummy_conf2", @@ -306,10 +303,8 @@ private void checkNewTimelineEvent(ApplicationId appId, String outputAppDir = basePath + File.separator + "YARN_APPLICATION" + File.separator; entityFolder = new File(outputAppDir); - Assertions.assertTrue( - - entityFolder.isDirectory(), "Job output directory: " + outputAppDir + - " does not exist."); + assertTrue(entityFolder.isDirectory(), + "Job output directory: " + outputAppDir + " does not exist."); // check for job event file String appEventFileName = appId.toString() @@ -317,9 +312,8 @@ private void checkNewTimelineEvent(ApplicationId appId, String appEventFilePath = outputAppDir + appEventFileName; File appEventFile = new File(appEventFilePath); - Assertions.assertTrue( - - appEventFile.exists(), "appEventFilePath: " + appEventFilePath + + assertTrue(appEventFile.exists(), + "appEventFilePath: " + appEventFilePath + " does not exist."); verifyEntity(appEventFile, null, true, false, null, false); verifyEntity(appEventFile, null, false, true, cfgsToCheck, false); @@ -328,8 +322,8 @@ private void checkNewTimelineEvent(ApplicationId appId, String outputDirTask = basePath + File.separator + "MAPREDUCE_TASK" + File.separator; File taskFolder = new File(outputDirTask); - Assertions.assertTrue( - taskFolder.isDirectory(), "Task output directory: " + outputDirTask + + assertTrue(taskFolder.isDirectory(), + "Task output directory: " + outputDirTask + " does not exist."); String taskEventFileName = @@ -339,9 +333,8 @@ private void checkNewTimelineEvent(ApplicationId appId, String taskEventFilePath = outputDirTask + taskEventFileName; File taskEventFile = new File(taskEventFilePath); - Assertions.assertTrue( - taskEventFile.exists(), "taskEventFileName: " + taskEventFilePath + - " does not exist."); + assertTrue(taskEventFile.exists(), + "taskEventFileName: " + taskEventFilePath + " does not exist."); verifyEntity(taskEventFile, EventType.TASK_FINISHED.name(), true, false, null, true); @@ -349,7 +342,8 @@ private void checkNewTimelineEvent(ApplicationId appId, String outputDirTaskAttempt = basePath + File.separator + "MAPREDUCE_TASK_ATTEMPT" + 
File.separator; File taskAttemptFolder = new File(outputDirTaskAttempt); - Assertions.assertTrue(taskAttemptFolder.isDirectory(), "TaskAttempt output directory: " + outputDirTaskAttempt + + assertTrue(taskAttemptFolder.isDirectory(), + "TaskAttempt output directory: " + outputDirTaskAttempt + " does not exist."); String taskAttemptEventFileName = appId.toString().replaceAll( @@ -359,7 +353,8 @@ private void checkNewTimelineEvent(ApplicationId appId, String taskAttemptEventFilePath = outputDirTaskAttempt + taskAttemptEventFileName; File taskAttemptEventFile = new File(taskAttemptEventFilePath); - Assertions.assertTrue(taskAttemptEventFile.exists(), "taskAttemptEventFileName: " + taskAttemptEventFilePath + + assertTrue(taskAttemptEventFile.exists(), + "taskAttemptEventFileName: " + taskAttemptEventFilePath + " does not exist."); verifyEntity(taskAttemptEventFile, EventType.MAP_ATTEMPT_FINISHED.name(), true, false, null, true); @@ -397,13 +392,13 @@ private void verifyEntity(File entityFile, String eventId, LOG.info("strLine.trim()= " + strLine.trim()); if (checkIdPrefix) { - Assertions.assertTrue( + assertTrue( entity.getIdPrefix() > 0, "Entity ID prefix expected to be > 0"); if (idPrefix == -1) { idPrefix = entity.getIdPrefix(); } else { - Assertions.assertEquals( - idPrefix, entity.getIdPrefix(), "Entity ID prefix should be same across " + + assertEquals(idPrefix, entity.getIdPrefix(), + "Entity ID prefix should be same across " + "each publish of same entity"); } } @@ -492,21 +487,21 @@ public void testMapreduceJobTimelineServiceEnabled() RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir); - Assertions.assertEquals(JobStatus.SUCCEEDED, + assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue()); TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null); - Assertions.assertEquals(0, entities.getEntities().size()); + assertEquals(0, entities.getEntities().size()); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true); job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir); - Assertions.assertEquals(JobStatus.SUCCEEDED, + assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue()); entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null); - Assertions.assertEquals(1, entities.getEntities().size()); + assertEquals(1, entities.getEntities().size()); TimelineEntity tEntity = entities.getEntities().get(0); - Assertions.assertEquals(job.getID().toString(), tEntity.getEntityId()); + assertEquals(job.getID().toString(), tEntity.getEntityId()); } finally { if (cluster != null) { cluster.stop(); @@ -532,21 +527,21 @@ public void testMapreduceJobTimelineServiceEnabled() conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false); RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir); - Assertions.assertEquals(JobStatus.SUCCEEDED, + assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue()); TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null); - Assertions.assertEquals(0, entities.getEntities().size()); + assertEquals(0, entities.getEntities().size()); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true); job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir); - Assertions.assertEquals(JobStatus.SUCCEEDED, + assertEquals(JobStatus.SUCCEEDED, 
job.getJobStatus().getState().getValue()); entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null); - Assertions.assertEquals(1, entities.getEntities().size()); + assertEquals(1, entities.getEntities().size()); TimelineEntity tEntity = entities.getEntities().get(0); - Assertions.assertEquals(job.getID().toString(), tEntity.getEntityId()); + assertEquals(job.getID().toString(), tEntity.getEntityId()); } finally { if (cluster != null) { cluster.stop(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java index 9f157b6b97fe4..2a7583d0872c4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java @@ -193,8 +193,8 @@ public void setProgress(float progress) { return; } // validate map task progress when the map task is in map phase - assertTrue( - Math.abs(mapTaskProgress - ((float)recordNum/3)) < 0.001, "Map progress is not the expected value."); + assertTrue(Math.abs(mapTaskProgress - ((float)recordNum/3)) < 0.001, + "Map progress is not the expected value."); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java index bea1c8e6ab438..ef6a5c765f4ce 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java @@ -312,9 +312,8 @@ public void reduce(WritableComparable key, Iterator values, assertTrue(fs.exists(input), "reduce input exists " + input); SequenceFile.Reader rdr = new SequenceFile.Reader(fs, input, conf); - assertEquals( - compressInput, - rdr.isCompressed(), "is reduce input compressed " + input); + assertEquals(compressInput, rdr.isCompressed(), + "is reduce input compressed " + input); rdr.close(); } } @@ -422,9 +421,8 @@ private void checkCompression(boolean compressMapOutputs, assertTrue(fs.exists(output), "reduce output exists " + output); SequenceFile.Reader rdr = new SequenceFile.Reader(fs, output, conf); - assertEquals( - redCompression != CompressionType.NONE, - rdr.isCompressed(), "is reduce output compressed " + output); + assertEquals(redCompression != CompressionType.NONE, + rdr.isCompressed(), "is reduce output compressed " + output); rdr.close(); } finally { fs.delete(testdir, true); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java index 35d92cd0fad7b..46f042f87abee 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRBringup.java @@ -20,12 +20,13 @@ import java.io.IOException; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster; +import static org.junit.jupiter.api.Assertions.assertNull; + /** * A Unit-test to test bringup and shutdown of Mini Map-Reduce Cluster. */ @@ -50,7 +51,7 @@ public void testMiniMRYarnClusterWithoutJHS() throws IOException { mr = new MiniMRYarnCluster("testMiniMRYarnClusterWithoutJHS"); mr.init(conf); mr.start(); - Assertions.assertEquals(null, mr.getHistoryServer()); + assertNull(mr.getHistoryServer()); } finally { if (mr != null) { mr.stop(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java index 2b50eb6c62230..35398207f3abe 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java @@ -170,19 +170,18 @@ public void configure(JobConf job) { boolean oldConfigs = job.getBoolean(OLD_CONFIGS, false); if (oldConfigs) { String javaOpts = job.get(JobConf.MAPRED_TASK_JAVA_OPTS); - assertNotNull( - javaOpts, JobConf.MAPRED_TASK_JAVA_OPTS + " is null!"); + assertNotNull(javaOpts, + JobConf.MAPRED_TASK_JAVA_OPTS + " is null!"); assertThat(javaOpts) .withFailMessage(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " + javaOpts) .isEqualTo(TASK_OPTS_VAL); } else { String mapJavaOpts = job.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS); - assertNotNull( - mapJavaOpts, JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!"); - assertEquals( - mapJavaOpts, MAP_OPTS_VAL, JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " has value of: " + - mapJavaOpts); + assertNotNull(mapJavaOpts, + JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!"); + assertEquals(mapJavaOpts, MAP_OPTS_VAL, + JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " has value of: " + mapJavaOpts); } // check if X=y works for an already existing parameter @@ -193,8 +192,7 @@ public void configure(JobConf job) { checkEnv("NEW_PATH", File.pathSeparator + "/tmp", "noappend"); String jobLocalDir = job.get(MRJobConfig.JOB_LOCAL_DIR); - assertNotNull( - jobLocalDir, MRJobConfig.JOB_LOCAL_DIR + " is null"); + assertNotNull(jobLocalDir, MRJobConfig.JOB_LOCAL_DIR + " is null"); } public void map(WritableComparable key, Writable value, @@ -214,16 +212,15 @@ public void configure(JobConf job) { boolean oldConfigs = job.getBoolean(OLD_CONFIGS, false); if (oldConfigs) { String javaOpts = job.get(JobConf.MAPRED_TASK_JAVA_OPTS); - assertNotNull( - javaOpts, JobConf.MAPRED_TASK_JAVA_OPTS + " is null!"); + assertNotNull(javaOpts, JobConf.MAPRED_TASK_JAVA_OPTS + " is null!"); assertThat(javaOpts) .withFailMessage(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " + javaOpts) .isEqualTo(TASK_OPTS_VAL); } else { String reduceJavaOpts = job.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS); - assertNotNull( - reduceJavaOpts, JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!"); + assertNotNull(reduceJavaOpts, +
JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!"); assertThat(reduceJavaOpts) .withFailMessage(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " has value of: " + reduceJavaOpts) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java index 6d08b15ef9dc3..ff6658275dcb6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java @@ -32,9 +32,10 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; + /** * A JUnit test to test Mini Map-Reduce Cluster with multiple directories * and check for correct classpath @@ -175,7 +176,7 @@ public void testClassPath() throws IOException { String result; result = launchWordCount(fileSys.getUri(), jobConf, "The quick brown fox\nhas many silly\n" + "red fox sox\n", 3, 1); - Assertions.assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" + assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" + "quick\t1\nred\t1\nsilly\t1\nsox\t1\n", result); } finally { @@ -208,7 +209,7 @@ public void testExternalWritable() result = launchExternal(fileSys.getUri(), jobConf, "Dennis was here!\nDennis again!", 3, 1); - Assertions.assertEquals("Dennis again!\t1\nDennis was here!\t1\n", result); + assertEquals("Dennis again!\t1\nDennis was here!\t1\n", result); } finally { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClientCluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClientCluster.java index 9204e4f7052fe..22b9a64d18831 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClientCluster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClientCluster.java @@ -128,26 +128,25 @@ public void testRestart() throws Exception { String mrHistWebAppAddress2 = mrCluster.getConfig().get( JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS); - assertEquals(rmAddress1 -, rmAddress2, "Address before restart: " + rmAddress1 + assertEquals(rmAddress1, rmAddress2, "Address before restart: " + rmAddress1 + " is different from new address: " + rmAddress2); - assertEquals( - rmAdminAddress1, rmAdminAddress2, "Address before restart: " + rmAdminAddress1 + assertEquals(rmAdminAddress1, rmAdminAddress2, + "Address before restart: " + rmAdminAddress1 + " is different from new address: " + rmAdminAddress2); - assertEquals( - rmSchedAddress1, rmSchedAddress2, "Address before restart: " + rmSchedAddress1 + assertEquals(rmSchedAddress1, rmSchedAddress2, + "Address before restart: " + rmSchedAddress1 + " is different from new address: " + rmSchedAddress2); - assertEquals( - rmRstrackerAddress1, rmRstrackerAddress2, 
"Address before restart: " + rmRstrackerAddress1 + assertEquals(rmRstrackerAddress1, rmRstrackerAddress2, + "Address before restart: " + rmRstrackerAddress1 + " is different from new address: " + rmRstrackerAddress2); - assertEquals( - rmWebAppAddress1, rmWebAppAddress2, "Address before restart: " + rmWebAppAddress1 + assertEquals(rmWebAppAddress1, rmWebAppAddress2, + "Address before restart: " + rmWebAppAddress1 + " is different from new address: " + rmWebAppAddress2); - assertEquals(mrHistAddress1 -, mrHistAddress2, "Address before restart: " + mrHistAddress1 + assertEquals(mrHistAddress1, mrHistAddress2, + "Address before restart: " + mrHistAddress1 + " is different from new address: " + mrHistAddress2); - assertEquals( - mrHistWebAppAddress1, mrHistWebAppAddress2, "Address before restart: " + mrHistWebAppAddress1 + assertEquals(mrHistWebAppAddress1, mrHistWebAppAddress2, + "Address before restart: " + mrHistWebAppAddress1 + " is different from new address: " + mrHistWebAppAddress2); } @@ -165,14 +164,14 @@ public void testJob() throws Exception { private void validateCounters(Counters counters, long mapInputRecords, long mapOutputRecords, long reduceInputGroups, long reduceOutputRecords) { - assertEquals(mapInputRecords, counters.findCounter( - "MyCounterGroup", "MAP_INPUT_RECORDS").getValue(), "MapInputRecords"); - assertEquals(mapOutputRecords, counters.findCounter( - "MyCounterGroup", "MAP_OUTPUT_RECORDS").getValue(), "MapOutputRecords"); - assertEquals(reduceInputGroups, counters.findCounter( - "MyCounterGroup", "REDUCE_INPUT_GROUPS").getValue(), "ReduceInputGroups"); - assertEquals(reduceOutputRecords, counters - .findCounter("MyCounterGroup", "REDUCE_OUTPUT_RECORDS").getValue(), "ReduceOutputRecords"); + assertEquals(mapInputRecords, counters.findCounter("MyCounterGroup", + "MAP_INPUT_RECORDS").getValue(), "MapInputRecords"); + assertEquals(mapOutputRecords, counters.findCounter("MyCounterGroup", + "MAP_OUTPUT_RECORDS").getValue(), "MapOutputRecords"); + assertEquals(reduceInputGroups, counters.findCounter("MyCounterGroup", + "REDUCE_INPUT_GROUPS").getValue(), "ReduceInputGroups"); + assertEquals(reduceOutputRecords, counters.findCounter("MyCounterGroup", + "REDUCE_OUTPUT_RECORDS").getValue(), "ReduceOutputRecords"); } private static void createFile(Path inFile, Configuration conf) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java index 73344f7df49a2..8948d9490ea47 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRDFSCaching.java @@ -21,7 +21,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.mapred.MRCaching.TestResult; -import org.junit.Ignore; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -31,7 +31,7 @@ * A JUnit test to test caching with DFS * */ -@Ignore +@Disabled public class TestMiniMRDFSCaching { @Test diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRWithDFSWithDistinctUsers.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRWithDFSWithDistinctUsers.java index e245acb0eb417..53ed32d36ced1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRWithDFSWithDistinctUsers.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRWithDFSWithDistinctUsers.java @@ -29,10 +29,11 @@ import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; import org.apache.hadoop.security.UserGroupInformation; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; + /** * A JUnit test to test Mini Map-Reduce Cluster with Mini-DFS. */ @@ -70,7 +71,7 @@ public RunningJob run() throws IOException { }); rj.waitForCompletion(); - Assertions.assertEquals("SUCCEEDED", JobStatus.getJobRunState(rj.getJobState())); + assertEquals("SUCCEEDED", JobStatus.getJobRunState(rj.getJobState())); } @BeforeEach diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java index 116ce72b4c2cd..1d4ef0b23bcbe 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleLevelCaching.java @@ -27,7 +27,7 @@ import org.apache.hadoop.mapred.lib.IdentityReducer; import org.apache.hadoop.mapreduce.JobCounter; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; -import org.junit.Ignore; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import java.io.IOException; @@ -37,7 +37,7 @@ /** * This test checks whether the task caches are created and used properly. 
*/ -@Ignore +@Disabled public class TestMultipleLevelCaching { private static final int MAX_LEVEL = 5; final Path inDir = new Path("/cachetesting"); @@ -158,14 +158,12 @@ static void launchJobAndTestCounters(String jobName, MiniMRCluster mr, } RunningJob job = launchJob(jobConf, in, out, numMaps, jobName); Counters counters = job.getCounters(); - assertEquals( - counters.getCounter(JobCounter.OTHER_LOCAL_MAPS), otherLocalMaps, "Number of local maps"); - assertEquals( - counters.getCounter(JobCounter.DATA_LOCAL_MAPS) -, dataLocalMaps, "Number of Data-local maps"); - assertEquals( - counters.getCounter(JobCounter.RACK_LOCAL_MAPS) -, rackLocalMaps, "Number of Rack-local maps"); + assertEquals(counters.getCounter(JobCounter.OTHER_LOCAL_MAPS), + otherLocalMaps, "Number of local maps"); + assertEquals(counters.getCounter(JobCounter.DATA_LOCAL_MAPS), + dataLocalMaps, "Number of Data-local maps"); + assertEquals(counters.getCounter(JobCounter.RACK_LOCAL_MAPS), + rackLocalMaps, "Number of Rack-local maps"); mr.waitUntilIdle(); mr.shutdown(); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java index fa653697f1b5c..ee6eb8d77a550 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java @@ -19,7 +19,10 @@ package org.apache.hadoop.mapred; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java index fd464ef0778d2..4b35c57c9bde4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java @@ -19,7 +19,6 @@ package org.apache.hadoop.mapred; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; @@ -40,6 +39,12 @@ import java.util.Iterator; import java.util.Set; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + public class TestOldCombinerGrouping { private static File testRootDir = 
GenericTestUtils.getRandomizedTestDir(); @@ -169,30 +174,30 @@ public void testCombiner() throws Exception { long combinerOutputRecords = counters.getGroup( "org.apache.hadoop.mapreduce.TaskCounter"). getCounter("COMBINE_OUTPUT_RECORDS"); - Assertions.assertTrue(combinerInputRecords > 0); - Assertions.assertTrue(combinerInputRecords > combinerOutputRecords); + assertTrue(combinerInputRecords > 0); + assertTrue(combinerInputRecords > combinerOutputRecords); BufferedReader br = new BufferedReader(new FileReader( new File(out, "part-00000"))); Set output = new HashSet(); String line = br.readLine(); - Assertions.assertNotNull(line); + assertNotNull(line); output.add(line.substring(0, 1) + line.substring(4, 5)); line = br.readLine(); - Assertions.assertNotNull(line); + assertNotNull(line); output.add(line.substring(0, 1) + line.substring(4, 5)); line = br.readLine(); - Assertions.assertNull(line); + assertNull(line); br.close(); Set expected = new HashSet(); expected.add("A2"); expected.add("B5"); - Assertions.assertEquals(expected, output); + assertEquals(expected, output); } else { - Assertions.fail("Job failed"); + fail("Job failed"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java index df2ca9ec2f79d..6233a16fc73f4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestQueueConfigurationParser.java @@ -32,11 +32,12 @@ import org.w3c.dom.Document; import org.w3c.dom.Element; -import static org.junit.jupiter.api.Assertions.*; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class TestQueueConfigurationParser { /** * test xml generation diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java index be1410eb1e969..3fd71b9e30e0d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java @@ -44,10 +44,10 @@ public void testReduceFromDisk() throws Exception { Counters c = runJob(job); final long spill = c.findCounter(TaskCounter.SPILLED_RECORDS).getCounter(); final long out = c.findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getCounter(); - assertTrue( - spill >= 2 * out, "Expected all records spilled during reduce (" + spill + ")"); // all records spill at map, reduce - assertTrue( - spill >= 2 * out + (out / MAP_TASKS), "Expected intermediate merges (" + spill + ")"); // some records hit twice + assertTrue(spill >= 2 * out, + "Expected all records spilled during reduce (" + spill + ")"); // all records spill at map, reduce + assertTrue(spill >= 2 * out + (out / MAP_TASKS), + "Expected intermediate merges (" 
+ spill + ")"); // some records hit twice } /** diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java index 18fae4c61659e..3a3f2cb111cc7 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java @@ -87,8 +87,9 @@ public void testReduceFromPartialMem() throws Exception { Counters c = runJob(job); final long out = c.findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getCounter(); final long spill = c.findCounter(TaskCounter.SPILLED_RECORDS).getCounter(); - assertTrue( - spill < 2 * out, "Expected some records not spilled during reduce" + spill + ")"); // spilled map records, some records at the reduce + assertTrue(spill < 2 * out, + "Expected some records not spilled during reduce" + spill + ")"); + // spilled map records, some records at the reduce long shuffleIoErrors = c.getGroup(SHUFFLE_ERR_GRP_NAME).getCounter(Fetcher.ShuffleErrors.IO_ERROR.toString()); assertEquals(0, shuffleIoErrors); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java index 6e3a10a3ab8f6..aefbd0c67c6d2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java @@ -35,7 +35,8 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Tests the old mapred APIs with {@link Reporter#getProgress()}. 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java index 14b0abaa88f8d..98fa3333e29a2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java @@ -22,8 +22,6 @@ import java.util.ArrayList; import java.util.List; -import org.junit.jupiter.api.Assertions; - import org.apache.hadoop.mapreduce.JobStatus; import org.apache.hadoop.mapreduce.JobStatus.State; import org.apache.hadoop.yarn.api.ApplicationClientProtocol; @@ -43,7 +41,13 @@ import org.apache.hadoop.yarn.util.Records; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class TestResourceMgrDelegate { @@ -53,13 +57,13 @@ public class TestResourceMgrDelegate { */ @Test public void testGetRootQueues() throws IOException, InterruptedException { - final ApplicationClientProtocol applicationsManager = Mockito.mock(ApplicationClientProtocol.class); - GetQueueInfoResponse response = Mockito.mock(GetQueueInfoResponse.class); + final ApplicationClientProtocol applicationsManager = mock(ApplicationClientProtocol.class); + GetQueueInfoResponse response = mock(GetQueueInfoResponse.class); org.apache.hadoop.yarn.api.records.QueueInfo queueInfo = - Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class); - Mockito.when(response.getQueueInfo()).thenReturn(queueInfo); + mock(org.apache.hadoop.yarn.api.records.QueueInfo.class); + when(response.getQueueInfo()).thenReturn(queueInfo); try { - Mockito.when(applicationsManager.getQueueInfo(Mockito.any( + when(applicationsManager.getQueueInfo(any( GetQueueInfoRequest.class))).thenReturn(response); } catch (YarnException e) { throw new IOException(e); @@ -69,7 +73,7 @@ public void testGetRootQueues() throws IOException, InterruptedException { new YarnConfiguration()) { @Override protected void serviceStart() throws Exception { - Assertions.assertTrue(this.client instanceof YarnClientImpl); + assertTrue(this.client instanceof YarnClientImpl); ((YarnClientImpl) this.client).setRMClient(applicationsManager); } }; @@ -78,21 +82,22 @@ protected void serviceStart() throws Exception { ArgumentCaptor argument = ArgumentCaptor.forClass(GetQueueInfoRequest.class); try { - Mockito.verify(applicationsManager).getQueueInfo( + verify(applicationsManager).getQueueInfo( argument.capture()); } catch (YarnException e) { throw new IOException(e); } - Assertions.assertTrue( - argument.getValue().getIncludeChildQueues(), "Children of root queue not requested"); - Assertions.assertTrue( - argument.getValue().getRecursive(), "Request wasn't to recurse through children"); + assertTrue(argument.getValue().getIncludeChildQueues(), + "Children of root queue not requested"); + assertTrue(argument.getValue().getRecursive(), + "Request wasn't to recurse through children"); 
} @Test public void tesAllJobs() throws Exception { - final ApplicationClientProtocol applicationsManager = Mockito.mock(ApplicationClientProtocol.class); + final ApplicationClientProtocol applicationsManager = + mock(ApplicationClientProtocol.class); GetApplicationsResponse allApplicationsResponse = Records .newRecord(GetApplicationsResponse.class); List applications = new ArrayList(); @@ -105,45 +110,42 @@ public void tesAllJobs() throws Exception { applications.add(getApplicationReport(YarnApplicationState.FAILED, FinalApplicationStatus.FAILED)); allApplicationsResponse.setApplicationList(applications); - Mockito.when( - applicationsManager.getApplications(Mockito - .any(GetApplicationsRequest.class))).thenReturn( - allApplicationsResponse); + when(applicationsManager.getApplications(any(GetApplicationsRequest.class))) + .thenReturn(allApplicationsResponse); ResourceMgrDelegate resourceMgrDelegate = new ResourceMgrDelegate( new YarnConfiguration()) { @Override protected void serviceStart() throws Exception { - Assertions.assertTrue(this.client instanceof YarnClientImpl); + assertTrue(this.client instanceof YarnClientImpl); ((YarnClientImpl) this.client).setRMClient(applicationsManager); } }; JobStatus[] allJobs = resourceMgrDelegate.getAllJobs(); - Assertions.assertEquals(State.FAILED, allJobs[0].getState()); - Assertions.assertEquals(State.SUCCEEDED, allJobs[1].getState()); - Assertions.assertEquals(State.KILLED, allJobs[2].getState()); - Assertions.assertEquals(State.FAILED, allJobs[3].getState()); + assertEquals(State.FAILED, allJobs[0].getState()); + assertEquals(State.SUCCEEDED, allJobs[1].getState()); + assertEquals(State.KILLED, allJobs[2].getState()); + assertEquals(State.FAILED, allJobs[3].getState()); } private ApplicationReport getApplicationReport( YarnApplicationState yarnApplicationState, FinalApplicationStatus finalApplicationStatus) { - ApplicationReport appReport = Mockito.mock(ApplicationReport.class); - ApplicationResourceUsageReport appResources = Mockito - .mock(ApplicationResourceUsageReport.class); - Mockito.when(appReport.getApplicationId()).thenReturn( + ApplicationReport appReport = mock(ApplicationReport.class); + ApplicationResourceUsageReport appResources = mock(ApplicationResourceUsageReport.class); + when(appReport.getApplicationId()).thenReturn( ApplicationId.newInstance(0, 0)); - Mockito.when(appResources.getNeededResources()).thenReturn( + when(appResources.getNeededResources()).thenReturn( Records.newRecord(Resource.class)); - Mockito.when(appResources.getReservedResources()).thenReturn( + when(appResources.getReservedResources()).thenReturn( Records.newRecord(Resource.class)); - Mockito.when(appResources.getUsedResources()).thenReturn( + when(appResources.getUsedResources()).thenReturn( Records.newRecord(Resource.class)); - Mockito.when(appReport.getApplicationResourceUsageReport()).thenReturn( + when(appReport.getApplicationResourceUsageReport()).thenReturn( appResources); - Mockito.when(appReport.getYarnApplicationState()).thenReturn( + when(appReport.getYarnApplicationState()).thenReturn( yarnApplicationState); - Mockito.when(appReport.getFinalApplicationStatus()).thenReturn( + when(appReport.getFinalApplicationStatus()).thenReturn( finalApplicationStatus); return appReport; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java index bb73bf3199618..76349356e2142 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java @@ -88,14 +88,10 @@ public void testBinary() throws IOException { cmpkey.readFields(buf); buf.reset(bval.getBytes(), bval.getLength()); cmpval.readFields(buf); - assertTrue( - - cmpkey.toString().equals(tkey.toString()), "Keys don't match: " + "*" + cmpkey.toString() + ":" + - tkey.toString() + "*"); - assertTrue( - - cmpval.toString().equals(tval.toString()), "Vals don't match: " + "*" + cmpval.toString() + ":" + - tval.toString() + "*"); + assertTrue(cmpkey.toString().equals(tkey.toString()), + "Keys don't match: " + "*" + cmpkey.toString() + ":" + tkey.toString() + "*"); + assertTrue(cmpval.toString().equals(tval.toString()), + "Vals don't match: " + "*" + cmpval.toString() + ":" + tval.toString() + "*"); ++count; } } finally { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java index d99dfcafebb00..17095da8a0aad 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java @@ -122,10 +122,8 @@ public void testBinary() throws IOException { while (reader.next(iwritable, dwritable)) { sourceInt = r.nextInt(); sourceDouble = r.nextDouble(); - assertEquals( - - sourceInt, iwritable.get(), "Keys don't match: " + "*" + iwritable.get() + ":" + - sourceInt + "*"); + assertEquals(sourceInt, iwritable.get(), + "Keys don't match: " + "*" + iwritable.get() + ":" + sourceInt + "*"); assertThat(dwritable.get()).withFailMessage( "Vals don't match: " + "*" + dwritable.get() + ":" + sourceDouble + "*") @@ -149,29 +147,24 @@ public void testSequenceOutputClassDefaultsToMapRedOutputClass() job.setOutputKeyClass(FloatWritable.class); job.setOutputValueClass(BooleanWritable.class); - assertEquals( - FloatWritable.class -, SequenceFileAsBinaryOutputFormat.getSequenceFileOutputKeyClass( - job), "SequenceFileOutputKeyClass should default to ouputKeyClass"); - assertEquals( - BooleanWritable.class -, SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass( - job), "SequenceFileOutputValueClass should default to " - + "ouputValueClass"); + assertEquals(FloatWritable.class, + SequenceFileAsBinaryOutputFormat.getSequenceFileOutputKeyClass(job), + "SequenceFileOutputKeyClass should default to outputKeyClass"); + assertEquals(BooleanWritable.class, + SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job), + "SequenceFileOutputValueClass should default to outputValueClass"); SequenceFileAsBinaryOutputFormat.setSequenceFileOutputKeyClass(job, IntWritable.class );
SequenceFileAsBinaryOutputFormat.setSequenceFileOutputValueClass(job, DoubleWritable.class ); - assertEquals( - IntWritable.class -, SequenceFileAsBinaryOutputFormat.getSequenceFileOutputKeyClass( - job), "SequenceFileOutputKeyClass not updated"); - assertEquals( - DoubleWritable.class -, SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass( - job), "SequenceFileOutputValueClass not updated"); + assertEquals(IntWritable.class, + SequenceFileAsBinaryOutputFormat.getSequenceFileOutputKeyClass(job), + "SequenceFileOutputKeyClass not updated"); + assertEquals(DoubleWritable.class, + SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job), + "SequenceFileOutputValueClass not updated"); } @Test diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java index 193da0b1b85c0..f72ac99df8185 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsTextInputFormat.java @@ -94,7 +94,8 @@ public void testFormat() throws Exception { RecordReader reader = format.getRecordReader(splits[j], job, reporter); Class readerClass = reader.getClass(); - assertEquals(SequenceFileAsTextRecordReader.class, readerClass, "reader class is SequenceFileAsTextRecordReader."); + assertEquals(SequenceFileAsTextRecordReader.class, readerClass, + "reader class is SequenceFileAsTextRecordReader."); Text value = reader.createValue(); Text key = reader.createKey(); try { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java index 859a9d17db8ea..cb532b9a26f97 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java @@ -85,7 +85,8 @@ public static boolean launchJob(URI fileSys, try { assertTrue(runningJob.isComplete()); assertTrue(runningJob.isSuccessful()); - assertTrue(fs.exists(new Path("/testing/output/" + OUTPUT_FILENAME)), "Output folder not found!"); + assertTrue(fs.exists(new Path("/testing/output/" + OUTPUT_FILENAME)), + "Output folder not found!"); } catch (NullPointerException npe) { // This NPE should no more happens fail("A NPE should not have happened."); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java index 3dace91babf78..fe4952579cd02 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskCommit.java @@ -269,24 +269,24 @@ public void testCommitRequiredForReduceTask() throws Exception { public void testCommitNotRequiredForJobSetup() throws Exception { Task testTask = createDummyTask(TaskType.MAP); testTask.setJobSetupTask(); - assertFalse( - testTask.isCommitRequired(), "Job setup task should not need commit"); + assertFalse(testTask.isCommitRequired(), + "Job setup task should not need commit"); } @Test public void testCommitNotRequiredForJobCleanup() throws Exception { Task testTask = createDummyTask(TaskType.MAP); testTask.setJobCleanupTask(); - assertFalse( - testTask.isCommitRequired(), "Job cleanup task should not need commit"); + assertFalse(testTask.isCommitRequired(), + "Job cleanup task should not need commit"); } @Test public void testCommitNotRequiredForTaskCleanup() throws Exception { Task testTask = createDummyTask(TaskType.REDUCE); testTask.setTaskCleanupTask(); - assertFalse( - testTask.isCommitRequired(), "Task cleanup task should not need commit"); + assertFalse(testTask.isCommitRequired(), + "Task cleanup task should not need commit"); } private Task createDummyTask(TaskType type) throws IOException, ClassNotFoundException, diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskPerformanceSplits.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskPerformanceSplits.java index 61e0ed7fc1065..f14cf03500365 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskPerformanceSplits.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskPerformanceSplits.java @@ -19,7 +19,7 @@ package org.apache.hadoop.mapred; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestTaskPerformanceSplits { @Test diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskStatus.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskStatus.java index e519372b473e3..9a6395f3a4065 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskStatus.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTaskStatus.java @@ -19,7 +19,7 @@ import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestTaskStatus { @@ -52,25 +52,24 @@ private void checkTaskStatues(boolean isMap) { // first try to set the finish time before // start time is set. 
status.setFinishTime(currentTime); - assertEquals(0 -, status.getFinishTime(), "Finish time of the task status set without start time"); + assertEquals(0, status.getFinishTime(), + "Finish time of the task status set without start time"); // Now set the start time to right time. status.setStartTime(currentTime); - assertEquals( - currentTime, status.getStartTime(), "Start time of the task status not set correctly."); + assertEquals(currentTime, status.getStartTime(), + "Start time of the task status not set correctly."); // try setting wrong start time to task status. long wrongTime = -1; status.setStartTime(wrongTime); - assertEquals( - - currentTime, status.getStartTime(), "Start time of the task status is set to wrong negative value"); + assertEquals(currentTime, status.getStartTime(), + "Start time of the task status is set to wrong negative value"); // finally try setting wrong finish time i.e. negative value. status.setFinishTime(wrongTime); - assertEquals( - 0, status.getFinishTime(), "Finish time of task status is set to wrong negative value"); + assertEquals(0, status.getFinishTime(), + "Finish time of task status is set to wrong negative value"); status.setFinishTime(currentTime); - assertEquals( - currentTime, status.getFinishTime(), "Finish time of the task status not set correctly."); + assertEquals(currentTime, status.getFinishTime(), + "Finish time of the task status not set correctly."); // test with null task-diagnostics TaskStatus ts = ((TaskStatus)status.clone()); @@ -117,19 +116,19 @@ public boolean getIsMap() { return false; } }; - assertEquals( - status.getDiagnosticInfo(), test, "Small diagnostic info test failed"); - assertEquals(status.getStateString(), - test, "Small state string test failed"); + assertEquals(status.getDiagnosticInfo(), test, + "Small diagnostic info test failed"); + assertEquals(status.getStateString(), test, + "Small state string test failed"); // now append some small string and check String newDInfo = test.concat(test); status.setDiagnosticInfo(test); status.setStateString(newDInfo); - assertEquals( - newDInfo, status.getDiagnosticInfo(), "Small diagnostic info append failed"); - assertEquals( - newDInfo, status.getStateString(), "Small state-string append failed"); + assertEquals(newDInfo, status.getDiagnosticInfo(), + "Small diagnostic info append failed"); + assertEquals(newDInfo, status.getStateString(), + "Small state-string append failed"); // update the status with small state strings TaskStatus newStatus = (TaskStatus)status.clone(); @@ -138,47 +137,47 @@ public boolean getIsMap() { return false; } }; status.statusUpdate(newStatus); newDInfo = newDInfo.concat(newStatus.getDiagnosticInfo()); - assertEquals( - newDInfo, status.getDiagnosticInfo(), "Status-update on diagnostic-info failed"); - assertEquals( - newSInfo, status.getStateString(), "Status-update on state-string failed"); + assertEquals(newDInfo, status.getDiagnosticInfo(), + "Status-update on diagnostic-info failed"); + assertEquals(newSInfo, status.getStateString(), + "Status-update on state-string failed"); newSInfo = "hi2"; status.statusUpdate(0, newSInfo, null); - assertEquals( - newSInfo, status.getStateString(), "Status-update on state-string failed"); + assertEquals(newSInfo, status.getStateString(), + "Status-update on state-string failed"); newSInfo = "hi3"; status.statusUpdate(null, 0, newSInfo, null, 0); - assertEquals( - newSInfo, status.getStateString(), "Status-update on state-string failed"); + assertEquals(newSInfo, status.getStateString(), + "Status-update on state-string
failed"); // now append each with large string String large = "hihihihihihihihihihi"; // 20 chars status.setDiagnosticInfo(large); status.setStateString(large); - assertEquals( - maxSize, status.getDiagnosticInfo().length(), "Large diagnostic info append test failed"); - assertEquals( - maxSize, status.getStateString().length(), "Large state-string append test failed"); + assertEquals(maxSize, status.getDiagnosticInfo().length(), + "Large diagnostic info append test failed"); + assertEquals(maxSize, status.getStateString().length(), + "Large state-string append test failed"); // update a large status with large strings newStatus.setDiagnosticInfo(large + "0"); newStatus.setStateString(large + "1"); status.statusUpdate(newStatus); - assertEquals( - maxSize, status.getDiagnosticInfo().length(), "Status-update on diagnostic info failed"); - assertEquals( - maxSize, status.getStateString().length(), "Status-update on state-string failed"); + assertEquals(maxSize, status.getDiagnosticInfo().length(), + "Status-update on diagnostic info failed"); + assertEquals(maxSize, status.getStateString().length(), + "Status-update on state-string failed"); status.statusUpdate(0, large + "2", null); - assertEquals( - maxSize, status.getStateString().length(), "Status-update on state-string failed"); + assertEquals(maxSize, status.getStateString().length(), + "Status-update on state-string failed"); status.statusUpdate(null, 0, large + "3", null, 0); - assertEquals( - maxSize, status.getStateString().length(), "Status-update on state-string failed"); + assertEquals(maxSize, status.getStateString().length(), + "Status-update on state-string failed"); // test passing large string in constructor status = new TaskStatus(null, 0, 0, null, large, large, null, null, @@ -197,9 +196,9 @@ public boolean getIsMap() { return false; } }; - assertEquals( - maxSize, status.getDiagnosticInfo().length(), "Large diagnostic info test failed"); - assertEquals( - maxSize, status.getStateString().length(), "Large state-string test failed"); + assertEquals(maxSize, status.getDiagnosticInfo().length(), + "Large diagnostic info test failed"); + assertEquals(maxSize, status.getStateString().length(), + "Large state-string test failed"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java index ef2a4e0319b09..5463e79530fd3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java @@ -44,7 +44,9 @@ import org.slf4j.LoggerFactory; import static java.nio.charset.StandardCharsets.UTF_8; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestTextInputFormat { private static final Logger LOG = @@ -112,8 +114,8 @@ public void testFormat() throws Exception { LOG.debug("splitting: got = " + splits.length); if (length == 0) { - assertEquals( - 1, splits.length, "Files of length 0 are not returned from FileInputFormat.getSplits()."); + 
assertEquals(1, splits.length, + "Files of length 0 are not returned from FileInputFormat.getSplits()."); assertEquals(0, splits[0].getLength(), "Empty file length == 0"); } @@ -347,11 +349,11 @@ public void testUTF8() throws Exception { LineReader in = makeStream("abcd\u20acbdcd\u20ac"); Text line = new Text(); in.readLine(line); - assertEquals("readLine changed utf8 characters", - "abcd\u20acbdcd\u20ac", line.toString()); + assertEquals("abcd\u20acbdcd\u20ac", line.toString(), + "readLine changed utf8 characters"); in = makeStream("abc\u200axyz"); in.readLine(line); - assertEquals("split on fake newline", "abc\u200axyz", line.toString()); + assertEquals("abc\u200axyz", line.toString(), "split on fake newline"); } /** @@ -527,13 +529,12 @@ public void testGzip() throws IOException { } List results = readSplit(format, splits[0], job); assertEquals(6, results.size(), "splits[0] length"); - assertEquals("splits[0][5]", " dog", results.get(5).toString()); + assertEquals(" dog", results.get(5).toString(), "splits[0][5]"); results = readSplit(format, splits[1], job); assertEquals(2, results.size(), "splits[1] length"); - assertEquals("splits[1][0]", "this is a test", - results.get(0).toString()); - assertEquals("splits[1][1]", "of gzip", - results.get(1).toString()); + assertEquals("this is a test", results.get(0).toString(), "splits[1][0]"); + assertEquals("of gzip", + results.get(1).toString(), "splits[1][1]"); } /** @@ -551,8 +552,8 @@ public void testGzipEmpty() throws IOException { TextInputFormat format = new TextInputFormat(); format.configure(job); InputSplit[] splits = format.getSplits(job, 100); - assertEquals( - 1, splits.length, "Compressed files of length 0 are not returned from FileInputFormat.getSplits()."); + assertEquals(1, splits.length, + "Compressed files of length 0 are not returned from FileInputFormat.getSplits()."); List results = readSplit(format, splits[0], job); assertEquals(0, results.size(), "Compressed empty file length == 0"); } @@ -586,7 +587,7 @@ private static String unquote(String in) { * @param args * @throws Exception */ - public static void main(String[] args) throws Exception { + /*public static void main(String[] args) throws Exception { for(String arg: args) { System.out.println("Working on " + arg); LineReader reader = makeStream(unquote(arg)); @@ -598,5 +599,5 @@ public static void main(String[] args) throws Exception { } reader.close(); } - } + }*/ } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUtils.java index 804bc06d5302b..caba53a14a8e0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUtils.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestUtils.java @@ -21,7 +21,8 @@ import org.apache.hadoop.fs.PathFilter; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestUtils { private static final Path[] LOG_PATHS = new Path[] { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java index 73f5b97d62a34..6e81b11c94a78 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java @@ -118,7 +118,6 @@ import org.apache.log4j.WriterAppender; import org.apache.log4j.spi.LoggingEvent; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -280,7 +279,8 @@ public ClientServiceDelegate answer(InvocationOnMock invocation) long startTimeMillis = System.currentTimeMillis(); yarnRunner.killJob(jobId); assertTrue(System.currentTimeMillis() - startTimeMillis - >= timeToWaitBeforeHardKill, "killJob should have waited at least " + timeToWaitBeforeHardKill + >= timeToWaitBeforeHardKill, + "killJob should have waited at least " + timeToWaitBeforeHardKill + " ms."); } @@ -527,8 +527,8 @@ public void testAMAdminCommandOpts() throws Exception { for(String command : commands) { if(command != null) { - assertFalse( - command.contains(PROFILE_PARAMS), "Profiler should be disabled by default"); + assertFalse(command.contains(PROFILE_PARAMS), + "Profiler should be disabled by default"); adminPos = command.indexOf("-Djava.net.preferIPv4Stack=true"); if(adminPos >= 0) adminIndex = index; @@ -939,13 +939,13 @@ public void testSendJobConf() throws IOException { Configuration confSent = BuilderUtils.parseTokensConf(submissionContext); // configs that match regex should be included - Assertions.assertEquals("123.0.0.1", + assertEquals("123.0.0.1", confSent.get("dfs.namenode.rpc-address.mycluster2.nn1")); - Assertions.assertEquals("123.0.0.2", + assertEquals("123.0.0.2", confSent.get("dfs.namenode.rpc-address.mycluster2.nn2")); // configs that aren't matching regex should not be included - Assertions.assertTrue(confSent.get("hadoop.tmp.dir") == null || !confSent + assertTrue(confSent.get("hadoop.tmp.dir") == null || !confSent .get("hadoop.tmp.dir").equals("testconfdir")); UserGroupInformation.reset(); } @@ -967,15 +967,15 @@ public void testCustomAMRMResourceType() throws Exception { List resourceRequests = submissionContext.getAMContainerResourceRequests(); - Assertions.assertEquals(1, resourceRequests.size()); + assertEquals(1, resourceRequests.size()); ResourceRequest resourceRequest = resourceRequests.get(0); ResourceInformation resourceInformation = resourceRequest.getCapability() .getResourceInformation(CUSTOM_RESOURCE_NAME); - Assertions.assertEquals("Expecting the default unit (G)", - "G", resourceInformation.getUnits()); - Assertions.assertEquals(5L, resourceInformation.getValue()); - Assertions.assertEquals(3, resourceRequest.getCapability().getVirtualCores()); + assertEquals("G", resourceInformation.getUnits(), + "Expecting the default unit (G)"); + assertEquals(5L, resourceInformation.getValue()); + assertEquals(3, resourceRequest.getCapability().getVirtualCores()); } @Test @@ -993,11 +993,11 @@ public void testAMRMemoryRequest() throws Exception { List resourceRequests = submissionContext.getAMContainerResourceRequests(); - Assertions.assertEquals(1, resourceRequests.size()); + assertEquals(1, resourceRequests.size()); ResourceRequest resourceRequest = 
resourceRequests.get(0); long memorySize = resourceRequest.getCapability().getMemorySize(); - Assertions.assertEquals(3072, memorySize); + assertEquals(3072, memorySize); } } @@ -1022,11 +1022,11 @@ public void testAMRMemoryRequestOverriding() throws Exception { List resourceRequests = submissionContext.getAMContainerResourceRequests(); - Assertions.assertEquals(1, resourceRequests.size()); + assertEquals(1, resourceRequests.size()); ResourceRequest resourceRequest = resourceRequests.get(0); long memorySize = resourceRequest.getCapability().getMemorySize(); - Assertions.assertEquals(3072, memorySize); + assertEquals(3072, memorySize); assertTrue(testAppender.getLogEvents().stream().anyMatch( e -> e.getLevel() == Level.WARN && ("Configuration " + "yarn.app.mapreduce.am.resource." + memoryName + "=3Gi is " + diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java index 46d407588e574..7f26bb33e8179 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java @@ -24,8 +24,6 @@ import java.util.ArrayList; -import org.junit.jupiter.api.Assertions; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -35,6 +33,10 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * This class performs unit test for Job/JobControl classes. 
* @@ -205,9 +207,9 @@ public void testJobState() throws Exception { Job job_1 = getCopyJob(); JobControl jc = new JobControl("Test"); jc.addJob(job_1); - Assertions.assertEquals(Job.WAITING, job_1.getState()); + assertEquals(Job.WAITING, job_1.getState()); job_1.setState(Job.SUCCESS); - Assertions.assertEquals(Job.WAITING, job_1.getState()); + assertEquals(Job.WAITING, job_1.getState()); org.apache.hadoop.mapreduce.Job mockjob = mock(org.apache.hadoop.mapreduce.Job.class); @@ -215,9 +217,9 @@ public void testJobState() throws Exception { new org.apache.hadoop.mapreduce.JobID("test", 0); when(mockjob.getJobID()).thenReturn(jid); job_1.setJob(mockjob); - Assertions.assertEquals("job_test_0000", job_1.getMapredJobID()); + assertEquals("job_test_0000", job_1.getMapredJobID()); job_1.setMapredJobID("job_test_0001"); - Assertions.assertEquals("job_test_0000", job_1.getMapredJobID()); + assertEquals("job_test_0000", job_1.getMapredJobID()); jc.stop(); } @@ -228,8 +230,8 @@ public void testAddingDependingJob() throws Exception { ArrayList dependingJobs = new ArrayList(); JobControl jc = new JobControl("Test"); jc.addJob(job_1); - Assertions.assertEquals(Job.WAITING, job_1.getState()); - Assertions.assertTrue(job_1.addDependingJob(new Job(job_1.getJobConf(), + assertEquals(Job.WAITING, job_1.getState()); + assertTrue(job_1.addDependingJob(new Job(job_1.getJobConf(), dependingJobs))); } @@ -268,13 +270,13 @@ public void testGetAssignedJobId() throws Exception { JobConf jc = new JobConf(); Job j = new Job(jc); //Just make sure no exception is thrown - Assertions.assertNull(j.getAssignedJobID()); + assertNull(j.getAssignedJobID()); org.apache.hadoop.mapreduce.Job mockjob = mock(org.apache.hadoop.mapreduce.Job.class); org.apache.hadoop.mapreduce.JobID jid = new org.apache.hadoop.mapreduce.JobID("test",0); when(mockjob.getJobID()).thenReturn(jid); j.setJob(mockjob); JobID expected = new JobID("test",0); - Assertions.assertEquals(expected, j.getAssignedJobID()); + assertEquals(expected, j.getAssignedJobID()); verify(mockjob).getJobID(); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java index 1ab06bb2a0728..3d11d823f627d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestDatamerge.java @@ -53,7 +53,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestDatamerge { @@ -176,14 +178,14 @@ public void map(IntWritable key, TupleWritable val, final String kvstr = "Unexpected tuple: " + stringify(key, val); if (0 == k % (srcs * srcs)) { for (int i = 0; i < val.size(); ++i) { - assertInstanceOf(IntWritable.class, val.get(i), kvstr); + assertTrue(val.get(i) instanceof IntWritable, kvstr); final int vali = ((IntWritable)val.get(i)).get(); assertEquals((vali - i) * srcs, 10 * k, kvstr); } } else { for (int i = 0; i < val.size(); ++i) { if (i == k % 
srcs) { - assertInstanceOf(IntWritable.class, val.get(i), kvstr); + assertTrue(val.get(i) instanceof IntWritable, kvstr); final int vali = ((IntWritable)val.get(i)).get(); assertEquals(srcs * (vali - i), 10 * (k - i), kvstr); } else { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java index 6c742004be53a..c54fb306ff45e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestTupleWritable.java @@ -311,8 +311,10 @@ public void testPreVersion21Compatibility() throws Exception { ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); TupleWritable dTuple = new TupleWritable(); dTuple.readFields(new DataInputStream(in)); - assertTrue(oldTuple.isCompatible(dTuple), "Tuple writable is unable to read pre-0.21 versions of TupleWritable"); - assertEquals(-1, in.read(), "All tuple data has not been read from the stream"); + assertTrue(oldTuple.isCompatible(dTuple), + "Tuple writable is unable to read pre-0.21 versions of TupleWritable"); + assertEquals(-1, in.read(), + "All tuple data has not been read from the stream"); } @Test public void testPreVersion21CompatibilityEmptyTuple() throws Exception { @@ -324,8 +326,10 @@ public void testPreVersion21CompatibilityEmptyTuple() throws Exception { ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); TupleWritable dTuple = new TupleWritable(); dTuple.readFields(new DataInputStream(in)); - assertTrue(oldTuple.isCompatible(dTuple), "Tuple writable is unable to read pre-0.21 versions of TupleWritable"); - assertEquals(-1, in.read(), "All tuple data has not been read from the stream"); + assertTrue(oldTuple.isCompatible(dTuple), + "Tuple writable is unable to read pre-0.21 versions of TupleWritable"); + assertEquals(-1, in.read(), + "All tuple data has not been read from the stream"); } /** diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java index e8102654afcc7..4d64ceeec7fb6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java @@ -34,8 +34,6 @@ import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.util.ReflectionUtils; import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertTrue; public class TestWrappedRecordReaderClassloader { @@ -49,7 +47,7 @@ public void testClassLoader() throws Exception { JobConf job = new JobConf(); Fake_ClassLoader classLoader = new Fake_ClassLoader(); job.setClassLoader(classLoader); - 
assertInstanceOf(Fake_ClassLoader.class, job.getClassLoader()); + assertTrue(job.getClassLoader() instanceof Fake_ClassLoader); FileSystem fs = FileSystem.get(job); Path testdir = fs.makeQualified(new Path( @@ -60,7 +58,7 @@ public void testClassLoader() throws Exception { job.set("mapreduce.join.expr", CompositeInputFormat.compose("outer", IF_ClassLoaderChecker.class, src)); - CompositeInputFormat inputFormat = new CompositeInputFormat<>(); + CompositeInputFormat inputFormat = new CompositeInputFormat(); inputFormat.getRecordReader(inputFormat.getSplits(job, 1)[0], job, Reporter.NULL); } @@ -115,7 +113,7 @@ public InputSplit[] getSplits(JobConf conf, int splits) { public RecordReader getRecordReader(InputSplit ignored, JobConf job, Reporter reporter) { - return new RR_ClassLoaderChecker<>(job); + return new RR_ClassLoaderChecker(job); } } @@ -125,9 +123,9 @@ public static class RR_ClassLoaderChecker implements RecordReader { @SuppressWarnings("unchecked") public RR_ClassLoaderChecker(JobConf job) { - assertInstanceOf(Fake_ClassLoader.class, job.getClassLoader(), - "The class loader has not been inherited from " + - CompositeRecordReader.class.getSimpleName()); + assertTrue(job.getClassLoader() instanceof Fake_ClassLoader, + "The class loader has not been inherited from " + + CompositeRecordReader.class.getSimpleName()); keyclass = (Class) job.getClass("test.fakeif.keyclass", NullWritable.class, WritableComparable.class); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java index 5d84d83dfadfb..5a6641fed5088 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestDelegatingInputFormat.java @@ -32,7 +32,8 @@ import org.apache.hadoop.mapred.TextInputFormat; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestDelegatingInputFormat { @Test @@ -66,7 +67,7 @@ public void testSplitting() throws Exception { int[] bins = new int[3]; for (InputSplit split : splits) { - assertInstanceOf(TaggedInputSplit.class, split); + assertTrue(split instanceof TaggedInputSplit); final TaggedInputSplit tis = (TaggedInputSplit) split; int index = -1; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java index 55a3e3d88d75a..695b9397f4ae4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java @@ -89,10 +89,10 @@ public void configure(String keySpec, int expect) throws Exception { 
conf.setMapperClass(InverseMapper.class); conf.setReducerClass(IdentityReducer.class); if (!fs.mkdirs(testdir)) { - throw new IOException("Mkdirs failed to create " + testdir); + throw new IOException("Mkdirs failed to create " + testdir.toString()); } if (!fs.mkdirs(inDir)) { - throw new IOException("Mkdirs failed to create " + inDir); + throw new IOException("Mkdirs failed to create " + inDir.toString()); } // set up input data in 2 files Path inFile = new Path(inDir, "part0"); @@ -161,7 +161,7 @@ public void testBasicUnixComparator() throws Exception { byte[] line2_bytes = line2.getBytes(); public void localTestWithoutMRJob(String keySpec, int expect) throws Exception { - KeyFieldBasedComparator keyFieldCmp = new KeyFieldBasedComparator<>(); + KeyFieldBasedComparator keyFieldCmp = new KeyFieldBasedComparator(); localConf.setKeyFieldComparatorOptions(keySpec); keyFieldCmp.configure(localConf); int result = keyFieldCmp.compare(line1_bytes, 0, line1_bytes.length, diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedPartitioner.java index f05855479f4e2..c13631840589d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedPartitioner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestKeyFieldBasedPartitioner.java @@ -31,16 +31,18 @@ public class TestKeyFieldBasedPartitioner { @Test public void testEmptyKey() throws Exception { KeyFieldBasedPartitioner kfbp = - new KeyFieldBasedPartitioner<>(); + new KeyFieldBasedPartitioner(); JobConf conf = new JobConf(); conf.setInt("num.key.fields.for.partition", 10); kfbp.configure(conf); - assertEquals(0, kfbp.getPartition(new Text(), new Text(), 10), "Empty key should map to 0th partition"); + assertEquals(0, kfbp.getPartition(new Text(), new Text(), 10), + "Empty key should map to 0th partition"); } @Test public void testMultiConfigure() { - KeyFieldBasedPartitioner kfbp = new KeyFieldBasedPartitioner<>(); + KeyFieldBasedPartitioner kfbp = + new KeyFieldBasedPartitioner(); JobConf conf = new JobConf(); conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k1,1"); kfbp.setConf(conf); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java index ea62de0958650..c5f73586c56d1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestLineInputFormat.java @@ -86,13 +86,13 @@ void checkFormat(JobConf job, int expectedN) throws IOException{ // check all splits except last one int count; for (int j = 0; j < splits.length -1; j++) { - assertEquals(0 -, splits[j].getLocations().length, "There are no split locations"); + assertEquals(0, splits[j].getLocations().length, + "There are no split locations"); RecordReader 
reader = format.getRecordReader(splits[j], job, voidReporter); Class readerClass = reader.getClass(); - assertEquals( - LineRecordReader.class, readerClass, "reader class is LineRecordReader."); + assertEquals(LineRecordReader.class, readerClass, + "reader class is LineRecordReader."); LongWritable key = reader.createKey(); Class keyClass = key.getClass(); assertEquals(LongWritable.class, keyClass, "Key class is LongWritable."); @@ -108,8 +108,8 @@ void checkFormat(JobConf job, int expectedN) throws IOException{ } finally { reader.close(); } - assertEquals( - expectedN, count, "number of lines in split is " + expectedN); + assertEquals(expectedN, count, + "number of lines in split is " + expectedN); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java index 489e8258d5947..41cd160025428 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultipleOutputs.java @@ -51,7 +51,10 @@ import java.util.Iterator; import java.util.Map; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java index 93be3b5b3cd8c..7406a34a9b91a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java @@ -83,7 +83,7 @@ private void run(boolean ioEx, boolean rtEx) throws Exception { fs.delete(outDir, true); if (!fs.mkdirs(inDir)) { - throw new IOException("Mkdirs failed to create " + inDir); + throw new IOException("Mkdirs failed to create " + inDir.toString()); } { DataOutputStream file = fs.create(new Path(inDir, "part-0")); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java index b69a542fe14d5..aa50755be52f0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java @@ -20,11 +20,14 @@ import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; +import org.apache.hadoop.mapred.lib.*; import org.apache.hadoop.mapreduce.MapReduceTestUtil; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; +import java.io.*; import java.nio.charset.StandardCharsets; +import java.util.*; import java.text.NumberFormat; public class TestAggregates { @@ -78,7 +81,7 @@ public static void launch() throws Exception { fileOut.close(); System.out.println("inputData:"); - System.out.println(inputData); + System.out.println(inputData.toString()); JobConf job = new JobConf(conf, TestAggregates.class); FileInputFormat.setInputPaths(job, INPUT_DIR); job.setInputFormat(TextInputFormat.class); @@ -111,7 +114,7 @@ public static void launch() throws Exception { Path outPath = new Path(OUTPUT_DIR, "part-00000"); String outdata = MapReduceTestUtil.readOutput(outPath,job); System.out.println("full out data:"); - System.out.println(outdata); + System.out.println(outdata.toString()); outdata = outdata.substring(0, expectedOutput.toString().length()); assertEquals(expectedOutput.toString(),outdata); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java index cd3c9cc4a1159..3a3b13d586abe 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java @@ -31,7 +31,8 @@ public class TestConstructQuery { private String expected = "INSERT INTO hadoop_output (id,name,value) VALUES (?,?,?);"; private String nullExpected = "INSERT INTO hadoop_output VALUES (?,?,?);"; - private DBOutputFormat format = new DBOutputFormat<>(); + private DBOutputFormat format + = new DBOutputFormat(); @Test public void testConstructQuery() { String actual = format.constructQuery("hadoop_output", fieldNames); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java index cc07217ce1ac2..cc3a92031d83c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java @@ -68,10 +68,12 @@ import org.apache.hadoop.util.ExitUtil; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.yarn.security.AMRMTokenIdentifier; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; public class TestPipeApplication { private static File workSpace = new File("target", @@ -97,25 +99,26 @@ public void testRunner() throws Exception { conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName); - CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<>( - new Counters.Counter(), new Progress()); + CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>( + new Counters.Counter(), new Progress()); FileSystem fs = new RawLocalFileSystem(); fs.initialize(FsConstants.LOCAL_FS_URI, conf); - Writer<IntWritable, Text> wr = new Writer<>(conf, fs.create( - new Path(workSpace + File.separator + "outfile")), IntWritable.class, - Text.class, null, null, true); + Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create( + new Path(workSpace + File.separator + "outfile")), IntWritable.class, + Text.class, null, null, true); output.setWriter(wr); // stub for client File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub"); conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath()); // token for authorization - Token<AMRMTokenIdentifier> token = new Token<>( - "user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service")); + Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>( + "user".getBytes(), "password".getBytes(), new Text("kind"), new Text( + "service")); TokenCache.setJobToken(token, conf.getCredentials()); conf.setBoolean(MRJobConfig.SKIP_RECORDS, true); TestTaskReporter reporter = new TestTaskReporter(); - PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<>(); + PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>(); initStdOut(conf); @@ -152,7 +155,7 @@ public void testRunner() throws Exception { * test org.apache.hadoop.mapred.pipes.Application * test a internal functions: MessageType.REGISTER_COUNTER, INCREMENT_COUNTER, STATUS, PROGRESS... * - * @throws Throwable The exception thrown during unit testing. + * @throws Throwable */ @Test @@ -173,15 +176,16 @@ public void testApplication() throws Throwable { conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath()); // token for authorization - Token<AMRMTokenIdentifier> token = new Token<>( - "user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service")); + Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>( + "user".getBytes(), "password".getBytes(), new Text("kind"), new Text( + "service")); TokenCache.setJobToken(token, conf.getCredentials()); FakeCollector output = new FakeCollector(new Counters.Counter(), new Progress()); FileSystem fs = new RawLocalFileSystem(); fs.initialize(FsConstants.LOCAL_FS_URI, conf); - Writer<IntWritable, Text> wr = new Writer<>(conf, fs.create( + Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create( new Path(workSpace.getAbsolutePath() + File.separator + "outfile")), IntWritable.class, Text.class, null, null, true); output.setWriter(wr); @@ -189,8 +193,8 @@ public void testApplication() throws Throwable { initStdOut(conf); - Application<WritableComparable<IntWritable>, Writable, IntWritable, Text> application = new Application<>( - conf, rReader, output, reporter, IntWritable.class, Text.class); + Application<WritableComparable<IntWritable>, Writable, IntWritable, Text> application = new Application<WritableComparable<IntWritable>, Writable, IntWritable, Text>( + conf, rReader, output, reporter, IntWritable.class, Text.class); application.getDownlink().flush(); application.getDownlink().mapItem(new IntWritable(3), new Text("txt")); @@ -243,7 +247,7 @@ public void testApplication() throws Throwable { /** * test org.apache.hadoop.mapred.pipes.Submitter * - * @throws Exception The exception thrown during unit testing.
+ * @throws Exception */ @Test public void testSubmitter() throws Exception { @@ -262,7 +266,7 @@ public void testSubmitter() throws Exception { Submitter.setKeepCommandFile(conf, false); Submitter.setIsJavaRecordReader(conf, false); Submitter.setIsJavaRecordWriter(conf, false); - PipesPartitioner<IntWritable, Text> partitioner = new PipesPartitioner<>(); + PipesPartitioner<IntWritable, Text> partitioner = new PipesPartitioner<IntWritable, Text>(); partitioner.configure(conf); Submitter.setJavaPartitioner(conf, partitioner.getClass()); @@ -341,7 +345,7 @@ public void testSubmitter() throws Exception { String[] args = new String[22]; File input = new File(workSpace + File.separator + "input"); if (!input.exists()) { - Assertions.assertTrue(input.createNewFile()); + assertTrue(input.createNewFile()); } File outPut = new File(workSpace + File.separator + "output"); FileUtil.fullyDelete(outPut); @@ -386,7 +390,7 @@ public void testSubmitter() throws Exception { * test org.apache.hadoop.mapred.pipes.PipesReducer * test the transfer of data: key and value * - * @throws Exception The exception thrown during unit testing. + * @throws Exception */ @Test public void testPipesReduser() throws Exception { @@ -394,24 +398,25 @@ public void testPipesReduser() throws Exception { File[] psw = cleanTokenPasswordFile(); JobConf conf = new JobConf(); try { - Token<AMRMTokenIdentifier> token = new Token<>( - "user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service")); + Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>( + "user".getBytes(), "password".getBytes(), new Text("kind"), new Text( + "service")); TokenCache.setJobToken(token, conf.getCredentials()); File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeReducerStub"); conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath()); - PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<>(); + PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>(); reducer.configure(conf); BooleanWritable bw = new BooleanWritable(true); conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName); initStdOut(conf); conf.setBoolean(MRJobConfig.SKIP_RECORDS, true); - CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<>( - new Counters.Counter(), new Progress()); + CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>( + new Counters.Counter(), new Progress()); Reporter reporter = new TestTaskReporter(); - List<Text> texts = new ArrayList<>(); + List<Text> texts = new ArrayList<Text>(); texts.add(new Text("first")); texts.add(new Text("second")); texts.add(new Text("third")); @@ -444,7 +449,7 @@ public void testPipesReduser() throws Exception { @Test public void testPipesPartitioner() { - PipesPartitioner<IntWritable, Text> partitioner = new PipesPartitioner<>(); + PipesPartitioner<IntWritable, Text> partitioner = new PipesPartitioner<IntWritable, Text>(); JobConf configuration = new JobConf(); Submitter.getJavaPartitioner(configuration); partitioner.configure(new JobConf()); @@ -566,7 +571,7 @@ private String readFile(File file) throws Exception { ByteArrayOutputStream out = new ByteArrayOutputStream(); InputStream is = new FileInputStream(file); byte[] buffer = new byte[1024]; - int counter; + int counter = 0; while ((counter = is.read(buffer)) >= 0) { out.write(buffer, 0, counter); } @@ -810,7 +815,7 @@ public void close() throws IOException { private class FakeCollector extends CombineOutputCollector<IntWritable, Text> { - final private Map<IntWritable, Text> collect = new HashMap<>(); + final private Map<IntWritable, Text> collect = new HashMap<IntWritable, Text>(); public FakeCollector(Counter outCounter, Progressable progressable) { super(outCounter, progressable); diff --git
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java index 48fbac70b1a6f..83a958fb5192a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipes.java @@ -46,7 +46,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; @Disabled public class TestPipes { @@ -67,7 +69,6 @@ static void cleanup(FileSystem fs, Path p) throws IOException { fs.delete(p, true); assertFalse(fs.exists(p), "output not cleaned up"); } - @Test public void testPipes() throws IOException { if (System.getProperty("compile.c++") == null) { @@ -83,16 +84,16 @@ public void testPipes() throws IOException { Configuration conf = new Configuration(); dfs = new MiniDFSCluster.Builder(conf).numDataNodes(numWorkers).build(); mr = new MiniMRCluster(numWorkers, - dfs.getFileSystem().getUri().toString(), 1); + dfs.getFileSystem().getUri().toString(), 1); writeInputFile(dfs.getFileSystem(), inputPath); runProgram(mr, dfs, wordCountSimple, - inputPath, outputPath, 3, 2, twoSplitOutput, null); + inputPath, outputPath, 3, 2, twoSplitOutput, null); cleanup(dfs.getFileSystem(), outputPath); runProgram(mr, dfs, wordCountSimple, - inputPath, outputPath, 3, 0, noSortOutput, null); + inputPath, outputPath, 3, 0, noSortOutput, null); cleanup(dfs.getFileSystem(), outputPath); runProgram(mr, dfs, wordCountPart, - inputPath, outputPath, 3, 2, fixedPartitionOutput, null); + inputPath, outputPath, 3, 2, fixedPartitionOutput, null); runNonPipedProgram(mr, dfs, wordCountNoPipes, null); mr.waitUntilIdle(); } finally { @@ -151,14 +152,15 @@ static void writeInputFile(FileSystem fs, Path dir) throws IOException { } static void runProgram(MiniMRCluster mr, MiniDFSCluster dfs, - Path program, Path inputPath, Path outputPath, - int numMaps, int numReduces, String[] expectedResults, - JobConf conf) throws IOException { + Path program, Path inputPath, Path outputPath, + int numMaps, int numReduces, String[] expectedResults, + JobConf conf + ) throws IOException { Path wordExec = new Path("testing/bin/application"); - JobConf job; - if (conf == null) { + JobConf job = null; + if(conf == null) { job = mr.createJobConf(); - } else { + }else { job = new JobConf(conf); } job.setNumMapTasks(numMaps); @@ -172,7 +174,7 @@ static void runProgram(MiniMRCluster mr, MiniDFSCluster dfs, Submitter.setIsJavaRecordWriter(job, true); FileInputFormat.setInputPaths(job, inputPath); FileOutputFormat.setOutputPath(job, outputPath); - RunningJob rJob; + RunningJob rJob = null; if (numReduces == 0) { rJob = Submitter.jobSubmit(job); @@ -198,9 +200,10 @@ static void runProgram(MiniMRCluster mr, MiniDFSCluster dfs, assertTrue((numCounters > 0), "No counters found!"); } - List results = new ArrayList<>(); + List results = new ArrayList(); for (Path p:FileUtil.stat2Paths(dfs.getFileSystem().listStatus(outputPath, - new Utils.OutputFileUtils.OutputFilesFilter()))) { + new 
       results.add(MapReduceTestUtil.readOutput(p, job));
     }
     assertEquals(expectedResults.length, results.size(), "number of reduces is wrong");
@@ -216,10 +219,10 @@ static void runProgram(MiniMRCluster mr, MiniDFSCluster dfs,
    * @param mr The mini mr cluster
    * @param dfs the dfs cluster
    * @param program the program to run
-   * @throws IOException The I/O exception thrown during unit testing.
+   * @throws IOException
    */
  static void runNonPipedProgram(MiniMRCluster mr, MiniDFSCluster dfs,
-      Path program, JobConf conf) throws IOException {
+                                 Path program, JobConf conf) throws IOException {
     JobConf job;
     if(conf == null) {
       job = mr.createJobConf();
@@ -229,7 +232,8 @@ static void runNonPipedProgram(MiniMRCluster mr, MiniDFSCluster dfs,
     job.setInputFormat(WordCountInputFormat.class);
     FileSystem local = FileSystem.getLocal(job);
-    Path testDir = new Path("file:" + System.getProperty("test.build.data"), "pipes");
+    Path testDir = new Path("file:" + System.getProperty("test.build.data"),
+        "pipes");
     Path inDir = new Path(testDir, "input");
     nonPipedOutDir = new Path(testDir, "output");
     Path wordExec = new Path("testing/bin/application");
@@ -260,21 +264,20 @@ static void runNonPipedProgram(MiniMRCluster mr, MiniDFSCluster dfs,
     job.writeXml(out);
     out.close();
     System.err.println("About to run: Submitter -conf " + jobXml +
-        " -input " + inDir + " -output " + nonPipedOutDir +
-        " -program " +
-        dfs.getFileSystem().makeQualified(wordExec));
-
+                       " -input " + inDir + " -output " + nonPipedOutDir +
+                       " -program " +
+                       dfs.getFileSystem().makeQualified(wordExec));
     try {
       int ret = ToolRunner.run(new Submitter(),
-          new String[]{"-conf", jobXml.toString(),
-              "-input", inDir.toString(),
-              "-output", nonPipedOutDir.toString(),
-              "-program",
-              dfs.getFileSystem().makeQualified(wordExec).toString(),
-              "-reduces", "2"});
+          new String[]{"-conf", jobXml.toString(),
+            "-input", inDir.toString(),
+            "-output", nonPipedOutDir.toString(),
+            "-program",
+            dfs.getFileSystem().makeQualified(wordExec).toString(),
+            "-reduces", "2"});
       assertEquals(0, ret);
     } catch (Exception e) {
-      fail("got exception: " + StringUtils.stringifyException(e));
+      assertTrue(false, "got exception: " + StringUtils.stringifyException(e));
     }
   }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipesNonJavaInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipesNonJavaInputFormat.java
index 46affccc482fc..6e32bf9fc6bd5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipesNonJavaInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipesNonJavaInputFormat.java
@@ -29,11 +29,11 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.pipes.TestPipeApplication.FakeSplit;
 import org.apache.hadoop.util.StringUtils;
-import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.Mockito.*;
-
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.mock;
 public class TestPipesNonJavaInputFormat {
   private static File workSpace = new File("target",
@@ -57,16 +57,16 @@ public void testFormat() throws IOException {
     // input and output files
     File input1 = new File(workSpace + File.separator + "input1");
     if (!input1.getParentFile().exists()) {
-      Assertions.assertTrue(input1.getParentFile().mkdirs());
+      assertTrue(input1.getParentFile().mkdirs());
     }
     if (!input1.exists()) {
-      Assertions.assertTrue(input1.createNewFile());
+      assertTrue(input1.createNewFile());
     }
     File input2 = new File(workSpace + File.separator + "input2");
     if (!input2.exists()) {
-      Assertions.assertTrue(input2.createNewFile());
+      assertTrue(input2.createNewFile());
     }
     // set data for splits
     conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,

From 9400e1f0409363ee407326eb20d77a8d43210f53 Mon Sep 17 00:00:00 2001
From: fanshilun
Date: Sun, 9 Feb 2025 08:23:29 +0800
Subject: [PATCH 3/5] MAPREDUCE-7421. Fix CheckStyle & Junit Test.

---
 .../mapred/TestClientServiceDelegate.java     |  17 +--
 .../mapred/TestCommandLineJobSubmission.java  |   2 +-
 .../apache/hadoop/mapred/TestJobCleanup.java  |   4 +-
 .../mapred/TestKeyValueTextInputFormat.java   |  20 ++--
 .../mapred/TestMRTimelineEventHandling.java   |   4 +-
 .../hadoop/mapred/TestNetworkedJob.java       |   4 +-
 .../apache/hadoop/mapred/TestReduceFetch.java |   3 +-
 .../apache/hadoop/mapred/TestReporter.java    |   2 +-
 .../mapred/TestResourceMgrDelegate.java       |   2 +-
 .../hadoop/mapred/TestTextInputFormat.java    |   6 +-
 .../apache/hadoop/mapreduce/TestChild.java    |  66 +++++------
 .../hadoop/mapreduce/TestMRJobClient.java     | 100 +++++++++---------
 .../TestMRKeyFieldBasedComparator.java        |   6 +-
 13 files changed, 115 insertions(+), 121 deletions(-)

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
index 9ba9d64c91487..8344466afdcbf 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
@@ -78,7 +78,7 @@ public class TestClientServiceDelegate {
   public void initTestClientServiceDelegate(boolean pIsAMReachableFromClient) {
     this.isAMReachableFromClient = pIsAMReachableFromClient;
   }
-  
+
   public static Collection data() {
     Object[][] data = new Object[][] { { true }, { false } };
     return Arrays.asList(data);
   }
@@ -277,8 +277,8 @@ public void testJobReportFromHistoryServer(
     JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
     assertNotNull(jobStatus);
-    assertEquals("TestJobFilePath", jobStatus.getJobFile()); 
-    assertEquals("http://TestTrackingUrl", jobStatus.getTrackingUrl()); 
+    assertEquals("TestJobFilePath", jobStatus.getJobFile());
+    assertEquals("http://TestTrackingUrl", jobStatus.getTrackingUrl());
     assertEquals(1.0f, jobStatus.getMapProgress(), 0.0f);
     assertEquals(1.0f, jobStatus.getReduceProgress(), 0.0f);
   }
@@ -299,7 +299,8 @@ public void testCountersFromHistoryServer(
     Counters counters = TypeConverter.toYarn(clientServiceDelegate.getJobCounters(oldJobId));
     assertNotNull(counters);
-    assertEquals(1001, counters.getCounterGroup("dummyCounters").getCounter("dummyCounter").getValue());
+    assertEquals(1001,
+        counters.getCounterGroup("dummyCounters").getCounter("dummyCounter").getValue());
   }

   @MethodSource("data")
@@ -414,20 +415,20 @@ public void testAMAccessDisabled(
     // Should not reach AM even for second and third times too.
     jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
     assertNotNull(jobStatus);
-    assertEquals("N/A", jobStatus.getJobName());    
+    assertEquals("N/A", jobStatus.getJobName());
     verify(clientServiceDelegate, times(0)).instantiateAMProxy(
         any(InetSocketAddress.class));

     jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
     assertNotNull(jobStatus);
-    assertEquals("N/A", jobStatus.getJobName());    
+    assertEquals("N/A", jobStatus.getJobName());
     verify(clientServiceDelegate, times(0)).instantiateAMProxy(
         any(InetSocketAddress.class));

     // The third time around, app is completed, so should go to JHS
     JobStatus jobStatus1 = clientServiceDelegate.getJobStatus(oldJobId);
     assertNotNull(jobStatus1);
-    assertEquals("TestJobFilePath", jobStatus1.getJobFile()); 
-    assertEquals("http://TestTrackingUrl", jobStatus1.getTrackingUrl()); 
+    assertEquals("TestJobFilePath", jobStatus1.getJobFile());
+    assertEquals("http://TestTrackingUrl", jobStatus1.getTrackingUrl());
     assertEquals(1.0f, jobStatus1.getMapProgress(), 0.0f);
     assertEquals(1.0f, jobStatus1.getReduceProgress(), 0.0f);

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
index b80527d1159cc..b3b66e73911d0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
@@ -59,7 +59,7 @@ public void testJobShell() throws Exception {
     stream.close();
     mr = new MiniMRCluster(2, fs.getUri().toString(), 1);
     File thisbuildDir = new File(buildDir, "jobCommand");
-    assertTrue(thisbuildDir.mkdirs(),  "create build dir");
+    assertTrue(thisbuildDir.mkdirs(), "create build dir");
     File f = new File(thisbuildDir, "files_tmp");
     FileOutputStream fstream = new FileOutputStream(f);
     fstream.write("somestrings".getBytes());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java
index 7ec53cceab59c..aaa13f61057b9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java
@@ -203,8 +203,8 @@ private void testFailedJob(String fileName,

     if (fileName != null) {
       Path testFile = new Path(outDir, fileName);
-      assertTrue(
-          fileSys.exists(testFile), "File " + testFile + " missing for failed job " + id);
+      assertTrue(fileSys.exists(testFile),
+          "File " + testFile + " missing for failed job " + id);
     }

     // check if the files from the missing set exists
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
index a89f398578145..4d733684e4080 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
@@ -102,7 +102,8 @@ public void testFormat() throws Exception {
         RecordReader reader = format.getRecordReader(splits[j], job, reporter);
         Class readerClass = reader.getClass();
-        assertEquals(KeyValueLineRecordReader.class, readerClass, "reader class is KeyValueLineRecordReader.");
+        assertEquals(KeyValueLineRecordReader.class, readerClass,
+            "reader class is KeyValueLineRecordReader.");

         Text key = reader.createKey();
         Class keyClass = key.getClass();
@@ -145,11 +146,12 @@ public void testUTF8() throws Exception {
       in = makeStream("abcd\u20acbdcd\u20ac");
       Text line = new Text();
       in.readLine(line);
-      assertEquals("readLine changed utf8 characters",
-          "abcd\u20acbdcd\u20ac", line.toString());
+      assertEquals("abcd\u20acbdcd\u20ac", line.toString(),
+          "readLine changed utf8 characters");
       in = makeStream("abc\u200axyz");
       in.readLine(line);
-      assertEquals("split on fake newline", "abc\u200axyz", line.toString());
+      assertEquals("abc\u200axyz", line.toString(),
+          "split on fake newline");
     } finally {
       if (in != null) {
         in.close();
@@ -244,16 +246,14 @@ public void testGzip() throws IOException {
     }
     List results = readSplit(format, splits[0], job);
     assertEquals(6, results.size(), "splits[0] length");
-    assertEquals("splits[0][5]", " dog", results.get(5).toString());
+    assertEquals(" dog", results.get(5).toString(), "splits[0][5]");
     results = readSplit(format, splits[1], job);
     assertEquals(2, results.size(), "splits[1] length");
-    assertEquals("splits[1][0]", "this is a test",
-        results.get(0).toString());
-    assertEquals("splits[1][1]", "of gzip",
-        results.get(1).toString());
+    assertEquals("this is a test", results.get(0).toString(), "splits[1][0]");
+    assertEquals("of gzip", results.get(1).toString(), "splits[1][1]");
   }

-  public static void main(String[] args) throws Exception {
+   public static void main(String[] args) throws Exception {
     new TestKeyValueTextInputFormat().testFormat();
   }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
index 323d429ff5f88..8274e9db36177 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
@@ -392,8 +392,8 @@ private void verifyEntity(File entityFile, String eventId,
         LOG.info("strLine.trim()= " + strLine.trim());
         if (checkIdPrefix) {
-          assertTrue(
-              entity.getIdPrefix() > 0, "Entity ID prefix expected to be > 0");
+          assertTrue(entity.getIdPrefix() > 0,
+              "Entity ID prefix expected to be > 0");
           if (idPrefix == -1) {
             idPrefix = entity.getIdPrefix();
           } else {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
index ee6eb8d77a550..c01e7a0bfb6f3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
@@ -261,8 +261,8 @@ public void testNetworkedJob() throws Exception {
       // JobStatus objects correspond to the same Job.
       assertEquals(jobId, client.getJob(jobId)
           .getJobStatus().getJobID(), "Expected matching JobIDs");
-      assertEquals(rj.getJobStatus()
-          .getStartTime(), client.getJob(jobId).getJobStatus().getStartTime(), "Expected matching startTimes");
+      assertEquals(rj.getJobStatus().getStartTime(),
+          client.getJob(jobId).getJobStatus().getStartTime(), "Expected matching startTimes");
     } finally {
       if (fileSys != null) {
         fileSys.delete(testDir, true);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java
index 3fd71b9e30e0d..1d7284bce8d0a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReduceFetch.java
@@ -45,7 +45,8 @@ public void testReduceFromDisk() throws Exception {
     final long spill = c.findCounter(TaskCounter.SPILLED_RECORDS).getCounter();
     final long out = c.findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getCounter();
     assertTrue(spill >= 2 * out,
-        "Expected all records spilled during reduce (" + spill + ")"); // all records spill at map, reduce
+        "Expected all records spilled during reduce (" +
+        spill + ")"); // all records spill at map, reduce
     assertTrue(spill >= 2 * out + (out / MAP_TASKS),
         "Expected intermediate merges (" + spill + ")"); // some records hit twice
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java
index aefbd0c67c6d2..d42ab19c90e71 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestReporter.java
@@ -176,7 +176,7 @@ public void reduce(Text key, Iterator values,
         throws IOException {
       float reducePhaseProgress = ((float)++recordCount)/INPUT_LINES;
       float weightedReducePhaseProgress = 
-        reducePhaseProgress * REDUCE_PROGRESS_RANGE;
+          reducePhaseProgress * REDUCE_PROGRESS_RANGE;
       assertEquals(SHUFFLE_PROGRESS_RANGE + weightedReducePhaseProgress,
           reporter.getProgress(), 0.02f, "Invalid progress in reduce");
reduce"); this.reporter = reporter; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java index 98fa3333e29a2..1b90ecc618962 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java @@ -60,7 +60,7 @@ public void testGetRootQueues() throws IOException, InterruptedException { final ApplicationClientProtocol applicationsManager = mock(ApplicationClientProtocol.class); GetQueueInfoResponse response = mock(GetQueueInfoResponse.class); org.apache.hadoop.yarn.api.records.QueueInfo queueInfo = - mock(org.apache.hadoop.yarn.api.records.QueueInfo.class); + mock(org.apache.hadoop.yarn.api.records.QueueInfo.class); when(response.getQueueInfo()).thenReturn(queueInfo); try { when(applicationsManager.getQueueInfo(any( diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java index 5463e79530fd3..dd9d6b5037d21 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java @@ -114,9 +114,9 @@ public void testFormat() throws Exception { LOG.debug("splitting: got = " + splits.length); if (length == 0) { - assertEquals(1, splits.length, - "Files of length 0 are not returned from FileInputFormat.getSplits()."); - assertEquals(0, splits[0].getLength(), "Empty file length == 0"); + assertEquals(1, splits.length, + "Files of length 0 are not returned from FileInputFormat.getSplits()."); + assertEquals(0, splits[0].getLength(), "Empty file length == 0"); } // check each split diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestChild.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestChild.java index 338f1172b04f0..9ea112afed85e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestChild.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestChild.java @@ -30,13 +30,12 @@ import org.apache.hadoop.mapred.HadoopTestCase; import org.apache.hadoop.mapred.JobConf; import org.apache.log4j.Level; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static 
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;

 public class TestChild extends HadoopTestCase {
   private static String TEST_ROOT_DIR =
@@ -63,25 +62,21 @@ protected void setup(Context context) throws IOException,
       boolean oldConfigs = conf.getBoolean(OLD_CONFIGS, false);
       if (oldConfigs) {
         String javaOpts = conf.get(JobConf.MAPRED_TASK_JAVA_OPTS);
-        assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!",
-            javaOpts);
-        assertEquals(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " +
-            javaOpts,
-            javaOpts, TASK_OPTS_VAL);
+        assertNotNull(javaOpts, JobConf.MAPRED_TASK_JAVA_OPTS + " is null!");
+        assertEquals(javaOpts, TASK_OPTS_VAL,
+            JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " + javaOpts);
       } else {
         String mapJavaOpts = conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS);
-        assertNotNull(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!",
-            mapJavaOpts);
-        assertEquals(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " has value of: " +
-            mapJavaOpts,
-            mapJavaOpts, MAP_OPTS_VAL);
+        assertNotNull(mapJavaOpts, JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!");
+        assertEquals(mapJavaOpts, MAP_OPTS_VAL,
+            JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " has value of: " + mapJavaOpts);
       }
       Level logLevel =
          Level.toLevel(conf.get(JobConf.MAPRED_MAP_TASK_LOG_LEVEL,
             Level.INFO.toString()));
-      assertEquals(JobConf.MAPRED_MAP_TASK_LOG_LEVEL + "has value of " +
-          logLevel, logLevel, Level.OFF);
+      assertEquals(logLevel, Level.OFF,
+          JobConf.MAPRED_MAP_TASK_LOG_LEVEL + "has value of " + logLevel);
     }
   }
@@ -95,25 +90,22 @@ protected void setup(Context context)
       boolean oldConfigs = conf.getBoolean(OLD_CONFIGS, false);
       if (oldConfigs) {
         String javaOpts = conf.get(JobConf.MAPRED_TASK_JAVA_OPTS);
-        assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!",
-            javaOpts);
-        assertEquals(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " +
-            javaOpts,
-            javaOpts, TASK_OPTS_VAL);
+        assertNotNull(javaOpts, JobConf.MAPRED_TASK_JAVA_OPTS + " is null!");
+        assertEquals(javaOpts, TASK_OPTS_VAL,
+            JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " + javaOpts);
       } else {
         String reduceJavaOpts = conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS);
-        assertNotNull(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!",
-            reduceJavaOpts);
-        assertEquals(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " has value of: " +
-            reduceJavaOpts,
-            reduceJavaOpts, REDUCE_OPTS_VAL);
+        assertNotNull(reduceJavaOpts,
+            JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!");
+        assertEquals(reduceJavaOpts, REDUCE_OPTS_VAL,
+            JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " has value of: " + reduceJavaOpts);
       }
       Level logLevel =
          Level.toLevel(conf.get(JobConf.MAPRED_REDUCE_TASK_LOG_LEVEL,
             Level.INFO.toString()));
-      assertEquals(JobConf.MAPRED_REDUCE_TASK_LOG_LEVEL + "has value of " +
-          logLevel, logLevel, Level.OFF);
+      assertEquals(logLevel, Level.OFF,
+          JobConf.MAPRED_REDUCE_TASK_LOG_LEVEL + "has value of " + logLevel);
     }
   }
@@ -135,21 +127,21 @@ private Job submitAndValidateJob(JobConf conf, int numMaps, int numReds,
         numMaps, numReds);
     job.setMapperClass(MyMapper.class);
     job.setReducerClass(MyReducer.class);
-    assertFalse("Job already has a job tracker connection, before it's submitted",
-        job.isConnected());
+    assertFalse(job.isConnected(),
+        "Job already has a job tracker connection, before it's submitted");
     job.submit();
-    assertTrue("Job doesn't have a job tracker connection, even though it's been submitted",
-        job.isConnected());
+    assertTrue(job.isConnected(),
+        "Job doesn't have a job tracker connection, even though it's been submitted");
     job.waitForCompletion(true);
     assertTrue(job.isSuccessful());

     // Check output directory
     FileSystem fs = FileSystem.get(conf);
-    assertTrue("Job output directory doesn't exit!", fs.exists(outDir));
+    assertTrue(fs.exists(outDir), "Job output directory doesn't exit!");
     FileStatus[] list = fs.listStatus(outDir, new OutputFilter());
     int numPartFiles = numReds == 0 ? numMaps : numReds;
-    assertTrue("Number of part-files is " + list.length + " and not "
-        + numPartFiles, list.length == numPartFiles);
+    assertTrue(list.length == numPartFiles, "Number of part-files is " + list.length + " and not "
+        + numPartFiles);
     return job;
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
index 17cd5bfaace33..308b069adffe4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
@@ -30,8 +30,8 @@
 import org.apache.hadoop.util.ToolRunner;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -50,11 +50,11 @@
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;

-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;

 /** test CLI class. CLI class implemented the Tool interface.
@@ -65,7 +65,7 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
   private static final Logger LOG =
      LoggerFactory.getLogger(TestMRJobClient.class);

-  @BeforeClass
+  @BeforeAll
   public static void setupClass() throws Exception {
     setupClassBase(TestMRJobClient.class);
   }
@@ -133,8 +133,8 @@ public void testJobSubmissionSpecsAndFiles() throws Exception {
         job.getConfiguration());
     Path submitJobDir = new Path(jobStagingArea, "JobId");
     Path submitJobFile = JobSubmissionFiles.getJobConfPath(submitJobDir);
-    assertFalse("Shouldn't have created a job file if job specs failed.",
-        FileSystem.get(conf).exists(submitJobFile));
+    assertFalse(FileSystem.get(conf).exists(submitJobFile),
+        "Shouldn't have created a job file if job specs failed.");
   }

   /**
@@ -191,7 +191,7 @@ private void testfailTask(Configuration conf) throws Exception {
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     // TaskAttemptId is not set
     int exitCode = runTool(conf, jc, new String[] { "-fail-task" }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");

     runTool(conf, jc, new String[] { "-fail-task", taid.toString() }, out);
     String answer = new String(out.toByteArray(), StandardCharsets.UTF_8);
@@ -209,7 +209,7 @@ private void testKillTask(Configuration conf) throws Exception {
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     // bad parameters
     int exitCode = runTool(conf, jc, new String[] { "-kill-task" }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");

     runTool(conf, jc, new String[] { "-kill-task", taid.toString() }, out);
     String answer = new String(out.toByteArray(), StandardCharsets.UTF_8);
@@ -227,10 +227,10 @@ private void testKillJob(Configuration conf) throws Exception {
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     // without jobId
     int exitCode = runTool(conf, jc, new String[] { "-kill" }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");
     // good parameters
     exitCode = runTool(conf, jc, new String[] { "-kill", jobId }, out);
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");

     String answer = new String(out.toByteArray(), StandardCharsets.UTF_8);
     assertTrue(answer.contains("Killed job " + jobId));
@@ -257,12 +257,12 @@ private void testSubmit(Configuration conf) throws Exception {
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     // bad parameters
     int exitCode = runTool(conf, jc, new String[] { "-submit" }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");

     exitCode = runTool(conf, jc, new String[] { "-submit", fconUri }, out);
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");
     String answer = new String(out.toByteArray());
     // in console was written
     assertTrue(answer.contains("Created job "));
@@ -312,10 +312,10 @@ private void testListBlackList(Configuration conf) throws Exception {
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     int exitCode = runTool(conf, jc, new String[] {
         "-list-blacklisted-trackers", "second in" }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");
     exitCode = runTool(conf, jc,
         new String[] { "-list-blacklisted-trackers" }, out);
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");
     String line;
     BufferedReader br = new BufferedReader(new InputStreamReader(
         new ByteArrayInputStream(out.toByteArray())));
@@ -334,10 +334,10 @@ private void testListAttemptIds(String jobId, Configuration conf)
     CLI jc = createJobClient();
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     int exitCode = runTool(conf, jc, new String[] { "-list-attempt-ids" }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");
     exitCode = runTool(conf, jc, new String[] { "-list-attempt-ids", jobId,
         "MAP", "completed" }, out);
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");
     String line;
     BufferedReader br = new BufferedReader(new InputStreamReader(
         new ByteArrayInputStream(out.toByteArray())));
@@ -356,9 +356,9 @@ private void testListTrackers(Configuration conf) throws Exception {
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     int exitCode = runTool(conf, jc, new String[] { "-list-active-trackers",
         "second parameter" }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");
     exitCode = runTool(conf, jc, new String[] { "-list-active-trackers" }, out);
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");
     String line;
     BufferedReader br = new BufferedReader(new InputStreamReader(
         new ByteArrayInputStream(out.toByteArray())));
@@ -387,7 +387,7 @@ private void testJobHistory(String jobId, Configuration conf)
         historyFileUri = file.getPath().toUri().toString();
       }
     }
-    assertNotNull("Could not find jhist file", historyFileUri);
+    assertNotNull(historyFileUri, "Could not find jhist file");

     for (String historyFileOrJobId : new String[]{historyFileUri, jobId}) {
       // Try a bunch of different valid combinations of the command
@@ -396,7 +396,7 @@ private void testJobHistory(String jobId, Configuration conf)
           "all",
           historyFileOrJobId,
       }, out);
-      assertEquals("Exit code", 0, exitCode);
+      assertEquals(0, exitCode, "Exit code");
       checkHistoryHumanOutput(jobId, out);
       File outFile = File.createTempFile("myout", ".txt");
       exitCode = runTool(conf, jc, new String[]{
@@ -406,7 +406,7 @@ private void testJobHistory(String jobId, Configuration conf)
          "-outfile",
          outFile.getAbsolutePath()
      }, out);
-      assertEquals("Exit code", 0, exitCode);
+      assertEquals(0, exitCode, "Exit code");
      checkHistoryHumanFileOutput(jobId, out, outFile);
      outFile = File.createTempFile("myout", ".txt");
      exitCode = runTool(conf, jc, new String[]{
@@ -418,7 +418,7 @@ private void testJobHistory(String jobId, Configuration conf)
          "-format",
          "human"
      }, out);
-      assertEquals("Exit code", 0, exitCode);
+      assertEquals(0, exitCode, "Exit code");
      checkHistoryHumanFileOutput(jobId, out, outFile);
      exitCode = runTool(conf, jc, new String[]{
          "-history",
@@ -426,7 +426,7 @@ private void testJobHistory(String jobId, Configuration conf)
          "-format",
          "human"
      }, out);
-      assertEquals("Exit code", 0, exitCode);
+      assertEquals(0, exitCode, "Exit code");
      checkHistoryHumanOutput(jobId, out);
      exitCode = runTool(conf, jc, new String[]{
          "-history",
@@ -435,7 +435,7 @@ private void testJobHistory(String jobId, Configuration conf)
          "-format",
          "json"
      }, out);
-      assertEquals("Exit code", 0, exitCode);
+      assertEquals(0, exitCode, "Exit code");
      checkHistoryJSONOutput(jobId, out);
      outFile = File.createTempFile("myout", ".txt");
      exitCode = runTool(conf, jc, new String[]{
@@ -447,7 +447,7 @@ private void testJobHistory(String jobId, Configuration conf)
          "-format",
          "json"
      }, out);
-      assertEquals("Exit code", 0, exitCode);
+      assertEquals(0, exitCode, "Exit code");
      checkHistoryJSONFileOutput(jobId, out, outFile);
      exitCode = runTool(conf, jc, new String[]{
          "-history",
@@ -455,7 +455,7 @@ private void testJobHistory(String jobId, Configuration conf)
          "-format",
          "json"
      }, out);
-      assertEquals("Exit code", 0, exitCode);
+      assertEquals(0, exitCode, "Exit code");
      checkHistoryJSONOutput(jobId, out);

      // Check some bad arguments
@@ -464,19 +464,19 @@ private void testJobHistory(String jobId, Configuration conf)
          historyFileOrJobId,
          "foo"
      }, out);
-      assertEquals("Exit code", -1, exitCode);
+      assertEquals(-1, exitCode, "Exit code");
      exitCode = runTool(conf, jc, new String[]{
          "-history",
          historyFileOrJobId,
          "-format"
      }, out);
-      assertEquals("Exit code", -1, exitCode);
+      assertEquals(-1, exitCode, "Exit code");
      exitCode = runTool(conf, jc, new String[]{
          "-history",
          historyFileOrJobId,
          "-outfile",
      }, out);
-      assertEquals("Exit code", -1, exitCode);
+      assertEquals(-1, exitCode, "Exit code");
      try {
        runTool(conf, jc, new String[]{
            "-history",
@@ -553,16 +553,16 @@ private void testConfig(String jobId, Configuration conf) throws Exception {

     // bad arguments
     int exitCode = runTool(conf, jc, new String[] { "-config" }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");
     exitCode = runTool(conf, jc,
         new String[] { "-config job_invalid foo.xml" }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");

     // good arguments
     File outFile = File.createTempFile("config", ".xml");
     exitCode = runTool(conf, jc, new String[] { "-config", jobId,
         outFile.toString()}, out);
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");
     BufferedReader br = new BufferedReader(new FileReader(outFile));
     String line = br.readLine();
     br.close();
@@ -577,11 +577,11 @@ private void testJobEvents(String jobId, Configuration conf) throws Exception {
     CLI jc = createJobClient();
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     int exitCode = runTool(conf, jc, new String[] { "-events" }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");
     exitCode = runTool(conf, jc, new String[] { "-events", jobId, "0", "100" },
         out);
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");
     String line;
     BufferedReader br = new BufferedReader(new InputStreamReader(
         new ByteArrayInputStream(out.toByteArray())));
@@ -603,10 +603,10 @@ private void testJobStatus(String jobId, Configuration conf) throws Exception {
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     // bad options
     int exitCode = runTool(conf, jc, new String[] { "-status" }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");

     exitCode = runTool(conf, jc, new String[] { "-status", jobId }, out);
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");
     String line;
     BufferedReader br = new BufferedReader(new InputStreamReader(
         new ByteArrayInputStream(out.toByteArray())));
@@ -629,14 +629,14 @@ public void testGetCounter(String jobId, Configuration conf) throws Exception {
     // bad command
     int exitCode = runTool(conf, createJobClient(),
         new String[] { "-counter", }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");

     exitCode = runTool(conf, createJobClient(),
         new String[] { "-counter", jobId,
             "org.apache.hadoop.mapreduce.TaskCounter", "MAP_INPUT_RECORDS" },
         out);
-    assertEquals("Exit code", 0, exitCode);
-    assertEquals("Counter", "3", out.toString().trim());
+    assertEquals(0, exitCode, "Exit code");
+    assertEquals("3", out.toString().trim(), "Counter");
   }
   /**
    * print a job list
@@ -648,11 +648,11 @@ protected void testAllJobList(String jobId, Configuration conf)
     int exitCode = runTool(conf, createJobClient(),
         new String[] { "-list", "alldata" }, out);
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");
     exitCode = runTool(conf, createJobClient(),
         // all jobs
         new String[] { "-list", "all" }, out);
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");
     BufferedReader br = new BufferedReader(new InputStreamReader(
         new ByteArrayInputStream(out.toByteArray())));
     String line;
@@ -675,7 +675,7 @@ protected void testSubmittedJobList(Configuration conf) throws Exception {
     // only submitted
     int exitCode = runTool(conf, createJobClient(),
         new String[] { "-list" }, out);
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");
     BufferedReader br =
         new BufferedReader(new InputStreamReader(new ByteArrayInputStream(
             out.toByteArray())));
@@ -695,7 +695,7 @@ protected void verifyJobPriority(String jobId, String priority,
     PipedInputStream pis = new PipedInputStream();
     PipedOutputStream pos = new PipedOutputStream(pis);
     int exitCode = runTool(conf, jc, new String[] { "-list", "all" }, pos);
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");
     BufferedReader br = new BufferedReader(new InputStreamReader(pis));
     String line;
     while ((line = br.readLine()) != null) {
@@ -713,10 +713,10 @@ public void testChangingJobPriority(String jobId, Configuration conf)
       throws Exception {
     int exitCode = runTool(conf, createJobClient(),
         new String[] { "-set-priority" }, new ByteArrayOutputStream());
-    assertEquals("Exit code", -1, exitCode);
+    assertEquals(-1, exitCode, "Exit code");
     exitCode = runTool(conf, createJobClient(), new String[] { "-set-priority",
         jobId, "VERY_LOW" }, new ByteArrayOutputStream());
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");
     // set-priority is fired after job is completed in YARN, hence need not
     // have to update the priority.
     verifyJobPriority(jobId, "DEFAULT", conf, createJobClient());
@@ -752,7 +752,7 @@ protected void verifyJobName(String jobId, String name,
     PipedOutputStream pos = new PipedOutputStream(pis);
     int exitCode = runTool(conf, jc,
         new String[] { "-list", "all" }, pos);
-    assertEquals("Exit code", 0, exitCode);
+    assertEquals(0, exitCode, "Exit code");
     BufferedReader br = new BufferedReader(new InputStreamReader(pis));
     String line = null;
     while ((line = br.readLine()) != null) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java
index 889137c20e5f6..a17d3c2762d52 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java
@@ -30,15 +30,15 @@
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.map.InverseMapper;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;

 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;

-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;

 public class TestMRKeyFieldBasedComparator extends HadoopTestCase {

From 909d346efff52d2c0b6c6143c4df3ae1c818c17f Mon Sep 17 00:00:00 2001
From: fanshilun
Date: Sun, 9 Feb 2025 12:00:38 +0800
Subject: [PATCH 4/5] MAPREDUCE-7421. Fix CheckStyle & Junit Test.

---
 .../org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
index 4d733684e4080..6393337ee0171 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java
@@ -253,7 +253,7 @@ public void testGzip() throws IOException {
     assertEquals("of gzip", results.get(1).toString(), "splits[1][1]");
   }

-   public static void main(String[] args) throws Exception {
+  public static void main(String[] args) throws Exception {
     new TestKeyValueTextInputFormat().testFormat();
   }
 }

From 44a43381302dcab17c310470b6f6d8b4132c39dd Mon Sep 17 00:00:00 2001
From: fanshilun
Date: Wed, 12 Feb 2025 01:16:59 +0800
Subject: [PATCH 5/5] MAPREDUCE-7421. Fix CheckStyle.

---
 .../apache/hadoop/mapred/TestConcatenatedCompressedInput.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
index a6663d310b5db..0cac3667df327 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
@@ -690,7 +690,7 @@ private static String unquote(String in) {
    * @param args
    * @throws Exception
    */
-  /*public static void main(String[] args) throws Exception {
+  public static void main(String[] args) throws Exception {
     for(String arg: args) {
       System.out.println("Working on " + arg);
       LineReader reader = makeStream(unquote(arg));
@@ -702,5 +702,5 @@ private static String unquote(String in) {
       }
       reader.close();
     }
-  }*/
+  }
 }
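
Note for reviewers: the hunks above are mechanical applications of three JUnit 4 to JUnit 5 migration patterns. The sketch below condenses them into one minimal, self-contained test class; the class name and values are hypothetical and not part of this patch series, and it assumes junit-jupiter-api and junit-jupiter-params are on the test classpath:

    import java.util.Arrays;
    import java.util.Collection;

    import org.junit.jupiter.api.BeforeAll;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.params.ParameterizedTest;
    import org.junit.jupiter.params.provider.MethodSource;

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    public class MigrationSketchTest {

      // Pattern 1: @BeforeClass becomes @BeforeAll (still static),
      // as in TestMRJobClient.setupClass().
      @BeforeAll
      public static void setupClass() {
        // one-time setup shared by all tests in the class
      }

      // Pattern 2: the failure message moves from the first argument of
      // org.junit.Assert methods to the last argument of
      // org.junit.jupiter.api.Assertions methods.
      @Test
      public void testMessageParameterMovesLast() {
        int exitCode = 0;
        // JUnit 4 was: assertEquals("Exit code", 0, exitCode);
        assertEquals(0, exitCode, "Exit code");
        // JUnit 4 was: assertTrue("Exit code is zero", exitCode == 0);
        assertTrue(exitCode == 0, "Exit code is zero");
      }

      // Pattern 3: a Parameterized runner's data() provider is kept and
      // consumed through @MethodSource, as in TestClientServiceDelegate.
      public static Collection<Object[]> data() {
        return Arrays.asList(new Object[][] {{true}, {false}});
      }

      @ParameterizedTest
      @MethodSource("data")
      public void testRunsOncePerRow(boolean isAMReachableFromClient) {
        // each Object[] row from data() supplies the method arguments
        assertTrue(isAMReachableFromClient || !isAMReachableFromClient);
      }
    }

One caution on pattern 2: when every argument is a String, as in the "Counter" assertion in TestMRJobClient, a leftover JUnit 4 argument ordering still compiles under JUnit 5 and silently treats the old message as the expected value, so these swaps have to be verified by eye rather than by the compiler.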