diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java
index e2d75ab268a19..ffe4850bf836d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestNoDefaultsJobConf.java
@@ -29,7 +29,7 @@
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.Utils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import java.io.BufferedReader;
import java.io.IOException;
@@ -39,8 +39,10 @@
import java.io.OutputStreamWriter;
import java.io.Writer;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* This testcase tests that a JobConf without default values submits jobs
@@ -56,10 +58,10 @@ public TestNoDefaultsJobConf() throws IOException {
@Test
public void testNoDefaults() throws Exception {
JobConf configuration = new JobConf();
- assertTrue(configuration.get("hadoop.tmp.dir", null) != null);
+ assertNotNull(configuration.get("hadoop.tmp.dir", null));
configuration = new JobConf(false);
- assertTrue(configuration.get("hadoop.tmp.dir", null) == null);
+ assertNull(configuration.get("hadoop.tmp.dir", null));
Path inDir = new Path("testing/jobconf/input");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
index d718556a05437..b2ab0bc8bf60c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
@@ -34,8 +34,8 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapred.*;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -66,7 +66,7 @@
* <li>standard i/o rate deviation</li>
* </ul>
*/
-@Ignore
+@Disabled
public class DFSCIOTest {
// Constants
private static final Logger LOG = LoggerFactory.getLogger(DFSCIOTest.class);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
index 6ee143dcf4127..9b9fdacbe577a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
@@ -66,9 +66,10 @@
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -226,7 +227,7 @@ private static Path getDataDir(Configuration conf) {
private static MiniDFSCluster cluster;
private static TestDFSIO bench;
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Exception {
bench = new TestDFSIO();
bench.getConf().setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
@@ -241,7 +242,7 @@ public static void beforeClass() throws Exception {
testWrite();
}
- @AfterClass
+ @AfterAll
public static void afterClass() throws Exception {
if(cluster == null)
return;
@@ -256,14 +257,16 @@ public static void testWrite() throws Exception {
bench.analyzeResult(fs, TestType.TEST_TYPE_WRITE, execTime);
}
- @Test (timeout = 10000)
+ @Test
+ @Timeout(value = 10)
public void testRead() throws Exception {
FileSystem fs = cluster.getFileSystem();
long execTime = bench.readTest(fs);
bench.analyzeResult(fs, TestType.TEST_TYPE_READ, execTime);
}
- @Test (timeout = 10000)
+ @Test
+ @Timeout(value = 10)
public void testReadRandom() throws Exception {
FileSystem fs = cluster.getFileSystem();
bench.getConf().setLong("test.io.skip.size", 0);
@@ -271,7 +274,8 @@ public void testReadRandom() throws Exception {
bench.analyzeResult(fs, TestType.TEST_TYPE_READ_RANDOM, execTime);
}
- @Test (timeout = 10000)
+ @Test
+ @Timeout(value = 10)
public void testReadBackward() throws Exception {
FileSystem fs = cluster.getFileSystem();
bench.getConf().setLong("test.io.skip.size", -DEFAULT_BUFFER_SIZE);
@@ -279,7 +283,8 @@ public void testReadBackward() throws Exception {
bench.analyzeResult(fs, TestType.TEST_TYPE_READ_BACKWARD, execTime);
}
- @Test (timeout = 10000)
+ @Test
+ @Timeout(value = 10)
public void testReadSkip() throws Exception {
FileSystem fs = cluster.getFileSystem();
bench.getConf().setLong("test.io.skip.size", 1);
@@ -287,14 +292,16 @@ public void testReadSkip() throws Exception {
bench.analyzeResult(fs, TestType.TEST_TYPE_READ_SKIP, execTime);
}
- @Test (timeout = 10000)
+ @Test
+ @Timeout(value = 10)
public void testAppend() throws Exception {
FileSystem fs = cluster.getFileSystem();
long execTime = bench.appendTest(fs);
bench.analyzeResult(fs, TestType.TEST_TYPE_APPEND, execTime);
}
- @Test (timeout = 60000)
+ @Test
+ @Timeout(value = 60)
public void testTruncate() throws Exception {
FileSystem fs = cluster.getFileSystem();
bench.createControlFile(fs, DEFAULT_NR_BYTES / 2, DEFAULT_NR_FILES);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
index 075ab9a3f3ca7..2d69f4819dcbf 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
@@ -47,16 +47,16 @@
import org.apache.hadoop.mapred.lib.LongSumReducer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotSame;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotSame;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.fail;
public class TestFileSystem {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
index 9334a8a0dfaf6..019b358031774 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
@@ -23,9 +23,9 @@
import java.io.OutputStreamWriter;
import java.io.File;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -40,7 +40,7 @@ public class TestJHLA {
private String historyLog = System.getProperty("test.build.data",
"build/test/data") + "/history/test.log";
- @Before
+ @BeforeEach
public void setUp() throws Exception {
File logFile = new File(historyLog);
if(!logFile.getParentFile().exists())
@@ -121,7 +121,7 @@ public void setUp() throws Exception {
writer.close();
}
- @After
+ @AfterEach
public void tearDown() throws Exception {
File logFile = new File(historyLog);
if(!logFile.delete())
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java
index 218cae8bf7bea..61b4f718f4ccf 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java
@@ -19,8 +19,8 @@
package org.apache.hadoop.fs.slive;
import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.DataInputStream;
import java.io.File;
@@ -40,8 +40,8 @@
import org.apache.hadoop.fs.slive.DataVerifier.VerifyOutput;
import org.apache.hadoop.fs.slive.DataWriter.GenerateOutput;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -194,7 +194,7 @@ private ConfigExtractor getTestConfig(boolean sleep) throws Exception {
return extractor;
}
- @Before
+ @BeforeEach
public void ensureDeleted() throws Exception {
rDelete(getTestFile());
rDelete(getTestDir());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java
index 0273613f9fb2f..621ecc021c717 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/TestNNBench.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hdfs;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
@@ -31,8 +31,9 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.After;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
public class TestNNBench extends HadoopTestCase {
private static final String BASE_DIR =
@@ -45,39 +46,39 @@ public TestNNBench() throws IOException {
super(LOCAL_MR, LOCAL_FS, 1, 1);
}
- @After
+ @AfterEach
public void tearDown() throws Exception {
getFileSystem().delete(new Path(BASE_DIR), true);
getFileSystem().delete(new Path(NNBench.DEFAULT_RES_FILE_NAME), true);
super.tearDown();
}
- @Test(timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testNNBenchCreateReadAndDelete() throws Exception {
runNNBench(createJobConf(), "create_write");
Path path = new Path(BASE_DIR + "/data/file_0_0");
- assertTrue("create_write should create the file",
- getFileSystem().exists(path));
+ assertTrue(getFileSystem().exists(path), "create_write should create the file");
runNNBench(createJobConf(), "open_read");
runNNBench(createJobConf(), "delete");
- assertFalse("Delete operation should delete the file",
- getFileSystem().exists(path));
+ assertFalse(getFileSystem().exists(path),
+ "Delete operation should delete the file");
}
- @Test(timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testNNBenchCreateAndRename() throws Exception {
runNNBench(createJobConf(), "create_write");
Path path = new Path(BASE_DIR + "/data/file_0_0");
- assertTrue("create_write should create the file",
- getFileSystem().exists(path));
+ assertTrue(getFileSystem().exists(path), "create_write should create the file");
runNNBench(createJobConf(), "rename");
Path renamedPath = new Path(BASE_DIR + "/data/file_0_r_0");
- assertFalse("Rename should rename the file", getFileSystem().exists(path));
- assertTrue("Rename should rename the file",
- getFileSystem().exists(renamedPath));
+ assertFalse(getFileSystem().exists(path), "Rename should rename the file");
+ assertTrue(getFileSystem().exists(renamedPath), "Rename should rename the file");
}
- @Test(timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testNNBenchCreateControlFilesWithPool() throws Exception {
runNNBench(createJobConf(), "create_write", BASE_DIR, "5");
Path path = new Path(BASE_DIR, CONTROL_DIR_NAME);
@@ -86,7 +87,8 @@ public void testNNBenchCreateControlFilesWithPool() throws Exception {
assertEquals(5, fileStatuses.length);
}
- @Test(timeout = 30000)
+ @Test
+ @Timeout(value = 30)
public void testNNBenchCrossCluster() throws Exception {
MiniDFSCluster dfsCluster = new MiniDFSCluster.Builder(new JobConf())
.numDataNodes(1).build();
@@ -96,8 +98,8 @@ public void testNNBenchCrossCluster() throws Exception {
runNNBench(createJobConf(), "create_write", baseDir);
Path path = new Path(BASE_DIR + "/data/file_0_0");
- assertTrue("create_write should create the file",
- dfsCluster.getFileSystem().exists(path));
+ assertTrue(dfsCluster.getFileSystem().exists(path),
+ "create_write should create the file");
dfsCluster.shutdown();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java
index 4f1318c044103..b4e424bac9516 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/TestSequenceFileMergeProgress.java
@@ -28,8 +28,8 @@
import org.apache.hadoop.mapred.*;
import org.slf4j.Logger;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
public class TestSequenceFileMergeProgress {
private static final Logger LOG = FileInputFormat.LOG;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java
index 123947a39a302..8899da38248be 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java
@@ -34,8 +34,10 @@
import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
import org.apache.hadoop.net.StandardSocketFactory;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* This class checks that RPCs can use specialized socket factories.
@@ -56,13 +58,13 @@ public void testSocketFactory() throws IOException {
// Get a reference to its DFS directly
FileSystem fs = cluster.getFileSystem();
- Assert.assertTrue(fs instanceof DistributedFileSystem);
+ assertTrue(fs instanceof DistributedFileSystem);
DistributedFileSystem directDfs = (DistributedFileSystem) fs;
Configuration cconf = getCustomSocketConfigs(nameNodePort);
fs = FileSystem.get(cconf);
- Assert.assertTrue(fs instanceof DistributedFileSystem);
+ assertTrue(fs instanceof DistributedFileSystem);
DistributedFileSystem dfs = (DistributedFileSystem) fs;
JobClient client = null;
@@ -72,12 +74,12 @@ public void testSocketFactory() throws IOException {
// could we test Client-DataNode connections?
Path filePath = new Path("/dir");
- Assert.assertFalse(directDfs.exists(filePath));
- Assert.assertFalse(dfs.exists(filePath));
+ assertFalse(directDfs.exists(filePath));
+ assertFalse(dfs.exists(filePath));
directDfs.mkdirs(filePath);
- Assert.assertTrue(directDfs.exists(filePath));
- Assert.assertTrue(dfs.exists(filePath));
+ assertTrue(directDfs.exists(filePath));
+ assertTrue(dfs.exists(filePath));
// This will test RPC to a Resource Manager
fs = FileSystem.get(sconf);
@@ -95,7 +97,7 @@ public void testSocketFactory() throws IOException {
client = new JobClient(jconf);
JobStatus[] jobs = client.jobsToComplete();
- Assert.assertTrue(jobs.length == 0);
+ assertTrue(jobs.length == 0);
} finally {
closeClient(client);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
index f16b8a0f18fc2..b3e1998803ae5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
@@ -22,8 +22,8 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
import java.io.File;
import java.io.IOException;
@@ -64,7 +64,7 @@ protected static void setupClassBase(Class<?> testClass) throws Exception {
*
* @throws Exception
*/
- @Before
+ @BeforeEach
public void setUp() throws Exception {
startCluster(true, null);
}
@@ -125,7 +125,7 @@ protected void stopCluster() throws Exception {
*
* @throws Exception
*/
- @After
+ @AfterEach
public void tearDown() throws Exception {
stopCluster();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java
index 277c0fd4b0ac5..ef7b2de903ac0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/HadoopTestCase.java
@@ -21,8 +21,8 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapreduce.MRConfig;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
import java.io.IOException;
@@ -139,7 +139,7 @@ public boolean isLocalFS() {
*
* @throws Exception
*/
- @Before
+ @BeforeEach
public void setUp() throws Exception {
if (localFS) {
fileSystem = FileSystem.getLocal(new JobConf());
@@ -163,7 +163,7 @@ public void setUp() throws Exception {
*
* @throws Exception
*/
- @After
+ @AfterEach
public void tearDown() throws Exception {
try {
if (mrCluster != null) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java
index a71550bce8a44..5d458b0d9d88f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRCaching.java
@@ -32,7 +32,7 @@
import java.net.URI;
-import org.junit.Assert;
+import static org.junit.jupiter.api.Assertions.assertEquals;
public class MRCaching {
static String testStr = "This is a test file " + "used for testing caching "
@@ -299,13 +299,12 @@ private static void validateCacheFileSizes(Configuration job,
String configValues = job.get(configKey, "");
System.out.println(configKey + " -> " + configValues);
String[] realSizes = StringUtils.getStrings(configValues);
- Assert.assertEquals("Number of files for "+ configKey,
- expectedSizes.length, realSizes.length);
+ assertEquals(expectedSizes.length, realSizes.length, "Number of files for "+ configKey);
for (int i=0; i < expectedSizes.length; ++i) {
long actual = Long.valueOf(realSizes[i]);
long expected = expectedSizes[i];
- Assert.assertEquals("File "+ i +" for "+ configKey, expected, actual);
+ assertEquals(expected, actual, "File "+ i +" for "+ configKey);
}
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
index 8acd015ab0987..26feba37a0672 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
@@ -35,11 +35,11 @@
import java.io.IOException;
import java.io.DataOutputStream;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import org.junit.Before;
-import org.junit.After;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
/**
@@ -132,7 +132,7 @@ protected void verifyQuery(String query, String expected)
return;
}
failureCounter++;
- assertTrue("The request (" + query + ") does not contain " + expected, false);
+ assertTrue(false, "The request (" + query + ") does not contain " + expected);
}
}
@@ -149,13 +149,13 @@ protected JobConf createJobConf() {
return conf;
}
- @Before
+ @BeforeEach
public void setUp() throws Exception {
super.setUp();
startHttpServer();
}
- @After
+ @AfterEach
public void tearDown() throws Exception {
stopHttpServer();
NotificationServlet.counter = 0;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
index 1b39583bd729f..b39f9d8d2ef01 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
@@ -38,16 +38,17 @@
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertNotNull;
-@Ignore
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+@Disabled
public class TestBadRecords extends ClusterMapReduceTestCase {
private static final Logger LOG =
@@ -61,7 +62,7 @@ public class TestBadRecords extends ClusterMapReduceTestCase {
private List<String> input;
- @BeforeClass
+ @BeforeAll
public static void setupClass() throws Exception {
setupClassBase(TestBadRecords.class);
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
index f034f34742a6e..2c2066bac7529 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
@@ -146,11 +146,13 @@
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
public class TestClientRedirect {
static {
@@ -200,7 +202,7 @@ public void testRedirect() throws Exception {
org.apache.hadoop.mapreduce.Counters counters =
cluster.getJob(jobID).getCounters();
validateCounters(counters);
- Assert.assertTrue(amContact);
+ assertTrue(amContact);
LOG.info("Sleeping for 5 seconds before stop for" +
" the client socket to not get EOF immediately..");
@@ -218,7 +220,7 @@ public void testRedirect() throws Exception {
// Same client
//results are returned from fake (not started job)
counters = cluster.getJob(jobID).getCounters();
- Assert.assertEquals(0, counters.countCounters());
+ assertEquals(0, counters.countCounters());
Job job = cluster.getJob(jobID);
org.apache.hadoop.mapreduce.TaskID taskId =
new org.apache.hadoop.mapreduce.TaskID(jobID, TaskType.MAP, 0);
@@ -242,7 +244,7 @@ public void testRedirect() throws Exception {
counters = cluster.getJob(jobID).getCounters();
validateCounters(counters);
- Assert.assertTrue(amContact);
+ assertTrue(amContact);
// Stop the AM. It is not even restarting. So it should be treated as
// completed.
@@ -251,7 +253,7 @@ public void testRedirect() throws Exception {
// Same client
counters = cluster.getJob(jobID).getCounters();
validateCounters(counters);
- Assert.assertTrue(hsContact);
+ assertTrue(hsContact);
rmService.stop();
historyService.stop();
@@ -267,7 +269,7 @@ private void validateCounters(org.apache.hadoop.mapreduce.Counters counters) {
LOG.info("Counter is " + itc.next().getDisplayName());
}
}
- Assert.assertEquals(1, counters.countCounters());
+ assertEquals(1, counters.countCounters());
}
class RMService extends AbstractService implements ApplicationClientProtocol {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
index ef460e8d74d47..8344466afdcbf 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
@@ -58,35 +58,36 @@
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.util.Records;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
/**
* Tests for ClientServiceDelegate.java
*/
-
-@RunWith(value = Parameterized.class)
public class TestClientServiceDelegate {
private JobID oldJobId = JobID.forName("job_1315895242400_2");
private org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = TypeConverter
.toYarn(oldJobId);
private boolean isAMReachableFromClient;
- public TestClientServiceDelegate(boolean isAMReachableFromClient) {
- this.isAMReachableFromClient = isAMReachableFromClient;
+ public void initTestClientServiceDelegate(boolean pIsAMReachableFromClient) {
+ this.isAMReachableFromClient = pIsAMReachableFromClient;
}
- @Parameters
public static Collection