diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
index b2d2a8d100ff1..97e281ba85ea2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
@@ -33,7 +33,7 @@ public class TestAvroSerialization {
@Test
public void testSpecific() throws Exception {
AvroRecord before = new AvroRecord();
- before.intField = 5;
+ before.setIntField(5);
AvroRecord after = SerializationTestUtil.testSerialization(conf, before);
assertEquals(before, after);
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java
index 66f378123986d..aa70bbff62490 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java
@@ -31,8 +31,8 @@ public class JobQueueChangeEvent implements HistoryEvent {
private JobQueueChange datum = new JobQueueChange();
public JobQueueChangeEvent(JobID id, String queueName) {
- datum.jobid = new Utf8(id.toString());
- datum.jobQueueName = new Utf8(queueName);
+ datum.setJobid(new Utf8(id.toString()));
+ datum.setJobQueueName(new Utf8(queueName));
}
JobQueueChangeEvent() { }
@@ -54,13 +54,13 @@ public void setDatum(Object datum) {
/** Get the Job ID */
public JobID getJobId() {
- return JobID.forName(datum.jobid.toString());
+ return JobID.forName(datum.getJobid().toString());
}
/** Get the new Job queue name */
public String getJobQueueName() {
- if (datum.jobQueueName != null) {
- return datum.jobQueueName.toString();
+ if (datum.getJobQueueName() != null) {
+ return datum.getJobQueueName().toString();
}
return null;
}
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 3086e74969981..05abb0fa0c5c3 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -63,7 +63,7 @@
file:///dev/urandom
- <avro.version>1.9.2</avro.version>
+ <avro.version>1.11.3</avro.version>
1.19.4
@@ -108,7 +108,7 @@
3.0.5
3.4.0
- <guava.version>27.0-jre</guava.version>
+ <guava.version>32.0.1-jre</guava.version>
4.2.3
1.70
@@ -151,7 +151,7 @@
0.5.1
1.5.3
- 3.5.1
+ 3.16.3
1.10.0
1.7.0
@@ -169,7 +169,7 @@
-Xmx2048m -XX:+HeapDumpOnOutOfMemoryError
- <maven-surefire-plugin.version>3.0.0-M1</maven-surefire-plugin.version>
+ <maven-surefire-plugin.version>3.2.5</maven-surefire-plugin.version>
${maven-surefire-plugin.version}
${maven-surefire-plugin.version}
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
index 1213e6a46f449..603b248f6e848 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
@@ -460,7 +460,7 @@ private void processTaskFinishedEvent(TaskFinishedEvent event) {
}
task.setFinishTime(event.getFinishTime());
task.setTaskStatus(getPre21Value(event.getTaskStatus()));
- task.incorporateCounters(((TaskFinished) event.getDatum()).counters);
+ task.incorporateCounters(((TaskFinished) event.getDatum()).getCounters());
}
private void processTaskFailedEvent(TaskFailedEvent event) {
@@ -472,7 +472,7 @@ private void processTaskFailedEvent(TaskFailedEvent event) {
task.setFinishTime(event.getFinishTime());
task.setTaskStatus(getPre21Value(event.getTaskStatus()));
TaskFailed t = (TaskFailed)(event.getDatum());
- task.putDiagnosticInfo(t.error.toString());
+ task.putDiagnosticInfo(t.getError().toString());
// killed task wouldn't have failed attempt.
if (t.getFailedDueToAttempt() != null) {
task.putFailedDueToAttemptId(t.getFailedDueToAttempt().toString());
@@ -542,7 +542,7 @@ private void processTaskAttemptFinishedEvent(TaskAttemptFinishedEvent event) {
}
attempt.setFinishTime(event.getFinishTime());
attempt
- .incorporateCounters(((TaskAttemptFinished) event.getDatum()).counters);
+ .incorporateCounters(((TaskAttemptFinished) event.getDatum()).getCounters());
}
private void processReduceAttemptFinishedEvent(
@@ -568,7 +568,7 @@ private void processReduceAttemptFinishedEvent(
attempt.setShuffleFinished(event.getShuffleFinishTime());
attempt.setSortFinished(event.getSortFinishTime());
attempt
- .incorporateCounters(((ReduceAttemptFinished) event.getDatum()).counters);
+ .incorporateCounters(((ReduceAttemptFinished) event.getDatum()).getCounters());
attempt.arraySetClockSplits(event.getClockSplits());
attempt.arraySetCpuUsages(event.getCpuUsages());
attempt.arraySetVMemKbytes(event.getVMemKbytes());
@@ -596,7 +596,7 @@ private void processMapAttemptFinishedEvent(MapAttemptFinishedEvent event) {
// is redundant, but making this will add future-proofing.
attempt.setFinishTime(event.getFinishTime());
attempt
- .incorporateCounters(((MapAttemptFinished) event.getDatum()).counters);
+ .incorporateCounters(((MapAttemptFinished) event.getDatum()).getCounters());
attempt.arraySetClockSplits(event.getClockSplits());
attempt.arraySetCpuUsages(event.getCpuUsages());
attempt.arraySetVMemKbytes(event.getVMemKbytes());
@@ -661,11 +661,11 @@ private void processJobFinishedEvent(JobFinishedEvent event) {
JobFinished job = (JobFinished)event.getDatum();
Map<String, Long> countersMap =
- JobHistoryUtils.extractCounters(job.totalCounters);
+ JobHistoryUtils.extractCounters(job.getTotalCounters());
result.putTotalCounters(countersMap);
- countersMap = JobHistoryUtils.extractCounters(job.mapCounters);
+ countersMap = JobHistoryUtils.extractCounters(job.getMapCounters());
result.putMapCounters(countersMap);
- countersMap = JobHistoryUtils.extractCounters(job.reduceCounters);
+ countersMap = JobHistoryUtils.extractCounters(job.getReduceCounters());
result.putReduceCounters(countersMap);
}
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java
index 6ae87bbd40a11..34ef95f337858 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java
@@ -157,9 +157,9 @@ static boolean isJobConfXml(String fileName) {
static Map<String, Long> extractCounters(JhCounters counters) {
Map<String, Long> countersMap = new HashMap<String, Long>();
if (counters != null) {
- for (JhCounterGroup group : counters.groups) {
- for (JhCounter counter : group.counts) {
- countersMap.put(counter.name.toString(), counter.value);
+ for (JhCounterGroup group : counters.getGroups()) {
+ for (JhCounter counter : group.getCounts()) {
+ countersMap.put(counter.getName().toString(), counter.getValue());
}
}
}
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
index 4ae33a76617fb..2308e586900bb 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
@@ -268,11 +268,11 @@ private static void incorporateCounter(SetField thunk, JhCounters counters,
String counterName) {
counterName = canonicalizeCounterName(counterName);
- for (JhCounterGroup group : counters.groups) {
- for (JhCounter counter : group.counts) {
+ for (JhCounterGroup group : counters.getGroups()) {
+ for (JhCounter counter : group.getCounts()) {
if (counterName
- .equals(canonicalizeCounterName(counter.name.toString()))) {
- thunk.set(counter.value);
+ .equals(canonicalizeCounterName(counter.getName().toString()))) {
+ thunk.set(counter.getValue());
return;
}
}
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
index 5c6abd372c081..fae53b2926c44 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
@@ -636,11 +636,11 @@ private static void incorporateCounter(SetField thunk, JhCounters counters,
String counterName) {
counterName = canonicalizeCounterName(counterName);
- for (JhCounterGroup group : counters.groups) {
- for (JhCounter counter : group.counts) {
+ for (JhCounterGroup group : counters.getGroups()) {
+ for (JhCounter counter : group.getCounts()) {
if (counterName
- .equals(canonicalizeCounterName(counter.name.toString()))) {
- thunk.set(counter.value);
+ .equals(canonicalizeCounterName(counter.getName().toString()))) {
+ thunk.set(counter.getValue());
return;
}
}
@@ -769,4 +769,4 @@ public void deepCompare(DeepCompare comparand, TreePath loc)
compare1(vMemKbytes, other.vMemKbytes, loc, "vMemKbytes");
compare1(physMemKbytes, other.physMemKbytes, loc, "physMemKbytes");
}
-}
\ No newline at end of file
+}
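
Note: a minimal sketch (not part of the patch) of the access pattern the Java changes above migrate to. With Avro 1.9+ the generated specific-record classes default to private fields, so callers go through the generated getters/setters or builder instead of writing fields directly; the class and package names below are taken from the test touched in this patch, and the sketch assumes the usual Avro-generated accessor and builder methods.

import org.apache.hadoop.io.serializer.avro.AvroRecord;

public class AvroAccessorSketch {
  public static void main(String[] args) {
    // Setter/getter access replaces the old direct field write
    // (before.intField = 5), which no longer compiles once the
    // generated field is private.
    AvroRecord rec = new AvroRecord();
    rec.setIntField(5);
    System.out.println(rec.getIntField());

    // The generated builder is an equivalent alternative.
    AvroRecord built = AvroRecord.newBuilder().setIntField(5).build();
    System.out.println(built.equals(rec));
  }
}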