Commit 66493fa

Generic code cleanup
1 parent e55839b commit 66493fa

File tree

13 files changed: 31 additions & 34 deletions

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java

Lines changed: 0 additions & 1 deletion
@@ -49,7 +49,6 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicInteger;


 public class AMRunner {

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/ReservationClientUtil.java

Lines changed: 1 addition & 3 deletions
@@ -70,9 +70,7 @@ public static ReservationSubmissionRequest createMRReservation(
         deadline, reservationRequests, name);

     // outermost request
-    ReservationSubmissionRequest request = ReservationSubmissionRequest
+    return ReservationSubmissionRequest
         .newInstance(resDef, queueName, reservationId);
-
-    return request;
   }
 }
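
A note on the pattern: the request local existed only to be returned by the next statement, so the cleanup returns the factory call directly. A minimal self-contained sketch (Box is a hypothetical stand-in for ReservationSubmissionRequest):

class Box {
  private final String name;

  private Box(String name) {
    this.name = name;
  }

  static Box newInstance(String name) {
    return new Box(name);
  }

  // Before: a temporary that exists only to be returned.
  static Box makeVerbose(String name) {
    Box box = Box.newInstance(name);
    return box;
  }

  // After: return the factory call directly; behavior is identical.
  static Box makeDirect(String name) {
    return Box.newInstance(name);
  }
}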

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java

Lines changed: 7 additions & 5 deletions
@@ -25,6 +25,7 @@
 import java.io.OutputStreamWriter;
 import java.io.Reader;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
@@ -54,7 +55,7 @@ public class RumenToSLSConverter {
   private static Map<String, Set<String>> rackNodeMap =
       new TreeMap<String, Set<String>>();

-  public static void main(String args[]) throws Exception {
+  public static void main(String[] args) throws Exception {
     Options options = new Options();
     options.addOption("input", true, "input rumen json file");
     options.addOption("outputJobs", true, "output jobs file");
@@ -121,9 +122,10 @@ public static void main(String args[]) throws Exception {
   private static void generateSLSLoadFile(String inputFile, String outputFile)
       throws IOException {
     try (Reader input =
-        new InputStreamReader(new FileInputStream(inputFile), "UTF-8")) {
+        new InputStreamReader(new FileInputStream(inputFile),
+            StandardCharsets.UTF_8)) {
       try (Writer output =
-          new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
+          new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) {
         ObjectMapper mapper = new ObjectMapper();
         ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
         Iterator<Map> i = mapper.readValues(
@@ -140,7 +142,7 @@ private static void generateSLSLoadFile(String inputFile, String outputFile)
   private static void generateSLSNodeFile(String outputFile)
       throws IOException {
     try (Writer output =
-        new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
+        new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) {
       ObjectMapper mapper = new ObjectMapper();
       ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
       for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
@@ -218,7 +220,7 @@ private static List createSLSTasks(String taskType,
       task.put("container.priority", priority);
       task.put("container.type", taskType);
       array.add(task);
-      String rackHost[] = SLSUtils.getRackHostName(hostname);
+      String[] rackHost = SLSUtils.getRackHostName(hostname);
       if (rackNodeMap.containsKey(rackHost[0])) {
         rackNodeMap.get(rackHost[0]).add(rackHost[1]);
       } else {
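
Why the charset constant beats the string name: the String overloads of InputStreamReader and OutputStreamWriter look the encoding up at runtime and declare the checked UnsupportedEncodingException, while the Charset overloads are resolved at compile time and cannot fail. A minimal sketch of the difference (input.json is a hypothetical file name):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;

public class CharsetDemo {
  public static void main(String[] args) throws IOException {
    // String name: a typo like "UFT-8" still compiles and only
    // fails at runtime with UnsupportedEncodingException.
    try (Reader r = new InputStreamReader(
        new FileInputStream("input.json"), "UTF-8")) {
      System.out.println(r.read());
    }

    // Charset constant: no string to misspell and no encoding
    // exception to declare or swallow.
    try (Reader r = new InputStreamReader(
        new FileInputStream("input.json"), StandardCharsets.UTF_8)) {
      System.out.println(r.read());
    }
  }
}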

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NMSimulator.java

Lines changed: 2 additions & 2 deletions
@@ -85,7 +85,7 @@ public void init(String nodeIdStr, Resource nodeResource, int dispatchTime,
     super.init(dispatchTime, dispatchTime + 1000000L * heartBeatInterval,
         heartBeatInterval);
     // create resource
-    String rackHostName[] = SLSUtils.getRackHostName(nodeIdStr);
+    String[] rackHostName = SLSUtils.getRackHostName(nodeIdStr);
     this.node = NodeInfo.newNodeInfo(rackHostName[0], rackHostName[1],
         Resources.clone(nodeResource));
     this.rm = pRm;
@@ -128,7 +128,7 @@ public void firstStep() {
   @Override
   public void middleStep() throws Exception {
     // we check the lifetime for each running containers
-    ContainerSimulator cs = null;
+    ContainerSimulator cs;
     synchronized(completedContainerList) {
       while ((cs = containerQueue.poll()) != null) {
         runningContainers.remove(cs.getId());
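
Dropping the = null initializer on cs is safe because the while condition assigns the variable before any use. Java's definite-assignment analysis accepts this, and leaving the initializer off means the compiler will flag any future code path that reads the variable before assigning it. A standalone sketch of the same queue-drain pattern:

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

public class DrainDemo {
  public static void main(String[] args) {
    Queue<String> containerQueue = new ConcurrentLinkedQueue<>();
    containerQueue.add("container_1");
    containerQueue.add("container_2");

    String cs;  // no "= null": the loop condition assigns it first
    while ((cs = containerQueue.poll()) != null) {
      System.out.println("removing " + cs);
    }
  }
}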

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/nodemanager/NodeInfo.java

Lines changed: 2 additions & 3 deletions
@@ -258,9 +258,8 @@ public static RMNode newNodeInfo(String rackName, String hostName,
       final Resource resource, int port) {
     final NodeId nodeId = newNodeID(hostName, port);
     final String nodeAddr = hostName + ":" + port;
-    final String httpAddress = hostName;
-
-    return new FakeRMNodeImpl(nodeId, nodeAddr, httpAddress,
+
+    return new FakeRMNodeImpl(nodeId, nodeAddr, hostName,
         resource, rackName, "Me good",
         port, hostName, null);
   }

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/resourcemanager/MockAMLauncher.java

Lines changed: 0 additions & 1 deletion
@@ -37,7 +37,6 @@
 import org.apache.hadoop.yarn.sls.SLSRunner;
 import org.apache.hadoop.yarn.sls.appmaster.AMSimulator;

-import java.util.Map;

 public class MockAMLauncher extends ApplicationMasterLauncher
     implements EventHandler<AMLauncherEvent> {

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/SchedulerMetrics.java

Lines changed: 7 additions & 7 deletions
@@ -20,6 +20,7 @@

 import java.io.BufferedWriter;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -184,7 +185,7 @@ void init(ResourceScheduler resourceScheduler, Configuration config)
     // application running information
     jobRuntimeLogBW =
         new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
-            metricsOutputDir + "/jobruntime.csv"), "UTF-8"));
+            metricsOutputDir + "/jobruntime.csv"), StandardCharsets.UTF_8));
     jobRuntimeLogBW.write("JobID,real_start_time,real_end_time," +
         "simulate_start_time,simulate_end_time" + EOL);
     jobRuntimeLogBW.flush();
@@ -560,7 +561,7 @@ class MetricsLogRunnable implements Runnable {
       try {
         metricsLogBW =
             new BufferedWriter(new OutputStreamWriter(new FileOutputStream(
-                metricsOutputDir + "/realtimetrack.json"), "UTF-8"));
+                metricsOutputDir + "/realtimetrack.json"), StandardCharsets.UTF_8));
         metricsLogBW.write("[");
       } catch (IOException e) {
         LOG.info(e.getMessage());
@@ -717,11 +718,10 @@ public void addAMRuntime(ApplicationId appId, long traceStartTimeMS,
       long traceEndTimeMS, long simulateStartTimeMS, long simulateEndTimeMS) {
     try {
       // write job runtime information
-      StringBuilder sb = new StringBuilder();
-      sb.append(appId).append(",").append(traceStartTimeMS).append(",")
-          .append(traceEndTimeMS).append(",").append(simulateStartTimeMS)
-          .append(",").append(simulateEndTimeMS);
-      jobRuntimeLogBW.write(sb.toString() + EOL);
+      String runtimeInfo = appId + "," + traceStartTimeMS + "," +
+          traceEndTimeMS + "," + simulateStartTimeMS +
+          "," + simulateEndTimeMS;
+      jobRuntimeLogBW.write(runtimeInfo + EOL);
       jobRuntimeLogBW.flush();
     } catch (IOException e) {
       LOG.info(e.getMessage());
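
The manual StringBuilder bought nothing here: javac compiles a single concatenation expression into an equivalent builder chain (and on Java 9+ into an invokedynamic call to StringConcatFactory), so the one-expression form is just as efficient and easier to read. Explicit builders pay off only when the string is assembled across statements or loop iterations. A minimal sketch with hypothetical sample values:

public class ConcatCsvDemo {
  public static void main(String[] args) {
    String appId = "application_1234567890_0001";
    long start = 100L;
    long end = 200L;

    // Manual builder, as the old code had it.
    StringBuilder sb = new StringBuilder();
    sb.append(appId).append(",").append(start).append(",").append(end);
    String viaBuilder = sb.toString();

    // One concatenation expression; the compiler generates the
    // equivalent of the builder chain above.
    String viaConcat = appId + "," + start + "," + end;

    System.out.println(viaBuilder.equals(viaConcat));  // true
  }
}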

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthJob.java

Lines changed: 2 additions & 2 deletions
@@ -184,9 +184,9 @@ protected SynthJob(JDKRandomGenerator rand, Configuration conf,
       int num = task.count.getInt();
       String taskType = task.type;
       long memory = task.max_memory.getLong();
-      memory = memory < MIN_MEMORY ? MIN_MEMORY: memory;
+      memory = Math.max(memory, MIN_MEMORY);
       long vcores = task.max_vcores.getLong();
-      vcores = vcores < MIN_VCORES ? MIN_VCORES : vcores;
+      vcores = Math.max(vcores, MIN_VCORES);
       int priority = task.priority;
       ExecutionType executionType = task.executionType == null
           ? ExecutionType.GUARANTEED
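
Math.max makes the intent (clamp to a floor) explicit, where the ternary requires the reader to parse the comparison. A minimal sketch with hypothetical floor constants:

public class ClampDemo {
  private static final long MIN_MEMORY = 1024;  // hypothetical floor, in MB
  private static final long MIN_VCORES = 1;     // hypothetical floor

  public static void main(String[] args) {
    long memory = 512;
    long vcores = 0;

    // Before: memory = memory < MIN_MEMORY ? MIN_MEMORY : memory;
    // After: the same floor, stated directly.
    memory = Math.max(memory, MIN_MEMORY);
    vcores = Math.max(vcores, MIN_VCORES);

    System.out.println(memory + " " + vcores);  // prints: 1024 1
  }
}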

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java

Lines changed: 1 addition & 2 deletions
@@ -310,7 +310,6 @@ public void init(JDKRandomGenerator random){

     // Initialize job weights
     job_weights = new ArrayList<>();
-    job_weights = new ArrayList<>();
     for(JobDefinition j : job_classes){
       job_weights.add(j.class_weight);
     }
@@ -638,7 +637,7 @@ public String getString(){
   public String toString(){
     switch(mode){
       case CONST:
-        return "value: " + Double.toString(val);
+        return "value: " + val;
       case DIST:
         return "value: " + this.val + " std: " + this.std + " dist: "
             + this.dist.name();
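
The explicit Double.toString call was redundant: when one operand of + is a String, Java converts the other with String.valueOf, which for a double delegates to Double.toString, so the two forms print identically. A minimal check:

public class ConcatDemo {
  public static void main(String[] args) {
    double val = 2.5;
    String explicit = "value: " + Double.toString(val);
    String implicit = "value: " + val;  // same conversion, done by +
    System.out.println(explicit.equals(implicit));  // true
  }
}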

hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java

Lines changed: 3 additions & 3 deletions
@@ -85,7 +85,7 @@ public static Set<NodeDetails> parseNodesFromRumenTrace(
     JobTraceReader reader = new JobTraceReader(
         new Path(fin.getAbsolutePath()), conf);
     try {
-      LoggedJob job = null;
+      LoggedJob job;
       while ((job = reader.getNext()) != null) {
         for(LoggedTask mapTask : job.getMapTasks()) {
           // select the last attempt
@@ -123,7 +123,7 @@ public static Set<NodeDetails> parseNodesFromSLSTrace(
     JsonFactory jsonF = new JsonFactory();
     ObjectMapper mapper = new ObjectMapper();
     Reader input =
-        new InputStreamReader(new FileInputStream(jobTrace), "UTF-8");
+        new InputStreamReader(new FileInputStream(jobTrace), StandardCharsets.UTF_8);
     try {
       Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
       while (i.hasNext()) {
@@ -170,7 +170,7 @@ public static Set<NodeDetails> parseNodesFromNodeFile(
     JsonFactory jsonF = new JsonFactory();
     ObjectMapper mapper = new ObjectMapper();
     Reader input =
-        new InputStreamReader(new FileInputStream(nodeFile), "UTF-8");
+        new InputStreamReader(new FileInputStream(nodeFile), StandardCharsets.UTF_8);
     try {
       Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
       while (i.hasNext()) {
