@@ -69,8 +69,20 @@
 import org.mockito.junit.MockitoJUnitRunner;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.doReturn;
 
 /**
  * Tests the behavior of TaskAttemptListenerImpl.
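Note: the pattern throughout this change is the same in every file. Each wildcard static import is replaced by explicit single-member imports, so the origin of every helper is visible at the call site and the file cannot stop compiling when two star-imported classes later gain members with the same name. A minimal sketch of that failure mode (util.One and util.Two are hypothetical classes for illustration only, not part of Hadoop):

    // util/One.java: hypothetical helper, for illustration only
    package util;
    public class One {
      public static void check(boolean condition) { }
    }

    // util/Two.java: a second hypothetical helper with a clashing member
    package util;
    public class Two {
      public static void check(boolean condition) { }
    }

    // CallerTest.java: breaks once both star imports supply check(boolean)
    import static util.One.*;
    import static util.Two.*;

    public class CallerTest {
      void test() {
        check(true);  // javac: "reference to check is ambiguous"
      }
    }

With an explicit import (import static util.One.check;) the conflict is resolved at the import statement, and an unused helper surfaces as an unused-import warning instead of silently widening the namespace.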
@@ -24,7 +24,12 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.eq;
 
 /**
  * Tests the behavior of YarnChild.
@@ -17,7 +17,8 @@
  */
 package org.apache.hadoop.mapred;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.BufferedReader;
 import java.io.DataOutputStream;
@@ -29,7 +29,8 @@

 import org.junit.Assert;
 import org.junit.Test;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.filecache.DistributedCache;
@@ -22,7 +22,8 @@
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.QueueState;
 import org.apache.hadoop.security.authorize.AccessControlList;
-import static org.apache.hadoop.mapred.QueueManager.*;
+import static org.apache.hadoop.mapred.QueueManager.QUEUE_CONF_FILE_NAME;
+import static org.apache.hadoop.mapred.QueueManager.toFullPropertyName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -26,7 +26,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
@@ -18,14 +18,17 @@

 package org.apache.hadoop.mapred;
 
-import java.io.*;
+import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.compress.*;
+import org.apache.hadoop.io.compress.CompressionCodecFactory;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.SplittableCompressionCodec;
 
 import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;

@@ -32,7 +32,8 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
-import org.apache.hadoop.util.*;
+import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.ReflectionUtils;
 
 /**
  * An {@link OutputFormat} that writes plain text files.
@@ -42,7 +43,7 @@
 public class TextOutputFormat<K, V> extends FileOutputFormat<K, V> {
 
   protected static class LineRecordWriter<K, V>
-    implements RecordWriter<K, V> {
+      implements RecordWriter<K, V> {
     private static final byte[] NEWLINE =
         "\n".getBytes(StandardCharsets.UTF_8);

@@ -75,7 +76,7 @@ private void writeObject(Object o) throws IOException {
     }
 
     public synchronized void write(K key, V value)
-      throws IOException {
+        throws IOException {
 
       boolean nullKey = key == null || key instanceof NullWritable;
       boolean nullValue = value == null || value instanceof NullWritable;
@@ -103,7 +104,7 @@ public RecordWriter<K, V> getRecordWriter(FileSystem ignored,
                                             JobConf job,
                                             String name,
                                             Progressable progress)
-    throws IOException {
+      throws IOException {
     boolean isCompressed = getCompressOutput(job);
     String keyValueSeparator = job.get("mapreduce.output.textoutputformat.separator",
                                        "\t");
@@ -114,12 +115,12 @@ public RecordWriter<K, V> getRecordWriter(FileSystem ignored,
       return new LineRecordWriter<K, V>(fileOut, keyValueSeparator);
     } else {
       Class<? extends CompressionCodec> codecClass =
-        getOutputCompressorClass(job, GzipCodec.class);
+          getOutputCompressorClass(job, GzipCodec.class);
       // create the named codec
       CompressionCodec codec = ReflectionUtils.newInstance(codecClass, job);
       // build the filename including the extension
       Path file =
-        FileOutputFormat.getTaskOutputPath(job,
+          FileOutputFormat.getTaskOutputPath(job,
                                            name + codec.getDefaultExtension());
       FileSystem fs = file.getFileSystem(job);
       FSDataOutputStream fileOut = fs.create(file, progress);
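For context on the hunks above: the compression branch of getRecordWriter() is driven entirely by job configuration. A minimal sketch of a caller that exercises it, using the standard org.apache.hadoop.mapred API; the separator key is the one read in the code above, while the class name and the chosen values are illustrative assumptions, not part of this patch:

    import org.apache.hadoop.io.compress.GzipCodec;
    import org.apache.hadoop.mapred.FileOutputFormat;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.TextOutputFormat;

    public class TextOutputConfigSketch {
      public static JobConf configure() {
        JobConf job = new JobConf();
        job.setOutputFormat(TextOutputFormat.class);
        // Key read by getRecordWriter() above; defaults to "\t".
        job.set("mapreduce.output.textoutputformat.separator", ",");
        // Turns on the isCompressed branch: the writer's stream is wrapped
        // in the codec and its default extension (".gz") is appended.
        FileOutputFormat.setCompressOutput(job, true);
        FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
        return job;
      }
    }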
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.mapreduce;
 
-import org.apache.hadoop.classification.*;
+import org.apache.hadoop.classification.InterfaceAudience;
 
 /**
  * Job related ACLs
@@ -20,7 +20,7 @@

 import org.junit.Test;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 /**
  * test Clock class
  *
@@ -25,7 +25,10 @@

 import org.junit.Test;
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import org.junit.Assert;
 
 import org.apache.hadoop.fs.FileStatus;
@@ -34,7 +34,9 @@

 import org.junit.Before;
 import org.junit.Test;
-import static org.junit.Assert.*;
+import static org.junit.Assert.fail;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
 
 public class TestIndexCache {
   private JobConf conf;
@@ -20,7 +20,11 @@

 import java.util.regex.Pattern;
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertNotNull;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -33,7 +33,8 @@

 import org.junit.After;
 import org.junit.Before;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpServer2;
@@ -29,7 +29,8 @@
 import org.junit.Test;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 
 /**
  * Test deprecated methods
@@ -36,8 +36,12 @@
 import org.junit.Test;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 /**
  * TestCounters checks the sanity and recoverability of Queue
@@ -20,7 +20,10 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.junit.Test;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
 
 /**
  * test SkipBadRecords
@@ -18,8 +18,9 @@

 package org.apache.hadoop.mapreduce;
 
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.assertSame;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 import java.io.IOException;

@@ -35,7 +35,7 @@

 import org.junit.Before;
 import org.junit.Test;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.TaskReport;
@@ -24,7 +24,12 @@
 import org.apache.hadoop.io.DataInputByteBuffer;
 import org.apache.hadoop.io.WritableUtils;
 import org.junit.Test;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
 
 /**
  * Test the {@link TaskID} class.
@@ -25,7 +25,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.map.InverseMapper;
@@ -20,7 +20,9 @@

 import java.io.IOException;
 import java.net.URI;
-import java.util.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Date;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -31,7 +33,11 @@
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
@@ -54,9 +60,9 @@
  * [-totalOrder <i>pcnt</i> <i>num samples</i> <i>max splits</i>]
  * <i>in-dir</i> <i>out-dir</i>
  */
-public class Sort<K,V> extends Configured implements Tool {
+public class Sort<K, V> extends Configured implements Tool {
   public static final String REDUCES_PER_HOST =
-    "mapreduce.sort.reducesperhost";
+      "mapreduce.sort.reducesperhost";
   private Job job = null;
 
   static int printUsage() {
@@ -85,17 +91,17 @@ public int run(String[] args) throws Exception {
     int num_reduces = (int) (cluster.getMaxReduceTasks() * 0.9);
     String sort_reduces = conf.get(REDUCES_PER_HOST);
     if (sort_reduces != null) {
-      num_reduces = cluster.getTaskTrackers() * 
+      num_reduces = cluster.getTaskTrackers() *
                       Integer.parseInt(sort_reduces);
     }
     Class<? extends InputFormat> inputFormatClass =
-      SequenceFileInputFormat.class;
+        SequenceFileInputFormat.class;
     Class<? extends OutputFormat> outputFormatClass =
-      SequenceFileOutputFormat.class;
+        SequenceFileOutputFormat.class;
     Class<? extends WritableComparable> outputKeyClass = BytesWritable.class;
     Class<? extends Writable> outputValueClass = BytesWritable.class;
     List<String> otherArgs = new ArrayList<String>();
-    InputSampler.Sampler<K,V> sampler = null;
+    InputSampler.Sampler<K, V> sampler = null;
     for(int i=0; i < args.length; ++i) {
       try {
         if ("-r".equals(args[i])) {
@@ -116,9 +122,11 @@ public int run(String[] args) throws Exception {
           double pcnt = Double.parseDouble(args[++i]);
           int numSamples = Integer.parseInt(args[++i]);
           int maxSplits = Integer.parseInt(args[++i]);
-          if (0 >= maxSplits) maxSplits = Integer.MAX_VALUE;
+          if (0 >= maxSplits) {
+            maxSplits = Integer.MAX_VALUE;
+          }
           sampler =
-            new InputSampler.RandomSampler<K,V>(pcnt, numSamples, maxSplits);
+            new InputSampler.RandomSampler<K, V>(pcnt, numSamples, maxSplits);
         } else {
           otherArgs.add(args[i]);
         }
@@ -164,7 +172,7 @@ public int run(String[] args) throws Exception {
       inputDir = inputDir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
       Path partitionFile = new Path(inputDir, "_sortPartitioning");
       TotalOrderPartitioner.setPartitionFile(conf, partitionFile);
-      InputSampler.<K,V>writePartitionFile(job, sampler);
+      InputSampler.<K, V>writePartitionFile(job, sampler);
       URI partitionUri = new URI(partitionFile.toString() +
                                  "#" + "_sortPartitioning");
       job.addCacheFile(partitionUri);
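Because Sort implements Tool, the pipeline shown in run() above, including the total-order sampling that writes the _sortPartitioning file, can also be driven programmatically through ToolRunner. A minimal sketch, assuming this is the stock org.apache.hadoop.examples.Sort and using placeholder input/output paths:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.examples.Sort;
    import org.apache.hadoop.util.ToolRunner;

    public class SortDriverSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Read in run() above: num_reduces = taskTrackers * this value.
        conf.set(Sort.REDUCES_PER_HOST, "2");
        // -totalOrder pcnt numSamples maxSplits, as parsed in run() above:
        // sample keys with probability 0.1, at most 10000 samples,
        // drawn from at most 10 input splits.
        int exitCode = ToolRunner.run(conf, new Sort(),
            new String[] {"-totalOrder", "0.1", "10000", "10",
                          "/tmp/sort-in", "/tmp/sort-out"});
        System.exit(exitCode);
      }
    }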