17 changes: 16 additions & 1 deletion CHANGES.txt
@@ -1,6 +1,6 @@
Hadoop Change Log

Trunk (unreleased changes)
Release 0.22.0 - Unreleased

INCOMPATIBLE CHANGES

@@ -41,6 +41,9 @@ Trunk (unreleased changes)
HADOOP-7013. Add boolean field isCorrupt to BlockLocation.
(Patrick Kling via hairong)

HADOOP-6978. Adds support for NativeIO using JNI.
(Todd Lipcon, Devaraj Das & Owen O'Malley via ddas)

IMPROVEMENTS

HADOOP-6644. util.Shell getGROUPS_FOR_USER_COMMAND method name
@@ -190,6 +193,9 @@ Trunk (unreleased changes)
HADOOP-6884. Add LOG.isDebugEnabled() guard for each LOG.debug(..).
(Erik Steffl via szetszwo)

HADOOP-6683. ZlibCompressor does not fully utilize the buffer.
(Kang Xiao via eli)

BUG FIXES

HADOOP-6638. try to relogin in a case of failed RPC connection (expired
@@ -337,6 +343,9 @@ Trunk (unreleased changes)
HADOOP-6496. HttpServer sends wrong content-type for CSS files
(and others). (Todd Lipcon via tomwhite)

HADOOP-7057. IOUtils.readFully and IOUtils.skipFully have typo in
exception creation's message. (cos)

Release 0.21.1 - Unreleased

IMPROVEMENTS
@@ -364,6 +373,12 @@ Release 0.21.1 - Unreleased
HADOOP-6954. Sources JARs are not correctly published to the Maven
repository. (tomwhite)

HADOOP-7052. misspelling of threshold in conf/log4j.properties.
(Jingguo Yao via eli)

HADOOP-7053. wrong FSNamesystem Audit logging setting in
conf/log4j.properties. (Jingguo Yao via eli)

Release 0.21.0 - 2010-08-13

INCOMPATIBLE CHANGES
11 changes: 10 additions & 1 deletion build.xml
@@ -96,7 +96,7 @@
<property name="test.all.tests.file" value="${test.src.dir}/all-tests"/>

<property name="javadoc.link.java"
value="http://java.sun.com/javase/6/docs/api/"/>
value="http://download.oracle.com/javase/6/docs/api"/>
<property name="javadoc.packages" value="org.apache.hadoop.*"/>
<property name="javadoc.maxmemory" value="512m" />

@@ -366,6 +366,7 @@

<mkdir dir="${build.native}/lib"/>
<mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
<mkdir dir="${build.native}/src/org/apache/hadoop/io/nativeio"/>
<mkdir dir="${build.native}/src/org/apache/hadoop/security"/>

<javah
@@ -386,6 +387,14 @@
>
<class name="org.apache.hadoop.security.JniBasedUnixGroupsMapping" />
</javah>
<javah
classpath="${build.classes}"
destdir="${build.native}/src/org/apache/hadoop/io/nativeio"
force="yes"
verbose="yes"
>
<class name="org.apache.hadoop.io.nativeio.NativeIO" />
</javah>

<exec dir="${build.native}" executable="sh" failonerror="true">
<env key="OS_NAME" value="${os.name}"/>
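Note: the new <javah> task generates a JNI header for org.apache.hadoop.io.nativeio.NativeIO from the compiled class, so that class must declare native methods for javah to turn into C prototypes. A minimal sketch of such a class follows; the method name is illustrative, since the actual NativeIO declarations are not visible in this diff.

```java
package org.apache.hadoop.io.nativeio;

import java.io.IOException;

// Sketch only: the real NativeIO methods are not shown in this diff;
// open() below is an illustrative native declaration that javah would
// turn into a C prototype in the generated header file.
public class NativeIO {
  static {
    // Assumes the native library built under ${build.native} is on
    // java.library.path; Hadoop's actual loading logic may differ.
    System.loadLibrary("hadoop");
  }

  /** Hypothetical native call backed by the generated JNI header. */
  public static native void open(String path, int flags, int mode)
      throws IOException;
}
```

The Ant task is roughly equivalent to running `javah -classpath build/classes -d build/native/src/org/apache/hadoop/io/nativeio org.apache.hadoop.io.nativeio.NativeIO` by hand.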
6 changes: 3 additions & 3 deletions conf/log4j.properties
@@ -17,7 +17,7 @@ hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
log4j.rootLogger=${hadoop.root.logger}, EventCounter

# Logging Threshold
log4j.threshhold=ALL
log4j.threshold=ALL

#
# Daily Rolling File Appender
@@ -100,13 +100,13 @@ log4j.category.SecurityLogger=INFO,DRFAS
# FSNamesystem Audit logging
# All audit events are logged at INFO level
#
log4j.logger.org.apache.hadoop.fs.FSNamesystem.audit=WARN
log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=WARN

# Custom Logging levels

#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
#log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=DEBUG

# Jets3t library
log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
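Note: both property fixes matter because log4j only honors the correctly spelled log4j.threshold key, and because a log4j category only takes effect when it exactly matches the logger name used in code. For the audit log that name is the fully qualified FSNamesystem class name plus an ".audit" suffix, and FSNamesystem lives in the hdfs.server.namenode package. A hedged sketch of how such an audit logger is typically obtained (the message format below is illustrative):

```java
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

// Sketch only: shows why the log4j category must use the
// org.apache.hadoop.hdfs.server.namenode prefix -- the logger is keyed
// by the class's fully qualified name plus ".audit".
public class AuditLogSketch {
  private static final Log AUDIT_LOG = LogFactory.getLog(
      "org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit");

  static void logAuditEvent(String user, String cmd, String src) {
    if (AUDIT_LOG.isInfoEnabled()) {   // audit events are logged at INFO
      AUDIT_LOG.info("ugi=" + user + "\tcmd=" + cmd + "\tsrc=" + src);
    }
  }
}
```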
16 changes: 16 additions & 0 deletions ivy.xml
@@ -269,6 +269,22 @@
<exclude module="jetty"/>
<exclude module="slf4j-simple"/>
</dependency>
<dependency org="com.google.protobuf"
name="protobuf-java"
rev="${protobuf.version}"
conf="common->default"/>
<dependency org="org.apache.hadoop"
name="libthrift"
rev="${thrift.version}"
conf="common->default">
<exclude module="servlet-api"/>
<exclude module="slf4j-api"/>
<exclude module="slf4j-log4j12"/>
</dependency>
<dependency org="org.yaml"
name="snakeyaml"
rev="${snakeyaml.version}"
conf="common->default"/>
<dependency org="org.codehaus.jackson"
name="jackson-mapper-asl"
rev="${jackson.version}"
15 changes: 15 additions & 0 deletions ivy/hadoop-common-template.xml
@@ -118,6 +118,21 @@
<artifactId>oro</artifactId>
<version>2.0.8</version>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>2.3.0</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>libthrift</artifactId>
<version>0.5.0.0</version>
</dependency>
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.7</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>avro</artifactId>
4 changes: 4 additions & 0 deletions ivy/ivysettings.xml
@@ -30,13 +30,17 @@
<resolvers>
<!--ibiblio resolvers-->
<ibiblio name="maven2" root="${repo.maven.org}" m2compatible="true"/>
<ibiblio name="apache"
root="https://repository.apache.org/content/repositories/releases"
m2compatible="true"/>

<filesystem name="fs" m2compatible="true" force="true">
<artifact pattern="${repo.dir}/[organisation]/[module]/[revision]/[module]-[revision].[ext]"/>
<ivy pattern="${repo.dir}/[organisation]/[module]/[revision]/[module]-[revision].pom"/>
</filesystem>

<chain name="default" dual="true">
<resolver ref="apache"/>
<resolver ref="maven2"/>
</chain>

5 changes: 5 additions & 0 deletions ivy/libraries.properties
@@ -62,13 +62,18 @@ mina-core.version=2.0.0-M5

oro.version=2.0.8

protobuf.version=2.3.0

rats-lib.version=0.6

servlet.version=4.0.6
servlet-api-2.5.version=6.1.14
servlet-api.version=2.5
slf4j-api.version=1.5.11
slf4j-log4j12.version=1.5.11
snakeyaml.version=1.7

thrift.version=0.5.0.0

wagon-http.version=1.0-beta-2

4 changes: 2 additions & 2 deletions src/java/core-default.xml
@@ -155,8 +155,8 @@
</property>

<property>
<name>io.serializations</name>
<value>org.apache.hadoop.io.serializer.WritableSerialization,org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization,org.apache.hadoop.io.serializer.avro.AvroReflectSerialization</value>
<name>hadoop.serializations</name>
<value>org.apache.hadoop.io.serial.lib.WritableSerialization,org.apache.hadoop.io.serial.lib.protobuf.ProtoBufSerialization,org.apache.hadoop.io.serial.lib.thrift.ThriftSerialization,org.apache.hadoop.io.serial.lib.avro.AvroSerialization,org.apache.hadoop.io.serial.lib.CompatibilitySerialization</value>
<description>A list of serialization classes that can be used for
obtaining serializers and deserializers.</description>
</property>
@@ -138,9 +138,11 @@ public class CommonConfigurationKeysPublic {
public static final String IO_SORT_FACTOR_KEY = "io.sort.factor";
/** Default value for IO_SORT_FACTOR_DEFAULT */
public static final int IO_SORT_FACTOR_DEFAULT = 100;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
/** Defines the list of the deprecated serializations. */
public static final String IO_SERIALIZATIONS_KEY = "io.serializations";

/** Defines the list of serializations */
public static final String HADOOP_SERIALIZATIONS_KEY = "hadoop.serializations";

/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String TFILE_IO_CHUNK_SIZE_KEY = "tfile.io.chunk.size";
/** Default value for TFILE_IO_CHUNK_SIZE_DEFAULT */
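Note: the renamed hadoop.serializations key (io.serializations is kept only for the deprecated path) holds an ordered, comma-separated list of serialization class names, with the new default shown in core-default.xml above. A minimal sketch of reading that list through the existing Configuration API; the class name here is illustrative.

```java
import org.apache.hadoop.conf.Configuration;

// Sketch only: dumps the serialization classes configured under the new
// hadoop.serializations key; Configuration loads core-default.xml (and
// any core-site.xml overrides) automatically.
public class ListSerializations {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    String[] serializations =
        conf.getStrings("hadoop.serializations", new String[0]);
    for (String className : serializations) {
      System.out.println(className);
    }
  }
}
```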
27 changes: 19 additions & 8 deletions src/java/org/apache/hadoop/io/ArrayFile.java
@@ -40,15 +40,15 @@ public static class Writer extends MapFile.Writer {

/** Create the named file for values of the named class. */
public Writer(Configuration conf, FileSystem fs,
String file, Class<? extends Writable> valClass)
String file, Class<?> valClass)
throws IOException {
super(conf, new Path(file), keyClass(LongWritable.class),
valueClass(valClass));
}

/** Create the named file for values of the named class. */
public Writer(Configuration conf, FileSystem fs,
String file, Class<? extends Writable> valClass,
String file, Class<?> valClass,
CompressionType compress, Progressable progress)
throws IOException {
super(conf, new Path(file),
@@ -59,7 +59,7 @@ public Writer(Configuration conf, FileSystem fs,
}

/** Append a value to the file. */
public synchronized void append(Writable value) throws IOException {
public synchronized void append(Object value) throws IOException {
super.append(count, value); // add to map
count.set(count.get()+1); // increment count
}
@@ -81,20 +81,31 @@ public synchronized void seek(long n) throws IOException {
seek(key);
}

/** Read and return the next value in the file. */
@Deprecated
public synchronized Writable next(Writable value) throws IOException {
return next(key, value) ? value : null;
return (Writable) next((Object) value);
}

/** Read and return the next value in the file. */
public synchronized Object next(Object value) throws IOException {
key = (LongWritable) nextKey(key);
return key == null? null : getCurrentValue(value);
}

/** Returns the key associated with the most recent call to {@link
* #seek(long)}, {@link #next(Writable)}, or {@link
* #get(long,Writable)}. */
* #seek(long)}, {@link #next(Object)}, or {@link
* #get(long,Object)}. */
public synchronized long key() throws IOException {
return key.get();
}

@Deprecated
public synchronized Writable get(long n, Writable value) throws IOException{
return (Writable) get(n, (Object) value);
}

/** Return the <code>n</code>th value in the file. */
public synchronized Writable get(long n, Writable value)
public synchronized Object get(long n, Object value)
throws IOException {
key.set(n);
return get(key, value);
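Note: with Writer and Reader generalized from Writable to Object (the old Writable overloads remain as deprecated shims), existing Writable values still work through the new signatures. A hedged usage sketch against the API shown above; the Reader constructor and the local path are assumptions, not part of this patch.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.ArrayFile;
import org.apache.hadoop.io.Text;

// Sketch only: writes values through the generalized append(Object) and
// reads them back with next(Object), which returns null at end of file.
public class ArrayFileSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    String file = "/tmp/example.array";            // illustrative path

    ArrayFile.Writer writer =
        new ArrayFile.Writer(conf, fs, file, Text.class);
    try {
      writer.append(new Text("first"));            // stored under key 0
      writer.append(new Text("second"));           // stored under key 1
    } finally {
      writer.close();
    }

    ArrayFile.Reader reader = new ArrayFile.Reader(fs, file, conf);
    try {
      Object value = new Text();
      while ((value = reader.next(value)) != null) {
        System.out.println(reader.key() + "\t" + value);
      }
    } finally {
      reader.close();
    }
  }
}
```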