@@ -27,22 +27,31 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Seekable;
import org.apache.hadoop.util.CleanerUtil;

import com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@InterfaceAudience.Private
public class CryptoStreamUtils {
private static final int MIN_BUFFER_SIZE = 512;

private static final Logger LOG =
LoggerFactory.getLogger(CryptoStreamUtils.class);

/** Forcibly free the direct buffer. */
public static void freeDB(ByteBuffer buffer) {
if (buffer instanceof sun.nio.ch.DirectBuffer) {
final sun.misc.Cleaner bufferCleaner =
((sun.nio.ch.DirectBuffer) buffer).cleaner();
bufferCleaner.clean();
if (CleanerUtil.UNMAP_SUPPORTED) {
try {
CleanerUtil.getCleaner().freeBuffer(buffer);
} catch (IOException e) {
LOG.info("Failed to free the buffer", e);
}
} else {
LOG.trace(CleanerUtil.UNMAP_NOT_SUPPORTED_REASON);
}
}

/** Read crypto buffer size */
public static int getBufferSize(Configuration conf) {
return conf.getInt(HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY,
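With the change above, freeDB() no longer casts to sun.nio.ch.DirectBuffer, so a caller only needs the public API. A minimal caller sketch, assuming CryptoStreamUtils keeps the public static freeDB(ByteBuffer) signature shown here and lives in the org.apache.hadoop.crypto package (the package declaration is outside this hunk):

import java.nio.ByteBuffer;

import org.apache.hadoop.crypto.CryptoStreamUtils;

public class FreeDBSketch {
  public static void main(String[] args) {
    // A direct buffer, like the ones the crypto streams allocate internally.
    ByteBuffer buffer = ByteBuffer.allocateDirect(8 * 1024);
    // With this patch, freeDB() goes through CleanerUtil instead of
    // sun.misc.Cleaner, so it also works on Java 9+.
    CryptoStreamUtils.freeDB(buffer);
    // The native memory is released here; the buffer must not be used again.
  }
}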
@@ -37,6 +37,7 @@
import org.apache.hadoop.fs.HardLink;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException;
import org.apache.hadoop.util.CleanerUtil;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.PerformanceAdvisory;
@@ -312,7 +313,7 @@ static void mlock(ByteBuffer buffer, long len)
}
mlock_native(buffer, len);
}

/**
* Unmaps the block from memory. See munmap(2).
*
@@ -326,10 +327,14 @@ static void mlock(ByteBuffer buffer, long len)
* @param buffer The buffer to unmap.
*/
public static void munmap(MappedByteBuffer buffer) {
if (buffer instanceof sun.nio.ch.DirectBuffer) {
sun.misc.Cleaner cleaner =
((sun.nio.ch.DirectBuffer)buffer).cleaner();
cleaner.clean();
if (CleanerUtil.UNMAP_SUPPORTED) {
try {
CleanerUtil.getCleaner().freeBuffer(buffer);
} catch (IOException e) {
LOG.info("Failed to unmap the buffer", e);
}
} else {
LOG.trace(CleanerUtil.UNMAP_NOT_SUPPORTED_REASON);
}
}
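munmap() now takes the same CleanerUtil route, so an explicitly mapped region can be released without touching sun.misc or sun.nio.ch. A minimal caller sketch, assuming this is the munmap helper on NativeIO.POSIX (the enclosing class declaration is outside this hunk) and using a placeholder file path:

import java.io.RandomAccessFile;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;

import org.apache.hadoop.io.nativeio.NativeIO;

public class MunmapSketch {
  public static void main(String[] args) throws Exception {
    // "/tmp/example.dat" is just a placeholder for an existing local file.
    try (RandomAccessFile raf = new RandomAccessFile("/tmp/example.dat", "r");
         FileChannel channel = raf.getChannel()) {
      MappedByteBuffer mapped =
          channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
      // Release the mapping eagerly instead of waiting for the buffer to be
      // garbage collected; with this patch that goes through CleanerUtil.
      NativeIO.POSIX.munmap(mapped);
    }
  }
}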

@@ -0,0 +1,205 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;

import java.io.IOException;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Objects;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

import static java.lang.invoke.MethodHandles.constant;
import static java.lang.invoke.MethodHandles.dropArguments;
import static java.lang.invoke.MethodHandles.filterReturnValue;
import static java.lang.invoke.MethodHandles.guardWithTest;
import static java.lang.invoke.MethodType.methodType;

/**
* sun.misc.Cleaner has moved in OpenJDK 9 and
* sun.misc.Unsafe#invokeCleaner(ByteBuffer) is the replacement.
* This class is a hack to use sun.misc.Cleaner in Java 8 and
* use the replacement in Java 9+.
* This implementation is inspired by LUCENE-6989.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public final class CleanerUtil {

// Prevent instantiation
private CleanerUtil(){}

/**
* <code>true</code> if this platform supports unmapping mmapped files.
*/
public static final boolean UNMAP_SUPPORTED;

/**
* If {@link #UNMAP_SUPPORTED} is {@code false}, this contains the reason
* why unmapping is not supported.
*/
public static final String UNMAP_NOT_SUPPORTED_REASON;


private static final BufferCleaner CLEANER;

/**
* Reference to a BufferCleaner that does unmapping.
* @return {@code null} if not supported.
*/
public static BufferCleaner getCleaner() {
return CLEANER;
}

static {
final Object hack = AccessController.doPrivileged(new PrivilegedAction<Object>() {
@Override
public Object run() {
return unmapHackImpl();
}
});
if (hack instanceof BufferCleaner) {
CLEANER = (BufferCleaner) hack;
UNMAP_SUPPORTED = true;
UNMAP_NOT_SUPPORTED_REASON = null;
} else {
CLEANER = null;
UNMAP_SUPPORTED = false;
UNMAP_NOT_SUPPORTED_REASON = hack.toString();
}
}

private static Object unmapHackImpl() {
final MethodHandles.Lookup lookup = MethodHandles.lookup();
try {
try {
// *** sun.misc.Unsafe unmapping (Java 9+) ***
final Class<?> unsafeClass = Class.forName("sun.misc.Unsafe");
// first check if Unsafe has the right method, otherwise we can
// give up without doing any security critical stuff:
final MethodHandle unmapper = lookup.findVirtual(unsafeClass,
"invokeCleaner", methodType(void.class, ByteBuffer.class));
// fetch the unsafe instance and bind it to the virtual MH:
final Field f = unsafeClass.getDeclaredField("theUnsafe");
f.setAccessible(true);
final Object theUnsafe = f.get(null);
return newBufferCleaner(ByteBuffer.class, unmapper.bindTo(theUnsafe));
} catch (SecurityException se) {
// rethrow to report errors correctly (we need to catch it here,
// as we also catch RuntimeException below!):
throw se;
} catch (ReflectiveOperationException | RuntimeException e) {
// *** sun.misc.Cleaner unmapping (Java 8) ***
final Class<?> directBufferClass =
Class.forName("java.nio.DirectByteBuffer");

final Method m = directBufferClass.getMethod("cleaner");
m.setAccessible(true);
final MethodHandle directBufferCleanerMethod = lookup.unreflect(m);
final Class<?> cleanerClass =
directBufferCleanerMethod.type().returnType();

/*
* "Compile" a MethodHandle that basically is equivalent
* to the following code:
*
* void unmapper(ByteBuffer byteBuffer) {
* sun.misc.Cleaner cleaner =
* ((java.nio.DirectByteBuffer) byteBuffer).cleaner();
* if (Objects.nonNull(cleaner)) {
* cleaner.clean();
* } else {
* // the noop is needed because MethodHandles#guardWithTest
* // always needs ELSE
* noop(cleaner);
* }
* }
*/
final MethodHandle cleanMethod = lookup.findVirtual(
cleanerClass, "clean", methodType(void.class));
final MethodHandle nonNullTest = lookup.findStatic(Objects.class,
"nonNull", methodType(boolean.class, Object.class))
.asType(methodType(boolean.class, cleanerClass));
final MethodHandle noop = dropArguments(
constant(Void.class, null).asType(methodType(void.class)),
0, cleanerClass);
final MethodHandle unmapper = filterReturnValue(
directBufferCleanerMethod,
guardWithTest(nonNullTest, cleanMethod, noop))
.asType(methodType(void.class, ByteBuffer.class));
return newBufferCleaner(directBufferClass, unmapper);
}
} catch (SecurityException se) {
return "Unmapping is not supported, because not all required " +
"permissions are given to the Hadoop JAR file: " + se +
" [Please grant at least the following permissions: " +
"RuntimePermission(\"accessClassInPackage.sun.misc\") " +
" and ReflectPermission(\"suppressAccessChecks\")]";
} catch (ReflectiveOperationException | RuntimeException e) {
return "Unmapping is not supported on this platform, " +
"because internal Java APIs are not compatible with " +
"this Hadoop version: " + e;
}
}

private static BufferCleaner newBufferCleaner(
final Class<?> unmappableBufferClass, final MethodHandle unmapper) {
assert Objects.equals(
methodType(void.class, ByteBuffer.class), unmapper.type());
return new BufferCleaner() {
@Override
public void freeBuffer(final ByteBuffer buffer) throws IOException {
if (!buffer.isDirect()) {
throw new IllegalArgumentException("unmapping only works with direct buffers");
}
if (!unmappableBufferClass.isInstance(buffer)) {
throw new IllegalArgumentException("buffer is not an instance of " + unmappableBufferClass.getName());
}
final Throwable error = AccessController.doPrivileged(new PrivilegedAction<Throwable>() {
@Override
public Throwable run() {
try {
unmapper.invokeExact(buffer);
return null;
} catch (Throwable t) {
return t;
}
}
});
if (error != null) {
throw new IOException("Unable to unmap the mapped buffer", error);
}
}
};
}

/**
* Pass in an implementation of this interface to clean up ByteBuffers.
* CleanerUtil implements this to allow unmapping of ByteBuffers
* with private Java APIs.
*/
public interface BufferCleaner {
void freeBuffer(ByteBuffer b) throws IOException;
}
}
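Other call sites can follow the same pattern as freeDB() and munmap(): consult UNMAP_SUPPORTED first, then hand the buffer to the cleaner. A minimal sketch using only the API introduced in this file:

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.util.CleanerUtil;

public class CleanerUtilSketch {
  public static void main(String[] args) {
    ByteBuffer buffer = ByteBuffer.allocateDirect(4096);
    if (CleanerUtil.UNMAP_SUPPORTED) {
      try {
        // Throws IllegalArgumentException for non-direct buffers and wraps
        // any failure from the underlying cleaner in an IOException.
        CleanerUtil.getCleaner().freeBuffer(buffer);
      } catch (IOException e) {
        System.err.println("Failed to free the buffer: " + e);
      }
    } else {
      // Explains why unmapping is unavailable on this JVM.
      System.err.println(CleanerUtil.UNMAP_NOT_SUPPORTED_REASON);
    }
    // After freeBuffer() returns, the backing memory is released and the
    // buffer must not be read or written.
  }
}

Keeping the UNMAP_SUPPORTED check at the call site makes the fallback cheap: when the hack could not be set up, only the cached reason string is logged.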