Commit 51e35e3

Remove shutdown hook permission from hdfs plugin (#65016)
(cherry picked from commit c0ba2ec)
1 parent: d4e1add

File tree: 6 files changed, +76 -21 lines changed

gradle/ide.gradle

Lines changed: 7 additions & 1 deletion

@@ -38,6 +38,12 @@ if (System.getProperty('idea.active') == 'true') {
     }
   }

+  tasks.register('buildDependencyArtifacts') {
+    group = 'ide'
+    description = 'Builds artifacts needed as dependency for IDE modules'
+    dependsOn ':plugins:repository-hdfs:hadoop-common:shadowJar'
+  }
+
   idea {
     project {
       vcs = 'Git'
@@ -49,7 +55,7 @@ if (System.getProperty('idea.active') == 'true') {
       testRunner = 'choose_per_test'
     }
     taskTriggers {
-      afterSync tasks.named('configureIdeaGradleJvm')
+      afterSync tasks.named('configureIdeaGradleJvm'), tasks.named('buildDependencyArtifacts')
     }
     codeStyle {
       java {
plugins/repository-hdfs/build.gradle

Lines changed: 1 addition & 16 deletions

@@ -48,7 +48,7 @@ configurations {

 dependencies {
   api "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
-  api "org.apache.hadoop:hadoop-common:${versions.hadoop2}"
+  api project(path: 'hadoop-common', configuration: 'shadow')
   api "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
   api "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
   api "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
@@ -306,25 +306,10 @@ integTestSecureHa {
 thirdPartyAudit {
   ignoreMissingClasses()
   ignoreViolations(
-    // internal java api: sun.net.dns.ResolverConfiguration
-    // internal java api: sun.net.util.IPAddressUtil
-    'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',
-
     // internal java api: sun.misc.Unsafe
     'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
     'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
-    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
-    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
-    'org.apache.hadoop.io.nativeio.NativeIO',
     'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
     'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',
-
-    // internal java api: sun.nio.ch.DirectBuffer
-    // internal java api: sun.misc.Cleaner
-    'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
-    'org.apache.hadoop.crypto.CryptoStreamUtils',
-
-    // internal java api: sun.misc.SignalHandler
-    'org.apache.hadoop.util.SignalLogger$Handler',
   )
 }
plugins/repository-hdfs/hadoop-common/build.gradle

Lines changed: 10 additions & 0 deletions

@@ -0,0 +1,10 @@
+apply plugin: 'elasticsearch.java'
+apply plugin: 'com.github.johnrengelman.shadow'
+
+dependencies {
+  implementation "org.apache.hadoop:hadoop-common:${project.parent.versions.hadoop2}"
+}
+
+tasks.named('shadowJar').configure {
+  exclude 'org/apache/hadoop/util/ShutdownHookManager$*.class'
+}
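Note (not part of the diff): with both the upstream Hadoop artifact and this subproject providing org.apache.hadoop.util.ShutdownHookManager, one can check which copy a classloader actually resolves by printing the class's code source. A minimal sketch; the class name comes from the diff, everything else is illustrative:

import java.security.CodeSource;

// Illustrative sketch: report where ShutdownHookManager was loaded from, to
// confirm the no-op replacement in the shadowed jar wins over Hadoop's copy.
public class WhichShutdownHookManager {
    public static void main(String[] args) throws ClassNotFoundException {
        Class<?> clazz = Class.forName("org.apache.hadoop.util.ShutdownHookManager");
        CodeSource src = clazz.getProtectionDomain().getCodeSource();
        // Expect the shadowed hadoop-common jar here, not the upstream hadoop artifact.
        System.out.println(src == null ? "<bootstrap>" : src.getLocation());
    }
}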
plugins/repository-hdfs/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java

Lines changed: 58 additions & 0 deletions

@@ -0,0 +1,58 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import java.util.concurrent.TimeUnit;
+
+/**
+ * A replacement for the ShutdownHookManager from hadoop.
+ *
+ * This class does not actually add a shutdown hook. Hadoop's shutdown hook
+ * manager does not fail gracefully when it lacks security manager permissions
+ * to add shutdown hooks. This implements the same api as the hadoop class, but
+ * with no-ops.
+ */
+public class ShutdownHookManager {
+    private static final ShutdownHookManager MGR = new ShutdownHookManager();
+
+    public static ShutdownHookManager get() {
+        return MGR;
+    }
+
+    private ShutdownHookManager() {}
+
+    public void addShutdownHook(Runnable shutdownHook, int priority) {}
+
+    public void addShutdownHook(Runnable shutdownHook, int priority, long timeout, TimeUnit unit) {}
+
+    public boolean removeShutdownHook(Runnable shutdownHook) {
+        return false;
+    }
+
+    public boolean hasShutdownHook(Runnable shutdownHook) {
+        return false;
+    }
+
+    public boolean isShutdownInProgress() {
+        return false;
+    }
+
+    public void clearShutdownHooks() {}
+}
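Usage-wise (an illustrative sketch, not part of the commit), any Hadoop code path that registers hooks through this manager now silently does nothing, which is why the "shutdownHooks" runtime permission below can be dropped:

import org.apache.hadoop.util.ShutdownHookManager;

// Illustrative only: the call pattern Hadoop uses internally. With the no-op
// replacement above, no JVM shutdown hook is installed and no
// RuntimePermission("shutdownHooks") check ever fires.
public class ShutdownHookDemo {
    public static void main(String[] args) {
        Runnable cleanup = () -> System.out.println("cleaning up");
        ShutdownHookManager.get().addShutdownHook(cleanup, 10);
        System.out.println(ShutdownHookManager.get().hasShutdownHook(cleanup)); // prints: false
    }
}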

plugins/repository-hdfs/licenses/hadoop-common-2.8.5.jar.sha1

Lines changed: 0 additions & 1 deletion
This file was deleted.

plugins/repository-hdfs/src/main/plugin-metadata/plugin-security.policy

Lines changed: 0 additions & 3 deletions

@@ -33,9 +33,6 @@ grant {
   // org.apache.hadoop.util.StringUtils clinit
   permission java.util.PropertyPermission "*", "read,write";

-  // org.apache.hadoop.util.ShutdownHookManager clinit
-  permission java.lang.RuntimePermission "shutdownHooks";
-
   // JAAS is used by Hadoop for authentication purposes
   // The Hadoop Login JAAS module modifies a Subject's private credentials and principals
   // The Hadoop RPC Layer must be able to read these credentials, and initiate Kerberos connections
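For reference, a minimal sketch (not part of the commit) of the check the deleted grant used to satisfy: when running under a SecurityManager, the JDK requires RuntimePermission("shutdownHooks") to install a shutdown hook, which Hadoop's real ShutdownHookManager trips during class initialization:

// Sketch of the permission check the removed grant covered. Under a
// SecurityManager without RuntimePermission("shutdownHooks"), addShutdownHook
// throws SecurityException -- the failure mode Hadoop's real
// ShutdownHookManager does not handle gracefully.
public class ShutdownHookPermissionDemo {
    public static void main(String[] args) {
        try {
            Runtime.getRuntime().addShutdownHook(new Thread(() -> {}));
            System.out.println("hook registered (permission granted)");
        } catch (SecurityException e) {
            System.out.println("denied: " + e.getMessage());
        }
    }
}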
