
Commit a6c7518

Merge remote-tracking branch 'upstream/master'

2 parents: 8089c6f + a456477

File tree

36 files changed: +748 additions, -137 deletions

core/src/main/scala/org/apache/spark/SecurityManager.scala

Lines changed: 99 additions & 25 deletions
@@ -50,17 +50,19 @@ import org.apache.spark.util.Utils
  * secure the UI if it has data that other users should not be allowed to see. The javax
  * servlet filter specified by the user can authenticate the user and then once the user
  * is logged in, Spark can compare that user versus the view acls to make sure they are
- * authorized to view the UI. The configs 'spark.acls.enable' and 'spark.ui.view.acls'
- * control the behavior of the acls. Note that the person who started the application
- * always has view access to the UI.
+ * authorized to view the UI. The configs 'spark.acls.enable', 'spark.ui.view.acls' and
+ * 'spark.ui.view.acls.groups' control the behavior of the acls. Note that the person who
+ * started the application always has view access to the UI.
  *
- * Spark has a set of modify acls (`spark.modify.acls`) that controls which users have permission
- * to modify a single application. This would include things like killing the application. By
- * default the person who started the application has modify access. For modify access through
- * the UI, you must have a filter that does authentication in place for the modify acls to work
- * properly.
+ * Spark has a set of individual and group modify acls (`spark.modify.acls`) and
+ * (`spark.modify.acls.groups`) that controls which users and groups have permission to
+ * modify a single application. This would include things like killing the application.
+ * By default the person who started the application has modify access. For modify access
+ * through the UI, you must have a filter that does authentication in place for the modify
+ * acls to work properly.
  *
- * Spark also has a set of admin acls (`spark.admin.acls`) which is a set of users/administrators
+ * Spark also has a set of individual and group admin acls (`spark.admin.acls`) and
+ * (`spark.admin.acls.groups`) which is a set of users/administrators and admin groups
  * who always have permission to view or modify the Spark application.
  *
  * Starting from version 1.3, Spark has partial support for encrypted connections with SSL.
@@ -184,6 +186,9 @@ private[spark] class SecurityManager(sparkConf: SparkConf)
 
   import SecurityManager._
 
+  // allow all users/groups to have view/modify permissions
+  private val WILDCARD_ACL = "*"
+
   private val authOn = sparkConf.getBoolean(SecurityManager.SPARK_AUTH_CONF, false)
   // keep spark.ui.acls.enable for backwards compatibility with 1.0
   private var aclsOn =
@@ -193,24 +198,37 @@ private[spark] class SecurityManager(sparkConf: SparkConf)
   private var adminAcls: Set[String] =
     stringToSet(sparkConf.get("spark.admin.acls", ""))
 
+  // admin group acls should be set before view or modify group acls
+  private var adminAclsGroups : Set[String] =
+    stringToSet(sparkConf.get("spark.admin.acls.groups", ""))
+
   private var viewAcls: Set[String] = _
 
+  private var viewAclsGroups: Set[String] = _
+
   // list of users who have permission to modify the application. This should
   // apply to both UI and CLI for things like killing the application.
   private var modifyAcls: Set[String] = _
 
+  private var modifyAclsGroups: Set[String] = _
+
   // always add the current user and SPARK_USER to the viewAcls
   private val defaultAclUsers = Set[String](System.getProperty("user.name", ""),
     Utils.getCurrentUserName())
 
   setViewAcls(defaultAclUsers, sparkConf.get("spark.ui.view.acls", ""))
   setModifyAcls(defaultAclUsers, sparkConf.get("spark.modify.acls", ""))
 
+  setViewAclsGroups(sparkConf.get("spark.ui.view.acls.groups", ""));
+  setModifyAclsGroups(sparkConf.get("spark.modify.acls.groups", ""));
+
   private val secretKey = generateSecretKey()
   logInfo("SecurityManager: authentication " + (if (authOn) "enabled" else "disabled") +
     "; ui acls " + (if (aclsOn) "enabled" else "disabled") +
-    "; users with view permissions: " + viewAcls.toString() +
-    "; users with modify permissions: " + modifyAcls.toString())
+    "; users with view permissions: " + viewAcls.toString() +
+    "; groups with view permissions: " + viewAclsGroups.toString() +
+    "; users with modify permissions: " + modifyAcls.toString() +
+    "; groups with modify permissions: " + modifyAclsGroups.toString())
 
   // Set our own authenticator to properly negotiate user/password for HTTP connections.
   // This is needed by the HTTP client fetching from the HttpServer. Put here so its
@@ -302,17 +320,34 @@ private[spark] class SecurityManager(sparkConf: SparkConf)
     setViewAcls(Set[String](defaultUser), allowedUsers)
   }
 
+  /**
+   * Admin acls groups should be set before the view or modify acls groups. If you modify the admin
+   * acls groups you should also set the view and modify acls groups again to pick up the changes.
+   */
+  def setViewAclsGroups(allowedUserGroups: String) {
+    viewAclsGroups = (adminAclsGroups ++ stringToSet(allowedUserGroups));
+    logInfo("Changing view acls groups to: " + viewAclsGroups.mkString(","))
+  }
+
   /**
    * Checking the existence of "*" is necessary as YARN can't recognize the "*" in "defaultuser,*"
    */
   def getViewAcls: String = {
-    if (viewAcls.contains("*")) {
-      "*"
+    if (viewAcls.contains(WILDCARD_ACL)) {
+      WILDCARD_ACL
     } else {
       viewAcls.mkString(",")
     }
   }
 
+  def getViewAclsGroups: String = {
+    if (viewAclsGroups.contains(WILDCARD_ACL)) {
+      WILDCARD_ACL
+    } else {
+      viewAclsGroups.mkString(",")
+    }
+  }
+
   /**
    * Admin acls should be set before the view or modify acls. If you modify the admin
    * acls you should also set the view and modify acls again to pick up the changes.
@@ -322,17 +357,34 @@ private[spark] class SecurityManager(sparkConf: SparkConf)
     logInfo("Changing modify acls to: " + modifyAcls.mkString(","))
   }
 
+  /**
+   * Admin acls groups should be set before the view or modify acls groups. If you modify the admin
+   * acls groups you should also set the view and modify acls groups again to pick up the changes.
+   */
+  def setModifyAclsGroups(allowedUserGroups: String) {
+    modifyAclsGroups = (adminAclsGroups ++ stringToSet(allowedUserGroups));
+    logInfo("Changing modify acls groups to: " + modifyAclsGroups.mkString(","))
+  }
+
   /**
    * Checking the existence of "*" is necessary as YARN can't recognize the "*" in "defaultuser,*"
    */
   def getModifyAcls: String = {
-    if (modifyAcls.contains("*")) {
-      "*"
+    if (modifyAcls.contains(WILDCARD_ACL)) {
+      WILDCARD_ACL
     } else {
       modifyAcls.mkString(",")
     }
   }
 
+  def getModifyAclsGroups: String = {
+    if (modifyAclsGroups.contains(WILDCARD_ACL)) {
+      WILDCARD_ACL
+    } else {
+      modifyAclsGroups.mkString(",")
+    }
+  }
+
   /**
    * Admin acls should be set before the view or modify acls. If you modify the admin
    * acls you should also set the view and modify acls again to pick up the changes.
@@ -342,6 +394,15 @@ private[spark] class SecurityManager(sparkConf: SparkConf)
     logInfo("Changing admin acls to: " + adminAcls.mkString(","))
   }
 
+  /**
+   * Admin acls groups should be set before the view or modify acls groups. If you modify the admin
+   * acls groups you should also set the view and modify acls groups again to pick up the changes.
+   */
+  def setAdminAclsGroups(adminUserGroups: String) {
+    adminAclsGroups = stringToSet(adminUserGroups)
+    logInfo("Changing admin acls groups to: " + adminAclsGroups.mkString(","))
+  }
+
   def setAcls(aclSetting: Boolean) {
     aclsOn = aclSetting
     logInfo("Changing acls enabled to: " + aclsOn)
@@ -398,36 +459,49 @@ private[spark] class SecurityManager(sparkConf: SparkConf)
   def aclsEnabled(): Boolean = aclsOn
 
   /**
-   * Checks the given user against the view acl list to see if they have
+   * Checks the given user against the view acl and groups list to see if they have
    * authorization to view the UI. If the UI acls are disabled
    * via spark.acls.enable, all users have view access. If the user is null
-   * it is assumed authentication is off and all users have access.
+   * it is assumed authentication is off and all users have access. Also if any one of the
+   * UI acls or groups specify the WILDCARD(*) then all users have view access.
    *
    * @param user to see if is authorized
    * @return true is the user has permission, otherwise false
    */
   def checkUIViewPermissions(user: String): Boolean = {
     logDebug("user=" + user + " aclsEnabled=" + aclsEnabled() + " viewAcls=" +
-      viewAcls.mkString(","))
-    !aclsEnabled || user == null || viewAcls.contains(user) || viewAcls.contains("*")
+      viewAcls.mkString(",") + " viewAclsGroups=" + viewAclsGroups.mkString(","))
+    if (!aclsEnabled || user == null || viewAcls.contains(user) ||
+        viewAcls.contains(WILDCARD_ACL) || viewAclsGroups.contains(WILDCARD_ACL)) {
+      return true
+    }
+    val currentUserGroups = Utils.getCurrentUserGroups(sparkConf, user)
+    logDebug("userGroups=" + currentUserGroups.mkString(","))
+    viewAclsGroups.exists(currentUserGroups.contains(_))
   }
 
   /**
-   * Checks the given user against the modify acl list to see if they have
-   * authorization to modify the application. If the UI acls are disabled
+   * Checks the given user against the modify acl and groups list to see if they have
+   * authorization to modify the application. If the modify acls are disabled
    * via spark.acls.enable, all users have modify access. If the user is null
-   * it is assumed authentication isn't turned on and all users have access.
+   * it is assumed authentication isn't turned on and all users have access. Also if any one
+   * of the modify acls or groups specify the WILDCARD(*) then all users have modify access.
    *
    * @param user to see if is authorized
    * @return true is the user has permission, otherwise false
    */
   def checkModifyPermissions(user: String): Boolean = {
     logDebug("user=" + user + " aclsEnabled=" + aclsEnabled() + " modifyAcls=" +
-      modifyAcls.mkString(","))
-    !aclsEnabled || user == null || modifyAcls.contains(user) || modifyAcls.contains("*")
+      modifyAcls.mkString(",") + " modifyAclsGroups=" + modifyAclsGroups.mkString(","))
+    if (!aclsEnabled || user == null || modifyAcls.contains(user) ||
+        modifyAcls.contains(WILDCARD_ACL) || modifyAclsGroups.contains(WILDCARD_ACL)) {
+      return true
+    }
+    val currentUserGroups = Utils.getCurrentUserGroups(sparkConf, user)
+    logDebug("userGroups=" + currentUserGroups)
+    modifyAclsGroups.exists(currentUserGroups.contains(_))
   }
 
-
   /**
    * Check to see if authentication for the Spark communication protocols is enabled
    * @return true if authentication is enabled, otherwise false
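
To illustrate how the pieces above fit together, here is a minimal sketch of exercising the new group acls from Spark-internal code (SecurityManager is private[spark]; the user and group names are purely illustrative, and only the configuration keys come from this diff):

import org.apache.spark.{SecurityManager, SparkConf}

val conf = new SparkConf()
  .set("spark.acls.enable", "true")
  .set("spark.ui.view.acls", "alice")              // individual view acls
  .set("spark.ui.view.acls.groups", "dev_team")    // group view acls (new in this change)
  .set("spark.modify.acls.groups", "ops_team")     // group modify acls (new in this change)
  .set("spark.admin.acls.groups", "spark_admins")  // admin groups (new in this change)

val securityManager = new SecurityManager(conf)
// Each check consults the user acls first; on a miss it resolves the caller's groups via the
// configured GroupMappingServiceProvider and intersects them with the group acls.
val canView = securityManager.checkUIViewPermissions("bob")
val canKill = securityManager.checkModifyPermissions("bob")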

core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala

Lines changed: 2 additions & 0 deletions
@@ -245,6 +245,8 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
       ui.getSecurityManager.setAdminAcls(appListener.adminAcls.getOrElse(""))
       ui.getSecurityManager.setViewAcls(attempt.sparkUser,
         appListener.viewAcls.getOrElse(""))
+      ui.getSecurityManager.setAdminAclsGroups(appListener.adminAclsGroups.getOrElse(""))
+      ui.getSecurityManager.setViewAclsGroups(appListener.viewAclsGroups.getOrElse(""))
       LoadedAppUI(ui, updateProbe(appId, attemptId, attempt.fileSize))
     }
   }

core/src/main/scala/org/apache/spark/scheduler/ApplicationEventListener.scala

Lines changed: 4 additions & 0 deletions
@@ -32,6 +32,8 @@ private[spark] class ApplicationEventListener extends SparkListener {
   var endTime: Option[Long] = None
   var viewAcls: Option[String] = None
   var adminAcls: Option[String] = None
+  var viewAclsGroups: Option[String] = None
+  var adminAclsGroups: Option[String] = None
 
   override def onApplicationStart(applicationStart: SparkListenerApplicationStart) {
     appName = Some(applicationStart.appName)
@@ -51,6 +53,8 @@ private[spark] class ApplicationEventListener extends SparkListener {
       val allProperties = environmentDetails("Spark Properties").toMap
       viewAcls = allProperties.get("spark.ui.view.acls")
       adminAcls = allProperties.get("spark.admin.acls")
+      viewAclsGroups = allProperties.get("spark.ui.view.acls.groups")
+      adminAclsGroups = allProperties.get("spark.admin.acls.groups")
     }
   }
 }
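
For context, a hedged sketch of how these new fields would be populated during event-log replay, assuming the hunk above sits inside the listener's onEnvironmentUpdate handler (the event payload below is illustrative; only the property keys come from the diff):

import org.apache.spark.scheduler.{ApplicationEventListener, SparkListenerEnvironmentUpdate}

val listener = new ApplicationEventListener
listener.onEnvironmentUpdate(SparkListenerEnvironmentUpdate(Map(
  "Spark Properties" -> Seq(
    "spark.admin.acls.groups" -> "spark_admins",
    "spark.ui.view.acls.groups" -> "dev_team"))))

// FsHistoryProvider (earlier in this commit) then copies the replayed values onto the
// rebuilt UI's SecurityManager:
//   ui.getSecurityManager.setAdminAclsGroups(listener.adminAclsGroups.getOrElse(""))
//   ui.getSecurityManager.setViewAclsGroups(listener.viewAclsGroups.getOrElse(""))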

core/src/main/scala/org/apache/spark/security/GroupMappingServiceProvider.scala

Lines changed: 38 additions & 0 deletions
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.security
+
+/**
+ * This Spark trait is used for mapping a given userName to a set of groups which it belongs to.
+ * This is useful for specifying a common group of admins/developers to provide them admin, modify
+ * and/or view access rights. Based on whether access control checks are enabled using
+ * spark.acls.enable, every time a user tries to access or modify the application, the
+ * SecurityManager gets the corresponding groups a user belongs to from the instance of the groups
+ * mapping provider specified by the entry spark.user.groups.mapping.
+ */
+
+trait GroupMappingServiceProvider {
+
+  /**
+   * Get the groups the user belongs to.
+   * @param userName User's Name
+   * @return set of groups that the user belongs to. Empty in case of an invalid user.
+   */
+  def getGroups(userName : String) : Set[String]
+
+}
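
Because this trait is the extension point, deployments can plug in their own lookup by setting spark.user.groups.mapping (mentioned in the scaladoc above). A minimal, hypothetical implementation backed by a hard-coded table, for illustration only (package, class, user and group names are invented):

package org.example.security

import org.apache.spark.security.GroupMappingServiceProvider

// Toy provider; a real one would query LDAP, the OS, or some directory service.
class StaticGroupsMappingProvider extends GroupMappingServiceProvider {
  private val table = Map(
    "alice" -> Set("dev_team", "spark_admins"),
    "bob" -> Set("ops_team"))

  // Empty set for unknown users, per the trait contract above.
  override def getGroups(userName: String): Set[String] =
    table.getOrElse(userName, Set.empty[String])
}

Such a class would be selected with spark.user.groups.mapping=org.example.security.StaticGroupsMappingProvider and needs a no-argument constructor, since Utils.getCurrentUserGroups (later in this commit) instantiates it reflectively via newInstance.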

core/src/main/scala/org/apache/spark/security/ShellBasedGroupsMappingProvider.scala

Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.security
+
+import org.apache.spark.internal.Logging
+import org.apache.spark.util.Utils
+
+/**
+ * This class is responsible for getting the groups for a particular user in Unix based
+ * environments. This implementation uses the Unix Shell based id command to fetch the user groups
+ * for the specified user. It does not cache the user groups as the invocations are expected
+ * to be infrequent.
+ */
+
+private[spark] class ShellBasedGroupsMappingProvider extends GroupMappingServiceProvider
+  with Logging {
+
+  override def getGroups(username: String): Set[String] = {
+    val userGroups = getUnixGroups(username)
+    logDebug("User: " + username + " Groups: " + userGroups.mkString(","))
+    userGroups
+  }
+
+  // shells out a "bash -c id -Gn username" to get user groups
+  private def getUnixGroups(username: String): Set[String] = {
+    val cmdSeq = Seq("bash", "-c", "id -Gn " + username)
+    // we need to get rid of the trailing "\n" from the result of command execution
+    Utils.executeAndGetOutput(cmdSeq).stripLineEnd.split(" ").toSet
+  }
+}
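
As a rough illustration of what getUnixGroups does with the shell output (the sample line below is made up):

// `id -Gn alice` prints a single space-separated line of group names, e.g. "alice wheel docker\n".
// The provider strips the trailing newline and splits on spaces:
val sampleOutput = "alice wheel docker\n"
val groups = sampleOutput.stripLineEnd.split(" ").toSet
// groups == Set("alice", "wheel", "docker")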

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 19 additions & 0 deletions
@@ -2181,6 +2181,25 @@ private[spark] object Utils extends Logging {
       .getOrElse(UserGroupInformation.getCurrentUser().getShortUserName())
   }
 
+  val EMPTY_USER_GROUPS = Set[String]()
+
+  // Returns the groups to which the current user belongs.
+  def getCurrentUserGroups(sparkConf: SparkConf, username: String): Set[String] = {
+    val groupProviderClassName = sparkConf.get("spark.user.groups.mapping",
+      "org.apache.spark.security.ShellBasedGroupsMappingProvider")
+    if (groupProviderClassName != "") {
+      try {
+        val groupMappingServiceProvider = classForName(groupProviderClassName).newInstance.
+          asInstanceOf[org.apache.spark.security.GroupMappingServiceProvider]
+        val currentUserGroups = groupMappingServiceProvider.getGroups(username)
+        return currentUserGroups
+      } catch {
+        case e: Exception => logError(s"Error getting groups for user=$username", e)
+      }
+    }
+    EMPTY_USER_GROUPS
+  }
+
   /**
    * Split the comma delimited string of master URLs into a list.
    * For instance, "spark://abc,def" becomes [spark://abc, spark://def].
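
A brief usage sketch of the new helper (the overriding class name is hypothetical; the default provider shown in the diff is ShellBasedGroupsMappingProvider):

import org.apache.spark.SparkConf
import org.apache.spark.util.Utils

val conf = new SparkConf()
// Optional: point spark.user.groups.mapping at any implementation of
// org.apache.spark.security.GroupMappingServiceProvider with a no-arg constructor.
conf.set("spark.user.groups.mapping", "org.example.security.StaticGroupsMappingProvider")

// Loads the provider reflectively; any failure is logged and an empty set is returned.
val groups: Set[String] = Utils.getCurrentUserGroups(conf, "alice")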
