Commit d9e4cf6

HeartSaVioR authored and Marcelo Vanzin committed
[SPARK-26482][CORE] Use ConfigEntry for hardcoded configs for ui categories
## What changes were proposed in this pull request?

This PR makes the hardcoded configs below use `ConfigEntry`:

* spark.ui
* spark.ssl
* spark.authenticate
* spark.master.rest
* spark.master.ui
* spark.metrics
* spark.admin
* spark.modify.acl

This patch doesn't change configs which are not relevant to SparkConf (e.g. system properties).

## How was this patch tested?

Existing tests.

Closes apache#23423 from HeartSaVioR/SPARK-26466.

Authored-by: Jungtaek Lim (HeartSaVioR) <[email protected]>
Signed-off-by: Marcelo Vanzin <[email protected]>
1 parent 51a6ba0 commit d9e4cf6
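For context on the pattern being applied: a `ConfigEntry` binds a config key to a type and a default, so call sites read typed values instead of parsing raw strings. Below is a minimal sketch of how such entries are declared with Spark's internal `ConfigBuilder` (the builder is `private[spark]`, so real declarations live under `org.apache.spark.internal.config`; the defaults shown are illustrative, not copied from the patch):

```scala
package org.apache.spark.internal.config

// Sketch only: illustrates the three entry shapes this commit leans on.
object UIConfigSketch {
  // Boolean entry with a default: conf.get(UI_ENABLED) returns Boolean.
  val UI_ENABLED = ConfigBuilder("spark.ui.enabled")
    .booleanConf
    .createWithDefault(true)

  // Comma-separated list entry: conf.get(ADMIN_ACLS) returns Seq[String],
  // which is why SecurityManager can drop its stringToSet helper below.
  val ADMIN_ACLS = ConfigBuilder("spark.admin.acls")
    .stringConf
    .toSequence
    .createWithDefault(Nil)

  // Optional entry with no default: conf.get(UI_REVERSE_PROXY_URL)
  // returns Option[String].
  val UI_REVERSE_PROXY_URL = ConfigBuilder("spark.ui.reverseProxyUrl")
    .stringConf
    .createOptional
}
```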

60 files changed: +496 −305 lines changed

core/src/main/scala/org/apache/spark/SecurityManager.scala

+22 −32

@@ -29,6 +29,7 @@ import org.apache.hadoop.security.{Credentials, UserGroupInformation}
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config._
+import org.apache.spark.internal.config.UI._
 import org.apache.spark.launcher.SparkLauncher
 import org.apache.spark.network.sasl.SecretKeyHolder
 import org.apache.spark.util.Utils
@@ -56,17 +57,13 @@ private[spark] class SecurityManager(
   private val WILDCARD_ACL = "*"
 
   private val authOn = sparkConf.get(NETWORK_AUTH_ENABLED)
-  // keep spark.ui.acls.enable for backwards compatibility with 1.0
-  private var aclsOn =
-    sparkConf.getBoolean("spark.acls.enable", sparkConf.getBoolean("spark.ui.acls.enable", false))
+  private var aclsOn = sparkConf.get(ACLS_ENABLE)
 
   // admin acls should be set before view or modify acls
-  private var adminAcls: Set[String] =
-    stringToSet(sparkConf.get("spark.admin.acls", ""))
+  private var adminAcls: Set[String] = sparkConf.get(ADMIN_ACLS).toSet
 
   // admin group acls should be set before view or modify group acls
-  private var adminAclsGroups : Set[String] =
-    stringToSet(sparkConf.get("spark.admin.acls.groups", ""))
+  private var adminAclsGroups: Set[String] = sparkConf.get(ADMIN_ACLS_GROUPS).toSet
 
   private var viewAcls: Set[String] = _
 
@@ -82,11 +79,11 @@ private[spark] class SecurityManager(
   private val defaultAclUsers = Set[String](System.getProperty("user.name", ""),
     Utils.getCurrentUserName())
 
-  setViewAcls(defaultAclUsers, sparkConf.get("spark.ui.view.acls", ""))
-  setModifyAcls(defaultAclUsers, sparkConf.get("spark.modify.acls", ""))
+  setViewAcls(defaultAclUsers, sparkConf.get(UI_VIEW_ACLS))
+  setModifyAcls(defaultAclUsers, sparkConf.get(MODIFY_ACLS))
 
-  setViewAclsGroups(sparkConf.get("spark.ui.view.acls.groups", ""));
-  setModifyAclsGroups(sparkConf.get("spark.modify.acls.groups", ""));
+  setViewAclsGroups(sparkConf.get(UI_VIEW_ACLS_GROUPS))
+  setModifyAclsGroups(sparkConf.get(MODIFY_ACLS_GROUPS))
 
   private var secretKey: String = _
   logInfo("SecurityManager: authentication " + (if (authOn) "enabled" else "disabled") +
@@ -127,32 +124,25 @@ private[spark] class SecurityManager(
     opts
   }
 
-  /**
-   * Split a comma separated String, filter out any empty items, and return a Set of strings
-   */
-  private def stringToSet(list: String): Set[String] = {
-    list.split(',').map(_.trim).filter(!_.isEmpty).toSet
-  }
-
   /**
    * Admin acls should be set before the view or modify acls. If you modify the admin
    * acls you should also set the view and modify acls again to pick up the changes.
    */
-  def setViewAcls(defaultUsers: Set[String], allowedUsers: String) {
-    viewAcls = (adminAcls ++ defaultUsers ++ stringToSet(allowedUsers))
+  def setViewAcls(defaultUsers: Set[String], allowedUsers: Seq[String]) {
+    viewAcls = adminAcls ++ defaultUsers ++ allowedUsers
     logInfo("Changing view acls to: " + viewAcls.mkString(","))
   }
 
-  def setViewAcls(defaultUser: String, allowedUsers: String) {
+  def setViewAcls(defaultUser: String, allowedUsers: Seq[String]) {
     setViewAcls(Set[String](defaultUser), allowedUsers)
   }
 
   /**
    * Admin acls groups should be set before the view or modify acls groups. If you modify the admin
    * acls groups you should also set the view and modify acls groups again to pick up the changes.
    */
-  def setViewAclsGroups(allowedUserGroups: String) {
-    viewAclsGroups = (adminAclsGroups ++ stringToSet(allowedUserGroups));
+  def setViewAclsGroups(allowedUserGroups: Seq[String]) {
+    viewAclsGroups = adminAclsGroups ++ allowedUserGroups
     logInfo("Changing view acls groups to: " + viewAclsGroups.mkString(","))
   }
 
@@ -179,17 +169,17 @@ private[spark] class SecurityManager(
    * Admin acls should be set before the view or modify acls. If you modify the admin
    * acls you should also set the view and modify acls again to pick up the changes.
    */
-  def setModifyAcls(defaultUsers: Set[String], allowedUsers: String) {
-    modifyAcls = (adminAcls ++ defaultUsers ++ stringToSet(allowedUsers))
+  def setModifyAcls(defaultUsers: Set[String], allowedUsers: Seq[String]) {
+    modifyAcls = adminAcls ++ defaultUsers ++ allowedUsers
     logInfo("Changing modify acls to: " + modifyAcls.mkString(","))
   }
 
   /**
    * Admin acls groups should be set before the view or modify acls groups. If you modify the admin
    * acls groups you should also set the view and modify acls groups again to pick up the changes.
    */
-  def setModifyAclsGroups(allowedUserGroups: String) {
-    modifyAclsGroups = (adminAclsGroups ++ stringToSet(allowedUserGroups));
+  def setModifyAclsGroups(allowedUserGroups: Seq[String]) {
+    modifyAclsGroups = adminAclsGroups ++ allowedUserGroups
     logInfo("Changing modify acls groups to: " + modifyAclsGroups.mkString(","))
   }
 
@@ -216,17 +206,17 @@ private[spark] class SecurityManager(
    * Admin acls should be set before the view or modify acls. If you modify the admin
    * acls you should also set the view and modify acls again to pick up the changes.
    */
-  def setAdminAcls(adminUsers: String) {
-    adminAcls = stringToSet(adminUsers)
+  def setAdminAcls(adminUsers: Seq[String]) {
+    adminAcls = adminUsers.toSet
     logInfo("Changing admin acls to: " + adminAcls.mkString(","))
   }
 
   /**
    * Admin acls groups should be set before the view or modify acls groups. If you modify the admin
    * acls groups you should also set the view and modify acls groups again to pick up the changes.
    */
-  def setAdminAclsGroups(adminUserGroups: String) {
-    adminAclsGroups = stringToSet(adminUserGroups)
+  def setAdminAclsGroups(adminUserGroups: Seq[String]) {
+    adminAclsGroups = adminUserGroups.toSet
     logInfo("Changing admin acls groups to: " + adminAclsGroups.mkString(","))
   }
 
@@ -416,7 +406,7 @@ private[spark] object SecurityManager {
 
   val k8sRegex = "k8s.*".r
   val SPARK_AUTH_CONF = NETWORK_AUTH_ENABLED.key
-  val SPARK_AUTH_SECRET_CONF = "spark.authenticate.secret"
+  val SPARK_AUTH_SECRET_CONF = AUTH_SECRET.key
   // This is used to set auth secret to an executor's env variable. It should have the same
   // value as SPARK_AUTH_SECRET_CONF set in SparkConf
   val ENV_AUTH_SECRET = "_SPARK_AUTH_SECRET"
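To see the call-site effect of the `Seq[String]` entries, here is a small hedged sketch; the typed `get`/`set` overloads on `SparkConf` are `private[spark]`, so this only compiles inside the `org.apache.spark` package, and the package and values below are made up for illustration:

```scala
package org.apache.spark.example // hypothetical package, chosen only for private[spark] visibility

import org.apache.spark.SparkConf
import org.apache.spark.internal.config.UI._

object AclSketch {
  def main(args: Array[String]): Unit = {
    // Typed setter: pass a Seq[String] directly, no comma-joining by hand.
    val conf = new SparkConf(loadDefaults = false).set(UI_VIEW_ACLS, Seq("alice", "bob"))

    // Typed getter: a Seq[String] comes back already split and trimmed,
    // which is what lets SecurityManager delete its stringToSet helper.
    val viewAcls: Seq[String] = conf.get(UI_VIEW_ACLS)
    assert(viewAcls == Seq("alice", "bob"))
  }
}
```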

core/src/main/scala/org/apache/spark/SparkContext.scala

+3 −2

@@ -46,6 +46,7 @@ import org.apache.spark.input.{FixedLengthBinaryInputFormat, PortableDataStream,
 import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config._
 import org.apache.spark.internal.config.Tests._
+import org.apache.spark.internal.config.UI._
 import org.apache.spark.io.CompressionCodec
 import org.apache.spark.partial.{ApproximateEvaluator, PartialResult}
 import org.apache.spark.rdd._
@@ -440,7 +441,7 @@ class SparkContext(config: SparkConf) extends Logging {
     }
 
     _ui =
-      if (conf.getBoolean("spark.ui.enabled", true)) {
+      if (conf.get(UI_ENABLED)) {
         Some(SparkUI.create(Some(this), _statusStore, _conf, _env.securityManager, appName, "",
           startTime))
       } else {
@@ -510,7 +511,7 @@ class SparkContext(config: SparkConf) extends Logging {
     _applicationId = _taskScheduler.applicationId()
     _applicationAttemptId = taskScheduler.applicationAttemptId()
     _conf.set("spark.app.id", _applicationId)
-    if (_conf.getBoolean("spark.ui.reverseProxy", false)) {
+    if (_conf.get(UI_REVERSE_PROXY)) {
       System.setProperty("spark.ui.proxyBase", "/proxy/" + _applicationId)
     }
     _ui.foreach(_.setAppId(_applicationId))

core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala

+2 −2

@@ -51,8 +51,8 @@ class LocalSparkCluster(
 
   // Disable REST server on Master in this mode unless otherwise specified
   val _conf = conf.clone()
-    .setIfMissing("spark.master.rest.enabled", "false")
-    .set(config.SHUFFLE_SERVICE_ENABLED.key, "false")
+    .setIfMissing(config.MASTER_REST_SERVER_ENABLED, false)
+    .set(config.SHUFFLE_SERVICE_ENABLED, false)
 
   /* Start the Master */
   val (rpcEnv, webUiPort, _) = Master.startRpcEnvAndEndpoint(localHostname, 0, 0, _conf)

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

+1 −0

@@ -50,6 +50,7 @@ import org.apache.spark.api.r.RUtils
 import org.apache.spark.deploy.rest._
 import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config._
+import org.apache.spark.internal.config.UI._
 import org.apache.spark.launcher.SparkLauncher
 import org.apache.spark.util._
 

core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala

+30 −17

@@ -42,10 +42,11 @@ import org.fusesource.leveldbjni.internal.NativeDB
 import org.apache.spark.{SecurityManager, SparkConf, SparkException}
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.internal.Logging
-import org.apache.spark.internal.config.{DRIVER_LOG_DFS_DIR, History}
+import org.apache.spark.internal.config._
 import org.apache.spark.internal.config.History._
 import org.apache.spark.internal.config.Status._
 import org.apache.spark.internal.config.Tests.IS_TESTING
+import org.apache.spark.internal.config.UI._
 import org.apache.spark.io.CompressionCodec
 import org.apache.spark.scheduler._
 import org.apache.spark.scheduler.ReplayListenerBus._
@@ -105,12 +106,12 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
 
   private val logDir = conf.get(History.HISTORY_LOG_DIR)
 
-  private val HISTORY_UI_ACLS_ENABLE = conf.get(History.UI_ACLS_ENABLE)
-  private val HISTORY_UI_ADMIN_ACLS = conf.get(History.UI_ADMIN_ACLS)
-  private val HISTORY_UI_ADMIN_ACLS_GROUPS = conf.get(History.UI_ADMIN_ACLS_GROUPS)
-  logInfo(s"History server ui acls " + (if (HISTORY_UI_ACLS_ENABLE) "enabled" else "disabled") +
-    "; users with admin permissions: " + HISTORY_UI_ADMIN_ACLS.toString +
-    "; groups with admin permissions" + HISTORY_UI_ADMIN_ACLS_GROUPS.toString)
+  private val historyUiAclsEnable = conf.get(History.HISTORY_SERVER_UI_ACLS_ENABLE)
+  private val historyUiAdminAcls = conf.get(History.HISTORY_SERVER_UI_ADMIN_ACLS)
+  private val historyUiAdminAclsGroups = conf.get(History.HISTORY_SERVER_UI_ADMIN_ACLS_GROUPS)
+  logInfo(s"History server ui acls " + (if (historyUiAclsEnable) "enabled" else "disabled") +
+    "; users with admin permissions: " + historyUiAdminAcls.mkString(",") +
+    "; groups with admin permissions" + historyUiAdminAclsGroups.mkString(","))
 
   private val hadoopConf = SparkHadoopUtil.get.newConfiguration(conf)
   // Visible for testing
@@ -314,6 +315,13 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
 
   override def getLastUpdatedTime(): Long = lastScanTime.get()
 
+  /**
+   * Split a comma separated String, filter out any empty items, and return a Sequence of strings
+   */
+  private def stringToSeq(list: String): Seq[String] = {
+    list.split(',').map(_.trim).filter(!_.isEmpty)
+  }
+
   override def getAppUI(appId: String, attemptId: Option[String]): Option[LoadedAppUI] = {
     val app = try {
       load(appId)
@@ -330,13 +338,13 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
     val conf = this.conf.clone()
     val secManager = new SecurityManager(conf)
 
-    secManager.setAcls(HISTORY_UI_ACLS_ENABLE)
+    secManager.setAcls(historyUiAclsEnable)
     // make sure to set admin acls before view acls so they are properly picked up
-    secManager.setAdminAcls(HISTORY_UI_ADMIN_ACLS + "," + attempt.adminAcls.getOrElse(""))
-    secManager.setViewAcls(attempt.info.sparkUser, attempt.viewAcls.getOrElse(""))
-    secManager.setAdminAclsGroups(HISTORY_UI_ADMIN_ACLS_GROUPS + "," +
-      attempt.adminAclsGroups.getOrElse(""))
-    secManager.setViewAclsGroups(attempt.viewAclsGroups.getOrElse(""))
+    secManager.setAdminAcls(historyUiAdminAcls ++ stringToSeq(attempt.adminAcls.getOrElse("")))
+    secManager.setViewAcls(attempt.info.sparkUser, stringToSeq(attempt.viewAcls.getOrElse("")))
+    secManager.setAdminAclsGroups(historyUiAdminAclsGroups ++
+      stringToSeq(attempt.adminAclsGroups.getOrElse("")))
+    secManager.setViewAclsGroups(stringToSeq(attempt.viewAclsGroups.getOrElse("")))
 
     val kvstore = try {
       diskManager match {
@@ -1187,11 +1195,16 @@ private[history] class AppListingListener(
     // Only parse the first env update, since any future changes don't have any effect on
     // the ACLs set for the UI.
     if (!gotEnvUpdate) {
+      def emptyStringToNone(strOption: Option[String]): Option[String] = strOption match {
+        case Some("") => None
+        case _ => strOption
+      }
+
       val allProperties = event.environmentDetails("Spark Properties").toMap
-      attempt.viewAcls = allProperties.get("spark.ui.view.acls")
-      attempt.adminAcls = allProperties.get("spark.admin.acls")
-      attempt.viewAclsGroups = allProperties.get("spark.ui.view.acls.groups")
-      attempt.adminAclsGroups = allProperties.get("spark.admin.acls.groups")
+      attempt.viewAcls = emptyStringToNone(allProperties.get(UI_VIEW_ACLS.key))
+      attempt.adminAcls = emptyStringToNone(allProperties.get(ADMIN_ACLS.key))
+      attempt.viewAclsGroups = emptyStringToNone(allProperties.get(UI_VIEW_ACLS_GROUPS.key))
+      attempt.adminAclsGroups = emptyStringToNone(allProperties.get(ADMIN_ACLS_GROUPS.key))
 
       gotEnvUpdate = true
       checkProgress()
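The two small helpers introduced here carry the compatibility burden: old event logs store ACLs as raw comma-separated strings (possibly empty), while the history server's own ACLs now arrive as `Seq[String]`. A self-contained sketch of how they compose (the values are illustrative):

```scala
object AclMergeSketch {
  // Mirrors FsHistoryProvider.stringToSeq: split a legacy comma-separated
  // ACL string from an event log, trimming and dropping empty items.
  def stringToSeq(list: String): Seq[String] =
    list.split(',').map(_.trim).filter(_.nonEmpty)

  // Mirrors the local emptyStringToNone helper: an ACL property logged as ""
  // should behave like an absent property, not a one-entry ACL list.
  def emptyStringToNone(strOption: Option[String]): Option[String] =
    strOption match {
      case Some("") => None
      case other => other
    }

  def main(args: Array[String]): Unit = {
    val historyUiAdminAcls = Seq("admin1")     // typed value from the config entry
    val attemptAdminAcls = Some("alice, bob,") // raw string from the event log
    val merged = historyUiAdminAcls ++ stringToSeq(attemptAdminAcls.getOrElse(""))
    assert(merged == Seq("admin1", "alice", "bob"))
    assert(emptyStringToNone(Some("")).isEmpty)
  }
}
```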

core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala

+5 −5

@@ -31,6 +31,7 @@ import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config._
 import org.apache.spark.internal.config.History
+import org.apache.spark.internal.config.UI._
 import org.apache.spark.status.api.v1.{ApiRootResource, ApplicationInfo, UIRoot}
 import org.apache.spark.ui.{SparkUI, UIUtils, WebUI}
 import org.apache.spark.ui.JettyUtils._
@@ -302,11 +303,10 @@ object HistoryServer extends Logging {
       config.set(SecurityManager.SPARK_AUTH_CONF, "false")
     }
 
-    if (config.getBoolean("spark.acls.enable", config.getBoolean("spark.ui.acls.enable", false))) {
-      logInfo("Either spark.acls.enable or spark.ui.acls.enable is configured, clearing it and " +
-        "only using spark.history.ui.acl.enable")
-      config.set("spark.acls.enable", "false")
-      config.set("spark.ui.acls.enable", "false")
+    if (config.get(ACLS_ENABLE)) {
+      logInfo(s"${ACLS_ENABLE.key} is configured, " +
+        s"clearing it and only using ${History.HISTORY_SERVER_UI_ACLS_ENABLE.key}")
+      config.set(ACLS_ENABLE, false)
     }
 
     new SecurityManager(config)

core/src/main/scala/org/apache/spark/deploy/master/Master.scala

+6 −4

@@ -33,6 +33,8 @@ import org.apache.spark.deploy.master.MasterMessages._
 import org.apache.spark.deploy.master.ui.MasterWebUI
 import org.apache.spark.deploy.rest.StandaloneRestServer
 import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config._
+import org.apache.spark.internal.config.UI._
 import org.apache.spark.metrics.MetricsSystem
 import org.apache.spark.rpc._
 import org.apache.spark.serializer.{JavaSerializer, Serializer}
@@ -115,13 +117,13 @@ private[deploy] class Master(
 
   // Default maxCores for applications that don't specify it (i.e. pass Int.MaxValue)
   private val defaultCores = conf.getInt("spark.deploy.defaultCores", Int.MaxValue)
-  val reverseProxy = conf.getBoolean("spark.ui.reverseProxy", false)
+  val reverseProxy = conf.get(UI_REVERSE_PROXY)
   if (defaultCores < 1) {
     throw new SparkException("spark.deploy.defaultCores must be positive")
   }
 
   // Alternative application submission gateway that is stable across Spark versions
-  private val restServerEnabled = conf.getBoolean("spark.master.rest.enabled", false)
+  private val restServerEnabled = conf.get(MASTER_REST_SERVER_ENABLED)
   private var restServer: Option[StandaloneRestServer] = None
   private var restServerBoundPort: Option[Int] = None
 
@@ -140,7 +142,7 @@ private[deploy] class Master(
     webUi.bind()
     masterWebUiUrl = "http://" + masterPublicAddress + ":" + webUi.boundPort
     if (reverseProxy) {
-      masterWebUiUrl = conf.get("spark.ui.reverseProxyUrl", masterWebUiUrl)
+      masterWebUiUrl = conf.get(UI_REVERSE_PROXY_URL).orElse(Some(masterWebUiUrl)).get
      webUi.addProxy()
       logInfo(s"Spark Master is acting as a reverse proxy. Master, Workers and " +
         s"Applications UIs are available at $masterWebUiUrl")
@@ -152,7 +154,7 @@ private[deploy] class Master(
     }, 0, WORKER_TIMEOUT_MS, TimeUnit.MILLISECONDS)
 
     if (restServerEnabled) {
-      val port = conf.getInt("spark.master.rest.port", 6066)
+      val port = conf.get(MASTER_REST_SERVER_PORT)
       restServer = Some(new StandaloneRestServer(address.host, port, conf, self, masterUrl))
     }
     restServerBoundPort = restServer.map(_.start())
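One subtlety above: `UI_REVERSE_PROXY_URL` is built with `createOptional`, so `conf.get` returns `Option[String]` rather than a value with a default, and the `.orElse(Some(masterWebUiUrl)).get` chain is an explicit spelling of `getOrElse`. A tiny self-contained sketch with stand-in values:

```scala
object OptionalEntrySketch {
  def main(args: Array[String]): Unit = {
    // Stand-in for conf.get(UI_REVERSE_PROXY_URL), which yields Option[String]
    // because the entry has no default.
    val fromConf: Option[String] = None
    val defaultUrl = "http://master-host:8080" // stand-in for masterWebUiUrl

    val viaOrElse = fromConf.orElse(Some(defaultUrl)).get // pattern used in the diff
    val viaGetOrElse = fromConf.getOrElse(defaultUrl)     // equivalent, simpler
    assert(viaOrElse == viaGetOrElse)
  }
}
```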

core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala

+3 −2

@@ -21,6 +21,7 @@ import scala.annotation.tailrec
 
 import org.apache.spark.SparkConf
 import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config.MASTER_UI_PORT
 import org.apache.spark.util.{IntParam, Utils}
 
 /**
@@ -53,8 +54,8 @@ private[master] class MasterArguments(args: Array[String], conf: SparkConf) exte
   // This mutates the SparkConf, so all accesses to it must be made after this line
   propertiesFile = Utils.loadDefaultSparkProperties(conf, propertiesFile)
 
-  if (conf.contains("spark.master.ui.port")) {
-    webUiPort = conf.get("spark.master.ui.port").toInt
+  if (conf.contains(MASTER_UI_PORT.key)) {
+    webUiPort = conf.get(MASTER_UI_PORT)
   }
 
   @tailrec
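Worth noting in this hunk: the entry's raw key stays available via `.key` for presence checks, while `conf.get(MASTER_UI_PORT)` already returns an `Int`, removing the manual `.toInt`. A self-contained sketch of that shape using a stand-in entry type (the real `MASTER_UI_PORT` is an internal `ConfigEntry[Int]`; the default below is an assumption, not taken from the patch):

```scala
object TypedPortSketch {
  // Stand-in for ConfigEntry[Int]: a key plus a typed read with a default.
  final case class IntEntry(key: String, default: Int)

  def get(props: Map[String, String], entry: IntEntry): Int =
    props.get(entry.key).map(_.toInt).getOrElse(entry.default)

  def main(args: Array[String]): Unit = {
    val MASTER_UI_PORT = IntEntry("spark.master.ui.port", 8080) // default assumed
    val props = Map("spark.master.ui.port" -> "9090")

    // Presence check via .key, typed read with no .toInt at the call site.
    assert(props.contains(MASTER_UI_PORT.key))
    assert(get(props, MASTER_UI_PORT) == 9090)
  }
}
```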

core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala

+2 −1

@@ -20,6 +20,7 @@ package org.apache.spark.deploy.master.ui
 import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
 import org.apache.spark.deploy.master.Master
 import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config.UI.UI_KILL_ENABLED
 import org.apache.spark.ui.{SparkUI, WebUI}
 import org.apache.spark.ui.JettyUtils._
 
@@ -34,7 +35,7 @@ class MasterWebUI(
     requestedPort, master.conf, name = "MasterUI") with Logging {
 
   val masterEndpointRef = master.self
-  val killEnabled = master.conf.getBoolean("spark.ui.killEnabled", true)
+  val killEnabled = master.conf.get(UI_KILL_ENABLED)
 
   initialize()

core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala

+1-1
Original file line numberDiff line numberDiff line change
@@ -146,7 +146,7 @@ private[rest] class StandaloneSubmitRequestServlet(
146146
// the driver.
147147
val masters = sparkProperties.get("spark.master")
148148
val (_, masterPort) = Utils.extractHostPortFromSparkUrl(masterUrl)
149-
val masterRestPort = this.conf.getInt("spark.master.rest.port", 6066)
149+
val masterRestPort = this.conf.get(config.MASTER_REST_SERVER_PORT)
150150
val updatedMasters = masters.map(
151151
_.replace(s":$masterRestPort", s":$masterPort")).getOrElse(masterUrl)
152152
val appArgs = request.appArgs

core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala

+2 −1

@@ -28,6 +28,7 @@ import org.apache.spark.{SecurityManager, SparkConf}
 import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
 import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged
 import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config.UI._
 import org.apache.spark.rpc.RpcEndpointRef
 import org.apache.spark.util.{ShutdownHookManager, Utils}
 import org.apache.spark.util.logging.FileAppender
@@ -160,7 +161,7 @@ private[deploy] class ExecutorRunner(
 
     // Add webUI log urls
     val baseUrl =
-      if (conf.getBoolean("spark.ui.reverseProxy", false)) {
+      if (conf.get(UI_REVERSE_PROXY)) {
        s"/proxy/$workerId/logPage/?appId=$appId&executorId=$execId&logType="
       } else {
         s"http://$publicAddress:$webUiPort/logPage/?appId=$appId&executorId=$execId&logType="
