From 3ca96ee17d43c12aa4cd04a9e83eacb0abd6e055 Mon Sep 17 00:00:00 2001
From: Catalin Toda
Date: Thu, 25 Jan 2024 15:04:53 -0800
Subject: [PATCH] Prevent YARN filter from being added to the Spark UI

---
 .../org/apache/spark/deploy/yarn/ApplicationMaster.scala | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index 42e661cd47b19..d38f61dd2fef9 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -316,7 +316,7 @@ private[spark] class ApplicationMaster(
       registerAM(Utils.localHostName, -1, sparkConf,
         sparkConf.getOption("spark.driver.appUIAddress"), appAttemptId)
       val encodedAppId = URLEncoder.encode(appAttemptId.getApplicationId.toString, "UTF-8")
-      addAmIpFilter(Some(driverRef), s"/proxy/$encodedAppId")
+//      addAmIpFilter(Some(driverRef), s"/proxy/$encodedAppId")
       createAllocator(driverRef, sparkConf, clientRpcEnv, appAttemptId, cachedResourcesConf)
       reporterThread.join()
     } catch {
@@ -499,7 +499,7 @@ private[spark] class ApplicationMaster(
   }

   private def runDriver(): Unit = {
-    addAmIpFilter(None, System.getenv(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV))
+//    addAmIpFilter(None, System.getenv(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV))
     userClassThread = startUserApplication()

     // This a bit hacky, but we need to wait until the spark.driver.port property has
@@ -556,8 +556,8 @@ private[spark] class ApplicationMaster(
           val driverRef = rpcEnv.setupEndpointRef(
             RpcAddress(driverHost, driverPort),
             YarnSchedulerBackend.ENDPOINT_NAME)
-          addAmIpFilter(Some(driverRef),
-            System.getenv(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV))
+//          addAmIpFilter(Some(driverRef),
+//            System.getenv(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV))
           createAllocator(driverRef, sparkConf, rpcEnv, appAttemptId, distCacheConf)

           // In client mode the actor will stop the reporter thread.