
Commit c599d24

refactor
1 parent 7190f93 commit c599d24

File tree

1 file changed: +17 -24 lines changed


resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/HadoopConfExecutorFeatureStepSuite.scala

Lines changed: 17 additions & 24 deletions
@@ -21,30 +21,16 @@ import java.io.File
 import java.nio.charset.StandardCharsets.UTF_8
 
 import com.google.common.io.Files
-import org.scalatest.BeforeAndAfter
 
 import org.apache.spark.{SparkConf, SparkFunSuite}
-import org.apache.spark.deploy.k8s.{KubernetesExecutorConf, KubernetesTestConf, SecretVolumeUtils, SparkPod}
+import org.apache.spark.deploy.k8s.{KubernetesTestConf, SecretVolumeUtils, SparkPod}
 import org.apache.spark.deploy.k8s.Constants._
 import org.apache.spark.deploy.k8s.features.KubernetesFeaturesTestUtils.containerHasEnvVar
 import org.apache.spark.util.{SparkConfWithEnv, Utils}
 
-class HadoopConfExecutorFeatureStepSuite extends SparkFunSuite with BeforeAndAfter {
+class HadoopConfExecutorFeatureStepSuite extends SparkFunSuite {
   import SecretVolumeUtils._
 
-  private var baseConf: SparkConf = _
-
-  before {
-    baseConf = new SparkConf(false)
-  }
-
-  private def newExecutorConf(environment: Map[String, String] = Map.empty):
-      KubernetesExecutorConf = {
-    KubernetesTestConf.createExecutorConf(
-      sparkConf = baseConf,
-      environment = environment)
-  }
-
   test("SPARK-43504: mount hadoop config map in executor side") {
     val confDir = Utils.createTempDir()
     val confFiles = Set("core-site.xml", "hdfs-site.xml")
@@ -53,19 +39,26 @@ class HadoopConfExecutorFeatureStepSuite extends SparkFunSuite with BeforeAndAft
       Files.write("some data", new File(confDir, f), UTF_8)
     }
 
-    val sparkConf = new SparkConfWithEnv(Map(ENV_HADOOP_CONF_DIR -> confDir.getAbsolutePath()))
-    val conf = KubernetesTestConf.createDriverConf(sparkConf = sparkConf)
+    val driverSparkConf = new SparkConfWithEnv(
+      Map(ENV_HADOOP_CONF_DIR -> confDir.getAbsolutePath()))
+    val executorSparkConf = new SparkConf(false)
 
-    val driverStep = new HadoopConfDriverFeatureStep(conf)
+    val driverConf = KubernetesTestConf.createDriverConf(sparkConf = driverSparkConf)
+    val driverStep = new HadoopConfDriverFeatureStep(driverConf)
     driverStep.getAdditionalPodSystemProperties().foreach { case (key, value) =>
-      baseConf.set(key, value)
+      executorSparkConf.set(key, value)
     }
 
-    val executorStep = new HadoopConfExecutorFeatureStep(newExecutorConf())
+    val executorConf = KubernetesTestConf.createExecutorConf(sparkConf = executorSparkConf)
+    val executorStep = new HadoopConfExecutorFeatureStep(executorConf)
     val executorPod = executorStep.configurePod(SparkPod.initialPod())
 
-    assert(podHasVolume(executorPod.pod, HADOOP_CONF_VOLUME))
-    assert(containerHasVolume(executorPod.container, HADOOP_CONF_VOLUME, HADOOP_CONF_DIR_PATH))
-    assert(containerHasEnvVar(executorPod.container, ENV_HADOOP_CONF_DIR))
+    checkPod(executorPod)
+  }
+
+  private def checkPod(pod: SparkPod): Unit = {
+    assert(podHasVolume(pod.pod, HADOOP_CONF_VOLUME))
+    assert(containerHasVolume(pod.container, HADOOP_CONF_VOLUME, HADOOP_CONF_DIR_PATH))
+    assert(containerHasEnvVar(pod.container, ENV_HADOOP_CONF_DIR))
   }
 }
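
For context on the pattern the refactor lands on: HadoopConfDriverFeatureStep publishes pod system properties that tell the executor-side step where to find the Hadoop config map, and the test now replays those properties onto a locally scoped SparkConf instead of mutating a suite-level baseConf in a BeforeAndAfter hook, so each test owns its own state. Below is a minimal sketch of that handoff, assuming the spark-kubernetes core test classpath (KubernetesTestConf and friends) is available; the object and method names are hypothetical, but every API call mirrors the test above.

import org.apache.spark.SparkConf
import org.apache.spark.deploy.k8s.{KubernetesTestConf, SparkPod}
import org.apache.spark.deploy.k8s.features.{HadoopConfDriverFeatureStep, HadoopConfExecutorFeatureStep}

// Hypothetical helper, for illustration only.
object HadoopConfHandoffSketch {
  // Runs the driver-side feature step, then feeds its published system
  // properties into a fresh executor conf, as the refactored test does.
  def buildExecutorPod(driverSparkConf: SparkConf): SparkPod = {
    val driverConf = KubernetesTestConf.createDriverConf(sparkConf = driverSparkConf)
    val driverStep = new HadoopConfDriverFeatureStep(driverConf)

    // A locally scoped conf replaces the old shared, mutable baseConf.
    val executorSparkConf = new SparkConf(false)
    driverStep.getAdditionalPodSystemProperties().foreach { case (key, value) =>
      executorSparkConf.set(key, value)
    }

    val executorConf = KubernetesTestConf.createExecutorConf(sparkConf = executorSparkConf)
    new HadoopConfExecutorFeatureStep(executorConf).configurePod(SparkPod.initialPod())
  }
}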
