diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 44b2da603a1f6..089371b62f5f1 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -41,6 +41,7 @@ import org.apache.spark.executor.ExecutorExitCode
 import org.apache.spark.internal.config._
 import org.apache.spark.internal.config.Tests._
 import org.apache.spark.internal.config.UI._
+import org.apache.spark.launcher.SparkLauncher
 import org.apache.spark.resource.ResourceAllocation
 import org.apache.spark.resource.ResourceUtils._
 import org.apache.spark.resource.TestResourceIDs._
@@ -1423,6 +1424,17 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
     sc = new SparkContext(conf)
     sc.stop()
   }
+
+  test("SPARK-49984: Don't duplicate default Java options to extra Java options") {
+    val conf = new SparkConf().setAppName("test").setMaster("local")
+    conf.set(SparkLauncher.DRIVER_DEFAULT_JAVA_OPTIONS, "-Dfoo=bar")
+    conf.set(SparkLauncher.EXECUTOR_DEFAULT_JAVA_OPTIONS, "-Dfoo=bar")
+    sc = new SparkContext(conf)
+    assert(!sc.conf.get(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS).contains("-Dfoo=bar"))
+    assert(!sc.conf.get(SparkLauncher.EXECUTOR_EXTRA_JAVA_OPTIONS).contains("-Dfoo=bar"))
+
+    sc.stop()
+  }
 }
 
 object SparkContextSuite {
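
For context outside the test suite, below is a minimal, hedged sketch of the same check as a standalone application; it is not part of the patch. The object name is illustrative, and it uses the public `sc.getConf`/`getOption` accessors rather than the package-private `sc.conf` that the test can reach from inside `org.apache.spark`.

// Standalone sketch of the scenario the new test exercises: set only the
// *default* Java options, then confirm they are not copied into the *extra*
// Java options after SparkContext initialization. Object name is illustrative.
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.launcher.SparkLauncher

object DefaultJavaOptionsCheck {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("default-java-options-check")
      .setMaster("local")
      .set(SparkLauncher.DRIVER_DEFAULT_JAVA_OPTIONS, "-Dfoo=bar")
      .set(SparkLauncher.EXECUTOR_DEFAULT_JAVA_OPTIONS, "-Dfoo=bar")

    val sc = new SparkContext(conf)
    try {
      // getConf returns a copy of the resolved configuration; getOption avoids
      // an exception if the extra-options keys were never set at all.
      val driverExtra = sc.getConf.getOption(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS)
      val executorExtra = sc.getConf.getOption(SparkLauncher.EXECUTOR_EXTRA_JAVA_OPTIONS)
      assert(!driverExtra.exists(_.contains("-Dfoo=bar")),
        "driver default Java options leaked into extra Java options")
      assert(!executorExtra.exists(_.contains("-Dfoo=bar")),
        "executor default Java options leaked into extra Java options")
    } finally {
      sc.stop()
    }
  }
}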