[SPARK-42474][CORE][K8S] Add extraJVMOptions JVM GC option K8s test cases

### What changes were proposed in this pull request?

This PR aims to add JVM GC option test coverage to the K8s Integration Suite.
To reuse the existing code, the `isG1GC` variable is moved from `MemoryManager` to `Utils`.
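
For reference, the moved helper detects G1 through the HotSpot diagnostic MX bean. The snippet below is a minimal, non-reflective sketch of that check (it assumes a HotSpot JVM where `com.sun.management` is available); the committed `Utils.isG1GC` instead resolves these classes reflectively and falls back to `false`, as shown in the diff, so it degrades gracefully on JVMs that do not expose them.

```scala
// Minimal sketch of the same UseG1GC probe, written against com.sun.management
// directly (assumption: a HotSpot JVM). The committed Utils.isG1GC resolves
// these classes reflectively and returns false on any failure.
import java.lang.management.ManagementFactory
import com.sun.management.HotSpotDiagnosticMXBean

object G1GCProbe {
  def isG1GC: Boolean = {
    val bean = ManagementFactory.getPlatformMXBean(classOf[HotSpotDiagnosticMXBean])
    // VMOption#getValue returns the flag value as a String, e.g. "true" or "false"
    bean.getVMOption("UseG1GC").getValue == "true"
  }

  def main(args: Array[String]): Unit =
    println("JVM G1GC Flag: " + isG1GC) // same line the new K8s tests look for
}
```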

### Why are the changes needed?

To provide more test coverage for JVM Options.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Pass the CIs

```
[info] KubernetesSuite:
[info] - SPARK-42190: Run SparkPi with local[*] (4 seconds, 990 milliseconds)
[info] - Run SparkPi with no resources (7 seconds, 101 milliseconds)
[info] - Run SparkPi with no resources & statefulset allocation (7 seconds, 27 milliseconds)
[info] - Run SparkPi with a very long application name. (7 seconds, 100 milliseconds)
[info] - Use SparkLauncher.NO_RESOURCE (7 seconds, 947 milliseconds)
[info] - Run SparkPi with a master URL without a scheme. (6 seconds, 932 milliseconds)
[info] - Run SparkPi with an argument. (9 seconds, 47 milliseconds)
[info] - Run SparkPi with custom labels, annotations, and environment variables. (6 seconds, 969 milliseconds)
[info] - All pods have the same service account by default (6 seconds, 916 milliseconds)
[info] - Run extraJVMOptions check on driver (3 seconds, 964 milliseconds)
[info] - Run extraJVMOptions JVM GC option check - G1GC (3 seconds, 948 milliseconds)
[info] - Run extraJVMOptions JVM GC option check - Other GC (4 seconds, 51 milliseconds)
...
```
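
The two new cases drive the check end to end: `DriverSubmissionTest` prints `JVM G1GC Flag: <value>`, and `runSparkJVMCheckAndVerifyCompletion` verifies that the driver output contains the expected string. The sketch below only illustrates that style of assertion; the object and method names are assumptions, not the real `KubernetesSuite` helper, which is not shown in this diff.

```scala
// Illustrative sketch only: the helper name and signature are assumptions, not
// the real KubernetesSuite API. It captures the idea of the check: scan the
// collected driver log for the "JVM G1GC Flag: ..." line printed by the driver.
object DriverLogCheck {
  def verifyDriverLogContains(driverLog: String, expectedJVMValue: Seq[String]): Unit =
    expectedJVMValue.foreach { expected =>
      assert(driverLog.contains(expected),
        s"Driver log did not contain the expected value: $expected")
    }

  def main(args: Array[String]): Unit =
    // e.g. the G1GC case passes when the driver printed the flag as true
    verifyDriverLogContains("... JVM G1GC Flag: true ...", Seq("JVM G1GC Flag: true"))
}
```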

Closes apache#40062 from dongjoon-hyun/SPARK-42474.

Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
dongjoon-hyun committed Feb 17, 2023
1 parent 9f7582c commit ba8abdd
Showing 4 changed files with 38 additions and 23 deletions.
23 changes: 1 addition & 22 deletions core/src/main/scala/org/apache/spark/memory/MemoryManager.scala
@@ -17,11 +17,8 @@

package org.apache.spark.memory

import java.lang.management.{ManagementFactory, PlatformManagedObject}
import javax.annotation.concurrent.GuardedBy

import scala.util.Try

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
@@ -263,7 +260,7 @@ private[spark] abstract class MemoryManager(
}
val size = ByteArrayMethods.nextPowerOf2(maxTungstenMemory / cores / safetyFactor)
val chosenPageSize = math.min(maxPageSize, math.max(minPageSize, size))
if (isG1GC && tungstenMemoryMode == MemoryMode.ON_HEAP) {
if (Utils.isG1GC && tungstenMemoryMode == MemoryMode.ON_HEAP) {
chosenPageSize - Platform.LONG_ARRAY_OFFSET
} else {
chosenPageSize
@@ -281,22 +278,4 @@
case MemoryMode.OFF_HEAP => MemoryAllocator.UNSAFE
}
}

/**
* Return whether we are using G1GC or not
*/
private lazy val isG1GC: Boolean = {
Try {
val clazz = Utils.classForName("com.sun.management.HotSpotDiagnosticMXBean")
.asInstanceOf[Class[_ <: PlatformManagedObject]]
val vmOptionClazz = Utils.classForName("com.sun.management.VMOption")
val hotSpotDiagnosticMXBean = ManagementFactory.getPlatformMXBean(clazz)
val vmOptionMethod = clazz.getMethod("getVMOption", classOf[String])
val valueMethod = vmOptionClazz.getMethod("getValue")

val useG1GCObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseG1GC")
val useG1GC = valueMethod.invoke(useG1GCObject).asInstanceOf[String]
"true".equals(useG1GC)
}.getOrElse(false)
}
}
20 changes: 19 additions & 1 deletion core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -19,7 +19,7 @@ package org.apache.spark.util

import java.io._
import java.lang.{Byte => JByte}
import java.lang.management.{LockInfo, ManagementFactory, MonitorInfo, ThreadInfo}
import java.lang.management.{LockInfo, ManagementFactory, MonitorInfo, PlatformManagedObject, ThreadInfo}
import java.lang.reflect.InvocationTargetException
import java.math.{MathContext, RoundingMode}
import java.net._
@@ -3278,6 +3278,24 @@ private[spark] object Utils extends Logging {
case _ => math.max(sortedSize(len / 2), 1)
}
}

/**
* Return whether we are using G1GC or not
*/
lazy val isG1GC: Boolean = {
Try {
val clazz = Utils.classForName("com.sun.management.HotSpotDiagnosticMXBean")
.asInstanceOf[Class[_ <: PlatformManagedObject]]
val vmOptionClazz = Utils.classForName("com.sun.management.VMOption")
val hotSpotDiagnosticMXBean = ManagementFactory.getPlatformMXBean(clazz)
val vmOptionMethod = clazz.getMethod("getVMOption", classOf[String])
val valueMethod = vmOptionClazz.getMethod("getValue")

val useG1GCObject = vmOptionMethod.invoke(hotSpotDiagnosticMXBean, "UseG1GC")
val useG1GC = valueMethod.invoke(useG1GCObject).asInstanceOf[String]
"true".equals(useG1GC)
}.getOrElse(false)
}
}

private[util] object CallerContext extends Logging {

@@ -43,6 +43,8 @@ object DriverSubmissionTest {
println("System properties containing spark.test:")
properties.filter { case (k, _) => k.contains("spark.test") }.foreach(println)

println("JVM G1GC Flag: " + Utils.isG1GC)

for (i <- 1 until numSecondsToSleep) {
println(s"Alive for $i out of $numSecondsToSleep seconds")
Thread.sleep(1000)

@@ -135,6 +135,22 @@ private[spark] trait BasicTestsSuite { k8sSuite: KubernetesSuite =>
expectedJVMValue = Seq("(spark.test.foo,spark.test.bar)"))
}

test("SPARK-42474: Run extraJVMOptions JVM GC option check - G1GC", k8sTestTag) {
sparkAppConf
.set("spark.driver.extraJavaOptions", "-XX:+UseG1GC")
.set("spark.executor.extraJavaOptions", "-XX:+UseG1GC")
runSparkJVMCheckAndVerifyCompletion(
expectedJVMValue = Seq("JVM G1GC Flag: true"))
}

test("SPARK-42474: Run extraJVMOptions JVM GC option check - Other GC", k8sTestTag) {
sparkAppConf
.set("spark.driver.extraJavaOptions", "-XX:+UseParallelGC")
.set("spark.executor.extraJavaOptions", "-XX:+UseParallelGC")
runSparkJVMCheckAndVerifyCompletion(
expectedJVMValue = Seq("JVM G1GC Flag: false"))
}

test("Run SparkRemoteFileTest using a remote data file", k8sTestTag, localTestTag) {
assert(sys.props.contains("spark.test.home"), "spark.test.home is not set!")
TestUtils.withHttpServer(sys.props("spark.test.home")) { baseURL =>
