diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala index ce62f8f5d5473..6352a1f3f5d1d 100644 --- a/core/src/main/scala/org/apache/spark/SparkContext.scala +++ b/core/src/main/scala/org/apache/spark/SparkContext.scala @@ -614,7 +614,7 @@ class SparkContext(config: SparkConf) extends Logging { _heartbeater = new Heartbeater( () => SparkContext.this.reportHeartBeat(_executorMetricsSource), "driver-heartbeater", - conf.get(EXECUTOR_HEARTBEAT_INTERVAL)) + conf.get(DRIVER_METRICS_POLLING_INTERVAL)) _heartbeater.start() // start TaskScheduler after taskScheduler sets DAGScheduler reference in DAGScheduler's diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala index c25a4fd45c58a..45390e0bb666d 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/package.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala @@ -1209,6 +1209,14 @@ package object config { .checkValue(v => v >= 0, "The value should be a non-negative time value.") .createWithDefaultString("0min") + private[spark] val DRIVER_METRICS_POLLING_INTERVAL = + ConfigBuilder("spark.driver.metrics.pollingInterval") + .doc("How often to collect driver metrics (in milliseconds). " + + "If unset, this falls back to the executor heartbeat interval " + + "(spark.executor.heartbeatInterval).") + .version("4.1.0") + .fallbackConf(EXECUTOR_HEARTBEAT_INTERVAL) + private[spark] val DRIVER_BIND_ADDRESS = ConfigBuilder("spark.driver.bindAddress") .doc("Address where to bind network listen sockets on the driver.") .version("2.1.0")