Expose Spark's cache memory status as Metrics Gauges
import com.yammer.metrics.Metrics
import com.yammer.metrics.core.Gauge
import org.apache.spark.SparkContext

/**
 * Created by tzachz on 10/21/15
 */
object SparkContextInstrumentor {

  def instrument(context: SparkContext): SparkContext = {
    // Total and remaining memory available for caching, summed across all executors.
    // getExecutorMemoryStatus maps executor address -> (max memory, remaining memory).
    registerGauge("maxCacheMemory", () => context.getExecutorMemoryStatus.map(_._2._1).sum)
    registerGauge("freeCacheMemory", () => context.getExecutorMemoryStatus.map(_._2._2).sum)
    // Number of cached RDDs, and the total memory they currently occupy
    registerGauge("cachedRDDs", () => context.getRDDStorageInfo.count(_.isCached))
    registerGauge("cachedRDDsTotalMemory", () => context.getRDDStorageInfo.filter(_.isCached).map(_.memSize).sum)
    context
  }

  // Registers a gauge under the SparkContext class in the default metrics registry;
  // the value is recomputed each time the gauge is polled
  private def registerGauge(name: String, valueProducer: () => Long): Unit = {
    Metrics.newGauge[Long](classOf[SparkContext], name, new Gauge[Long]() {
      override def value(): Long = valueProducer()
    })
  }
}
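
For reference, a minimal usage sketch: instrument the context right after creating it, then publish the default registry over JMX. The app name, master URL, and object name below are placeholders, and it assumes metrics-core 2.x (which ships a JMX reporter) is on the classpath; the gauges should then appear under the org.apache.spark domain with type SparkContext.

import com.yammer.metrics.Metrics
import com.yammer.metrics.reporting.JmxReporter
import org.apache.spark.{SparkConf, SparkContext}

object InstrumentedApp extends App {
  // Hypothetical configuration: substitute your own app name and master URL
  val sc = new SparkContext(new SparkConf().setAppName("instrumented-app").setMaster("local[*]"))

  // Registers the four cache gauges against the default metrics registry
  SparkContextInstrumentor.instrument(sc)

  // Expose everything in the default registry over JMX
  JmxReporter.startDefault(Metrics.defaultRegistry())
}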
This deserves a blog post 😄 and I got here via Google, believe it or not.