Skip to content

Instantly share code, notes, and snippets.

// Demo entry point: exercises an Either-returning divide and a Try-based variant.
// NOTE(review): fragment is truncated — the second `match` body is missing below.
def main(args: Array[String]): Unit = {
// divide(10, 0) presumably returns Either; division by zero is the deliberate failure case.
val res = divide(10, 0)
res match {
// NOTE(review): Left is treated as success and Right as failure here — the opposite
// of the Scala convention (Right-biased Either carries the success). Confirm against
// divide's definition; if it follows convention these two cases are swapped.
case Left(value) => println(s"Result is ${value}")
case Right(e) => println(s"Calculation failed reason ${e}")
}
// trydivide presumably wraps the division in scala.util.Try — TODO confirm.
val result = trydivide(10, 0)
result match {
/**
 * Closes the underlying writer.
 *
 * Any failure raised by the writer's close() is rethrown wrapped in an
 * unchecked RuntimeException so callers need not declare checked exceptions.
 */
public void close() {
    try {
        this.writer.close();
    } catch (Exception ex) {
        // Preserve the original failure as the cause of the unchecked wrapper.
        throw new RuntimeException(ex);
    }
}
/* Demonstrates errno-based diagnostics when fopen fails.
 * NOTE(review): this fragment is truncated at the bottom — the success path,
 * the closing braces, and the semicolon after exit(EXIT_FAILURE) are missing. */
int main ()
{
FILE * fp;
/* "rb" = open for reading in binary mode; a nonexistent file makes fopen
 * return NULL and set errno (typically ENOENT). */
fp = fopen ("somejunkfile.txt", "rb");
if (fp == NULL)
{
/* Report both the raw errno value and its human-readable description. */
printf("errno: %d\n", errno);
printf("Error opening the file: %s\n", strerror(errno));
exit(EXIT_FAILURE)
// Queue-rebalancing step: gather cluster state, find queues with spare capacity,
// then look up this user's applications in the given status.
// NOTE(review): fragment is truncated — the body of the `if` is missing below.
val metrics = client.clusterMetrics()
val clusterQueueInfo = client.clusterQueue()
// Queues that still have capacity, derived from the metrics + scheduler snapshot.
val queueWithCapacity = client.freeQueues(metrics, clusterQueueInfo)
// `user` and `status` come from enclosing scope not visible in this fragment.
val selectedYarnApps = client.appsBy(user, status)
if (hasApps(selectedYarnApps)) {
// Timestamp taken once before processing the selected apps — TODO confirm its use.
val now = System.currentTimeMillis()
/** Abstraction over the YARN resource-manager client used by the queue-rebalancing job. */
trait YarnClient {
  /** Snapshot of cluster-wide resource metrics. */
  def clusterMetrics(): ClusterMetrics
  /** Current scheduler / queue configuration. */
  def clusterQueue(): ClusterScheduler
  /** Queues with spare capacity, derived from the metrics and scheduler snapshot. */
  def freeQueues(metrics: ClusterMetrics, clusterQueueInfo: ClusterScheduler): List[Queue]
  /** Applications belonging to `user` that are currently in `status`. */
  def appsBy(user: String, status: String): YarnApps
  /** Moves `app` into one of the candidate `queues` (side-effecting, no result).
    * Explicit `: Unit` replaces the deprecated procedure syntax, which is
    * removed in Scala 3. */
  def moveApp(app: YarnApp, queues: List[Queue]): Unit
  // NOTE(review): trait body is truncated in this fragment (closing brace not visible).
19/01/27 13:35:57 INFO StageAccumulatorListener: Stage accumulator values:map at SparkMonitoringAdvance.scala:44
19/01/27 13:35:57 INFO StageAccumulatorListener: 8:Some(internal.metrics.memoryBytesSpilled):Some(0)
19/01/27 13:35:57 INFO StageAccumulatorListener: 2:Some(internal.metrics.executorDeserializeCpuTime):Some(468750000)
19/01/27 13:35:57 INFO StageAccumulatorListener: 20:Some(internal.metrics.shuffle.write.writeTime):Some(3082492535)
19/01/27 13:35:57 INFO StageAccumulatorListener: 5:Some(internal.metrics.resultSize):Some(396352)
// SparkListener that dumps every accumulator recorded for a completed stage.
// NOTE(review): the class is truncated in this fragment — its closing brace and
// any further members are not visible here.
class StageAccumulatorListener extends SparkListener {
val log = LoggerFactory.getLogger(this.getClass.getName)
// Invoked by Spark's listener bus once per completed stage.
override def onStageCompleted(event: SparkListenerStageCompleted): Unit = {
log.info(s"Stage accumulator values:${event.stageInfo.name}")
// One log line per accumulator: "<id>:<Some(name)>:<Some(value)>" —
// name/value are Options, so their Some(..) wrappers appear in the output.
event.stageInfo.accumulables.foreach { case (id, accInfo) =>
log.info(s"$id:${accInfo.name}:${accInfo.value}")
}
}
// Endless monitor loop: once per second, logs the accumulator's running count and
// the average throughput since start. Presumably runs on a dedicated daemon
// thread — TODO confirm at the call site (the loop never terminates on its own).
// NOTE(review): fragment is truncated — the closing braces of the loop/method are missing.
private def monitorRecordsProcessed(pointsRecordCounter: LongAccumulator): Unit = {
val startTime = System.currentTimeMillis()
val counter = pointsRecordCounter
log.info("Records monitoring started")
while (true) {
val timeInSec = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime)
val recordsCount = counter.sum
// On the first iteration timeInSec can be 0; Float division then yields
// Infinity (or NaN for 0/0) rather than throwing — the log line is just odd.
val tp = recordsCount.toFloat / timeInSec
log.info(s"Records processed ${recordsCount} in ${timeInSec} sec , throughput ${tp} / sec")
Thread.sleep(TimeUnit.SECONDS.toMillis(1))
// Polls once per second until SparkContextBuilder reports the context exists,
// logging the elapsed wait, then logs the total creation time.
// NOTE(review): fragment is truncated — the method's closing brace is missing below.
private def checkSparkContext(): Unit = {
val startTime = System.currentTimeMillis()
log.info("Context creation monitor thread started")
// Busy-wait (1 s granularity) on an externally-set flag; relies on
// SparkContextBuilder.isContextNotCreated() eventually flipping in another thread.
while (SparkContextBuilder.isContextNotCreated()) {
Thread.sleep(TimeUnit.SECONDS.toMillis(1))
val total = System.currentTimeMillis() - startTime
log.info("Waiting for spark context from {} seconds", TimeUnit.MILLISECONDS.toSeconds(total))
}
log.info("Spark context creation took {} seconds", TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime))
// Doubles every element of the RDD partition-by-partition (logging each input
// value on the executor) and sums the doubled values into a single total.
val total = randomNumbers.mapPartitions { partition =>
  partition.map { value =>
    log_.info("Processing {}", value)
    value * 2
  }
}.sum()