[error] (Thread-37) org.apache.spark.SparkException: Job aborted due to stage failure: Task 0.0:2 failed 1 times, most recent failure: Exception failure in TID 2 on host localhost: java.lang.StackOverflowError
[error] scala.Option$$anonfun$orNull$1.<init>(Option.scala:131)
[error] scala.Option.orNull(Option.scala:131)
[error] org.apache.spark.streaming.receiver.ReceiverSupervisor.stop(ReceiverSupervisor.scala:111)
[error] org.apache.spark.streaming.receiver.ReceiverSupervisor.stopReceiver(ReceiverSupervisor.scala:141)
[error] org.apache.spark.streaming.receiver.ReceiverSupervisor.stop(ReceiverSupervisor.scala:112)
[error] org.apache.spark.streaming.receiver.ReceiverSupervisor.stopReceiver(ReceiverSupervisor.scala:141)
[error] org.apache.spark.streaming.receiver.ReceiverSupervisor.stop(ReceiverSupervisor.scala:112)
[error] org.apache.spark.streaming.receiver.ReceiverSupervisor.stopReceiver(ReceiverSupervisor.scala:141)
[error] org.apache.s
[error] (Thread-37) org.apache.spark.SparkException: Job aborted due to stage failure: All masters are unresponsive! Giving up.
org.apache.spark.SparkException: Job aborted due to stage failure: All masters are unresponsive! Giving up.
	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1044)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1028)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1026)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1026)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:634)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:634)
import consumer.kafka.client.KafkaReceiver
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkContext, SparkConf}

/**
 * Created by akhld on 11/12/14.
 */
object LowLevelKafkaConsumer {
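  // The gist embed is truncated at this point. As a rough sketch only, the rest
  // of the driver would typically look like the following. Assumptions: a
  // local[2] master (a receiver needs at least two cores locally), the app name,
  // and a 10-second batch interval. The KafkaReceiver registration is left
  // commented out because its constructor arguments are not visible in the gist.
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[2]")               // assumed master URL
      .setAppName("LowLevelKafkaConsumer") // assumed app name
    val ssc = new StreamingContext(conf, Seconds(10)) // assumed batch interval

    // val stream = ssc.receiverStream(new KafkaReceiver(...)) // args not shown in the gist

    ssc.start()
    ssc.awaitTermination()
  }
}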
import java.io.Serializable;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.rdd.JdbcRDD;
val rdd = new org.apache.spark.rdd.JdbcRDD(
  ssc.sparkContext,
  () => {
    // Load the MySQL Connector/J driver and open a connection.
    Class.forName("com.mysql.jdbc.Driver")
    DriverManager.getConnection("jdbc:mysql://localhost:3306/sigmoid", "akhld", "pass")
  },
  "SELECT * FROM logs WHERE ? <= id AND id <= ?",
  0, 1000, 10,
  row => {
    // mapRow is truncated in the original gist; a minimal assumed mapping that
    // just pulls the id column from each ResultSet row:
    row.getInt("id")
  }
)
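For context on the arguments: JdbcRDD takes the SparkContext, a connection factory invoked on each worker, a query whose two ? placeholders are bound per partition to that partition's lower and upper bound, the overall bounds (0 and 1000 here), the number of partitions (10, so each partition scans roughly 100 ids), and a mapRow function applied to every ResultSet row. A quick sanity check of the resulting RDD, under the same assumptions as the sketch above:

// Count the rows pulled from MySQL and print a few of them.
println("Rows fetched: " + rdd.count())
rdd.take(5).foreach(println)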