Created
August 14, 2014 14:44
-
-
Save zsxwing/4f9f17201d4378fe3e16 to your computer and use it in GitHub Desktop.
Reproduction showing that a REPL-defined class `Foo` cannot be serialized by Spark (NotSerializableException).
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
scala> class Foo { def foo() = Array(1.0) }
defined class Foo
scala> val t = new Foo
t: Foo = $iwC$$iwC$$iwC$$iwC$Foo@5ef6a5b6
scala> val m = t.foo
m: Array[Double] = Array(1.0)
scala> val r1 = sc.parallelize(List(1, 2, 3))
r1: org.apache.spark.rdd.RDD[Int] = ParallelCollectionRDD[0] at parallelize at <console>:12
scala> val r2 = r1.map(_ + m(0))
r2: org.apache.spark.rdd.RDD[Double] = MappedRDD[1] at map at <console>:20
scala> r2.toArray
14/08/14 22:44:17 INFO SparkContext: Starting job: toArray at <console>:23
14/08/14 22:44:17 INFO DAGScheduler: Got job 0 (toArray at <console>:23) with 1 output partitions (allowLocal=false)
14/08/14 22:44:17 INFO DAGScheduler: Final stage: Stage 0 (toArray at <console>:23)
14/08/14 22:44:17 INFO DAGScheduler: Parents of final stage: List()
14/08/14 22:44:17 INFO DAGScheduler: Missing parents: List()
14/08/14 22:44:17 INFO DAGScheduler: Submitting Stage 0 (MappedRDD[1] at map at <console>:20), which has no missing parents
14/08/14 22:44:17 INFO DAGScheduler: Failed to run toArray at <console>:23
org.apache.spark.SparkException: Job aborted: Task not serializable: java.io.NotSerializableException: $iwC$$iwC$$iwC$$iwC$Foo
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$org$apache$spark$scheduler$DAGScheduler$$abortStage$1.apply(DAGScheduler.scala:1028)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$org$apache$spark$scheduler$DAGScheduler$$abortStage$1.apply(DAGScheduler.scala:1026)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$abortStage(DAGScheduler.scala:1026)
	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$submitMissingTasks(DAGScheduler.scala:794)
	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$submitStage(DAGScheduler.scala:737)
	at org.apache.spark.scheduler.DAGScheduler.processEvent(DAGScheduler.scala:569)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$start$1$$anon$2$$anonfun$receive$1.applyOrElse(DAGScheduler.scala:207)
	at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
	at akka.actor.ActorCell.invoke(ActorCell.scala:456)
	at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
	at akka.dispatch.Mailbox.run(Mailbox.scala:219)
	at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
	at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
	at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
	at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
	at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment