Exception in thread "main" org.apache.spark.SparkException: RDD transformations and actions can only be invoked by the driver, not inside of other transformations; for example, rdd1.map(x => rdd2.values.count() * x) is invalid because the values transformation and count action cannot be performed inside of the rdd1.map transformation. For more information, see SPARK-5063.
    at org.apache.spark.rdd.RDD.org$apache$spark$rdd$RDD$$sc(RDD.scala:87)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:316)
    at org.apache.spark.rdd.PairRDDFunctions.partitionBy(PairRDDFunctions.scala:530)
    at org.apache.spark.streaming.rdd.MapWithStateRDD$.createFromPairRDD(MapWithStateRDD.scala:189)
    at org.apache.spark.streaming.dstream.InternalMapWithStateDStream.compute(MapWithStateDStream.scala:146)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:346)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:344)
    at scala.Option.orElse(Option.scala:257)
    at org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:341)
    at org.apache.spark.streaming.dstream.InternalMapWithStateDStream.compute(MapWithStateDStream.scala:134)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:346)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:344)
    at scala.Option.orElse(Option.scala:257)
    at org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:341)
    at org.apache.spark.streaming.dstream.FlatMappedDStream.compute(FlatMappedDStream.scala:35)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:346)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:344)
    at scala.Option.orElse(Option.scala:257)
    at org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:341)
    at org.apache.spark.streaming.dstream.ForEachDStream.generateJob(ForEachDStream.scala:47)
    at org.apache.spark.streaming.DStreamGraph$$anonfun$1.apply(DStreamGraph.scala:115)
    at org.apache.spark.streaming.DStreamGraph$$anonfun$1.apply(DStreamGraph.scala:114)
    at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:251)
    at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:251)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
    at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:251)
    at scala.collection.AbstractTraversable.flatMap(Traversable.scala:105)
    at org.apache.spark.streaming.DStreamGraph.generateJobs(DStreamGraph.scala:114)
    at org.apache.spark.streaming.scheduler.JobGenerator$$anonfun$restart$4.apply(JobGenerator.scala:232)
    at org.apache.spark.streaming.scheduler.JobGenerator$$anonfun$restart$4.apply(JobGenerator.scala:227)
    at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
    at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108)
    at org.apache.spark.streaming.scheduler.JobGenerator.restart(JobGenerator.scala:227)
    at org.apache.spark.streaming.scheduler.JobGenerator.start(JobGenerator.scala:96)
    at org.apache.spark.streaming.scheduler.JobScheduler.start(JobScheduler.scala:83)
    at org.apache.spark.streaming.StreamingContext$$anonfun$liftedTree1$1$1.apply$mcV$sp(StreamingContext.scala:610)
    at org.apache.spark.streaming.StreamingContext$$anonfun$liftedTree1$1$1.apply(StreamingContext.scala:606)
    at org.apache.spark.streaming.StreamingContext$$anonfun$liftedTree1$1$1.apply(StreamingContext.scala:606)
    at ... run in separate thread using org.apache.spark.util.ThreadUtils ... ()
    at org.apache.spark.streaming.StreamingContext.liftedTree1$1(StreamingContext.scala:606)
    at org.apache.spark.streaming.StreamingContext.start(StreamingContext.scala:600)
    at com.stuq.chapter02.StuqExampleMapWithState$.main(StuqExampleMapWithState.scala:80)
    at com.stuq.chapter02.StuqExampleMapWithState.main(StuqExampleMapWithState.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
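For reference, the message at the top of this trace describes the SPARK-5063 rule: an RDD's transformations and actions may only be invoked on the driver, never from inside another RDD's closure. The source of StuqExampleMapWithState is not part of this gist, so the snippet below is only a minimal, self-contained Scala sketch of the invalid pattern the message names (rdd1.map(x => rdd2.values.count() * x)) and one common driver-side rework; all names in it (Spark5063Sketch, factor, and so on) are illustrative and not taken from the failing program.

import org.apache.spark.{SparkConf, SparkContext}

// Minimal sketch of the SPARK-5063 constraint; not the gist's original program.
object Spark5063Sketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setAppName("spark-5063-sketch").setMaster("local[2]"))

    val rdd1 = sc.parallelize(1 to 10)
    val rdd2 = sc.parallelize(Seq("a" -> 1, "b" -> 2))

    // Invalid (what the exception message describes): rdd2 is referenced inside
    // rdd1's map closure, so its count() action would have to run on an executor,
    // which Spark forbids.
    // val bad = rdd1.map(x => rdd2.values.count() * x)

    // Valid rework: run the inner action on the driver first, then close over
    // the plain result (broadcast it instead if it is large).
    val factor = rdd2.values.count()
    val scaled = rdd1.map(x => factor * x)

    println(scaled.collect().mkString(", "))
    sc.stop()
  }
}

In the trace above the exception is raised inside MapWithStateRDD.createFromPairRDD while JobGenerator.restart is replaying batches, i.e. during recovery of the StreamingContext from a checkpoint, so the illegally referenced RDD is most likely one captured in the checkpointed mapWithState DStream graph (for example a StateSpec initial-state RDD) rather than a literal nested map call like the one sketched here.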