1. Error: gapply() and gapplyCollect() on a DataFrame (@test_sparkSQL.R#2569) --
org.apache.spark.SparkException: Job aborted due to stage failure: Task 114 in stage 957.0 failed 1 times, most recent failure: Lost task 114.0 in stage 957.0 (TID 13209, localhost, executor driver): org.apache.spark.SparkException: R computation failed with
[1] 1
[1] 3
[1] 2
[1][1] 1 2
[1] 3
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
ignoring SIGPIPE signal
ignoring SIGPIPE signal
ignoring SIGPIPE signal
    at org.apache.spark.api.r.RRunner.compute(RRunner.scala:108)
    at org.apache.spark.sql.execution.FlatMapGroupsInRExec$$anonfun$12.apply(objects.scala:404)
    at org.apache.spark.sql.execution.FlatMapGroupsInRExec$$anonfun$12.apply(objects.scala:386)
    at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$25.apply(RDD.scala:827)
    at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$25.apply(RDD.scala:827)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:99)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:325)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1650)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:628)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1928)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1941)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1954)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1968)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:936)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:935)
    at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:275)
    at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$execute$1$1.apply(Dataset.scala:2386)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
    at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2788)
    at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$execute$1(Dataset.scala:2385)
    at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$collect$1.apply(Dataset.scala:2390)
    at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$collect$1.apply(Dataset.scala:2390)
    at org.apache.spark.sql.Dataset.withCallback(Dataset.scala:2801)
    at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collect(Dataset.scala:2390)
    at org.apache.spark.sql.Dataset.collect(Dataset.scala:2366)
    at org.apache.spark.sql.api.r.SQLUtils$.dfToCols(SQLUtils.scala:210)
    at org.apache.spark.sql.api.r.SQLUtils.dfToCols(SQLUtils.scala)
    at sun.reflect.GeneratedMethodAccessor114.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:167)
    at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:108)
    at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:40)
    at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:293)
    at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:267)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1294)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:911)
    at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
    at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:643)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:566)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:480)
    at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:442)
    at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:131)
    at io.netty.util.concurrent.DefaultThreadFactory$Defaul
1: collect(df2) at /home/npentreath/spark-2.1.2/R/pkg/tests/fulltests/test_sparkSQL.R:2569
2: collect(df2)
3: .local(x, ...)
4: callJStatic("org.apache.spark.sql.api.r.SQLUtils", "dfToCols", x@sdf)
5: invokeJava(isStatic = TRUE, className, methodName, ...)
6: handleErrors(returnStatus, conn)
7: stop(readString(conn))
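The R traceback above shows the failure surfacing through collect(df2) into SQLUtils.dfToCols, while the executor side dies in RRunner.compute when the forked R worker exits (the "ignoring SIGPIPE signal" lines). For reference, here is a minimal sketch of the kind of grouped-apply job this test exercises; the data, column names, schema, and variable names are illustrative, not the actual test code.

```r
library(SparkR)
sparkR.session(master = "local[1]")

# Illustrative input; the real test uses its own fixture data.
df <- createDataFrame(data.frame(key = c(1, 1, 2), value = c(10, 20, 30)))

# gapply() ships each group to a forked R worker process; a crash in that
# worker is what surfaces as the "R computation failed" SparkException above.
schema <- structType(structField("key", "double"),
                     structField("total", "double"))
df2 <- gapply(df, "key",
              function(key, x) { data.frame(key, total = sum(x$value)) },
              schema)
collect(df2)   # frame 1 of the R traceback above

# gapplyCollect() is the same operation with the collect step fused in:
gapplyCollect(df, "key",
              function(key, x) { data.frame(key, total = sum(x$value)) })
```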
2. Error: Pipelined operations on RDDs created using textFile (@test_textFile.R#153)
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 15.0 failed 1 times, most recent failure: Lost task 0.0 in stage 15.0 (TID 29, localhost, executor driver): org.apache.spark.SparkException: R computation failed with
[1] 1
[1] 3
[1] 2
[1][1] 1 2
[1] 3
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
ignoring SIGPIPE signal
ignoring SIGPIPE signal
ignoring SIGPIPE signal
    at org.apache.spark.api.r.RRunner.compute(RRunner.scala:108)
    at org.apache.spark.api.r.BaseRRDD.compute(RRDD.scala:51)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:99)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:325)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1650)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:628)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1928)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1941)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1954)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1968)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:936)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:935)
    at org.apache.spark.api.java.JavaRDDLike$class.collect(JavaRDDLike.scala:361)
    at org.apache.spark.api.java.AbstractJavaRDDLike.collect(JavaRDDLike.scala:45)
    at sun.reflect.GeneratedMethodAccessor61.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:167)
    at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:108)
    at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:40)
    at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:293)
    at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:267)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1294)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:911)
    at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
    at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:643)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:566)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:480)
    at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:442)
    at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:131)
    at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: R computation failed with
[1] 1
[1] 3
[1] 2
[1][1] 1 2
[1] 3
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
ignoring SIGPIPE signal
ignoring SIGPIPE signal
ignoring SIGPIPE signal
    at org.apache.spark.api.r.RRunner.compute(RRunner.scala:108)
    at org.apache.spark.api.r.BaseRRDD.compute(RRDD.scala:51)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:99)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:325)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    ... 1 more
1: expect_equal(collectRDD(lengthsPipelined), list(11, 11)) at /home/npentreath/spark-2.1.2/R/pkg/tests/fulltests/test_textFile.R:153
2: compare(object, expected, ...)
3: collectRDD(lengthsPipelined)
4: collectRDD(lengthsPipelined)
5: .local(x, ...)
6: callJMethod(getJRDD(x), "collect")
7: invokeJava(isStatic = FALSE, objId$id, methodName, ...)
8: handleErrors(returnStatus, conn)
9: stop(readString(conn))
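This test chains two lapply() transformations over a textFile() RDD, which SparkR fuses into a single R worker stage, so both run in the worker that crashes above. Here is a sketch of the pattern, reconstructed from the traceback; these RDD helpers are SparkR-internal (unexported, hence :::), and obtaining sc via getJavaSparkContext mirrors how the test suite does it.

```r
library(SparkR)
sparkSession <- sparkR.session(master = "local[1]")
sc <- SparkR:::callJStatic("org.apache.spark.sql.api.r.SQLUtils",
                           "getJavaSparkContext", sparkSession)

mockFile <- c("Spark is pretty.", "Spark is awesome.")
fileName <- tempfile(pattern = "spark-test", fileext = ".tmp")
writeLines(mockFile, fileName)

rdd <- SparkR:::textFile(sc, fileName)
# Two chained transformations: SparkR pipelines them into one R worker
# stage, matching the single RRunner frame in the trace above.
lengths <- SparkR:::lapply(rdd, function(x) { length(x) })           # 1 per line
lengthsPipelined <- SparkR:::lapply(lengths, function(x) { x + 10 })
SparkR:::collectRDD(lengthsPipelined)   # the test expects list(11, 11)

unlink(fileName)
```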
3. Error: serializeToBytes on RDD (@test_utils.R#51) ---------------------------
org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 2.0 failed 1 times, most recent failure: Lost task 1.0 in stage 2.0 (TID 3, localhost, executor driver): org.apache.spark.SparkException: R computation failed with
[1] 1
[1] 3
[1] 2
[1][1] 1 2
[1] 3
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
ignoring SIGPIPE signal
ignoring SIGPIPE signal
ignoring SIGPIPE signal
    at org.apache.spark.api.r.RRunner.compute(RRunner.scala:108)
    at org.apache.spark.api.r.BaseRRDD.compute(RRDD.scala:51)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:99)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:325)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1650)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:628)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1928)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1941)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1954)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1968)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:936)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:935)
    at org.apache.spark.api.java.JavaRDDLike$class.collect(JavaRDDLike.scala:361)
    at org.apache.spark.api.java.AbstractJavaRDDLike.collect(JavaRDDLike.scala:45)
    at sun.reflect.GeneratedMethodAccessor61.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:167)
    at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:108)
    at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:40)
    at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:293)
    at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:267)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1294)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:911)
    at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
    at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:643)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:566)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:480)
    at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:442)
    at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:131)
    at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: R computation failed with
[1] 1
[1] 3
[1] 2
[1][1] 1 2
[1] 3
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
ignoring SIGPIPE signal
ignoring SIGPIPE signal
ignoring SIGPIPE signal
    at org.apache.spark.api.r.RRunner.compute(RRunner.scala:108)
    at org.apache.spark.api.r.BaseRRDD.compute(RRDD.scala:51)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:99)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:325)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    ... 1 more
1: expect_equal(collectRDD(ser.rdd), as.list(mockFile)) at /home/npentreath/spark-2.1.2/R/pkg/tests/fulltests/test_utils.R:51
2: compare(object, expected, ...)
3: collectRDD(ser.rdd)
4: collectRDD(ser.rdd)
5: .local(x, ...)
6: callJMethod(getJRDD(x), "collect")
7: invokeJava(isStatic = FALSE, objId$id, methodName, ...)
8: handleErrors(returnStatus, conn)
9: stop(readString(conn))
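serializeToBytes() converts a string-serialized RDD (the form textFile() produces) into SparkR's byte-serialized form, and the test round-trips it through collectRDD(). A sketch under the same assumptions as the previous one (internal ::: API, sc obtained as above); the variable names mirror the traceback.

```r
mockFile <- c("Spark is pretty.", "Spark is awesome.")
fileName <- tempfile(pattern = "spark-test", fileext = ".tmp")
writeLines(mockFile, fileName)

text.rdd <- SparkR:::textFile(sc, fileName)      # elements serialized as strings
ser.rdd <- SparkR:::serializeToBytes(text.rdd)   # re-serialize as R byte arrays
SparkR:::collectRDD(ser.rdd)                     # should round-trip to as.list(mockFile)

unlink(fileName)
```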
4. Error: cleanClosure on R functions (@test_utils.R#131) ----------------------
org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 3.0 failed 1 times, most recent failure: Lost task 1.0 in stage 3.0 (TID 5, localhost, executor driver): org.apache.spark.SparkException: R computation failed with
[1] 1
[1] 3
[1] 2
[1][1] 1 2
[1] 3
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
ignoring SIGPIPE signal
ignoring SIGPIPE signal
ignoring SIGPIPE signal
    at org.apache.spark.api.r.RRunner.compute(RRunner.scala:108)
    at org.apache.spark.api.r.BaseRRDD.compute(RRDD.scala:51)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:99)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:325)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1650)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:628)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1928)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1941)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1954)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1968)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:936)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:935)
    at org.apache.spark.api.java.JavaRDDLike$class.collect(JavaRDDLike.scala:361)
    at org.apache.spark.api.java.AbstractJavaRDDLike.collect(JavaRDDLike.scala:45)
    at sun.reflect.GeneratedMethodAccessor61.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:167)
    at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:108)
    at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:40)
    at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:293)
    at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:267)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:336)
    at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1294)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:357)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:343)
    at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:911)
    at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
    at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:643)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:566)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:480)
    at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:442)
    at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:131)
    at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: R computation failed with
[1] 1
[1] 3
[1] 2
[1][1] 1 2
[1] 3
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
[1] 2
ignoring SIGPIPE signal
ignoring SIGPIPE signal
ignoring SIGPIPE signal
    at org.apache.spark.api.r.RRunner.compute(RRunner.scala:108)
    at org.apache.spark.api.r.BaseRRDD.compute(RRDD.scala:51)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
    at org.apache.spark.scheduler.Task.run(Task.scala:99)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:325)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    ... 1 more
1: collectRDD(lapply(rdd, f)) at /home/npentreath/spark-2.1.2/R/pkg/tests/fulltests/test_utils.R:131
2: collectRDD(lapply(rdd, f))
3: .local(x, ...)
4: callJMethod(getJRDD(x), "collect")
5: invokeJava(isStatic = FALSE, objId$id, methodName, ...)
6: handleErrors(returnStatus, conn)
7: stop(readString(conn))
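cleanClosure() is SparkR's closure-capture step: before a user function is serialized and shipped to the R workers, its body is walked and any free variables it references are copied into the function's environment. The traceback's collectRDD(lapply(rdd, f)) is the round-trip check that a cleaned closure still evaluates correctly on a worker. A rough sketch of that behaviour; the variable names are illustrative, and sc is obtained as in the earlier sketches.

```r
# A free variable referenced by f; cleanClosure() copies it into the
# function's environment so the serialized closure still sees it remotely.
field <- 1
f <- function(x) { x + field }

newF <- SparkR:::cleanClosure(f)
get("field", envir = environment(newF))   # the captured copy

rdd <- SparkR:::parallelize(sc, 1:3, 2L)
SparkR:::collectRDD(SparkR:::lapply(rdd, f))   # the failing call in frame 1 above
```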
DONE ===========================================================================
Error: Test failures
Execution halted
Had test warnings or failures; see logs.