Assertion failed
19/04/11 09:11:13 ERROR Executor: Exception in task 0.0 in stage 11.0 (TID 11)
java.lang.AssertionError: assertion failed
    at scala.Predef$.assert(Predef.scala:156)
    at org.apache.spark.sql.geosparksql.expressions.ST_GeomFromWKT.eval(Constructors.scala:123)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.writeFields_0_0$(Unknown Source)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:410)
    at org.apache.spark.sql.execution.columnar.CachedRDDBuilder$$anonfun$1$$anon$1.next(InMemoryRelation.scala:100)
    at org.apache.spark.sql.execution.columnar.CachedRDDBuilder$$anonfun$1$$anon$1.next(InMemoryRelation.scala:90)
    at org.apache.spark.storage.memory.MemoryStore.putIterator(MemoryStore.scala:221)
    at org.apache.spark.storage.memory.MemoryStore.putIteratorAsValues(MemoryStore.scala:298)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1165)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156)
    at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091)
    at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156)
    at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882)
    at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:335)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:286)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
    at org.apache.spark.scheduler.Task.run(Task.scala:121)
    at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
19/04/11 09:11:13 WARN TaskSetManager: Lost task 0.0 in stage 11.0 (TID 11, localhost, executor driver): java.lang.AssertionError: assertion failed
    at scala.Predef$.assert(Predef.scala:156)
    at org.apache.spark.sql.geosparksql.expressions.ST_GeomFromWKT.eval(Constructors.scala:123)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.writeFields_0_0$(Unknown Source)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:410)
    at org.apache.spark.sql.execution.columnar.CachedRDDBuilder$$anonfun$1$$anon$1.next(InMemoryRelation.scala:100)
    at org.apache.spark.sql.execution.columnar.CachedRDDBuilder$$anonfun$1$$anon$1.next(InMemoryRelation.scala:90)
    at org.apache.spark.storage.memory.MemoryStore.putIterator(MemoryStore.scala:221)
    at org.apache.spark.storage.memory.MemoryStore.putIteratorAsValues(MemoryStore.scala:298)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1165)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156)
    at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091)
    at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156)
    at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882)
    at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:335)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:286)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
    at org.apache.spark.scheduler.Task.run(Task.scala:121)
    at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
19/04/11 09:11:13 ERROR TaskSetManager: Task 0 in stage 11.0 failed 1 times; aborting job
19/04/11 09:11:13 INFO TaskSchedulerImpl: Removed TaskSet 11.0, whose tasks have all completed, from pool
19/04/11 09:11:13 INFO TaskSchedulerImpl: Cancelling stage 11
19/04/11 09:11:13 INFO TaskSchedulerImpl: Killing all running tasks in stage 11: Stage cancelled
19/04/11 09:11:13 INFO DAGScheduler: ShuffleMapStage 11 (sql at NativeMethodAccessorImpl.java:0) failed in 0,133 s due to Job aborted due to stage failure: Task 0 in stage 11.0 failed 1 times, most recent failure: Lost task 0.0 in stage 11.0 (TID 11, localhost, executor driver): java.lang.AssertionError: assertion failed
    at scala.Predef$.assert(Predef.scala:156)
    at org.apache.spark.sql.geosparksql.expressions.ST_GeomFromWKT.eval(Constructors.scala:123)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.writeFields_0_0$(Unknown Source)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:410)
    at org.apache.spark.sql.execution.columnar.CachedRDDBuilder$$anonfun$1$$anon$1.next(InMemoryRelation.scala:100)
    at org.apache.spark.sql.execution.columnar.CachedRDDBuilder$$anonfun$1$$anon$1.next(InMemoryRelation.scala:90)
    at org.apache.spark.storage.memory.MemoryStore.putIterator(MemoryStore.scala:221)
    at org.apache.spark.storage.memory.MemoryStore.putIteratorAsValues(MemoryStore.scala:298)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1165)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156)
    at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091)
    at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156)
    at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882)
    at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:335)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:286)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
    at org.apache.spark.scheduler.Task.run(Task.scala:121)
    at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
19/04/11 09:11:13 INFO DAGScheduler: Job 7 failed: sql at NativeMethodAccessorImpl.java:0, took 0,138703 s
19/04/11 09:11:13 ERROR sparklyr: Gateway (54528) failed calling sql on 13: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 11.0 failed 1 times, most recent failure: Lost task 0.0 in stage 11.0 (TID 11, localhost, executor driver): java.lang.AssertionError: assertion failed
    at scala.Predef$.assert(Predef.scala:156)
    at org.apache.spark.sql.geosparksql.expressions.ST_GeomFromWKT.eval(Constructors.scala:123)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.writeFields_0_0$(Unknown Source)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:410)
    at org.apache.spark.sql.execution.columnar.CachedRDDBuilder$$anonfun$1$$anon$1.next(InMemoryRelation.scala:100)
    at org.apache.spark.sql.execution.columnar.CachedRDDBuilder$$anonfun$1$$anon$1.next(InMemoryRelation.scala:90)
    at org.apache.spark.storage.memory.MemoryStore.putIterator(MemoryStore.scala:221)
    at org.apache.spark.storage.memory.MemoryStore.putIteratorAsValues(MemoryStore.scala:298)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1165)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156)
    at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091)
    at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156)
    at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882)
    at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:335)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:286)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
    at org.apache.spark.scheduler.Task.run(Task.scala:121)
    at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1887)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1875)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1874)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1874)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:926)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:926)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2108)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2057)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2046)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:737)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2061)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2082)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2101)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2126)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:945)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:944)
    at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:299)
    at org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2831)
    at org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2830)
    at org.apache.spark.sql.Dataset$$anonfun$53.apply(Dataset.scala:3365)
    at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3364)
    at org.apache.spark.sql.Dataset.count(Dataset.scala:2830)
    at org.apache.spark.sql.execution.command.CacheTableCommand.run(cache.scala:43)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
    at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:195)
    at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:195)
    at org.apache.spark.sql.Dataset$$anonfun$53.apply(Dataset.scala:3365)
    at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3364)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:195)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:80)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at sparklyr.Invoke.invoke(invoke.scala:139)
    at sparklyr.StreamHandler.handleMethodCall(stream.scala:123)
    at sparklyr.StreamHandler.read(stream.scala:66)
    at sparklyr.BackendHandler.channelRead0(handler.scala:51)
    at sparklyr.BackendHandler.channelRead0(handler.scala:4)
    at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340)
    at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340)
    at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310)
    at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:284)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340)
    at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1359)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
    at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:935)
    at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:138)
    at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:645)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:580)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:497)
    at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:459)
    at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858)
    at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.AssertionError: assertion failed
    at scala.Predef$.assert(Predef.scala:156)
    at org.apache.spark.sql.geosparksql.expressions.ST_GeomFromWKT.eval(Constructors.scala:123)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.writeFields_0_0$(Unknown Source)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$SpecificUnsafeProjection.apply(Unknown Source)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:410)
    at org.apache.spark.sql.execution.columnar.CachedRDDBuilder$$anonfun$1$$anon$1.next(InMemoryRelation.scala:100)
    at org.apache.spark.sql.execution.columnar.CachedRDDBuilder$$anonfun$1$$anon$1.next(InMemoryRelation.scala:90)
    at org.apache.spark.storage.memory.MemoryStore.putIterator(MemoryStore.scala:221)
    at org.apache.spark.storage.memory.MemoryStore.putIteratorAsValues(MemoryStore.scala:298)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1165)
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1156)
    at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:1091)
    at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1156)
    at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:882)
    at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:335)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:286)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
    at org.apache.spark.scheduler.Task.run(Task.scala:121)
    at org.apache.spark.executor.Executor$TaskRunner$$anonfun$10.apply(Executor.scala:402)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1360)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:408)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    ... 1 more
19/04/11 09:11:13 INFO HiveMetaStore: 0: get_database: default
19/04/11 09:11:13 INFO audit: ugi=geoheil ip=unknown-ip-addr cmd=get_database: default
19/04/11 09:11:13 INFO HiveMetaStore: 0: get_database: default
19/04/11 09:11:13 INFO audit: ugi=geoheil ip=unknown-ip-addr cmd=get_database: default
19/04/11 09:11:13 INFO HiveMetaStore: 0: get_tables: db=default pat=*
19/04/11 09:11:13 INFO audit: ugi=geoheil ip=unknown-ip-addr cmd=get_tables: db=default pat=*
19/04/11 09:11:51 INFO HiveMetaStore: 0: get_database: default
19/04/11 09:11:51 INFO audit: ugi=geoheil ip=unknown-ip-addr cmd=get_database: default
19/04/11 09:11:51 INFO HiveMetaStore: 0: get_database: default
19/04/11 09:11:51 INFO audit: ugi=geoheil ip=unknown-ip-addr cmd=get_database: default
19/04/11 09:11:51 INFO HiveMetaStore: 0: get_tables: db=default pat=*
19/04/11 09:11:51 INFO audit: ugi=geoheil ip=unknown-ip-addr cmd=get_tables: db=default pat=*
19/04/11 09:11:51 INFO HiveMetaStore: 0: get_database: default
19/04/11 09:11:51 INFO audit: ugi=geoheil ip=unknown-ip-addr cmd=get_database: default
19/04/11 09:11:51 INFO HiveMetaStore: 0: get_database: default
19/04/11 09:11:51 INFO audit: ugi=geoheil ip=unknown-ip-addr cmd=get_database: default
19/04/11 09:11:51 INFO HiveMetaStore: 0: get_tables: db=default pat=*
19/04/11 09:11:51 INFO audit: ugi=geoheil ip=unknown-ip-addr cmd=get_tables: db=default pat=*
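Note: the assertion is thrown by ST_GeomFromWKT.eval (Constructors.scala:123) while Spark materializes a cached table (CacheTableCommand -> InMemoryRelation), so every row is pushed through the WKT constructor. In practice this assertion is commonly traced to null, empty, or otherwise unparseable WKT strings in the input column, though the exact assert at that line depends on the GeoSpark version. Below is a minimal defensive sketch, not the original code from this gist: the table name `points` and column `wkt` are hypothetical, and the JTS package may be com.vividsolutions.jts or org.locationtech.jts depending on which GeoSpark release is on the classpath. The idea is to validate each string with a JTS WKTReader before handing it to ST_GeomFromWKT.

```scala
import org.apache.spark.sql.SparkSession
import org.datasyslab.geosparksql.utils.GeoSparkSQLRegistrator
import com.vividsolutions.jts.io.WKTReader // org.locationtech.jts.io in newer JTS builds

object WktValidation {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("wkt-validation")
      .master("local[*]")
      .getOrCreate()

    // Register the ST_* functions (ST_GeomFromWKT among them).
    GeoSparkSQLRegistrator.registerAll(spark)

    // UDF that returns true only if the string parses as WKT.
    // WKTReader is not thread-safe, so a fresh reader is created per call.
    spark.udf.register("is_valid_wkt", (s: String) => {
      if (s == null) false
      else try { new WKTReader().read(s); true }
      catch { case _: Exception => false }
    })

    // Hypothetical input table `points` with a string column `wkt`:
    // construct geometries only from rows that actually parse.
    spark.sql(
      """
        |SELECT ST_GeomFromWKT(wkt) AS geom
        |FROM points
        |WHERE is_valid_wkt(wkt)
      """.stripMargin).show()
  }
}
```

From sparklyr (which is what issued the failing `sql` call in the trace above), the same pre-filtering query can be run against the registered table before caching it, so the cache materialization never sees a row that would trip the constructor.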