20/07/02 15:59:08 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
20/07/02 15:59:09 INFO ElasticsearchDependenciesJob: Running Dependencies job for 2020-07-02T00:00Z, reading from jaeger-span-2020-07-02 index, result storing to jaeger-dependencies-2020-07-02
20/07/02 15:59:11 ERROR Executor: Exception in task 0.0 in stage 0.0 (TID 0)
java.lang.NullPointerException
    at java.util.Objects.requireNonNull(Objects.java:203)
    at java.util.Arrays$ArrayList.<init>(Arrays.java:3813)
    at java.util.Arrays.asList(Arrays.java:3800)
    at io.jaegertracing.spark.dependencies.elastic.json.SpanDeserializer.deserializeReferences(SpanDeserializer.java:100)
    at io.jaegertracing.spark.dependencies.elastic.json.SpanDeserializer.deserialize(SpanDeserializer.java:69)
    at io.jaegertracing.spark.dependencies.elastic.json.SpanDeserializer.deserialize(SpanDeserializer.java:37)
    at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3736)
    at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2726)
    at io.jaegertracing.spark.dependencies.elastic.ElasticTupleToSpan.call(ElasticTupleToSpan.java:31)
    at io.jaegertracing.spark.dependencies.elastic.ElasticTupleToSpan.call(ElasticTupleToSpan.java:25)
    at org.apache.spark.api.java.JavaPairRDD$$anonfun$toScalaFunction$1.apply(JavaPairRDD.scala:1040)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:149)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
    at org.apache.spark.scheduler.Task.run(Task.scala:109)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
20/07/02 15:59:12 WARN TaskSetManager: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.lang.NullPointerException
    at java.util.Objects.requireNonNull(Objects.java:203)
    at java.util.Arrays$ArrayList.<init>(Arrays.java:3813)
    at java.util.Arrays.asList(Arrays.java:3800)
    at io.jaegertracing.spark.dependencies.elastic.json.SpanDeserializer.deserializeReferences(SpanDeserializer.java:100)
    at io.jaegertracing.spark.dependencies.elastic.json.SpanDeserializer.deserialize(SpanDeserializer.java:69)
    at io.jaegertracing.spark.dependencies.elastic.json.SpanDeserializer.deserialize(SpanDeserializer.java:37)
    at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3736)
    at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2726)
    at io.jaegertracing.spark.dependencies.elastic.ElasticTupleToSpan.call(ElasticTupleToSpan.java:31)
    at io.jaegertracing.spark.dependencies.elastic.ElasticTupleToSpan.call(ElasticTupleToSpan.java:25)
    at org.apache.spark.api.java.JavaPairRDD$$anonfun$toScalaFunction$1.apply(JavaPairRDD.scala:1040)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:149)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
    at org.apache.spark.scheduler.Task.run(Task.scala:109)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
20/07/02 15:59:12 ERROR TaskSetManager: Task 0 in stage 0.0 failed 1 times; aborting job
20/07/02 15:59:12 WARN TaskSetManager: Lost task 1.0 in stage 0.0 (TID 1, localhost, executor driver): TaskKilled (Stage cancelled)
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.lang.NullPointerException
    at java.util.Objects.requireNonNull(Objects.java:203)
    at java.util.Arrays$ArrayList.<init>(Arrays.java:3813)
    at java.util.Arrays.asList(Arrays.java:3800)
    at io.jaegertracing.spark.dependencies.elastic.json.SpanDeserializer.deserializeReferences(SpanDeserializer.java:100)
    at io.jaegertracing.spark.dependencies.elastic.json.SpanDeserializer.deserialize(SpanDeserializer.java:69)
    at io.jaegertracing.spark.dependencies.elastic.json.SpanDeserializer.deserialize(SpanDeserializer.java:37)
    at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3736)
    at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2726)
    at io.jaegertracing.spark.dependencies.elastic.ElasticTupleToSpan.call(ElasticTupleToSpan.java:31)
    at io.jaegertracing.spark.dependencies.elastic.ElasticTupleToSpan.call(ElasticTupleToSpan.java:25)
    at org.apache.spark.api.java.JavaPairRDD$$anonfun$toScalaFunction$1.apply(JavaPairRDD.scala:1040)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:149)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
    at org.apache.spark.scheduler.Task.run(Task.scala:109)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1599)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1587)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1586)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1586)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1820)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1769)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1758)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2027)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2048)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2067)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:2092)
    at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:939)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
    at org.apache.spark.rdd.RDD.collect(RDD.scala:938)
    at org.apache.spark.api.java.JavaRDDLike$class.collect(JavaRDDLike.scala:361)
    at org.apache.spark.api.java.AbstractJavaRDDLike.collect(JavaRDDLike.scala:45)
    at io.jaegertracing.spark.dependencies.DependenciesSparkHelper.derive(DependenciesSparkHelper.java:44)
    at io.jaegertracing.spark.dependencies.elastic.ElasticsearchDependenciesJob.run(ElasticsearchDependenciesJob.java:225)
    at io.jaegertracing.spark.dependencies.elastic.ElasticsearchDependenciesJob.run(ElasticsearchDependenciesJob.java:203)
    at io.jaegertracing.spark.dependencies.DependenciesSparkJob.run(DependenciesSparkJob.java:54)
    at io.jaegertracing.spark.dependencies.DependenciesSparkJob.main(DependenciesSparkJob.java:40)
Caused by: java.lang.NullPointerException
    at java.util.Objects.requireNonNull(Objects.java:203)
    at java.util.Arrays$ArrayList.<init>(Arrays.java:3813)
    at java.util.Arrays.asList(Arrays.java:3800)
    at io.jaegertracing.spark.dependencies.elastic.json.SpanDeserializer.deserializeReferences(SpanDeserializer.java:100)
    at io.jaegertracing.spark.dependencies.elastic.json.SpanDeserializer.deserialize(SpanDeserializer.java:69)
    at io.jaegertracing.spark.dependencies.elastic.json.SpanDeserializer.deserialize(SpanDeserializer.java:37)
    at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3736)
    at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2726)
    at io.jaegertracing.spark.dependencies.elastic.ElasticTupleToSpan.call(ElasticTupleToSpan.java:31)
    at io.jaegertracing.spark.dependencies.elastic.ElasticTupleToSpan.call(ElasticTupleToSpan.java:25)
    at org.apache.spark.api.java.JavaPairRDD$$anonfun$toScalaFunction$1.apply(JavaPairRDD.scala:1040)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:149)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:96)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
    at org.apache.spark.scheduler.Task.run(Task.scala:109)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
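What the trace pins down: Arrays.asList fails inside Objects.requireNonNull (Objects.java:203 via Arrays$ArrayList.<init>), which only happens when asList is handed a null array. So SpanDeserializer.deserializeReferences (SpanDeserializer.java:100) is passing a null reference array, most plausibly because the span documents in jaeger-span-2020-07-02 carry no "references" field. Below is a minimal, hypothetical Java sketch of that failure mode plus a null-safe variant; the method names and the shape of the deserialization step are assumptions for illustration, not the actual spark-dependencies source.

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class ReferencesNpeSketch {

        private static final ObjectMapper MAPPER = new ObjectMapper();

        // Suspected failure mode (hypothetical reconstruction): when the span
        // JSON has no "references" field, the intermediate array stays null and
        // Arrays.asList(null) throws the NullPointerException seen at
        // SpanDeserializer.java:100 in the log above.
        static List<JsonNode> referencesUnsafe(JsonNode span) {
            JsonNode refs = span.get("references"); // null if the field is absent
            JsonNode[] array = (refs == null) ? null : toArray(refs);
            return Arrays.asList(array); // NPE here when array == null
        }

        // Null-safe variant: treat a missing or null "references" field as an
        // empty reference list instead of crashing the Spark task.
        static List<JsonNode> referencesSafe(JsonNode span) {
            JsonNode refs = span.get("references");
            if (refs == null || refs.isNull()) {
                return Collections.emptyList();
            }
            return Arrays.asList(toArray(refs));
        }

        // Copy the elements of a JSON array node into a plain Java array.
        private static JsonNode[] toArray(JsonNode arrayNode) {
            JsonNode[] out = new JsonNode[arrayNode.size()];
            for (int i = 0; i < arrayNode.size(); i++) {
                out[i] = arrayNode.get(i);
            }
            return out;
        }

        public static void main(String[] args) throws Exception {
            JsonNode span = MAPPER.readTree("{\"spanID\":\"abc\"}"); // no "references"
            System.out.println(referencesSafe(span));   // prints []
            referencesUnsafe(span);                     // throws NullPointerException
        }
    }

If this reading is right, the job fails on any span document written without a references array, and a deserializer that tolerates the field's absence (as in referencesSafe above) would let the dependencies job proceed.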