Created August 28, 2018 07:52
Error while running SparkR on K8s
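For context, the failure below surfaces when the SparkR session is started; the last lines of the log show the call chain beginning at sparkR.session. A minimal sketch of such a driver script, assuming the Kubernetes master URL and container image are supplied by spark-submit rather than in R (the app name here is a placeholder, not taken from the log):

    # Hypothetical driver script: sparkR.session() calls
    # org.apache.spark.api.r.RRDD.createSparkContext on the JVM side,
    # which is where the stack trace below originates.
    library(SparkR)
    sparkR.session(appName = "sparkr-on-k8s-test")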
18/08/28 07:01:13 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Loading required package: methods

Attaching package: ‘SparkR’

The following objects are masked from ‘package:stats’:

    cov, filter, lag, na.omit, predict, sd, var, window

The following objects are masked from ‘package:base’:

    as.data.frame, colnames, colnames<-, drop, endsWith, intersect,
    rank, rbind, sample, startsWith, subset, summary, transform, union

Warning message:
package ‘SparkR’ was built under R version 3.4.4
Spark package found in SPARK_HOME: /opt/spark
18/08/28 07:01:14 ERROR RBackendHandler: createSparkContext on org.apache.spark.api.r.RRDD failed
java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.api.r.RBackendHandler.handleMethodCall(RBackendHandler.scala:167)
    at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:108)
    at org.apache.spark.api.r.RBackendHandler.channelRead0(RBackendHandler.scala:40)
    at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340)
    at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:286)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340)
    at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340)
    at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:310)
    at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:284)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340)
    at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1359)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
    at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:935)
    at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:138)
    at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:645)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:580)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:497)
    at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:459)
    at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858)
    at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.NoSuchFieldError: GIT_HASH
    at org.eclipse.jetty.server.Server.doStart(Server.java:348)
    at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
    at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:328)
    at org.apache.spark.ui.WebUI.bind(WebUI.scala:132)
    at org.apache.spark.SparkContext$$anonfun$11.apply(SparkContext.scala:452)
    at org.apache.spark.SparkContext$$anonfun$11.apply(SparkContext.scala:452)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:452)
    at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
    at org.apache.spark.api.r.RRDD$.createSparkContext(RRDD.scala:139)
    at org.apache.spark.api.r.RRDD.createSparkContext(RRDD.scala)
    ... 36 more
Error in handleErrors(returnStatus, conn) :
  java.lang.NoSuchFieldError: GIT_HASH
    at org.eclipse.jetty.server.Server.doStart(Server.java:348)
    at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68)
    at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:328)
    at org.apache.spark.ui.WebUI.bind(WebUI.scala:132)
    at org.apache.spark.SparkContext$$anonfun$11.apply(SparkContext.scala:452)
    at org.apache.spark.SparkContext$$anonfun$11.apply(SparkContext.scala:452)
    at scala.Option.foreach(Option.scala:257)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:452)
    at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
    at org.apache.spark.api.r.RRDD$.createSparkContext(RRDD.scala:139)
    at org.apache.spark.api.r.RRDD.createSparkContext(RRDD.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethod
Calls: sparkR.session ... assign -> callJStatic -> invokeJava -> handleErrors
Execution halted
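A java.lang.NoSuchFieldError: GIT_HASH thrown from org.eclipse.jetty.server.Server.doStart typically points to mismatched Jetty artifacts on the driver classpath, for example a jetty-util class that predates the Jetty.GIT_HASH field being loaded next to a newer jetty-server. That is an assumption about this particular image, not something the log proves; a quick check from the same R session, using the SPARK_HOME reported above:

    # List the Jetty jars bundled under the reported SPARK_HOME (/opt/spark)
    # to look for more than one Jetty version on the classpath.
    list.files("/opt/spark/jars", pattern = "jetty", ignore.case = TRUE)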