Created
February 6, 2019 21:41
-
-
Save maziyarpanahi/3380e230246271217a2feb4512f5d665 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
INFO [2019-02-06 22:23:16,364] ({main} RemoteInterpreterServer.java[<init>]:148) - Starting remote interpreter server on port 0, intpEventServerAddress: IP_ADDRESS:36131 | |
INFO [2019-02-06 22:23:16,384] ({main} RemoteInterpreterServer.java[<init>]:175) - Launching ThriftServer at IP_ADDRESS:46727 | |
INFO [2019-02-06 22:23:16,549] ({pool-6-thread-1} RemoteInterpreterServer.java[createInterpreter]:333) - Instantiate interpreter org.apache.zeppelin.spark.SparkInterpreter | |
INFO [2019-02-06 22:23:16,553] ({pool-6-thread-1} RemoteInterpreterServer.java[createInterpreter]:333) - Instantiate interpreter org.apache.zeppelin.spark.SparkSqlInterpreter | |
INFO [2019-02-06 22:23:16,556] ({pool-6-thread-1} RemoteInterpreterServer.java[createInterpreter]:333) - Instantiate interpreter org.apache.zeppelin.spark.DepInterpreter | |
INFO [2019-02-06 22:23:16,560] ({pool-6-thread-1} RemoteInterpreterServer.java[createInterpreter]:333) - Instantiate interpreter org.apache.zeppelin.spark.PySparkInterpreter | |
INFO [2019-02-06 22:23:16,563] ({pool-6-thread-1} RemoteInterpreterServer.java[createInterpreter]:333) - Instantiate interpreter org.apache.zeppelin.spark.IPySparkInterpreter | |
INFO [2019-02-06 22:23:16,565] ({pool-6-thread-1} RemoteInterpreterServer.java[createInterpreter]:333) - Instantiate interpreter org.apache.zeppelin.spark.SparkRInterpreter | |
WARN [2019-02-06 22:23:16,680] ({pool-6-thread-2} ZeppelinConfiguration.java[create]:123) - Failed to load configuration, proceeding with a default | |
INFO [2019-02-06 22:23:16,701] ({pool-6-thread-2} ZeppelinConfiguration.java[create]:135) - Server Host: 0.0.0.0 | |
INFO [2019-02-06 22:23:16,701] ({pool-6-thread-2} ZeppelinConfiguration.java[create]:137) - Server Port: 8080 | |
INFO [2019-02-06 22:23:16,702] ({pool-6-thread-2} ZeppelinConfiguration.java[create]:141) - Context Path: / | |
INFO [2019-02-06 22:23:16,702] ({pool-6-thread-2} ZeppelinConfiguration.java[create]:142) - Zeppelin Version: 0.9.0-SNAPSHOT | |
INFO [2019-02-06 22:23:16,702] ({pool-6-thread-2} SchedulerFactory.java[<init>]:62) - Scheduler Thread Pool Size: 100 | |
INFO [2019-02-06 22:23:16,704] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:818) - job:null | |
INFO [2019-02-06 22:23:16,704] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:818) - job:null | |
INFO [2019-02-06 22:23:16,706] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:818) - job:null | |
INFO [2019-02-06 22:23:16,706] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:818) - job:org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob@f7c36f41 | |
INFO [2019-02-06 22:23:16,706] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:820) - getStatus: PENDING | |
INFO [2019-02-06 22:23:16,707] ({FIFOScheduler-Worker-1} AbstractScheduler.java[runJob]:116) - Job 20190122-114738_891569967 started by scheduler interpreter_2072451381 | |
INFO [2019-02-06 22:23:19,625] ({FIFOScheduler-Worker-1} OldSparkInterpreter.java[createSparkSession]:265) - ------ Create new SparkSession yarn ------- | |
INFO [2019-02-06 22:23:19,706] ({FIFOScheduler-Worker-1} HiveConf.java[findConfigFile]:188) - Found configuration file file:/etc/hive/conf.cloudera.hive/hive-site.xml | |
INFO [2019-02-06 22:23:19,823] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Running Spark version 2.4.0-cdh6.1.0 | |
INFO [2019-02-06 22:23:19,847] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Submitted application: Zeppelin | |
INFO [2019-02-06 22:23:19,899] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Changing view acls to: maziyar | |
INFO [2019-02-06 22:23:19,900] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Changing modify acls to: maziyar | |
INFO [2019-02-06 22:23:19,900] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Changing view acls groups to: | |
INFO [2019-02-06 22:23:19,900] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Changing modify acls groups to: | |
INFO [2019-02-06 22:23:19,900] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(maziyar); groups with view permissions: Set(); users with modify permissions: Set(maziyar); groups with modify permissions: Set() | |
INFO [2019-02-06 22:23:20,110] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Successfully started service 'sparkDriver' on port 36050. | |
INFO [2019-02-06 22:23:20,132] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Registering MapOutputTracker | |
INFO [2019-02-06 22:23:20,150] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Registering BlockManagerMaster | |
INFO [2019-02-06 22:23:20,153] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information | |
INFO [2019-02-06 22:23:20,153] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - BlockManagerMasterEndpoint up | |
INFO [2019-02-06 22:23:20,163] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Created local directory at /tmp/blockmgr-a2730412-d3d8-4a14-bca0-dd75aac172bb | |
INFO [2019-02-06 22:23:20,176] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - MemoryStore started with capacity 2004.6 MB | |
INFO [2019-02-06 22:23:20,190] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Registering OutputCommitCoordinator | |
INFO [2019-02-06 22:23:20,263] ({FIFOScheduler-Worker-1} Log.java[initialized]:192) - Logging initialized @7402ms | |
INFO [2019-02-06 22:23:20,666] ({FIFOScheduler-Worker-1} Server.java[doStart]:346) - jetty-9.3.z-SNAPSHOT | |
INFO [2019-02-06 22:23:20,683] ({FIFOScheduler-Worker-1} Server.java[doStart]:414) - Started @7823ms | |
INFO [2019-02-06 22:23:20,703] ({FIFOScheduler-Worker-1} AbstractConnector.java[doStart]:278) - Started ServerConnector@377e21b7{HTTP/1.1,[http/1.1]}{0.0.0.0:4040} | |
INFO [2019-02-06 22:23:20,704] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Successfully started service 'SparkUI' on port 4040. | |
INFO [2019-02-06 22:23:20,731] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@16de3bcd{/jobs,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,731] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@150c2ded{/jobs/json,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,732] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@36526c78{/jobs/job,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,734] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@5eb4d873{/jobs/job/json,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,735] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@2aa3f50d{/stages,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,735] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@f376d8d{/stages/json,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,736] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@456a39f5{/stages/stage,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,737] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@3cb4d8d4{/stages/stage/json,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,738] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@2885eb26{/stages/pool,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,738] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@3dc6e613{/stages/pool/json,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,739] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@69f3af53{/storage,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,740] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@731ba8c1{/storage/json,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,740] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@7440b15f{/storage/rdd,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,741] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@7e3c37ba{/storage/rdd/json,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,741] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@a419619{/environment,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,742] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@24326a30{/environment/json,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,743] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@5589eda0{/executors,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,744] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@4a17814c{/executors/json,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,745] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@4ce02749{/executors/threadDump,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,745] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@7b34a70f{/executors/threadDump/json,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,752] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@61cab124{/static,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,753] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@187fc614{/,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,754] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@3749ece{/api,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,755] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@988a9e5{/jobs/job/kill,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,755] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@7726818c{/stages/stage/kill,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:20,757] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Bound SparkUI to 0.0.0.0, and started at http://hadoop-gateway:4040 | |
INFO [2019-02-06 22:23:20,792] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Added JAR hdfs:///user/maziyar/jars/zeppelin/graphframes/graphframes-assembly-0.7.0-spark2.3-SNAPSHOT.jar at hdfs:///user/maziyar/jars/zeppelin/graphframes/graphframes-assembly-0.7.0-spark2.3-SNAPSHOT.jar with timestamp 1549488200792 | |
INFO [2019-02-06 22:23:20,792] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Added JAR hdfs:///user/maziyar/jars/zeppelin/spark-nlp/spark-nlp-assembly-1.8.0.jar at hdfs:///user/maziyar/jars/zeppelin/spark-nlp/spark-nlp-assembly-1.8.0.jar with timestamp 1549488200792 | |
INFO [2019-02-06 22:23:20,793] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Added JAR hdfs:///user/maziyar/jars/zeppelin/elasticsearch/elasticsearch-spark-20_2.11-6.4.2.jar at hdfs:///user/maziyar/jars/zeppelin/elasticsearch/elasticsearch-spark-20_2.11-6.4.2.jar with timestamp 1549488200793 | |
INFO [2019-02-06 22:23:20,793] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Added JAR hdfs:///user/maziyar/jars/zeppelin/magellan/magellan-assembly-1.0.6-SNAPSHOT.jar at hdfs:///user/maziyar/jars/zeppelin/magellan/magellan-assembly-1.0.6-SNAPSHOT.jar with timestamp 1549488200793 | |
WARN [2019-02-06 22:23:20,828] ({FIFOScheduler-Worker-1} Logging.scala[logWarning]:66) - Fair Scheduler configuration file not found so jobs will be scheduled in FIFO order. To use fair scheduling, configure pools in fairscheduler.xml or set spark.scheduler.allocation.file to a file that contains the configuration. | |
INFO [2019-02-06 22:23:20,833] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Created default pool: default, schedulingMode: FIFO, minShare: 0, weight: 1 | |
INFO [2019-02-06 22:23:20,851] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Using initial executors = 1, max of spark.dynamicAllocation.initialExecutors, spark.dynamicAllocation.minExecutors and spark.executor.instances | |
INFO [2019-02-06 22:23:20,916] ({FIFOScheduler-Worker-1} RMProxy.java[newProxyInstance]:133) - Connecting to ResourceManager at hadoop-master-1/IP_ADDRESS:8032 | |
INFO [2019-02-06 22:23:21,063] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Requesting a new application from cluster with 30 NodeManagers | |
INFO [2019-02-06 22:23:21,108] ({FIFOScheduler-Worker-1} Configuration.java[getConfResourceAsInputStream]:2663) - resource-types.xml not found | |
INFO [2019-02-06 22:23:21,109] ({FIFOScheduler-Worker-1} ResourceUtils.java[addResourcesFileToConf]:418) - Unable to find 'resource-types.xml'. | |
INFO [2019-02-06 22:23:21,116] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Verifying our application has not requested more than the maximum memory capability of the cluster (12288 MB per container) | |
INFO [2019-02-06 22:23:21,117] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Will allocate AM container, with 896 MB memory including 384 MB overhead | |
INFO [2019-02-06 22:23:21,118] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Setting up container launch context for our AM | |
INFO [2019-02-06 22:23:21,121] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Setting up the launch environment for our AM container | |
INFO [2019-02-06 22:23:21,130] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Preparing resources for our AM container | |
INFO [2019-02-06 22:23:21,189] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Source and destination file systems are the same. Not copying hdfs://hadoop-master-1:8020/user/maziyar/jars/zeppelin/graphframes/graphframes-assembly-0.7.0-spark2.3-SNAPSHOT.jar | |
INFO [2019-02-06 22:23:21,227] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Source and destination file systems are the same. Not copying hdfs://hadoop-master-1:8020/user/maziyar/jars/zeppelin/spark-nlp/spark-nlp-assembly-1.8.0.jar | |
INFO [2019-02-06 22:23:21,235] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Source and destination file systems are the same. Not copying hdfs://hadoop-master-1:8020/user/maziyar/jars/zeppelin/elasticsearch/elasticsearch-spark-20_2.11-6.4.2.jar | |
INFO [2019-02-06 22:23:21,242] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Source and destination file systems are the same. Not copying hdfs://hadoop-master-1:8020/user/maziyar/jars/zeppelin/magellan/magellan-assembly-1.0.6-SNAPSHOT.jar | |
INFO [2019-02-06 22:23:21,394] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Uploading resource file:/tmp/spark-ce077844-983e-42a5-beea-2c53f8081aae/__spark_conf__5317273313304773118.zip -> hdfs://hadoop-master-1:8020/user/maziyar/.sparkStaging/application_1549482247985_0006/__spark_conf__.zip | |
INFO [2019-02-06 22:23:21,676] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Changing view acls to: maziyar | |
INFO [2019-02-06 22:23:21,677] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Changing modify acls to: maziyar | |
INFO [2019-02-06 22:23:21,685] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Changing view acls groups to: | |
INFO [2019-02-06 22:23:21,685] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Changing modify acls groups to: | |
INFO [2019-02-06 22:23:21,686] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(maziyar); groups with view permissions: Set(); users with modify permissions: Set(maziyar); groups with modify permissions: Set() | |
INFO [2019-02-06 22:23:21,728] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Submitting application application_1549482247985_0006 to ResourceManager | |
INFO [2019-02-06 22:23:21,776] ({FIFOScheduler-Worker-1} YarnClientImpl.java[submitApplication]:310) - Submitted application application_1549482247985_0006 | |
INFO [2019-02-06 22:23:21,779] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Starting Yarn extension services with app application_1549482247985_0006 and attemptId None | |
INFO [2019-02-06 22:23:22,786] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Application report for application_1549482247985_0006 (state: ACCEPTED) | |
INFO [2019-02-06 22:23:22,790] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - | |
client token: N/A | |
diagnostics: AM container is launched, waiting for AM container to Register with RM | |
ApplicationMaster host: N/A | |
ApplicationMaster RPC port: -1 | |
queue: root.users.maziyar | |
start time: 1549488346609 | |
final status: UNDEFINED | |
tracking URL: http://hadoop-master-1:8088/proxy/application_1549482247985_0006/ | |
user: maziyar | |
INFO [2019-02-06 22:23:23,793] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Application report for application_1549482247985_0006 (state: ACCEPTED) | |
INFO [2019-02-06 22:23:24,796] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Application report for application_1549482247985_0006 (state: ACCEPTED) | |
INFO [2019-02-06 22:23:25,802] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Application report for application_1549482247985_0006 (state: ACCEPTED) | |
INFO [2019-02-06 22:23:26,806] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Application report for application_1549482247985_0006 (state: ACCEPTED) | |
INFO [2019-02-06 22:23:27,887] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Application report for application_1549482247985_0006 (state: ACCEPTED) | |
INFO [2019-02-06 22:23:28,493] ({dispatcher-event-loop-10} Logging.scala[logInfo]:54) - Add WebUI Filter. org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter, Map(PROXY_HOSTS -> hadoop-master-1, PROXY_URI_BASES -> http://hadoop-master-1:8088/proxy/application_1549482247985_0006), /proxy/application_1549482247985_0006 | |
INFO [2019-02-06 22:23:28,496] ({dispatcher-event-loop-10} Logging.scala[logInfo]:54) - Adding filter org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter to /jobs, /jobs/json, /jobs/job, /jobs/job/json, /stages, /stages/json, /stages/stage, /stages/stage/json, /stages/pool, /stages/pool/json, /storage, /storage/json, /storage/rdd, /storage/rdd/json, /environment, /environment/json, /executors, /executors/json, /executors/threadDump, /executors/threadDump/json, /static, /, /api, /jobs/job/kill, /stages/stage/kill. | |
INFO [2019-02-06 22:23:28,688] ({dispatcher-event-loop-12} Logging.scala[logInfo]:54) - ApplicationMaster registered as NettyRpcEndpointRef(spark-client://YarnAM) | |
INFO [2019-02-06 22:23:28,890] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Application report for application_1549482247985_0006 (state: RUNNING) | |
INFO [2019-02-06 22:23:28,891] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - | |
client token: N/A | |
diagnostics: N/A | |
ApplicationMaster host: IP_ADDRESS | |
ApplicationMaster RPC port: -1 | |
queue: root.users.maziyar | |
start time: 1549488346609 | |
final status: UNDEFINED | |
tracking URL: http://hadoop-master-1:8088/proxy/application_1549482247985_0006/ | |
user: maziyar | |
INFO [2019-02-06 22:23:28,893] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Application application_1549482247985_0006 has started running. | |
INFO [2019-02-06 22:23:28,905] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 34413. | |
INFO [2019-02-06 22:23:28,906] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Server created on hadoop-gateway:34413 | |
INFO [2019-02-06 22:23:28,909] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy | |
INFO [2019-02-06 22:23:28,946] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Registering BlockManager BlockManagerId(driver, hadoop-gateway, 34413, None) | |
INFO [2019-02-06 22:23:28,951] ({dispatcher-event-loop-14} Logging.scala[logInfo]:54) - Registering block manager hadoop-gateway:34413 with 2004.6 MB RAM, BlockManagerId(driver, hadoop-gateway, 34413, None) | |
INFO [2019-02-06 22:23:28,956] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Registered BlockManager BlockManagerId(driver, hadoop-gateway, 34413, None) | |
INFO [2019-02-06 22:23:28,956] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - external shuffle service port = 7337 | |
INFO [2019-02-06 22:23:28,957] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Initialized BlockManager: BlockManagerId(driver, hadoop-gateway, 34413, None) | |
INFO [2019-02-06 22:23:29,121] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Adding filter org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter to /metrics/json. | |
INFO [2019-02-06 22:23:29,124] ({FIFOScheduler-Worker-1} ContextHandler.java[doStart]:781) - Started o.s.j.s.ServletContextHandler@49cc99bc{/metrics/json,null,AVAILABLE,@Spark} | |
INFO [2019-02-06 22:23:29,315] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Logging events to hdfs://hadoop-master-1:8020/user/spark/spark2ApplicationHistory/application_1549482247985_0006 | |
INFO [2019-02-06 22:23:29,317] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Using initial executors = 1, max of spark.dynamicAllocation.initialExecutors, spark.dynamicAllocation.minExecutors and spark.executor.instances | |
WARN [2019-02-06 22:23:29,347] ({FIFOScheduler-Worker-1} Logging.scala[logWarning]:66) - Lineage directory /var/log/spark/lineage doesn't exist or is not writable. Lineage for this application will be disabled. | |
INFO [2019-02-06 22:23:29,348] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - Extension com.cloudera.spark.lineage.NavigatorAppListener not being initialized. | |
INFO [2019-02-06 22:23:35,308] ({dispatcher-event-loop-7} Logging.scala[logInfo]:54) - Registered executor NettyRpcEndpointRef(spark-client://Executor) (IP_ADDRESS:43592) with ID 1 | |
INFO [2019-02-06 22:23:35,330] ({spark-listener-group-executorManagement} Logging.scala[logInfo]:54) - New executor 1 has registered (new total is 1) | |
INFO [2019-02-06 22:23:35,371] ({FIFOScheduler-Worker-1} Logging.scala[logInfo]:54) - SchedulerBackend is ready for scheduling beginning after reached minRegisteredResourcesRatio: 0.8 | |
INFO [2019-02-06 22:23:35,381] ({FIFOScheduler-Worker-1} OldSparkInterpreter.java[createSparkSession]:301) - Created Spark session with Hive support | |
INFO [2019-02-06 22:23:35,399] ({FIFOScheduler-Worker-1} SparkShims.java[loadShims]:62) - Initializing shims for Spark 2.x | |
INFO [2019-02-06 22:23:35,429] ({dispatcher-event-loop-1} Logging.scala[logInfo]:54) - Registering block manager hadoop-13:39255 with 2.5 GB RAM, BlockManagerId(1, hadoop-13, 39255, None) | |
INFO [2019-02-06 22:23:39,589] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:818) - job:null | |
INFO [2019-02-06 22:23:39,589] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:818) - job:null | |
INFO [2019-02-06 22:23:39,589] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:818) - job:null | |
INFO [2019-02-06 22:23:39,590] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:818) - job:org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob@f7c36f41 | |
INFO [2019-02-06 22:23:39,590] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:820) - getStatus: RUNNING | |
INFO [2019-02-06 22:23:39,591] ({FIFOScheduler-Worker-1} PythonInterpreter.java[open]:104) - IPython is not available, use the native PythonInterpreter | |
INFO [2019-02-06 22:23:39,597] ({FIFOScheduler-Worker-1} PythonUtils.java[createGatewayServer]:42) - Launching GatewayServer at IP_ADDRESS:44020, useAuth: true | |
INFO [2019-02-06 22:23:39,616] ({FIFOScheduler-Worker-1} PythonInterpreter.java[createPythonScript]:175) - Create Python working dir: /tmp/1549488219616-0 | |
INFO [2019-02-06 22:23:39,630] ({FIFOScheduler-Worker-1} PythonInterpreter.java[setupPythonEnv]:208) - PYTHONPATH: :/opt/cloudera/parcels/Anaconda/envs/py36/bin/python3:/opt/cloudera/parcels/Anaconda/envs/py36/bin/python3:/tmp/1549488219616-0 | |
INFO [2019-02-06 22:23:39,630] ({FIFOScheduler-Worker-1} PythonInterpreter.java[createGatewayServerAndStartScript]:157) - Launching Python Process Command: /opt/cloudera/parcels/Anaconda/envs/py36/bin/python3 /tmp/1549488219616-0/zeppelin_python.py IP_ADDRESS 44020 | |
INFO [2019-02-06 22:23:39,631] ({FIFOScheduler-Worker-1} PythonInterpreter.java[bootstrapInterpreter]:573) - Bootstrap interpreter via python/zeppelin_pyspark.py | |
INFO [2019-02-06 22:23:39,632] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized | |
INFO [2019-02-06 22:23:39,732] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized | |
INFO [2019-02-06 22:23:39,832] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized | |
INFO [2019-02-06 22:23:39,933] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized | |
INFO [2019-02-06 22:23:40,033] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized | |
INFO [2019-02-06 22:23:40,133] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized | |
INFO [2019-02-06 22:23:40,234] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized | |
INFO [2019-02-06 22:23:40,334] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized | |
INFO [2019-02-06 22:23:40,434] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized | |
INFO [2019-02-06 22:23:40,534] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized | |
ERROR [2019-02-06 22:23:40,599] ({FIFOScheduler-Worker-1} Job.java[run]:174) - Job failed | |
org.apache.zeppelin.interpreter.InterpreterException: Fail to bootstrap pyspark | |
at org.apache.zeppelin.spark.PySparkInterpreter.open(PySparkInterpreter.java:124) | |
at org.apache.zeppelin.interpreter.LazyOpenInterpreter.open(LazyOpenInterpreter.java:69) | |
at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:593) | |
at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:502) | |
at org.apache.zeppelin.scheduler.Job.run(Job.java:172) | |
at org.apache.zeppelin.scheduler.AbstractScheduler.runJob(AbstractScheduler.java:121) | |
at org.apache.zeppelin.scheduler.FIFOScheduler.lambda$runJobInScheduler$0(FIFOScheduler.java:39) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
at java.lang.Thread.run(Thread.java:748) | |
Caused by: java.io.IOException: Fail to run bootstrap script: python/zeppelin_pyspark.py | |
at org.apache.zeppelin.python.PythonInterpreter.bootstrapInterpreter(PythonInterpreter.java:581) | |
at org.apache.zeppelin.spark.PySparkInterpreter.open(PySparkInterpreter.java:122) | |
... 9 more |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment