DEBUG [2019-02-18 11:27:25,397] ({YARN application state monitor} ProtobufRpcEngine.java[invoke]:249) - Call: getApplicationReport took 2ms
DEBUG [2019-02-18 11:27:25,878] ({FIFOScheduler-Worker-1} InterpreterOutputStream.java[processLine]:81) - Interpreter output:import org.apache.spark.sql.functions._
INFO [2019-02-18 11:27:25,931] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:818) - job:null
DEBUG [2019-02-18 11:27:25,931] ({pool-6-thread-2} Interpreter.java[getProperty]:204) - key: zeppelin.spark.concurrentSQL, value: false
INFO [2019-02-18 11:27:25,931] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:818) - job:null
INFO [2019-02-18 11:27:25,931] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:818) - job:null
INFO [2019-02-18 11:27:25,931] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:818) - job:org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob@f7c36f41
INFO [2019-02-18 11:27:25,931] ({pool-6-thread-2} RemoteInterpreterServer.java[getStatus]:820) - getStatus: RUNNING
DEBUG [2019-02-18 11:27:25,932] ({FIFOScheduler-Worker-1} Interpreter.java[getProperty]:211) - key: zeppelin.python.useIPython, value: false
INFO [2019-02-18 11:27:25,933] ({FIFOScheduler-Worker-1} PythonInterpreter.java[open]:104) - IPython is not available, use the native PythonInterpreter
DEBUG [2019-02-18 11:27:25,935] ({FIFOScheduler-Worker-1} Interpreter.java[getProperty]:211) - key: zeppelin.py4j.useAuth, value: true
INFO [2019-02-18 11:27:25,940] ({FIFOScheduler-Worker-1} PythonUtils.java[createGatewayServer]:42) - Launching GatewayServer at 134.158.74.122:34879, useAuth: true
INFO [2019-02-18 11:27:25,959] ({FIFOScheduler-Worker-1} PythonInterpreter.java[createPythonScript]:175) - Create Python working dir: /tmp/1550485645958-0
DEBUG [2019-02-18 11:27:25,964] ({FIFOScheduler-Worker-1} Interpreter.java[getProperty]:211) - key: spark.pyspark.driver.python, value:
DEBUG [2019-02-18 11:27:25,966] ({FIFOScheduler-Worker-1} Interpreter.java[getProperty]:211) - key: spark.pyspark.python, value:
INFO [2019-02-18 11:27:25,973] ({FIFOScheduler-Worker-1} PythonInterpreter.java[setupPythonEnv]:208) - PYTHONPATH: :/opt/cloudera/parcels/Anaconda/envs/py36/bin/python3:/opt/cloudera/parcels/Anaconda/envs/py36/bin/python3:/tmp/1550485645958-0
INFO [2019-02-18 11:27:25,973] ({FIFOScheduler-Worker-1} PythonInterpreter.java[createGatewayServerAndStartScript]:157) - Launching Python Process Command: /opt/cloudera/parcels/Anaconda/envs/py36/bin/python3 /tmp/1550485645958-0/zeppelin_python.py 134.158.74.122 34879
INFO [2019-02-18 11:27:25,974] ({FIFOScheduler-Worker-1} PythonInterpreter.java[bootstrapInterpreter]:573) - Bootstrap interpreter via python/zeppelin_pyspark.py
INFO [2019-02-18 11:27:25,975] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized
INFO [2019-02-18 11:27:26,075] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized
INFO [2019-02-18 11:27:26,175] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized
INFO [2019-02-18 11:27:26,276] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized
INFO [2019-02-18 11:27:26,376] ({FIFOScheduler-Worker-1} PythonInterpreter.java[interpret]:380) - Wait for PythonScript initialized
DEBUG [2019-02-18 11:27:26,397] ({IPC Parameter Sending Thread #1} Client.java[run]:1120) - IPC Client (1209770703) connection to hadoop-master-1/134.158.74.189:8032 from maziyar sending #71 org.apache.hadoop.yarn.api.ApplicationClientProtocolPB.getApplicationReport
DEBUG [2019-02-18 11:27:26,398] ({IPC Client (1209770703) connection to hadoop-master-1/134.158.74.189:8032 from maziyar} Client.java[receiveRpcResponse]:1174) - IPC Client (1209770703) connection to hadoop-master-1/134.158.74.189:8032 from maziyar got value #71
DEBUG [2019-02-18 11:27:26,399] ({YARN application state monitor} ProtobufRpcEngine.java[invoke]:249) - Call: getApplicationReport took 2ms
DEBUG [2019-02-18 11:27:26,432] ({Thread-35} PythonInterpreter.java[onPythonScriptInitialized]:325) - onPythonScriptInitialized is called
DEBUG [2019-02-18 11:27:26,434] ({Thread-35} PythonInterpreter.java[setStatementsFinished]:314) - Setting python statement output: , error: false
DEBUG [2019-02-18 11:27:26,436] ({Thread-35} PythonInterpreter.java[setStatementsFinished]:314) - Setting python statement output: Fail to execute line 1: sc.setLocalProperty('spark.scheduler.pool', None) | |
Traceback (most recent call last): | |
File "/tmp/1550485645958-0/zeppelin_python.py", line 158, in <module> | |
exec(code, _zcUserQueryNameSpace) | |
File "<stdin>", line 1, in <module> | |
NameError: name 'sc' is not defined | |
, error: true | |
DEBUG [2019-02-18 11:27:26,438] ({Thread-35} PythonInterpreter.java[setStatementsFinished]:314) - Setting python statement output: Fail to execute line 19: from pyspark.conf import SparkConf | |
Traceback (most recent call last): | |
File "/tmp/1550485645958-0/zeppelin_python.py", line 153, in <module> | |
exec(code, _zcUserQueryNameSpace) | |
File "<stdin>", line 19, in <module> | |
ModuleNotFoundError: No module named 'pyspark' | |
, error: true | |
ERROR [2019-02-18 11:27:26,443] ({FIFOScheduler-Worker-1} Job.java[run]:174) - Job failed
org.apache.zeppelin.interpreter.InterpreterException: Fail to bootstrap pyspark | |
at org.apache.zeppelin.spark.PySparkInterpreter.open(PySparkInterpreter.java:124) | |
at org.apache.zeppelin.interpreter.LazyOpenInterpreter.open(LazyOpenInterpreter.java:69) | |
at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:593) | |
at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:502) | |
at org.apache.zeppelin.scheduler.Job.run(Job.java:172) | |
at org.apache.zeppelin.scheduler.AbstractScheduler.runJob(AbstractScheduler.java:121) | |
at org.apache.zeppelin.scheduler.FIFOScheduler.lambda$runJobInScheduler$0(FIFOScheduler.java:39) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) | |
at java.lang.Thread.run(Thread.java:748) | |
Caused by: java.io.IOException: Fail to run bootstrap script: python/zeppelin_pyspark.py | |
at org.apache.zeppelin.python.PythonInterpreter.bootstrapInterpreter(PythonInterpreter.java:581) | |
at org.apache.zeppelin.spark.PySparkInterpreter.open(PySparkInterpreter.java:122) | |
... 9 more | |
DEBUG [2019-02-18 11:27:26,454] ({FIFOScheduler-Worker-1} AbstractScheduler.java[runJob]:128) - Job Error, 20190122-114738_891569967, null
INFO [2019-02-18 11:27:26,454] ({FIFOScheduler-Worker-1} AbstractScheduler.java[runJob]:142) - Job 20190122-114738_891569967 finished by scheduler interpreter_1210474215
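Note on the log above: the two Python errors (NameError: name 'sc' is not defined and ModuleNotFoundError: No module named 'pyspark') indicate that the Python process started for the interpreter cannot import Spark's Python libraries at all; the PYTHONPATH logged at 11:27:25,973 contains only the Anaconda python binary path and the temp working dir, with no Spark python entries. The snippet below is only a minimal diagnostic sketch under that assumption; the SPARK_HOME default path is hypothetical and not taken from this log, and this is not a confirmed fix from this thread.

```python
# Minimal sketch (assumptions only, not the gist author's configuration):
# check whether pyspark becomes importable once Spark's Python libraries
# are on the path, which is what the interpreter process appears to lack.
import glob
import os
import sys

# Hypothetical default; point this at the actual Spark installation.
spark_home = os.environ.get("SPARK_HOME", "/opt/spark")

# pyspark lives under $SPARK_HOME/python and depends on the bundled py4j zip.
sys.path.insert(0, os.path.join(spark_home, "python"))
sys.path.extend(glob.glob(os.path.join(spark_home, "python", "lib", "py4j-*.zip")))

import pyspark  # succeeds only if spark_home points at a real Spark install
print(pyspark.__version__)
```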
Did you get any solution for this error?