pyspark --packages graphframes:graphframes:0.9.0-spark3.5-s_2.12
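Spark's --packages flag takes standard Maven coordinates in groupId:artifactId:version form and hands them to Ivy, which searches the local Maven cache, the local Ivy cache, Maven Central, and the spark-packages repository in turn. The transcript below shows that search failing for graphframes:graphframes:0.9.0-spark3.5-s_2.12 at every repository.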
Python 3.12.9 | packaged by conda-forge | (main, Mar 4 2025, 22:44:42) [Clang 18.1.8 ]
Type 'copyright', 'credits' or 'license' for more information
IPython 9.0.2 -- An enhanced Interactive Python. Type '?' for help.
Tip: Use `--theme`, or the `%colors` magic to change ipython themes and colors.
25/07/17 21:48:25 WARN Utils: Your hostname, Achilles-3.local resolves to a loopback address: 127.0.0.1; using 10.26.184.245 instead (on interface en0)
25/07/17 21:48:25 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
:: loading settings :: url = jar:file:/Users/rjurney/Software/spark/assembly/target/scala-2.12/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml
Ivy Default Cache set to: /Users/rjurney/.ivy2/cache
The jars for the packages stored in: /Users/rjurney/.ivy2/jars
graphframes#graphframes added as a dependency
:: resolving dependencies :: org.apache.spark#spark-submit-parent-3783cd0e-1fa6-45ce-a910-bc2689be8775;1.0
    confs: [default]
:: resolution report :: resolve 2869ms :: artifacts dl 0ms
    :: modules in use:
    ---------------------------------------------------------------------
    |                  |            modules            ||   artifacts   |
    |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
    ---------------------------------------------------------------------
    |      default     |   1   |   0   |   0   |   0   ||   0   |   0   |
    ---------------------------------------------------------------------
:: problems summary ::
:::: WARNINGS
    module not found: graphframes#graphframes;0.9.0-spark3.5-s_2.12

    ==== local-m2-cache: tried
      file:/Users/rjurney/.m2/repository/graphframes/graphframes/0.9.0-spark3.5-s_2.12/graphframes-0.9.0-spark3.5-s_2.12.pom
      -- artifact graphframes#graphframes;0.9.0-spark3.5-s_2.12!graphframes.jar:
      file:/Users/rjurney/.m2/repository/graphframes/graphframes/0.9.0-spark3.5-s_2.12/graphframes-0.9.0-spark3.5-s_2.12.jar

    ==== local-ivy-cache: tried
      /Users/rjurney/.ivy2/local/graphframes/graphframes/0.9.0-spark3.5-s_2.12/ivys/ivy.xml
      -- artifact graphframes#graphframes;0.9.0-spark3.5-s_2.12!graphframes.jar:
      /Users/rjurney/.ivy2/local/graphframes/graphframes/0.9.0-spark3.5-s_2.12/jars/graphframes.jar

    ==== central: tried
      https://repo1.maven.org/maven2/graphframes/graphframes/0.9.0-spark3.5-s_2.12/graphframes-0.9.0-spark3.5-s_2.12.pom
      -- artifact graphframes#graphframes;0.9.0-spark3.5-s_2.12!graphframes.jar:
      https://repo1.maven.org/maven2/graphframes/graphframes/0.9.0-spark3.5-s_2.12/graphframes-0.9.0-spark3.5-s_2.12.jar

    ==== spark-packages: tried
      https://repos.spark-packages.org/graphframes/graphframes/0.9.0-spark3.5-s_2.12/graphframes-0.9.0-spark3.5-s_2.12.pom
      -- artifact graphframes#graphframes;0.9.0-spark3.5-s_2.12!graphframes.jar:
      https://repos.spark-packages.org/graphframes/graphframes/0.9.0-spark3.5-s_2.12/graphframes-0.9.0-spark3.5-s_2.12.jar

    ::::::::::::::::::::::::::::::::::::::::::::::
    ::          UNRESOLVED DEPENDENCIES         ::
    ::::::::::::::::::::::::::::::::::::::::::::::
    :: graphframes#graphframes;0.9.0-spark3.5-s_2.12: not found
    ::::::::::::::::::::::::::::::::::::::::::::::

:: USE VERBOSE OR DEBUG MESSAGE LEVEL FOR MORE DETAILS
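Ivy checked all four repositories and found neither a POM nor a jar at graphframes:graphframes:0.9.0-spark3.5-s_2.12, which points at the coordinate itself rather than at connectivity. A hedged sanity check is to retry with an older artifact that has long been listed on spark-packages (0.8.3-spark3.5-s_2.12 is assumed here to still be available there):

    # Assumed-available older coordinate, to confirm resolution works at all
    pyspark --packages graphframes:graphframes:0.8.3-spark3.5-s_2.12

If that resolves, the 0.9.0 release was most likely published under different coordinates, or not published to these repositories at all, and the right coordinate needs to come from the GraphFrames release notes rather than being guessed from the old naming pattern.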
Exception in thread "main" java.lang.RuntimeException: [unresolved dependency: graphframes#graphframes;0.9.0-spark3.5-s_2.12: not found]
    at org.apache.spark.deploy.SparkSubmitUtils$.resolveMavenCoordinates(SparkSubmit.scala:1613)
    at org.apache.spark.util.DependencyUtils$.resolveMavenDependencies(DependencyUtils.scala:185)
    at org.apache.spark.deploy.SparkSubmit.prepareSubmitEnvironment(SparkSubmit.scala:339)
    at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:969)
    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:199)
    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:222)
    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:91)
    at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1125)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1134)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
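As a workaround that sidesteps Ivy resolution entirely, pyspark also accepts --jars with a jar file downloaded by hand. A minimal sketch, with a hypothetical local path standing in for wherever the GraphFrames jar actually lives:

    # /path/to/graphframes.jar is a placeholder for a manually downloaded jar
    pyspark --jars /path/to/graphframes.jar

Note that --jars only puts the jar on the JVM classpath; for `from graphframes import GraphFrame` to work in the shell, the Python bindings must also be importable, for example by adding the jar (the spark-packages builds bundle the Python sources) to PYTHONPATH.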
[TerminalIPythonApp] WARNING | Unknown error in handling PYTHONSTARTUP file /Users/rjurney/Software/spark/python/pyspark/shell.py:
---------------------------------------------------------------------------
PySparkRuntimeError                       Traceback (most recent call last)
File ~/anaconda3/envs/graphframes/lib/python3.12/site-packages/IPython/core/shellapp.py:404, in InteractiveShellApp._exec_file(self, fname, shell_futures)
    400         self.shell.safe_execfile_ipy(full_filename,
    401                                      shell_futures=shell_futures)
    402     else:
    403         # default to python, even without extension
--> 404         self.shell.safe_execfile(full_filename,
    405                                  self.shell.user_ns,
    406                                  shell_futures=shell_futures,
    407                                  raise_exceptions=True)
    408 finally:
    409     sys.argv = save_argv

File ~/anaconda3/envs/graphframes/lib/python3.12/site-packages/IPython/core/interactiveshell.py:2899, in InteractiveShell.safe_execfile(self, fname, exit_ignore, raise_exceptions, shell_futures, *where)
   2897 try:
   2898     glob, loc = (where + (None, ))[:2]
-> 2899     py3compat.execfile(
   2900         fname, glob, loc,
   2901         self.compile if shell_futures else None)
   2902 except SystemExit as status:
   2903     # If the call was made with 0 or None exit status (sys.exit(0)
   2904     # or sys.exit() ), don't bother showing a traceback, as both of
   (...) 2910     # For other exit status, we show the exception unless
   2911     # explicitly silenced, but only in short form.
   2912     if status.code:

File ~/anaconda3/envs/graphframes/lib/python3.12/site-packages/IPython/utils/py3compat.py:56, in execfile(fname, glob, loc, compiler)
     54 with open(fname, "rb") as f:
     55     compiler = compiler or compile
---> 56     exec(compiler(f.read(), fname, "exec"), glob, loc)

File ~/Software/spark/python/pyspark/shell.py:66
     63 if os.environ.get("SPARK_EXECUTOR_URI"):
     64     SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])
---> 66 SparkContext._ensure_initialized()
     68 try:
     69     spark = SparkSession._create_shell_session()

File ~/Software/spark/python/pyspark/context.py:436, in SparkContext._ensure_initialized(cls, instance, gateway, conf)
    434 with SparkContext._lock:
    435     if not SparkContext._gateway:
--> 436         SparkContext._gateway = gateway or launch_gateway(conf)
    437     SparkContext._jvm = SparkContext._gateway.jvm
    439 if instance:

File ~/Software/spark/python/pyspark/java_gateway.py:107, in launch_gateway(conf, popen_kwargs)
    104     time.sleep(0.1)
    106 if not os.path.isfile(conn_info_file):
--> 107     raise PySparkRuntimeError(
    108         error_class="JAVA_GATEWAY_EXITED",
    109         message_parameters={},
    110     )
    112 with open(conn_info_file, "rb") as info:
    113     gateway_port = read_int(info)

PySparkRuntimeError: [JAVA_GATEWAY_EXITED] Java gateway process exited before sending its port number.
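The PySparkRuntimeError at the end is a downstream symptom rather than a second bug: pyspark starts spark-submit as a Java gateway process, and because dependency resolution raised the RuntimeException shown above, that JVM exited before writing the port number that Py4J waits for. Launching the shell with no extra packages should confirm that the gateway itself is healthy:

    # With no --packages, no Ivy resolution happens, so the gateway should start
    pyspark

If a bare pyspark starts cleanly, the JAVA_GATEWAY_EXITED error here can be attributed entirely to the failed --packages resolution.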