mac + pyspark fail

Creating a SparkSession from a Bazel-built PySpark REPL on an Apple Silicon Mac (darwin_arm64) fails with the Py4JJavaError traceback below.
----> 1 spark = pyspark.sql.SparkSession.builder.getOrCreate()

File /private/var/tmp/_bazel_evanculver/07c3e2715b8fccafd72cb9d10f0c890e/execroot/__main__/bazel-out/darwin_arm64-fastbuild/bin/optout/src/main/python/optout_service_repl.runfiles/3rdparty_deps_pyspark/site-packages/pyspark/sql/session.py:477, in SparkSession.Builder.getOrCreate(self)
    475 sparkConf.set(key, value)
    476 # This SparkContext may be an existing one.
--> 477 sc = SparkContext.getOrCreate(sparkConf)
    478 # Do not update `SparkConf` for existing `SparkContext`, as it's shared
    479 # by all sessions.
    480 session = SparkSession(sc, options=self._options)

File /private/var/tmp/_bazel_evanculver/07c3e2715b8fccafd72cb9d10f0c890e/execroot/__main__/bazel-out/darwin_arm64-fastbuild/bin/optout/src/main/python/optout_service_repl.runfiles/3rdparty_deps_pyspark/site-packages/pyspark/context.py:512, in SparkContext.getOrCreate(cls, conf)
    510 with SparkContext._lock:
    511 if SparkContext._active_spark_context is None:
--> 512 SparkContext(conf=conf or SparkConf())
    513 assert SparkContext._active_spark_context is not None
    514 return SparkContext._active_spark_context

File /private/var/tmp/_bazel_evanculver/07c3e2715b8fccafd72cb9d10f0c890e/execroot/__main__/bazel-out/darwin_arm64-fastbuild/bin/optout/src/main/python/optout_service_repl.runfiles/3rdparty_deps_pyspark/site-packages/pyspark/context.py:200, in SparkContext.__init__(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, gateway, jsc, profiler_cls, udf_profiler_cls, memory_profiler_cls)
    198 SparkContext._ensure_initialized(self, gateway=gateway, conf=conf)
    199 try:
--> 200 self._do_init(
    201 master,
    202 appName,
    203 sparkHome,
    204 pyFiles,
    205 environment,
    206 batchSize,
    207 serializer,
    208 conf,
    209 jsc,
    210 profiler_cls,
    211 udf_profiler_cls,
    212 memory_profiler_cls,
    213 )
    214 except BaseException:
    215 # If an error occurs, clean up in order to allow future SparkContext creation:
    216 self.stop()

File /private/var/tmp/_bazel_evanculver/07c3e2715b8fccafd72cb9d10f0c890e/execroot/__main__/bazel-out/darwin_arm64-fastbuild/bin/optout/src/main/python/optout_service_repl.runfiles/3rdparty_deps_pyspark/site-packages/pyspark/context.py:287, in SparkContext._do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, jsc, profiler_cls, udf_profiler_cls, memory_profiler_cls)
    284 self.environment["PYTHONHASHSEED"] = os.environ.get("PYTHONHASHSEED", "0")
    286 # Create the Java SparkContext through Py4J
--> 287 self._jsc = jsc or self._initialize_context(self._conf._jconf)
    288 # Reset the SparkConf to the one actually used by the SparkContext in JVM.
    289 self._conf = SparkConf(_jconf=self._jsc.sc().conf())

File /private/var/tmp/_bazel_evanculver/07c3e2715b8fccafd72cb9d10f0c890e/execroot/__main__/bazel-out/darwin_arm64-fastbuild/bin/optout/src/main/python/optout_service_repl.runfiles/3rdparty_deps_pyspark/site-packages/pyspark/context.py:417, in SparkContext._initialize_context(self, jconf)
    413 """
    414 Initialize SparkContext in function to allow subclass specific initialization
    415 """
    416 assert self._jvm is not None
--> 417 return self._jvm.JavaSparkContext(jconf)

File /private/var/tmp/_bazel_evanculver/07c3e2715b8fccafd72cb9d10f0c890e/execroot/__main__/bazel-out/darwin_arm64-fastbuild/bin/optout/src/main/python/optout_service_repl.runfiles/3rdparty_deps_py4j/site-packages/py4j/java_gateway.py:1587, in JavaClass.__call__(self, *args)
   1581 command = proto.CONSTRUCTOR_COMMAND_NAME +\
   1582 self._command_header +\
   1583 args_command +\
   1584 proto.END_COMMAND_PART
   1586 answer = self._gateway_client.send_command(command)
-> 1587 return_value = get_return_value(
   1588 answer, self._gateway_client, None, self._fqn)
   1590 for temp_arg in temp_args:
   1591 if hasattr(temp_arg, "_detach"):

File /private/var/tmp/_bazel_evanculver/07c3e2715b8fccafd72cb9d10f0c890e/execroot/__main__/bazel-out/darwin_arm64-fastbuild/bin/optout/src/main/python/optout_service_repl.runfiles/3rdparty_deps_py4j/site-packages/py4j/protocol.py:326, in get_return_value(answer, gateway_client, target_id, name)
    324 value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)
    325 if answer[1] == REFERENCE_TYPE:
--> 326 raise Py4JJavaError(
    327 "An error occurred while calling {0}{1}{2}.\n".
    328 format(target_id, ".", name), value)
    329 else:
    330 raise Py4JError(
    331 "An error occurred while calling {0}{1}{2}. Trace:\n{3}\n".
    332 format(target_id, ".", name, value))

Py4JJavaError: An error occurred while calling None.org.apache.spark.api.java.JavaSparkContext.
: java.lang.ExceptionInInitializerError
    at org.apache.spark.unsafe.array.ByteArrayMethods.<clinit>(ByteArrayMethods.java:52)
    at org.apache.spark.memory.MemoryManager.defaultPageSizeBytes$lzycompute(MemoryManager.scala:261)
    at org.apache.spark.memory.MemoryManager.defaultPageSizeBytes(MemoryManager.scala:251)
    at org.apache.spark.memory.MemoryManager.$anonfun$pageSizeBytes$1(MemoryManager.scala:270)
    at scala.runtime.java8.JFunction0$mcJ$sp.apply(JFunction0$mcJ$sp.java:23)
    at scala.Option.getOrElse(Option.scala:189)
    at org.apache.spark.memory.MemoryManager.<init>(MemoryManager.scala:270)
    at org.apache.spark.memory.UnifiedMemoryManager.<init>(UnifiedMemoryManager.scala:58)
    at org.apache.spark.memory.UnifiedMemoryManager$.apply(UnifiedMemoryManager.scala:207)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:325)
    at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:196)
    at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:279)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:464)
    at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:58)
    at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:75)
    at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:53)
    at java.base/java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:502)
    at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:486)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:247)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:374)
    at py4j.Gateway.invoke(Gateway.java:238)
    at py4j.commands.ConstructorCommand.invokeConstructor(ConstructorCommand.java:80)
    at py4j.commands.ConstructorCommand.execute(ConstructorCommand.java:69)
    at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
    at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
    at java.base/java.lang.Thread.run(Thread.java:1583)
Caused by: java.lang.IllegalStateException: java.lang.NoSuchMethodException: java.nio.DirectByteBuffer.<init>(long,int)
    at org.apache.spark.unsafe.Platform.<clinit>(Platform.java:113)
    ... 27 more
Caused by: java.lang.NoSuchMethodException: java.nio.DirectByteBuffer.<init>(long,int)
    at java.base/java.lang.Class.getConstructor0(Class.java:3761)
    at java.base/java.lang.Class.getDeclaredConstructor(Class.java:2930)
    at org.apache.spark.unsafe.Platform.<clinit>(Platform.java:71)
    ... 27 more
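
The telling frame is the final NoSuchMethodException: during class initialization, Spark's org.apache.spark.unsafe.Platform reflectively looks up the private java.nio.DirectByteBuffer(long, int) constructor, and that signature no longer exists on recent JDKs. The Thread.java:1583 frame is consistent with a JDK 21 runtime, and Spark releases before 3.5.0 do not support Java 21, so the likely fixes are running the driver on a JDK that Spark 3.x supports (8, 11, or 17) or upgrading to Spark 3.5.0 or later.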
The repro is just the session bootstrap:
import pyspark
spark = pyspark.sql.SparkSession.builder.getOrCreate()
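
A minimal workaround sketch, assuming the JDK-21 incompatibility above is the cause and that a JDK 17 install exists on the machine (neither is shown in the gist): point JAVA_HOME at the older JDK before the session is created, so PySpark's gateway launches the JVM under a supported runtime. /usr/libexec/java_home is the stock macOS JDK locator.

import os
import subprocess

# Assumption: a JDK 17 is installed (e.g. via Homebrew). Spark 3.x before
# 3.5.0 supports Java 8/11/17; JDK 21 dropped the DirectByteBuffer(long, int)
# constructor that Platform.<clinit> reflects on in the trace above.
os.environ["JAVA_HOME"] = subprocess.check_output(
    ["/usr/libexec/java_home", "-v", "17"], text=True
).strip()

import pyspark

# JAVA_HOME must be set before getOrCreate(), which is when the JVM launches.
spark = pyspark.sql.SparkSession.builder.getOrCreate()
print(spark.version)

One caveat: since this REPL runs under Bazel, the Java toolchain may be resolved hermetically rather than from the environment, in which case the JDK would need to be pinned in the Bazel configuration instead; that part is a guess about this setup.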