Last active
April 11, 2016 16:41
-
-
Save chicagobuss/8fbf0c8e9ed780e050e455629d7a28f0 to your computer and use it in GitHub Desktop.
jupyter kernel config
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-3-65fd6de06602> in <module>()
      1 from pyspark.sql import SQLContext
----> 2 df = sqlContext.createDataFrame([("test", 1)])

/usr/lib/spark/python/pyspark/sql/context.py in createDataFrame(self, data, schema, samplingRatio)
    428             rdd, schema = self._createFromLocal(data, schema)
    429         jrdd = self._jvm.SerDeUtil.toJavaArray(rdd._to_java_object_rdd())
--> 430         jdf = self._ssql_ctx.applySchemaToPythonRDD(jrdd.rdd(), schema.json())
    431         df = DataFrame(jdf, self)
    432         df._schema = schema

/usr/lib/spark/python/pyspark/sql/context.py in _ssql_ctx(self)
    684         try:
    685             if not hasattr(self, '_scala_HiveContext'):
--> 686                 self._scala_HiveContext = self._get_hive_ctx()
    687             return self._scala_HiveContext
    688         except Py4JError as e:

/usr/lib/spark/python/pyspark/sql/context.py in _get_hive_ctx(self)
    692
    693     def _get_hive_ctx(self):
--> 694         return self._jvm.HiveContext(self._jsc.sc())
    695
    696     def refreshTable(self, tableName):

TypeError: 'JavaPackage' object is not callable
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
{
  "display_name": "pySpark 2-cores (Spark 1.6.0)",
  "language": "python",
  "argv": [
    "/bin/python2",
    "-m",
    "IPython.kernel",
    "-f",
    "{connection_file}"
  ],
  "env": {
    "SPARK_HOME": "/usr/lib/spark",
    "PYTHONPATH": "/usr/lib/spark/python/:/usr/lib/spark/python/lib/py4j-0.9-src.zip",
    "PYTHONSTARTUP": "/usr/lib/spark/python/pyspark/shell.py",
    "PYSPARK_SUBMIT_ARGS": "--executor-cores 2 --num-executors 1 --master spark://spark-master-1:7077,spark-master-2:7077 pyspark-shell"
  }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment