Created
December 2, 2020 18:43
-
-
Save asifr/2cabebed5c945f1a5f2104213e2cb5ce to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| """ | |
| Creates a new connection to spark and makes available: | |
| `spark`, `sq` (`SQLContext`), `F`, and `Window` in the global namespace. | |
| """ | |
| from textwrap import dedent | |
| import findspark | |
| import os | |
| def _formulate_pyspark_submit_args(submit_args=None): | |
| pass | |
| def _parse_master(pyspark_submit_args): | |
| sargs = pyspark_submit_args.split() | |
| for j,sarg in enumerate(sargs): | |
| if sarg == "--master": | |
| try: | |
| return sargs[j+1] | |
| except: | |
| raise Exception("Could not parse master from PYSPARK_SUBMIT_ARGS") | |
| raise Exception("Could not parse master from PYSPARK_SUBMIT_ARGS") | |
def initialize_spark(appName="MyApp", submit_args=None, memory=12):
    """Create (or fetch) a SparkSession.

    Requires the ``SPARK_HOME`` environment variable to be set.  If
    ``PYSPARK_SUBMIT_ARGS`` is unset, it is populated from ``submit_args``
    when provided, otherwise from a local-mode default.

    Fixes over the original:
      * ``appName`` is now actually applied (it was hard-coded "MyApp");
      * ``submit_args`` is now honored (it was silently ignored);
      * the unreachable "PYSPARK_SUBMIT_ARGS not set" re-check (which could
        never fire because the variable was just assigned, and whose message
        misspelled the variable name) has been removed.

    Parameters:
        appName (str): Spark application name.
        submit_args (str, optional): PYSPARK_SUBMIT_ARGS string to install
            when the environment variable is not already set.
        memory (int): driver/executor memory in GB for the default args.

    Returns:
        pyspark.sql.SparkSession: the active session.

    Raises:
        Exception: if SPARK_HOME is unset, or the master cannot be parsed
            from PYSPARK_SUBMIT_ARGS.
    """
    if "SPARK_HOME" not in os.environ:
        raise Exception("SPARK_HOME environmental variable not set.")
    if "PYSPARK_SUBMIT_ARGS" not in os.environ:
        if submit_args is None:
            # Default local-mode configuration (12 worker threads).
            submit_args = (
                f"--master local[12] --driver-memory {memory}g "
                f"--executor-memory {memory}g pyspark-shell"
            )
        os.environ["PYSPARK_SUBMIT_ARGS"] = submit_args
    findspark.init(os.environ["SPARK_HOME"])
    spark_master = _parse_master(os.environ["PYSPARK_SUBMIT_ARGS"])
    # Imported lazily: pyspark is only importable after findspark.init().
    from pyspark.sql import SparkSession
    spark = (
        SparkSession.builder
        .master(spark_master)
        .appName(appName)  # bug fix: was hard-coded "MyApp"
        .getOrCreate()
    )
    return spark
def assert_pyspark():
    """Import the commonly used pyspark.sql helpers and hand them back.

    Returns:
        tuple: ``(pyspark.sql.functions, pyspark.sql.Window)`` — the
        functions module and the Window class, ready to bind into the
        caller's namespace.
    """
    from pyspark.sql import Window
    import pyspark.sql.functions as F
    return F, Window
def load_spark():
    """Convenience wrapper: build a SparkSession using all default settings.

    Returns:
        pyspark.sql.SparkSession: the session from ``initialize_spark()``.
    """
    return initialize_spark()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment