This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash | |
######################################################################################## | |
# START, STOP or STATUS # | |
# ---------------------------- # | |
# This script is intended to help you start, stop or get the IP address of # | |
# Current running EC2. # | |
# This will require you to first configure your AWS CLI manually, to ensure safety # | |
# # | |
# Please read the code to ensure that it does not cause any security issues # | |
# # |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash | |
################################################################################## | |
# ---------------------------------------------------------------- | |
# THIS SCRIPT WILL HELP YOU AUTOMATE THE DOCKER INSTALLATION STEPS | |
# ---------------------------------------------------------------- | |
# Test was ran on aws ec2 instance. | |
# | |
# AUTHOR: |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#Import All Functions | |
from pyspark.sql import SQLContext | |
from pyspark.sql import functions as F | |
from pyspark.sql import SparkSession | |
from pyspark.sql.functions import unix_timestamp, to_date, date_format, month, year, dayofyear, dayofweek, col | |
from pyspark.sql.types import TimestampType | |
from pyspark.sql import functions as F | |
from pyspark.sql import SparkSession | |
from pyspark.sql.functions import unix_timestamp, to_date, date_format, month, year, dayofyear, dayofweek, col | |
from pyspark.sql.types import TimestampType |
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def log_run(gridsearch: sklearn.GridSearchCV, experiment_name: str, model_name: str, run_index: int, conda_env, tags={}): | |
"""Logging of cross validation results to mlflow tracking server | |
Args: | |
experiment_name (str): experiment name | |
model_name (str): Name of the model | |
run_index (int): Index of the run (in Gridsearch) | |
conda_env (str): A dictionary that describes the conda environment (MLFlow Format) | |
tags (dict): Dictionary of extra data and tags (usually features) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from sklearn.neighbors import KNeighborsRegressor | |
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score, explained_variance_score | |
import mlflow | |
import mlflow.sklearn | |
import numpy as np | |
# Launch the experiment on mlflow | |
experiment_name = "electricityconsumption-forecast" |
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
''' | |
spark/bin/spark-submit \ | |
--master local --driver-memory 4g \ | |
--num-executors 2 --executor-memory 4g \ | |
--packages org.apache.spark:spark-sql-kafka-0-10_2.11:2.4.0 \ | |
sstreaming-spark-final.py | |
''' | |
from pyspark.sql import SparkSession | |
from pyspark.sql.types import * | |
from pyspark.sql.functions import expr |
NewerOlder