import numpy as np

def generate_cyclical_features(df, col_name, period, start_num=0):
    # Encode a cyclical column (e.g. hour, month) as a sin/cos pair so the end
    # of the cycle sits next to its beginning, then drop the raw column.
    kwargs = {
        f'sin_{col_name}': lambda x: np.sin(2 * np.pi * (x[col_name] - start_num) / period),
        f'cos_{col_name}': lambda x: np.cos(2 * np.pi * (x[col_name] - start_num) / period),
    }
    return df.assign(**kwargs).drop(columns=[col_name])
df_features = generate_cyclical_features(df_features, 'hour', 24, 0)
# df_features = generate_cyclical_features(df_features, 'day_of_week', 7, 0)
# df_features = generate_cyclical_features(df_features, 'month', 12, 1)
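
# Quick check of the encoding above (illustrative, not part of the original gist):
# with period=24, hour 23 lands next to hour 0 on the unit circle, which is the
# property the sin/cos features are meant to capture.
hours = np.array([0, 23])
print(np.sin(2 * np.pi * hours / 24))  # [ 0.    -0.259]
print(np.cos(2 * np.pi * hours / 24))  # [ 1.     0.966]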
import pandas as pd
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import OneHotEncoder

def onehot_encode(df, onehot_columns):
    # scikit-learn variant: returns a NumPy array (encoded columns first,
    # remaining columns passed through)
    ct = ColumnTransformer(
        [('onehot', OneHotEncoder(drop='first'), onehot_columns)],
        remainder='passthrough'
    )
    return ct.fit_transform(df)

def onehot_encode_pd(df, col_names):
    # pandas variant: one-hot encode each listed column and drop the original
    for col_name in col_names:
        dummies = pd.get_dummies(df[col_name], prefix=col_name)
        df = pd.concat([df, dummies], axis=1).drop(columns=[col_name])
    return df
df_features = onehot_encode_pd(df_features, ['month','day','day_of_week','week_of_year'])
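# The scikit-learn helper above is an alternative that loses the column names
# (it returns a plain array); a call of the same shape would look like the
# commented line below (illustrative only, not used in the rest of the gist).
# onehot_array = onehot_encode(df_features, ['month', 'day', 'day_of_week', 'week_of_year'])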
# Derive calendar features from the frame's DatetimeIndex
df_features = (
    df
    .assign(hour = df.index.hour)
    .assign(day = df.index.day)
    .assign(month = df.index.month)
    .assign(day_of_week = df.index.dayofweek)
    .assign(week_of_year = df.index.isocalendar().week)  # Index.week is deprecated/removed in recent pandas
)
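
# The block above assumes df is indexed by a DatetimeIndex. A hypothetical frame
# of that shape, purely to illustrate the assumption (the real df comes from the
# original dataset), could be built like this:
df_demo = pd.DataFrame(
    {'value': np.arange(48)},
    index=pd.date_range('2021-01-01', periods=48, freq='H'),
)
print(df_demo.index.hour[:3], df_demo.index.isocalendar().week.iloc[:3])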
class Optimization:
    """Optimization is a helper class that allows training, validation, and prediction.

    It takes a model, a loss function, an optimizer, and optionally a learning-rate
    scheduler and an early-stopping criterion as inputs. In return, it provides a
    framework to train and validate the models, and to predict future values based
    on them.

    Attributes:
        model (RNNModel, LSTMModel, GRUModel): model class created for the type of RNN
    """
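
    # The rest of the class is truncated in the gist. A minimal sketch of what the
    # docstring describes (constructor plus one training step); the attribute names
    # and method body here are assumptions, not the original implementation.
    def __init__(self, model, loss_fn, optimizer):
        self.model = model
        self.loss_fn = loss_fn
        self.optimizer = optimizer
        self.train_losses = []
        self.val_losses = []

    def train_step(self, x, y):
        # Train mode, forward pass, loss, backprop, parameter update
        self.model.train()
        yhat = self.model(x)
        loss = self.loss_fn(y, yhat)
        loss.backward()
        self.optimizer.step()
        self.optimizer.zero_grad()
        return loss.item()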
import torch
import torch.nn as nn

class GRUModel(nn.Module):
    def __init__(self, input_dim, hidden_dim, layer_dim, output_dim, dropout_prob):
        super(GRUModel, self).__init__()
        # Defining the number of layers and the nodes in each layer
        self.layer_dim = layer_dim
        self.hidden_dim = hidden_dim
        # GRU layers (batch_first: inputs are [batch, seq_len, features])
        self.gru = nn.GRU(
            input_dim, hidden_dim, layer_dim, batch_first=True, dropout=dropout_prob
        )
        # Fully connected layer mapping the last hidden state to the output
        self.fc = nn.Linear(hidden_dim, output_dim)
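
    # forward() is missing from the truncated snippet; a standard sketch for a
    # batch_first GRU (assumed, not the original code):
    def forward(self, x):
        # Zero-initialized hidden state: (num_layers, batch, hidden_dim)
        h0 = torch.zeros(self.layer_dim, x.size(0), self.hidden_dim, device=x.device)
        out, _ = self.gru(x, h0)
        # Predict from the output of the last time step
        return self.fc(out[:, -1, :])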

class LSTMModel(nn.Module):
    def __init__(self, input_dim, hidden_dim, layer_dim, output_dim, dropout_prob):
        super(LSTMModel, self).__init__()
        # Defining the number of layers and the nodes in each layer
        self.hidden_dim = hidden_dim
        self.layer_dim = layer_dim
        # LSTM layers (batch_first: inputs are [batch, seq_len, features])
        self.lstm = nn.LSTM(
            input_dim, hidden_dim, layer_dim, batch_first=True, dropout=dropout_prob
        )
        # Fully connected layer mapping the last hidden state to the output
        self.fc = nn.Linear(hidden_dim, output_dim)
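
    # forward() is missing from the truncated snippet; an LSTM needs both a hidden
    # and a cell state (sketch, assumed rather than taken from the original):
    def forward(self, x):
        h0 = torch.zeros(self.layer_dim, x.size(0), self.hidden_dim, device=x.device)
        c0 = torch.zeros(self.layer_dim, x.size(0), self.hidden_dim, device=x.device)
        out, _ = self.lstm(x, (h0, c0))
        # Predict from the output of the last time step
        return self.fc(out[:, -1, :])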

class RNNModel(nn.Module):
    def __init__(self, input_dim, hidden_dim, layer_dim, output_dim, dropout_prob):
        super(RNNModel, self).__init__()
        # Defining the number of layers and the nodes in each layer
        self.hidden_dim = hidden_dim
        self.layer_dim = layer_dim
        # RNN layers (batch_first: inputs are [batch, seq_len, features])
        self.rnn = nn.RNN(
            input_dim, hidden_dim, layer_dim, batch_first=True, dropout=dropout_prob
        )
        # Fully connected layer mapping the last hidden state to the output
        self.fc = nn.Linear(hidden_dim, output_dim)
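
    # forward() mirrors the GRU sketch above (assumed, not from the original):
    def forward(self, x):
        h0 = torch.zeros(self.layer_dim, x.size(0), self.hidden_dim, device=x.device)
        out, _ = self.rnn(x, h0)
        return self.fc(out[:, -1, :])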
from torch.utils.data import TensorDataset, DataLoader
batch_size = 64
train_features = torch.Tensor(X_train_arr)
train_targets = torch.Tensor(y_train_arr)
val_features = torch.Tensor(X_val_arr)
val_targets = torch.Tensor(y_val_arr)
test_features = torch.Tensor(X_test_arr)
test_targets = torch.Tensor(y_test_arr)
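
# The TensorDataset/DataLoader imports above are presumably meant to batch these
# tensors; a minimal sketch (the shuffle/drop_last settings are assumptions):
train = TensorDataset(train_features, train_targets)
val = TensorDataset(val_features, val_targets)
test = TensorDataset(test_features, test_targets)

train_loader = DataLoader(train, batch_size=batch_size, shuffle=False, drop_last=True)
val_loader = DataLoader(val, batch_size=batch_size, shuffle=False, drop_last=True)
test_loader = DataLoader(test, batch_size=batch_size, shuffle=False, drop_last=True)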
from sklearn.preprocessing import MinMaxScaler

scaler = MinMaxScaler()

# Fit on the training features only, then apply the same scaling to val/test
X_train_arr = scaler.fit_transform(X_train)
X_val_arr = scaler.transform(X_val)
X_test_arr = scaler.transform(X_test)

# Re-fitting the same scaler on the target overwrites its parameters: from here on
# it holds the target's range, presumably so predictions can be inverse-transformed
y_train_arr = scaler.fit_transform(y_train)
y_val_arr = scaler.transform(y_val)
y_test_arr = scaler.transform(y_test)
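
# Illustrative follow-up (assumed, not shown in the gist): since the scaler was
# last fitted on the target, scaled predictions can be mapped back to original
# units with inverse_transform. The array below is a hypothetical model output
# and assumes the target has a single column.
dummy_scaled_preds = np.array([[0.1], [0.5], [0.9]])
print(scaler.inverse_transform(dummy_scaled_preds))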