import numpy as np
import pandas as pd

def inverse_transform(scaler, df, columns):
    # scikit-learn scalers expect 2-D input, so pass a one-column frame per column
    for col in columns:
        df[col] = scaler.inverse_transform(df[[col]])
    return df

def format_predictions(predictions, values, df_test, scaler):
    # Flatten the per-batch arrays into single 1-D vectors
    vals = np.concatenate(values, axis=0).ravel()
    preds = np.concatenate(predictions, axis=0).ravel()
    df_result = pd.DataFrame(data={"value": vals, "prediction": preds}, index=df_test.head(len(vals)).index)
    df_result = inverse_transform(scaler, df_result, ["value", "prediction"])
    return df_result
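Assuming predictions and values are the per-batch lists returned by the evaluate method shown further down, the two helpers above chain into a single result frame. This is only a sketch: X_test stands in for the held-out feature frame and scaler for the fitted target scaler from the preprocessing step.

# Sketch only: predictions and values come from Optimization.evaluate (shown later);
# X_test is the held-out feature frame, scaler the fitted target scaler.
df_result = format_predictions(predictions, values, X_test, scaler)
df_result.head()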
from sklearn.metrics import mean_absolute_error, mean_squared_error, r2_score

def calculate_metrics(df):
    return {'mae': mean_absolute_error(df.value, df.prediction),
            'rmse': mean_squared_error(df.value, df.prediction) ** 0.5,
            'r2': r2_score(df.value, df.prediction)}

result_metrics = calculate_metrics(df_result)
import torch.optim as optim

input_dim = len(X_train.columns)
output_dim = 1
hidden_dim = 64
layer_dim = 3
batch_size = 64
dropout = 0.2
n_epochs = 100
learning_rate = 1e-3
def get_model(model, model_params):
    models = {
        "rnn": RNNModel,
        "lstm": LSTMModel,
        "gru": GRUModel,
    }
    return models.get(model.lower())(**model_params)
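The hyperparameters defined earlier can be bundled into a dictionary and handed to get_model. A minimal sketch follows; the keyword names in model_params (input_dim, hidden_dim, layer_dim, output_dim, dropout_prob) are assumptions and should match whatever the RNNModel, LSTMModel and GRUModel constructors actually accept.

import torch.nn as nn

# Keyword names below are assumptions -- align them with your model constructors
model_params = {'input_dim': input_dim,
                'hidden_dim': hidden_dim,
                'layer_dim': layer_dim,
                'output_dim': output_dim,
                'dropout_prob': dropout}

model = get_model('lstm', model_params)
loss_fn = nn.MSELoss(reduction="mean")
optimizer = optim.Adam(model.parameters(), lr=learning_rate)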
import matplotlib.pyplot as plt

def plot_losses(self):
    plt.plot(self.train_losses, label="Training loss")
    plt.plot(self.val_losses, label="Validation loss")
    plt.legend()
    plt.title("Losses")
    plt.show()
    plt.close()
def evaluate(self, test_loader, batch_size=1, n_features=1):
    with torch.no_grad():
        predictions = []
        values = []
        for x_test, y_test in test_loader:
            x_test = x_test.view([batch_size, -1, n_features]).to(device)
            y_test = y_test.to(device)
            self.model.eval()
            yhat = self.model(x_test)
            # Move tensors back to the CPU before converting to NumPy
            predictions.append(yhat.cpu().detach().numpy())
            values.append(y_test.cpu().detach().numpy())
    return predictions, values
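A sketch of calling it: opt is assumed to be the Optimization instance constructed further down, and test_loader_one a DataLoader over the test set built with batch_size=1 so no samples are dropped when the batch dimension is fixed.

# test_loader_one: assumed test-set DataLoader with batch_size=1, drop_last=True
predictions, values = opt.evaluate(test_loader_one, batch_size=1, n_features=input_dim)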
def train(self, train_loader, val_loader, batch_size=64, n_epochs=50, n_features=1):
    model_path = f'models/{self.model}_{datetime.now().strftime("%Y-%m-%d %H:%M:%S")}'
    for epoch in range(1, n_epochs + 1):
        batch_losses = []
        for x_batch, y_batch in train_loader:
            x_batch = x_batch.view([batch_size, -1, n_features]).to(device)
            y_batch = y_batch.to(device)
            loss = self.train_step(x_batch, y_batch)
            batch_losses.append(loss)
        self.train_losses.append(np.mean(batch_losses))
        with torch.no_grad():  # validation pass, no gradient tracking
            batch_val_losses = []
            for x_val, y_val in val_loader:
                x_val = x_val.view([batch_size, -1, n_features]).to(device)
                y_val = y_val.to(device)
                self.model.eval()
                batch_val_losses.append(self.loss_fn(y_val, self.model(x_val)).item())
            self.val_losses.append(np.mean(batch_val_losses))
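The reshape via view([batch_size, -1, n_features]) only works when every batch holds exactly batch_size samples, so the loaders feeding this loop should be built with drop_last=True. A minimal sketch follows; X_train_arr, y_train_arr and the val equivalents are assumed to be the scaled arrays produced in the scaling step, and device is defined here for completeness.

import torch
from torch.utils.data import TensorDataset, DataLoader

device = "cuda" if torch.cuda.is_available() else "cpu"

# drop_last=True keeps every batch exactly batch_size long so view() never fails
train_loader = DataLoader(TensorDataset(torch.Tensor(X_train_arr), torch.Tensor(y_train_arr)),
                          batch_size=batch_size, shuffle=False, drop_last=True)
val_loader = DataLoader(TensorDataset(torch.Tensor(X_val_arr), torch.Tensor(y_val_arr)),
                        batch_size=batch_size, shuffle=False, drop_last=True)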
class Optimization:
    def __init__(self, model, loss_fn, optimizer):
        self.model = model
        self.loss_fn = loss_fn
        self.optimizer = optimizer
        self.train_losses = []
        self.val_losses = []

    def train_step(self, x, y):
        # Sets model to train mode
        self.model.train()
        # Makes predictions and computes the loss
        yhat = self.model(x)
        loss = self.loss_fn(y, yhat)
        # Computes gradients, updates parameters, then resets gradients
        loss.backward()
        self.optimizer.step()
        self.optimizer.zero_grad()
        return loss.item()
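Putting the pieces together, a hypothetical end-to-end run looks like this, assuming model, loss_fn and optimizer were created as in the earlier sketch and the loaders as shown above:

opt = Optimization(model=model.to(device), loss_fn=loss_fn, optimizer=optimizer)
opt.train(train_loader, val_loader, batch_size=batch_size, n_epochs=n_epochs, n_features=input_dim)
opt.plot_losses()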
from sklearn.preprocessing import MinMaxScaler, StandardScaler, MaxAbsScaler, RobustScaler

def get_scaler(scaler):
    scalers = {
        "minmax": MinMaxScaler,
        "standard": StandardScaler,
        "maxabs": MaxAbsScaler,
        "robust": RobustScaler,
    }
    return scalers.get(scaler.lower())()
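A sketch of how the scaler is typically used: fit it on the training split only and reuse the fitted object for validation and test so no information leaks from the future. X_val, X_test and the single-column y frames are assumptions that mirror the X_train frame used earlier.

scaler = get_scaler("minmax")
X_train_arr = scaler.fit_transform(X_train)
X_val_arr = scaler.transform(X_val)
X_test_arr = scaler.transform(X_test)

# Refit on the one-column target so the same scaler object can later
# inverse-transform the value/prediction columns in format_predictions
y_train_arr = scaler.fit_transform(y_train)
y_val_arr = scaler.transform(y_val)
y_test_arr = scaler.transform(y_test)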
from datetime import date
import holidays

us_holidays = holidays.US()

def is_holiday(date):
    date = date.replace(hour=0)
    return 1 if (date in us_holidays) else 0

def add_holiday_col(df, holidays):
    # Flag each timestamp in the index as holiday (1) or not (0)
    return df.assign(is_holiday=df.index.to_series().apply(is_holiday))
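Usage is a one-liner on a frame with a DatetimeIndex; df_features here is just a placeholder name for the feature frame the rest of the pipeline builds on.

# df_features: assumed feature frame indexed by timestamps
df_features = add_holiday_col(df_features, us_holidays)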