Skip to content

Instantly share code, notes, and snippets.

@erap129
Created September 27, 2021 13:07
Show Gist options
  • Select an option

  • Save erap129/fa3a83747ffdd3402fc6ddaa4551ab34 to your computer and use it in GitHub Desktop.

Select an option

Save erap129/fa3a83747ffdd3402fc6ddaa4551ab34 to your computer and use it in GitHub Desktop.
NASA RUL project - LSTM model
class RULModel(nn.Module):
    """LSTM-based regressor for Remaining Useful Life (RUL) prediction.

    Feeds a (batch, seq_len, n_features) sequence through a stacked LSTM and
    regresses a single scalar per sequence from the last layer's final hidden
    state.

    Args:
        n_features: number of input features per time step.
        n_hidden: LSTM hidden size (default 256).
        n_layers: number of stacked LSTM layers (default 3).
    """

    def __init__(self, n_features, n_hidden=256, n_layers=3):
        super().__init__()
        self.lstm = nn.LSTM(
            input_size=n_features,
            hidden_size=n_hidden,
            num_layers=n_layers,
            batch_first=True,
            # NOTE(review): 0.75 is an unusually aggressive inter-layer
            # dropout — presumably deliberate regularization; confirm.
            dropout=0.75,
        )
        self.regressor = nn.Linear(n_hidden, 1)

    def forward(self, x):
        """Return a (batch,) tensor of RUL predictions for input x of shape
        (batch, seq_len, n_features)."""
        # Compact weights into contiguous memory (avoids a cuDNN warning
        # when the model is replicated, e.g. under DataParallel).
        self.lstm.flatten_parameters()
        _, (hidden, _) = self.lstm(x)
        # hidden: (n_layers, batch, n_hidden); take the last layer's state.
        out = hidden[-1]
        # BUGFIX: squeeze only the trailing feature dim. A bare .squeeze()
        # also removed the batch dim when batch size == 1, yielding a 0-d
        # tensor and breaking downstream loss/metric shapes.
        return self.regressor(out).squeeze(-1)
class RULPredictor(pl.LightningModule):
    """LightningModule wrapping :class:`RULModel` with MSE training and an
    RMSE metric logged at each train/val/test step.

    Args:
        n_features: number of input features per time step, forwarded to
            the underlying ``RULModel``.
        lr: Adam learning rate (default 1e-4, matching the previously
            hard-coded value; parameterized for tuning without changing
            existing callers).
    """

    def __init__(self, n_features: int, lr: float = 1e-4):
        super().__init__()
        self.model = RULModel(n_features)
        self.criterion = nn.MSELoss()
        self.lr = lr

    def forward(self, x, labels=None):
        """Run the model; if labels are given, also compute the MSE loss.

        Returns:
            (loss, output) — loss is 0 when ``labels`` is None.
        """
        output = self.model(x)
        loss = 0
        if labels is not None:
            loss = self.criterion(output, labels)
        return loss, output

    def _shared_step(self, batch, stage):
        # Common logic for train/val/test: the three steps previously
        # duplicated this code verbatim, differing only in the log prefix.
        X, y = batch
        loss, outputs = self(X, y)
        # squared=False -> RMSE rather than MSE.
        step_rmse = torchmetrics.functional.mean_squared_error(outputs, y, squared=False)
        self.log(f'{stage}_loss', loss, prog_bar=True, logger=True)
        self.log(f'{stage}_RMSE', step_rmse, prog_bar=True, logger=True)
        return {'loss': loss, 'RMSE': step_rmse}

    def training_step(self, batch, batch_idx):
        return self._shared_step(batch, 'train')

    def validation_step(self, batch, batch_idx):
        return self._shared_step(batch, 'val')

    def test_step(self, batch, batch_idx):
        return self._shared_step(batch, 'test')

    def configure_optimizers(self):
        return optim.Adam(self.parameters(), lr=self.lr)
# Instantiate the predictor with one input feature per selected column.
# NOTE(review): `feature_columns` is defined elsewhere in the notebook/script
# (not visible here) — presumably the list of sensor/feature column names.
model = RULPredictor(n_features=len(feature_columns))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment