import torch
import torch.optim as optim
import matplotlib.pyplot as plt

# 'model', 'loader', 'loss_fn' and 'trainer' are defined in the earlier stage of this tutorial

# We will now freeze 'layer4' and train just the 'fc' layer of the model for 3 more epochs
for name, param in model.named_parameters():
    if 'layer4' in name:
        param.requires_grad = False # layer4 parameters will not get trained now
# Define a new learning rate and a new optimizer that only receives the parameters with requires_grad = True
lr = 0.0003
optimizer = optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr = lr)
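As a quick sanity check (not part of the original gist), we can list which parameter tensors are still trainable; if everything except 'layer4' and 'fc' was already frozen in the previous stage, only the 'fc' weight and bias should show up:

# Optional: confirm that only the 'fc' parameters will be updated by the new optimizer
trainable = [name for name, param in model.named_parameters() if param.requires_grad]
print('trainable parameters:', trainable)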
epochs = 3
log_interval = 2

# Let's train the model for 3 more epochs!
train_losses, val_losses, batch_train_losses, batch_val_losses = trainer(loader, model, loss_fn, optimizer, epochs = epochs, log_interval = log_interval)
# Plotting the epoch losses
plt.plot(train_losses)
plt.plot(val_losses)
plt.legend(['train losses', 'val losses'])
plt.title('Loss vs Epoch')

plt.figure()
plt.plot(batch_train_losses)
plt.title('batch_train_losses')

plt.figure()
plt.plot(batch_val_losses)
plt.title('batch_val_losses')

plt.show() # display the figures (needed when running as a script rather than a notebook)
# Saving the model (architecture and weights)
torch.save(model, 'stage2.pth')
Training started...
epoch >>> 1/3 | |
___TRAINING___ | |
batch_loss at batch_idx 01/16: 0.20289725065231323 | |
batch_loss at batch_idx 03/16: 0.2349197268486023 | |
batch_loss at batch_idx 05/16: 0.2194989025592804 | |
batch_loss at batch_idx 07/16: 0.20219461619853973 | |
batch_loss at batch_idx 09/16: 0.27012479305267334 | |
batch_loss at batch_idx 11/16: 0.20639048516750336 | |
batch_loss at batch_idx 13/16: 0.1523684412240982 | |
batch_loss at batch_idx 15/16: 0.14577656984329224 | |
>>> train loss at epoch 1/3: 0.2009116342887577 | |
___VALIDATION___ | |
batch_loss at batch_idx 01/16: 0.20299889147281647 | |
batch_loss at batch_idx 03/16: 0.19083364307880402 | |
>>> val loss at epoch 1/3: 0.20429044950196124 | |
========================= | |
epoch >>> 2/3 | |
___TRAINING___ | |
batch_loss at batch_idx 01/16: 0.14590243995189667 | |
batch_loss at batch_idx 03/16: 0.10861243307590485 | |
batch_loss at batch_idx 05/16: 0.14622969925403595 | |
batch_loss at batch_idx 07/16: 0.1130327433347702 | |
batch_loss at batch_idx 09/16: 0.1342758983373642 | |
batch_loss at batch_idx 11/16: 0.13757610321044922 | |
batch_loss at batch_idx 13/16: 0.15501776337623596 | |
batch_loss at batch_idx 15/16: 0.11977922171354294 | |
>>> train loss at epoch 2/3: 0.14645167593310474 | |
___VALIDATION___ | |
batch_loss at batch_idx 01/16: 0.16367006301879883 | |
batch_loss at batch_idx 03/16: 0.16462600231170654 | |
>>> val loss at epoch 2/3: 0.17527046447663797 | |
========================= | |
epoch >>> 3/3 | |
___TRAINING___ | |
batch_loss at batch_idx 01/16: 0.1762229949235916 | |
batch_loss at batch_idx 03/16: 0.10568083077669144 | |
batch_loss at batch_idx 05/16: 0.14333905279636383 | |
batch_loss at batch_idx 07/16: 0.08794888854026794 | |
batch_loss at batch_idx 09/16: 0.1599852591753006 | |
batch_loss at batch_idx 11/16: 0.15842339396476746 | |
batch_loss at batch_idx 13/16: 0.08625025302171707 | |
batch_loss at batch_idx 15/16: 0.12491285800933838 | |
>>> train loss at epoch 3/3: 0.13451774695174026 | |
___VALIDATION___ | |
batch_loss at batch_idx 01/16: 0.15565256774425507 | |
batch_loss at batch_idx 03/16: 0.13937778770923615 | |
>>> val loss at epoch 3/3: 0.15460531577819914 | |
=========================
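Since torch.save(model, 'stage2.pth') pickles the whole module (architecture and weights), the checkpoint can later be reloaded for inference with torch.load, as long as the class used to build the model is importable. A minimal sketch (on recent PyTorch versions you may need to pass weights_only=False to torch.load for a fully pickled module):

# Reload the saved model and switch it to evaluation mode for inference
model = torch.load('stage2.pth')
model.eval() # disables dropout and uses running statistics in batch-norm layers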