from IPython.display import HTML, Image
from google.colab.output import eval_js
from base64 import b64decode
# setup the canvas
canvas_html = """
<canvas width=%d height=%d></canvas>
<button>Done!</button>
<script>
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
########################################################################################## | |
import torch | |
import torch.nn.functional as F | |
import pytorch_lightning as pl | |
########################################################################################## | |
class FlashModel(pl.LightningModule): | |
def __init__(self, model): | |
super().__init__() | |
self.model = model |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import torch | |
import torch.nn.functional as F | |
import pytorch_lightning as pl | |
########################################################################################## | |
class FlashModel(pl.LightningModule): | |
"""DOCSTRING""" | |
def __init__(self, model): | |
super().__init__() |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import torch | |
import torch.nn.functional as F | |
import pytorch_lightning as pl | |
########################################################################################### | |
class FlashModel(pl.LightningModule): | |
"""DOCSTRING""" | |
def __init__(self, model): | |
super().__init__() |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import torch | |
import torch.nn.functional as F | |
import pytorch_lightning as pl | |
########################################################################################### | |
## Pytorch_Lightning version | |
## | |
class FlashModel(pl.LightningModule): | |
"""DOCSTRING""" | |
def __init__(self, model): |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import os | |
import torch | |
import torch.nn.functional as F | |
from torchvision import datasets, transforms | |
from torch.utils.data import DataLoader | |
import pytorch_lightning as pl | |
########################################################################################### |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# A LightningModule ORGANIZES the PyTorch code into the following modules: | |
# 1. Computations (init) | |
# 2. Training loop (training_step) | |
# 3. Validation loop (validation_step) | |
# 4. Test loop (test_step) | |
# 5. Optimizers (configure_optimizers) | |
############################################################################## | |
model = FlashModel() | |
trainer = Trainer() |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
### DATALOADERS ################################################################## | |
# When building DataLoaders, set `num_workers>0` and `pin_memory=True` | |
DataLoader(dataset, num_workers=8, pin_memory=True) | |
### num_workers ################################################################## | |
# num_workers depends on the batch size and the machine | |
# A general place to start is to set num_workers = number of CPUs in the machine. | |
# Increasing num_workers also increases the CPU usage | |
# BEST TIP: Increase num_workers slowly and stop when there is no performance increase. |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import torch as pt | |
import pytorch_lightning as pl | |
####################################################################### | |
class FlashModel(pl.LightningModule): | |
"""This defines a MODEL""" | |
def __init__(self, num_layers: int = 3): | |
super().__init__() | |
self.layer1 = pt.nn.Linear() | |
self.layer2 = pt.nn.Linear() |
NewerOlder