import torch.nn as nn
import torch.nn.functional as F

class MyModel(nn.Module):
    def __init__(self):
        super(MyModel, self).__init__()
        # 28x28x1 => 26x26x32
        self.conv1 = nn.Conv2d(in_channels=1, out_channels=32, kernel_size=3)
        self.d1 = nn.Linear(26 * 26 * 32, 128)
        self.d2 = nn.Linear(128, 10)

    def forward(self, x):
        # 32x1x28x28 => 32x32x26x26
        x = self.conv1(x)
        x = F.relu(x)

        # flatten => 32 x (32*26*26)
        x = x.flatten(start_dim=1)

        # 32 x (32*26*26) => 32x128
        x = self.d1(x)
        x = F.relu(x)

        # logits => 32x10
        logits = self.d2(x)
        out = F.softmax(logits, dim=1)
        return out
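
# A minimal usage sketch (not part of the original gist), assuming MNIST-sized
# input: instantiate the model, run a dummy batch of 32 single-channel 28x28
# images through it, and check the output shape.
import torch

model = MyModel()
dummy = torch.randn(32, 1, 28, 28)  # batch of 32 single-channel 28x28 images
probs = model(dummy)                # forward pass returns softmax probabilities
print(probs.shape)                  # torch.Size([32, 10])

# Note on the design choice: because forward() already applies softmax, a loss
# that expects probabilities (or log-probabilities, e.g. nn.NLLLoss on
# probs.log()) fits this output; nn.CrossEntropyLoss expects raw logits and
# would apply softmax a second time.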