Skip to content

Instantly share code, notes, and snippets.

@adamoudad
Created March 3, 2021 15:56
Show Gist options
  • Save adamoudad/5a241e377dd27819ef3fe8d402da4079 to your computer and use it in GitHub Desktop.
from torch import nn
from torch.nn.functional import softmax
class CustomModel(nn.Module):
    """Binary sequence classifier: Embedding -> LSTM -> Linear -> Sigmoid.

    Takes a tensor of token ids shaped (seq_len, batch) — the default
    (non-batch-first) LSTM layout — and returns a (batch, 1) tensor of
    probabilities computed from the last LSTM time step.
    """

    def __init__(self, vocab_size=50, embedding_dim=16, hidden_size=8):
        super().__init__()
        # Token-id -> dense-vector lookup table.
        self.encoder = nn.Embedding(vocab_size, embedding_dim)
        # batch_first is left at its default, so inputs are (seq, batch, feat).
        self.lstm = nn.LSTM(embedding_dim, hidden_size)
        self.linear = nn.Linear(hidden_size, 1)
        self.activation = nn.Sigmoid()

    def forward(self, x):
        """Map token ids of shape (seq_len, batch) to probabilities (batch, 1)."""
        embedded = self.encoder(x)
        seq_states, _ = self.lstm(embedded)
        last_state = seq_states[-1]  # keep only the final time step
        logits = self.linear(last_state)
        return self.activation(logits)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment