from keras import backend as K
from keras.engine import InputSpec
from keras.engine.topology import Layer
import numpy as np


class TemporalMaxPooling(Layer):
    """
    This pooling layer accepts the temporal sequence output by a recurrent layer
    and performs temporal max pooling, looking only at the non-masked portion of the sequence.
    """
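The body of the layer is cut off in this preview, but the docstring makes the intended placement clear: it sits on top of a masked recurrent layer and collapses the time dimension. A minimal usage sketch (the vocabulary size, dimensions, and surrounding model are assumptions, not part of the original snippet):

from keras.models import Sequential
from keras.layers import Embedding, LSTM

# Hypothetical model: the Embedding mask propagates through the LSTM, and
# TemporalMaxPooling reduces the masked sequence to a single vector per sample.
model = Sequential()
model.add(Embedding(input_dim=10000, output_dim=128, mask_zero=True))
model.add(LSTM(64, return_sequences=True))
model.add(TemporalMaxPooling())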
from gensim.models import KeyedVectors

# Load gensim word2vec
w2v_path = '<Gensim File Path>'
w2v = KeyedVectors.load_word2vec_format(w2v_path)

import io
# Vector file: `\t` separates the values within a vector and `\n` separates the words
import torch
import torch.nn as nn
from torch.nn import Parameter


class SelfAttention(nn.Module):
    def __init__(self, attention_size, batch_first=False, non_linearity="tanh"):
        super(SelfAttention, self).__init__()
        self.batch_first = batch_first
        self.attention_weights = Parameter(torch.FloatTensor(attention_size))
        self.softmax = nn.Softmax(dim=-1)
        if non_linearity == "relu":
            self.non_linearity = nn.ReLU()
        else:
            self.non_linearity = nn.Tanh()
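    # The forward pass is cut off in this preview. Below is a minimal sketch of
    # how such a layer commonly scores timesteps and pools them; it is an
    # assumption, not necessarily the original implementation.
    def forward(self, inputs):
        # inputs: (batch, seq_len, hidden) if batch_first, else (seq_len, batch, hidden)
        if not self.batch_first:
            inputs = inputs.transpose(0, 1)
        # Score every timestep against the learned attention vector.
        scores = self.non_linearity(inputs.matmul(self.attention_weights))
        weights = self.softmax(scores)  # (batch, seq_len)
        # Weighted sum over time -> one representation per sequence.
        representations = (inputs * weights.unsqueeze(-1)).sum(dim=1)
        return representations, weights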