from tensorflow.keras.layers import Layer


class EncoderLayer(Layer):
    def __init__(self, num_neurons, num_hidden_neurons, num_heads):
        super(EncoderLayer, self).__init__()

        # Build multi-head attention layer and the dropout and
        # normalization layers that follow it
        self.multi_head_attention_layer, self.attention_dropout, self.attention_normalization = \
            build_multi_head_attention_layers(num_neurons, num_heads)

        # Build feed-forward neural network and the dropout and
        # normalization layers that follow it
        self.feed_forward_layer, self.feed_forward_dropout, self.feed_forward_normalization = \
            build_feed_forward_layers(num_neurons, num_hidden_neurons)

    def call(self, sequence, training, mask):
        # Calculate attention output (self-attention: the sequence attends to itself)
        attention_output, _ = self.multi_head_attention_layer(sequence, sequence, sequence, mask)
        attention_output = self.attention_dropout(attention_output, training=training)
        # First residual connection followed by layer normalization
        attention_output = self.attention_normalization(sequence + attention_output)

        # Calculate output of the feed-forward network
        output = self.feed_forward_layer(attention_output)
        output = self.feed_forward_dropout(output, training=training)
        # Second residual connection followed by layer normalization
        output = self.feed_forward_normalization(attention_output + output)
        return output
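
# --- Sketch of the helper builders used above ---
# The two build_* functions are defined elsewhere in the original gist series.
# The versions below are a minimal sketch following the standard Transformer
# ("Attention Is All You Need") layout: scaled dot-product multi-head
# attention, and a position-wise feed-forward network. The attention
# implementation, the dropout rate of 0.1, and the epsilon value are
# assumptions for illustration, not the author's exact code.
import tensorflow as tf
from tensorflow.keras.layers import Dense, Dropout, LayerNormalization


class MultiHeadAttentionLayer(Layer):
    def __init__(self, num_neurons, num_heads):
        super(MultiHeadAttentionLayer, self).__init__()
        assert num_neurons % num_heads == 0
        self.num_heads = num_heads
        self.depth = num_neurons // num_heads
        self.num_neurons = num_neurons
        # Separate linear projections for queries, keys and values
        self.wq = Dense(num_neurons)
        self.wk = Dense(num_neurons)
        self.wv = Dense(num_neurons)
        self.output_projection = Dense(num_neurons)

    def split_heads(self, x, batch_size):
        # (batch, seq_len, num_neurons) -> (batch, num_heads, seq_len, depth)
        x = tf.reshape(x, (batch_size, -1, self.num_heads, self.depth))
        return tf.transpose(x, perm=[0, 2, 1, 3])

    def call(self, query, key, value, mask):
        batch_size = tf.shape(query)[0]
        query = self.split_heads(self.wq(query), batch_size)
        key = self.split_heads(self.wk(key), batch_size)
        value = self.split_heads(self.wv(value), batch_size)
        # Scaled dot-product attention
        scores = tf.matmul(query, key, transpose_b=True)
        scores = scores / tf.math.sqrt(tf.cast(self.depth, tf.float32))
        # Assumes mask marks padded positions with 1 and is broadcastable
        # to (batch, num_heads, seq_len, seq_len)
        if mask is not None:
            scores += (mask * -1e9)
        weights = tf.nn.softmax(scores, axis=-1)
        attention = tf.matmul(weights, value)
        # (batch, num_heads, seq_len, depth) -> (batch, seq_len, num_neurons)
        attention = tf.transpose(attention, perm=[0, 2, 1, 3])
        attention = tf.reshape(attention, (batch_size, -1, self.num_neurons))
        return self.output_projection(attention), weights


def build_multi_head_attention_layers(num_neurons, num_heads):
    multi_head_attention_layer = MultiHeadAttentionLayer(num_neurons, num_heads)
    dropout = Dropout(0.1)
    normalization = LayerNormalization(epsilon=1e-6)
    return multi_head_attention_layer, dropout, normalization


def build_feed_forward_layers(num_neurons, num_hidden_neurons):
    # Position-wise feed-forward network: expand, apply ReLU, project back
    feed_forward_layer = tf.keras.Sequential([
        Dense(num_hidden_neurons, activation='relu'),
        Dense(num_neurons),
    ])
    dropout = Dropout(0.1)
    normalization = LayerNormalization(epsilon=1e-6)
    return feed_forward_layer, dropout, normalization


# Quick smoke test on random data; the shapes below (batch of 64 sequences of
# length 40 with model width 512) are illustrative assumptions
if __name__ == '__main__':
    sample = tf.random.uniform((64, 40, 512))
    encoder_layer = EncoderLayer(num_neurons=512, num_hidden_neurons=2048, num_heads=8)
    output = encoder_layer(sample, training=False, mask=None)
    print(output.shape)  # (64, 40, 512): the layer preserves the input shape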