Skip to content

Instantly share code, notes, and snippets.

@AFAgarap
Last active September 29, 2019 05:21
Show Gist options
  • Save AFAgarap/10a0726ace1df3afbde3960e81cec69b to your computer and use it in GitHub Desktop.
TensorFlow 2.0 Subclassing API implementation of a feed-forward neural network. Link to blog: https://towardsdatascience.com/how-can-i-trust-you-fb433a06256c?source=friends_link&sk=0af208dc53be2a326d2407577184686b
class NeuralNet(tf.keras.Model):
    """Feed-forward neural network: Dense-ReLU -> Dropout -> Dense-ReLU -> Dropout -> Dense-softmax.

    Expected keyword arguments:
        units (sequence of int): sizes of the two hidden layers (units[0], units[1]).
        input_shape: shape of a single input sample, forwarded to the first Dense layer.
        dropout_rate (float): fraction of units to drop after each hidden layer.
        num_classes (int): number of output classes (softmax width).
    """

    def __init__(self, **kwargs):
        super().__init__()
        self.hidden_layer_1 = tf.keras.layers.Dense(
            units=kwargs['units'][0],
            activation=tf.nn.relu,
            input_shape=kwargs['input_shape']
        )
        # BUG FIX: original passed rate=['dropout_rate'] — a literal list,
        # not the configured rate. Must index into kwargs like layer 2 does.
        self.dropout_layer_1 = tf.keras.layers.Dropout(
            rate=kwargs['dropout_rate']
        )
        self.hidden_layer_2 = tf.keras.layers.Dense(
            units=kwargs['units'][1],
            activation=tf.nn.relu
        )
        self.dropout_layer_2 = tf.keras.layers.Dropout(
            rate=kwargs['dropout_rate']
        )
        # Softmax output over the class logits.
        self.output_layer = tf.keras.layers.Dense(
            units=kwargs['num_classes'],
            activation=tf.nn.softmax
        )

    @tf.function
    def call(self, features):
        """Run a forward pass and return class probabilities for `features`."""
        activation = self.hidden_layer_1(features)
        activation = self.dropout_layer_1(activation)
        activation = self.hidden_layer_2(activation)
        activation = self.dropout_layer_2(activation)
        output = self.output_layer(activation)
        return output
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment