@tonyyang-svail
Created September 6, 2018 02:14
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
import tensorflow.contrib.eager as tfe

tfe.enable_eager_execution()

INPUT_DIMENSION = 784
HIDDEN_DIMENSION = 100
NUM_CLASS = 10
BATCH_SIZE = 2

def image_batch():
    """Return a random batch of fake images and matching one-hot labels."""
    shape = (BATCH_SIZE, INPUT_DIMENSION)
    images = tf.random_uniform(shape)
    labels = tf.random_uniform(
        [BATCH_SIZE], minval=0, maxval=NUM_CLASS, dtype=tf.int32)
    one_hot = tf.one_hot(labels, NUM_CLASS)
    return images, one_hot

class MLP(tf.keras.Model):
    """Two-layer fully connected network: input -> ReLU hidden -> logits."""

    def __init__(self, input_dim, hidden_dim, output_dim):
        super(MLP, self).__init__(name='')
        self.fc1 = tf.keras.layers.Dense(hidden_dim, input_shape=(input_dim,))
        self.fc2 = tf.keras.layers.Dense(output_dim, input_shape=(hidden_dim,))

    def call(self, input_tensor):
        x = self.fc1(input_tensor)
        x = tf.nn.relu(x)
        # Return raw logits; softmax_cross_entropy below applies the softmax,
        # so no activation is applied to the output layer.
        return self.fc2(x)

model = MLP(INPUT_DIMENSION, HIDDEN_DIMENSION, NUM_CLASS)
opt = tf.train.GradientDescentOptimizer(learning_rate=0.01)

# Train on random batches with plain SGD, differentiating through a gradient tape.
for i in range(50):
    images, labels = image_batch()

    with tf.GradientTape() as grad_tape:
        logits = model(images)
        loss = tf.losses.softmax_cross_entropy(
            logits=logits, onehot_labels=labels)
        print(loss)

    grads = grad_tape.gradient(loss, model.variables)
    opt.apply_gradients(zip(grads, model.variables))

# POSSIBLE ENHANCEMENT(tony): visualize tape, like tensorboard
# POSSIBLE ENHANCEMENT(tony): make model save work, model.save('model.h5', input_dim=(784,))
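
# Minimal sketch toward the second enhancement (not part of the original gist):
# one way to persist the trained weights, assuming the TF 1.x
# tf.contrib.eager.Saver API. The checkpoint prefix 'mlp_ckpt' is an
# illustrative name, not anything the gist defines.
saver = tfe.Saver(model.variables)
saver.save('mlp_ckpt')
# To reload later: rebuild the model, run one batch through it so its
# variables are created, then call saver.restore('mlp_ckpt').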