@tonyyang-svail · Created September 6, 2018 02:14
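A minimal GAN training loop in TensorFlow eager execution (TF 1.x, via tf.contrib.eager): a generator and a discriminator, each a two-layer MLP, trained alternately with softmax cross-entropy on random stand-in "images".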
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf
import tensorflow.contrib.eager as tfe

tfe.enable_eager_execution()

NOISE_DIMENSION = 10
INPUT_DIMENSION = 784
HIDDEN_DIMENSION = 100
NUM_CLASS = 2
BATCH_SIZE = 2

def image_batch():
    # Stand-in for a real data pipeline: a batch of random "images".
    return tf.random_uniform((BATCH_SIZE, INPUT_DIMENSION))

class MLP(tf.keras.Model):
    """A two-layer fully connected network, used for both G and D."""

    def __init__(self, input_dim, hidden_dim, output_dim):
        super(MLP, self).__init__(name='')
        self.fc1 = tf.keras.layers.Dense(hidden_dim, input_shape=(input_dim,))
        self.fc2 = tf.keras.layers.Dense(output_dim, input_shape=(hidden_dim,))

    def call(self, input_tensor):
        x = self.fc1(input_tensor)
        x = tf.nn.relu(x)
        x = self.fc2(x)
        return tf.nn.relu(x)

G = MLP(NOISE_DIMENSION, HIDDEN_DIMENSION, INPUT_DIMENSION)
D = MLP(INPUT_DIMENSION, HIDDEN_DIMENSION, NUM_CLASS)
opt = tf.train.GradientDescentOptimizer(learning_rate=0.01)

for i in range(50):
    # Train D: label fake images [1, 0] and real images [0, 1].
    noise = tf.random_uniform((BATCH_SIZE, NOISE_DIMENSION))
    fake_image = G(noise)
    real_image = image_batch()
    images = tf.concat([fake_image, real_image], axis=0)
    labels = tf.constant([[1, 0] for x in range(BATCH_SIZE)] +
                         [[0, 1] for x in range(BATCH_SIZE)])
    with tf.GradientTape() as grad_tape:
        logits = D(images)
        loss = tf.losses.softmax_cross_entropy(
            logits=logits, onehot_labels=labels)
        print(loss)
    grads = grad_tape.gradient(loss, D.variables)
    opt.apply_gradients(zip(grads, D.variables))

    # Train G: push D to label generated images as real ([0, 1]).
    with tf.GradientTape() as grad_tape:
        noise = tf.random_uniform((BATCH_SIZE, NOISE_DIMENSION))
        images = G(noise)
        labels = tf.constant([[0, 1] for x in range(BATCH_SIZE)])
        logits = D(images)
        loss = tf.losses.softmax_cross_entropy(
            logits=logits, onehot_labels=labels)
        print(loss)
    grads = grad_tape.gradient(loss, G.variables)
    opt.apply_gradients(zip(grads, G.variables))
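
# To eyeball what G has learned after training, sample it directly. A minimal
# sketch; the 28x28 reshape assumes the 784-dim outputs are flattened
# MNIST-style images (not stated in the original).
samples = G(tf.random_uniform((BATCH_SIZE, NOISE_DIMENSION)))
samples = tf.reshape(samples, (-1, 28, 28))
print(samples.shape)  # (BATCH_SIZE, 28, 28)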

# POSSIBLE ENHANCEMENT(tony): visualize tape, like tensorboard
# POSSIBLE ENHANCEMENT(tony): make model save work, e.g. model.save('model.h5', input_dim=(784,))
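
# On the second TODO: model.save('model.h5') fails for subclassed tf.keras
# models, but object-based checkpointing works in eager mode. A minimal
# sketch, assuming TF >= 1.9 (where tf.train.Checkpoint is available); the
# './gan_ckpt' prefix is an arbitrary choice.
checkpoint = tf.train.Checkpoint(generator=G, discriminator=D, optimizer=opt)
save_path = checkpoint.save('./gan_ckpt')  # writes gan_ckpt-1.index / .data-* files
print('checkpoint saved to', save_path)
# To load it back later:
# checkpoint.restore(tf.train.latest_checkpoint('.'))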