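A sketch of a functional, grad-style training loop for TensorFlow eager execution: layer parameters are collected inside a hypothetical parameters() scope, forward is a plain Python function that returns the loss, and grad(forward, params) wraps it into a function that returns gradients with respect to params.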
with parameters() as params:
    # Variables created in this scope (the weights and biases of fc1
    # and fc2) are collected into params.
    fc1 = layers.Dense(hidden_dim, input_shape=(input_dim,))
    fc2 = layers.Dense(output_dim, input_shape=(hidden_dim,))

def forward(images, labels):
    x = fc1(images)
    x = layers.relu(x)
    x = fc2(x)
    logits = layers.relu(x)
    loss = losses.softmax_cross_entropy(logits, labels)
    return loss

forward_and_backward = grad(forward, params)
opt = tf.train.GradientDescentOptimizer(learning_rate=0.01)

for i in range(50):
    images, labels = image_batch()
    grads = forward_and_backward(images, labels)
    opt.apply_gradients(params, grads)
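For comparison, here is a minimal runnable analogue of the same loop written against TensorFlow 2's actual eager APIs rather than the proposed ones. This is a sketch under assumptions: tf.keras.layers.Dense plays the role of layers.Dense and exposes its parameters via trainable_variables, apply_gradients takes (gradient, variable) pairs, and the dimensions and image_batch below are placeholders standing in for a real data pipeline.

import tensorflow as tf

input_dim, hidden_dim, output_dim = 784, 256, 10

fc1 = tf.keras.layers.Dense(hidden_dim, activation="relu")
fc2 = tf.keras.layers.Dense(output_dim)

def forward(images, labels):
    logits = fc2(fc1(images))
    return tf.reduce_mean(
        tf.nn.sparse_softmax_cross_entropy_with_logits(
            labels=labels, logits=logits))

def image_batch():
    # Placeholder for a real input pipeline: random images and labels.
    images = tf.random.normal([32, input_dim])
    labels = tf.random.uniform([32], maxval=output_dim, dtype=tf.int32)
    return images, labels

opt = tf.keras.optimizers.SGD(learning_rate=0.01)

for i in range(50):
    images, labels = image_batch()
    with tf.GradientTape() as tape:
        loss = forward(images, labels)
    params = fc1.trainable_variables + fc2.trainable_variables
    grads = tape.gradient(loss, params)
    opt.apply_gradients(zip(grads, params))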
One possible implementation of grad:

def grad(forward, params):
    def forward_and_backward(*args, **kwargs):
        # Record the forward pass on a tape, then differentiate the
        # resulting loss with respect to the collected parameters.
        with tf.GradientTape() as grad_tape:
            loss = forward(*args, **kwargs)
        return grad_tape.gradient(loss, params)
    return forward_and_backward
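To sanity-check this wrapper, here is a toy example, assuming TensorFlow eager execution; tf.Variable objects are watched by the tape automatically, so no explicit watch call is needed. The name loss_fn is illustrative, not part of the gist.

import tensorflow as tf

w = tf.Variable(3.0)

def loss_fn():
    return w * w  # d(w^2)/dw = 2w

df = grad(loss_fn, [w])
print(df())  # prints a list containing a tensor equal to 6.0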