@tonyyang-svail
Last active September 7, 2018 03:09
with parameters() as params:
  fc1 = layers.Dense(hidden_dim, input_shape=(input_dim,))
  fc2 = layers.Dense(output_dim, input_shape=(hidden_dim,))

def forward(images, labels):
  x = fc1(images)
  x = layers.relu(x)
  x = fc2(x)
  logits = layers.relu(x)
  loss = losses.softmax_cross_entropy(logits, labels)
  return loss

forward_and_backward = grad(forward, params)

opt = tf.train.GradientDescentOptimizer(learning_rate=0.01)
for i in range(50):
  images, labels = image_batch()
  grads = forward_and_backward(images, labels)
  opt.apply_gradients(params, grads)
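
The parameters() context manager is not shown here. One way to approximate it, assuming tf.keras layers under eager execution, is to record every layer constructed inside the with-block and gather their trainable variables once the layers have been built (variables are created lazily on the first call). The names parameters and _ParamScope below are illustrative sketches, not TensorFlow APIs; with this sketch, params is the scope object and params.trainable_variables() is what would be handed to grad and to the optimizer.

import contextlib
import tensorflow as tf

class _ParamScope(object):
  """Records every Keras layer constructed inside the with-block."""
  def __init__(self):
    self.layers = []

  def trainable_variables(self):
    # Variables only exist after each layer has been built, i.e. called once.
    return [v for l in self.layers for v in l.trainable_variables]

@contextlib.contextmanager
def parameters():
  scope = _ParamScope()
  original_init = tf.keras.layers.Layer.__init__

  def recording_init(layer, *args, **kwargs):
    original_init(layer, *args, **kwargs)
    scope.layers.append(layer)

  # Temporarily hook layer construction so new layers register themselves.
  tf.keras.layers.Layer.__init__ = recording_init
  try:
    yield scope
  finally:
    tf.keras.layers.Layer.__init__ = original_init
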
wangkuiyi commented Sep 6, 2018

with parameters() as params:
  fc1 = layers.Dense(hidden_dim, input_shape=(input_dim,))
  fc2 = layers.Dense(output_dim, input_shape=(hidden_dim,))

def forward(images, labels):
  x = fc1(images)
  x = layers.relu(x)
  x = fc2(x)
  logits = layers.relu(x)
  return losses.softmax_cross_entropy(logits, labels)

opt = tf.train.GradientDescentOptimizer(learning_rate=0.01)
for i in range(50):
  images, labels = image_batch()
  tape.train(forward, params, opt, images, labels)

print(params)
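
The tape.train helper above is left unspecified. A minimal sketch of what it might do, assuming the stock tf.train optimizer interface (apply_gradients takes (gradient, variable) pairs), could look like the following; train here is a hypothetical helper, not a TensorFlow API:

def train(forward, params, opt, *args, **kwargs):
  # One training step: record the forward pass, differentiate the loss
  # with respect to params, then let the optimizer apply the update.
  with tf.GradientTape() as grad_tape:
    loss = forward(*args, **kwargs)
  grads = grad_tape.gradient(loss, params)
  opt.apply_gradients(zip(grads, params))
  return loss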

tonyyang-svail (Author) commented Sep 6, 2018

One possible implementation of grad:

def grad(forward, params):
  def forward_and_backward(*args, **kwargs):
    # Record the forward pass and return d(loss)/d(params).
    with tf.GradientTape() as grad_tape:
      loss = forward(*args, **kwargs)
    return grad_tape.gradient(loss, params)

  return forward_and_backward
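
If this is driven by the stock tf.train optimizer rather than a wrapper, note that GradientTape.gradient returns the gradients in the same order as params, while Optimizer.apply_gradients expects (gradient, variable) pairs, so the update in the training loop would read:

grads = forward_and_backward(images, labels)
opt.apply_gradients(zip(grads, params))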
