Skip to content

Instantly share code, notes, and snippets.

@khanhnamle1994
Created March 8, 2018 03:01
Show Gist options
  • Select an option

  • Save khanhnamle1994/9800a941e34695033b5e2336db7af9aa to your computer and use it in GitHub Desktop.

Select an option

Save khanhnamle1994/9800a941e34695033b5e2336db7af9aa to your computer and use it in GitHub Desktop.
require 'torch'
require 'nn'
require 'optim'

-- Build a model as a sequence of layers, and a loss function.
-- Architecture: 500 inputs -> 50 hidden (ReLU) -> 5 class scores.
local model = nn.Sequential()
model:add(nn.Linear(500, 50))
model:add(nn.ReLU())
model:add(nn.Linear(50, 5))
local loss_fn = nn.CrossEntropyCriterion()

-- Synthetic minibatch: 32 examples with 500 features each,
-- integer class labels drawn uniformly from 1..5 (Torch is 1-based).
local x = torch.randn(32, 500)
local y = torch.Tensor(32):random(5)

-- Flatten all learnable parameters and their gradients into two single
-- tensors, as the optim package requires; these alias the model's storage.
local weights, grad_weights = model:getParameters()

-- Callback for optim: given the current weights, return the loss and the
-- gradient of the loss with respect to the weights.
local function f(w)
  -- optim passes back the same flattened storage it was given.
  assert(w == weights)

  -- Forward pass.
  -- Fix: the original declared `local stores` here but read `scores`
  -- below, so loss_fn:forward received nil and crashed at runtime.
  local scores = model:forward(x)
  local loss = loss_fn:forward(scores, y)

  -- Backward pass. Gradients accumulate across calls, so zero them first.
  grad_weights:zero()
  local grad_scores = loss_fn:backward(scores, y)
  model:backward(x, grad_scores)

  return loss, grad_weights
end

-- Run 25 Adam steps; `state` carries the learning rate and is also where
-- adam keeps its per-parameter moment buffers between iterations.
local state = {learningRate = 1e-3}
for t = 1, 25 do
  optim.adam(f, weights, state)
end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment