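The training loop below references a MoonsModel class and a dataset iterator ds that are not shown in the gist. The following is a minimal sketch of what they might look like, assuming a small two-layer MLP with a sigmoid output and the scikit-learn make_moons data wrapped in a DataLoader; the class name and the n_features/n_neurons arguments come from the loop below, while the layer sizes, noise level, and batch size are assumptions.

# Assumed setup (not part of the original gist): model definition and data loader.
import numpy as np
import torch as tr
import torch.nn as nn
from sklearn.datasets import make_moons
from torch.utils.data import DataLoader, TensorDataset


class MoonsModel(nn.Module):
    """Small MLP for binary classification on the two-moons dataset."""
    def __init__(self, n_features, n_neurons):
        super().__init__()
        self.hidden = nn.Linear(n_features, n_neurons)
        self.out = nn.Linear(n_neurons, 1)

    def forward(self, x):
        x = tr.relu(self.hidden(x))
        # sigmoid output in [0, 1] so nn.BCELoss can be applied directly
        return tr.sigmoid(self.out(x)).squeeze(-1)


# Two-moons data wrapped in a DataLoader; sample count, noise, and batch size are assumptions.
X, y = make_moons(n_samples=1000, noise=0.1, random_state=0)
ds = DataLoader(TensorDataset(tr.from_numpy(X).float(),
                              tr.from_numpy(y).float()),
                batch_size=50, shuffle=True)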
model = MoonsModel(n_features=2, n_neurons=50)
cost_func = nn.BCELoss()
optimizer = tr.optim.Adam(params=model.parameters(), lr=0.01)

num_epochs = 20
losses = []
accs = []

for e in range(num_epochs):
    for ix, (_x, _y) in enumerate(ds):
        #=========prepare the input tensors========================
        _x = _x.float()
        _y = _y.float()
        #=========forward pass=====================================
        yhat = model(_x)
        loss = cost_func(yhat, _y)
        acc = tr.eq(yhat.round(), _y).float().mean()  # batch accuracy
        #=========backward pass====================================
        optimizer.zero_grad()  # zero the gradients accumulated on the previous step
        loss.backward()        # backpropagate the loss through the model
        optimizer.step()       # update the parameters using the computed gradients
        losses.append(loss.item())
        accs.append(acc.item())
    if e % 1 == 0:  # print progress every epoch
        print("[{}/{}], loss: {:.3f} acc: {:.3f}".format(e, num_epochs, loss.item(), acc.item()))
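After training, the recorded losses and accs can be inspected directly; a minimal follow-up sketch using matplotlib (the plotting code is not part of the original gist) might look like this.

# Optional follow-up (assumed, not in the gist): plot the per-batch loss and accuracy curves.
import matplotlib.pyplot as plt

plt.plot(losses, label="BCE loss")
plt.plot(accs, label="accuracy")
plt.xlabel("batch update")
plt.legend()
plt.show()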