Example of math for weight update
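For reference, the code below implements the standard single-sample gradient descent step for a sigmoid unit trained with squared error. In LaTeX, with symbols matching the variable names in the code:

h = \mathbf{w} \cdot \mathbf{x}, \qquad \hat{y} = \sigma(h), \qquad e = y - \hat{y}

\delta = e \,\sigma'(h), \qquad \Delta w_i = \eta \,\delta\, x_i

where \sigma is the sigmoid, \eta is the learning rate (learnrate), \delta is error_term, and \Delta w is del_w.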
import numpy as np

def sigmoid(x):
    """
    Calculate sigmoid
    """
    return 1 / (1 + np.exp(-x))

def sigmoid_prime(x):
    """
    Derivative of the sigmoid function
    """
    return sigmoid(x) * (1 - sigmoid(x))

learnrate = 0.5
x = np.array([1, 2, 3, 4])
y = np.array(0.5)

# Initial weights
w = np.array([0.5, -0.5, 0.3, 0.1])

### Calculate one gradient descent step for each weight
### Note: Some steps have been consolidated, so there are
### fewer variable names than in the above sample code

# Calculate the node's linear combination of inputs and weights
h = np.dot(x, w)

# Calculate output of neural network
nn_output = sigmoid(h)

# Calculate error of neural network
error = y - nn_output

# Calculate the error term
# Remember, this requires the output gradient, which we haven't
# specifically added a variable for.
error_term = error * sigmoid_prime(h)

# Calculate change in weights
del_w = learnrate * error_term * x

print('Neural Network output:')
print(nn_output)
print('Amount of Error:')
print(error)
print('Change in Weights:')
print(del_w)
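The gist computes the weight change but never applies it. As a minimal follow-up sketch (not part of the original gist), the same step can be repeated so the output converges toward the target; this reuses x, y, learnrate, sigmoid, and sigmoid_prime defined above:

# Sketch: repeat the gradient descent step, applying the update each time.
w = np.array([0.5, -0.5, 0.3, 0.1])
for step in range(100):
    h = np.dot(x, w)                        # linear combination
    nn_output = sigmoid(h)                  # network output
    error = y - nn_output                   # output error
    error_term = error * sigmoid_prime(h)   # error scaled by output gradient
    w = w + learnrate * error_term * x      # apply the weight update

print('Output after 100 steps:')
print(sigmoid(np.dot(x, w)))

Each pass recomputes the gradient at the current weights, so the output moves from roughly 0.69 toward the target y = 0.5 as h is driven toward 0.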