
@kaituoxu
Last active March 30, 2016 08:42
An implementation of a simple neural network
#!/usr/bin/env python3
# coding: utf-8
import numpy as np

np.random.seed(1)


def sigmoid(x):
    """Element-wise logistic activation."""
    return 1 / (1 + np.exp(-x))


def ForwardComputation(X, w0, w1):
    """Forward pass through the 3-4-1 network: input -> hidden -> output."""
    v1 = sigmoid(np.dot(X, w0))   # hidden-layer activations
    v2 = sigmoid(np.dot(v1, w1))  # output-layer activations
    return v2


def simpleNN():
    """Train a tiny two-layer network with plain backpropagation."""
    # Training data: 4 samples with 3 binary features; the label equals the first feature.
    X = np.array([[0, 0, 1], [0, 1, 1], [1, 0, 1], [1, 1, 1]])
    y = np.array([[0, 0, 1, 1]]).T
    # Initialize weights uniformly in [-1, 1).
    W0 = 2 * np.random.random((3, 4)) - 1
    W1 = 2 * np.random.random((4, 1)) - 1
    for i in range(10000):
        # Forward pass.
        v1 = sigmoid(np.dot(X, W0))
        v2 = sigmoid(np.dot(v1, W1))
        # Backward pass: delta = error * sigmoid derivative at each layer.
        l2_error = (y - v2) * (v2 * (1 - v2))
        l1_error = l2_error.dot(W1.T) * (v1 * (1 - v1))
        # Gradient-descent updates on the squared-error loss (learning rate 1).
        W1 += v1.T.dot(l2_error)
        W0 += X.T.dot(l1_error)
    return W0, W1


if __name__ == '__main__':
    W0, W1 = simpleNN()
    print(W0)
    print(W1)
    X = np.array([[0, 0, 1], [0, 1, 1], [1, 0, 1], [1, 1, 1]])
    print(ForwardComputation(X, W0, W1))
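
As a sanity check on the update rule, the term v1.T.dot(l2_error) used in simpleNN() is the negative gradient of the squared-error loss L = 0.5 * sum((y - v2)**2) with respect to W1, so adding it performs gradient descent with a learning rate of 1. The sketch below is an illustrative numerical gradient check added here (not part of the original gist); it assumes sigmoid and ForwardComputation from the script above are defined in the same session.

def squared_error_loss(X, y, W0, W1):
    """Squared-error loss of the current weights on the training data."""
    v2 = ForwardComputation(X, W0, W1)
    return 0.5 * np.sum((y - v2) ** 2)

def check_W1_gradient(X, y, W0, W1, eps=1e-5):
    """Return the max difference between the analytic and numerical dL/dW1."""
    # Analytic gradient, reproducing the l2_error term from simpleNN().
    v1 = sigmoid(np.dot(X, W0))
    v2 = sigmoid(np.dot(v1, W1))
    l2_error = (y - v2) * (v2 * (1 - v2))
    analytic = -v1.T.dot(l2_error)  # dL/dW1; note the sign flip vs. the update
    # Numerical gradient via central differences, one entry of W1 at a time.
    numeric = np.zeros_like(W1)
    for i in range(W1.shape[0]):
        for j in range(W1.shape[1]):
            W1[i, j] += eps
            loss_plus = squared_error_loss(X, y, W0, W1)
            W1[i, j] -= 2 * eps
            loss_minus = squared_error_loss(X, y, W0, W1)
            W1[i, j] += eps
            numeric[i, j] = (loss_plus - loss_minus) / (2 * eps)
    return np.max(np.abs(analytic - numeric))

With freshly initialized weights, the returned difference should be close to zero (on the order of 1e-8 or smaller), which confirms that the hand-coded backpropagation step matches the true gradient.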