Keras neural network example
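A small Keras example: a dense 2 x 1024 x 1024 x 2 network is trained to learn the checkerboard-like pattern defined by sin(4*pi*x)*sin(4*pi*y) > 0 on the unit square, and its decision regions are then plotted together with a sample of the generated points.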
# standard libraries in Anaconda Python
import numpy as np
import matplotlib.pyplot as plt
# Keras imports - requires the keras and tensorflow packages
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.optimizers import RMSprop
def f(x, y):
    '''auxiliary function to categorize points into blue and orange dots'''
    return np.sin(x*np.pi*4)*np.sin(y*np.pi*4) > 0
# how many of the generated points should be overlaid on the final plot
Nshow = 10000
# how many (x, y) points should be generated (half for training, half for testing)
N = 100000
# resolution of the grid on which the network output is visualized (M x M)
M = 1024
batch_size = 512
num_classes = 2
epochs = 50
X = np.random.uniform(0, 1, size=(N, 2))
Y = np.array([[f(x, y), not f(x, y)] for x, y in X]).astype('float32')
# half of the data is used for training, half for testing
x_train = X[:len(X)//2]
x_test = X[len(X)//2:]
y_train = Y[:len(Y)//2]
y_test = Y[len(Y)//2:]
# build a dense network with layer sizes 2 x 1024 x 1024 x 2
model = Sequential()
model.add(Dense(1024, activation='relu', input_shape=(2,)))
model.add(Dropout(0.2))
model.add(Dense(1024, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(num_classes, activation='softmax'))
model.summary()
model.compile(loss='categorical_crossentropy',
              optimizer=RMSprop(),
              metrics=['accuracy'])
# training time
history = model.fit(x_train, y_train,
                    batch_size=batch_size,
                    epochs=epochs,
                    validation_data=(x_test, y_test))
# evaluate how well the trained network does on the test set
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
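# As a side note, hard class labels could be read off the softmax output with argmax,
# e.g. (illustrative variable name): predicted_classes = model.predict(x_test).argmax(axis=1)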
data = [[[x, y] for x in np.linspace(0, 1, M)]
        for y in np.linspace(0, 1, M)]
data = np.array(data).reshape((-1, 2))
# query the network on the grid to see where it expects points of the first class to show up
result = model.predict(data)
result = result[:, 0]  # probability of the first class
# plot the network's predictions as an image
plt.imshow(result.reshape(M, M))
# split a sample of the generated points into the two classes again, this time for plotting
X_red = [[x, y] for x, y in X[::N//Nshow] if f(x, y)]
X_blue = [[x, y] for x, y in X[::N//Nshow] if not f(x, y)]
X_red = np.array(X_red).T
X_blue = np.array(X_blue).T
# overlay this sample of the generated points on top of the predictions
plt.scatter(X_red[0]*M, X_red[1]*M, s=1)
plt.scatter(X_blue[0]*M, X_blue[1]*M, s=1)
# show the final visualization
plt.show()
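# Optionally, the training curves stored in `history` can be plotted as well; a minimal
# sketch, assuming only that Keras stores the accuracy under 'accuracy' in recent
# releases and 'acc' in older ones.
acc = history.history.get('accuracy', history.history.get('acc'))
val_acc = history.history.get('val_accuracy', history.history.get('val_acc'))
plt.plot(acc, label='train accuracy')
plt.plot(val_acc, label='validation accuracy')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
plt.show()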