@lobrien
Created August 13, 2019 22:29
Keras XOR
import numpy as np
from keras.models import Sequential
from keras.layers import Activation, Dense
from keras.optimizers import SGD
# Allocate the input and output arrays
X = np.zeros((4, 2), dtype='uint8')
y = np.zeros(4, dtype='uint8')
# Training data: X[i] -> y[i]
X[0] = [0, 0]
y[0] = 0
X[1] = [0, 1]
y[1] = 1
X[2] = [1, 0]
y[2] = 1
X[3] = [1, 1]
y[3] = 0
# Create a 2 (input) : 2 (hidden) : 1 (output) model with sigmoid activations
model = Sequential()
model.add(Dense(2, input_dim=2))
model.add(Activation('sigmoid'))
model.add(Dense(1))
model.add(Activation('sigmoid'))
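# Optionally, print the architecture; model.summary() is standard Keras API
# and should report 9 trainable parameters for this 2-2-1 network
# (6 in the hidden layer, 3 in the output layer).
model.summary()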
# Train using stochastic gradient descent
sgd = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='mean_squared_error', optimizer=sgd)
# Run through the data `epochs` times
history = model.fit(X, y, epochs=10000, batch_size=4, verbose=0)
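# Optionally, check how well training converged; history.history['loss'] holds
# the per-epoch loss values (standard Keras History API), so the last entry
# should be close to zero if the network has learned XOR.
print('Final loss:', history.history['loss'][-1])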
# Test the result (uses the same X as was used for training); the outputs
# should be close to [0, 1, 1, 0] if training converged
print(model.predict(X))
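# As a quick sanity check (thresholding at 0.5 is a choice made here, not part
# of the original gist), convert the sigmoid outputs to binary predictions and
# compare them against the training labels.
predictions = (model.predict(X) > 0.5).astype('uint8').flatten()
print('Predicted:', predictions, 'Expected:', y)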