Created
July 18, 2019 19:48
-
-
Save michelkana/02c951838a2644588d9b8a7948ebc849 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Flatten each 28x28 image into a 784-dimensional vector so the
# dense autoencoder can consume it. reshape(n, -1) infers the
# flattened size (28 * 28 = 784) from the remaining axes.
x_train = x_train.reshape(len(x_train), -1)
x_test = x_test.reshape(len(x_test), -1)
x_train_noisy = x_train_noisy.reshape(len(x_train_noisy), -1)
x_test_noisy = x_test_noisy.reshape(len(x_test_noisy), -1)
# Training: the denoising autoencoder learns to map noisy inputs back
# to their clean counterparts; held-out noisy/clean pairs are used for
# validation after each epoch.
fit_options = dict(
    epochs=100,
    batch_size=128,
    shuffle=True,
    validation_data=(x_test_noisy, x_test),
)
history = autoencoder.fit(x_train_noisy, x_train, **fit_options)
# plot training performance
def plot_training_loss(history):
    """Plot training vs. validation loss per epoch from a Keras History.

    Training loss is drawn as blue dots, validation loss as a solid
    red line; the figure is shown immediately via plt.show().
    """
    curves = history.history
    train_loss = curves['loss']
    valid_loss = curves['val_loss']
    epoch_axis = range(1, len(train_loss) + 1)
    plt.plot(epoch_axis, train_loss, 'bo', label='Training loss')
    plt.plot(epoch_axis, valid_loss, 'r', label='Validation loss')
    plt.title('Training and validation loss')
    plt.xlabel('Epochs')
    plt.ylabel('Loss')
    plt.legend()
    plt.show()
plot_training_loss(history) | |
Sign up for free to join this conversation on GitHub.
Already have an account?
Sign in to comment