Michel Kana (michelkana)

from keras.models import Sequential
from keras.layers import Dense
from sklearn.metrics import accuracy_score

# a single sigmoid neuron, i.e. logistic regression in Keras
model = Sequential()
model.add(Dense(1, activation='sigmoid', input_dim=1))
model.compile(optimizer='sgd', loss='binary_crossentropy', metrics=['acc'])

# x is the 1-D predictor array, y the binary target array
history = model.fit(x, y, batch_size=1, epochs=100, shuffle=True, verbose=0)
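The accuracy_score import above is otherwise unused in this fragment; a minimal sketch of how the fitted model could be evaluated, where the synthetic x and y are assumptions rather than part of the original gist:

import numpy as np
# hypothetical data: points left of 0 labelled 0, right of 0 labelled 1
x = np.random.uniform(-3, 3, 200)
y = (x > 0).astype(int)
# after fitting as above, threshold the predicted probabilities at 0.5
y_pred = (model.predict(x.reshape(-1, 1)) > 0.5).astype(int).ravel()
print('accuracy:', accuracy_score(y, y_pred))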
import numpy as np
from math import cos, sin, pi
import matplotlib.pyplot as plt

# function for creating nb points randomly scattered around a given center (x0, y0)
def get_cloud(x0, y0, nb):
    radius = np.random.normal(0, 1, nb)
    angle = np.random.uniform(0, 2*pi, nb)
    x = np.array([x0 + r*cos(angle[i]) for i, r in enumerate(radius)])
    y = np.array([y0 + r*sin(angle[i]) for i, r in enumerate(radius)])
    return x, y
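A sketch of how the point clouds used by the plotting loop further down could be assembled from get_cloud; the specific centers, markers and colors are illustrative assumptions:

# each cloud carries the 'label', 'marker' and 'color' keys expected by the plotting loop below
clouds = [
    {'label': 0, 'center': (-2, 0), 'marker': 'o', 'color': 'blue'},
    {'label': 1, 'center': (0, 3),  'marker': 's', 'color': 'green'},
    {'label': 2, 'center': (2, 0),  'marker': '^', 'color': 'red'},
]
x, y, labels = [], [], []
for cloud in clouds:
    cx, cy = cloud['center']
    xc, yc = get_cloud(cx, cy, 100)
    x.extend(xc)
    y.extend(yc)
    labels.extend([cloud['label']] * 100)
x, y, labels = np.array(x), np.array(y), np.array(labels)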
import keras
from keras.models import Sequential
from keras.layers import Dense, Input
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
# turn the x,y coordinates into a predictors matrix
X = np.array([x, y]).T
# turn the labels into 1-hot encodings
Y = to_categorical(labels)
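The classifier model_cl used below is not defined in this fragment; a minimal sketch of what it could look like, reusing the train_test_split and accuracy_score imports above (layer sizes and optimizer are assumptions):

X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2)
model_cl = Sequential()
model_cl.add(Dense(8, activation='relu', input_dim=2))   # hidden layer size chosen for illustration
model_cl.add(Dense(Y.shape[1], activation='softmax'))    # one output unit per class
model_cl.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['acc'])
model_cl.fit(X_train, Y_train, epochs=100, verbose=0)
y_test_pred = np.argmax(model_cl.predict(X_test), axis=1)
print('test accuracy:', accuracy_score(np.argmax(Y_test, axis=1), y_test_pred))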
# create new random points
X_new = np.zeros((100,2))
X_new[:,0] = np.random.uniform(-3, 3, 100)
X_new[:,1] = np.random.uniform(-1, 4, 100)
Y_new = model_cl.predict_proba(X_new)
Y_new_label = np.argmax(Y_new, axis=1)
for cloud in clouds:
    i = (Y_new_label == cloud['label'])
    plt.scatter(X_new[i][:, 0], X_new[i][:, 1], marker=cloud['marker'], color=cloud['color'], label=cloud['label'])
plt.legend()
plt.show()
import keras
from keras.layers import Input, Dense
from keras.models import Model
from keras.datasets import mnist
import numpy as np
# input layer
input_img = Input(shape=(784,))
# autoencoder
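The autoencoder itself is missing from this fragment; a minimal dense architecture consistent with the 784-dimensional input above, where the 32-unit bottleneck and the optimizer are assumptions:

encoded = Dense(32, activation='relu')(input_img)     # compress the 784 pixels into 32 features
decoded = Dense(784, activation='sigmoid')(encoded)   # reconstruct the 784 pixels
autoencoder = Model(input_img, decoded)
autoencoder.compile(optimizer='adadelta', loss='binary_crossentropy')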
import matplotlib.pyplot as plt
import random
%matplotlib inline
# get MNIST images, clean and with added noise
def get_mnist(noise_factor=0.5):
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    x_train = x_train.astype('float32') / 255.
    x_test = x_test.astype('float32') / 255.
    # add Gaussian noise (assumed noise model) and keep pixel values in [0, 1]
    x_train_noisy = np.clip(x_train + noise_factor * np.random.normal(0., 1., x_train.shape), 0., 1.)
    x_test_noisy = np.clip(x_test + noise_factor * np.random.normal(0., 1., x_test.shape), 0., 1.)
    # flatten the 28x28 images into vectors of size 784
    x_train = x_train.reshape((len(x_train), np.prod(x_train.shape[1:])))
    x_test = x_test.reshape((len(x_test), np.prod(x_test.shape[1:])))
    x_train_noisy = x_train_noisy.reshape((len(x_train_noisy), np.prod(x_train_noisy.shape[1:])))
    x_test_noisy = x_test_noisy.reshape((len(x_test_noisy), np.prod(x_test_noisy.shape[1:])))
    return x_train, x_test, x_train_noisy, x_test_noisy, y_train, y_test
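A possible call pattern for the helper above, matching the return order assumed in the sketch:

x_train, x_test, x_train_noisy, x_test_noisy, y_train, y_test = get_mnist(noise_factor=0.5)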
# training on the noisy images, with the clean images as reconstruction targets
history = autoencoder.fit(x_train_noisy, x_train,
                          epochs=100,
                          batch_size=128)
# plot de-noised images
def plot_mnist_predict(x_test, x_test_noisy, autoencoder, y_test, labels=[]):
    # optionally restrict the plot to a subset of digit classes
    if len(labels) > 0:
        x_test = x_test[np.isin(y_test, labels)]
        x_test_noisy = x_test_noisy[np.isin(y_test, labels)]
    # de-noise by feeding the noisy images through the autoencoder
    decoded_imgs = autoencoder.predict(x_test_noisy)
    n = 10
    plt.figure(figsize=(20, 4))
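    # (the preview stops here; one possible continuation, with the three-row layout being an assumption)
    for i in range(n):
        # top row: original, middle row: noisy, bottom row: de-noised
        for row, imgs in enumerate([x_test, x_test_noisy, decoded_imgs]):
            ax = plt.subplot(3, n, i + 1 + row * n)
            plt.imshow(imgs[i].reshape(28, 28), cmap='gray')
            ax.axis('off')
    plt.show()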
from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D
from keras.models import Model
from keras import backend as K
import matplotlib.pyplot as plt

def get_ae_cnn(nb_filter=32, kernel_size=3, max_pooling_size=2):
    input_img = Input(shape=(28, 28, 1))
    x = Conv2D(nb_filter, (kernel_size, kernel_size), activation='relu', padding='same')(input_img)
    x = MaxPooling2D((max_pooling_size, max_pooling_size), padding='same')(x)
    x = Conv2D(nb_filter, (kernel_size, kernel_size), activation='relu', padding='same')(x)
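    # (preview truncated; a possible decoder mirroring the encoder above, where the
    #  remaining pooling, upsampling and output layers are assumptions)
    x = MaxPooling2D((max_pooling_size, max_pooling_size), padding='same')(x)
    # decoder: two upsampling steps restore the 28x28 resolution
    x = Conv2D(nb_filter, (kernel_size, kernel_size), activation='relu', padding='same')(x)
    x = UpSampling2D((max_pooling_size, max_pooling_size))(x)
    x = Conv2D(nb_filter, (kernel_size, kernel_size), activation='relu', padding='same')(x)
    x = UpSampling2D((max_pooling_size, max_pooling_size))(x)
    decoded = Conv2D(1, (kernel_size, kernel_size), activation='sigmoid', padding='same')(x)
    autoencoder = Model(input_img, decoded)
    autoencoder.compile(optimizer='adadelta', loss='binary_crossentropy')
    return autoencoder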
from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D
from keras.models import Model

nb_filter = 32
kernel_size = 3
# the input layer is not shown in this fragment; a 28x28x1 image input is assumed as above
input_img = Input(shape=(28, 28, 1))
x = Conv2D(nb_filter, (kernel_size, kernel_size), activation='relu', padding='same')(input_img)