Eric Muccino (emuccino)

  • Mindboard
#initialize our generators, specifying data directories, batch size, and dimension threshold
train_image_directory = 'imagenette2/train'
test_image_directory = 'imagenette2/val'
n_classes = 10
batch_size = 16
max_dimension = 512
#create generators for training and testing
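The preview ends before the generators themselves are built. A minimal sketch of what that setup could look like, assuming Keras' ImageDataGenerator with a square resize to max_dimension; the original gist may instead use a custom generator that only caps the larger image side at the dimension threshold:

from tensorflow.keras.preprocessing.image import ImageDataGenerator

#assumption: plain ImageDataGenerator; the original may use a custom generator
#that preserves aspect ratio up to max_dimension instead of forcing a square resize
image_generator = ImageDataGenerator(rescale=1./255)

#imagenette2 has n_classes (10) class subfolders, which flow_from_directory picks up automatically
train_generator = image_generator.flow_from_directory(
    train_image_directory,
    target_size=(max_dimension, max_dimension),
    batch_size=batch_size,
    class_mode='categorical')

test_generator = image_generator.flow_from_directory(
    test_image_directory,
    target_size=(max_dimension, max_dimension),
    batch_size=batch_size,
    class_mode='categorical')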
emuccino / loan_data_clean.py
Last active April 13, 2020 08:01
Load and clean Loan data
import numpy as np
import pandas as pd
#import loan dataset
df = pd.read_csv('loan.csv').dropna(axis=1,how='any')
#convert loan grades to numerical values
df['sub_grade'] = df['sub_grade'].str.slice(start=1).astype(int)
grade_dict = {k:i for i,k in enumerate(['A', 'B', 'C', 'D', 'E', 'F', 'G'])}
term_dict = {k:i for i,k in enumerate(['36 months', '60 months'])}
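The preview cuts off before these lookup tables are used; presumably (an assumption, not shown in the gist) they encode the corresponding columns, roughly like this:

#assumption: map the categorical grade and term columns to integers using the dicts above
df['grade'] = df['grade'].map(grade_dict)
df['term'] = df['term'].str.strip().map(term_dict)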
emuccino / loan_data_preprocess.py
Last active April 13, 2020 09:35
Preprocess Loan data for training
from sklearn.preprocessing import MinMaxScaler
#specify target label
target = 'loan_condition'
#all feature columns except the target
data = [name for name in df.columns if name != target]
#containers for numeric/string feature names and the string tokenizers
numeric_data = []
string_data = []
tokenizers = {}
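The preview stops at the empty containers. A sketch of how the column split, scaling, and tokenizer construction might proceed; the encoding details and the train/test split are assumptions (train_df is referenced later in the gists, so a split of some form exists in the original):

#assumption: split features by dtype, min-max scale the numeric ones, and build
#simple value-to-index tokenizers for the string ones
for name in data:
    if df[name].dtype == object:
        string_data.append(name)
        tokenizers[name] = {value: i for i, value in enumerate(df[name].unique())}
    else:
        numeric_data.append(name)

scaler = MinMaxScaler()
df[numeric_data] = scaler.fit_transform(df[numeric_data])

#assumption: a simple train/test split producing the train_df used later
train_df = df.sample(frac=0.8, random_state=0)
test_df = df.drop(train_df.index)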
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Dense, BatchNormalization, Concatenate, GaussianNoise
from tensorflow.keras.optimizers import Nadam

#define latent dimension size
latent_dim = int(np.ceil(np.log(len(train_df)*len(data))))

#function for building the generator network
def compile_generator():
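    #the gist preview truncates the body here; the sketch below shows how it might continue.
    #the layer widths, noise level, and per-feature output heads are assumptions, not the
    #original architecture; only the input names 'latent' and 'target' are inferred from
    #the batch-generating snippet further down
    latent_input = Input(shape=(latent_dim,), name='latent')
    target_input = Input(shape=(2,), name='target')
    net = Concatenate()([latent_input, target_input])
    net = GaussianNoise(0.01)(net)
    for units in (64, 64):
        net = Dense(units, activation='relu')(net)
        net = BatchNormalization()(net)
    #one sigmoid head per min-max scaled numeric feature,
    #one softmax head per tokenized string feature
    outputs = [Dense(1, activation='sigmoid', name=name)(net) for name in numeric_data]
    outputs += [Dense(len(tokenizers[name]), activation='softmax', name=name)(net)
                for name in string_data]
    generator = Model([latent_input, target_input], outputs)
    #generator weights are trained through the combined GAN model (where Nadam would be used)
    return generator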
emuccino / loan_data_get_data.py
Last active April 13, 2020 09:34
Batch generating functions
from tensorflow.keras.utils import to_categorical

#function for generating latent samples of synthetic data for generator training
def generate_latent_samples(n):
    #generate latent vectors with balanced targets
    x = {'latent': np.random.normal(size=(n, latent_dim)),
         'target': to_categorical(np.hstack([np.array([_x for _ in range(n//2)]) for _x in range(2)]), 2)}
    #outputs indicating positive discrimination (target value)
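The companion function that draws real rows is not in the preview. A hedged sketch of what it might look like; generate_real_samples is a hypothetical name, and the one-hot handling of string features and the 0/1 target encoding are assumptions:

#hypothetical counterpart for sampling real training rows (name and details assumed)
def generate_real_samples(n):
    sample = train_df.sample(n)
    x = {name: sample[name].values for name in numeric_data}
    #assumed: tokenized string features fed to the networks as one-hot vectors
    x.update({name: to_categorical(sample[name].map(tokenizers[name]), len(tokenizers[name]))
              for name in string_data})
    #assumed: target already encoded as 0/1
    x['target'] = to_categorical(sample[target].values, 2)
    return x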
import itertools
import matplotlib.pyplot as plt

#train the GAN for n_epochs, logging losses and periodically plotting/evaluating
def train_gan(n_epochs, n_batch, n_plot, n_eval):
    #discriminator/generator training logs
    disc_loss_hist = []
    gen_loss_hist = []
    for epoch in range(n_epochs):
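The body of the epoch loop is cut off in the preview. A hedged sketch of a single batch update, written as a standalone helper; discriminator, generator, gan, and generate_real_samples are assumed names, not taken from the gist:

#hypothetical per-batch update; model names and label conventions are assumptions
def train_gan_step(n_batch):
    x_real = generate_real_samples(n_batch)
    x_latent = generate_latent_samples(n_batch)
    #the generator emits one array per feature; rebuild the feature dict for the discriminator
    x_synthetic = dict(zip(numeric_data + string_data, generator.predict(x_latent)))
    x_synthetic['target'] = x_latent['target']
    #discriminator: real rows labeled 1, synthetic rows labeled 0
    disc_loss = discriminator.train_on_batch(x_real, np.ones((n_batch, 1)))
    disc_loss += discriminator.train_on_batch(x_synthetic, np.zeros((n_batch, 1)))
    #generator update through the combined model with the discriminator frozen
    gen_loss = gan.train_on_batch(x_latent, np.ones((n_batch, 1)))
    return disc_loss / 2, gen_loss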
emuccino / loan_data_classifier.py
Last active April 13, 2020 08:28
Train classifiers
#function for building the classifier (very similar to the discriminator)
def compile_classifier():
    inputs = {}
    numeric_nets = []
    string_nets = []
    for name in numeric_data:
        numeric_input = Input(shape=(1,), name=name)
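        #the preview truncates the body here; the sketch below shows how it might continue.
        #the one-hot string inputs, layer widths, and compile settings are assumptions,
        #reusing only layers already imported in the earlier snippets
        inputs[name] = numeric_input
        numeric_nets.append(BatchNormalization()(numeric_input))
    #assumed: tokenized string features arrive as one-hot vectors
    for name in string_data:
        string_input = Input(shape=(len(tokenizers[name]),), name=name)
        inputs[name] = string_input
        string_nets.append(Dense(8, activation='relu')(string_input))
    net = Concatenate()(numeric_nets + string_nets)
    net = Dense(64, activation='relu')(net)
    net = BatchNormalization()(net)
    output = Dense(2, activation='softmax', name='target')(net)
    classifier = Model(inputs, output)
    classifier.compile(loss='categorical_crossentropy', optimizer=Nadam(), metrics=['accuracy'])
    return classifier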
from tensorflow.keras.datasets import mnist

#load MNIST data
(x_train, y_train), (x_test, y_test) = mnist.load_data()
#add a channel axis and scale pixel values to [0, 1]
x_train, x_test = (x.reshape(*x.shape, 1)/x.max() for x in (x_train, x_test))
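A quick shape check after the reshape/scale step; the values follow from the standard MNIST split:

print(x_train.shape, x_test.shape)   #(60000, 28, 28, 1) (10000, 28, 28, 1)
print(x_train.min(), x_train.max())  #0.0 1.0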
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Conv2D, Dense, GlobalAvgPool2D, GlobalMaxPool2D, Concatenate, BatchNormalization
from tensorflow.keras.losses import SparseCategoricalCrossentropy

#function for compiling a single device model
def compile_device_model(input_shape=None, n_filters=None, name=None, offload=False):
    outputs = {}
    inputs = Input(shape=input_shape)
    net = inputs
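    #the preview truncates the body here; the sketch below shows how it might continue,
    #consistent with how the models are stacked in the next snippet. the filter counts,
    #pooled head, and 10-class output are assumptions beyond what the snippets show
    net = Conv2D(n_filters, 3, padding='same', activation='relu')(net)
    net = BatchNormalization()(net)
    net = Conv2D(n_filters, 3, strides=2, padding='same', activation='relu')(net)
    #per-device classification head from pooled features
    pooled = Concatenate()([GlobalAvgPool2D()(net), GlobalMaxPool2D()(net)])
    outputs[name + '_outputs'] = Dense(10, activation='softmax', name=name + '_outputs')(pooled)
    #expose the intermediate feature map so the next device can continue the computation
    if offload:
        outputs['offload'] = net
    return Model(inputs, outputs, name=name)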
outputs = {}
#complete model input
inputs = Input(shape=x_train.shape[1:])
net = {'offload': inputs}
#stack all 3 device models together
for device in device_names:
    net = device_models[device](net['offload'])
    outputs[device] = net[device+'_outputs']
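The preview ends mid-assembly. A hedged sketch of finishing and training the stacked model; device_names and device_models come from the snippet above, while the optimizer, epoch count, and batch size are assumptions:

#assumption: compile with one sparse-categorical loss per device output and train end to end
complete_model = Model(inputs, outputs)
complete_model.compile(optimizer='adam',
                       loss={device: SparseCategoricalCrossentropy() for device in device_names},
                       metrics=['accuracy'])
complete_model.fit(x_train,
                   {device: y_train for device in device_names},
                   validation_data=(x_test, {device: y_test for device in device_names}),
                   epochs=10, batch_size=128)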