Skip to content

Instantly share code, notes, and snippets.

View netsatsawat's full-sized avatar

Satsawat Natakarnkitkul (Net) netsatsawat

View GitHub Profile
@netsatsawat
netsatsawat / model_predict.py
Created June 18, 2019 06:27
Use the network to predict on the test data generator
# Predict classes for the held-out test set and map each prediction back to
# its source image file and ground-truth class directory.
STEP_SIZE_TEST = testing_generator.n // testing_generator.batch_size
testing_generator.reset()  # restart at the first batch so filenames align with predictions
predictions = malaria_model.predict_generator(testing_generator,
                                              steps=STEP_SIZE_TEST,
                                              verbose=1)
# BUG FIX: `predicted_class` was used below but never defined. Take the
# arg-max over the softmax probabilities to get the winning class index.
predicted_class = predictions.argmax(axis=1)
img_name = testing_generator.filenames
results = pd.DataFrame({'img_name': img_name,
                        'prediction': predicted_class})
# Filenames look like '<class_dir>\\<file>' (Windows separator); the first
# path component is the class directory, used here as the reference label.
_this = results.img_name.str.split('\\', n=1, expand=True)
results['reference'] = _this[0]
@netsatsawat
netsatsawat / train_network.py
Created June 18, 2019 06:16
Train the malaria model
# Train the malaria model (TF1-style: manual session + variable init).
# NOTE(review): this snippet appears truncated by the page scrape — the
# fit_generator(...) call below is missing its trailing arguments and
# closing parenthesis.
sess = K.get_session()
init = tf.global_variables_initializer()
# Initialize all graph variables before any training step runs.
sess.run(init)
# One step consumes one batch; floor division drops the final partial batch.
STEP_SIZE_TRAIN = training_generator.samples // training_generator.batch_size
STEP_SIZE_VALID = validatation_generator.n // validatation_generator.batch_size
# Custom callback defined elsewhere — presumably collects per-epoch metrics;
# verify against its definition.
training_stats = CollectStats()
history = malaria_model.fit_generator(training_generator,
epochs=50,
steps_per_epoch=STEP_SIZE_TRAIN,
validation_data=validatation_generator,
# Assemble the classifier: the pretrained feature-extractor layer followed
# by a softmax head sized to the number of classes seen by the generator.
classification_head = layers.Dense(training_generator.num_classes,
                                   activation='softmax')
malaria_model = tf.keras.Sequential([features_extractor_layer,
                                     classification_head])
malaria_model.summary()  # print the architecture and parameter counts

# RMSprop with a small learning rate; categorical cross-entropy matches the
# one-hot labels emitted by the data generator.
rmsprop_opt = tf.keras.optimizers.RMSprop(lr=0.001)
malaria_model.compile(optimizer=rmsprop_opt,
                      loss='categorical_crossentropy',
                      metrics=['acc'])
"""
@netsatsawat
netsatsawat / data_gen.py
Created June 18, 2019 05:53
Used to demonstrate the ImageDataGenerator class and the flow_from_directory() method
# Build the three image pipelines: the training generator augments the data
# (rotation/zoom/shift/flip); validation and test only rescale pixels to [0, 1].
# NOTE(review): this snippet appears truncated — the flow_from_directory(...)
# call at the end is missing its remaining arguments and closing parenthesis.
training_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1./255.,
rotation_range=30,
zoom_range=0.3,
width_shift_range=0.2,
height_shift_range=0.2,
fill_mode='nearest',
horizontal_flip=True)
validation_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1./255.)
testing_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1./255.)
training_generator = training_datagen.flow_from_directory('../data/Train/',
@netsatsawat
netsatsawat / load_pretrain.py
Created June 18, 2019 04:20
For loading pretrained feature extractor - Malaria dataset
# TF-Hub MobileNetV2 (width 1.4, 224x224 input) feature-vector module.
feature_extractor_url = "https://tfhub.dev/google/imagenet/mobilenet_v2_140_224/feature_vector/3"


def feature_extractor(x):
    """Run the TF-Hub feature-extractor module on a batch of images ``x``."""
    return hub.Module(feature_extractor_url)(x)


# Ask the module itself which input resolution it expects.
IMAGE_SIZE = hub.get_expected_image_size(hub.Module(feature_extractor_url))
print('Expected size : %s' % IMAGE_SIZE)
# Expected size : [224, 224]
@netsatsawat
netsatsawat / CNN_model_architecture.py
Created May 28, 2019 15:43
Part 2 of the script to build the CNN architecture and callbacks function
# Seed NumPy and TensorFlow for reproducible initialization and shuffling.
np.random.seed(SEED)
tf.random.set_random_seed(SEED)

# Convolutional stack for single-channel (grayscale) images: two 3x3 conv
# layers, pool, one deeper conv block, pool, then flatten into a dense layer.
# (Further layers may be appended to this model elsewhere.)
model = Sequential([
    Conv2D(32, kernel_size=(3, 3), activation='relu',
           input_shape=(IMG_ROWS, IMG_COLS, 1)),
    Conv2D(32, (3, 3), activation='relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Conv2D(64, (3, 3), activation='relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Flatten(),
    Dense(128, activation='relu'),
])
@netsatsawat
netsatsawat / CNN_prep.py
Created May 28, 2019 09:41
First part of CNN tutorial on KMNIST data set
# Imports for the CNN tutorial (TF1-era Keras bundled inside TensorFlow).
import tensorflow as tf
from tensorflow.python import keras
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.layers import Dense, Flatten, Conv2D, Dropout
from tensorflow.python.keras.layers import MaxPooling2D, BatchNormalization
# NOTE(review): the next two imports come from the *standalone* keras
# package while everything else uses tensorflow.python.keras — mixing the
# two distributions can produce incompatible layer/model classes. Confirm
# they are only used for plotting/visualization utilities.
from keras.utils.vis_utils import model_to_dot
from keras.utils import plot_model
from tensorflow.python.keras.callbacks import ModelCheckpoint
from tensorflow.keras.callbacks import EarlyStopping
from sklearn.model_selection import train_test_split
@netsatsawat
netsatsawat / logistic_regression.py
Created May 19, 2019 20:13
Tune hyperparameters using grid search CV on logit
# Grid-search the logistic-regression hyperparameters with 5-fold CV
# (F1 scoring), then refit on the winning combination and evaluate via the
# shared utility helper (coefficient view).
cv_params = {
    'C': [0.001, 0.01, 0.1, 1., 10., 100.],
    'penalty': ['l1', 'l2'],
    'class_weight': [None, 'balanced'],
}
fix_params = {'random_state': SEED}  # held fixed for every CV candidate

log_cv_1 = GridSearchCV(LogisticRegression(**fix_params), cv_params,
                        scoring='f1', cv=5)
log_cv_1.fit(X_train, y_train)

# Rebuild a fresh classifier from the fixed params plus the CV winners.
best_settings = {**fix_params, **log_cv_1.best_params_}
log_clf_all = LogisticRegression(**best_settings)
_ = myUtilityFunction.prediction_evaluation(log_clf_all, X_train, X_test,
                                            y_train, y_test,
                                            X_train.columns, "coefficients")
@netsatsawat
netsatsawat / hyperparameter_tuning_xgboost.py
Last active May 19, 2019 18:38
Code snippet to optimize the hyperparameters of XGBoost algorithm
# Randomized hyperparameter search space for an XGBoost classifier.
# NOTE(review): this snippet appears truncated by the page scrape — the
# `params` dict below is not closed and the RandomizedSearchCV call that
# presumably consumes it is not visible.
from sklearn.model_selection import RandomizedSearchCV
xgb_clf = xgboost.XGBClassifier(random_state=SEED, n_jobs=-1)
params = {'n_estimators': [50, 100, 200, 300],
'learning_rate': [0.01, 0.05, 0.1, 0.15],
'min_child_weight': [1, 2, 3, 5, 10],
'gamma': [0.1, 0.2, 0.3, 0.4, 0.5, 1],
'subsample': [0.6, 0.7, 0.8],
'colsample_bytree': [0.6, 0.7, 0.8],
'max_depth': [3, 4, 5],
@netsatsawat
netsatsawat / xgboost_classifier.py
Created May 19, 2019 18:20
Code snippet for xgboost and evaluating the performance
# Baseline XGBoost classifier with hand-picked hyperparameters, evaluated
# through the shared utility helper (feature-importance view).
xgb_clf = xgboost.XGBClassifier(n_estimators=100, max_depth=3,
                                learning_rate=0.1, random_state=SEED,
                                n_jobs=-1)
_ = myUtilityFunction.prediction_evaluation(xgb_clf, X_train, X_test,
                                            y_train, y_test,
                                            X_train.columns, "features")