Skip to content

Instantly share code, notes, and snippets.

View sadimanna's full-sized avatar
🎯
Focusing

Siladittya Manna sadimanna

🎯
Focusing
View GitHub Profile
# Zoomed inset (lower-right, loc=4) magnifying the cluster around (15, 28).
# NOTE(review): paste had lost the loop-body indentation below; restored here.
axins2 = zoomed_inset_axes(ax, zoom=50, loc=4)
axins2.plot(y, datapts.T)
axins2.plot(y, datapts.T, 'mo')
# SPECIFY THE LIMITS
x1, x2, y1, y2 = 14.95, 15.05, 27.95, 28.05
axins2.set_xlim(x1, x2)
axins2.set_ylim(y1, y2)
#ANNOTATE THE MIDDLE TWO POINTS
# loc values
# 2---8---1
# |   |   |
# 6---10--5/7
# |   |   |
# 3---9---4
# Second inset (upper-left, loc=2) at a lower zoom level.
axins1 = zoomed_inset_axes(ax, zoom=5, loc=2)
axins1.plot(y, datapts.T)
axins1.plot(y, datapts.T, 'mo')
# Label every data point with its value, 3 decimal places.
# assumes datapts.T rows pair element-wise with the x positions in y — TODO confirm
for dpts, ypt in zip(datapts.T, y):
    for dpt in dpts:
        ax.annotate("%.3f"%(dpt), xy=(ypt, dpt), textcoords='data')
# Build the main 10x10-inch figure: lines plus yellow point markers,
# with a legend titled by current and custom resistance ticks on x.
fig, ax = plt.subplots(figsize=(10, 10))

ax.plot(y, datapts.T)
ax.plot(y, datapts.T, 'yo')

ax.legend(xlabs, title='CURRENT (A)', loc='upper right')
ax.set_xlabel('RESISTANCE (OHM)')
ax.set_ylabel('VOLTAGE (V)')

# Place ticks at the data x positions and label them explicitly.
ax.set_xticks(ticks=y)
ax.set_xticklabels(labels=ylabs)
ax.set_xlim([5, 30])
# Tick positions and their string labels (labels derived from the values).
x = [5, 10, 15, 20]
xlabs = [str(v) for v in x]
y = [10, 15, 20, 25]
ylabs = [str(v) for v in y]
# "Accuracy"
# Plot train/validation binary accuracy per epoch from the Keras history.
for key in ('binary_accuracy', 'val_binary_accuracy'):
    plt.plot(history.history[key])
plt.title('Model Accuracy')
plt.ylabel('Accuracy')
plt.xlabel('Epoch')
plt.legend(['Train', 'Validation'], loc='upper left')
plt.show()
# "Loss"
plt.plot(history.history['loss'])
# Compile with a low learning rate (fine-tuning) and the custom weighted loss;
# track binary accuracy and AUC.
model.compile(
    optimizer=tf.keras.optimizers.Adam(0.00001),
    loss=loss_fn,
    metrics=[tf.keras.metrics.BinaryAccuracy(), tf.keras.metrics.AUC()],
)

# Train for 10 epochs against the validation generator.
history = model.fit(
    train_datagen,
    epochs=10,
    verbose=1,
    validation_data=valid_datagen,
    shuffle=True,
)
def loss_fn(y_true, y_pred):
    """Class-weighted multi-label binary cross-entropy over five scene labels.

    For each label i, adds the usual BCE term weighted by the per-class
    positive/negative weights looked up from the module-level
    ``positive_weights`` / ``negative_weights`` dicts.

    NOTE(review): the paste had lost the body indentation; restored here.
    The five copy-pasted per-class terms are folded into one loop —
    summation order (desert, mountains, sea, sunset, trees) is preserved,
    so the result is identical.

    Args:
        y_true: ground-truth label values, indexable by class position 0-4.
        y_pred: predicted probabilities, indexable by class position 0-4.

    Returns:
        The accumulated (negated) weighted log-likelihood.
    """
    class_names = ('desert', 'mountains', 'sea', 'sunset', 'trees')
    loss = 0
    for i, name in enumerate(class_names):
        # NOTE(review): K.log(0) yields -inf; upstream code presumably relies
        # on sigmoid outputs staying in (0, 1) — consider clipping with
        # K.epsilon() if NaN losses appear. Behavior left unchanged here.
        loss -= (positive_weights[name] * y_true[i] * K.log(y_pred[i])
                 + negative_weights[name] * (1 - y_true[i]) * K.log(1 - y_pred[i]))
    return loss
NUM_CLASSES = 5
fine_tuning = True

# ResNet50 backbone (ImageNet weights, global average pooling) followed by a
# sigmoid head — one independent probability per label (multi-label setup).
model = Sequential()
model.add(ResNet50(include_top=False, pooling='avg', weights='imagenet', input_shape=(H, W, 3)))
model.add(Dense(NUM_CLASSES, activation='sigmoid'))

# NOTE(review): the paste had lost this if-body's indentation; restored here.
if fine_tuning:
    # Unfreeze the backbone so its weights update during training.
    model.layers[0].trainable = True

# Training generator applies augmentation; validation generator does not.
train_datagen = DataGeneratorKeras(train=True, augmentation=True, preprocessing_fn=preprocess_input, batch_size=32)
valid_datagen = DataGeneratorKeras(train=False, augmentation=False, preprocessing_fn=preprocess_input, batch_size=32)