Skip to content

Instantly share code, notes, and snippets.

View SubhadityaMukherjee's full-sized avatar

Subhaditya Mukherjee SubhadityaMukherjee

View GitHub Profile
# Load one sample image from the (hard-coded, local) fish dataset.
img = PILImage.create(
"/media/hdd/Datasets/Fish_Dataset/Fish_Dataset/Shrimp/Shrimp/00012.png"
)
# Build a single-item test DataLoader and take its first batch element.
# NOTE(review): `dls`, `first`, `learn` are assumed defined in an earlier cell.
(x,) = first(dls.test_dl([img]))
# Leftover class-activation-map experiment, intentionally commented out.
# cam_map = torch.einsum('ck,kij->cij', learn.model[1][-1].weight, act)
# Decode the normalised tensor back into a displayable image.
x_dec = TensorImage(dls.train.decode((x,))[0][0])
# Number of children in the model body — used to size a plotting grid.
image_count = len(learn.model[0])
col = 4  # fixed number of grid columns
row = math.ceil(image_count / col)  # rows needed to fit every image
# Root folder of the fish dataset on local disk.
root_dir = "/media/hdd/Datasets/Fish_Dataset/Fish_Dataset/"
path = Path(root_dir)
# fastai DataBlock: images labelled by their parent folder name,
# seeded 80/20 random train/valid split, random resized crop to 224px
# per item, plus the default fastai augmentations applied per batch.
fields = DataBlock(
blocks=(ImageBlock, CategoryBlock),
get_items=get_image_files,
get_y=parent_label,
splitter=RandomSplitter(valid_pct=0.2, seed=42),
item_tfms=RandomResizedCrop(224, min_scale=0.5),
batch_tfms=aug_transforms(),
)
import timm  # extra architecture zoo usable with fastai learners
from fastai.vision.all import *
from fastai.vision.widgets import *
import os
import matplotlib.pyplot as plt
from IPython.display import Image
# Redirect the torch and fastai download/cache directories to a large drive.
os.environ["TORCH_HOME"] = "/media/hdd/Datasets/"
os.environ["FASTAI_HOME"] = "/media/hdd/Datasets/"
predictions_path = Path(predictions_path)  # the folder holding the files to predict on
tst_files = get_image_files(predictions_path)  # collect image files, just like for training
def predict_batch(self, item, rm_type_tfms=None, with_input=False): # this bit is slightly complicated. ignore it for now
dl = self.dls.test_dl(item, rm_type_tfms=rm_type_tfms, num_workers=15)
ret = self.get_preds(dl=dl, with_input=False, with_decoded=True)
return ret
Learner.predict_batch = predict_batch
@SubhadityaMukherjee
SubhadityaMukherjee / fastaiv2predictsfull.py
Last active August 13, 2022 07:15
fastaiv2predicts_full
# Assuming you have set up your Dataloader and learner as dls, learn
# One epoch of fastai fine-tuning with weight decay 0.5.
learn.fine_tune(1, wd=0.5)
learn.export("model.pkl")  # Save the model
# Folder of held-out images to predict on (Kaggle fruits-360 layout).
predictions_path = "../input/fruits/fruits-360_dataset/fruits-360/Test"
def predict_batch(self, item, rm_type_tfms=None, with_input=False): # this bit is slightly complicated. ignore it for now
dl = self.dls.test_dl(item, rm_type_tfms=rm_type_tfms, num_workers=15)
ret = self.get_preds(dl=dl, with_input=False, with_decoded=True)
predictions_path = "../input/fruits/fruits-360_dataset/fruits-360/Test"
def predict_batch(self, item, rm_type_tfms=None, with_input=False): # this bit is slightly complicated. ignore it for now
dl = self.dls.test_dl(item, rm_type_tfms=rm_type_tfms, num_workers=15)
ret = self.get_preds(dl=dl, with_input=False, with_decoded=True)
return ret
import random
# Normalise the prediction folder string into a Path object.
predictions_path = Path(predictions_path)
# Monkey-patch: attach `predict_batch` as a method on fastai's Learner class.
Learner.predict_batch = predict_batch
from fastai.interpret import *
from fastai.vision.widgets import *
# Build an interpretation object from the trained learner to inspect errors.
interp = ClassificationInterpretation.from_learner(learn)
# Show the 5 highest-loss (most wrong) examples in a single row.
interp.plot_top_losses(5, nrows=1)
# We can use this to see what our model gets confused about. This will change as you train it more.
interp.most_confused()
# Fine-tune for one epoch with weight decay, then export the trained model.
learn.fine_tune(1, wd=0.5)
learn.export("model.pkl")  # Save the model
# fastai learner: ResNet-18 backbone, label-smoothing loss, AdamW optimizer,
# accuracy/error-rate metrics, MixUp callback, FP16 mixed precision.
# NOTE(review): this cell appears out of order in the gist scrape — `learn`
# is used above before this definition; confirm the intended cell order.
learn = vision_learner(dls,
resnet18, #architecture
loss_func=LabelSmoothingCrossEntropy(), #loss function/objective
opt_func=partial(OptimWrapper, opt=torch.optim.AdamW), # Optimizer
metrics=[accuracy, error_rate],
cbs=[MixUp]).to_fp16() #callbacks, mixed precision