This code snippet shows how we can change a layer in a pretrained model. In the following code, we replace all the ReLU activation functions with SELU in a resnet18 model.
import torch
import torch.nn as nn
from torchvision import models

resnet18 = models.resnet18(pretrained=True)

def funct(list_mods):
    # Walk the list of child modules, swapping every ReLU for SELU
    # and recursing into Sequential and BasicBlock containers.
    for i in range(len(list_mods)):
        if list_mods[i].__class__.__name__ == "ReLU":
            list_mods[i] = nn.SELU(inplace=True)
        elif list_mods[i].__class__.__name__ in ("Sequential", "BasicBlock"):
            list_mods[i] = nn.Sequential(*funct(list(list_mods[i].children())))
    return list_mods

resnet18_selu = nn.Sequential(*funct(list(resnet18.children())))
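Note that rebuilding the model with nn.Sequential changes its structure: the residual (skip) connections inside each BasicBlock and the flatten step before the final fully connected layer are lost, so resnet18_selu does not compute the same function as the original network. A common alternative is to modify the pretrained model in place, replacing each ReLU module where it sits so the architecture is preserved. The following is a minimal sketch of that pattern, using only standard torch.nn and torchvision calls; replace_relu_with_selu is an illustrative helper name, not part of either library.

import torch.nn as nn
from torchvision import models

def replace_relu_with_selu(module):
    # Recursively replace every nn.ReLU child with nn.SELU in place,
    # leaving the rest of the module tree (skip connections, flatten, fc) untouched.
    for name, child in module.named_children():
        if isinstance(child, nn.ReLU):
            setattr(module, name, nn.SELU(inplace=True))
        else:
            replace_relu_with_selu(child)

resnet18 = models.resnet18(pretrained=True)
replace_relu_with_selu(resnet18)
print(resnet18)  # the printed architecture should now list SELU where ReLU used to be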