Created
June 3, 2020 15:08
-
-
Save arunm8489/00927881b888e80f89561e8d5e27d2b2 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
class DummyLayer(nn.Module):
    """Parameter-free placeholder for darknet 'route' and 'shortcut' layers.

    These layers own no weights; the actual tensor concatenation/addition
    is performed in the network's forward pass, so this module only marks
    the layer's position in the ``nn.ModuleList``.
    """

    def __init__(self):
        super().__init__()
class DetectionLayer(nn.Module):
    """YOLO detection head marker that stores its anchor boxes.

    The box decoding itself happens in the network's forward pass; this
    module only carries the ``(width, height)`` anchor pairs selected for
    this scale.
    """

    def __init__(self, anchors):
        super().__init__()
        self.anchors = anchors
def model_initialization(blocks):
    """Build an ``nn.ModuleList`` from parsed darknet cfg blocks.

    Parameters
    ----------
    blocks : list[dict]
        Parsed cfg blocks.  ``blocks[0]`` is the ``[net]`` hyper-parameter
        section; every following dict describes one layer
        (convolutional / upsample / route / shortcut / yolo).

    Returns
    -------
    tuple
        ``(net_info, module_list)`` where ``net_info`` is ``blocks[0]`` and
        ``module_list`` is an ``nn.ModuleList`` holding one ``nn.Sequential``
        per layer block, in cfg order.

    Notes
    -----
    The ``blocks`` dicts are mutated in place: a 'route' block's ``layers``
    entries become absolute int layer indices and a 'yolo' block's
    ``anchors`` becomes a list of ``(w, h)`` tuples — the forward pass
    conventionally re-reads these mutated values.
    """
    darknet_details = blocks[0]   # the [net] section (training hyper-parameters)
    channels = 3                  # network input is a 3-channel RGB image
    # Output channel count of every layer built so far; consulted when a
    # 'route' layer concatenates earlier feature maps.
    output_filters = []
    modulelist = nn.ModuleList()
    # Guard: 'filters' is appended for every block below, so give it a sane
    # value in case the first layer block defines no filters of its own.
    filters = channels

    for i, block in enumerate(blocks[1:]):
        seq = nn.Sequential()

        if block["type"] == "convolutional":
            activation = block["activation"]
            filters = int(block["filters"])
            kernel_size = int(block["size"])
            strides = int(block["stride"])
            # A conv followed by batch-norm has its bias absorbed by the BN
            # layer, so the conv itself is bias-free in that case.
            use_bias = "batch_normalize" not in block
            pad = (kernel_size - 1) // 2  # 'same' padding for odd kernels
            conv = nn.Conv2d(in_channels=channels, out_channels=filters,
                             kernel_size=kernel_size, stride=strides,
                             padding=pad, bias=use_bias)
            seq.add_module("conv_{0}".format(i), conv)
            if "batch_normalize" in block:
                seq.add_module("batch_norm_{0}".format(i),
                               nn.BatchNorm2d(filters))
            if activation == "leaky":
                seq.add_module("leaky_{0}".format(i),
                               nn.LeakyReLU(0.1, inplace=True))

        elif block["type"] == "upsample":
            # NOTE(review): original darknet upsamples with nearest-neighbour;
            # 'bilinear' is kept here to preserve this implementation's
            # existing behaviour — confirm against the reference weights.
            upsample = nn.Upsample(scale_factor=2, mode="bilinear")
            seq.add_module("upsample_{}".format(i), upsample)

        elif block["type"] == "route":
            # 'layers' lists source layers, e.g. "-1, 36": the first entry is
            # relative to the current layer, the second (if any) is already an
            # absolute index.  Convert to absolute ints (mutating the block so
            # the forward pass can reuse them) and sum the sources' channels.
            block["layers"] = block["layers"].split(",")
            start = int(block["layers"][0])
            block["layers"][0] = i + start  # absolute index of first source
            if len(block["layers"]) == 1:
                # e.g. "-1" at layer 20: reuse layer 19's channel count
                filters = output_filters[block["layers"][0]]
            else:
                # e.g. "-1, 36": concatenation, so channel counts add up
                block["layers"][1] = int(block["layers"][1])
                filters = (output_filters[block["layers"][0]]
                           + output_filters[block["layers"][1]])
            # The concatenation itself happens in forward(); placeholder only.
            seq.add_module("route_{0}".format(i), DummyLayer())

        elif block["type"] == "shortcut":
            # Residual addition happens in forward(), which reads the raw
            # 'from' field itself; nothing to build here.  'filters' keeps
            # the previous layer's value — a shortcut preserves channels.
            seq.add_module("shortcut_{0}".format(i), DummyLayer())

        elif block["type"] == "yolo":
            mask = [int(m) for m in block["mask"].split(",")]
            coords = [int(a) for a in block["anchors"].split(",")]
            # Pair up the flat "w1,h1,w2,h2,..." list, then keep only the
            # anchors this scale is responsible for.  (Index variable 'j'
            # deliberately does not shadow the outer loop's 'i'.)
            all_anchors = [(coords[j], coords[j + 1])
                           for j in range(0, len(coords), 2)]
            anchors = [all_anchors[m] for m in mask]
            block["anchors"] = anchors  # forward pass reads the (w, h) tuples
            seq.add_module("Detection_{0}".format(i), DetectionLayer(anchors))

        modulelist.append(seq)
        output_filters.append(filters)
        channels = filters  # next layer consumes this layer's output channels

    return darknet_details, modulelist
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment