RED-Net deploy prototxt, just for quick visualisation with Netscope.
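# RED-Net (residual encoder-decoder): a 15-layer convolutional encoder
# followed by a 15-layer deconvolutional decoder. The Eltwise layers add
# the outputs of conv14, conv12, conv10, conv8, conv6, conv4 and conv2
# back into the decoder as skip connections. Every conv/deconv uses 128
# filters of size 3x3 with stride 1 and pad 1, except deconv15, which
# maps back to 3 output channels.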
name: "RED-Net" | |
input: "data" | |
input_dim: 1 | |
input_dim: 3 | |
input_dim: 256 | |
input_dim: 256 | |
# conv1 | |
layer { | |
name: "conv1" | |
type: "Convolution" | |
bottom: "data" | |
top: "c1" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu1" type: "ReLU" bottom: "c1" top: "c1" } | |
# conv2 | |
layer { | |
name: "conv2" | |
type: "Convolution" | |
bottom: "c1" | |
top: "c2" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu2" type: "ReLU" bottom: "c2" top: "c2" } | |
# conv3 | |
layer { | |
name: "conv3" | |
type: "Convolution" | |
bottom: "c2" | |
top: "c3" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu3" type: "ReLU" bottom: "c3" top: "c3" } | |
# conv4 | |
layer { | |
name: "conv4" | |
type: "Convolution" | |
bottom: "c3" | |
top: "c4" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu4" type: "ReLU" bottom: "c4" top: "c4" } | |
# conv5 | |
layer { | |
name: "conv5" | |
type: "Convolution" | |
bottom: "c4" | |
top: "c5" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu5" type: "ReLU" bottom: "c5" top: "c5" } | |
# conv6 | |
layer { | |
name: "conv6" | |
type: "Convolution" | |
bottom: "c5" | |
top: "c6" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu6" type: "ReLU" bottom: "c6" top: "c6" } | |
# conv7 | |
layer { | |
name: "conv7" | |
type: "Convolution" | |
bottom: "c6" | |
top: "c7" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu7" type: "ReLU" bottom: "c7" top: "c7" } | |
# conv8 | |
layer { | |
name: "conv8" | |
type: "Convolution" | |
bottom: "c7" | |
top: "c8" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu8" type: "ReLU" bottom: "c8" top: "c8" } | |
# conv9 | |
layer { | |
name: "conv9" | |
type: "Convolution" | |
bottom: "c8" | |
top: "c9" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu9" type: "ReLU" bottom: "c9" top: "c9" } | |
# conv10 | |
layer { | |
name: "conv10" | |
type: "Convolution" | |
bottom: "c9" | |
top: "c10" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu10" type: "ReLU" bottom: "c10" top: "c10" } | |
# conv11 | |
layer { | |
name: "conv11" | |
type: "Convolution" | |
bottom: "c10" | |
top: "c11" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu11" type: "ReLU" bottom: "c11" top: "c11" } | |
# conv12 | |
layer { | |
name: "conv12" | |
type: "Convolution" | |
bottom: "c11" | |
top: "c12" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu12" type: "ReLU" bottom: "c12" top: "c12" } | |
# conv13 | |
layer { | |
name: "conv13" | |
type: "Convolution" | |
bottom: "c12" | |
top: "c13" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu13" type: "ReLU" bottom: "c13" top: "c13" } | |
# conv14 | |
layer { | |
name: "conv14" | |
type: "Convolution" | |
bottom: "c13" | |
top: "c14" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu14" type: "ReLU" bottom: "c14" top: "c14" } | |
# conv15 | |
layer { | |
name: "conv15" | |
type: "Convolution" | |
bottom: "c14" | |
top: "c15" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu15" type: "ReLU" bottom: "c15" top: "c15" } | |
# deconv1 | |
layer { | |
name: "deconv1" | |
type: "Deconvolution" | |
bottom: "c15" | |
top: "d1" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu16" type: "ReLU" bottom: "d1" top: "d1" } | |
# residual1 | |
layer { | |
name: "residual1" | |
type: "Eltwise" | |
bottom: "c14" | |
bottom: "d1" | |
top: "d1a" | |
} | |
layer { name: "relu17" type: "ReLU" bottom: "d1a" top: "d1a" } | |
# deconv2 | |
layer { | |
name: "deconv2" | |
type: "Deconvolution" | |
bottom: "d1a" | |
top: "d2" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu18" type: "ReLU" bottom: "d2" top: "d2" } | |
# deconv3 | |
layer { | |
name: "deconv3" | |
type: "Deconvolution" | |
bottom: "d2" | |
top: "d3" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu19" type: "ReLU" bottom: "d3" top: "d3" } | |
# residual2 | |
layer { | |
name: "residual2" | |
type: "Eltwise" | |
bottom: "c12" | |
bottom: "d3" | |
top: "d3a" | |
} | |
layer { name: "relu20" type: "ReLU" bottom: "d3a" top: "d3a" } | |
# deconv4 | |
layer { | |
name: "deconv4" | |
type: "Deconvolution" | |
bottom: "d3a" | |
top: "d4" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu21" type: "ReLU" bottom: "d4" top: "d4" } | |
# deconv5 | |
layer { | |
name: "deconv5" | |
type: "Deconvolution" | |
bottom: "d4" | |
top: "d5" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu22" type: "ReLU" bottom: "d5" top: "d5" } | |
# residual3 | |
layer { | |
name: "residual3" | |
type: "Eltwise" | |
bottom: "c10" | |
bottom: "d5" | |
top: "d5a" | |
} | |
layer { name: "relu23" type: "ReLU" bottom: "d5a" top: "d5a" } | |
# deconv6 | |
layer { | |
name: "deconv6" | |
type: "Deconvolution" | |
bottom: "d5a" | |
top: "d6" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu24" type: "ReLU" bottom: "d6" top: "d6" } | |
# deconv7 | |
layer { | |
name: "deconv7" | |
type: "Deconvolution" | |
bottom: "d6" | |
top: "d7" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu25" type: "ReLU" bottom: "d7" top: "d7" } | |
# residual4 | |
layer { | |
name: "residual4" | |
type: "Eltwise" | |
bottom: "c8" | |
bottom: "d7" | |
top: "d7a" | |
} | |
layer { name: "relu26" type: "ReLU" bottom: "d7a" top: "d7a" } | |
# deconv8 | |
layer { | |
name: "deconv8" | |
type: "Deconvolution" | |
bottom: "d7a" | |
top: "d8" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu27" type: "ReLU" bottom: "d8" top: "d8" } | |
# deconv9 | |
layer { | |
name: "deconv9" | |
type: "Deconvolution" | |
bottom: "d8" | |
top: "d9" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu28" type: "ReLU" bottom: "d9" top: "d9" } | |
# residual5 | |
layer { | |
name: "residual5" | |
type: "Eltwise" | |
bottom: "c6" | |
bottom: "d9" | |
top: "d9a" | |
} | |
layer { name: "relu29" type: "ReLU" bottom: "d9a" top: "d9a" } | |
# deconv10 | |
layer { | |
name: "deconv10" | |
type: "Deconvolution" | |
bottom: "d9a" | |
top: "d10" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu30" type: "ReLU" bottom: "d10" top: "d10" } | |
# deconv11 | |
layer { | |
name: "deconv11" | |
type: "Deconvolution" | |
bottom: "d10" | |
top: "d11" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu31" type: "ReLU" bottom: "d11" top: "d11" } | |
# residual6 | |
layer { | |
name: "residual6" | |
type: "Eltwise" | |
bottom: "c4" | |
bottom: "d11" | |
top: "d11a" | |
} | |
layer { name: "relu32" type: "ReLU" bottom: "d11a" top: "d11a" } | |
# deconv12 | |
layer { | |
name: "deconv12" | |
type: "Deconvolution" | |
bottom: "d11a" | |
top: "d12" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu33" type: "ReLU" bottom: "d12" top: "d12" } | |
# deconv13 | |
layer { | |
name: "deconv13" | |
type: "Deconvolution" | |
bottom: "d12" | |
top: "d13" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu34" type: "ReLU" bottom: "d13" top: "d13" } | |
# residual7 | |
layer { | |
name: "residual7" | |
type: "Eltwise" | |
bottom: "c2" | |
bottom: "d13" | |
top: "d13a" | |
} | |
layer { name: "relu35" type: "ReLU" bottom: "d13a" top: "d13a" } | |
# deconv14 | |
layer { | |
name: "deconv14" | |
type: "Deconvolution" | |
bottom: "d13a" | |
top: "d14" | |
convolution_param { num_output: 128 kernel_size: 3 stride: 1 pad: 1 } | |
} | |
layer { name: "relu36" type: "ReLU" bottom: "d14" top: "d14" } | |
# deconv15 | |
layer { | |
name: "deconv15" | |
type: "Deconvolution" | |
bottom: "d14" | |
top: "d15" | |
convolution_param { num_output: 3 kernel_size: 3 stride: 1 pad: 1 } | |
} |
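As a quick sanity check outside Netscope, the deploy definition above can also be loaded with pycaffe to confirm the layer wiring parses and that every blob keeps the 256x256 spatial size (3x3 kernels with stride 1 and pad 1 preserve it). This is only a minimal sketch, assuming Caffe's Python bindings are installed and the file is saved as rednet_deploy.prototxt (the filename is just a placeholder):

import caffe

# Run on CPU; the network is small enough for a quick shape check.
caffe.set_mode_cpu()

# Load the deploy definition in TEST phase; no trained weights are
# needed just to inspect blob shapes.
net = caffe.Net('rednet_deploy.prototxt', caffe.TEST)

# Every conv/deconv uses kernel 3, stride 1, pad 1, so spatial dims stay
# 256x256; channels are 128 everywhere except the 3-channel data and d15.
for name, blob in net.blobs.items():
    print(name, blob.data.shape)

Netscope itself only needs the prototxt text pasted into its editor, so the script above is purely an optional check.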