# ResNet-18 (Caffe prototxt)
# Gist: https://gist.github.com/qzhong0605/648bdde950a49e537ad1ea8e73068995
# Author: @qzhong0605, created December 1, 2018
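#
# Notes (added for readability):
# - This is a deploy/inference definition: a fixed 1x3x224x224 Input layer and a
#   Softmax output, with no data or loss layers.
# - Each convolution is followed by a BatchNorm + Scale pair, the usual Caffe idiom:
#   Caffe's BatchNorm layer only normalizes, and the learned affine transform
#   (gamma/beta) comes from the Scale layer with bias_term: true.
# - moving_average_fraction: 0.899999976158 is simply 0.9 after float32
#   serialization; the two parse to the same value.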
name: "ResNet-18"
layer {
name: "data"
type: "Input"
top: "data"
input_param {
shape {
dim: 1
dim: 3
dim: 224
dim: 224
}
}
}
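# Stem: 7x7/2 convolution (64 outputs) + BatchNorm/Scale/ReLU + 3x3/2 max pooling,
# taking the 224x224 input down to 56x56.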
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
convolution_param {
num_output: 64
bias_term: false
pad: 3
kernel_size: 7
stride: 2
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn_conv1"
type: "BatchNorm"
bottom: "conv1"
top: "conv1/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale_conv1"
type: "Scale"
bottom: "conv1/bn"
top: "conv1/bn"
scale_param {
bias_term: true
}
}
layer {
name: "conv1_relu"
type: "ReLU"
bottom: "conv1/bn"
top: "conv1/bn"
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1/bn"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
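# Stage conv2_x: two basic blocks, 64 channels at 56x56.
# res2a: 3x3-3x3 residual branch plus a 1x1 projection shortcut (res2a_branch1),
# even though neither the channel count nor the resolution changes here.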
layer {
name: "res2a_branch1"
type: "Convolution"
bottom: "pool1"
top: "res2a_branch1"
convolution_param {
num_output: 64
bias_term: false
pad: 0
kernel_size: 1
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn2a_branch1"
type: "BatchNorm"
bottom: "res2a_branch1"
top: "res2a_branch1/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale2a_branch1"
type: "Scale"
bottom: "res2a_branch1/bn"
top: "res2a_branch1/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res2a_branch2a"
type: "Convolution"
bottom: "pool1"
top: "res2a_branch2a"
convolution_param {
num_output: 64
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn2a_branch2a"
type: "BatchNorm"
bottom: "res2a_branch2a"
top: "res2a_branch2a/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale2a_branch2a"
type: "Scale"
bottom: "res2a_branch2a/bn"
top: "res2a_branch2a/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res2a_branch2a_relu"
type: "ReLU"
bottom: "res2a_branch2a/bn"
top: "res2a_branch2a/bn"
}
layer {
name: "res2a_branch2b"
type: "Convolution"
bottom: "res2a_branch2a/bn"
top: "res2a_branch2b"
convolution_param {
num_output: 64
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn2a_branch2b"
type: "BatchNorm"
bottom: "res2a_branch2b"
top: "res2a_branch2b/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale2a_branch2b"
type: "Scale"
bottom: "res2a_branch2b/bn"
top: "res2a_branch2b/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res2a"
type: "Eltwise"
bottom: "res2a_branch1/bn"
bottom: "res2a_branch2b/bn"
top: "res2a"
eltwise_param {
operation: SUM
}
}
layer {
name: "res2a_relu"
type: "ReLU"
bottom: "res2a"
top: "res2a"
}
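# res2b: second block of conv2_x, identity shortcut (the Eltwise sums res2a directly).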
layer {
name: "res2b_branch2a"
type: "Convolution"
bottom: "res2a"
top: "res2b_branch2a"
convolution_param {
num_output: 64
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn2b_branch2a"
type: "BatchNorm"
bottom: "res2b_branch2a"
top: "res2b_branch2a/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale2b_branch2a"
type: "Scale"
bottom: "res2b_branch2a/bn"
top: "res2b_branch2a/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res2b_branch2a_relu"
type: "ReLU"
bottom: "res2b_branch2a/bn"
top: "res2b_branch2a/bn"
}
layer {
name: "res2b_branch2b"
type: "Convolution"
bottom: "res2b_branch2a/bn"
top: "res2b_branch2b"
convolution_param {
num_output: 64
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn2b_branch2b"
type: "BatchNorm"
bottom: "res2b_branch2b"
top: "res2b_branch2b/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale2b_branch2b"
type: "Scale"
bottom: "res2b_branch2b/bn"
top: "res2b_branch2b/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res2b"
type: "Eltwise"
bottom: "res2a"
bottom: "res2b_branch2b/bn"
top: "res2b"
eltwise_param {
operation: SUM
}
}
layer {
name: "res2b_relu"
type: "ReLU"
bottom: "res2b"
top: "res2b"
}
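# Stage conv3_x: two basic blocks, 128 channels.
# res3a downsamples with stride 2 (56x56 -> 28x28) in both the residual branch and
# the 1x1 projection shortcut res3a_branch1.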
layer {
name: "res3a_branch1"
type: "Convolution"
bottom: "res2b"
top: "res3a_branch1"
convolution_param {
num_output: 128
bias_term: false
pad: 0
kernel_size: 1
stride: 2
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn3a_branch1"
type: "BatchNorm"
bottom: "res3a_branch1"
top: "res3a_branch1/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale3a_branch1"
type: "Scale"
bottom: "res3a_branch1/bn"
top: "res3a_branch1/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res3a_branch2a"
type: "Convolution"
bottom: "res2b"
top: "res3a_branch2a"
convolution_param {
num_output: 128
bias_term: false
pad: 1
kernel_size: 3
stride: 2
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn3a_branch2a"
type: "BatchNorm"
bottom: "res3a_branch2a"
top: "res3a_branch2a/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale3a_branch2a"
type: "Scale"
bottom: "res3a_branch2a/bn"
top: "res3a_branch2a/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res3a_branch2a_relu"
type: "ReLU"
bottom: "res3a_branch2a/bn"
top: "res3a_branch2a/bn"
}
layer {
name: "res3a_branch2b"
type: "Convolution"
bottom: "res3a_branch2a/bn"
top: "res3a_branch2b"
convolution_param {
num_output: 128
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn3a_branch2b"
type: "BatchNorm"
bottom: "res3a_branch2b"
top: "res3a_branch2b/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale3a_branch2b"
type: "Scale"
bottom: "res3a_branch2b/bn"
top: "res3a_branch2b/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res3a"
type: "Eltwise"
bottom: "res3a_branch1/bn"
bottom: "res3a_branch2b/bn"
top: "res3a"
eltwise_param {
operation: SUM
}
}
layer {
name: "res3a_relu"
type: "ReLU"
bottom: "res3a"
top: "res3a"
}
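# res3b: second block of conv3_x, identity shortcut.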
layer {
name: "res3b_branch2a"
type: "Convolution"
bottom: "res3a"
top: "res3b_branch2a"
convolution_param {
num_output: 128
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn3b_branch2a"
type: "BatchNorm"
bottom: "res3b_branch2a"
top: "res3b_branch2a/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale3b_branch2a"
type: "Scale"
bottom: "res3b_branch2a/bn"
top: "res3b_branch2a/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res3b_branch2a_relu"
type: "ReLU"
bottom: "res3b_branch2a/bn"
top: "res3b_branch2a/bn"
}
layer {
name: "res3b_branch2b"
type: "Convolution"
bottom: "res3b_branch2a/bn"
top: "res3b_branch2b"
convolution_param {
num_output: 128
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn3b_branch2b"
type: "BatchNorm"
bottom: "res3b_branch2b"
top: "res3b_branch2b/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale3b_branch2b"
type: "Scale"
bottom: "res3b_branch2b/bn"
top: "res3b_branch2b/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res3b"
type: "Eltwise"
bottom: "res3a"
bottom: "res3b_branch2b/bn"
top: "res3b"
eltwise_param {
operation: SUM
}
}
layer {
name: "res3b_relu"
type: "ReLU"
bottom: "res3b"
top: "res3b"
}
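# Stage conv4_x: two basic blocks, 256 channels.
# res4a downsamples with stride 2 (28x28 -> 14x14); res4a_branch1 is the 1x1 projection shortcut.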
layer {
name: "res4a_branch1"
type: "Convolution"
bottom: "res3b"
top: "res4a_branch1"
convolution_param {
num_output: 256
bias_term: false
pad: 0
kernel_size: 1
stride: 2
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn4a_branch1"
type: "BatchNorm"
bottom: "res4a_branch1"
top: "res4a_branch1/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale4a_branch1"
type: "Scale"
bottom: "res4a_branch1/bn"
top: "res4a_branch1/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res4a_branch2a"
type: "Convolution"
bottom: "res3b"
top: "res4a_branch2a"
convolution_param {
num_output: 256
bias_term: false
pad: 1
kernel_size: 3
stride: 2
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn4a_branch2a"
type: "BatchNorm"
bottom: "res4a_branch2a"
top: "res4a_branch2a/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale4a_branch2a"
type: "Scale"
bottom: "res4a_branch2a/bn"
top: "res4a_branch2a/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res4a_branch2a_relu"
type: "ReLU"
bottom: "res4a_branch2a/bn"
top: "res4a_branch2a/bn"
}
layer {
name: "res4a_branch2b"
type: "Convolution"
bottom: "res4a_branch2a/bn"
top: "res4a_branch2b"
convolution_param {
num_output: 256
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn4a_branch2b"
type: "BatchNorm"
bottom: "res4a_branch2b"
top: "res4a_branch2b/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale4a_branch2b"
type: "Scale"
bottom: "res4a_branch2b/bn"
top: "res4a_branch2b/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res4a"
type: "Eltwise"
bottom: "res4a_branch1/bn"
bottom: "res4a_branch2b/bn"
top: "res4a"
eltwise_param {
operation: SUM
}
}
layer {
name: "res4a_relu"
type: "ReLU"
bottom: "res4a"
top: "res4a"
}
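# res4b: second block of conv4_x, identity shortcut.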
layer {
name: "res4b_branch2a"
type: "Convolution"
bottom: "res4a"
top: "res4b_branch2a"
convolution_param {
num_output: 256
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn4b_branch2a"
type: "BatchNorm"
bottom: "res4b_branch2a"
top: "res4b_branch2a/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale4b_branch2a"
type: "Scale"
bottom: "res4b_branch2a/bn"
top: "res4b_branch2a/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res4b_branch2a_relu"
type: "ReLU"
bottom: "res4b_branch2a/bn"
top: "res4b_branch2a/bn"
}
layer {
name: "res4b_branch2b"
type: "Convolution"
bottom: "res4b_branch2a/bn"
top: "res4b_branch2b"
convolution_param {
num_output: 256
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn4b_branch2b"
type: "BatchNorm"
bottom: "res4b_branch2b"
top: "res4b_branch2b/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale4b_branch2b"
type: "Scale"
bottom: "res4b_branch2b/bn"
top: "res4b_branch2b/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res4b"
type: "Eltwise"
bottom: "res4a"
bottom: "res4b_branch2b/bn"
top: "res4b"
eltwise_param {
operation: SUM
}
}
layer {
name: "res4b_relu"
type: "ReLU"
bottom: "res4b"
top: "res4b"
}
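# Stage conv5_x: two basic blocks, 512 channels.
# res5a downsamples with stride 2 (14x14 -> 7x7); res5a_branch1 is the 1x1 projection shortcut.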
layer {
name: "res5a_branch1"
type: "Convolution"
bottom: "res4b"
top: "res5a_branch1"
convolution_param {
num_output: 512
bias_term: false
pad: 0
kernel_size: 1
stride: 2
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn5a_branch1"
type: "BatchNorm"
bottom: "res5a_branch1"
top: "res5a_branch1/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale5a_branch1"
type: "Scale"
bottom: "res5a_branch1/bn"
top: "res5a_branch1/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res5a_branch2a"
type: "Convolution"
bottom: "res4b"
top: "res5a_branch2a"
convolution_param {
num_output: 512
bias_term: false
pad: 1
kernel_size: 3
stride: 2
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn5a_branch2a"
type: "BatchNorm"
bottom: "res5a_branch2a"
top: "res5a_branch2a/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale5a_branch2a"
type: "Scale"
bottom: "res5a_branch2a/bn"
top: "res5a_branch2a/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res5a_branch2a_relu"
type: "ReLU"
bottom: "res5a_branch2a/bn"
top: "res5a_branch2a/bn"
}
layer {
name: "res5a_branch2b"
type: "Convolution"
bottom: "res5a_branch2a/bn"
top: "res5a_branch2b"
convolution_param {
num_output: 512
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn5a_branch2b"
type: "BatchNorm"
bottom: "res5a_branch2b"
top: "res5a_branch2b/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale5a_branch2b"
type: "Scale"
bottom: "res5a_branch2b/bn"
top: "res5a_branch2b/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res5a"
type: "Eltwise"
bottom: "res5a_branch1/bn"
bottom: "res5a_branch2b/bn"
top: "res5a"
eltwise_param {
operation: SUM
}
}
layer {
name: "res5a_relu"
type: "ReLU"
bottom: "res5a"
top: "res5a"
}
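# res5b: second block of conv5_x, identity shortcut.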
layer {
name: "res5b_branch2a"
type: "Convolution"
bottom: "res5a"
top: "res5b_branch2a"
convolution_param {
num_output: 512
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn5b_branch2a"
type: "BatchNorm"
bottom: "res5b_branch2a"
top: "res5b_branch2a/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale5b_branch2a"
type: "Scale"
bottom: "res5b_branch2a/bn"
top: "res5b_branch2a/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res5b_branch2a_relu"
type: "ReLU"
bottom: "res5b_branch2a/bn"
top: "res5b_branch2a/bn"
}
layer {
name: "res5b_branch2b"
type: "Convolution"
bottom: "res5b_branch2a/bn"
top: "res5b_branch2b"
convolution_param {
num_output: 512
bias_term: false
pad: 1
kernel_size: 3
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "bn5b_branch2b"
type: "BatchNorm"
bottom: "res5b_branch2b"
top: "res5b_branch2b/bn"
batch_norm_param {
moving_average_fraction: 0.899999976158
}
}
layer {
name: "scale5b_branch2b"
type: "Scale"
bottom: "res5b_branch2b/bn"
top: "res5b_branch2b/bn"
scale_param {
bias_term: true
}
}
layer {
name: "res5b"
type: "Eltwise"
bottom: "res5a"
bottom: "res5b_branch2b/bn"
top: "res5b"
eltwise_param {
operation: SUM
}
}
layer {
name: "res5b_relu"
type: "ReLU"
bottom: "res5b"
top: "res5b"
}
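# Head: 7x7 global average pooling (7x7 -> 1x1), a 1000-way fully connected layer,
# and a Softmax producing class probabilities.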
layer {
name: "pool5"
type: "Pooling"
bottom: "res5b"
top: "pool5"
pooling_param {
pool: AVE
kernel_size: 7
stride: 1
}
}
layer {
name: "fc1000"
type: "InnerProduct"
bottom: "pool5"
top: "fc1000"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 2.0
decay_mult: 1.0
}
inner_product_param {
num_output: 1000
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
value: 0.0
}
}
}
layer {
name: "prob"
type: "Softmax"
bottom: "fc1000"
top: "prob"
}
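#
# A minimal pycaffe usage sketch, kept in comments so this file remains a valid
# prototxt. It assumes this definition is saved as "resnet18_deploy.prototxt" and
# that trained weights exist in "resnet18.caffemodel"; both file names are
# placeholders, and the random input stands in for a properly preprocessed image.
#
#   import numpy as np
#   import caffe
#
#   caffe.set_mode_cpu()
#   net = caffe.Net("resnet18_deploy.prototxt", "resnet18.caffemodel", caffe.TEST)
#
#   # Fill the 1x3x224x224 "data" blob (normally a BGR, mean-subtracted image
#   # resized to 224x224), then run a forward pass and read the softmax output.
#   net.blobs["data"].data[...] = np.random.rand(1, 3, 224, 224)  # placeholder input
#   out = net.forward()
#   print("predicted class:", out["prob"].argmax())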