Created
December 23, 2018 16:47
-
-
Save qzhong0605/dc01cf00d1daa9ebb9ef023a28928599 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# CIFAR-10 ResNet-32 (deploy definition).
# Architecture: 3x3 stem conv (16 ch) -> 3 groups of 5 basic residual blocks
# at 16/32/64 channels; spatial resolution halves (stride-2 conv + 1x1
# projection shortcut) at the start of group1 and group2; then global average
# pooling and a 10-way fully connected classifier.
# Each residual block: conv3x3 -> BN -> Scale -> ReLU -> conv3x3 -> BN -> Scale,
# summed with the shortcut, followed by ReLU.
# BatchNorm params are frozen (lr_mult/decay_mult 0); the learnable affine
# transform lives in the separate Scale layers (standard Caffe BN idiom).
name: "cifar10-resnet32"
# NOTE(review): deprecated net-level input/input_dim fields kept as-is for
# compatibility with older Caffe tooling; a modern deploy file would use an
# "Input" layer instead.
input: "data"
input_dim: 1
input_dim: 3
input_dim: 32
input_dim: 32
# ----- stem: 3x3 conv, 16 channels, 32x32 -----
layer {
  name: "first_conv"
  type: "Convolution"
  bottom: "data"
  top: "first_conv"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "first_conv_bn"
  type: "BatchNorm"
  bottom: "first_conv"
  top: "first_conv/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "first_conv_scale"
  type: "Scale"
  bottom: "first_conv/bn"
  top: "first_conv/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "first_conv_relu"
  type: "ReLU"
  bottom: "first_conv/bn"
  top: "first_conv/bn"
}
# ----- group0: 5 residual blocks, 16 channels, 32x32, identity shortcuts -----
layer {
  name: "group0_block0_conv0"
  type: "Convolution"
  bottom: "first_conv/bn"
  top: "group0_block0_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group0_block0_conv0_bn"
  type: "BatchNorm"
  bottom: "group0_block0_conv0"
  top: "group0_block0_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group0_block0_conv0_scale"
  type: "Scale"
  bottom: "group0_block0_conv0/bn"
  top: "group0_block0_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group0_block0_conv0_relu"
  type: "ReLU"
  bottom: "group0_block0_conv0/bn"
  top: "group0_block0_conv0/bn"
}
layer {
  name: "group0_block0_conv1"
  type: "Convolution"
  bottom: "group0_block0_conv0/bn"
  top: "group0_block0_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group0_block0_conv1_bn"
  type: "BatchNorm"
  bottom: "group0_block0_conv1"
  top: "group0_block0_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group0_block0_conv1_scale"
  type: "Scale"
  bottom: "group0_block0_conv1/bn"
  top: "group0_block0_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group0_block0_sum"
  type: "Eltwise"
  bottom: "group0_block0_conv1/bn"
  bottom: "first_conv/bn"
  top: "group0_block0_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group0_block0_relu"
  type: "ReLU"
  bottom: "group0_block0_sum"
  top: "group0_block0_sum"
}
layer {
  name: "group0_block1_conv0"
  type: "Convolution"
  bottom: "group0_block0_sum"
  top: "group0_block1_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group0_block1_conv0_bn"
  type: "BatchNorm"
  bottom: "group0_block1_conv0"
  top: "group0_block1_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group0_block1_conv0_scale"
  type: "Scale"
  bottom: "group0_block1_conv0/bn"
  top: "group0_block1_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group0_block1_conv0_relu"
  type: "ReLU"
  bottom: "group0_block1_conv0/bn"
  top: "group0_block1_conv0/bn"
}
layer {
  name: "group0_block1_conv1"
  type: "Convolution"
  bottom: "group0_block1_conv0/bn"
  top: "group0_block1_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group0_block1_conv1_bn"
  type: "BatchNorm"
  bottom: "group0_block1_conv1"
  top: "group0_block1_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group0_block1_conv1_scale"
  type: "Scale"
  bottom: "group0_block1_conv1/bn"
  top: "group0_block1_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group0_block1_sum"
  type: "Eltwise"
  bottom: "group0_block1_conv1/bn"
  bottom: "group0_block0_sum"
  top: "group0_block1_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group0_block1_relu"
  type: "ReLU"
  bottom: "group0_block1_sum"
  top: "group0_block1_sum"
}
layer {
  name: "group0_block2_conv0"
  type: "Convolution"
  bottom: "group0_block1_sum"
  top: "group0_block2_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group0_block2_conv0_bn"
  type: "BatchNorm"
  bottom: "group0_block2_conv0"
  top: "group0_block2_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group0_block2_conv0_scale"
  type: "Scale"
  bottom: "group0_block2_conv0/bn"
  top: "group0_block2_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group0_block2_conv0_relu"
  type: "ReLU"
  bottom: "group0_block2_conv0/bn"
  top: "group0_block2_conv0/bn"
}
layer {
  name: "group0_block2_conv1"
  type: "Convolution"
  bottom: "group0_block2_conv0/bn"
  top: "group0_block2_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group0_block2_conv1_bn"
  type: "BatchNorm"
  bottom: "group0_block2_conv1"
  top: "group0_block2_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group0_block2_conv1_scale"
  type: "Scale"
  bottom: "group0_block2_conv1/bn"
  top: "group0_block2_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group0_block2_sum"
  type: "Eltwise"
  bottom: "group0_block2_conv1/bn"
  bottom: "group0_block1_sum"
  top: "group0_block2_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group0_block2_relu"
  type: "ReLU"
  bottom: "group0_block2_sum"
  top: "group0_block2_sum"
}
layer {
  name: "group0_block3_conv0"
  type: "Convolution"
  bottom: "group0_block2_sum"
  top: "group0_block3_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group0_block3_conv0_bn"
  type: "BatchNorm"
  bottom: "group0_block3_conv0"
  top: "group0_block3_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group0_block3_conv0_scale"
  type: "Scale"
  bottom: "group0_block3_conv0/bn"
  top: "group0_block3_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group0_block3_conv0_relu"
  type: "ReLU"
  bottom: "group0_block3_conv0/bn"
  top: "group0_block3_conv0/bn"
}
layer {
  name: "group0_block3_conv1"
  type: "Convolution"
  bottom: "group0_block3_conv0/bn"
  top: "group0_block3_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group0_block3_conv1_bn"
  type: "BatchNorm"
  bottom: "group0_block3_conv1"
  top: "group0_block3_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group0_block3_conv1_scale"
  type: "Scale"
  bottom: "group0_block3_conv1/bn"
  top: "group0_block3_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group0_block3_sum"
  type: "Eltwise"
  bottom: "group0_block3_conv1/bn"
  bottom: "group0_block2_sum"
  top: "group0_block3_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group0_block3_relu"
  type: "ReLU"
  bottom: "group0_block3_sum"
  top: "group0_block3_sum"
}
layer {
  name: "group0_block4_conv0"
  type: "Convolution"
  bottom: "group0_block3_sum"
  top: "group0_block4_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group0_block4_conv0_bn"
  type: "BatchNorm"
  bottom: "group0_block4_conv0"
  top: "group0_block4_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group0_block4_conv0_scale"
  type: "Scale"
  bottom: "group0_block4_conv0/bn"
  top: "group0_block4_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group0_block4_conv0_relu"
  type: "ReLU"
  bottom: "group0_block4_conv0/bn"
  top: "group0_block4_conv0/bn"
}
layer {
  name: "group0_block4_conv1"
  type: "Convolution"
  bottom: "group0_block4_conv0/bn"
  top: "group0_block4_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 16
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group0_block4_conv1_bn"
  type: "BatchNorm"
  bottom: "group0_block4_conv1"
  top: "group0_block4_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group0_block4_conv1_scale"
  type: "Scale"
  bottom: "group0_block4_conv1/bn"
  top: "group0_block4_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group0_block4_sum"
  type: "Eltwise"
  bottom: "group0_block4_conv1/bn"
  bottom: "group0_block3_sum"
  top: "group0_block4_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group0_block4_relu"
  type: "ReLU"
  bottom: "group0_block4_sum"
  top: "group0_block4_sum"
}
# ----- group1: 5 residual blocks, 32 channels, 16x16 -----
# block0 downsamples (stride-2 conv0) and uses a stride-2 1x1 projection
# shortcut; blocks 1-4 use identity shortcuts.
layer {
  name: "group1_block0_conv0"
  type: "Convolution"
  bottom: "group0_block4_sum"
  top: "group1_block0_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    pad: 1
    kernel_size: 3
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group1_block0_conv0_bn"
  type: "BatchNorm"
  bottom: "group1_block0_conv0"
  top: "group1_block0_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group1_block0_conv0_scale"
  type: "Scale"
  bottom: "group1_block0_conv0/bn"
  top: "group1_block0_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group1_block0_conv0_relu"
  type: "ReLU"
  bottom: "group1_block0_conv0/bn"
  top: "group1_block0_conv0/bn"
}
layer {
  name: "group1_block0_conv1"
  type: "Convolution"
  bottom: "group1_block0_conv0/bn"
  top: "group1_block0_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group1_block0_conv1_bn"
  type: "BatchNorm"
  bottom: "group1_block0_conv1"
  top: "group1_block0_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group1_block0_conv1_scale"
  type: "Scale"
  bottom: "group1_block0_conv1/bn"
  top: "group1_block0_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group1_block0_proj"
  type: "Convolution"
  bottom: "group0_block4_sum"
  top: "group1_block0_proj"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    pad: 0
    kernel_size: 1
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group1_block0_proj_bn"
  type: "BatchNorm"
  bottom: "group1_block0_proj"
  top: "group1_block0_proj/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group1_block0_proj_scale"
  type: "Scale"
  bottom: "group1_block0_proj/bn"
  top: "group1_block0_proj/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group1_block0_sum"
  type: "Eltwise"
  bottom: "group1_block0_proj/bn"
  bottom: "group1_block0_conv1/bn"
  top: "group1_block0_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group1_block0_relu"
  type: "ReLU"
  bottom: "group1_block0_sum"
  top: "group1_block0_sum"
}
layer {
  name: "group1_block1_conv0"
  type: "Convolution"
  bottom: "group1_block0_sum"
  top: "group1_block1_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group1_block1_conv0_bn"
  type: "BatchNorm"
  bottom: "group1_block1_conv0"
  top: "group1_block1_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group1_block1_conv0_scale"
  type: "Scale"
  bottom: "group1_block1_conv0/bn"
  top: "group1_block1_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group1_block1_conv0_relu"
  type: "ReLU"
  bottom: "group1_block1_conv0/bn"
  top: "group1_block1_conv0/bn"
}
layer {
  name: "group1_block1_conv1"
  type: "Convolution"
  bottom: "group1_block1_conv0/bn"
  top: "group1_block1_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group1_block1_conv1_bn"
  type: "BatchNorm"
  bottom: "group1_block1_conv1"
  top: "group1_block1_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group1_block1_conv1_scale"
  type: "Scale"
  bottom: "group1_block1_conv1/bn"
  top: "group1_block1_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group1_block1_sum"
  type: "Eltwise"
  bottom: "group1_block1_conv1/bn"
  bottom: "group1_block0_sum"
  top: "group1_block1_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group1_block1_relu"
  type: "ReLU"
  bottom: "group1_block1_sum"
  top: "group1_block1_sum"
}
layer {
  name: "group1_block2_conv0"
  type: "Convolution"
  bottom: "group1_block1_sum"
  top: "group1_block2_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group1_block2_conv0_bn"
  type: "BatchNorm"
  bottom: "group1_block2_conv0"
  top: "group1_block2_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group1_block2_conv0_scale"
  type: "Scale"
  bottom: "group1_block2_conv0/bn"
  top: "group1_block2_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group1_block2_conv0_relu"
  type: "ReLU"
  bottom: "group1_block2_conv0/bn"
  top: "group1_block2_conv0/bn"
}
layer {
  name: "group1_block2_conv1"
  type: "Convolution"
  bottom: "group1_block2_conv0/bn"
  top: "group1_block2_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group1_block2_conv1_bn"
  type: "BatchNorm"
  bottom: "group1_block2_conv1"
  top: "group1_block2_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group1_block2_conv1_scale"
  type: "Scale"
  bottom: "group1_block2_conv1/bn"
  top: "group1_block2_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group1_block2_sum"
  type: "Eltwise"
  bottom: "group1_block2_conv1/bn"
  bottom: "group1_block1_sum"
  top: "group1_block2_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group1_block2_relu"
  type: "ReLU"
  bottom: "group1_block2_sum"
  top: "group1_block2_sum"
}
layer {
  name: "group1_block3_conv0"
  type: "Convolution"
  bottom: "group1_block2_sum"
  top: "group1_block3_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group1_block3_conv0_bn"
  type: "BatchNorm"
  bottom: "group1_block3_conv0"
  top: "group1_block3_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group1_block3_conv0_scale"
  type: "Scale"
  bottom: "group1_block3_conv0/bn"
  top: "group1_block3_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group1_block3_conv0_relu"
  type: "ReLU"
  bottom: "group1_block3_conv0/bn"
  top: "group1_block3_conv0/bn"
}
layer {
  name: "group1_block3_conv1"
  type: "Convolution"
  bottom: "group1_block3_conv0/bn"
  top: "group1_block3_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group1_block3_conv1_bn"
  type: "BatchNorm"
  bottom: "group1_block3_conv1"
  top: "group1_block3_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group1_block3_conv1_scale"
  type: "Scale"
  bottom: "group1_block3_conv1/bn"
  top: "group1_block3_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group1_block3_sum"
  type: "Eltwise"
  bottom: "group1_block3_conv1/bn"
  bottom: "group1_block2_sum"
  top: "group1_block3_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group1_block3_relu"
  type: "ReLU"
  bottom: "group1_block3_sum"
  top: "group1_block3_sum"
}
layer {
  name: "group1_block4_conv0"
  type: "Convolution"
  bottom: "group1_block3_sum"
  top: "group1_block4_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group1_block4_conv0_bn"
  type: "BatchNorm"
  bottom: "group1_block4_conv0"
  top: "group1_block4_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group1_block4_conv0_scale"
  type: "Scale"
  bottom: "group1_block4_conv0/bn"
  top: "group1_block4_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group1_block4_conv0_relu"
  type: "ReLU"
  bottom: "group1_block4_conv0/bn"
  top: "group1_block4_conv0/bn"
}
layer {
  name: "group1_block4_conv1"
  type: "Convolution"
  bottom: "group1_block4_conv0/bn"
  top: "group1_block4_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group1_block4_conv1_bn"
  type: "BatchNorm"
  bottom: "group1_block4_conv1"
  top: "group1_block4_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group1_block4_conv1_scale"
  type: "Scale"
  bottom: "group1_block4_conv1/bn"
  top: "group1_block4_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group1_block4_sum"
  type: "Eltwise"
  bottom: "group1_block4_conv1/bn"
  bottom: "group1_block3_sum"
  top: "group1_block4_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group1_block4_relu"
  type: "ReLU"
  bottom: "group1_block4_sum"
  top: "group1_block4_sum"
}
# ----- group2: 5 residual blocks, 64 channels, 8x8 -----
# block0 downsamples (stride-2 conv0) and uses a stride-2 1x1 projection
# shortcut; blocks 1-4 use identity shortcuts.
layer {
  name: "group2_block0_conv0"
  type: "Convolution"
  bottom: "group1_block4_sum"
  top: "group2_block0_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group2_block0_conv0_bn"
  type: "BatchNorm"
  bottom: "group2_block0_conv0"
  top: "group2_block0_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group2_block0_conv0_scale"
  type: "Scale"
  bottom: "group2_block0_conv0/bn"
  top: "group2_block0_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group2_block0_conv0_relu"
  type: "ReLU"
  bottom: "group2_block0_conv0/bn"
  top: "group2_block0_conv0/bn"
}
layer {
  name: "group2_block0_conv1"
  type: "Convolution"
  bottom: "group2_block0_conv0/bn"
  top: "group2_block0_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group2_block0_conv1_bn"
  type: "BatchNorm"
  bottom: "group2_block0_conv1"
  top: "group2_block0_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group2_block0_conv1_scale"
  type: "Scale"
  bottom: "group2_block0_conv1/bn"
  top: "group2_block0_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group2_block0_proj"
  type: "Convolution"
  bottom: "group1_block4_sum"
  top: "group2_block0_proj"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    pad: 0
    kernel_size: 1
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group2_block0_proj_bn"
  type: "BatchNorm"
  bottom: "group2_block0_proj"
  top: "group2_block0_proj/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group2_block0_proj_scale"
  type: "Scale"
  bottom: "group2_block0_proj/bn"
  top: "group2_block0_proj/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group2_block0_sum"
  type: "Eltwise"
  bottom: "group2_block0_proj/bn"
  bottom: "group2_block0_conv1/bn"
  top: "group2_block0_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group2_block0_relu"
  type: "ReLU"
  bottom: "group2_block0_sum"
  top: "group2_block0_sum"
}
layer {
  name: "group2_block1_conv0"
  type: "Convolution"
  bottom: "group2_block0_sum"
  top: "group2_block1_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group2_block1_conv0_bn"
  type: "BatchNorm"
  bottom: "group2_block1_conv0"
  top: "group2_block1_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group2_block1_conv0_scale"
  type: "Scale"
  bottom: "group2_block1_conv0/bn"
  top: "group2_block1_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group2_block1_conv0_relu"
  type: "ReLU"
  bottom: "group2_block1_conv0/bn"
  top: "group2_block1_conv0/bn"
}
layer {
  name: "group2_block1_conv1"
  type: "Convolution"
  bottom: "group2_block1_conv0/bn"
  top: "group2_block1_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group2_block1_conv1_bn"
  type: "BatchNorm"
  bottom: "group2_block1_conv1"
  top: "group2_block1_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group2_block1_conv1_scale"
  type: "Scale"
  bottom: "group2_block1_conv1/bn"
  top: "group2_block1_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group2_block1_sum"
  type: "Eltwise"
  bottom: "group2_block1_conv1/bn"
  bottom: "group2_block0_sum"
  top: "group2_block1_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group2_block1_relu"
  type: "ReLU"
  bottom: "group2_block1_sum"
  top: "group2_block1_sum"
}
layer {
  name: "group2_block2_conv0"
  type: "Convolution"
  bottom: "group2_block1_sum"
  top: "group2_block2_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group2_block2_conv0_bn"
  type: "BatchNorm"
  bottom: "group2_block2_conv0"
  top: "group2_block2_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group2_block2_conv0_scale"
  type: "Scale"
  bottom: "group2_block2_conv0/bn"
  top: "group2_block2_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group2_block2_conv0_relu"
  type: "ReLU"
  bottom: "group2_block2_conv0/bn"
  top: "group2_block2_conv0/bn"
}
layer {
  name: "group2_block2_conv1"
  type: "Convolution"
  bottom: "group2_block2_conv0/bn"
  top: "group2_block2_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group2_block2_conv1_bn"
  type: "BatchNorm"
  bottom: "group2_block2_conv1"
  top: "group2_block2_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group2_block2_conv1_scale"
  type: "Scale"
  bottom: "group2_block2_conv1/bn"
  top: "group2_block2_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group2_block2_sum"
  type: "Eltwise"
  bottom: "group2_block2_conv1/bn"
  bottom: "group2_block1_sum"
  top: "group2_block2_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group2_block2_relu"
  type: "ReLU"
  bottom: "group2_block2_sum"
  top: "group2_block2_sum"
}
layer {
  name: "group2_block3_conv0"
  type: "Convolution"
  bottom: "group2_block2_sum"
  top: "group2_block3_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group2_block3_conv0_bn"
  type: "BatchNorm"
  bottom: "group2_block3_conv0"
  top: "group2_block3_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group2_block3_conv0_scale"
  type: "Scale"
  bottom: "group2_block3_conv0/bn"
  top: "group2_block3_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group2_block3_conv0_relu"
  type: "ReLU"
  bottom: "group2_block3_conv0/bn"
  top: "group2_block3_conv0/bn"
}
layer {
  name: "group2_block3_conv1"
  type: "Convolution"
  bottom: "group2_block3_conv0/bn"
  top: "group2_block3_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group2_block3_conv1_bn"
  type: "BatchNorm"
  bottom: "group2_block3_conv1"
  top: "group2_block3_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group2_block3_conv1_scale"
  type: "Scale"
  bottom: "group2_block3_conv1/bn"
  top: "group2_block3_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group2_block3_sum"
  type: "Eltwise"
  bottom: "group2_block3_conv1/bn"
  bottom: "group2_block2_sum"
  top: "group2_block3_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group2_block3_relu"
  type: "ReLU"
  bottom: "group2_block3_sum"
  top: "group2_block3_sum"
}
layer {
  name: "group2_block4_conv0"
  type: "Convolution"
  bottom: "group2_block3_sum"
  top: "group2_block4_conv0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group2_block4_conv0_bn"
  type: "BatchNorm"
  bottom: "group2_block4_conv0"
  top: "group2_block4_conv0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group2_block4_conv0_scale"
  type: "Scale"
  bottom: "group2_block4_conv0/bn"
  top: "group2_block4_conv0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group2_block4_conv0_relu"
  type: "ReLU"
  bottom: "group2_block4_conv0/bn"
  top: "group2_block4_conv0/bn"
}
layer {
  name: "group2_block4_conv1"
  type: "Convolution"
  bottom: "group2_block4_conv0/bn"
  top: "group2_block4_conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "group2_block4_conv1_bn"
  type: "BatchNorm"
  bottom: "group2_block4_conv1"
  top: "group2_block4_conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "group2_block4_conv1_scale"
  type: "Scale"
  bottom: "group2_block4_conv1/bn"
  top: "group2_block4_conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "group2_block4_sum"
  type: "Eltwise"
  bottom: "group2_block4_conv1/bn"
  bottom: "group2_block3_sum"
  top: "group2_block4_sum"
  eltwise_param {
    operation: SUM
  }
}
layer {
  name: "group2_block4_relu"
  type: "ReLU"
  bottom: "group2_block4_sum"
  top: "group2_block4_sum"
}
# ----- classifier: global average pool + 10-way FC -----
layer {
  name: "global_avg_pool"
  type: "Pooling"
  bottom: "group2_block4_sum"
  top: "global_avg_pool"
  pooling_param {
    pool: AVE
    global_pooling: true
  }
}
layer {
  name: "fc"
  type: "InnerProduct"
  bottom: "global_avg_pool"
  top: "fc"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  # bias: double learning rate, no weight decay (common Caffe convention)
  param {
    lr_mult: 2.0
    decay_mult: 0.0
  }
  inner_product_param {
    num_output: 10
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
      value: 0.0
    }
  }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment