name: "mobilenetv1_224_1.0" | |
layer { | |
name: "data" | |
type: "Data" | |
top: "data" | |
top: "label" | |
include { | |
phase: TRAIN | |
} | |
transform_param { | |
scale: 0.0170000009239 | |
mirror: true | |
crop_size: 224 | |
mean_value: 104.0 | |
mean_value: 117.0 | |
mean_value: 123.0 | |
} | |
data_param { | |
source: "/mnt/disk1/zhibin/experiment_data/imagenet/caffe_lmdb/ilsvrc12_encoded_train_lmdb" | |
batch_size: 32 | |
backend: LMDB | |
} | |
} | |
layer { | |
name: "data" | |
type: "Data" | |
top: "data" | |
top: "label" | |
include { | |
phase: TEST | |
} | |
transform_param { | |
scale: 0.0170000009239 | |
mirror: false | |
crop_size: 224 | |
mean_value: 104.0 | |
mean_value: 117.0 | |
mean_value: 123.0 | |
} | |
data_param { | |
source: "/mnt/disk1/zhibin/experiment_data/imagenet/caffe_lmdb/ilsvrc12_encoded_val_lmdb" | |
batch_size: 32 | |
backend: LMDB | |
} | |
} | |
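# The two Data layers above read ImageNet LMDBs from machine-specific paths and would
# need to be repointed on other setups. The transform (scale ~0.017 applied after
# subtracting the per-channel means 104/117/123) is the preprocessing commonly used for
# Caffe MobileNet models, i.e. roughly (pixel - mean) / 58.8 per channel.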
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 1
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv1/bn"
  type: "BatchNorm"
  bottom: "conv1"
  top: "conv1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv1/scale"
  type: "Scale"
  bottom: "conv1/bn"
  top: "conv1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv1/relu"
  type: "ReLU"
  bottom: "conv1/bn"
  top: "conv1/bn"
}
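# conv1 above is an ordinary 3x3 stride-2 convolution; every block that follows is a
# depthwise-separable unit: a 3x3 depthwise convolution (group == num_output) and a
# 1x1 pointwise convolution, each followed by BatchNorm + Scale + ReLU. The BatchNorm
# params carry lr_mult: 0 because Caffe's BatchNorm blobs (mean, variance and the
# moving-average factor) are maintained from running statistics rather than learned by
# the solver; the learnable affine part lives in the separate Scale layer.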
layer {
  name: "conv2/dw"
  type: "Convolution"
  bottom: "conv1/bn"
  top: "conv2/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 32
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 32
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv2/dw/bn"
  type: "BatchNorm"
  bottom: "conv2/dw"
  top: "conv2/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv2/dw/scale"
  type: "Scale"
  bottom: "conv2/dw/bn"
  top: "conv2/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv2/dw/relu"
  type: "ReLU"
  bottom: "conv2/dw/bn"
  top: "conv2/dw/bn"
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "conv2/dw/bn"
  top: "conv2"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv2/bn"
  type: "BatchNorm"
  bottom: "conv2"
  top: "conv2/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv2/scale"
  type: "Scale"
  bottom: "conv2/bn"
  top: "conv2/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv2/relu"
  type: "ReLU"
  bottom: "conv2/bn"
  top: "conv2/bn"
}
layer {
  name: "conv3/dw"
  type: "Convolution"
  bottom: "conv2/bn"
  top: "conv3/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 64
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 64
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv3/dw/bn"
  type: "BatchNorm"
  bottom: "conv3/dw"
  top: "conv3/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv3/dw/scale"
  type: "Scale"
  bottom: "conv3/dw/bn"
  top: "conv3/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv3/dw/relu"
  type: "ReLU"
  bottom: "conv3/dw/bn"
  top: "conv3/dw/bn"
}
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "conv3/dw/bn"
  top: "conv3"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 128
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv3/bn"
  type: "BatchNorm"
  bottom: "conv3"
  top: "conv3/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv3/scale"
  type: "Scale"
  bottom: "conv3/bn"
  top: "conv3/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv3/relu"
  type: "ReLU"
  bottom: "conv3/bn"
  top: "conv3/bn"
}
layer {
  name: "conv4/dw"
  type: "Convolution"
  bottom: "conv3/bn"
  top: "conv4/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 128
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 128
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv4/dw/bn"
  type: "BatchNorm"
  bottom: "conv4/dw"
  top: "conv4/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv4/dw/scale"
  type: "Scale"
  bottom: "conv4/dw/bn"
  top: "conv4/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv4/dw/relu"
  type: "ReLU"
  bottom: "conv4/dw/bn"
  top: "conv4/dw/bn"
}
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv4/dw/bn"
  top: "conv4"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 128
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv4/bn"
  type: "BatchNorm"
  bottom: "conv4"
  top: "conv4/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv4/scale"
  type: "Scale"
  bottom: "conv4/bn"
  top: "conv4/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv4/relu"
  type: "ReLU"
  bottom: "conv4/bn"
  top: "conv4/bn"
}
layer {
  name: "conv5/dw"
  type: "Convolution"
  bottom: "conv4/bn"
  top: "conv5/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 128
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 128
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv5/dw/bn"
  type: "BatchNorm"
  bottom: "conv5/dw"
  top: "conv5/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv5/dw/scale"
  type: "Scale"
  bottom: "conv5/dw/bn"
  top: "conv5/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv5/dw/relu"
  type: "ReLU"
  bottom: "conv5/dw/bn"
  top: "conv5/dw/bn"
}
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "conv5/dw/bn"
  top: "conv5"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 256
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv5/bn"
  type: "BatchNorm"
  bottom: "conv5"
  top: "conv5/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv5/scale"
  type: "Scale"
  bottom: "conv5/bn"
  top: "conv5/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv5/relu"
  type: "ReLU"
  bottom: "conv5/bn"
  top: "conv5/bn"
}
layer {
  name: "conv6/dw"
  type: "Convolution"
  bottom: "conv5/bn"
  top: "conv6/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 256
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 256
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv6/dw/bn"
  type: "BatchNorm"
  bottom: "conv6/dw"
  top: "conv6/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv6/dw/scale"
  type: "Scale"
  bottom: "conv6/dw/bn"
  top: "conv6/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv6/dw/relu"
  type: "ReLU"
  bottom: "conv6/dw/bn"
  top: "conv6/dw/bn"
}
layer {
  name: "conv6"
  type: "Convolution"
  bottom: "conv6/dw/bn"
  top: "conv6"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 256
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv6/bn"
  type: "BatchNorm"
  bottom: "conv6"
  top: "conv6/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv6/scale"
  type: "Scale"
  bottom: "conv6/bn"
  top: "conv6/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv6/relu"
  type: "ReLU"
  bottom: "conv6/bn"
  top: "conv6/bn"
}
layer {
  name: "conv7/dw"
  type: "Convolution"
  bottom: "conv6/bn"
  top: "conv7/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 256
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 256
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv7/dw/bn"
  type: "BatchNorm"
  bottom: "conv7/dw"
  top: "conv7/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv7/dw/scale"
  type: "Scale"
  bottom: "conv7/dw/bn"
  top: "conv7/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv7/dw/relu"
  type: "ReLU"
  bottom: "conv7/dw/bn"
  top: "conv7/dw/bn"
}
layer {
  name: "conv7"
  type: "Convolution"
  bottom: "conv7/dw/bn"
  top: "conv7"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv7/bn"
  type: "BatchNorm"
  bottom: "conv7"
  top: "conv7/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv7/scale"
  type: "Scale"
  bottom: "conv7/bn"
  top: "conv7/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv7/relu"
  type: "ReLU"
  bottom: "conv7/bn"
  top: "conv7/bn"
}
layer {
  name: "conv8/0/dw"
  type: "Convolution"
  bottom: "conv7/bn"
  top: "conv8/0/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 512
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv8/0/dw/bn"
  type: "BatchNorm"
  bottom: "conv8/0/dw"
  top: "conv8/0/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv8/0/dw/scale"
  type: "Scale"
  bottom: "conv8/0/dw/bn"
  top: "conv8/0/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv8/0/dw/relu"
  type: "ReLU"
  bottom: "conv8/0/dw/bn"
  top: "conv8/0/dw/bn"
}
layer {
  name: "conv8/0"
  type: "Convolution"
  bottom: "conv8/0/dw/bn"
  top: "conv8/0"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv8/0/bn"
  type: "BatchNorm"
  bottom: "conv8/0"
  top: "conv8/0/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv8/0/scale"
  type: "Scale"
  bottom: "conv8/0/bn"
  top: "conv8/0/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv8/0/relu"
  type: "ReLU"
  bottom: "conv8/0/bn"
  top: "conv8/0/bn"
}
layer {
  name: "conv8/1/dw"
  type: "Convolution"
  bottom: "conv8/0/bn"
  top: "conv8/1/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 512
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv8/1/dw/bn"
  type: "BatchNorm"
  bottom: "conv8/1/dw"
  top: "conv8/1/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv8/1/dw/scale"
  type: "Scale"
  bottom: "conv8/1/dw/bn"
  top: "conv8/1/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv8/1/dw/relu"
  type: "ReLU"
  bottom: "conv8/1/dw/bn"
  top: "conv8/1/dw/bn"
}
layer {
  name: "conv8/1"
  type: "Convolution"
  bottom: "conv8/1/dw/bn"
  top: "conv8/1"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv8/1/bn"
  type: "BatchNorm"
  bottom: "conv8/1"
  top: "conv8/1/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv8/1/scale"
  type: "Scale"
  bottom: "conv8/1/bn"
  top: "conv8/1/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv8/1/relu"
  type: "ReLU"
  bottom: "conv8/1/bn"
  top: "conv8/1/bn"
}
layer {
  name: "conv8/2/dw"
  type: "Convolution"
  bottom: "conv8/1/bn"
  top: "conv8/2/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 512
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv8/2/dw/bn"
  type: "BatchNorm"
  bottom: "conv8/2/dw"
  top: "conv8/2/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv8/2/dw/scale"
  type: "Scale"
  bottom: "conv8/2/dw/bn"
  top: "conv8/2/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv8/2/dw/relu"
  type: "ReLU"
  bottom: "conv8/2/dw/bn"
  top: "conv8/2/dw/bn"
}
layer {
  name: "conv8/2"
  type: "Convolution"
  bottom: "conv8/2/dw/bn"
  top: "conv8/2"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv8/2/bn"
  type: "BatchNorm"
  bottom: "conv8/2"
  top: "conv8/2/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv8/2/scale"
  type: "Scale"
  bottom: "conv8/2/bn"
  top: "conv8/2/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv8/2/relu"
  type: "ReLU"
  bottom: "conv8/2/bn"
  top: "conv8/2/bn"
}
layer {
  name: "conv8/3/dw"
  type: "Convolution"
  bottom: "conv8/2/bn"
  top: "conv8/3/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 512
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv8/3/dw/bn"
  type: "BatchNorm"
  bottom: "conv8/3/dw"
  top: "conv8/3/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv8/3/dw/scale"
  type: "Scale"
  bottom: "conv8/3/dw/bn"
  top: "conv8/3/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv8/3/dw/relu"
  type: "ReLU"
  bottom: "conv8/3/dw/bn"
  top: "conv8/3/dw/bn"
}
layer {
  name: "conv8/3"
  type: "Convolution"
  bottom: "conv8/3/dw/bn"
  top: "conv8/3"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv8/3/bn"
  type: "BatchNorm"
  bottom: "conv8/3"
  top: "conv8/3/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv8/3/scale"
  type: "Scale"
  bottom: "conv8/3/bn"
  top: "conv8/3/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv8/3/relu"
  type: "ReLU"
  bottom: "conv8/3/bn"
  top: "conv8/3/bn"
}
layer {
  name: "conv8/4/dw"
  type: "Convolution"
  bottom: "conv8/3/bn"
  top: "conv8/4/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 512
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv8/4/dw/bn"
  type: "BatchNorm"
  bottom: "conv8/4/dw"
  top: "conv8/4/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv8/4/dw/scale"
  type: "Scale"
  bottom: "conv8/4/dw/bn"
  top: "conv8/4/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv8/4/dw/relu"
  type: "ReLU"
  bottom: "conv8/4/dw/bn"
  top: "conv8/4/dw/bn"
}
layer {
  name: "conv8/4"
  type: "Convolution"
  bottom: "conv8/4/dw/bn"
  top: "conv8/4"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv8/4/bn"
  type: "BatchNorm"
  bottom: "conv8/4"
  top: "conv8/4/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv8/4/scale"
  type: "Scale"
  bottom: "conv8/4/bn"
  top: "conv8/4/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv8/4/relu"
  type: "ReLU"
  bottom: "conv8/4/bn"
  top: "conv8/4/bn"
}
layer {
  name: "conv9/dw"
  type: "Convolution"
  bottom: "conv8/4/bn"
  top: "conv9/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 512
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 512
    stride: 2
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv9/dw/bn"
  type: "BatchNorm"
  bottom: "conv9/dw"
  top: "conv9/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv9/dw/scale"
  type: "Scale"
  bottom: "conv9/dw/bn"
  top: "conv9/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv9/dw/relu"
  type: "ReLU"
  bottom: "conv9/dw/bn"
  top: "conv9/dw/bn"
}
layer {
  name: "conv9"
  type: "Convolution"
  bottom: "conv9/dw/bn"
  top: "conv9"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 1024
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv9/bn"
  type: "BatchNorm"
  bottom: "conv9"
  top: "conv9/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv9/scale"
  type: "Scale"
  bottom: "conv9/bn"
  top: "conv9/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv9/relu"
  type: "ReLU"
  bottom: "conv9/bn"
  top: "conv9/bn"
}
layer {
  name: "conv10/dw"
  type: "Convolution"
  bottom: "conv9/bn"
  top: "conv10/dw"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 1024
    bias_term: false
    pad: 1
    kernel_size: 3
    group: 1024
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv10/dw/bn"
  type: "BatchNorm"
  bottom: "conv10/dw"
  top: "conv10/dw/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv10/dw/scale"
  type: "Scale"
  bottom: "conv10/dw/bn"
  top: "conv10/dw/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv10/dw/relu"
  type: "ReLU"
  bottom: "conv10/dw/bn"
  top: "conv10/dw/bn"
}
layer {
  name: "conv10"
  type: "Convolution"
  bottom: "conv10/dw/bn"
  top: "conv10"
  param {
    lr_mult: 1.0
    decay_mult: 1.0
  }
  convolution_param {
    num_output: 1024
    bias_term: false
    pad: 0
    kernel_size: 1
    group: 1
    stride: 1
    weight_filler {
      type: "msra"
    }
  }
}
layer {
  name: "conv10/bn"
  type: "BatchNorm"
  bottom: "conv10"
  top: "conv10/bn"
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
  param {
    lr_mult: 0.0
    decay_mult: 0.0
  }
}
layer {
  name: "conv10/scale"
  type: "Scale"
  bottom: "conv10/bn"
  top: "conv10/bn"
  scale_param {
    bias_term: true
  }
}
layer {
  name: "conv10/relu"
  type: "ReLU"
  bottom: "conv10/bn"
  top: "conv10/bn"
}
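# Classifier head: global average pooling over the 7x7x1024 feature map, a 1000-way
# fully connected layer, softmax loss for training, and top-1 / top-5 accuracy layers
# for evaluation.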
layer {
  name: "pool11"
  type: "Pooling"
  bottom: "conv10/bn"
  top: "pool11"
  pooling_param {
    pool: AVE
    global_pooling: true
  }
}
layer {
  name: "fc12"
  type: "InnerProduct"
  bottom: "pool11"
  top: "fc12"
  inner_product_param {
    num_output: 1000
  }
}
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "fc12"
  bottom: "label"
  top: "loss/loss"
}
layer {
  name: "accuracy/top1"
  type: "Accuracy"
  bottom: "fc12"
  bottom: "label"
  top: "acc@1"
  accuracy_param {
    top_k: 1
  }
}
layer {
  name: "accuracy/top5"
  type: "Accuracy"
  bottom: "fc12"
  bottom: "label"
  top: "acc@5"
  accuracy_param {
    top_k: 5
  }
}
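
To train this network, the prototxt above is paired with a Caffe solver definition. The sketch below is a minimal, illustrative solver, assuming the network above is saved as mobilenet_train_val.prototxt; the learning-rate schedule, iteration counts, and snapshot prefix are placeholders rather than settings taken from this gist.

net: "mobilenet_train_val.prototxt"   # hypothetical filename for the network definition above
test_iter: 1562                       # ~50000 validation images / batch_size 32
test_interval: 10000
base_lr: 0.1
lr_policy: "poly"
power: 1.0
max_iter: 600000
momentum: 0.9
weight_decay: 0.00004
display: 100
snapshot: 20000
snapshot_prefix: "snapshots/mobilenetv1_224_1.0"
solver_mode: GPU

Training would then typically be launched with: caffe train -solver mobilenet_solver.prototxt (the solver filename is likewise arbitrary).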