Eyeriss-VGG-Caffemodel
# Model and mean file: https://drive.google.com/drive/folders/1GF_hyCkw46SVxQhP0NSVB4WJraYzlhUp?usp=sharing
# I0208 21:55:14.949070 18811 caffe.cpp:309] Loss: 1.65299
# I0208 21:55:14.949090 18811 caffe.cpp:321] loss3/loss3 = 1.65299 (* 1 = 1.65299 loss)
# I0208 21:55:14.949101 18811 caffe.cpp:321] loss3/top-1 = 0.607879
# I0208 21:55:14.949111 18811 caffe.cpp:321] loss3/top-5 = 0.838562
#
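# The log above has the format printed by the standard `caffe test` tool, so a
# command of the following form would typically reproduce it. File names here are
# assumptions (use whatever the Google Drive folder ships); 1000 iterations at
# batch_size 50 covers the 50,000 ImageNet validation images:
#   caffe test -model eyeriss.prototxt -weights eyeriss.caffemodel -iterations 1000 -gpu 0
#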
name: "eyeriss" | |
layer { | |
name: "data" | |
type: "ImageData" | |
top: "data" | |
top: "label" | |
transform_param { | |
mirror: false | |
mean_file: "data/ilsvrc12/eyeriss.binaryproto" | |
} | |
image_data_param { | |
source: "data/ilsvrc12/eyeriss.txt" | |
batch_size: 50 | |
new_height: 224 | |
new_width: 224 | |
} | |
} | |
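# Note: Caffe's ImageData layer reads "data/ilsvrc12/eyeriss.txt" as one image per
# line in the form "<path/to/image> <integer label>", resizes each image to
# 224x224, and subtracts the mean stored in eyeriss.binaryproto. The exact list
# and label indexing are assumed to match the files in the Drive folder above.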
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  convolution_param {
    kernel_h: 7
    kernel_w: 7
    num_output: 64
    stride_h: 2
    stride_w: 2
    pad_h: 3
    pad_w: 3
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1x"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1x"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 2
    stride_w: 2
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "norm1"
  type: "LRN"
  bottom: "pool1"
  top: "norm1"
  lrn_param {
    local_size: 5
    k: 1.000000
    alpha: 0.000100
    beta: 0.750000
  }
}
layer {
  name: "reduction2"
  type: "Convolution"
  bottom: "norm1"
  top: "reduction2"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 64
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_reduction2"
  type: "ReLU"
  bottom: "reduction2"
  top: "reduction2x"
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "reduction2x"
  top: "conv2"
  convolution_param {
    kernel_h: 3
    kernel_w: 3
    num_output: 192
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2x"
}
layer {
  name: "norm2"
  type: "LRN"
  bottom: "conv2x"
  top: "norm2"
  lrn_param {
    local_size: 5
    k: 1.000000
    alpha: 0.000100
    beta: 0.750000
  }
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "norm2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 2
    stride_w: 2
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "icp1_reduction1"
  type: "Convolution"
  bottom: "pool2"
  top: "icp1_reduction1"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 96
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp1_reduction1"
  type: "ReLU"
  bottom: "icp1_reduction1"
  top: "icp1_reduction1x"
}
layer {
  name: "icp1_reduction2"
  type: "Convolution"
  bottom: "pool2"
  top: "icp1_reduction2"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 16
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp1_reduction2"
  type: "ReLU"
  bottom: "icp1_reduction2"
  top: "icp1_reduction2x"
}
layer {
  name: "icp1_pool"
  type: "Pooling"
  bottom: "pool2"
  top: "icp1_pool"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "icp1_out0"
  type: "Convolution"
  bottom: "pool2"
  top: "icp1_out0"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 64
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp1_out0"
  type: "ReLU"
  bottom: "icp1_out0"
  top: "icp1_out0x"
}
layer {
  name: "icp1_out1"
  type: "Convolution"
  bottom: "icp1_reduction1x"
  top: "icp1_out1"
  convolution_param {
    kernel_h: 3
    kernel_w: 3
    num_output: 128
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "relu_icp1_out1"
  type: "ReLU"
  bottom: "icp1_out1"
  top: "icp1_out1x"
}
layer {
  name: "icp1_out2"
  type: "Convolution"
  bottom: "icp1_reduction2x"
  top: "icp1_out2"
  convolution_param {
    kernel_h: 5
    kernel_w: 5
    num_output: 32
    stride_h: 1
    stride_w: 1
    pad_h: 2
    pad_w: 2
  }
}
layer {
  name: "relu_icp1_out2"
  type: "ReLU"
  bottom: "icp1_out2"
  top: "icp1_out2x"
}
layer {
  name: "icp1_out3"
  type: "Convolution"
  bottom: "icp1_pool"
  top: "icp1_out3"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 32
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp1_out3"
  type: "ReLU"
  bottom: "icp1_out3"
  top: "icp1_out3x"
}
layer {
  name: "icp2_in"
  type: "Concat"
  bottom: "icp1_out0x"
  bottom: "icp1_out1x"
  bottom: "icp1_out2x"
  bottom: "icp1_out3x"
  top: "icp2_in"
}
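# The Concat above yields 64 + 128 + 32 + 32 = 256 channels; the remaining icp*
# blocks below repeat the same four-branch (1x1 / 3x3 / 5x5 / pooled 1x1
# projection) pattern with different widths.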
layer {
  name: "icp2_reduction1"
  type: "Convolution"
  bottom: "icp2_in"
  top: "icp2_reduction1"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 128
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp2_reduction1"
  type: "ReLU"
  bottom: "icp2_reduction1"
  top: "icp2_reduction1x"
}
layer {
  name: "icp2_reduction2"
  type: "Convolution"
  bottom: "icp2_in"
  top: "icp2_reduction2"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 32
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp2_reduction2"
  type: "ReLU"
  bottom: "icp2_reduction2"
  top: "icp2_reduction2x"
}
layer {
  name: "icp2_pool"
  type: "Pooling"
  bottom: "icp2_in"
  top: "icp2_pool"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "icp2_out0"
  type: "Convolution"
  bottom: "icp2_in"
  top: "icp2_out0"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 128
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp2_out0"
  type: "ReLU"
  bottom: "icp2_out0"
  top: "icp2_out0x"
}
layer {
  name: "icp2_out1"
  type: "Convolution"
  bottom: "icp2_reduction1x"
  top: "icp2_out1"
  convolution_param {
    kernel_h: 3
    kernel_w: 3
    num_output: 192
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "relu_icp2_out1"
  type: "ReLU"
  bottom: "icp2_out1"
  top: "icp2_out1x"
}
layer {
  name: "icp2_out2"
  type: "Convolution"
  bottom: "icp2_reduction2x"
  top: "icp2_out2"
  convolution_param {
    kernel_h: 5
    kernel_w: 5
    num_output: 96
    stride_h: 1
    stride_w: 1
    pad_h: 2
    pad_w: 2
  }
}
layer {
  name: "relu_icp2_out2"
  type: "ReLU"
  bottom: "icp2_out2"
  top: "icp2_out2x"
}
layer {
  name: "icp2_out3"
  type: "Convolution"
  bottom: "icp2_pool"
  top: "icp2_out3"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 64
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp2_out3"
  type: "ReLU"
  bottom: "icp2_out3"
  top: "icp2_out3x"
}
layer {
  name: "icp2_out"
  type: "Concat"
  bottom: "icp2_out0x"
  bottom: "icp2_out1x"
  bottom: "icp2_out2x"
  bottom: "icp2_out3x"
  top: "icp2_out"
}
layer {
  name: "icp3_in"
  type: "Pooling"
  bottom: "icp2_out"
  top: "icp3_in"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 2
    stride_w: 2
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "icp3_reduction1"
  type: "Convolution"
  bottom: "icp3_in"
  top: "icp3_reduction1"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 96
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp3_reduction1"
  type: "ReLU"
  bottom: "icp3_reduction1"
  top: "icp3_reduction1x"
}
layer {
  name: "icp3_reduction2"
  type: "Convolution"
  bottom: "icp3_in"
  top: "icp3_reduction2"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 16
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp3_reduction2"
  type: "ReLU"
  bottom: "icp3_reduction2"
  top: "icp3_reduction2x"
}
layer {
  name: "icp3_pool"
  type: "Pooling"
  bottom: "icp3_in"
  top: "icp3_pool"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "icp3_out0"
  type: "Convolution"
  bottom: "icp3_in"
  top: "icp3_out0"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 192
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp3_out0"
  type: "ReLU"
  bottom: "icp3_out0"
  top: "icp3_out0x"
}
layer {
  name: "icp3_out1"
  type: "Convolution"
  bottom: "icp3_reduction1x"
  top: "icp3_out1"
  convolution_param {
    kernel_h: 3
    kernel_w: 3
    num_output: 208
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "relu_icp3_out1"
  type: "ReLU"
  bottom: "icp3_out1"
  top: "icp3_out1x"
}
layer {
  name: "icp3_out2"
  type: "Convolution"
  bottom: "icp3_reduction2x"
  top: "icp3_out2"
  convolution_param {
    kernel_h: 5
    kernel_w: 5
    num_output: 48
    stride_h: 1
    stride_w: 1
    pad_h: 2
    pad_w: 2
  }
}
layer {
  name: "relu_icp3_out2"
  type: "ReLU"
  bottom: "icp3_out2"
  top: "icp3_out2x"
}
layer {
  name: "icp3_out3"
  type: "Convolution"
  bottom: "icp3_pool"
  top: "icp3_out3"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 64
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp3_out3"
  type: "ReLU"
  bottom: "icp3_out3"
  top: "icp3_out3x"
}
layer {
  name: "icp3_out"
  type: "Concat"
  bottom: "icp3_out0x"
  bottom: "icp3_out1x"
  bottom: "icp3_out2x"
  bottom: "icp3_out3x"
  top: "icp3_out"
}
layer {
  name: "icp4_reduction1"
  type: "Convolution"
  bottom: "icp3_out"
  top: "icp4_reduction1"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 112
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp4_reduction1"
  type: "ReLU"
  bottom: "icp4_reduction1"
  top: "icp4_reduction1x"
}
layer {
  name: "icp4_reduction2"
  type: "Convolution"
  bottom: "icp3_out"
  top: "icp4_reduction2"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 24
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp4_reduction2"
  type: "ReLU"
  bottom: "icp4_reduction2"
  top: "icp4_reduction2x"
}
layer {
  name: "icp4_pool"
  type: "Pooling"
  bottom: "icp3_out"
  top: "icp4_pool"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "icp4_out0"
  type: "Convolution"
  bottom: "icp3_out"
  top: "icp4_out0"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 160
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp4_out0"
  type: "ReLU"
  bottom: "icp4_out0"
  top: "icp4_out0x"
}
layer {
  name: "icp4_out1"
  type: "Convolution"
  bottom: "icp4_reduction1x"
  top: "icp4_out1"
  convolution_param {
    kernel_h: 3
    kernel_w: 3
    num_output: 224
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "relu_icp4_out1"
  type: "ReLU"
  bottom: "icp4_out1"
  top: "icp4_out1x"
}
layer {
  name: "icp4_out2"
  type: "Convolution"
  bottom: "icp4_reduction2x"
  top: "icp4_out2"
  convolution_param {
    kernel_h: 5
    kernel_w: 5
    num_output: 64
    stride_h: 1
    stride_w: 1
    pad_h: 2
    pad_w: 2
  }
}
layer {
  name: "relu_icp4_out2"
  type: "ReLU"
  bottom: "icp4_out2"
  top: "icp4_out2x"
}
layer {
  name: "icp4_out3"
  type: "Convolution"
  bottom: "icp4_pool"
  top: "icp4_out3"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 64
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp4_out3"
  type: "ReLU"
  bottom: "icp4_out3"
  top: "icp4_out3x"
}
layer {
  name: "icp4_out"
  type: "Concat"
  bottom: "icp4_out0x"
  bottom: "icp4_out1x"
  bottom: "icp4_out2x"
  bottom: "icp4_out3x"
  top: "icp4_out"
}
layer {
  name: "icp5_reduction1"
  type: "Convolution"
  bottom: "icp4_out"
  top: "icp5_reduction1"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 128
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp5_reduction1"
  type: "ReLU"
  bottom: "icp5_reduction1"
  top: "icp5_reduction1x"
}
layer {
  name: "icp5_reduction2"
  type: "Convolution"
  bottom: "icp4_out"
  top: "icp5_reduction2"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 24
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp5_reduction2"
  type: "ReLU"
  bottom: "icp5_reduction2"
  top: "icp5_reduction2x"
}
layer {
  name: "icp5_pool"
  type: "Pooling"
  bottom: "icp4_out"
  top: "icp5_pool"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "icp5_out0"
  type: "Convolution"
  bottom: "icp4_out"
  top: "icp5_out0"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 128
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp5_out0"
  type: "ReLU"
  bottom: "icp5_out0"
  top: "icp5_out0x"
}
layer {
  name: "icp5_out1"
  type: "Convolution"
  bottom: "icp5_reduction1x"
  top: "icp5_out1"
  convolution_param {
    kernel_h: 3
    kernel_w: 3
    num_output: 256
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "relu_icp5_out1"
  type: "ReLU"
  bottom: "icp5_out1"
  top: "icp5_out1x"
}
layer {
  name: "icp5_out2"
  type: "Convolution"
  bottom: "icp5_reduction2x"
  top: "icp5_out2"
  convolution_param {
    kernel_h: 5
    kernel_w: 5
    num_output: 64
    stride_h: 1
    stride_w: 1
    pad_h: 2
    pad_w: 2
  }
}
layer {
  name: "relu_icp5_out2"
  type: "ReLU"
  bottom: "icp5_out2"
  top: "icp5_out2x"
}
layer {
  name: "icp5_out3"
  type: "Convolution"
  bottom: "icp5_pool"
  top: "icp5_out3"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 64
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp5_out3"
  type: "ReLU"
  bottom: "icp5_out3"
  top: "icp5_out3x"
}
layer {
  name: "icp5_out"
  type: "Concat"
  bottom: "icp5_out0x"
  bottom: "icp5_out1x"
  bottom: "icp5_out2x"
  bottom: "icp5_out3x"
  top: "icp5_out"
}
layer {
  name: "icp6_reduction1"
  type: "Convolution"
  bottom: "icp5_out"
  top: "icp6_reduction1"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 144
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp6_reduction1"
  type: "ReLU"
  bottom: "icp6_reduction1"
  top: "icp6_reduction1x"
}
layer {
  name: "icp6_reduction2"
  type: "Convolution"
  bottom: "icp5_out"
  top: "icp6_reduction2"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 32
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp6_reduction2"
  type: "ReLU"
  bottom: "icp6_reduction2"
  top: "icp6_reduction2x"
}
layer {
  name: "icp6_pool"
  type: "Pooling"
  bottom: "icp5_out"
  top: "icp6_pool"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "icp6_out0"
  type: "Convolution"
  bottom: "icp5_out"
  top: "icp6_out0"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 112
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp6_out0"
  type: "ReLU"
  bottom: "icp6_out0"
  top: "icp6_out0x"
}
layer {
  name: "icp6_out1"
  type: "Convolution"
  bottom: "icp6_reduction1x"
  top: "icp6_out1"
  convolution_param {
    kernel_h: 3
    kernel_w: 3
    num_output: 288
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "relu_icp6_out1"
  type: "ReLU"
  bottom: "icp6_out1"
  top: "icp6_out1x"
}
layer {
  name: "icp6_out2"
  type: "Convolution"
  bottom: "icp6_reduction2x"
  top: "icp6_out2"
  convolution_param {
    kernel_h: 5
    kernel_w: 5
    num_output: 64
    stride_h: 1
    stride_w: 1
    pad_h: 2
    pad_w: 2
  }
}
layer {
  name: "relu_icp6_out2"
  type: "ReLU"
  bottom: "icp6_out2"
  top: "icp6_out2x"
}
layer {
  name: "icp6_out3"
  type: "Convolution"
  bottom: "icp6_pool"
  top: "icp6_out3"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 64
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp6_out3"
  type: "ReLU"
  bottom: "icp6_out3"
  top: "icp6_out3x"
}
layer {
  name: "icp6_out"
  type: "Concat"
  bottom: "icp6_out0x"
  bottom: "icp6_out1x"
  bottom: "icp6_out2x"
  bottom: "icp6_out3x"
  top: "icp6_out"
}
layer {
  name: "icp7_reduction1"
  type: "Convolution"
  bottom: "icp6_out"
  top: "icp7_reduction1"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 160
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp7_reduction1"
  type: "ReLU"
  bottom: "icp7_reduction1"
  top: "icp7_reduction1x"
}
layer {
  name: "icp7_reduction2"
  type: "Convolution"
  bottom: "icp6_out"
  top: "icp7_reduction2"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 32
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp7_reduction2"
  type: "ReLU"
  bottom: "icp7_reduction2"
  top: "icp7_reduction2x"
}
layer {
  name: "icp7_pool"
  type: "Pooling"
  bottom: "icp6_out"
  top: "icp7_pool"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "icp7_out0"
  type: "Convolution"
  bottom: "icp6_out"
  top: "icp7_out0"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 256
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp7_out0"
  type: "ReLU"
  bottom: "icp7_out0"
  top: "icp7_out0x"
}
layer {
  name: "icp7_out1"
  type: "Convolution"
  bottom: "icp7_reduction1x"
  top: "icp7_out1"
  convolution_param {
    kernel_h: 3
    kernel_w: 3
    num_output: 320
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "relu_icp7_out1"
  type: "ReLU"
  bottom: "icp7_out1"
  top: "icp7_out1x"
}
layer {
  name: "icp7_out2"
  type: "Convolution"
  bottom: "icp7_reduction2x"
  top: "icp7_out2"
  convolution_param {
    kernel_h: 5
    kernel_w: 5
    num_output: 128
    stride_h: 1
    stride_w: 1
    pad_h: 2
    pad_w: 2
  }
}
layer {
  name: "relu_icp7_out2"
  type: "ReLU"
  bottom: "icp7_out2"
  top: "icp7_out2x"
}
layer {
  name: "icp7_out3"
  type: "Convolution"
  bottom: "icp7_pool"
  top: "icp7_out3"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 128
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp7_out3"
  type: "ReLU"
  bottom: "icp7_out3"
  top: "icp7_out3x"
}
layer {
  name: "icp7_out"
  type: "Concat"
  bottom: "icp7_out0x"
  bottom: "icp7_out1x"
  bottom: "icp7_out2x"
  bottom: "icp7_out3x"
  top: "icp7_out"
}
layer {
  name: "icp8_in"
  type: "Pooling"
  bottom: "icp7_out"
  top: "icp8_in"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 2
    stride_w: 2
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "icp8_reduction1"
  type: "Convolution"
  bottom: "icp8_in"
  top: "icp8_reduction1"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 160
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp8_reduction1"
  type: "ReLU"
  bottom: "icp8_reduction1"
  top: "icp8_reduction1x"
}
layer {
  name: "icp8_reduction2"
  type: "Convolution"
  bottom: "icp8_in"
  top: "icp8_reduction2"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 32
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp8_reduction2"
  type: "ReLU"
  bottom: "icp8_reduction2"
  top: "icp8_reduction2x"
}
layer {
  name: "icp8_pool"
  type: "Pooling"
  bottom: "icp8_in"
  top: "icp8_pool"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "icp8_out0"
  type: "Convolution"
  bottom: "icp8_in"
  top: "icp8_out0"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 256
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp8_out0"
  type: "ReLU"
  bottom: "icp8_out0"
  top: "icp8_out0x"
}
layer {
  name: "icp8_out1"
  type: "Convolution"
  bottom: "icp8_reduction1x"
  top: "icp8_out1"
  convolution_param {
    kernel_h: 3
    kernel_w: 3
    num_output: 320
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "relu_icp8_out1"
  type: "ReLU"
  bottom: "icp8_out1"
  top: "icp8_out1x"
}
layer {
  name: "icp8_out2"
  type: "Convolution"
  bottom: "icp8_reduction2x"
  top: "icp8_out2"
  convolution_param {
    kernel_h: 5
    kernel_w: 5
    num_output: 128
    stride_h: 1
    stride_w: 1
    pad_h: 2
    pad_w: 2
  }
}
layer {
  name: "relu_icp8_out2"
  type: "ReLU"
  bottom: "icp8_out2"
  top: "icp8_out2x"
}
layer {
  name: "icp8_out3"
  type: "Convolution"
  bottom: "icp8_pool"
  top: "icp8_out3"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 128
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp8_out3"
  type: "ReLU"
  bottom: "icp8_out3"
  top: "icp8_out3x"
}
layer {
  name: "icp8_out"
  type: "Concat"
  bottom: "icp8_out0x"
  bottom: "icp8_out1x"
  bottom: "icp8_out2x"
  bottom: "icp8_out3x"
  top: "icp8_out"
}
layer {
  name: "icp9_reduction1"
  type: "Convolution"
  bottom: "icp8_out"
  top: "icp9_reduction1"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 192
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp9_reduction1"
  type: "ReLU"
  bottom: "icp9_reduction1"
  top: "icp9_reduction1x"
}
layer {
  name: "icp9_reduction2"
  type: "Convolution"
  bottom: "icp8_out"
  top: "icp9_reduction2"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 48
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp9_reduction2"
  type: "ReLU"
  bottom: "icp9_reduction2"
  top: "icp9_reduction2x"
}
layer {
  name: "icp9_pool"
  type: "Pooling"
  bottom: "icp8_out"
  top: "icp9_pool"
  pooling_param {
    pool: MAX
    kernel_h: 3
    kernel_w: 3
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "icp9_out0"
  type: "Convolution"
  bottom: "icp8_out"
  top: "icp9_out0"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 384
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp9_out0"
  type: "ReLU"
  bottom: "icp9_out0"
  top: "icp9_out0x"
}
layer {
  name: "icp9_out1"
  type: "Convolution"
  bottom: "icp9_reduction1x"
  top: "icp9_out1"
  convolution_param {
    kernel_h: 3
    kernel_w: 3
    num_output: 384
    stride_h: 1
    stride_w: 1
    pad_h: 1
    pad_w: 1
  }
}
layer {
  name: "relu_icp9_out1"
  type: "ReLU"
  bottom: "icp9_out1"
  top: "icp9_out1x"
}
layer {
  name: "icp9_out2"
  type: "Convolution"
  bottom: "icp9_reduction2x"
  top: "icp9_out2"
  convolution_param {
    kernel_h: 5
    kernel_w: 5
    num_output: 128
    stride_h: 1
    stride_w: 1
    pad_h: 2
    pad_w: 2
  }
}
layer {
  name: "relu_icp9_out2"
  type: "ReLU"
  bottom: "icp9_out2"
  top: "icp9_out2x"
}
layer {
  name: "icp9_out3"
  type: "Convolution"
  bottom: "icp9_pool"
  top: "icp9_out3"
  convolution_param {
    kernel_h: 1
    kernel_w: 1
    num_output: 128
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "relu_icp9_out3"
  type: "ReLU"
  bottom: "icp9_out3"
  top: "icp9_out3x"
}
layer {
  name: "icp9_out"
  type: "Concat"
  bottom: "icp9_out0x"
  bottom: "icp9_out1x"
  bottom: "icp9_out2x"
  bottom: "icp9_out3x"
  top: "icp9_out"
}
layer {
  name: "cls3_pool"
  type: "Pooling"
  bottom: "icp9_out"
  top: "cls3_pool"
  pooling_param {
    pool: AVE
    kernel_h: 7
    kernel_w: 7
    stride_h: 1
    stride_w: 1
    pad_h: 0
    pad_w: 0
  }
}
layer {
  name: "cls3_fc"
  type: "InnerProduct"
  bottom: "cls3_pool"
  top: "cls3_fc"
  inner_product_param {
    num_output: 1000
  }
}
#layer {
#  name: "prob"
#  type: "Softmax"
#  bottom: "cls3_fc"
#  top: "prob"
#}
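# The Softmax layer above is left commented out because this net is wired for
# evaluation (SoftmaxWithLoss + Accuracy below). For inference-only use, one
# would typically re-enable "prob" and drop the loss/accuracy layers in a
# separate deploy prototxt; such a file is not part of this gist.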
layer {
  name: "loss3/loss3"
  type: "SoftmaxWithLoss"
  bottom: "cls3_fc"
  bottom: "label"
  top: "loss3/loss3"
  loss_weight: 1
}
layer {
  name: "loss3/top-1"
  type: "Accuracy"
  bottom: "cls3_fc"
  bottom: "label"
  top: "loss3/top-1"
  include {
    phase: TEST
  }
}
layer {
  name: "loss3/top-5"
  type: "Accuracy"
  bottom: "cls3_fc"
  bottom: "label"
  top: "loss3/top-5"
  include {
    phase: TEST
  }
  accuracy_param {
    top_k: 5
  }
}