train_val.prototxt
name: "LeNet-HWGQ" | |
layer { | |
name: "mnist" | |
type: "Data" | |
top: "data" | |
top: "label" | |
include { | |
phase: TRAIN | |
} | |
transform_param { | |
scale: 0.00390625 | |
} | |
data_param { | |
source: "../mnist_train_lmdb" | |
batch_size: 64 | |
backend: LMDB | |
} | |
} | |
layer { | |
name: "mnist" | |
type: "Data" | |
top: "data" | |
top: "label" | |
include { | |
phase: TEST | |
} | |
transform_param { | |
scale: 0.00390625 | |
} | |
data_param { | |
source: "../mnist_test_lmdb" | |
batch_size: 100 | |
backend: LMDB | |
} | |
} | |
layer { | |
name: "conv1" | |
type: "BinaryConvolution" | |
bottom: "data" | |
top: "conv1" | |
param { | |
lr_mult: 1 | |
decay_mult: 1 | |
} | |
convolution_param { | |
num_output: 20 | |
kernel_size: 5 | |
bias_term: false | |
weight_filler { | |
type: "gaussian" | |
std: 0.01 | |
} | |
} | |
binary_convolution_param { | |
use_alpha: true | |
} | |
} | |
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  # BatchNorm statistics (running mean, running variance, moving-average factor)
  # are updated in the forward pass, so all three parameter blobs use lr_mult: 0.
  name: "bn1"
  type: "BatchNorm"
  bottom: "pool1"
  top: "pool1"
  param {
    lr_mult: 0
  }
  param {
    lr_mult: 0
  }
  param {
    lr_mult: 0
  }
  batch_norm_param {
    moving_average_fraction: 0.95
  }
}
layer {
  # HWGQ activation quantizer: forward maps pre-activations onto the listed
  # centers (2-bit, plus zero); backward uses a ReLU-shaped surrogate gradient
  # (sketched after the prototxt).
  name: "qt1"
  type: "Quant"
  bottom: "pool1"
  top: "qt1"
  quant_param {
    forward_func: "hwgq"
    backward_func: "relu"
    centers: 0.538 centers: 1.076 centers: 1.614
    clip_thr: 1.614
  }
}
layer {
  name: "conv2"
  type: "BinaryConvolution"
  bottom: "qt1"
  top: "conv2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  convolution_param {
    num_output: 50
    kernel_size: 5
    bias_term: false
    weight_filler {
      type: "gaussian"
      std: 0.01
    }
  }
  binary_convolution_param {
    use_alpha: true
  }
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "bn2"
  type: "BatchNorm"
  bottom: "pool2"
  top: "pool2"
  param {
    lr_mult: 0
  }
  param {
    lr_mult: 0
  }
  param {
    lr_mult: 0
  }
  batch_norm_param {
    moving_average_fraction: 0.95
  }
}
layer {
  name: "qt2"
  type: "Quant"
  bottom: "pool2"
  top: "qt2"
  quant_param {
    forward_func: "hwgq"
    backward_func: "relu"
    centers: 0.538 centers: 1.076 centers: 1.614
    clip_thr: 1.614
  }
}
layer {
  # Binary-weight fully connected layer.
  name: "ip1"
  type: "BinaryInnerProduct"
  bottom: "qt2"
  top: "ip1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  inner_product_param {
    num_output: 500
    bias_term: false
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
  }
  binary_inner_product_param {
    use_alpha: true
  }
}
layer {
  name: "bn3"
  type: "BatchNorm"
  bottom: "ip1"
  top: "ip1"
  param {
    lr_mult: 0
  }
  param {
    lr_mult: 0
  }
  param {
    lr_mult: 0
  }
  batch_norm_param {
    moving_average_fraction: 0.95
  }
}
layer {
  name: "qt3"
  type: "Quant"
  bottom: "ip1"
  top: "qt3"
  quant_param {
    forward_func: "hwgq"
    backward_func: "relu"
    centers: 0.538 centers: 1.076 centers: 1.614
    clip_thr: 1.614
  }
}
layer {
  # Final classifier layer (10 MNIST classes); keeps a bias term, unlike the earlier layers.
  name: "ip2"
  type: "BinaryInnerProduct"
  bottom: "qt3"
  top: "ip2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  inner_product_param {
    num_output: 10
    weight_filler {
      type: "gaussian"
      std: 0.005
    }
    bias_filler {
      type: "constant"
      value: 0
    }
  }
  binary_inner_product_param {
    use_alpha: true
  }
}
layer {
  name: "accuracy"
  type: "Accuracy"
  bottom: "ip2"
  bottom: "label"
  top: "accuracy"
  include {
    phase: TEST
  }
}
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "ip2"
  bottom: "label"
  top: "loss"
}
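For readers unfamiliar with the binary-weight layers: `use_alpha: true` in the BinaryConvolution and BinaryInnerProduct layers indicates that the binarized weights are rescaled by a real-valued factor. Below is a minimal NumPy sketch of that idea, assuming XNOR-Net-style per-filter scaling (the sign of the weights times their mean absolute value); the actual layer implementation may compute alpha differently, and `binarize_weights` is an illustrative helper, not a function from the HWGQ codebase.

import numpy as np

def binarize_weights(W, use_alpha=True):
    """Approximate a real-valued filter bank with binary weights.

    W: array of shape (num_output, channels, kh, kw), e.g. (20, 1, 5, 5) for conv1.
    Assumption: alpha is the mean absolute value per output filter
    (XNOR-Net-style scaling); the actual BinaryConvolution layer may differ.
    """
    Wb = np.sign(W).astype(np.float64)
    Wb[Wb == 0] = 1.0  # treat exact zeros as +1
    if use_alpha:
        alpha = np.abs(W).reshape(W.shape[0], -1).mean(axis=1)
        Wb = Wb * alpha[:, None, None, None]
    return Wb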
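The quant_param blocks of qt1-qt3 describe a 2-bit half-wave Gaussian quantizer: forward_func "hwgq" snaps positive pre-activations onto the centers 0.538 / 1.076 / 1.614 (zero otherwise), clip_thr saturates large inputs at the top center, and backward_func "relu" passes the gradient like an ordinary ReLU. The sketch below only illustrates that behavior; the midpoint decision thresholds and the exact backward rule are assumptions, and `hwgq_forward` / `hwgq_backward_mask` are illustrative names, not functions from the Caffe code.

import numpy as np

# Values taken from quant_param above.
LEVELS = np.array([0.0, 0.538, 1.076, 1.614])  # 2-bit: zero plus the three centers
CLIP_THR = 1.614

def hwgq_forward(x):
    """Forward: half-wave quantization. Negative inputs map to 0; positive
    inputs snap to the nearest level (midpoint thresholds assumed); anything
    above clip_thr saturates at the largest center."""
    x = np.asarray(x, dtype=np.float64)
    y = np.zeros_like(x)
    pos = x > 0
    xp = x[pos]
    idx = np.abs(xp[:, None] - LEVELS[None, :]).argmin(axis=1)
    y[pos] = LEVELS[idx]
    return y

def hwgq_backward_mask(x):
    """Backward surrogate: backward_func "relu" lets the gradient through
    wherever the input is positive."""
    return (np.asarray(x) > 0).astype(np.float64)

# Example: hwgq_forward([-0.3, 0.1, 0.7, 2.5]) -> [0.0, 0.0, 0.538, 1.614]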
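Note that BinaryConvolution, BinaryInnerProduct, and Quant are not layers in stock BVLC Caffe, so this train_val.prototxt presumably targets a Caffe build that includes the HWGQ layers. Assuming such a build with pycaffe available, training could be driven roughly as follows; the solver filename and its contents are illustrative, not part of this gist.

import caffe

# Requires a Caffe build that registers the BinaryConvolution /
# BinaryInnerProduct / Quant layers referenced in train_val.prototxt.
caffe.set_mode_gpu()
caffe.set_device(0)

# 'solver.prototxt' is a hypothetical companion file: its `net` field should
# point at this train_val.prototxt and define base_lr, momentum, max_iter,
# snapshot settings, etc.
solver = caffe.SGDSolver('solver.prototxt')
solver.solve()

# Command-line equivalent:
#   caffe train --solver=solver.prototxt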