Skip to content

Instantly share code, notes, and snippets.

@ducha-aiki
Created October 21, 2015 18:37
Show Gist options
  • Save ducha-aiki/e3f8c4a162ddb62c02f7 to your computer and use it in GitHub Desktop.
cifar10_2K_not-in-place
I1021 21:39:31.658416 3427 caffe.cpp:184] Using GPUs 0
I1021 21:39:31.932163 3427 solver.cpp:47] Initializing solver from parameters:
test_iter: 10
test_interval: 1000
base_lr: 0.001
display: 100
max_iter: 5000
lr_policy: "poly"
power: 0.5
momentum: 0.9
snapshot_prefix: "examples/cifar10_full_sigmoid_bn"
solver_mode: GPU
device_id: 0
net: "examples/cifar10/cifar10_full_sigmoid_train_test_bn.prototxt"
I1021 21:39:31.932204 3427 solver.cpp:90] Creating training net from net file: examples/cifar10/cifar10_full_sigmoid_train_test_bn.prototxt
I1021 21:39:31.932634 3427 net.cpp:322] The NetState phase (0) differed from the phase (1) specified by a rule in layer cifar
I1021 21:39:31.932646 3427 net.cpp:322] The NetState phase (0) differed from the phase (1) specified by a rule in layer bn1
I1021 21:39:31.932658 3427 net.cpp:322] The NetState phase (0) differed from the phase (1) specified by a rule in layer bn2
I1021 21:39:31.932663 3427 net.cpp:322] The NetState phase (0) differed from the phase (1) specified by a rule in layer bn3
I1021 21:39:31.932667 3427 net.cpp:322] The NetState phase (0) differed from the phase (1) specified by a rule in layer accuracy
I1021 21:39:31.932780 3427 net.cpp:49] Initializing net from parameters:
name: "CIFAR10_full"
state {
phase: TRAIN
}
layer {
name: "cifar"
type: "Data"
top: "data"
top: "label"
include {
phase: TRAIN
}
transform_param {
mean_file: "examples/cifar10/mean.binaryproto"
}
data_param {
source: "examples/cifar10/cifar10_train_lmdb"
batch_size: 111
backend: LMDB
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 32
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.0001
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "bn1"
type: "BatchNorm"
bottom: "pool1"
top: "bn1"
param {
lr_mult: 0
}
param {
lr_mult: 0
}
param {
lr_mult: 0
}
include {
phase: TRAIN
}
batch_norm_param {
use_global_stats: false
}
}
layer {
name: "Sigmoid1"
type: "Sigmoid"
bottom: "bn1"
top: "Sigmoid1"
}
layer {
name: "conv2"
type: "Convolution"
bottom: "Sigmoid1"
top: "conv2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 32
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "bn2"
type: "BatchNorm"
bottom: "conv2"
top: "bn2"
param {
lr_mult: 0
}
param {
lr_mult: 0
}
param {
lr_mult: 0
}
include {
phase: TRAIN
}
batch_norm_param {
use_global_stats: false
}
}
layer {
name: "Sigmoid2"
type: "Sigmoid"
bottom: "bn2"
top: "Sigmoid2"
}
layer {
name: "pool2"
type: "Pooling"
bottom: "Sigmoid2"
top: "pool2"
pooling_param {
pool: AVE
kernel_size: 3
stride: 2
}
}
layer {
name: "conv3"
type: "Convolution"
bottom: "pool2"
top: "conv3"
param {
lr_mult: 1
}
param {
lr_mult: 1
}
convolution_param {
num_output: 64
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "bn3"
type: "BatchNorm"
bottom: "conv3"
top: "bn3"
param {
lr_mult: 0
}
param {
lr_mult: 0
}
param {
lr_mult: 0
}
include {
phase: TRAIN
}
batch_norm_param {
use_global_stats: false
}
}
layer {
name: "Sigmoid3"
type: "Sigmoid"
bottom: "bn3"
top: "Sigmoid3"
}
layer {
name: "pool3"
type: "Pooling"
bottom: "Sigmoid3"
top: "pool3"
pooling_param {
pool: AVE
kernel_size: 3
stride: 2
}
}
layer {
name: "ip1"
type: "InnerProduct"
bottom: "pool3"
top: "ip1"
param {
lr_mult: 1
decay_mult: 250
}
param {
lr_mult: 0.2
decay_mult: 0
}
inner_product_param {
num_output: 10
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "ip1"
bottom: "label"
top: "loss"
}
I1021 21:39:31.932869 3427 layer_factory.hpp:76] Creating layer cifar
I1021 21:39:31.933334 3427 net.cpp:106] Creating Layer cifar
I1021 21:39:31.933457 3427 net.cpp:411] cifar -> data
I1021 21:39:31.933539 3427 net.cpp:411] cifar -> label
I1021 21:39:31.933552 3427 data_transformer.cpp:25] Loading mean file from: examples/cifar10/mean.binaryproto
I1021 21:39:31.934157 3429 db_lmdb.cpp:38] Opened lmdb examples/cifar10/cifar10_train_lmdb
I1021 21:39:31.972024 3427 data_layer.cpp:45] output data size: 111,3,32,32
I1021 21:39:31.979584 3427 net.cpp:150] Setting up cifar
I1021 21:39:31.979622 3427 net.cpp:157] Top shape: 111 3 32 32 (340992)
I1021 21:39:31.979626 3427 net.cpp:157] Top shape: 111 (111)
I1021 21:39:31.979629 3427 net.cpp:165] Memory required for data: 1364412
I1021 21:39:31.979639 3427 layer_factory.hpp:76] Creating layer conv1
I1021 21:39:31.979651 3427 net.cpp:106] Creating Layer conv1
I1021 21:39:31.979655 3427 net.cpp:454] conv1 <- data
I1021 21:39:31.979665 3427 net.cpp:411] conv1 -> conv1
I1021 21:39:31.980183 3427 net.cpp:150] Setting up conv1
I1021 21:39:31.980191 3427 net.cpp:157] Top shape: 111 32 32 32 (3637248)
I1021 21:39:31.980203 3427 net.cpp:165] Memory required for data: 15913404
I1021 21:39:31.980216 3427 layer_factory.hpp:76] Creating layer pool1
I1021 21:39:31.980227 3427 net.cpp:106] Creating Layer pool1
I1021 21:39:31.980231 3427 net.cpp:454] pool1 <- conv1
I1021 21:39:31.980236 3427 net.cpp:411] pool1 -> pool1
I1021 21:39:31.980332 3427 net.cpp:150] Setting up pool1
I1021 21:39:31.980348 3427 net.cpp:157] Top shape: 111 32 16 16 (909312)
I1021 21:39:31.980350 3427 net.cpp:165] Memory required for data: 19550652
I1021 21:39:31.980352 3427 layer_factory.hpp:76] Creating layer bn1
I1021 21:39:31.980370 3427 net.cpp:106] Creating Layer bn1
I1021 21:39:31.980371 3427 net.cpp:454] bn1 <- pool1
I1021 21:39:31.980376 3427 net.cpp:411] bn1 -> bn1
I1021 21:39:31.981046 3427 net.cpp:150] Setting up bn1
I1021 21:39:31.981053 3427 net.cpp:157] Top shape: 111 32 16 16 (909312)
I1021 21:39:31.981055 3427 net.cpp:165] Memory required for data: 23187900
I1021 21:39:31.981063 3427 layer_factory.hpp:76] Creating layer Sigmoid1
I1021 21:39:31.981068 3427 net.cpp:106] Creating Layer Sigmoid1
I1021 21:39:31.981072 3427 net.cpp:454] Sigmoid1 <- bn1
I1021 21:39:31.981076 3427 net.cpp:411] Sigmoid1 -> Sigmoid1
I1021 21:39:31.981091 3427 net.cpp:150] Setting up Sigmoid1
I1021 21:39:31.981096 3427 net.cpp:157] Top shape: 111 32 16 16 (909312)
I1021 21:39:31.981097 3427 net.cpp:165] Memory required for data: 26825148
I1021 21:39:31.981098 3427 layer_factory.hpp:76] Creating layer conv2
I1021 21:39:31.981104 3427 net.cpp:106] Creating Layer conv2
I1021 21:39:31.981106 3427 net.cpp:454] conv2 <- Sigmoid1
I1021 21:39:31.981109 3427 net.cpp:411] conv2 -> conv2
I1021 21:39:31.982153 3427 net.cpp:150] Setting up conv2
I1021 21:39:31.982162 3427 net.cpp:157] Top shape: 111 32 16 16 (909312)
I1021 21:39:31.982164 3427 net.cpp:165] Memory required for data: 30462396
I1021 21:39:31.982168 3427 layer_factory.hpp:76] Creating layer bn2
I1021 21:39:31.982177 3427 net.cpp:106] Creating Layer bn2
I1021 21:39:31.982178 3427 net.cpp:454] bn2 <- conv2
I1021 21:39:31.982184 3427 net.cpp:411] bn2 -> bn2
I1021 21:39:31.982332 3427 net.cpp:150] Setting up bn2
I1021 21:39:31.982338 3427 net.cpp:157] Top shape: 111 32 16 16 (909312)
I1021 21:39:31.982341 3427 net.cpp:165] Memory required for data: 34099644
I1021 21:39:31.982347 3427 layer_factory.hpp:76] Creating layer Sigmoid2
I1021 21:39:31.982352 3427 net.cpp:106] Creating Layer Sigmoid2
I1021 21:39:31.982353 3427 net.cpp:454] Sigmoid2 <- bn2
I1021 21:39:31.982357 3427 net.cpp:411] Sigmoid2 -> Sigmoid2
I1021 21:39:31.982389 3427 net.cpp:150] Setting up Sigmoid2
I1021 21:39:31.982422 3427 net.cpp:157] Top shape: 111 32 16 16 (909312)
I1021 21:39:31.982425 3427 net.cpp:165] Memory required for data: 37736892
I1021 21:39:31.982427 3427 layer_factory.hpp:76] Creating layer pool2
I1021 21:39:31.982432 3427 net.cpp:106] Creating Layer pool2
I1021 21:39:31.982434 3427 net.cpp:454] pool2 <- Sigmoid2
I1021 21:39:31.982439 3427 net.cpp:411] pool2 -> pool2
I1021 21:39:31.982622 3427 net.cpp:150] Setting up pool2
I1021 21:39:31.982636 3427 net.cpp:157] Top shape: 111 32 8 8 (227328)
I1021 21:39:31.982638 3427 net.cpp:165] Memory required for data: 38646204
I1021 21:39:31.982640 3427 layer_factory.hpp:76] Creating layer conv3
I1021 21:39:31.982658 3427 net.cpp:106] Creating Layer conv3
I1021 21:39:31.982661 3427 net.cpp:454] conv3 <- pool2
I1021 21:39:31.982663 3427 net.cpp:411] conv3 -> conv3
I1021 21:39:31.983965 3427 net.cpp:150] Setting up conv3
I1021 21:39:31.983971 3427 net.cpp:157] Top shape: 111 64 8 8 (454656)
I1021 21:39:31.983973 3427 net.cpp:165] Memory required for data: 40464828
I1021 21:39:31.983976 3427 layer_factory.hpp:76] Creating layer bn3
I1021 21:39:31.983981 3427 net.cpp:106] Creating Layer bn3
I1021 21:39:31.983983 3427 net.cpp:454] bn3 <- conv3
I1021 21:39:31.983988 3427 net.cpp:411] bn3 -> bn3
I1021 21:39:31.984104 3427 net.cpp:150] Setting up bn3
I1021 21:39:31.984107 3427 net.cpp:157] Top shape: 111 64 8 8 (454656)
I1021 21:39:31.984109 3427 net.cpp:165] Memory required for data: 42283452
I1021 21:39:31.984113 3427 layer_factory.hpp:76] Creating layer Sigmoid3
I1021 21:39:31.984118 3427 net.cpp:106] Creating Layer Sigmoid3
I1021 21:39:31.984120 3427 net.cpp:454] Sigmoid3 <- bn3
I1021 21:39:31.984122 3427 net.cpp:411] Sigmoid3 -> Sigmoid3
I1021 21:39:31.984338 3427 net.cpp:150] Setting up Sigmoid3
I1021 21:39:31.984352 3427 net.cpp:157] Top shape: 111 64 8 8 (454656)
I1021 21:39:31.984400 3427 net.cpp:165] Memory required for data: 44102076
I1021 21:39:31.984405 3427 layer_factory.hpp:76] Creating layer pool3
I1021 21:39:31.984411 3427 net.cpp:106] Creating Layer pool3
I1021 21:39:31.984412 3427 net.cpp:454] pool3 <- Sigmoid3
I1021 21:39:31.984416 3427 net.cpp:411] pool3 -> pool3
I1021 21:39:31.984434 3427 net.cpp:150] Setting up pool3
I1021 21:39:31.984437 3427 net.cpp:157] Top shape: 111 64 4 4 (113664)
I1021 21:39:31.984482 3427 net.cpp:165] Memory required for data: 44556732
I1021 21:39:31.984484 3427 layer_factory.hpp:76] Creating layer ip1
I1021 21:39:31.984532 3427 net.cpp:106] Creating Layer ip1
I1021 21:39:31.984536 3427 net.cpp:454] ip1 <- pool3
I1021 21:39:31.984558 3427 net.cpp:411] ip1 -> ip1
I1021 21:39:31.985220 3427 net.cpp:150] Setting up ip1
I1021 21:39:31.985231 3427 net.cpp:157] Top shape: 111 10 (1110)
I1021 21:39:31.985251 3427 net.cpp:165] Memory required for data: 44561172
I1021 21:39:31.985278 3427 layer_factory.hpp:76] Creating layer loss
I1021 21:39:31.985298 3427 net.cpp:106] Creating Layer loss
I1021 21:39:31.985301 3427 net.cpp:454] loss <- ip1
I1021 21:39:31.985324 3427 net.cpp:454] loss <- label
I1021 21:39:31.985342 3427 net.cpp:411] loss -> loss
I1021 21:39:31.985362 3427 layer_factory.hpp:76] Creating layer loss
I1021 21:39:31.985440 3427 net.cpp:150] Setting up loss
I1021 21:39:31.985445 3427 net.cpp:157] Top shape: (1)
I1021 21:39:31.985462 3427 net.cpp:160] with loss weight 1
I1021 21:39:31.985491 3427 net.cpp:165] Memory required for data: 44561176
I1021 21:39:31.985515 3427 net.cpp:226] loss needs backward computation.
I1021 21:39:31.985518 3427 net.cpp:226] ip1 needs backward computation.
I1021 21:39:31.985543 3427 net.cpp:226] pool3 needs backward computation.
I1021 21:39:31.985545 3427 net.cpp:226] Sigmoid3 needs backward computation.
I1021 21:39:31.985548 3427 net.cpp:226] bn3 needs backward computation.
I1021 21:39:31.985569 3427 net.cpp:226] conv3 needs backward computation.
I1021 21:39:31.985586 3427 net.cpp:226] pool2 needs backward computation.
I1021 21:39:31.985589 3427 net.cpp:226] Sigmoid2 needs backward computation.
I1021 21:39:31.985628 3427 net.cpp:226] bn2 needs backward computation.
I1021 21:39:31.985632 3427 net.cpp:226] conv2 needs backward computation.
I1021 21:39:31.985654 3427 net.cpp:226] Sigmoid1 needs backward computation.
I1021 21:39:31.985674 3427 net.cpp:226] bn1 needs backward computation.
I1021 21:39:31.985677 3427 net.cpp:226] pool1 needs backward computation.
I1021 21:39:31.985697 3427 net.cpp:226] conv1 needs backward computation.
I1021 21:39:31.985700 3427 net.cpp:228] cifar does not need backward computation.
I1021 21:39:31.985723 3427 net.cpp:270] This network produces output loss
I1021 21:39:31.985749 3427 net.cpp:283] Network initialization done.
I1021 21:39:31.986170 3427 solver.cpp:180] Creating test net (#0) specified by net file: examples/cifar10/cifar10_full_sigmoid_train_test_bn.prototxt
I1021 21:39:31.986217 3427 net.cpp:322] The NetState phase (1) differed from the phase (0) specified by a rule in layer cifar
I1021 21:39:31.986240 3427 net.cpp:322] The NetState phase (1) differed from the phase (0) specified by a rule in layer bn1
I1021 21:39:31.986259 3427 net.cpp:322] The NetState phase (1) differed from the phase (0) specified by a rule in layer bn2
I1021 21:39:31.986265 3427 net.cpp:322] The NetState phase (1) differed from the phase (0) specified by a rule in layer bn3
I1021 21:39:31.986382 3427 net.cpp:49] Initializing net from parameters:
name: "CIFAR10_full"
state {
phase: TEST
}
layer {
name: "cifar"
type: "Data"
top: "data"
top: "label"
include {
phase: TEST
}
transform_param {
mean_file: "examples/cifar10/mean.binaryproto"
}
data_param {
source: "examples/cifar10/cifar10_test_lmdb"
batch_size: 1000
backend: LMDB
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 32
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.0001
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "bn1"
type: "BatchNorm"
bottom: "pool1"
top: "bn1"
param {
lr_mult: 0
}
param {
lr_mult: 0
}
param {
lr_mult: 0
}
include {
phase: TEST
}
batch_norm_param {
use_global_stats: true
}
}
layer {
name: "Sigmoid1"
type: "Sigmoid"
bottom: "bn1"
top: "Sigmoid1"
}
layer {
name: "conv2"
type: "Convolution"
bottom: "Sigmoid1"
top: "conv2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 32
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "bn2"
type: "BatchNorm"
bottom: "conv2"
top: "bn2"
param {
lr_mult: 0
}
param {
lr_mult: 0
}
param {
lr_mult: 0
}
include {
phase: TEST
}
batch_norm_param {
use_global_stats: true
}
}
layer {
name: "Sigmoid2"
type: "Sigmoid"
bottom: "bn2"
top: "Sigmoid2"
}
layer {
name: "pool2"
type: "Pooling"
bottom: "Sigmoid2"
top: "pool2"
pooling_param {
pool: AVE
kernel_size: 3
stride: 2
}
}
layer {
name: "conv3"
type: "Convolution"
bottom: "pool2"
top: "conv3"
param {
lr_mult: 1
}
param {
lr_mult: 1
}
convolution_param {
num_output: 64
pad: 2
kernel_size: 5
stride: 1
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "bn3"
type: "BatchNorm"
bottom: "conv3"
top: "bn3"
param {
lr_mult: 0
}
param {
lr_mult: 0
}
param {
lr_mult: 0
}
include {
phase: TEST
}
batch_norm_param {
use_global_stats: true
}
}
layer {
name: "Sigmoid3"
type: "Sigmoid"
bottom: "bn3"
top: "Sigmoid3"
}
layer {
name: "pool3"
type: "Pooling"
bottom: "Sigmoid3"
top: "pool3"
pooling_param {
pool: AVE
kernel_size: 3
stride: 2
}
}
layer {
name: "ip1"
type: "InnerProduct"
bottom: "pool3"
top: "ip1"
param {
lr_mult: 1
decay_mult: 250
}
param {
lr_mult: 0.2
decay_mult: 0
}
inner_product_param {
num_output: 10
weight_filler {
type: "gaussian"
std: 0.01
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "accuracy"
type: "Accuracy"
bottom: "ip1"
bottom: "label"
top: "accuracy"
include {
phase: TEST
}
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "ip1"
bottom: "label"
top: "loss"
}
I1021 21:39:31.986920 3427 layer_factory.hpp:76] Creating layer cifar
I1021 21:39:31.987375 3427 net.cpp:106] Creating Layer cifar
I1021 21:39:31.987381 3427 net.cpp:411] cifar -> data
I1021 21:39:31.987409 3427 net.cpp:411] cifar -> label
I1021 21:39:31.987428 3427 data_transformer.cpp:25] Loading mean file from: examples/cifar10/mean.binaryproto
I1021 21:39:31.988034 3431 db_lmdb.cpp:38] Opened lmdb examples/cifar10/cifar10_test_lmdb
I1021 21:39:31.988154 3427 data_layer.cpp:45] output data size: 1000,3,32,32
I1021 21:39:32.007434 3427 net.cpp:150] Setting up cifar
I1021 21:39:32.007467 3427 net.cpp:157] Top shape: 1000 3 32 32 (3072000)
I1021 21:39:32.007470 3427 net.cpp:157] Top shape: 1000 (1000)
I1021 21:39:32.007472 3427 net.cpp:165] Memory required for data: 12292000
I1021 21:39:32.007478 3427 layer_factory.hpp:76] Creating layer label_cifar_1_split
I1021 21:39:32.007491 3427 net.cpp:106] Creating Layer label_cifar_1_split
I1021 21:39:32.007494 3427 net.cpp:454] label_cifar_1_split <- label
I1021 21:39:32.007498 3427 net.cpp:411] label_cifar_1_split -> label_cifar_1_split_0
I1021 21:39:32.007506 3427 net.cpp:411] label_cifar_1_split -> label_cifar_1_split_1
I1021 21:39:32.007663 3427 net.cpp:150] Setting up label_cifar_1_split
I1021 21:39:32.007675 3427 net.cpp:157] Top shape: 1000 (1000)
I1021 21:39:32.007678 3427 net.cpp:157] Top shape: 1000 (1000)
I1021 21:39:32.007679 3427 net.cpp:165] Memory required for data: 12300000
I1021 21:39:32.007681 3427 layer_factory.hpp:76] Creating layer conv1
I1021 21:39:32.007699 3427 net.cpp:106] Creating Layer conv1
I1021 21:39:32.007700 3427 net.cpp:454] conv1 <- data
I1021 21:39:32.007704 3427 net.cpp:411] conv1 -> conv1
I1021 21:39:32.008038 3427 net.cpp:150] Setting up conv1
I1021 21:39:32.008049 3427 net.cpp:157] Top shape: 1000 32 32 32 (32768000)
I1021 21:39:32.008060 3427 net.cpp:165] Memory required for data: 143372000
I1021 21:39:32.008069 3427 layer_factory.hpp:76] Creating layer pool1
I1021 21:39:32.008074 3427 net.cpp:106] Creating Layer pool1
I1021 21:39:32.008076 3427 net.cpp:454] pool1 <- conv1
I1021 21:39:32.008080 3427 net.cpp:411] pool1 -> pool1
I1021 21:39:32.008105 3427 net.cpp:150] Setting up pool1
I1021 21:39:32.008110 3427 net.cpp:157] Top shape: 1000 32 16 16 (8192000)
I1021 21:39:32.008111 3427 net.cpp:165] Memory required for data: 176140000
I1021 21:39:32.008112 3427 layer_factory.hpp:76] Creating layer bn1
I1021 21:39:32.008119 3427 net.cpp:106] Creating Layer bn1
I1021 21:39:32.008121 3427 net.cpp:454] bn1 <- pool1
I1021 21:39:32.008124 3427 net.cpp:411] bn1 -> bn1
I1021 21:39:32.008246 3427 net.cpp:150] Setting up bn1
I1021 21:39:32.008251 3427 net.cpp:157] Top shape: 1000 32 16 16 (8192000)
I1021 21:39:32.008252 3427 net.cpp:165] Memory required for data: 208908000
I1021 21:39:32.008268 3427 layer_factory.hpp:76] Creating layer Sigmoid1
I1021 21:39:32.008272 3427 net.cpp:106] Creating Layer Sigmoid1
I1021 21:39:32.008275 3427 net.cpp:454] Sigmoid1 <- bn1
I1021 21:39:32.008277 3427 net.cpp:411] Sigmoid1 -> Sigmoid1
I1021 21:39:32.008291 3427 net.cpp:150] Setting up Sigmoid1
I1021 21:39:32.008293 3427 net.cpp:157] Top shape: 1000 32 16 16 (8192000)
I1021 21:39:32.008296 3427 net.cpp:165] Memory required for data: 241676000
I1021 21:39:32.008297 3427 layer_factory.hpp:76] Creating layer conv2
I1021 21:39:32.008302 3427 net.cpp:106] Creating Layer conv2
I1021 21:39:32.008347 3427 net.cpp:454] conv2 <- Sigmoid1
I1021 21:39:32.008355 3427 net.cpp:411] conv2 -> conv2
I1021 21:39:32.010607 3427 net.cpp:150] Setting up conv2
I1021 21:39:32.010617 3427 net.cpp:157] Top shape: 1000 32 16 16 (8192000)
I1021 21:39:32.010619 3427 net.cpp:165] Memory required for data: 274444000
I1021 21:39:32.010624 3427 layer_factory.hpp:76] Creating layer bn2
I1021 21:39:32.010630 3427 net.cpp:106] Creating Layer bn2
I1021 21:39:32.010633 3427 net.cpp:454] bn2 <- conv2
I1021 21:39:32.010637 3427 net.cpp:411] bn2 -> bn2
I1021 21:39:32.010776 3427 net.cpp:150] Setting up bn2
I1021 21:39:32.010781 3427 net.cpp:157] Top shape: 1000 32 16 16 (8192000)
I1021 21:39:32.010782 3427 net.cpp:165] Memory required for data: 307212000
I1021 21:39:32.010789 3427 layer_factory.hpp:76] Creating layer Sigmoid2
I1021 21:39:32.010794 3427 net.cpp:106] Creating Layer Sigmoid2
I1021 21:39:32.010797 3427 net.cpp:454] Sigmoid2 <- bn2
I1021 21:39:32.010798 3427 net.cpp:411] Sigmoid2 -> Sigmoid2
I1021 21:39:32.010812 3427 net.cpp:150] Setting up Sigmoid2
I1021 21:39:32.010815 3427 net.cpp:157] Top shape: 1000 32 16 16 (8192000)
I1021 21:39:32.010818 3427 net.cpp:165] Memory required for data: 339980000
I1021 21:39:32.010819 3427 layer_factory.hpp:76] Creating layer pool2
I1021 21:39:32.010823 3427 net.cpp:106] Creating Layer pool2
I1021 21:39:32.010825 3427 net.cpp:454] pool2 <- Sigmoid2
I1021 21:39:32.010828 3427 net.cpp:411] pool2 -> pool2
I1021 21:39:32.010841 3427 net.cpp:150] Setting up pool2
I1021 21:39:32.010844 3427 net.cpp:157] Top shape: 1000 32 8 8 (2048000)
I1021 21:39:32.010846 3427 net.cpp:165] Memory required for data: 348172000
I1021 21:39:32.010848 3427 layer_factory.hpp:76] Creating layer conv3
I1021 21:39:32.010854 3427 net.cpp:106] Creating Layer conv3
I1021 21:39:32.010855 3427 net.cpp:454] conv3 <- pool2
I1021 21:39:32.010859 3427 net.cpp:411] conv3 -> conv3
I1021 21:39:32.012171 3427 net.cpp:150] Setting up conv3
I1021 21:39:32.012176 3427 net.cpp:157] Top shape: 1000 64 8 8 (4096000)
I1021 21:39:32.012177 3427 net.cpp:165] Memory required for data: 364556000
I1021 21:39:32.012181 3427 layer_factory.hpp:76] Creating layer bn3
I1021 21:39:32.012187 3427 net.cpp:106] Creating Layer bn3
I1021 21:39:32.012189 3427 net.cpp:454] bn3 <- conv3
I1021 21:39:32.012192 3427 net.cpp:411] bn3 -> bn3
I1021 21:39:32.012311 3427 net.cpp:150] Setting up bn3
I1021 21:39:32.012316 3427 net.cpp:157] Top shape: 1000 64 8 8 (4096000)
I1021 21:39:32.012317 3427 net.cpp:165] Memory required for data: 380940000
I1021 21:39:32.012322 3427 layer_factory.hpp:76] Creating layer Sigmoid3
I1021 21:39:32.012326 3427 net.cpp:106] Creating Layer Sigmoid3
I1021 21:39:32.012328 3427 net.cpp:454] Sigmoid3 <- bn3
I1021 21:39:32.012331 3427 net.cpp:411] Sigmoid3 -> Sigmoid3
I1021 21:39:32.012343 3427 net.cpp:150] Setting up Sigmoid3
I1021 21:39:32.012347 3427 net.cpp:157] Top shape: 1000 64 8 8 (4096000)
I1021 21:39:32.012348 3427 net.cpp:165] Memory required for data: 397324000
I1021 21:39:32.012351 3427 layer_factory.hpp:76] Creating layer pool3
I1021 21:39:32.012354 3427 net.cpp:106] Creating Layer pool3
I1021 21:39:32.012356 3427 net.cpp:454] pool3 <- Sigmoid3
I1021 21:39:32.012359 3427 net.cpp:411] pool3 -> pool3
I1021 21:39:32.012372 3427 net.cpp:150] Setting up pool3
I1021 21:39:32.012374 3427 net.cpp:157] Top shape: 1000 64 4 4 (1024000)
I1021 21:39:32.012377 3427 net.cpp:165] Memory required for data: 401420000
I1021 21:39:32.012382 3427 layer_factory.hpp:76] Creating layer ip1
I1021 21:39:32.012387 3427 net.cpp:106] Creating Layer ip1
I1021 21:39:32.012389 3427 net.cpp:454] ip1 <- pool3
I1021 21:39:32.012393 3427 net.cpp:411] ip1 -> ip1
I1021 21:39:32.012711 3427 net.cpp:150] Setting up ip1
I1021 21:39:32.012717 3427 net.cpp:157] Top shape: 1000 10 (10000)
I1021 21:39:32.012718 3427 net.cpp:165] Memory required for data: 401460000
I1021 21:39:32.012724 3427 layer_factory.hpp:76] Creating layer ip1_ip1_0_split
I1021 21:39:32.012729 3427 net.cpp:106] Creating Layer ip1_ip1_0_split
I1021 21:39:32.012745 3427 net.cpp:454] ip1_ip1_0_split <- ip1
I1021 21:39:32.012749 3427 net.cpp:411] ip1_ip1_0_split -> ip1_ip1_0_split_0
I1021 21:39:32.012753 3427 net.cpp:411] ip1_ip1_0_split -> ip1_ip1_0_split_1
I1021 21:39:32.012778 3427 net.cpp:150] Setting up ip1_ip1_0_split
I1021 21:39:32.012783 3427 net.cpp:157] Top shape: 1000 10 (10000)
I1021 21:39:32.012784 3427 net.cpp:157] Top shape: 1000 10 (10000)
I1021 21:39:32.012786 3427 net.cpp:165] Memory required for data: 401540000
I1021 21:39:32.012789 3427 layer_factory.hpp:76] Creating layer accuracy
I1021 21:39:32.012795 3427 net.cpp:106] Creating Layer accuracy
I1021 21:39:32.012797 3427 net.cpp:454] accuracy <- ip1_ip1_0_split_0
I1021 21:39:32.012800 3427 net.cpp:454] accuracy <- label_cifar_1_split_0
I1021 21:39:32.012804 3427 net.cpp:411] accuracy -> accuracy
I1021 21:39:32.012809 3427 net.cpp:150] Setting up accuracy
I1021 21:39:32.012812 3427 net.cpp:157] Top shape: (1)
I1021 21:39:32.012814 3427 net.cpp:165] Memory required for data: 401540004
I1021 21:39:32.012816 3427 layer_factory.hpp:76] Creating layer loss
I1021 21:39:32.012820 3427 net.cpp:106] Creating Layer loss
I1021 21:39:32.012821 3427 net.cpp:454] loss <- ip1_ip1_0_split_1
I1021 21:39:32.012825 3427 net.cpp:454] loss <- label_cifar_1_split_1
I1021 21:39:32.012827 3427 net.cpp:411] loss -> loss
I1021 21:39:32.012832 3427 layer_factory.hpp:76] Creating layer loss
I1021 21:39:32.012892 3427 net.cpp:150] Setting up loss
I1021 21:39:32.012895 3427 net.cpp:157] Top shape: (1)
I1021 21:39:32.012897 3427 net.cpp:160] with loss weight 1
I1021 21:39:32.012907 3427 net.cpp:165] Memory required for data: 401540008
I1021 21:39:32.012908 3427 net.cpp:226] loss needs backward computation.
I1021 21:39:32.012910 3427 net.cpp:228] accuracy does not need backward computation.
I1021 21:39:32.012912 3427 net.cpp:226] ip1_ip1_0_split needs backward computation.
I1021 21:39:32.012914 3427 net.cpp:226] ip1 needs backward computation.
I1021 21:39:32.012917 3427 net.cpp:226] pool3 needs backward computation.
I1021 21:39:32.012918 3427 net.cpp:226] Sigmoid3 needs backward computation.
I1021 21:39:32.012919 3427 net.cpp:226] bn3 needs backward computation.
I1021 21:39:32.012922 3427 net.cpp:226] conv3 needs backward computation.
I1021 21:39:32.012923 3427 net.cpp:226] pool2 needs backward computation.
I1021 21:39:32.012925 3427 net.cpp:226] Sigmoid2 needs backward computation.
I1021 21:39:32.012928 3427 net.cpp:226] bn2 needs backward computation.
I1021 21:39:32.012929 3427 net.cpp:226] conv2 needs backward computation.
I1021 21:39:32.012930 3427 net.cpp:226] Sigmoid1 needs backward computation.
I1021 21:39:32.012933 3427 net.cpp:226] bn1 needs backward computation.
I1021 21:39:32.012934 3427 net.cpp:226] pool1 needs backward computation.
I1021 21:39:32.012936 3427 net.cpp:226] conv1 needs backward computation.
I1021 21:39:32.012938 3427 net.cpp:228] label_cifar_1_split does not need backward computation.
I1021 21:39:32.012940 3427 net.cpp:228] cifar does not need backward computation.
I1021 21:39:32.012943 3427 net.cpp:270] This network produces output accuracy
I1021 21:39:32.012944 3427 net.cpp:270] This network produces output loss
I1021 21:39:32.012955 3427 net.cpp:283] Network initialization done.
I1021 21:39:32.013012 3427 solver.cpp:59] Solver scaffolding done.
I1021 21:39:32.013370 3427 caffe.cpp:212] Starting Optimization
I1021 21:39:32.013376 3427 solver.cpp:287] Solving CIFAR10_full
I1021 21:39:32.013378 3427 solver.cpp:288] Learning Rate Policy: poly
I1021 21:39:32.013828 3427 solver.cpp:340] Iteration 0, Testing net (#0)
I1021 21:39:32.014142 3427 blocking_queue.cpp:50] Data layer prefetch queue empty
I1021 21:39:36.492568 3427 solver.cpp:408] Test net output #0: accuracy = 0.1
I1021 21:39:36.492605 3427 solver.cpp:408] Test net output #1: loss = 87.3365 (* 1 = 87.3365 loss)
I1021 21:39:36.582258 3427 solver.cpp:236] Iteration 0, loss = 2.309
I1021 21:39:36.582298 3427 solver.cpp:252] Train net output #0: loss = 2.309 (* 1 = 2.309 loss)
I1021 21:39:36.582363 3427 sgd_solver.cpp:106] Iteration 0, lr = 0.001
I1021 21:39:49.183332 3427 solver.cpp:236] Iteration 100, loss = 2.06843
I1021 21:39:49.183368 3427 solver.cpp:252] Train net output #0: loss = 2.06843 (* 1 = 2.06843 loss)
I1021 21:39:49.183374 3427 sgd_solver.cpp:106] Iteration 100, lr = 0.00098995
I1021 21:40:02.056252 3427 solver.cpp:236] Iteration 200, loss = 1.88549
I1021 21:40:02.056336 3427 solver.cpp:252] Train net output #0: loss = 1.88549 (* 1 = 1.88549 loss)
I1021 21:40:02.056344 3427 sgd_solver.cpp:106] Iteration 200, lr = 0.000979796
I1021 21:40:14.760164 3427 solver.cpp:236] Iteration 300, loss = 1.78708
I1021 21:40:14.760201 3427 solver.cpp:252] Train net output #0: loss = 1.78708 (* 1 = 1.78708 loss)
I1021 21:40:14.760206 3427 sgd_solver.cpp:106] Iteration 300, lr = 0.000969536
I1021 21:40:27.368268 3427 solver.cpp:236] Iteration 400, loss = 1.73871
I1021 21:40:27.368304 3427 solver.cpp:252] Train net output #0: loss = 1.73871 (* 1 = 1.73871 loss)
I1021 21:40:27.368309 3427 sgd_solver.cpp:106] Iteration 400, lr = 0.000959166
I1021 21:40:39.962333 3427 solver.cpp:236] Iteration 500, loss = 1.58247
I1021 21:40:39.962419 3427 solver.cpp:252] Train net output #0: loss = 1.58247 (* 1 = 1.58247 loss)
I1021 21:40:39.962425 3427 sgd_solver.cpp:106] Iteration 500, lr = 0.000948683
I1021 21:40:52.493491 3427 solver.cpp:236] Iteration 600, loss = 1.53626
I1021 21:40:52.493516 3427 solver.cpp:252] Train net output #0: loss = 1.53626 (* 1 = 1.53626 loss)
I1021 21:40:52.493522 3427 sgd_solver.cpp:106] Iteration 600, lr = 0.000938083
I1021 21:40:58.539062 3427 solver.cpp:236] Iteration 700, loss = 1.51635
I1021 21:40:58.539095 3427 solver.cpp:252] Train net output #0: loss = 1.51635 (* 1 = 1.51635 loss)
I1021 21:40:58.539100 3427 sgd_solver.cpp:106] Iteration 700, lr = 0.000927362
I1021 21:41:04.584962 3427 solver.cpp:236] Iteration 800, loss = 1.43724
I1021 21:41:04.584987 3427 solver.cpp:252] Train net output #0: loss = 1.43724 (* 1 = 1.43724 loss)
I1021 21:41:04.584991 3427 sgd_solver.cpp:106] Iteration 800, lr = 0.000916515
I1021 21:41:10.623792 3427 solver.cpp:236] Iteration 900, loss = 1.44181
I1021 21:41:10.623878 3427 solver.cpp:252] Train net output #0: loss = 1.44181 (* 1 = 1.44181 loss)
I1021 21:41:10.623891 3427 sgd_solver.cpp:106] Iteration 900, lr = 0.000905539
I1021 21:41:16.632550 3427 solver.cpp:340] Iteration 1000, Testing net (#0)
I1021 21:41:18.956629 3427 solver.cpp:408] Test net output #0: accuracy = 0.4018
I1021 21:41:18.956650 3427 solver.cpp:408] Test net output #1: loss = 1.63185 (* 1 = 1.63185 loss)
I1021 21:41:18.995424 3427 solver.cpp:236] Iteration 1000, loss = 1.57062
I1021 21:41:18.995445 3427 solver.cpp:252] Train net output #0: loss = 1.57062 (* 1 = 1.57062 loss)
I1021 21:41:18.995450 3427 sgd_solver.cpp:106] Iteration 1000, lr = 0.000894427
I1021 21:41:25.037570 3427 solver.cpp:236] Iteration 1100, loss = 1.31977
I1021 21:41:25.037595 3427 solver.cpp:252] Train net output #0: loss = 1.31977 (* 1 = 1.31977 loss)
I1021 21:41:25.037600 3427 sgd_solver.cpp:106] Iteration 1100, lr = 0.000883176
I1021 21:41:31.078234 3427 solver.cpp:236] Iteration 1200, loss = 1.43099
I1021 21:41:31.078260 3427 solver.cpp:252] Train net output #0: loss = 1.43099 (* 1 = 1.43099 loss)
I1021 21:41:31.078265 3427 sgd_solver.cpp:106] Iteration 1200, lr = 0.00087178
I1021 21:41:37.121426 3427 solver.cpp:236] Iteration 1300, loss = 1.4668
I1021 21:41:37.121451 3427 solver.cpp:252] Train net output #0: loss = 1.4668 (* 1 = 1.4668 loss)
I1021 21:41:37.121456 3427 sgd_solver.cpp:106] Iteration 1300, lr = 0.000860233
I1021 21:41:43.162214 3427 solver.cpp:236] Iteration 1400, loss = 1.37483
I1021 21:41:43.162305 3427 solver.cpp:252] Train net output #0: loss = 1.37483 (* 1 = 1.37483 loss)
I1021 21:41:43.162320 3427 sgd_solver.cpp:106] Iteration 1400, lr = 0.000848528
I1021 21:41:49.202479 3427 solver.cpp:236] Iteration 1500, loss = 1.35915
I1021 21:41:49.202504 3427 solver.cpp:252] Train net output #0: loss = 1.35915 (* 1 = 1.35915 loss)
I1021 21:41:49.202509 3427 sgd_solver.cpp:106] Iteration 1500, lr = 0.00083666
I1021 21:41:55.248610 3427 solver.cpp:236] Iteration 1600, loss = 1.26454
I1021 21:41:55.248635 3427 solver.cpp:252] Train net output #0: loss = 1.26454 (* 1 = 1.26454 loss)
I1021 21:41:55.248639 3427 sgd_solver.cpp:106] Iteration 1600, lr = 0.000824621
I1021 21:42:01.293602 3427 solver.cpp:236] Iteration 1700, loss = 1.29635
I1021 21:42:01.293627 3427 solver.cpp:252] Train net output #0: loss = 1.29635 (* 1 = 1.29635 loss)
I1021 21:42:01.293632 3427 sgd_solver.cpp:106] Iteration 1700, lr = 0.000812404
I1021 21:42:07.332945 3427 solver.cpp:236] Iteration 1800, loss = 1.26151
I1021 21:42:07.332972 3427 solver.cpp:252] Train net output #0: loss = 1.26151 (* 1 = 1.26151 loss)
I1021 21:42:07.332976 3427 sgd_solver.cpp:106] Iteration 1800, lr = 0.0008
I1021 21:42:13.372344 3427 solver.cpp:236] Iteration 1900, loss = 1.23388
I1021 21:42:13.372442 3427 solver.cpp:252] Train net output #0: loss = 1.23388 (* 1 = 1.23388 loss)
I1021 21:42:13.372457 3427 sgd_solver.cpp:106] Iteration 1900, lr = 0.000787401
I1021 21:42:19.374846 3427 solver.cpp:340] Iteration 2000, Testing net (#0)
I1021 21:42:21.688736 3427 solver.cpp:408] Test net output #0: accuracy = 0.3233
I1021 21:42:21.688760 3427 solver.cpp:408] Test net output #1: loss = 1.92475 (* 1 = 1.92475 loss)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment