Skip to content

Instantly share code, notes, and snippets.

@dniku
Created March 22, 2015 20:39
Show Gist options
  • Save dniku/886ab00fce06b5436f9a to your computer and use it in GitHub Desktop.
I0322 23:04:55.893093 32722 caffe.cpp:117] Use CPU.
I0322 23:04:55.894064 32722 caffe.cpp:121] Starting Optimization
I0322 23:04:55.894438 32722 solver.cpp:32] Initializing solver from parameters:
test_iter: 100
test_interval: 500
base_lr: 0.01
display: 100
max_iter: 10000
lr_policy: "inv"
gamma: 0.0001
power: 0.75
momentum: 0.9
weight_decay: 0.0005
snapshot: 5000
snapshot_prefix: "lenet"
solver_mode: CPU
net: "lenet_train_test_modified.prototxt"
I0322 23:04:55.894565 32722 solver.cpp:70] Creating training net from net file: lenet_train_test_modified.prototxt
E0322 23:04:55.896198 32722 upgrade_proto.cpp:618] Attempting to upgrade input file specified using deprecated V1LayerParameter: lenet_train_test_modified.prototxt
I0322 23:04:55.897217 32722 upgrade_proto.cpp:626] Successfully upgraded file specified using deprecated V1LayerParameter
I0322 23:04:55.897526 32722 net.cpp:257] The NetState phase (0) differed from the phase (1) specified by a rule in layer mnist
I0322 23:04:55.897626 32722 net.cpp:257] The NetState phase (0) differed from the phase (1) specified by a rule in layer accuracy
I0322 23:04:55.898342 32722 net.cpp:42] Initializing net from parameters:
name: "LeNet"
state {
phase: TRAIN
}
layer {
name: "mnist"
type: "ImageData"
top: "data"
top: "label"
include {
phase: TRAIN
}
transform_param {
scale: 0.00390625
}
image_data_param {
source: "mnist_train_index.txt"
batch_size: 64
is_color: false
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 20
kernel_size: 5
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 50
kernel_size: 5
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2"
top: "pool2"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "ip1"
type: "InnerProduct"
bottom: "pool2"
top: "ip1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 500
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "ip1"
top: "ip1"
}
layer {
name: "ip2"
type: "InnerProduct"
bottom: "ip1"
top: "ip2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 10
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "ip2"
bottom: "label"
top: "loss"
}
I0322 23:04:55.898717 32722 layer_factory.hpp:74] Creating layer mnist
I0322 23:04:55.898838 32722 net.cpp:84] Creating Layer mnist
I0322 23:04:55.898955 32722 net.cpp:338] mnist -> data
I0322 23:04:55.899097 32722 net.cpp:338] mnist -> label
I0322 23:04:55.899179 32722 net.cpp:113] Setting up mnist
I0322 23:04:55.899237 32722 image_data_layer.cpp:36] Opening file mnist_train_index.txt
I0322 23:04:56.058006 32722 image_data_layer.cpp:51] A total of 60000 images.
I0322 23:04:56.059156 32722 image_data_layer.cpp:80] output data size: 64,1,12,12
I0322 23:04:56.059674 32722 net.cpp:120] Top shape: 64 1 12 12 (9216)
I0322 23:04:56.059798 32722 net.cpp:120] Top shape: 64 (64)
I0322 23:04:56.059921 32722 layer_factory.hpp:74] Creating layer conv1
I0322 23:04:56.060101 32722 net.cpp:84] Creating Layer conv1
I0322 23:04:56.060197 32722 net.cpp:380] conv1 <- data
I0322 23:04:56.060328 32722 net.cpp:338] conv1 -> conv1
I0322 23:04:56.060468 32722 net.cpp:113] Setting up conv1
I0322 23:04:56.063215 32722 net.cpp:120] Top shape: 64 20 8 8 (81920)
I0322 23:04:56.063557 32722 layer_factory.hpp:74] Creating layer pool1
I0322 23:04:56.063671 32722 net.cpp:84] Creating Layer pool1
I0322 23:04:56.063751 32722 net.cpp:380] pool1 <- conv1
I0322 23:04:56.063868 32722 net.cpp:338] pool1 -> pool1
I0322 23:04:56.064019 32722 net.cpp:113] Setting up pool1
I0322 23:04:56.064280 32722 net.cpp:120] Top shape: 64 20 4 4 (20480)
I0322 23:04:56.064368 32722 layer_factory.hpp:74] Creating layer conv2
I0322 23:04:56.064471 32722 net.cpp:84] Creating Layer conv2
I0322 23:04:56.064546 32722 net.cpp:380] conv2 <- pool1
I0322 23:04:56.064656 32722 net.cpp:338] conv2 -> conv2
I0322 23:04:56.064790 32722 net.cpp:113] Setting up conv2
F0322 23:04:56.067253 32722 blob.cpp:101] Check failed: data_
*** Check failure stack trace: ***
@ 0x7f616bea10ee (unknown)
@ 0x7f616bea2f23 (unknown)
@ 0x7f616bea0d0a (unknown)
@ 0x7f616bea378f (unknown)
@ 0x7f616c2ca50e (unknown)
@ 0x7f616c2920d5 (unknown)
@ 0x7f616c1e97d6 (unknown)
@ 0x7f616c1ebd03 (unknown)
@ 0x7f616c2080c8 (unknown)
@ 0x7f616c2085e9 (unknown)
@ 0x7f616c2089e8 (unknown)
@ 0x40e507 (unknown)
@ 0x40e7d9 (unknown)
@ 0x408182 (unknown)
@ 0x40615a (unknown)
@ 0x7f616b3d6dc6 (unknown)
@ 0x406535 (unknown)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment