Modified VGG_ILSVRC_16 architecture, which contains only the convolutional layers.
# Modified VGG_ILSVRC_16 Architecture, which contains only Convolutional Layers.
# Adapted from: https://gist.github.com/ksimonyan/211839e770f7b538e2d8#file-readme-md
name: "VGG_ILSVRC_16_layers" | |
input: "data" | |
input_dim: 10 | |
input_dim: 3 | |
input_dim: 224 | |
input_dim: 224 | |
layers { | |
bottom: "data" | |
top: "conv1_1" | |
name: "conv1_1" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 64 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv1_1" | |
top: "conv1_1" | |
name: "relu1_1" | |
type: RELU | |
} | |
layers { | |
bottom: "conv1_1" | |
top: "conv1_2" | |
name: "conv1_2" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 64 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv1_2" | |
top: "conv1_2" | |
name: "relu1_2" | |
type: RELU | |
} | |
layers { | |
bottom: "conv1_2" | |
top: "pool1" | |
name: "pool1" | |
type: POOLING | |
pooling_param { | |
pool: MAX | |
kernel_size: 2 | |
stride: 2 | |
} | |
} | |
layers { | |
bottom: "pool1" | |
top: "conv2_1" | |
name: "conv2_1" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 128 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv2_1" | |
top: "conv2_1" | |
name: "relu2_1" | |
type: RELU | |
} | |
layers { | |
bottom: "conv2_1" | |
top: "conv2_2" | |
name: "conv2_2" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 128 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv2_2" | |
top: "conv2_2" | |
name: "relu2_2" | |
type: RELU | |
} | |
layers { | |
bottom: "conv2_2" | |
top: "pool2" | |
name: "pool2" | |
type: POOLING | |
pooling_param { | |
pool: MAX | |
kernel_size: 2 | |
stride: 2 | |
} | |
} | |
layers { | |
bottom: "pool2" | |
top: "conv3_1" | |
name: "conv3_1" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 256 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv3_1" | |
top: "conv3_1" | |
name: "relu3_1" | |
type: RELU | |
} | |
layers { | |
bottom: "conv3_1" | |
top: "conv3_2" | |
name: "conv3_2" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 256 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv3_2" | |
top: "conv3_2" | |
name: "relu3_2" | |
type: RELU | |
} | |
layers { | |
bottom: "conv3_2" | |
top: "conv3_3" | |
name: "conv3_3" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 256 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv3_3" | |
top: "conv3_3" | |
name: "relu3_3" | |
type: RELU | |
} | |
layers { | |
bottom: "conv3_3" | |
top: "pool3" | |
name: "pool3" | |
type: POOLING | |
pooling_param { | |
pool: MAX | |
kernel_size: 2 | |
stride: 2 | |
} | |
} | |
layers { | |
bottom: "pool3" | |
top: "conv4_1" | |
name: "conv4_1" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 512 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv4_1" | |
top: "conv4_1" | |
name: "relu4_1" | |
type: RELU | |
} | |
layers { | |
bottom: "conv4_1" | |
top: "conv4_2" | |
name: "conv4_2" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 512 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv4_2" | |
top: "conv4_2" | |
name: "relu4_2" | |
type: RELU | |
} | |
layers { | |
bottom: "conv4_2" | |
top: "conv4_3" | |
name: "conv4_3" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 512 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv4_3" | |
top: "conv4_3" | |
name: "relu4_3" | |
type: RELU | |
} | |
layers { | |
bottom: "conv4_3" | |
top: "pool4" | |
name: "pool4" | |
type: POOLING | |
pooling_param { | |
pool: MAX | |
kernel_size: 2 | |
stride: 2 | |
} | |
} | |
layers { | |
bottom: "pool4" | |
top: "conv5_1" | |
name: "conv5_1" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 512 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv5_1" | |
top: "conv5_1" | |
name: "relu5_1" | |
type: RELU | |
} | |
layers { | |
bottom: "conv5_1" | |
top: "conv5_2" | |
name: "conv5_2" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 512 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv5_2" | |
top: "conv5_2" | |
name: "relu5_2" | |
type: RELU | |
} | |
layers { | |
bottom: "conv5_2" | |
top: "conv5_3" | |
name: "conv5_3" | |
type: CONVOLUTION | |
convolution_param { | |
num_output: 512 | |
pad: 1 | |
kernel_size: 3 | |
} | |
} | |
layers { | |
bottom: "conv5_3" | |
top: "conv5_3" | |
name: "relu5_3" | |
type: RELU | |
} | |
layers { | |
bottom: "conv5_3" | |
top: "pool5" | |
name: "pool5" | |
type: POOLING | |
pooling_param { | |
pool: MAX | |
kernel_size: 2 | |
stride: 2 | |
} | |
} |
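As a quick sanity check, here is a minimal sketch of loading the truncated network through Caffe's Python interface and confirming the output shape. It assumes pycaffe is installed and that the definition above is saved under the hypothetical filename vgg16_conv_only.prototxt; recent Caffe builds typically upgrade the old-style layers/enum syntax automatically when parsing the file.

import numpy as np
import caffe  # assumes pycaffe is built and on PYTHONPATH

caffe.set_mode_cpu()

# "vgg16_conv_only.prototxt" is a placeholder name for the definition above.
net = caffe.Net('vgg16_conv_only.prototxt', caffe.TEST)
# To reuse the pretrained VGG16 weights, pass the caffemodel path as well:
# net = caffe.Net('vgg16_conv_only.prototxt', 'VGG_ILSVRC_16_layers.caffemodel', caffe.TEST)

# Fill the declared 10 x 3 x 224 x 224 input with dummy data and run a forward pass.
net.blobs['data'].data[...] = np.random.rand(10, 3, 224, 224).astype(np.float32)
net.forward()

# Each of the five pooling stages halves the spatial size: 224 -> 112 -> 56 -> 28 -> 14 -> 7,
# so pool5 should report (10, 512, 7, 7).
print(net.blobs['pool5'].data.shape)

Because the definition stops at pool5, the forward pass ends with a 10 x 512 x 7 x 7 feature map, which is the tensor you would feed into whatever head replaces the original fully connected layers.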