Created
January 7, 2017 00:32
-
-
Save kuasha/b136d9438c1901eb03e64f2e5c5d72ba to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
### Preprocess the data here.
### Feel free to use as many code cells as needed.
# NOTE(review): `tf` is referenced by LeNet below but was not imported in this
# cell — presumably imported in an earlier notebook cell; re-importing is harmless.
import tensorflow as tf
from tensorflow.contrib.layers import flatten
def LeNet(x):
    """Build the LeNet-5 graph and return per-class logits.

    Two conv / relu / max-pool stages followed by three fully connected
    layers. The final layer is sized by the module-level ``n_classes``
    (43 per the original layer comments — presumably the German
    traffic-sign class count; confirm upstream).

    Args:
        x: 4-D image batch tensor; assumed shape (batch, 32, 32, 3) —
           implied by the (5, 5, 3, 6) filter and the 400-unit flatten.

    Returns:
        2-D tensor of unscaled logits, shape (batch, n_classes).
    """
    # Hyperparameters for the truncated-normal weight initialisation.
    mu = 0
    sigma = 0.1

    # Stage 1: 32x32x3 -> conv 5x5 VALID -> 28x28x6 -> 2x2 pool -> 14x14x6.
    w1 = tf.Variable(tf.truncated_normal(shape=(5, 5, 3, 6), mean=mu, stddev=sigma))
    b1 = tf.Variable(tf.zeros(6))
    stage1 = tf.nn.relu(tf.nn.conv2d(x, w1, strides=[1, 1, 1, 1], padding='VALID') + b1)
    stage1 = tf.nn.max_pool(stage1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='VALID')

    # Stage 2: 14x14x6 -> conv 5x5 VALID -> 10x10x16 -> 2x2 pool -> 5x5x16.
    w2 = tf.Variable(tf.truncated_normal(shape=(5, 5, 6, 16), mean=mu, stddev=sigma))
    b2 = tf.Variable(tf.zeros(16))
    stage2 = tf.nn.relu(tf.nn.conv2d(stage1, w2, strides=[1, 1, 1, 1], padding='VALID') + b2)
    stage2 = tf.nn.max_pool(stage2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='VALID')

    # Collapse the 5x5x16 feature maps into a 400-wide vector per example.
    flat = flatten(stage2)

    # Fully connected 400 -> 120, relu.
    w3 = tf.Variable(tf.truncated_normal(shape=(400, 120), mean=mu, stddev=sigma))
    b3 = tf.Variable(tf.zeros(120))
    hidden1 = tf.nn.relu(tf.matmul(flat, w3) + b3)

    # Fully connected 120 -> 84, relu.
    w4 = tf.Variable(tf.truncated_normal(shape=(120, 84), mean=mu, stddev=sigma))
    b4 = tf.Variable(tf.zeros(84))
    hidden2 = tf.nn.relu(tf.matmul(hidden1, w4) + b4)

    # Fully connected 84 -> n_classes; no activation — raw logits for the
    # downstream softmax cross-entropy loss.
    w5 = tf.Variable(tf.truncated_normal(shape=(84, n_classes), mean=mu, stddev=sigma))
    b5 = tf.Variable(tf.zeros(n_classes))
    return tf.matmul(hidden2, w5) + b5
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment