@kalradivyanshu
Created July 20, 2018 00:04
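# Benchmark: times 550 MNIST training steps for a small CNN whose depth is
# set on the command line, then prints the layer count and elapsed seconds.
# Usage: python <script>.py N   (N = number of extra 5x5 conv layers)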
import sys
from time import time

import tensorflow as tf
tf.logging.set_verbosity(tf.logging.ERROR)
from tensorflow.examples.tutorials.mnist import input_data

# Load MNIST with one-hot labels (downloaded to MNIST_data/ on first run).
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
image_size = 28

# Flat 784-pixel (28*28) input, reshaped to NHWC for the conv layers.
x = tf.placeholder(tf.float32, [None, image_size * image_size])
input2d = tf.reshape(x, [-1, image_size, image_size, 1])

# Number of extra conv layers to stack, read from the command line.
layers = int(sys.argv[1])

conv = tf.layers.conv2d(inputs=input2d, filters=32, kernel_size=[5, 5], padding="same", activation=tf.nn.relu)
for _ in range(layers):
    conv = tf.layers.conv2d(inputs=conv, filters=32, kernel_size=[5, 5], padding="same", activation=tf.nn.relu)

# 2x2 max-pool, flatten, then a 1024-unit hidden layer and 10-way logits.
pool1 = tf.layers.max_pooling2d(inputs=conv, pool_size=[2, 2], strides=2)
pool_flat = tf.layers.flatten(pool1)
hidden = tf.layers.dense(inputs=pool_flat, units=1024, activation=tf.nn.relu)
output = tf.layers.dense(inputs=hidden, units=10)
# Softmax cross-entropy against the one-hot labels, averaged over the batch,
# trained with plain SGD.
y_ = tf.placeholder(tf.float32, [None, 10])
cross_entropy = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(labels=y_, logits=output))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
sess = tf.Session()
sess.run(tf.global_variables_initializer())

# Time 550 mini-batch steps (batch size 100, i.e. one pass over the 55,000
# MNIST training images).
then = time()
for step in range(550):
    print(step, end="\r")
    batch_xs, batch_ys = mnist.train.next_batch(100)
    sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
print(layers, time() - then)
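The final line prints the requested extra-layer count and the wall-clock seconds for the 550 steps, so running the script for several values of N (e.g. 0, 1, 2, 4) and comparing the outputs shows how training time scales with network depth.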