Skip to content

Instantly share code, notes, and snippets.

@kalradivyanshu
Created July 19, 2018 22:36
Show Gist options
  • Save kalradivyanshu/5602e3c2f0c579401ea09943575c6797 to your computer and use it in GitHub Desktop.
# Benchmark: time 550 SGD training steps on MNIST for a configurable
# network depth, then print the depth and the elapsed wall-clock seconds.
#
# Usage: python script.py <num_extra_hidden_layers>
#
# NOTE(review): this uses the TF1 graph API (tf.placeholder / tf.Session /
# tf.layers); it will not run under TF2 without tf.compat.v1.
import sys
from time import time

import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data

# Number of additional 1000-unit tanh layers stacked after the first one.
layers = int(sys.argv[1])

# Downloads MNIST into MNIST_data/ on first run; labels are one-hot 10-vectors.
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)

# Build the graph: 784-dim flattened images -> (1 + `layers`) tanh hidden
# layers of width 1000 -> 10 raw logits (softmax applied inside the loss).
x = tf.placeholder(tf.float32, [None, 784])
hiddenLayer = tf.layers.dense(x, 1000, activation=tf.nn.tanh)
lastLayer = hiddenLayer
for i in range(layers):
    lastLayer = tf.layers.dense(lastLayer, 1000, activation=tf.nn.tanh)
output = tf.layers.dense(lastLayer, 10)

y_ = tf.placeholder(tf.float32, [None, 10])
cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(labels=y_, logits=output)
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)

sess = tf.Session()
sess.run(tf.global_variables_initializer())

# Time 550 minibatch SGD steps (batch size 100 — one pass over the
# 55k-image training set). The per-step print is progress-only.
then = time()
for step in range(550):
    print(step, end="\r")
    batch_xs, batch_ys = mnist.train.next_batch(100)
    sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
print(layers, time() - then)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment