Last active
July 3, 2017 12:39
-
-
Save berak/854622b9e6ac0a99e43c to your computer and use it in GitHub Desktop.
tensorflow
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import cv2 | |
| import numpy as np | |
| import tensorflow as tf | |
| import tensorflow.examples.tutorials.mnist.input_data as input_data | |
def init_weights(shape):
    """Return a trainable weight tensor of the given shape.

    Values are drawn from a zero-mean normal distribution with a small
    standard deviation (0.01) so training starts near the linear regime
    of the sigmoid.
    """
    initial = tf.random_normal(shape, stddev=0.01)
    return tf.Variable(initial)
def model(X, w_h, w_o):
    """Build a one-hidden-layer MLP and return its raw output logits.

    This is two stacked affine maps with a sigmoid in between — think of
    it as two chained logistic regressions.  No softmax is applied here:
    the loss function applies it internally for numerical stability.
    """
    hidden = tf.nn.sigmoid(tf.matmul(X, w_h))
    logits = tf.matmul(hidden, w_o)
    return logits
# --- Train a one-hidden-layer MLP on MNIST and report test accuracy per epoch ---
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels

# Placeholders for a batch of flattened 28x28 images and their one-hot labels.
X = tf.placeholder("float", [None, 784])
Y = tf.placeholder("float", [None, 10])

w_h = init_weights([784, 625])  # input -> hidden weights
w_o = init_weights([625, 10])   # hidden -> output weights

py_x = model(X, w_h, w_o)  # raw logits; softmax is applied inside the loss

# Keyword arguments are required: TF >= 1.0 changed the signature of
# softmax_cross_entropy_with_logits, so positional (py_x, Y) would be wrong.
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y))
train_op = tf.train.GradientDescentOptimizer(0.05).minimize(cost)
predict_op = tf.argmax(py_x, 1)  # predicted class index per test example

BATCH_SIZE = 128
# Context manager guarantees the session (and its resources) is closed.
with tf.Session() as sess:
    # tf.initialize_all_variables() is deprecated/removed; use the modern op.
    sess.run(tf.global_variables_initializer())
    for epoch in range(30):
        # The "+ 1" on the stop keeps the final batch when len(trX) is an
        # exact multiple of BATCH_SIZE (the original dropped it).
        for start, end in zip(range(0, len(trX), BATCH_SIZE),
                              range(BATCH_SIZE, len(trX) + 1, BATCH_SIZE)):
            sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end]})
        # Test accuracy = fraction of argmax predictions matching the labels.
        accuracy = np.mean(np.argmax(teY, axis=1) ==
                           sess.run(predict_op, feed_dict={X: teX, Y: teY}))
        print("%d %f" % (epoch, accuracy))  # %-format works on Python 2 and 3
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment