# Transfer learning on CIFAR-10: train a single softmax layer on top of the
# maxpool5 features of a pretrained AlexNet-style network (TensorFlow 1.x).
from cifar import Cifar
from tqdm import tqdm
import tensorflow as tf

import helper
import pretrained

# Hyperparameters
n_classes = 10          # CIFAR-10 has 10 classes
learning_rate = 0.00001
batch_size = 16
no_of_epochs = 100
image_size = 224        # input size expected by the pretrained network
# Flatten the pretrained network's last pooling layer and attach a new fully
# connected output layer; 9216 = 6 * 6 * 256, the flattened size of maxpool5.
conv5 = tf.layers.flatten(pretrained.maxpool5)
weights = tf.Variable(tf.zeros([9216, n_classes]), name="output_weight")
bias = tf.Variable(tf.truncated_normal([n_classes]), name="output_bias")
model = tf.matmul(conv5, weights) + bias
# One-hot labels, cross-entropy loss, and the training/evaluation ops.
outputs = tf.placeholder(tf.float32, [None, n_classes])
cost = tf.losses.softmax_cross_entropy(onehot_labels=outputs, logits=model)
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
correct_pred = tf.equal(tf.argmax(model, 1), tf.argmax(outputs, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
cifar = Cifar(batch_size=batch_size)
# Resize the held-out test images to the network's 224x224 input size.
cifar.create_resized_test_set(dim=image_size)
# tf.initialize_all_variables() is deprecated; use the current initializer.
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    for epoch in range(no_of_epochs):
        for i in tqdm(range(cifar.no_of_batches),
                      desc="Epoch {}".format(epoch),
                      unit=" batch"):
            this_batch = cifar.batch(i)
            # Resize the batch to 224x224 and one-hot encode the labels.
            input_batch, out = helper.reshape_batch(
                this_batch, (image_size, image_size), n_classes)
            sess.run(optimizer,
                     feed_dict={
                         pretrained.x: input_batch,
                         outputs: out})
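
    # --- Evaluation sketch (not in the original gist) ---
    # A minimal check of the trained layer on the resized test set, using the
    # `accuracy` op defined above. It assumes the Cifar helper exposes the set
    # built by create_resized_test_set() as `cifar.test_images` /
    # `cifar.test_labels`; those attribute names are hypothetical, so adjust
    # them to match your cifar module.
    test_acc = sess.run(accuracy,
                        feed_dict={
                            pretrained.x: cifar.test_images,
                            outputs: cifar.test_labels})
    print("Test accuracy after {} epochs: {:.4f}".format(no_of_epochs, test_acc))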