@awjuliani
Created October 13, 2016 21:15
import tensorflow as tf
import numpy as np
import tensorflow.contrib.slim as slim

total_layers = 25  # Specify how deep we want our network
units_between_stride = total_layers // 5  # Integer division: conv units per dense block

def denseBlock(input_layer, i, j):
    # DenseNet-style block: each conv layer takes the channel-wise
    # concatenation of all preceding layer outputs in the block as input.
    with tf.variable_scope("dense_unit" + str(i)):
        nodes = []
        a = slim.conv2d(input_layer, 64, [3, 3], normalizer_fn=slim.batch_norm)
        nodes.append(a)
        for z in range(j):
            # Note: tf.concat(dim, values) is the pre-1.0 signature;
            # on TF >= 1.0 this call would be tf.concat(nodes, 3).
            b = slim.conv2d(tf.concat(3, nodes), 64, [3, 3], normalizer_fn=slim.batch_norm)
            nodes.append(b)
        return b

tf.reset_default_graph()

# Placeholders for 32x32 RGB images (e.g. CIFAR-10) and integer class labels.
input_layer = tf.placeholder(shape=[None, 32, 32, 3], dtype=tf.float32, name='input')
label_layer = tf.placeholder(shape=[None], dtype=tf.int32)
label_oh = slim.layers.one_hot_encoding(label_layer, 10)

# Initial convolution, then five dense blocks, each followed by a strided
# convolution that halves the spatial resolution.
layer1 = slim.conv2d(input_layer, 64, [3, 3], normalizer_fn=slim.batch_norm, scope='conv_' + str(0))
for i in range(5):
    layer1 = denseBlock(layer1, i, units_between_stride)
    layer1 = slim.conv2d(layer1, 64, [3, 3], stride=[2, 2], normalizer_fn=slim.batch_norm, scope='conv_s_' + str(i))

top = slim.conv2d(layer1, 10, [3, 3], normalizer_fn=slim.batch_norm, activation_fn=None, scope='conv_top')
output = slim.layers.softmax(slim.layers.flatten(top))

# Cross-entropy loss; the epsilon belongs inside the log to guard against log(0).
loss = tf.reduce_mean(-tf.reduce_sum(label_oh * tf.log(output + 1e-10), reduction_indices=[1]))
trainer = tf.train.AdamOptimizer(learning_rate=0.001)
update = trainer.minimize(loss)
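
For reference, a minimal training-loop sketch (not part of the original gist) showing how the placeholders and the update op above might be driven. The random images and labels are dummy stand-ins for a real 32x32 dataset such as CIFAR-10, and the initializer call follows the TF 0.x API this gist targets.

# Minimal training-loop sketch: feeds random dummy data in place of a real dataset.
batch_size = 64
with tf.Session() as sess:
    # On TF >= 0.12 this could be tf.global_variables_initializer() instead.
    sess.run(tf.initialize_all_variables())
    for step in range(100):
        images = np.random.rand(batch_size, 32, 32, 3).astype(np.float32)
        labels = np.random.randint(0, 10, size=batch_size)
        _, loss_val = sess.run([update, loss],
                               feed_dict={input_layer: images, label_layer: labels})
        if step % 10 == 0:
            print("step %d, loss %.4f" % (step, loss_val))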

ItamarF commented Sep 24, 2017

Maybe: tf.concat(3, nodes) --> tf.concat(nodes, 3) ?
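
For context, tf.concat changed its signature in TensorFlow 1.0 from tf.concat(concat_dim, values) to tf.concat(values, axis), so the reordering suggested above is what 1.0+ expects. A small self-contained sketch of the two forms:

import tensorflow as tf
a = tf.zeros([1, 8, 8, 64])
b = tf.zeros([1, 8, 8, 64])
merged = tf.concat([a, b], 3)   # TF >= 1.0: tf.concat(values, axis)
# Pre-1.0 equivalent, as used in the gist: tf.concat(3, [a, b])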
