Skip to content

Instantly share code, notes, and snippets.

@dmgottlieb
Created March 2, 2016 00:27
Show Gist options
  • Save dmgottlieb/cc7450b936a9c77aa595 to your computer and use it in GitHub Desktop.
A simple convolutional net implemented in Theano
import numpy as np

from theano import *
import theano.tensor as T
# Symbolic inputs: Q is a batch of single-channel images, shape (N, 1, H, W);
# Y holds the binary targets, shape (N, 1) to match SCORES below.
# (Y was referenced but never declared in the original -- fixed here.)
Q = T.tensor4('Q')
Y = T.matrix('Y')

# Trainable parameters. Each weight tensor is drawn from a scaled standard
# normal; biases start at zero.
W_CONV1 = shared(np.random.randn(8, 1, 3, 3) * (1.0 / 6), name='W_CONV1')
b_CONV1 = shared(np.zeros(8), name='b_CONV1')
W_CONV2 = shared(np.random.randn(16, 8, 3, 3) * 0.04, name='W_CONV2')
b_CONV2 = shared(np.zeros(16), name='b_CONV2')
# The FC layer's 16*32*32 fan-in implies 32x32 inputs ('half' padding keeps
# H and W unchanged through both convs) -- TODO confirm against the caller.
W_FC = shared(np.random.randn(16 * 32 * 32, 1) * .008, name='W_FC')
b_FC = shared(np.zeros(1), name='b_FC')

# Forward pass: (conv -> bias -> ReLU) twice, then one fully-connected layer.
CONV1 = T.nnet.conv2d(Q, W_CONV1, border_mode='half')
# dimshuffle broadcasts the per-channel bias over batch, height, and width.
CONV1_b = CONV1 + b_CONV1.dimshuffle('x', 0, 'x', 'x')
RELU1 = T.nnet.relu(CONV1_b)
CONV2 = T.nnet.conv2d(RELU1, W_CONV2, border_mode='half')
CONV2_b = CONV2 + b_CONV2.dimshuffle('x', 0, 'x', 'x')
RELU2 = T.nnet.relu(CONV2_b)
# flatten(2) collapses (N, 16, H, W) to (N, 16*H*W) for the dense layer.
SCORES = T.dot(RELU2.flatten(2), W_FC) + b_FC
# NOTE: single output unit, so this is a logistic sigmoid, not a true softmax;
# the name is kept for backward compatibility.
SOFTMAX = T.nnet.sigmoid(SCORES)

# Mean binary cross-entropy loss and its gradients w.r.t. every parameter.
LOSS = T.nnet.binary_crossentropy(SOFTMAX, Y).mean()
PARAMS = [W_CONV1, b_CONV1,
          W_CONV2, b_CONV2,
          W_FC, b_FC]
GRADS = T.grad(cost=LOSS, wrt=PARAMS)

# Plain SGD with a fixed learning rate.
alpha = 0.05
ConvNetPredict = function([Q], SOFTMAX)
ConvNetTrain = function([Q, Y], LOSS,
                        updates=[(p, p - alpha * g)
                                 for p, g in zip(PARAMS, GRADS)])
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment