Skip to content

Instantly share code, notes, and snippets.

@christopher-beckham
Created March 22, 2017 14:22
Show Gist options
  • Select an option

  • Save christopher-beckham/4a61ec33589728f277dda361b32b0ebc to your computer and use it in GitHub Desktop.

Select an option

Save christopher-beckham/4a61ec33589728f277dda361b32b0ebc to your computer and use it in GitHub Desktop.
import theano
from theano import OpFromGraph
from theano import tensor as T
import numpy as np
import lasagne
from lasagne.layers import *
# suppose we have the network architecture:
# input -> conv1 -> conv2 -> dense
# and we want to make (conv1 -> conv2) a block
# (i.e. a 'super layer')
# normally, we'd do this:
# Baseline: the three layers are chained directly, so lasagne sees
# every layer (and every parameter) individually.
l_in = InputLayer((None, 1, 28, 28))  # NCHW, e.g. MNIST-sized input
l_conv = Conv2DLayer(l_in, num_filters=5, filter_size=3)
l_conv2 = Conv2DLayer(l_conv, num_filters=5, filter_size=3)
l_dense = DenseLayer(l_conv2, num_units=2)
# but we could define a layer that uses OpFromGraph
class OpFromGraphLayer(Layer):
    """Lasagne layer that wraps a theano ``OpFromGraph`` as a single op.

    ``block_fn`` is called once with the incoming layer; it must return a
    tuple ``(op, out_shp)`` where ``op`` is a callable theano Op mapping the
    incoming tensor to the block's output, and ``out_shp`` is the output
    shape tuple to report to downstream layers.

    NOTE(review): parameters created inside ``block_fn``'s sub-network are
    not registered with this layer, so ``lasagne.layers.get_all_params``
    presumably will not see them — confirm before training through this.
    """

    def __init__(self, incoming, block_fn, **kwargs):
        super(OpFromGraphLayer, self).__init__(incoming, **kwargs)
        # Build the inner graph once at construction time; keep both the
        # compiled op and the precomputed output shape.
        self.op, self.out_shp = block_fn(incoming)

    def get_output_shape_for(self, input_shape):
        # Shape was fixed by block_fn at construction; input_shape is
        # ignored because the inner graph was built for that shape already.
        return self.out_shp

    def get_output_for(self, input, **kwargs):
        # Apply the wrapped OpFromGraph to the incoming tensor.
        return self.op(input)
# this block takes some starting layer, adds two convolutions,
# then returns the result as an op
def block(l_begin):
    """Build a (conv -> conv) sub-network and compile it into one theano Op.

    Parameters
    ----------
    l_begin : lasagne Layer
        The layer the block will be attached after; only its
        ``output_shape`` is used to size the block's own InputLayer.

    Returns
    -------
    (OpFromGraph, tuple)
        The compiled op mapping the block's input tensor to its output,
        and the block's output shape.
    """
    # A fresh InputLayer sized to match whatever feeds the block.
    l_in = InputLayer(l_begin.output_shape)
    l_conv = Conv2DLayer(l_in, num_filters=5, filter_size=3)
    l_conv2 = Conv2DLayer(l_conv, num_filters=5, filter_size=3)
    # Symbolic output of the sub-network in terms of l_in's input variable.
    final_out = get_output(l_conv2, l_in.input_var)
    return OpFromGraph([l_in.input_var], [final_out]), l_conv2.output_shape
# Same architecture as above, but conv1 -> conv2 now appears to lasagne
# as a single OpFromGraphLayer 'super layer'.
l_in = InputLayer((None, 1, 28, 28))
l_op = OpFromGraphLayer(l_in, block_fn=block)
l_dense = DenseLayer(l_op, num_units=2)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment