----------------------------------- coverage: platform linux, python 3.5.1-final-0 ------------------------------------
Name                                    Stmts   Miss  Cover   Missing
---------------------------------------------------------------------
lasagne/__init__.py                         14      0   100%
lasagne/conftest.py                          3      0   100%
lasagne/init.py                            112      0   100%
lasagne/layers/__init__.py                  13      0   100%
lasagne/layers/base.py                      52      0   100%
lasagne/layers/conv.py                      85      0   100%
lasagne/layers/corrmm.py                    23      0   100%
lasagne/layers/cuda_convnet.py             167    156     7%   17-634
lasagne/layers/dense.py                     53      0   100%
lasagne/layers/dnn.py                       78      0   100%
lasagne/layers/embedding.py                 15      0   100%
lasagne/layers/helper.py                   127      2    98%   194-196
lasagne/layers/input.py                     23      0   100%
lasagne/layers/merge.py                    108      0   100%
lasagne/layers/noise.py                     31      0   100%
lasagne/layers/normalization.py             94      0   100%
lasagne/layers/pool.py                     152      0   100%
lasagne/layers/recurrent.py                370      0   100%
lasagne/layers/shape.py                    149      0   100%
lasagne/layers/special.py                  333      0   100%
lasagne/nonlinearities.py                   33      0   100%
lasagne/objectives.py                       50      0   100%
lasagne/random.py                            6      0   100%
lasagne/regularization.py                   21      0   100%
lasagne/theano_extensions/__init__.py        0      0   100%
lasagne/theano_extensions/conv.py          119      0   100%
lasagne/theano_extensions/padding.py        20      0   100%
lasagne/updates.py                         141      0   100%
lasagne/utils.py                           109      0   100%
---------------------------------------------------------------------
TOTAL                                     2501    158    94%
====================================================== FAILURES =======================================================
________________________________ TestGetOutput_Layer.test_get_output_with_unused_kwarg ________________________________
self = <test_helper.TestGetOutput_Layer object at 0x7fd96ad55e10>
layers = (<Mock name='mock.input_layer.input_layer' spec='InputLayer' id='140571776999376'>, <Mock name='mock.input_layer' spec='Layer' id='140571776999264'>, <Mock spec='Layer' id='140571776999320'>)
get_output = <function get_output at 0x7fd9acfb7378>
    def test_get_output_with_unused_kwarg(self, layers, get_output):
        l1, l2, l3 = layers
        unused_kwarg = object()
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            get_output(l3, kwagg=unused_kwarg)
>           assert len(w) == 1
E           assert 3 == 1
E            +  where 3 = len([<warnings.WarningMessage object at 0x7fd96ad561d0>, <warnings.WarningMessage object at 0x7fd96ad562b0>, <warnings.WarningMessage object at 0x7fd96ad560f0>])
lasagne/tests/layers/test_helper.py:237: AssertionError
______________________________ TestGetOutput_Layer.test_get_output_with_no_unused_kwarg _______________________________
self = <test_helper.TestGetOutput_Layer object at 0x7fd96ad56b38>
layers = (<Mock name='mock.input_layer.input_layer' spec='InputLayer' id='140571777001792'>, <Mock name='mock.input_layer' spec='Layer' id='140571777001680'>, <Mock spec='Layer' id='140571777001848'>)
get_output = <function get_output at 0x7fd9acfb7378>
    def test_get_output_with_no_unused_kwarg(self, layers, get_output):
        l1, l2, l3 = layers
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            get_output(l3)
>           assert len(w) == 0
E           assert 2 == 0
E            +  where 2 = len([<warnings.WarningMessage object at 0x7fd96ad566d8>, <warnings.WarningMessage object at 0x7fd96ad56e48>])
lasagne/tests/layers/test_helper.py:246: AssertionError
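
Both warning-count failures above have the same shape: warnings.catch_warnings(record=True) together with simplefilter('always') records every warning raised while get_output runs, so unrelated warnings emitted elsewhere in the stack push len(w) past the count the tests expect. Below is a minimal sketch of a more selective check; the count_matching_warnings helper and the 'kwagg' filter string are assumptions for illustration, not Lasagne's actual test code.

import warnings

def count_matching_warnings(caught, needle):
    # `caught` is the list produced by warnings.catch_warnings(record=True).
    # Counting only messages that mention `needle` ignores unrelated warnings
    # raised by imports or backend libraries during the call under test.
    return sum(needle in str(w.message) for w in caught)

# Self-contained demo of the idea:
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    warnings.warn("some unrelated warning", DeprecationWarning)
    warnings.warn("received an unused keyword argument: kwagg")
    assert len(caught) == 2                                 # raw count is inflated
    assert count_matching_warnings(caught, 'kwagg') == 1    # selective count is not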
_______________________________________ TestParametricRectifierLayer.test_prelu _______________________________________
self = <test_special.TestParametricRectifierLayer object at 0x7fd931ef1fd0>
init_alpha = <function TestParametricRectifierLayer.init_alpha.<locals>.<lambda> at 0x7fd93274b7b8>
    def test_prelu(self, init_alpha):
        import lasagne
        input_shape = (3, 28)
        input = np.random.randn(*input_shape).astype(theano.config.floatX)
        l_in = lasagne.layers.input.InputLayer(input_shape)
        l_dense = lasagne.layers.dense.DenseLayer(l_in, num_units=100)
        l_prelu = lasagne.layers.prelu(l_dense, alpha=init_alpha)
        output = lasagne.layers.get_output(l_prelu, input)
        assert l_dense.nonlinearity == lasagne.nonlinearities.identity
        W = l_dense.W.get_value()
        b = l_dense.b.get_value()
        alpha_v = l_prelu.alpha.get_value()
        expected = np.dot(input, W) + b
        expected = np.maximum(expected, 0) + \
            np.minimum(expected, 0) * alpha_v
>       assert np.allclose(output.eval(), expected)
E       assert <function allclose at 0x7fd9c434d0d0>(array([[ 0.00000000e+00, 1.26576021e-01, -1.16077764e-02,\n -8.72909743e-03, 2.74441659e-01, 1.35156974....95090973e-01,\n -8.30698758e-02, -4.08609122e-01, 6.13293409e-01,\n -2.84109414e-01]], dtype=float32), array([[ 0.00000000e+00, 1.26576036e-01, -1.16077941e-02,\n -8.72910116e-03, 2.74441659e-01, 1.35156974....95091033e-01,\n -8.30699131e-02, -4.08609092e-01, 6.13293350e-01,\n -2.84109414e-01]], dtype=float32))
E        +  where <function allclose at 0x7fd9c434d0d0> = np.allclose
E        +  and   array([[ 0.00000000e+00, 1.26576021e-01, -1.16077764e-02,\n -8.72909743e-03, 2.74441659e-01, 1.35156974....95090973e-01,\n -8.30698758e-02, -4.08609122e-01, 6.13293409e-01,\n -2.84109414e-01]], dtype=float32) = <bound method Variable.eval of Elemwise{add,no_inplace}.0>()
E        +    where <bound method Variable.eval of Elemwise{add,no_inplace}.0> = Elemwise{add,no_inplace}.0.eval
lasagne/tests/layers/test_special.py:688: AssertionError
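
The arrays compared above are float32 and their visible entries agree to roughly single precision, so the mismatch presumably comes from elements hidden behind the "...." truncation that drift past np.allclose's defaults (rtol=1e-05, atol=1e-08), which are tuned for double precision. A minimal sketch of a comparison with looser tolerances; the helper name and the rtol/atol values are assumptions for illustration, not Lasagne's actual test code.

import numpy as np

def assert_allclose_float32(actual, expected, rtol=1e-4, atol=1e-6):
    # Looser tolerances than np.allclose's defaults, chosen here for
    # single-precision data; np.testing.assert_allclose also reports the
    # worst mismatching elements instead of a bare True/False, which makes
    # failures like the one above easier to diagnose.
    np.testing.assert_allclose(np.asarray(actual), np.asarray(expected),
                               rtol=rtol, atol=atol)

# Usage sketch: the final check in the test above could then read
# assert_allclose_float32(output.eval(), expected)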
================================ 3 failed, 1046 passed, 102 skipped in 189.63 seconds =================================