@mjwillson — forked from benanne/gist:1759022, created September 7, 2015
Theano AdvancedSubtensor memory leak
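The script below reproduces the leak against Theano's old CUDA backend (theano.sandbox.cuda), so it needs a GPU build, e.g. run with THEANO_FLAGS=device=gpu,floatX=float32. The only non-trivial op in the graph is the integer-vector indexing some_matrix[lalala], which compiles to an AdvancedSubtensor (and an AdvancedIncSubtensor in the gradient); freemem() reports free GPU memory in MB around each compilation and evaluation step.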
import theano.tensor as T
import theano
import numpy as np
import gc

def freemem():
    # Collect a few times so that any CudaNdarrays whose Python wrappers
    # have become unreachable are actually freed before we read the counter.
    gc.collect()
    gc.collect()
    gc.collect()
    # mem_info() returns (free, total) GPU memory in bytes; report free MB.
    return theano.sandbox.cuda.cuda_ndarray.cuda_ndarray.mem_info()[0] / 1024**2

def reproduce_problem():
    test_params = np.asarray(np.random.randn(5000 * 6000), 'float32')
    some_vector = T.vector('some_vector')
    some_matrix = some_vector.reshape((6000, 5000))
    # Integer-vector indexing below compiles to an AdvancedSubtensor op.
    lalala = T.constant(np.arange(6000))
    variables = theano.shared(np.ones((5000,), dtype='float32'))
    derp = T.sum(T.dot(some_matrix[lalala], variables))

    print("Avail. memory before compilation: %d MB" % freemem())
    obj = theano.function([some_vector], derp)
    print("Avail. memory after objective function compilation: %d MB" % freemem())
    grad_derp = T.grad(derp, some_vector)
    grad = theano.function([some_vector], grad_derp)
    print("Avail. memory after gradient function compilation: %d MB" % freemem())

    # Free memory should stabilise after the first iteration; instead it
    # keeps dropping on each evaluation, which is the leak.
    for i in range(3):
        obj(test_params)
        print("Avail. memory after objective evaluation: %d MB" % freemem())
        grad(test_params)
        print("Avail. memory after gradient evaluation: %d MB" % freemem())

reproduce_problem()
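As a control (a minimal sketch, not part of the original gist): lalala is just arange(6000), so indexing with it selects every row in order, and some_matrix[lalala] computes the same values as some_matrix itself. Building the identical graph without the indexing should, if the op is really at fault, leave free memory flat across iterations:

def control_without_advanced_indexing():
    # Same shapes and graph as reproduce_problem(), minus the indexing,
    # so no AdvancedSubtensor / AdvancedIncSubtensor appears in the graph.
    test_params = np.asarray(np.random.randn(5000 * 6000), 'float32')
    some_vector = T.vector('some_vector')
    some_matrix = some_vector.reshape((6000, 5000))
    variables = theano.shared(np.ones((5000,), dtype='float32'))
    derp = T.sum(T.dot(some_matrix, variables))  # no some_matrix[lalala]
    obj = theano.function([some_vector], derp)
    grad = theano.function([some_vector], T.grad(derp, some_vector))
    for i in range(3):
        obj(test_params)
        grad(test_params)
        print("Avail. memory after control iteration: %d MB" % freemem())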