Kenta Oono delta2323

delta2323 / convert.py
Last active July 13, 2016 15:11
Creation of Stateful RNNs from Stateless ones.
import six
import numpy
import chainer
class StatelessRNN(chainer.Chain):
    # state_names defines the order of states
    state_names = ('c', 'h')
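
The preview above cuts off after state_names. A minimal sketch of how a stateless RNN exposing state_names could be wrapped into a stateful one; the StatefulRNN class and the assumed calling convention rnn(c, h, x) -> (c, h, y) are illustrations, not the gist's actual code:

class StatefulRNN(object):
    # Not the gist's code: keeps the states of a stateless RNN between
    # calls, in the order given by its state_names attribute.
    def __init__(self, rnn):
        self.rnn = rnn
        self.states = dict.fromkeys(rnn.state_names)

    def reset_state(self):
        for name in self.rnn.state_names:
            self.states[name] = None

    def __call__(self, x):
        # Assumed convention: rnn(*states, x) returns (*new_states, y).
        old = tuple(self.states[name] for name in self.rnn.state_names)
        result = self.rnn(*(old + (x,)))
        new_states, y = result[:-1], result[-1]
        for name, value in zip(self.rnn.state_names, new_states):
            self.states[name] = value
        return y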
delta2323 / brownian_bridge.py
Created March 13, 2016 11:23
Brownian Bridge sample code
#!/usr/bin/env python
import numpy
import six
from matplotlib import pyplot
seed = 0
N = 100
M = 10
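
The preview stops after the parameters. A minimal sketch of sampling M Brownian bridge paths on [0, 1] with N steps, using the identity B(t) = W(t) - t * W(1); everything beyond seed, N, and M is an assumption about what the gist plots:

#!/usr/bin/env python
import numpy
from matplotlib import pyplot

seed = 0
N = 100  # time steps
M = 10   # sample paths

numpy.random.seed(seed)
t = numpy.linspace(0.0, 1.0, N + 1)
# Standard Brownian motion: cumulative sum of Gaussian increments.
dW = numpy.random.randn(M, N) * numpy.sqrt(1.0 / N)
W = numpy.hstack([numpy.zeros((M, 1)), numpy.cumsum(dW, axis=1)])
# Brownian bridge: subtract the line from W(0)=0 to W(1), pinning both ends at 0.
B = W - t * W[:, -1:]

for path in B:
    pyplot.plot(t, path)
pyplot.savefig('brownian_bridge.png')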
import time
global_hooks = {}
class FunctionHook(object):
    def __enter__(self):
        global global_hooks
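
The class body is cut off here. A minimal sketch of a context-manager hook that registers itself in global_hooks while a with-block is active and records elapsed wall-clock time; the registration key and the timing payload are assumptions, not the gist's code:

import time

global_hooks = {}

class FunctionHook(object):
    def __enter__(self):
        global global_hooks
        # Make the hook visible globally for the duration of the block.
        global_hooks[id(self)] = self
        self.start = time.time()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.elapsed = time.time() - self.start
        del global_hooks[id(self)]

Usage: `with FunctionHook() as hook: ...` leaves the measured time in hook.elapsed and removes the hook from global_hooks on exit.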
import chainer
from chainer import links
from chainer import optimizers
from chainer import functions
import numpy
import six
init = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
target = chainer.Variable(numpy.ones_like(init))
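
The snippet ends after defining init and target. A minimal sketch of what a fit-to-target experiment could look like: wrap init as a trainable parameter and pull it toward target with mean squared error and SGD. This uses the Chainer v2+ Parameter/init_scope API, which is newer than the gist, and the training loop itself is an assumption:

import numpy
import chainer
from chainer import functions, optimizers

init = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
target = chainer.Variable(numpy.ones_like(init))

link = chainer.Link()
with link.init_scope():
    link.w = chainer.Parameter(init)  # trainable copy of init

optimizer = optimizers.SGD(lr=0.1)
optimizer.setup(link)

for _ in range(100):
    loss = functions.mean_squared_error(link.w, target)
    link.cleargrads()
    loss.backward()
    optimizer.update()

print(link.w.data)  # close to all ones after training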
# GPU to use: Tesla K40
# Comments are GPU memory usage measured by nvidia-smi
import chainer, cupy, numpy
cupy.cuda.Device(1).use()
a = cupy.array((), dtype=numpy.float32) # 74MiB
# I have not investigated what components consume first 74MiB memory.
b = cupy.ones((1024, 1024), dtype=numpy.float32) # 78MiB
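
As a cross-check against nvidia-smi, device memory usage can also be read from inside the process via the CUDA runtime API that CuPy exposes; note this figure includes the CUDA context itself, so it will not match the per-array numbers above exactly:

import cupy

cupy.cuda.Device(1).use()
free, total = cupy.cuda.runtime.memGetInfo()
print('used MiB: %.1f' % ((total - free) / 1024.0 ** 2))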
from chainer import links
from chainer import serializers
import numpy
cls = links.Classifier(links.Linear(10, 10))
cls.add_persistent('k', numpy.random.uniform(-1, 1, (1, 3)))
print(cls._persistent)  # => ['k']
print(cls.k)  # => [[-0.76103399 0.09844306 -0.25231615]]
serializers.save_hdf5('test.hd5', cls)
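
A hedged follow-up showing that the persistent value survives a save/load round trip; the fresh cls2 instance and the zero placeholder are illustrations, not part of the gist:

from chainer import links
from chainer import serializers
import numpy

cls2 = links.Classifier(links.Linear(10, 10))
# The persistent attribute must be registered before loading into it.
cls2.add_persistent('k', numpy.zeros((1, 3)))
serializers.load_hdf5('test.hd5', cls2)
print(cls2.k)  # same values as cls.k above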
#!/usr/bin/env python
import re
import requests
r = requests.get('https://papers.nips.cc/book/advances-in-neural-information-processing-systems-28-2015')
urls = re.findall('<a href="(/paper/[0-9]+.*?)">.*?</a>', r.text)
for url in urls:
    url = 'https://papers.nips.cc' + url + '.pdf'
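
The loop body is cut off. A hedged completion that saves each PDF into the current directory; deriving the file name from the URL is an assumption:

#!/usr/bin/env python
import os
import re
import requests

r = requests.get('https://papers.nips.cc/book/advances-in-neural-information-processing-systems-28-2015')
urls = re.findall('<a href="(/paper/[0-9]+.*?)">.*?</a>', r.text)
for url in urls:
    url = 'https://papers.nips.cc' + url + '.pdf'
    filename = os.path.basename(url)
    response = requests.get(url)
    with open(filename, 'wb') as f:
        f.write(response.content)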
delta2323 / v1.4.py
Last active November 20, 2015 04:57
#!/usr/bin/env python
import chainer
from chainer import functions as F
from chainer import optimizers as O
import numpy as np
print(chainer.__version__)
# Obtain the caffemodel
cd models/bvlc_reference_caffenet/
wget http://dl.caffe.berkeleyvision.org/bvlc_reference_caffenet.caffemodel
# This URL comes from caffemodel_url in readme.md
# The download takes quite a while (around 30 minutes?)
# Obtain imagenet_mean.binaryproto and synset_words.txt
cd ../../
./data/ilsvrc12/get_ilsvrc_aux.sh
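
Once the files are in place, a minimal sketch of loading the reference CaffeNet in Chainer and running a dummy forward pass; the module path chainer.links.caffe is the one used by recent Chainer releases (older versions exposed chainer.functions.caffe), and the dummy input skips mean subtraction:

import numpy
import chainer
from chainer.links.caffe import CaffeFunction

# Parsing the caffemodel (a few hundred MB) takes a while on first load.
model = CaffeFunction('models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel')

# Dummy batch with CaffeNet's 227x227 input size; mean subtraction omitted.
x = chainer.Variable(numpy.zeros((1, 3, 227, 227), dtype=numpy.float32))
y, = model(inputs={'data': x}, outputs=['fc8'])
print(y.shape)  # (1, 1000)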