Created
June 15, 2017 06:32
-
-
Save raven38/ce909f5a6ec6cfe3cbcd6d08a2f86b17 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import numpy as np

import cupy as cp

from chainer import cuda
from chainer import function
from chainer.utils import type_check
def _kern():
    """Build the elementwise CUDA kernel shared by RReLU forward/backward.

    The kernel emits ``x`` unchanged where ``cond >= 0`` and ``slope * x``
    elsewhere, so the same kernel serves both the forward pass
    (cond = x) and the backward pass (cond = x, x = gy).
    """
    kernel = cuda.elementwise(
        'T cond, T x, T slope', 'T y',
        'y = cond >= 0 ? x : (T)(slope * x)', 'rrelu')
    return kernel
class RReLU(function.Function):

    """Randomized Leaky rectifier unit.

    During training, each element of a negative input is scaled by a slope
    drawn from the uniform distribution ``U(lower, upper)``; at test time
    the deterministic mean slope ``(lower + upper) / 2`` is used instead,
    following Xu et al. (https://arxiv.org/abs/1505.00853).
    """

    def __init__(self, lower=1./8, upper=1./3, train=True):
        # lower/upper: bounds of the uniform distribution for the slope.
        # train: sample a random slope when True; use the mean otherwise.
        self.lower = lower
        self.upper = upper
        self.train = train

    def check_type_forward(self, in_types):
        # Exactly one floating-point input is accepted.
        type_check.expect(in_types.size() == 1)
        x_type, = in_types
        type_check.expect(x_type.dtype.kind == 'f')

    def forward_cpu(self, x):
        y = x[0].copy()
        if self.train:
            # Sample one slope per element.  Using x[0].shape (rather than
            # len(x[0]) as before) makes the boolean-mask indexing below
            # valid for inputs of any rank, and casting to x[0].dtype keeps
            # the output dtype unchanged.
            self.r = np.random.uniform(
                self.lower, self.upper, x[0].shape).astype(x[0].dtype)
        else:
            # ndarray.fill returns None, so the original
            # `np.empty(...).fill(...)` stored None in self.r and crashed.
            # np.full builds the constant slope array in one step.
            self.r = np.full(
                x[0].shape, (self.lower + self.upper) / 2, dtype=x[0].dtype)
        y[x[0] < 0] *= self.r[x[0] < 0]
        return y,

    def forward_gpu(self, x):
        # Mirror of forward_cpu: same slope shape and dtype so CPU and GPU
        # paths behave identically (the original used x[0].shape[1:] and a
        # hard-coded float32 here, diverging from the CPU path).
        if self.train:
            self.r = cp.random.uniform(
                self.lower, self.upper, x[0].shape).astype(x[0].dtype)
        else:
            self.r = cp.full(
                x[0].shape, (self.lower + self.upper) / 2, dtype=x[0].dtype)
        y = _kern()(x[0], x[0], self.r)
        return y,

    def backward_cpu(self, x, gy):
        # Negative inputs were scaled by self.r in forward, so the incoming
        # gradient is scaled by the same per-element factors.
        gx = gy[0].copy()
        gx[x[0] < 0] *= self.r[x[0] < 0]
        return gx,

    def backward_gpu(self, x, gy):
        gx = _kern()(x[0], gy[0], self.r)
        return gx,
def randomized_leaky_relu(x, l=1./8, u=1./3, train=True):
    """Randomized Leaky Rectified Linear Unit function.

    This function is expressed as

    .. math:: f(x)=\\max(x, ax),

    where :math:`a` is a random number sampled from a uniform
    distribution :math:`U(l, u)`.

    Args:
        x (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
        :class:`cupy.ndarray`):
            Input variable. A :math:`(s_1, s_2, ..., s_N)`-shaped float array.
        l (float): Lower bound of the sampling interval.
        u (float): Upper bound of the sampling interval.
        train (bool): Sample the slope randomly when ``True``; use the
            fixed mean slope otherwise.

    Returns:
        ~chainer.Variable: Outputs variable. A
        :math:`(s_1, s_2, ..., s_N)`-shaped float array.

    Ref:
        https://arxiv.org/pdf/1505.00853.pdf
    """
    activation = RReLU(l, u, train)
    return activation(x)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment