Soft Exponential activation
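The soft exponential is a parametric activation function from the paper referenced in the docstring below. A single trainable parameter alpha lets the unit interpolate between logarithmic, linear, and exponential behaviour: for alpha < 0 it computes -log(1 - alpha * (x + alpha)) / alpha, for alpha = 0 it is the identity, and for alpha > 0 it computes (exp(alpha * x) - 1) / alpha + alpha, which is exactly what the forward pass implements.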
import torch
import torch.nn as nn
from torch.nn.parameter import Parameter


class soft_exponential(nn.Module):
    '''
    Implementation of the soft exponential activation.

    Shape:
        - Input: (N, *) where * means any number of additional dimensions
        - Output: (N, *), same shape as the input

    Parameters:
        - alpha: trainable parameter

    References:
        - See the related paper:
          https://arxiv.org/pdf/1602.01321.pdf

    Examples:
        >>> a1 = soft_exponential(256)
        >>> x = torch.randn(256)
        >>> x = a1(x)
    '''
    def __init__(self, in_features, alpha=None):
        '''
        Initialization.

        INPUT:
            - in_features: shape of the input
            - alpha: trainable parameter,
              initialized to zero by default
        '''
        super(soft_exponential, self).__init__()
        self.in_features = in_features

        # initialize alpha as a trainable parameter
        # (nn.Parameter registers the tensor with the module,
        #  and requires_grad is True by default)
        if alpha is None:
            self.alpha = Parameter(torch.tensor(0.0))
        else:
            self.alpha = Parameter(torch.tensor(alpha))

    def forward(self, x):
        '''
        Forward pass of the function.
        Applies the soft exponential function to the input elementwise.
        '''
        if self.alpha == 0.0:
            return x
        if self.alpha < 0.0:
            return -torch.log(1 - self.alpha * (x + self.alpha)) / self.alpha
        return (torch.exp(self.alpha * x) - 1) / self.alpha + self.alpha
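A minimal usage sketch, assuming the soft_exponential module above is in scope. The network shape, learning rate, and random data below are illustrative choices, not part of the original gist; they only show that alpha is trained alongside the other parameters.

import torch
import torch.nn as nn

# Hypothetical two-layer network using the soft exponential activation;
# layer sizes and hyperparameters are illustrative.
model = nn.Sequential(
    nn.Linear(784, 256),
    soft_exponential(256),   # alpha starts at 0.0 (identity) and is learned
    nn.Linear(256, 10),
)

optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
criterion = nn.CrossEntropyLoss()

# one illustrative training step on random data
x = torch.randn(32, 784)
y = torch.randint(0, 10, (32,))
optimizer.zero_grad()
loss = criterion(model(x), y)
loss.backward()
optimizer.step()
# the activation's alpha receives gradients along with the linear weights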