/// https://en.wikipedia.org/wiki/Activation_function
module stage2.nn.activationfunctions;
import std.math : exp, pow, E, tanh, atan, abs, sqrt, log, sin, cos, PI;
/// Bundles an activation with its derivative; `arg` holds the extra
/// parameter consumed by the functions marked @UsesArg.
struct ActivationFunction {
    double function(double x, double arg) activation;
    double function(double x, double arg) derivative;
    double arg;
}
struct Range {
    import std.math : isInfinity;
    double start, end;
    bool isStartPositiveInfinity() { return start.isInfinity && start > 0; }
    bool isStartNegativeInfinity() { return start.isInfinity && start < 0; }
    bool isEndPositiveInfinity() { return end.isInfinity && end > 0; }
    bool isEndNegativeInfinity() { return end.isInfinity && end < 0; }
}
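// Sanity check for the bound helpers above (an illustrative addition,
// assuming the corrected end-bound checks; not part of the original gist).
unittest {
    auto r = Range(-double.infinity, double.infinity);
    assert(r.isStartNegativeInfinity && !r.isStartPositiveInfinity);
    assert(r.isEndPositiveInfinity && !r.isEndNegativeInfinity);
    assert(!Range(0, 1).isEndPositiveInfinity);
}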
struct UsesArg {}
@UsesArg
@Range(-double.infinity, double.infinity)
enum Linear = ActivationFunction(
    function(double x, double arg) => x * arg,
    function(double x, double arg) => arg);
@Range(0, 1)
enum HeavisideStep = ActivationFunction(
    function(double x, double arg) => x < 0 ? 0 : (x == 0 ? 0.5 : 1),
    function(double x, double arg) => x == 0 ? 1 : 0);
@Range(0, 1)
enum Sigmoid = ActivationFunction(
    function(double x, double arg) => 1 / (1 + pow(E, -x)),
    function(double x, double arg) => (1 / (1 + pow(E, -x))) * (1 - (1 / (1 + pow(E, -x)))));
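// Quick numeric check (an illustrative addition, not in the original
// gist): the logistic function is 0.5 at the origin, where its
// derivative peaks at 0.25.
unittest {
    import std.math : isClose;
    assert(isClose(Sigmoid.activation(0.0, 0.0), 0.5));
    assert(isClose(Sigmoid.derivative(0.0, 0.0), 0.25));
}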
@Range(-1, 1)
enum TanH = ActivationFunction(
    function(double x, double arg) => tanh(x),
    function(double x, double arg) => 1 - pow(tanh(x), 2));
@Range(-(PI / 2), PI / 2)
enum ArcTan = ActivationFunction(
    function(double x, double arg) => atan(x),
    function(double x, double arg) => 1 / (pow(x, 2) + 1));
@Range(-1, 1)
enum SoftSign = ActivationFunction(
    function(double x, double arg) => x / (1 + abs(x)),
    function(double x, double arg) => 1 / pow(1 + abs(x), 2));
@UsesArg
enum InverseSquareRootUnit = ActivationFunction(
    function(double x, double arg) => x / sqrt(1 + arg * pow(x, 2)),
    function(double x, double arg) => pow(1 / sqrt(1 + arg * pow(x, 2)), 3));
@Range(0, double.infinity)
enum RectifiedLinearUnit = ActivationFunction(
    function(double x, double arg) => x >= 0 ? x : 0,
    function(double x, double arg) => x >= 0 ? 1 : 0);
@Range(-double.infinity, double.infinity)
enum LeakyRectifiedLinearUnit = ActivationFunction(
    function(double x, double arg) => x >= 0 ? x : (0.01 * x),
    function(double x, double arg) => x >= 0 ? 1 : 0.01);
@UsesArg
@Range(-double.infinity, double.infinity)
enum ParametricRectifiedLinearUnit = ActivationFunction(
    function(double x, double arg) => x >= 0 ? x : arg * x,
    function(double x, double arg) => x >= 0 ? 1 : arg);
@UsesArg
enum ExponentialLinearUnit = ActivationFunction(
    function(double x, double arg) => x > 0 ? x : (arg * (exp(x) - 1)),
    function(double x, double arg) => x > 0 ? 1 : arg * exp(x));
@UsesArg
// Note: the standard SELU additionally scales both branches by a fixed
// lambda (about 1.0507, with alpha about 1.6733); only alpha is modelled here.
enum ScaledExponentialLinearUnit = ActivationFunction(
    function(double x, double arg) => x >= 0 ? x : (arg * (exp(x) - 1)),
    function(double x, double arg) => x >= 0 ? 1 : arg * exp(x));
@UsesArg
enum InverseSquareRootLinearUnit = ActivationFunction(
    function(double x, double arg) => x < 0 ? (x / sqrt(1 + arg * pow(x, 2))) : x,
    function(double x, double arg) => x < 0 ? pow(1 / sqrt(1 + arg * pow(x, 2)), 3) : 1);
@Range(0, double.infinity)
enum SoftPlus = ActivationFunction(
    function(double x, double arg) => log(1 + exp(x)),
    function(double x, double arg) => 1 / (1 + exp(-x)));
@Range(-double.infinity, double.infinity)
enum BentIdentity = ActivationFunction(
    function(double x, double arg) => ((sqrt(pow(x, 2) + 1) - 1) / 2) + x,
    function(double x, double arg) => (x / (2 * sqrt(pow(x, 2) + 1))) + 1);
@UsesArg
@Range(-double.infinity, double.infinity)
enum SoftExponential = ActivationFunction(
    function(double x, double arg) => arg < 0 ? -(log(1 - arg * (x + arg)) / arg) : (arg == 0 ? x : (((exp(arg * x) - 1) / arg) + arg)),
    function(double x, double arg) => arg < 0 ? (1 / (1 - arg * (arg + x))) : exp(arg * x));
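// Illustrative check (not in the original gist): soft exponential
// interpolates between logarithm (arg = -1), identity (arg = 0), and
// exponential (arg = 1).
unittest {
    import std.math : isClose;
    auto f = SoftExponential.activation;
    assert(isClose(f(2.0, -1.0), log(2.0)));
    assert(f(2.0, 0.0) == 2.0);
    assert(isClose(f(2.0, 1.0), exp(2.0)));
}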
@Range(-1, 1)
enum Sinusoid = ActivationFunction(
    function(double x, double arg) => sin(x),
    function(double x, double arg) => cos(x));
@Range(-0.217234, 1)
enum Sinc = ActivationFunction(
    function(double x, double arg) => x == 0 ? 1 : (sin(x) / x),
    function(double x, double arg) => x == 0 ? 0 : ((cos(x) / x) - (sin(x) / pow(x, 2))));
@Range(0, 1)
enum Gaussian = ActivationFunction(
    function(double x, double arg) => exp(-pow(x, 2)),
    function(double x, double arg) => -2 * x * exp(-pow(x, 2)));
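// Finite-difference spot check of several activation/derivative pairs
// (an illustrative addition, not part of the original gist).
unittest {
    import std.math : isClose;

    static double numericDerivative(ActivationFunction fn, double x, double arg) {
        enum h = 1e-6;
        return (fn.activation(x + h, arg) - fn.activation(x - h, arg)) / (2 * h);
    }

    foreach (fn; [TanH, SoftSign, SoftPlus, Gaussian, BentIdentity])
        foreach (x; [-1.5, -0.25, 0.5, 2.0])
            assert(isClose(fn.derivative(x, 0.0), numericDerivative(fn, x, 0.0), 1e-4, 1e-6));
}
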
module stage2.nn.costfunctions;
import std.algorithm : max;
import std.math : pow, log, exp, sqrt;
/// Bundles a per-layer reduction over summed neuron costs, a per-neuron
/// cost, its derivative, and an optional parameter for costs marked @UsesArg.
struct CostFunction {
    double function(double neuronErrorSum, size_t countNeurons, double arg) layerCost;
    double function(double target, double got, double arg) neuronCost;
    double function(double target, double got, double arg) derivative;
    double arg;
}
struct UsesArg {}
enum MeanSquareError = CostFunction(
    (double neuronErrorSum, size_t countNeurons, double arg) => neuronErrorSum / countNeurons,
    (double target, double got, double arg) => pow(got - target, 2),
    (double target, double got, double arg) => (got - target) * 2);
enum Quadratic = CostFunction(
    (double neuronErrorSum, size_t countNeurons, double arg) => (1.0 / countNeurons) * neuronErrorSum,
    (double target, double got, double arg) => pow(got - target, 2),
    (double target, double got, double arg) => got - target);
enum CrossEntropy = CostFunction(
    (double neuronErrorSum, size_t countNeurons, double arg) => -neuronErrorSum,
    (double target, double got, double arg) => target * log(got) + (1 - target) * log(1 - got),
    (double target, double got, double arg) => (got - target) / ((1 - got) * got));
enum HingeLoss = CostFunction(
    (double neuronErrorSum, size_t countNeurons, double arg) => neuronErrorSum,
    (double target, double got, double arg) => max(0, 1 - (got - target)),
    (double target, double got, double arg) => (got - target) < 1 ? -1 : 0);
@UsesArg
enum Exponential = CostFunction(
    (double neuronErrorSum, size_t countNeurons, double arg) => arg * exp((1 / arg) * neuronErrorSum),
    (double target, double got, double arg) => pow(got - target, 2),
    (double target, double got, double arg) => (2 / arg) * (got - target) * exp((1 / arg) * pow(got - target, 2)));
enum HellingerDistance = CostFunction(
    (double neuronErrorSum, size_t countNeurons, double arg) => (1 / sqrt(2.0)) * neuronErrorSum,
    (double target, double got, double arg) => pow(sqrt(got) - sqrt(target), 2),
    (double target, double got, double arg) => (sqrt(got) - sqrt(target)) / (sqrt(2.0) * sqrt(got)));
enum KullbackLeiblerDivergence = CostFunction(
    (double neuronErrorSum, size_t countNeurons, double arg) => neuronErrorSum,
    (double target, double got, double arg) => target * log(target / got),
    (double target, double got, double arg) => -(target / got));
enum ItakuraSaitoDistance = CostFunction(
    (double neuronErrorSum, size_t countNeurons, double arg) => neuronErrorSum,
    (double target, double got, double arg) => (target / got) - log(target / got) - 1,
    (double target, double got, double arg) => (got - target) / pow(got, 2));
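// Illustrative consistency checks (not in the original gist): costs
// shrink as the prediction approaches the target, and the stored
// derivatives carry the sign of the error.
unittest {
    assert(Quadratic.neuronCost(1.0, 0.5, 0.0) > Quadratic.neuronCost(1.0, 0.9, 0.0));
    assert(Quadratic.derivative(1.0, 0.5, 0.0) < 0); // undershoot: gradient pushes output up
    assert(Quadratic.derivative(1.0, 1.5, 0.0) > 0); // overshoot: gradient pushes output down
    assert(MeanSquareError.layerCost(2.0, 4, 0.0) == 0.5);
}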