#ifndef __SHAREWIZ_ACTIVATION_H__
#define __SHAREWIZ_ACTIVATION_H__

#include <memory>

// Built-in activation functions.
enum Activation_Types
{
  ACTIVATION_ABS,          // Absolute value.
  ACTIVATION_ARCTAN,       // Arctan.
  ACTIVATION_BOUNDEDRELU,  // Bounded rectified linear.
  ACTIVATION_ELU,          // Exponential linear unit.
  ACTIVATION_GAUSSIAN,     // Gaussian.
  ACTIVATION_LINEAR,       // Linear.
  ACTIVATION_LOG,          // Logistic.
  ACTIVATION_RELU,         // Rectified linear.
  ACTIVATION_SCALED_TANH,  // Scaled tanh: 1.7159 * tanh(0.66667 * x).
  ACTIVATION_SIGMOID,      // Sigmoid.
  ACTIVATION_SOFTRELU,     // Soft rectified linear.
  ACTIVATION_SQRT,         // Square root.
  ACTIVATION_SQUARE,       // Square.
  ACTIVATION_SQUASH,       // Squash.
  ACTIVATION_STEP,         // Step.
  ACTIVATION_TANH          // Hyperbolic tangent.
};

class Activation;

typedef std::shared_ptr<Activation> pActivationX;
//typedef std::vector<pActivationX> pActivation;

class Activation
{
private:
  //enum {SIGMOID, TANH, RELU, LINEAR} types;
  //types type;
  Activation_Types activation_type;

public:
  Activation();
  Activation(Activation_Types _activation_type);
  ~Activation();

  double activate(const double& value, const bool derivative=false);

  double abs(const double& value, const bool derivative=false);
  double arctan(const double& value, const bool derivative=false);
  double boundedRelu(const double& value, const bool derivative=false);
  double elu(const double& value, const bool derivative=false);
  double gaussian(const double& value, const bool derivative=false);
  double linear(const double& value, const bool derivative=false);
  double log(const double& value, const bool derivative=false);
  double relu(const double& value, const bool derivative=false);
  double scaledTanh(const double& value, const bool derivative=false);
  double sigmoid(const double& value, const bool derivative=false);
  double sigmoid_limit(double value, double positive_limit=45.0, double negative_limit=-45.0);
  double softRelu(const double& value, const bool derivative=false);
  double sqrt(const double& value, const bool derivative=false);
  double square(const double& value, const bool derivative=false);
  double squash(const double& value, const bool derivative=false);
  double step(const double& value, const bool derivative=false);
  double tanh(const double& value, const bool derivative=false);
  double tanh_limit(double& value, double positive_limit=10.0, double negative_limit=-10.0);

  Activation_Types getActivationType();
  void setActivationType(Activation_Types _activation_type);

  //double sigmoid(const double& value);
  //double sigmoid_derivative(const double& value);
  //double tanh_derivative(const double& value);
};

/*
// Built-in activation functions.
export class Activations {
  public static TANH: ActivationFunction = {
    output: x => Math.tanh(x),
    der: x => {
      let output = Activations.TANH.output(x);
      return 1 - output * output;
    }
  };
  public static RELU: ActivationFunction = {
    output: x => Math.max(0, x),
    der: x => x <= 0 ? 0 : 1
  };
  public static SIGMOID: ActivationFunction = {
    output: x => 1 / (1 + Math.exp(-x)),
    der: x => {
      let output = Activations.SIGMOID.output(x);
      return output * (1 - output);
    }
  };
  public static LINEAR: ActivationFunction = {
    output: x => x,
    der: x => 1
  };
}

// Built-in regularization functions.
export class RegularizationFunction {
  public static L1: RegularizationFunction = {
    output: w => Math.abs(w),
    der: w => w < 0 ? -1 : 1
  };
  public static L2: RegularizationFunction = {
    output: w => 0.5 * w * w,
    der: w => w
  };
}
*/

#endif
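
// A minimal sketch of how activate() and sigmoid() could be defined in the
// accompanying .cpp file. The header only declares these members, so treat
// this as illustrative: the dispatch pattern and the standard sigmoid
// formula are assumptions, not the project's actual implementation.
//
//   #include <cmath>
//
//   double Activation::activate(const double& value, const bool derivative)
//   {
//     switch (activation_type) {
//       case ACTIVATION_SIGMOID: return sigmoid(value, derivative);
//       case ACTIVATION_TANH:    return tanh(value, derivative);
//       case ACTIVATION_RELU:    return relu(value, derivative);
//       // ... the remaining Activation_Types dispatch the same way ...
//       default:                 return linear(value, derivative);
//     }
//   }
//
//   double Activation::sigmoid(const double& value, const bool derivative)
//   {
//     double s = 1.0 / (1.0 + std::exp(-value));      // s(x) = 1 / (1 + e^-x)
//     return derivative ? s * (1.0 - s) : s;          // s'(x) = s(x) * (1 - s(x))
//   }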
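
// Example usage, assuming this header is saved as "sharewiz_activation.h"
// to match its include guard (the file name is not stated in the source):
//
//   #include "sharewiz_activation.h"
//   #include <iostream>
//
//   int main()
//   {
//     Activation act(ACTIVATION_SIGMOID);
//     std::cout << act.activate(0.5)       << "\n";  // sigmoid(0.5)  ~ 0.6225
//     std::cout << act.activate(0.5, true) << "\n";  // sigmoid'(0.5) ~ 0.2350
//
//     act.setActivationType(ACTIVATION_TANH);
//     std::cout << act.activate(0.5)       << "\n";  // tanh(0.5)     ~ 0.4621
//     return 0;
//   }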