#include <algorithm>
#include <cmath>

#include "activation.h"

Activation::Activation()
{
    activation_type = ACTIVATION_SIGMOID;
}

Activation::Activation(Activation_Types _activation_type)
{
    activation_type = _activation_type;
}

Activation::~Activation()
{
}

double Activation::activate(const double& value, const bool derivative)
{
    switch (activation_type)
    {
    case (ACTIVATION_ABS):
        return (abs(value, derivative));
    case (ACTIVATION_ARCTAN):
        return (arctan(value, derivative));
    case (ACTIVATION_BOUNDEDRELU):
        return (boundedRelu(value, derivative));
    case (ACTIVATION_ELU):
        return (elu(value, derivative));
    case (ACTIVATION_GAUSSIAN):
        return (gaussian(value, derivative));
    case (ACTIVATION_LINEAR):
        return (linear(value, derivative));
    case (ACTIVATION_LOG):
        return (log(value, derivative));
    case (ACTIVATION_RELU):
        return (relu(value, derivative));
    case (ACTIVATION_SCALED_TANH):
        return (scaledTanh(value, derivative));
    case (ACTIVATION_SIGMOID):
        return (sigmoid(value, derivative));
    case (ACTIVATION_SOFTRELU):
        return (softRelu(value, derivative));
    case (ACTIVATION_SQRT):
        return (sqrt(value, derivative));
    case (ACTIVATION_SQUARE):
        return (square(value, derivative));
    case (ACTIVATION_SQUASH):
        return (squash(value, derivative));
    case (ACTIVATION_STEP):
        return (step(value, derivative));
    case (ACTIVATION_TANH):
        return (tanh(value, derivative));
    default:
        return (sigmoid(value, derivative));
    }
}

// Returns a value in [0, +inf).
//
// f(x) = |x|
double Activation::abs(const double& value, const bool derivative)
{
    if (derivative)
        return value < 0 ? -1 : 1;
    else
        return std::abs(value);
}

// Returns a value in (-pi/2, +pi/2).
//
// f(x) = atan(x)
double Activation::arctan(const double& value, const bool derivative)
{
    if (derivative)
        return 1.0 / (1.0 + value * value); // f'(x) = 1 / (1 + x^2)
    else
        return std::atan(value);
}

// Returns a value in [0, a].
//
// f(x) = min(a, max(0, x))
//
// The bound `a` is not defined anywhere in this file; a = 1.0 is assumed here.
double Activation::boundedRelu(const double& value, const bool derivative)
{
    const double a = 1.0; // assumed bound
    if (derivative)
        return (value > 0 && value < a) ? 1.0 : 0.0;
    else
        return std::min(a, std::max(0.0, value));
}

// Returns a value in (-1, +inf).
//
// f(x) = x if x > 0, else e^x - 1
double Activation::elu(const double& value, const bool derivative)
{
    if (derivative)
    {
        double output = elu(value, false);
        return output > 0 ? 1.0 : output + 1; // f'(x) = 1 if x > 0, else f(x) + 1
    }
    else
        return value > 0 ? value : std::exp(value) - 1;
}

// Returns a value in (0, 1].
//
// f(x) = e^(-x^2)
double Activation::gaussian(const double& value, const bool derivative)
{
    if (derivative)
        return -2 * value * std::exp(-value * value); // f'(x) = -2x * e^(-x^2)
    else
        return std::exp(-value * value);
}

// Unbounded: returns the input unchanged.
//
// f(x) = x
double Activation::linear(const double& value, const bool derivative)
{
    if (derivative)
        return 1;
    else
        return value;
}

// Returns a value between 0.0 and 1.0 (the logistic function).
//
// f(x) = 1 / (1 + e^-x)
double Activation::log(const double& value, const bool derivative)
{
    if (derivative)
    {
        double output = log(value, false);
        return output * (1.0 - output); // f'(x) = f(x) * (1 - f(x))
    }
    else
        return 1.0 / (1.0 + std::exp(-value));
    /*
    if (value < -45.0)
        return 0.0;
    else if (value > 45.0)
        return 1.0;
    else
        return 1.0 / (1.0 + std::exp(-value));
    */
}

// Returns a value in [0, +inf).
//
// f(x) = max(0, x)
double Activation::relu(const double& value, const bool derivative)
{
    if (derivative)
        return value > 0 ? 1.0 : 0.0;
    else
        return value > 0 ? value : 0;
}

// Returns a value between -1.7159 and +1.7159.
//
// f(x) = 1.7159 * tanh(0.66667 * x)
double Activation::scaledTanh(const double& value, const bool derivative)
{
    if (derivative)
    {
        double tanh_value = std::tanh(0.66667 * value);
        return 1.7159 * 0.66667 * (1.0 - tanh_value * tanh_value);
    }
    else
        return 1.7159 * std::tanh(0.66667 * value);
}
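// Gradient sanity check for scaledTanh (a sketch, using the constants
// a = 1.7159 and b = 0.66667 from the comment above): for f(x) = a * tanh(b * x),
// the chain rule gives f'(x) = a * b * (1 - tanh^2(b * x)), so f'(0) = a * b
// ~= 1.1439, and the slope decays toward 0 as |x| grows.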
// Returns a value between 0.0 and 1.0.
//
// f(x) = 1 / (1 + e^-x)
double Activation::sigmoid(const double& value, const bool derivative)
{
    if (derivative)
    {
        double output = sigmoid(value, false);
        return output * (1.0 - output); // f'(x) = f(x) * (1 - f(x))
    }
    else
        return 1.0 / (1.0 + std::exp(-value));
}

// Sigmoid with hard saturation outside [negative_limit, positive_limit].
double Activation::sigmoid_limit(double value, double positive_limit, double negative_limit)
{
    if (value < negative_limit)
        return 0.0;
    else if (value > positive_limit)
        return 1.0;
    else
        return 1.0 / (1.0 + std::exp(-value));
}

// Returns a value in (0, +inf) (softplus).
//
// f(x) = log(1 + e^x)
double Activation::softRelu(const double& value, const bool derivative)
{
    if (derivative)
        return 1.0 / (1.0 + std::exp(-value)); // f'(x) = 1 / (1 + e^-x)
    else
        return std::log(1.0 + std::exp(value));
}

// Returns a value in [0, +inf); only defined for x >= 0.
//
// f(x) = sqrt(x)
double Activation::sqrt(const double& value, const bool derivative)
{
    if (derivative)
        return 1.0 / (2.0 * std::sqrt(value)); // f'(x) = 1 / (2 * sqrt(x)), undefined at x = 0
    else
        return std::sqrt(value);
}

// Returns a value in [0, +inf).
//
// f(x) = x^2
double Activation::square(const double& value, const bool derivative)
{
    if (derivative)
        return 2.0 * value; // f'(x) = 2x
    else
        return value * value;
}

// Returns a value between -1.0 and +1.0 (softsign).
//
// f(x) = x / (1 + |x|)
double Activation::squash(const double& value, const bool derivative)
{
    if (derivative)
    {
        double denominator = 1.0 + std::abs(value);
        return 1.0 / (denominator * denominator); // f'(x) = 1 / (1 + |x|)^2
    }
    else
        return value / (1.0 + std::abs(value));
}

// Returns 0.0 or 1.0.
//
// f(x) = 1 if x > 0, else 0
double Activation::step(const double& value, const bool derivative)
{
    if (derivative)
        return 0; // zero gradient everywhere except x = 0, where it is undefined
    else
        return value > 0 ? 1 : 0;
}

// Returns a value between -1.0 and +1.0.
//
// f(x) = tanh(x)
double Activation::tanh(const double& value, const bool derivative)
{
    if (derivative)
    {
        double tanh_value = std::tanh(value);
        return 1.0 - tanh_value * tanh_value; // f'(x) = 1 - tanh^2(x)
    }
    else
        return std::tanh(value);
    /*
    if (value < -45.0)
        return -1.0;
    else if (value > 45.0)
        return 1.0;
    else
        return std::tanh(value);
    */
}

// Returns a value between -1.0 and +1.0: tanh with hard saturation outside
// [negative_limit, positive_limit].
double Activation::tanh_limit(double& value, double positive_limit, double negative_limit)
{
    if (value < negative_limit)
        return -1.0;
    else if (value > positive_limit)
        return 1.0;
    else
        return std::tanh(value);
}

Activation_Types Activation::getActivationType()
{
    return activation_type;
}

void Activation::setActivationType(Activation_Types _activation_type)
{
    activation_type = _activation_type;
}

/*
// C# reference code (not compiled here):
public double SoftMax(double x, string layer)
{
    // Determine max
    double max = double.MinValue;
    if (layer == "ih")
        max = (ihSum0 > ihSum1) ? ihSum0 : ihSum1;
    else if (layer == "ho")
        max = (hoSum0 > hoSum1) ? hoSum0 : hoSum1;

    // Compute scale
    double scale = 0.0;
    if (layer == "ih")
        scale = Math.Exp(ihSum0 - max) + Math.Exp(ihSum1 - max);
    else if (layer == "ho")
        scale = Math.Exp(hoSum0 - max) + Math.Exp(hoSum1 - max);

    return Math.Exp(x - max) / scale;
}
*/
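/*
// Minimal usage sketch (hypothetical example, not part of the original file):
//
// #include <iostream>
//
// int main()
// {
//     Activation act(ACTIVATION_SIGMOID);
//     std::cout << act.activate(0.0, false) << std::endl; // 0.5
//     std::cout << act.activate(0.0, true) << std::endl;  // 0.25 = 0.5 * (1 - 0.5)
//     return 0;
// }
*/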