activation.h

#ifndef SHAREWIZ_ACTIVATION_H
#define SHAREWIZ_ACTIVATION_H
 
#include <memory>
 
 
// Built-in activation functions.
 
enum Activation_Types
{
  ACTIVATION_ABS,                      // Absolute value.
  ACTIVATION_ARCTAN,                   // Arctan.
  ACTIVATION_BENT,                     // Bent identity.
  ACTIVATION_BINARY_HALF_STEP,         // Binary half step.
  ACTIVATION_BINARY_STEP,              // Binary step.
  ACTIVATION_BOUNDED_RELU,             // Bounded rectified linear.
  ACTIVATION_ELU,                      // Exponential Linear Unit.
  ACTIVATION_GAUSSIAN,                 // Gaussian.
  ACTIVATION_IDENTITY,                 // Linear.
  ACTIVATION_LINEAR,                   // Linear.
  ACTIVATION_LOG,                      // Logistic.
  ACTIVATION_PRELU,                    // Parametric Rectified Linear Unit.
  ACTIVATION_RELU,                     // Rectified linear.
  ACTIVATION_SCALED_TANH,              // Scaled tanh: 1.7159 * tanh(0.66667 * x).
  ACTIVATION_SIGMOID,                  // Sigmoid.
  ACTIVATION_SINC,                     // Sinc.
  ACTIVATION_SINUSOID,                 // Sinusoid.
  ACTIVATION_SOFT_EXPONENTIAL,         // Soft exponential.
  ACTIVATION_SOFT_PLUS,                // Soft plus. 
  ACTIVATION_SOFT_RELU,                // Soft rectified linear. 
  ACTIVATION_SOFT_SIGN,                // Soft sign. 
  ACTIVATION_SOFT_STEP,                // Soft step (aka Logistic). 
  ACTIVATION_SQRT,                     // Square Root.
  ACTIVATION_SQUARE,                   // Square.
  ACTIVATION_SQUASH,                   // Squash.
  ACTIVATION_STEP,                     // Step.
  ACTIVATION_TANH                      // Hyperbolic tangent.
};
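 
// For reference, the conventional formula/derivative pairs behind a few of
// these entries:
//   sigmoid: f(x) = 1 / (1 + exp(-x)),  f'(x) = f(x) * (1 - f(x))
//   tanh:    f(x) = tanh(x),            f'(x) = 1 - f(x)^2
//   relu:    f(x) = max(0, x),          f'(x) = (x > 0) ? 1 : 0
//   elu:     f(x) = (x > 0) ? x : alpha * (exp(x) - 1)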
 
class Activation;
 
typedef std::shared_ptr<Activation> pActivationX;
//typedef std::vector<pActivationX> pActivation;
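// Illustrative use of the typedef above:
//   pActivationX act = std::make_shared<Activation>(ACTIVATION_RELU);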
 
class Activation
{
private:
  //enum {SIGMOID, TANH, RELU, LINEAR} types;
  //types type;
  Activation_Types activation_type;
 
public:
  Activation();
  Activation(Activation_Types _activation_type);
  ~Activation();
 
  // Returns f(value), or the derivative f'(value) when derivative is true.
  // alpha is forwarded to the parameterised functions (elu, prelu, softExponential).
  double activate(const double& value, const bool derivative = false, const double& alpha = 0.0);
 
  double abs(const double& value, const bool derivative = false);
  double arctan(const double& value, const bool derivative = false);
  double bent(const double& value, const bool derivative = false);
  double binaryHalfStep(const double& value, const bool derivative = false);
  double binaryStep(const double& value, const bool derivative = false);
  double boundedRelu(const double& value, const bool derivative = false);
  double elu(const double& value, const double& alpha, const bool derivative = false);
  double gaussian(const double& value, const bool derivative = false);
  double identity(const double& value, const bool derivative = false);
  double linear(const double& value, const bool derivative = false);
  double log(const double& value, const bool derivative = false);
  double prelu(const double& value, const double& alpha, const bool derivative = false);
  double relu(const double& value, const bool derivative = false);
  double scaledTanh(const double& value, const bool derivative = false);
  double sigmoid(const double& value, const bool derivative = false);
  double sigmoid_limit(double value, double positive_limit = 45.0, double negative_limit = -45.0);
  double sinc(const double& value, const bool derivative = false);
  double sinusoid(const double& value, const bool derivative = false);
  double softExponential(const double& value, const double& alpha, const bool derivative = false);
  double softPlus(const double& value, const bool derivative = false);
  double softRelu(const double& value, const bool derivative = false);
  double softSign(const double& value, const bool derivative = false);
  double softStep(const double& value, const bool derivative = false);
  double sqrt(const double& value, const bool derivative = false);
  double square(const double& value, const bool derivative = false);
  double squash(const double& value, const bool derivative = false);
  double step(const double& value, const bool derivative = false);
  double tanh(const double& value, const bool derivative = false);
  double tanh_limit(double& value, double positive_limit = 10.0, double negative_limit = -10.0);
 
 
  Activation_Types getActivationType();
  void setActivationType(Activation_Types _activation_type);
 
 
  //double sigmoid(const double& value);
  //double sigmoid_derivative(const double& value);
 
  //double tanh_derivative(const double& value);
};
 
 
 
/*
// Built-in activation functions (TypeScript reference implementation).
export class Activations {
  public static TANH: ActivationFunction = {
    output: x => (<any>Math).tanh(x),
    der: x => {
      let output = Activations.TANH.output(x);
      return 1 - output * output;
    }
  };
  public static RELU: ActivationFunction = {
    output: x => Math.max(0, x),
    der: x => x <= 0 ? 0 : 1
  };
  public static SIGMOID: ActivationFunction = {
    output: x => 1 / (1 + Math.exp(-x)),
    der: x => {
      let output = Activations.SIGMOID.output(x);
      return output * (1 - output);
    }
  };
  public static LINEAR: ActivationFunction = {
    output: x => x,
    der: x => 1
  };
}
*/
 
/*
// Built-in regularization functions.
export class RegularizationFunction {
  public static L1: RegularizationFunction = {
    output: w => Math.abs(w),
    der: w => w < 0 ? -1 : 1
  };
  public static L2: RegularizationFunction = {
    output: w => 0.5 * w * w,
    der: w => w
  };
}
*/
 
 
 
 
 
#endif  // SHAREWIZ_ACTIVATION_H
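 
Usage example
 
A minimal sketch, assuming a matching activation.cpp implements the declarations above (Activation constructed from an Activation_Types value, and activate() returning f(x), or f'(x) when derivative is true):
 
#include <cstdio>
#include "activation.h"
 
int main()
{
  Activation act(ACTIVATION_SIGMOID);
 
  double x  = 0.5;
  double y  = act.activate(x);        // f(x)
  double dy = act.activate(x, true);  // f'(x)
 
  printf("sigmoid(%.2f) = %f, derivative = %f\n", x, y, dy);
 
  // The same object can be switched to another built-in function.
  act.setActivationType(ACTIVATION_TANH);
  printf("tanh(%.2f) = %f\n", x, act.activate(x));
 
  return 0;
}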