brain:brain
Differences
This shows you the differences between two versions of the page.
Next revision | Previous revision | ||
brain:brain [2016/12/12 11:18] – created peter | brain:brain [2020/07/15 09:30] (current) – external edit 127.0.0.1 | ||
---|---|---|---|
Line 1: | Line 1: | ||
+ | Brain.cpp | ||
<file cpp brain.cpp> | <file cpp brain.cpp> | ||
Line 422: | Line 423: | ||
} | } | ||
</ | </ | ||
+ | |||
+ | |||
+ | activation.cpp | ||
+ | |||
+ | <file cpp activation.cpp> | ||
+ | #include < | ||
+ | |||
+ | |||
+ | #include " | ||
+ | |||
+ | |||
+ | Activation:: | ||
+ | { | ||
+ | activation_type = ACTIVATION_SIGMOID; | ||
+ | } | ||
+ | |||
+ | |||
+ | Activation:: | ||
+ | { | ||
+ | activation_type = _activation_type; | ||
+ | } | ||
+ | |||
+ | |||
+ | Activation:: | ||
+ | { | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | double Activation:: | ||
+ | { | ||
+ | switch (activation_type) | ||
+ | { | ||
+ | case (ACTIVATION_ABS) : | ||
+ | return (abs(value, derivative)); | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_ARCTAN) : | ||
+ | return (arctan(value, | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_BOUNDEDRELU) : | ||
+ | return (boundedRelu(value, | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_ELU) : | ||
+ | return (elu(value, derivative)); | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_GAUSSIAN) : | ||
+ | return (gaussian(value, | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_LINEAR) : | ||
+ | return (linear(value, | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_LOG) : | ||
+ | return (log(value, derivative)); | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_RELU) : | ||
+ | return (relu(value, | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_SCALED_TANH) : | ||
+ | return (tanh(value, | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_SIGMOID) : | ||
+ | return (sigmoid(value, | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_SOFTRELU) : | ||
+ | return (softRelu(value, | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_SQRT) : | ||
+ | return (sqrt(value, | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_SQUARE) : | ||
+ | return (square(value, | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_SQUASH) : | ||
+ | return (squash(value, | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_STEP) : | ||
+ | return (step(value, | ||
+ | break; | ||
+ | |||
+ | case (ACTIVATION_TANH) : | ||
+ | return (tanh(value, | ||
+ | break; | ||
+ | |||
+ | default: | ||
+ | return (sigmoid(value, | ||
+ | break; | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = abs(x) | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | return value < 0 ? -1 : 1; | ||
+ | else | ||
+ | return std:: | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | return (std:: | ||
+ | else | ||
+ | return std:: | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = min(a, max(0, x)) | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | return 0; // TODO | ||
+ | else | ||
+ | return 0; // TODO | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | { | ||
+ | double output = elu(value); | ||
+ | return output > 0 ? 1.0 : output + 1; | ||
+ | } | ||
+ | else | ||
+ | return value > 0 ? value : std:: | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | return -2 * value * std:: | ||
+ | else | ||
+ | return std:: | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = a*x + b | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | return 1; | ||
+ | else | ||
+ | return value; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = 1 / (1 + e^-x) | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | return 0; // TODO | ||
+ | else | ||
+ | return 1.0 / (1.0 + std:: | ||
+ | |||
+ | /* | ||
+ | if (value < -45.0) | ||
+ | return 0.0; | ||
+ | else | ||
+ | if (value > 45.0) | ||
+ | return 1.0; | ||
+ | else | ||
+ | return 1.0 / (1.0 + std:: | ||
+ | */ | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = max(0, x) | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | return value > 0 ? 1.0 : 0.0; | ||
+ | else | ||
+ | return value > 0 ? value : 0; | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = 1.7159 * tanh(0.66667 * x) | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) // TODO... | ||
+ | { | ||
+ | double tanh_value = std:: | ||
+ | return 0.66667f * (1.7159f - 1 / 1.7159f * tanh_value * tanh_value); | ||
+ | } | ||
+ | else | ||
+ | return 1.7159 * std:: | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between 0.0 and 1.0. | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | return sigmoid(value) * (1.0 - sigmoid(value)); | ||
+ | else | ||
+ | return 1.0 / double((1.0 + exp(-value))); | ||
+ | } | ||
+ | |||
+ | |||
+ | /* | ||
+ | // Returns a value between 0.0 and 1.0. | ||
+ | double Activation:: | ||
+ | { | ||
+ | return 1.0 / double((1.0 + exp(-value))); | ||
+ | } | ||
+ | |||
+ | |||
+ | double Activation:: | ||
+ | { | ||
+ | return sigmoid(value) * (1.0 - sigmoid(value)); | ||
+ | } | ||
+ | */ | ||
+ | |||
+ | |||
+ | double Activation:: | ||
+ | { | ||
+ | if (value < negative_limit) | ||
+ | return 0.0; | ||
+ | else | ||
+ | if (value > positive_limit) | ||
+ | return 1.0; | ||
+ | else | ||
+ | return 1.0 / (1.0 + exp(-value)); | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = log(1 + e^x) | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | return 0; // TODO | ||
+ | else | ||
+ | return 0; // TODO | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = sqrt(x) | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | return 0; // TODO | ||
+ | else | ||
+ | return std:: | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = x^2 | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | return 0; // TODO | ||
+ | else | ||
+ | return value * value; // TODO | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | { | ||
+ | if (value > 0) | ||
+ | return (value) / (1 + value); | ||
+ | else | ||
+ | return (value) / (1 - value); | ||
+ | } | ||
+ | else | ||
+ | return (value) / (1 + std:: | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | { | ||
+ | if (value > 0) | ||
+ | return 0; | ||
+ | else | ||
+ | return value; | ||
+ | } | ||
+ | else | ||
+ | { | ||
+ | if (value > 0) | ||
+ | return 1; | ||
+ | else | ||
+ | return 0; | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | // | ||
+ | // f(x) = a*tanh(b*x) | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (derivative) | ||
+ | { | ||
+ | double tanh_value = std:: | ||
+ | return (1.0 - tanh_value * tanh_value); | ||
+ | //return (1.0 - std:: | ||
+ | } | ||
+ | else | ||
+ | return std:: | ||
+ | |||
+ | /* | ||
+ | if (value < -45.0) | ||
+ | return -1.0; | ||
+ | else | ||
+ | if (value > 45.0) | ||
+ | return 1.0; | ||
+ | else | ||
+ | return std:: | ||
+ | */ | ||
+ | |||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | double Activation:: | ||
+ | { | ||
+ | if (value < negative_limit) | ||
+ | return -1.0; | ||
+ | else | ||
+ | if (value > positive_limit) | ||
+ | return 1.0; | ||
+ | else | ||
+ | return tanh(value); | ||
+ | } | ||
+ | |||
+ | |||
+ | Activation_Types Activation:: | ||
+ | { | ||
+ | return activation_type; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Activation:: | ||
+ | { | ||
+ | activation_type = _activation_type; | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | |||
+ | |||
+ | |||
+ | |||
+ | /* | ||
+ | public double SoftMax(double x, string layer) | ||
+ | { | ||
+ | // Determine max | ||
+ | double max = double.MinValue; | ||
+ | if (layer == " | ||
+ | max = (ihSum0 > ihSum1) ? ihSum0 : ihSum1; | ||
+ | else | ||
+ | if (layer == " | ||
+ | max = (hoSum0 > hoSum1) ? hoSum0 : hoSum1; | ||
+ | |||
+ | // Compute scale | ||
+ | double scale = 0.0; | ||
+ | if (layer == " | ||
+ | scale = Math.Exp(ihSum0 - max) + Math.Exp(ihSum1 - max); | ||
+ | else | ||
+ | if (layer == " | ||
+ | scale = Math.Exp(hoSum0 - max ) + Math.Exp(hoSum1 - max); | ||
+ | |||
+ | return Math.Exp(x - max) / scale; | ||
+ | } | ||
+ | |||
+ | */ | ||
+ | </ | ||
+ | |||
+ | activation.h | ||
+ | |||
+ | <file h activation.h> | ||
+ | #ifndef __SHAREWIZ_ACTIVATION_H__ | ||
+ | #define __SHAREWIZ_ACTIVATION_H__ | ||
+ | |||
+ | #include < | ||
+ | |||
+ | |||
+ | // Built-in activation functions. | ||
+ | |||
+ | enum Activation_Types | ||
+ | { | ||
+ | ACTIVATION_ABS, | ||
+ | ACTIVATION_ARCTAN, | ||
+ | ACTIVATION_BOUNDEDRELU, | ||
+ | ACTIVATION_ELU, | ||
+ | ACTIVATION_GAUSSIAN, | ||
+ | ACTIVATION_LINEAR, | ||
+ | ACTIVATION_LOG, | ||
+ | ACTIVATION_RELU, | ||
+ | ACTIVATION_SCALED_TANH, | ||
+ | ACTIVATION_SIGMOID, | ||
+ | ACTIVATION_SOFTRELU, | ||
+ | ACTIVATION_SQRT, | ||
+ | ACTIVATION_SQUARE, | ||
+ | ACTIVATION_SQUASH, | ||
+ | ACTIVATION_STEP, | ||
+ | ACTIVATION_TANH | ||
+ | }; | ||
+ | |||
+ | class Activation; | ||
+ | |||
+ | typedef std:: | ||
+ | //typedef std:: | ||
+ | |||
+ | class Activation | ||
+ | { | ||
+ | private: | ||
+ | //enum {SIGMOID, TANH, RELU, LINEAR} types; | ||
+ | //types type; | ||
+ | Activation_Types activation_type; | ||
+ | |||
+ | public: | ||
+ | Activation(); | ||
+ | Activation(Activation_Types _activation_type); | ||
+ | ~Activation(); | ||
+ | |||
+ | double activate(const double& value, const bool derivative=false); | ||
+ | |||
+ | double abs(const double& value, const bool derivative=false); | ||
+ | double arctan(const double& value, const bool derivative=false); | ||
+ | double boundedRelu(const double& value, const bool derivative=false); | ||
+ | double elu(const double& value, const bool derivative = false); | ||
+ | double gaussian(const double& value, const bool derivative = false); | ||
+ | double linear(const double& value, const bool derivative=false); | ||
+ | double log(const double& value, const bool derivative=false); | ||
+ | double relu(const double& value, const bool derivative = false); | ||
+ | double scaledTanh(const double& value, const bool derivative = false); | ||
+ | double sigmoid(const double& value, const bool derivative=false); | ||
+ | double sigmoid_limit(double value, double positive_limit=45.0, | ||
+ | double softRelu(const double& value, const bool derivative=false); | ||
+ | double sqrt(const double& value, const bool derivative = false); | ||
+ | double square(const double& value, const bool derivative=false); | ||
+ | double squash(const double& value, const bool derivative = false); | ||
+ | double step(const double& value, const bool derivative = false); | ||
+ | double tanh(const double& value, const bool derivative = false); | ||
+ | double tanh_limit(double& | ||
+ | |||
+ | |||
+ | Activation_Types getActivationType(); | ||
+ | void setActivationType(Activation_Types _activation_type); | ||
+ | |||
+ | |||
+ | //double sigmoid(const double& value); | ||
+ | //double sigmoid_derivative(const double& value); | ||
+ | |||
+ | //double tanh_derivative(const double& value); | ||
+ | }; | ||
+ | |||
+ | |||
+ | |||
+ | /* | ||
+ | |||
+ | |||
+ | // Built-in activation functions | ||
+ | export class Activations { | ||
+ | public static TANH: ActivationFunction = { | ||
+ | output: x = > (< | ||
+ | der: x = > { | ||
+ | let output = Activations.TANH.output(x); | ||
+ | return 1 - output * output; | ||
+ | } | ||
+ | }; | ||
+ | public static RELU: ActivationFunction = { | ||
+ | output: x = > Math.max(0, x), | ||
+ | der: x = > x <= 0 ? 0 : 1 | ||
+ | }; | ||
+ | public static SIGMOID: ActivationFunction = { | ||
+ | output: x = > 1 / (1 + Math.exp(-x)), | ||
+ | der: x = > { | ||
+ | let output = Activations.SIGMOID.output(x); | ||
+ | return output * (1 - output); | ||
+ | } | ||
+ | }; | ||
+ | public static LINEAR: ActivationFunction = { | ||
+ | output: x = > x, | ||
+ | der: x = > 1 | ||
+ | }; | ||
+ | } | ||
+ | |||
+ | |||
+ | /* Build-in regularization functions. | ||
+ | export class RegularizationFunction { | ||
+ | public static L1: RegularizationFunction = { | ||
+ | output: w = > Math.abs(w), | ||
+ | der: w = > w < 0 ? -1 : 1 | ||
+ | }; | ||
+ | public static L2: RegularizationFunction = { | ||
+ | output: w = > 0.5 * w * w, | ||
+ | der: w = > w | ||
+ | }; | ||
+ | } | ||
+ | */ | ||
+ | |||
+ | |||
+ | |||
+ | |||
+ | |||
+ | #endif | ||
+ | </ | ||
+ | |||
+ | connection.cpp | ||
+ | |||
+ | <file cpp connection.cpp> | ||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | #include " | ||
+ | #include " | ||
+ | |||
+ | |||
+ | |||
+ | Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | index = -1; | ||
+ | deltaWeight = 0; | ||
+ | weight = 0; | ||
+ | momentum = 0.4; | ||
+ | |||
+ | Q = 0; | ||
+ | R = -1; | ||
+ | |||
+ | randomizeWeight(); | ||
+ | } | ||
+ | |||
+ | |||
+ | Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | index = -1; | ||
+ | deltaWeight = 0; | ||
+ | weight = 0; | ||
+ | momentum = 0.4; | ||
+ | |||
+ | Q = 0; | ||
+ | R = -1; | ||
+ | |||
+ | this-> | ||
+ | this-> | ||
+ | |||
+ | randomizeWeight(); | ||
+ | } | ||
+ | |||
+ | |||
+ | double Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return error; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | error = e; | ||
+ | } | ||
+ | |||
+ | |||
+ | int Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return index; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | this-> | ||
+ | } | ||
+ | |||
+ | |||
+ | double Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return weight; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | // | ||
+ | weight = w; | ||
+ | } | ||
+ | |||
+ | |||
+ | double Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return deltaWeight; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | deltaWeight = dw; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Controls how much the weights are changed during a weight update by factoring in previous weight updates. | ||
+ | // It acts as a smoothing parameter that reduces oscillation and helps attain convergence. | ||
+ | // This must be a real value between 0.0 and 1.0, a typical value for momentum is 0.9. | ||
+ | double Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return momentum; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | this-> | ||
+ | } | ||
+ | |||
+ | |||
+ | double Connection:: | ||
+ | { | ||
+ | return Q; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Connection:: | ||
+ | { | ||
+ | Q = _Q; | ||
+ | } | ||
+ | |||
+ | |||
+ | double Connection:: | ||
+ | { | ||
+ | return R; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Connection:: | ||
+ | { | ||
+ | R = _R; | ||
+ | } | ||
+ | |||
+ | |||
+ | pNeuronX& | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return from; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | this-> | ||
+ | } | ||
+ | |||
+ | |||
+ | pNeuronX& | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return to; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | this-> | ||
+ | } | ||
+ | |||
+ | |||
+ | double Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | weight = rand() / double(RAND_MAX); | ||
+ | // | ||
+ | |||
+ | return weight; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Connection:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | if (!from) | ||
+ | return; | ||
+ | if (!to) | ||
+ | return; | ||
+ | |||
+ | int f = from-> | ||
+ | int t = to-> | ||
+ | |||
+ | // | ||
+ | std::cout << " | ||
+ | } | ||
+ | |||
+ | |||
+ | /* | ||
+ | |||
+ | #include < | ||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | int random_number(int N) // random value in [0, N) | ||
+ | { | ||
+ | static std:: | ||
+ | static std:: | ||
+ | std:: | ||
+ | return dist(eng); | ||
+ | } | ||
+ | |||
+ | std:: | ||
+ | { | ||
+ | std:: | ||
+ | int remaining = last - first + 1; | ||
+ | int m = std::min(n, remaining); | ||
+ | while (m > 0) { | ||
+ | if (random_number(remaining) < m) { | ||
+ | numbers.push_back(first); | ||
+ | --m; | ||
+ | } | ||
+ | --remaining; | ||
+ | ++first; | ||
+ | } | ||
+ | return numbers; | ||
+ | } | ||
+ | |||
+ | int main() | ||
+ | { | ||
+ | auto numbers = random_sample(1, | ||
+ | for (int value : numbers) { | ||
+ | std::cout << value << " "; | ||
+ | } | ||
+ | std::cout << ' | ||
+ | } | ||
+ | */ | ||
+ | /* | ||
+ | very simple random is 1+((power(r, | ||
+ | |||
+ | To shuffle an array a of n elements: | ||
+ | for i from n ? 1 downto 1 do | ||
+ | j ? random integer with 0 ? j ? i | ||
+ | exchange a[j] and a[i] | ||
+ | |||
+ | |||
+ | for (int i = cards.Length - 1; i > 0; i--) | ||
+ | { | ||
+ | int n = rand.Next(i + 1); | ||
+ | Swap(ref cards[i], ref cards[n]); | ||
+ | } | ||
+ | */ | ||
+ | </ | ||
+ | |||
+ | connection.h | ||
+ | |||
+ | <file h connection.h> | ||
+ | #ifndef __SHAREWIZ_CONNECTION_H__ | ||
+ | #define __SHAREWIZ_CONNECTION_H__ | ||
+ | |||
+ | //#include < | ||
+ | //#include < | ||
+ | |||
+ | // Connection class. | ||
+ | // | ||
+ | // A Connection links two Neurons. | ||
+ | |||
+ | |||
+ | class Neuron; | ||
+ | |||
+ | typedef std:: | ||
+ | typedef std:: | ||
+ | |||
+ | |||
+ | class Connection | ||
+ | { | ||
+ | private: | ||
+ | int index; | ||
+ | double weight; | ||
+ | double deltaWeight; | ||
+ | double error; | ||
+ | double momentum; | ||
+ | |||
+ | // For DQN. | ||
+ | // Should we re-use weight as Q and deltaWeight as R. | ||
+ | double Q; | ||
+ | double R; | ||
+ | |||
+ | double odds; // Odds of being chosen as next action for DQN. | ||
+ | |||
+ | |||
+ | double randomizeWeight(void); | ||
+ | |||
+ | pNeuronX from; | ||
+ | pNeuronX to; | ||
+ | |||
+ | public: | ||
+ | Connection(); | ||
+ | Connection(const pNeuronX& | ||
+ | |||
+ | double getError(void); | ||
+ | void setError(const double& e); | ||
+ | |||
+ | int getIndex(void); | ||
+ | void setIndex(const int& index); | ||
+ | |||
+ | double getWeight(void); | ||
+ | void setWeight(const double& w); | ||
+ | |||
+ | double getDeltaWeight(void); | ||
+ | void setDeltaWeight(const double& w); | ||
+ | |||
+ | double getMomentum(void); | ||
+ | void setMomentum(const double& momentum); | ||
+ | |||
+ | double getQ(void); | ||
+ | void setQ(const double& _Q); | ||
+ | |||
+ | double getR(void); | ||
+ | void setR(const double& _R); | ||
+ | |||
+ | pNeuronX& | ||
+ | void setFrom(const pNeuronX& | ||
+ | |||
+ | pNeuronX& | ||
+ | void setTo(const pNeuronX& | ||
+ | |||
+ | void printOutput(void); | ||
+ | }; | ||
+ | |||
+ | |||
+ | #endif | ||
+ | </ | ||
+ | |||
+ | layer.cpp | ||
+ | |||
+ | <file cpp layer.cpp> | ||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | #include " | ||
+ | #include " | ||
+ | #include " | ||
+ | #include " | ||
+ | |||
+ | |||
+ | // | ||
+ | Layer:: | ||
+ | // index(-1), | ||
+ | // neurons(10) | ||
+ | // index(0) | ||
+ | // | ||
+ | { | ||
+ | //idx++; | ||
+ | // | ||
+ | index = -1; | ||
+ | |||
+ | neurons.reserve(10); | ||
+ | |||
+ | //neurons = std:: | ||
+ | // | ||
+ | } | ||
+ | |||
+ | |||
+ | Layer:: | ||
+ | { | ||
+ | index = -1; | ||
+ | |||
+ | neurons.reserve(num_neurons); | ||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | pNeuronX tmp(new Neuron()); | ||
+ | tmp-> | ||
+ | neurons.push_back(tmp); | ||
+ | } | ||
+ | |||
+ | |||
+ | /* | ||
+ | // Add a bias neuron in each layer. | ||
+ | // Force the bias node's output to 1.0 (it was the last neuron pushed in this layer): | ||
+ | pNeuronX tmp(new Neuron()); | ||
+ | tmp-> | ||
+ | tmp-> | ||
+ | neurons.push_back(tmp); | ||
+ | |||
+ | // | ||
+ | // | ||
+ | */ | ||
+ | } | ||
+ | |||
+ | |||
+ | int Layer:: | ||
+ | { | ||
+ | return index; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Layer:: | ||
+ | { | ||
+ | this-> | ||
+ | } | ||
+ | |||
+ | |||
+ | unsigned int Layer:: | ||
+ | { | ||
+ | return neurons.size(); | ||
+ | } | ||
+ | |||
+ | |||
+ | void Layer:: | ||
+ | { | ||
+ | neurons.push_back(n); | ||
+ | } | ||
+ | |||
+ | |||
+ | void Layer:: | ||
+ | { | ||
+ | assert(neurons.size() >= idx); | ||
+ | |||
+ | for (unsigned i = neurons.size()-1; | ||
+ | { | ||
+ | if (neurons[i]-> | ||
+ | { | ||
+ | neurons.erase(neurons.begin() + i); | ||
+ | return; | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | pNeuronX & | ||
+ | { | ||
+ | assert(neurons.size() >= idx); | ||
+ | |||
+ | return neurons[idx]; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Layer:: | ||
+ | { | ||
+ | /* | ||
+ | // INPUT -> HIDDEN | ||
+ | for(y=0; y< | ||
+ | for(x=0; x< | ||
+ | temp += (input[pattern][x] * weight_i_h[x][y]); | ||
+ | } | ||
+ | hidden[y] = (1.0 / (1.0 + exp(-1.0 * (temp + bias[y])))); | ||
+ | temp = 0; | ||
+ | } | ||
+ | |||
+ | // HIDDEN -> OUTPUT | ||
+ | for(y=0; y< | ||
+ | for(x=0; x< | ||
+ | temp += (hidden[x] * weight_h_o[x][y]); | ||
+ | } | ||
+ | output[pattern][y] = (1.0 / (1.0 + exp(-1.0 * (temp + bias[y + hidden_array_size])))); | ||
+ | temp = 0; | ||
+ | } | ||
+ | return; | ||
+ | } | ||
+ | */ | ||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | // Weight sum of the previous layer' | ||
+ | double weightedSum = 0; | ||
+ | // | ||
+ | weightedSum += 1.0; // Add a 1 to act as a bias. | ||
+ | |||
+ | pNeuronX& | ||
+ | if (!currentNeuron) | ||
+ | continue; | ||
+ | unsigned int currentIndex = currentNeuron-> | ||
+ | |||
+ | for (unsigned int j = 0; j< | ||
+ | { | ||
+ | pNeuronX& | ||
+ | if (!prevNeuron) | ||
+ | continue; | ||
+ | // | ||
+ | for (unsigned int k = 0; k< | ||
+ | { | ||
+ | if (!prevNeuron-> | ||
+ | continue; | ||
+ | if (!prevNeuron-> | ||
+ | continue; | ||
+ | |||
+ | // We are only interested in connections going into the currentNeuron. | ||
+ | unsigned int prevIndex = prevNeuron-> | ||
+ | |||
+ | //if (prevNeuron.getConnectionOut(k).getTo() == currentNeuron) | ||
+ | if (prevIndex == currentIndex) | ||
+ | { | ||
+ | weightedSum += prevLayer-> | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | // Add in Bias? | ||
+ | // | ||
+ | |||
+ | // | ||
+ | if (currentNeuron) | ||
+ | { | ||
+ | pActivationX act = currentNeuron-> | ||
+ | if (!act) | ||
+ | continue; | ||
+ | |||
+ | // Sigmoid function. | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | currentNeuron-> | ||
+ | |||
+ | //std::cout << " | ||
+ | } | ||
+ | } | ||
+ | |||
+ | // std::cout << " | ||
+ | } | ||
+ | |||
+ | |||
+ | void Layer:: | ||
+ | { | ||
+ | std::cout << "Layer " << index << " has " << neurons.size() << " Neurons" | ||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | if (!neurons[i]) | ||
+ | continue; | ||
+ | |||
+ | std::cout << " | ||
+ | |||
+ | for (unsigned int j = 0; j< | ||
+ | { | ||
+ | pConnectionX& | ||
+ | if (!currentConnection) | ||
+ | continue; | ||
+ | currentConnection-> | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | </ | ||
+ | |||
+ | layer.h | ||
+ | |||
+ | <file h layer.h> | ||
+ | #ifndef __SHAREWIZ_LAYER_H__ | ||
+ | #define __SHAREWIZ_LAYER_H__ | ||
+ | |||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | //#include " | ||
+ | |||
+ | // Layer class. | ||
+ | // | ||
+ | // A Neural Network can have multiple Layers. | ||
+ | |||
+ | class Layer; | ||
+ | class Neuron; | ||
+ | |||
+ | |||
+ | typedef std:: | ||
+ | typedef std:: | ||
+ | |||
+ | typedef std:: | ||
+ | typedef std:: | ||
+ | |||
+ | |||
+ | |||
+ | class Layer | ||
+ | { | ||
+ | private: | ||
+ | int index; | ||
+ | |||
+ | pNeuron neurons; | ||
+ | |||
+ | public: | ||
+ | Layer(); | ||
+ | Layer(unsigned int num_neurons); | ||
+ | |||
+ | unsigned int getSize(void); | ||
+ | |||
+ | int getIndex(void); | ||
+ | void setIndex(const int& index); | ||
+ | |||
+ | void addNeuron(const pNeuronX& | ||
+ | void removeNeuron(const int& idx); | ||
+ | |||
+ | pNeuronX& | ||
+ | |||
+ | void feedForward(const pLayerX& | ||
+ | |||
+ | void printOutput(void); | ||
+ | }; | ||
+ | |||
+ | |||
+ | #endif | ||
+ | </ | ||
+ | |||
+ | net.cpp | ||
+ | |||
+ | <file cpp net.cpp> | ||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | #include " | ||
+ | #include " | ||
+ | #include " | ||
+ | #include " | ||
+ | |||
+ | typedef std:: | ||
+ | typedef std:: | ||
+ | |||
+ | /* | ||
+ | It has been shown that the error surface of a backpropagation network with one hidden layer and hidden units | ||
+ | has no local minima, if the network is trained with an arbitrary set containing different inputs1 (Yu, 1992). | ||
+ | In practice, however, other features of the error surface such as " | ||
+ | can present difficulty for optimisation. For example, two error functions (from (Gori, 1996)) do not have local | ||
+ | minima. However, the function on the left is expected to be more difficult to optimise with gradient descent. | ||
+ | For the purposes of this paper, the criterion of interest considered is "the best solution found in a given | ||
+ | practical time limit. | ||
+ | */ | ||
+ | |||
+ | |||
+ | Net::Net() : | ||
+ | learning_rate(0.5), | ||
+ | max_error_tollerance(0.1), | ||
+ | goal_amount(100.0) | ||
+ | { | ||
+ | layers.reserve(10); | ||
+ | } | ||
+ | |||
+ | |||
+ | // Example: | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // Net myNet(myTopology); | ||
+ | Net:: | ||
+ | learning_rate(0.5), | ||
+ | max_error_tollerance(0.1), | ||
+ | goal_amount(100.0) | ||
+ | { | ||
+ | assert(topology.size() > 0); | ||
+ | |||
+ | learning_rate = 0.5; | ||
+ | max_error_tollerance = 0.1; | ||
+ | goal_amount = 100.0; | ||
+ | |||
+ | layers.reserve(topology.size()); | ||
+ | |||
+ | // obtain a time-based seed: | ||
+ | //unsigned seed = std:: | ||
+ | // using built-in random generator: | ||
+ | //shuffle (topology.begin(), | ||
+ | //auto engine = std:: | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | |||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | pLayerX tmp(new Layer(topology[i])); | ||
+ | tmp-> | ||
+ | layers.push_back(tmp); | ||
+ | } | ||
+ | |||
+ | |||
+ | std::cout << " | ||
+ | for (unsigned int i = 0; i < layers.size(); | ||
+ | { | ||
+ | std::cout << " | ||
+ | } | ||
+ | |||
+ | |||
+ | // | ||
+ | |||
+ | // Add Bias to input and hidden layers. | ||
+ | // | ||
+ | |||
+ | // | ||
+ | // | ||
+ | |||
+ | // | ||
+ | } | ||
+ | |||
+ | |||
+ | // Connects the " | ||
+ | void Net:: | ||
+ | { | ||
+ | //unsigned int connection_idx = 1; | ||
+ | int connection_idx = 0; | ||
+ | |||
+ | for (unsigned int i = 0; i < connections.size(); | ||
+ | { | ||
+ | //for (unsigned int j = 0; j < connections[i].size(); | ||
+ | //{ | ||
+ | int layerFrom = (int)connections[i][0]; | ||
+ | int neuronFrom = (int)connections[i][1]; | ||
+ | int layerTo = (int)connections[i][2]; | ||
+ | int neuronTo = (int)connections[i][3]; | ||
+ | double _R = connections[i][4]; | ||
+ | |||
+ | pConnectionX tmp(new Connection(layers[layerFrom]-> | ||
+ | tmp-> | ||
+ | tmp-> | ||
+ | tmp-> | ||
+ | layers[layerFrom]-> | ||
+ | layers[layerTo]-> | ||
+ | //} | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | // Connects the " | ||
+ | void Net:: | ||
+ | { | ||
+ | //unsigned int connection_idx = 1; | ||
+ | |||
+ | pConnectionX tmp(new Connection(layers[layerFrom]-> | ||
+ | tmp-> | ||
+ | tmp-> | ||
+ | tmp-> | ||
+ | layers[layerFrom]-> | ||
+ | layers[layerTo]-> | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | // Connects all Neurons to each other. | ||
+ | void Net:: | ||
+ | { | ||
+ | // assert(layer.size() > 1); // There must be more than 1 neuron to connect. | ||
+ | |||
+ | int connection_idx = 0; | ||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | for (unsigned int j = 0; j< | ||
+ | { | ||
+ | for (unsigned int k = 0; k< | ||
+ | { | ||
+ | for (unsigned int l = 0; l < layers[k]-> | ||
+ | { | ||
+ | pConnectionX tmp(new Connection(layers[i]-> | ||
+ | tmp-> | ||
+ | layers[i]-> | ||
+ | layers[k]-> | ||
+ | |||
+ | //std::cout << " | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | // Connects all Neurons in a layer to all Neurons in the next layer. | ||
+ | void Net:: | ||
+ | { | ||
+ | unsigned int connection_idx = 0; | ||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | for (unsigned int j = 0; j< | ||
+ | { | ||
+ | for (unsigned int k = 0; k< | ||
+ | { | ||
+ | pConnectionX tmp(new Connection(layers[i]-> | ||
+ | tmp-> | ||
+ | layers[i]-> | ||
+ | layers[i + 1]-> | ||
+ | |||
+ | //std::cout << " | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | // Same as connectForward() but code spread out between layers. | ||
+ | // Connects all Neurons to Neurons in next layer. | ||
+ | void Net:: | ||
+ | { | ||
+ | unsigned int connection_idx = 0; | ||
+ | |||
+ | // Create the input to hidden connections. | ||
+ | // assert(layers.size() > 1); // There must be more than 1 layers to connect. | ||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | // Create the input to hidden connections. | ||
+ | if (i == 0) | ||
+ | { | ||
+ | if ((layers.size()) > 1) // there are other layers | ||
+ | { | ||
+ | for (unsigned int j = 0; j< | ||
+ | { | ||
+ | for (unsigned int k = 0; k< | ||
+ | { | ||
+ | pConnectionX tmp(new Connection(layers[0]-> | ||
+ | tmp-> | ||
+ | layers[0]-> | ||
+ | layers[1]-> | ||
+ | |||
+ | //std::cout << " | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | else // no other layers. | ||
+ | { | ||
+ | |||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | // Create the inside hidden connections...and hidden to output connection. | ||
+ | if ((i>0) && (i <= layers.size() - 2)) | ||
+ | { | ||
+ | for (unsigned int j = 0; j< | ||
+ | { | ||
+ | for (unsigned int k = 0; k< | ||
+ | { | ||
+ | pConnectionX tmp(new Connection(layers[i]-> | ||
+ | tmp-> | ||
+ | layers[i]-> | ||
+ | layers[i + 1]-> | ||
+ | |||
+ | //std::cout << " | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | // Connects all Neurons to each other. | ||
+ | void Net:: | ||
+ | { | ||
+ | // assert(layer.size() > 1); // There must be more than 1 neuron to connect. | ||
+ | |||
+ | unsigned int connection_idx = 0; | ||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | for (unsigned int j = 0; j< | ||
+ | { | ||
+ | pConnectionX tmp(new Connection(layer-> | ||
+ | tmp-> | ||
+ | layer-> | ||
+ | layer-> | ||
+ | |||
+ | //std::cout << " | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | void Net:: | ||
+ | { | ||
+ | int numLayers = layers.size(); | ||
+ | int numNeurons = 0; | ||
+ | int numConnections = 0; | ||
+ | |||
+ | // Determine how many layers, neurons (states) and connections (actions) we have. | ||
+ | for (unsigned int i = 0; i < layers.size(); | ||
+ | { | ||
+ | numNeurons += layers[i]-> | ||
+ | | ||
+ | for (unsigned int j = 0; j < layers[i]-> | ||
+ | { | ||
+ | pNeuronX& | ||
+ | if (!currentNeuron) | ||
+ | continue; | ||
+ | |||
+ | numConnections += currentNeuron-> | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | // Select random initial neuron (state). | ||
+ | int rnd_layer = randomBetween(0, | ||
+ | int rnd_state = randomBetween(0, | ||
+ | pLayerX currentLayer = layers[rnd_layer]; | ||
+ | pNeuronX currentState = layers[rnd_layer]-> | ||
+ | |||
+ | // Set initial reward. | ||
+ | double R = -1; | ||
+ | |||
+ | // Loop until a reward matching the goal_amount has been found. | ||
+ | while (R != goal_amount) | ||
+ | { | ||
+ | // Select one amongst all possible actions for the current state. | ||
+ | // TODO: Simply using random treats all possible actions as equal. | ||
+ | // TODO: Should cater for giving different actions different odds of being chosen. | ||
+ | int rnd_action = randomBetween(0, | ||
+ | pConnectionX currentAction = currentState-> | ||
+ | |||
+ | // Action outcome is set to deterministic in this example | ||
+ | // Transition probability is 1. | ||
+ | pNeuronX nextState = currentAction-> | ||
+ | |||
+ | // Get reward. | ||
+ | R = currentAction-> | ||
+ | |||
+ | // Get Q. | ||
+ | double Q = currentAction-> | ||
+ | |||
+ | // Determine the maximum Q. | ||
+ | double maxQ = DBL_MIN; | ||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | double tmpQ = nextState-> | ||
+ | |||
+ | if (maxQ < tmpQ) | ||
+ | maxQ = tmpQ; | ||
+ | } | ||
+ | if (maxQ == DBL_MIN) maxQ = 0; | ||
+ | |||
+ | // Update the Q. | ||
+ | //double v = Q + alpha * (R + gamma * maxQ - Q); | ||
+ | double target = R + gamma * maxQ; | ||
+ | //double error = R + gamma * maxQ - Q; | ||
+ | double error = target - Q; | ||
+ | |||
+ | |||
+ | // Experience Replay Memory. | ||
+ | // This suggests using an experience replay memory. | ||
+ | // This is loosely inspired by the brain, and in particular the way it syncs memory traces in the hippocampus | ||
+ | // with the cortex. | ||
+ | // What this amounts to is that instead of performing an update and then throwing away the experience tuple, | ||
+ | // i.e. the original Q, we keep it around and effectively build up a training set of experiences. | ||
+ | // Then, we don't learn based on the new experience that comes in at time t, but instead sample random | ||
+ | // experiences from the replay memory and perform an update on each sample. | ||
+ | // This feature has the effect of removing correlations in the observed state, | ||
+ | // reduces gradual drift and forgetting. | ||
+ | // If the size of the memory pool is greater than some threshold, start replacing old experiences. | ||
+ | // or those further from the current Q, or randomly etc. | ||
+ | int rnd_replay_memory = randomBetween(0, | ||
+ | if (rnd_replay_memory > 99) // if rnd > some value | ||
+ | { | ||
+ | // | ||
+ | // | ||
+ | | ||
+ | // Record old Q value into array of stored memories. | ||
+ | // Now select new Q value from randomly selecting one of the old Q memory values - perhaps by using odds. | ||
+ | // i.e. most fresh Q value might have slightly greater chance of being selected etc. | ||
+ | } | ||
+ | |||
+ | // Clamping TD Error. | ||
+ | // Clamp the TD Error gradient at some fixed maximum value. | ||
+ | // If the error is greater in magnitude then some threshold (tderror_clamp) then we cap it at that value. | ||
+ | // This makes the learning more robust to outliers and has the interpretation of using Huber loss, which | ||
+ | // is an L2 penalty in a small region around the target value and an L1 penalty further away. | ||
+ | // double tderror_clamp = 1.0; // for robustness | ||
+ | // if (error > tderror_clamp) | ||
+ | // error = tderror_clamp; | ||
+ | |||
+ | // Periodic Target Q Value Updates. | ||
+ | // Periodically freeze the Q where it is. | ||
+ | // Aims to reduce correlations between updates and the immediately undertaken behavior. | ||
+ | // The idea is to freeze the Q network once in a while into a frozen, copied network, which is used to | ||
+ | // only compute the targets. | ||
+ | // This target network is once in a while updated to the actual current. | ||
+ | int rnd_freeze = randomBetween(0, | ||
+ | if (rnd_freeze > 99) | ||
+ | { | ||
+ | } | ||
+ | |||
+ | double v = Q + alpha * (error); | ||
+ | |||
+ | currentAction-> | ||
+ | |||
+ | // Update the state. | ||
+ | currentState = nextState; | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | // Determine the maximum Q for the state. | ||
+ | double Net:: | ||
+ | { | ||
+ | double maxQ = DBL_MIN; | ||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | double tmpQ = state-> | ||
+ | |||
+ | if (maxQ < tmpQ) | ||
+ | maxQ = tmpQ; | ||
+ | } | ||
+ | |||
+ | if (maxQ == DBL_MIN) | ||
+ | maxQ = 0; | ||
+ | |||
+ | return maxQ; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Get policy from state. | ||
+ | pNeuronX Net:: | ||
+ | { | ||
+ | double maxValue = DBL_MIN; | ||
+ | pNeuronX policyGotoState = currentState; | ||
+ | |||
+ | for (unsigned int i = 0; i < currentState-> | ||
+ | { | ||
+ | pNeuronX nextState = currentState-> | ||
+ | double value = currentState-> | ||
+ | |||
+ | if (value > maxValue) | ||
+ | { | ||
+ | maxValue = value; | ||
+ | policyGotoState = nextState; | ||
+ | } | ||
+ | } | ||
+ | |||
+ | return policyGotoState; | ||
+ | } | ||
+ | |||
+ | | ||
+ | // Policy is maxQ(states). | ||
+ | void Net:: | ||
+ | { | ||
+ | for (unsigned int i = 0; i < layers.size(); | ||
+ | { | ||
+ | for (unsigned int j = 0; j < layers[i]-> | ||
+ | { | ||
+ | pNeuronX fromState = layers[i]-> | ||
+ | int from = fromState-> | ||
+ | |||
+ | pNeuronX toState = getPolicy(fromState); | ||
+ | int to = toState-> | ||
+ | |||
+ | std::cout << "From " << from << " goto " << to << std::endl; | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | // This method has a bit of a bias towards the low end if the range of rand() isn't divisible | ||
+ | // by highestNumber - lowestNumber + 1. | ||
+ | int Net:: | ||
+ | { | ||
+ | assert(highestNumber >= lowestNumber); | ||
+ | |||
+ | //return rand() % to + from; | ||
+ | |||
+ | return rand() % (highestNumber - lowestNumber + 1) + lowestNumber; | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | // Controls how much the weights are changed during a weight update. | ||
+ | // The larger the value, the more the weights are changed. | ||
+ | // This must be a real value between 0.0 and 10.0. | ||
+ | // These values are commonly set from 0.5 to 0.7. | ||
+ | double Net:: | ||
+ | { | ||
+ | return learning_rate; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Net:: | ||
+ | { | ||
+ | this-> | ||
+ | } | ||
+ | |||
+ | |||
+ | double Net:: | ||
+ | { | ||
+ | return max_error_tollerance; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Net:: | ||
+ | { | ||
+ | this-> | ||
+ | } | ||
+ | |||
+ | |||
+ | double Net:: | ||
+ | { | ||
+ | return alpha; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Learning rate. | ||
+ | // | ||
+ | // The Alpha parameter has a range of 0 to 1 (0 <= Alpha <= 1). | ||
+ | // | ||
+ | // Set this by trial and error. | ||
+ | void Net:: | ||
+ | { | ||
+ | alpha = _alpha_amount; | ||
+ | } | ||
+ | |||
+ | |||
+ | double Net:: | ||
+ | { | ||
+ | return gamma; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Discount factor. | ||
+ | // | ||
+ | // The Gamma parameter has a range of 0 to 1 (0 <= Gamma <= 1). | ||
+ | // | ||
+ | // If Gamma is closer to zero, the agent will tend to consider only immediate rewards. | ||
+ | // | ||
+ | // If Gamma is closer to one, the agent will consider future rewards with greater weight, | ||
+ | // willing to delay the reward. | ||
+ | void Net:: | ||
+ | { | ||
+ | gamma = _gamma_amount; | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | double Net:: | ||
+ | { | ||
+ | return goal_amount; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Net:: | ||
+ | { | ||
+ | this-> | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | /* | ||
+ | void Net:: | ||
+ | { | ||
+ | int x, y; | ||
+ | ifstream in(arg); | ||
+ | if(!in) | ||
+ | { | ||
+ | cout << endl << " | ||
+ | return; | ||
+ | } | ||
+ | |||
+ | in >> input_array_size; | ||
+ | in >> hidden_array_size; | ||
+ | in >> output_array_size; | ||
+ | in >> learning_rate; | ||
+ | in >> number_of_input_patterns; | ||
+ | bias_array_size = hidden_array_size + output_array_size; | ||
+ | initialize_net(); | ||
+ | for (x=0; x< | ||
+ | in >> bias[x]; | ||
+ | for(x=0; x< | ||
+ | { | ||
+ | for(y=0; y< | ||
+ | in >> weight_i_h[x][y]; | ||
+ | } | ||
+ | for(x=0; x< | ||
+ | { | ||
+ | for(y=0; y< | ||
+ | } | ||
+ | for(x=0; x< | ||
+ | { | ||
+ | for(y=0; y< | ||
+ | in >> input[x][y]; | ||
+ | } | ||
+ | for(x=0; x< | ||
+ | { | ||
+ | for(y=0; y< | ||
+ | in >> target[x][y]; | ||
+ | } | ||
+ | in.close(); | ||
+ | cout << endl << "data loaded" | ||
+ | return; | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | void Net:: | ||
+ | { | ||
+ | int x, y; | ||
+ | ofstream out; | ||
+ | out.open(argres); | ||
+ | if(!out) | ||
+ | { | ||
+ | cout << endl << " | ||
+ | return; | ||
+ | } | ||
+ | out << input_array_size << endl; | ||
+ | out << hidden_array_size << endl; | ||
+ | out << output_array_size << endl; | ||
+ | out << learning_rate << endl; | ||
+ | out << number_of_input_patterns << endl << endl; | ||
+ | for(x=0; x< | ||
+ | out << bias[x] << ' '; | ||
+ | out << endl << endl; | ||
+ | for(x=0; x< | ||
+ | { | ||
+ | for(y=0; y< | ||
+ | out << weight_i_h[x][y] << ' '; | ||
+ | } | ||
+ | out << endl << endl; | ||
+ | for(x=0; x< | ||
+ | { | ||
+ | for(y=0; y< | ||
+ | } | ||
+ | out << endl << endl; | ||
+ | for(x=0; x< | ||
+ | { | ||
+ | for(y=0; y< | ||
+ | out << input[x][y] << ' '; | ||
+ | out << endl; | ||
+ | } | ||
+ | out << endl; | ||
+ | for(x=0; x< | ||
+ | { | ||
+ | for(y=0; y< | ||
+ | out << target[x][y] << ' '; | ||
+ | out << endl; | ||
+ | } | ||
+ | out.close(); | ||
+ | cout << endl << "data saved" << endl; | ||
+ | return; | ||
+ | } | ||
+ | |||
+ | */ | ||
+ | |||
+ | void Net:: | ||
+ | { | ||
+ | layers[0]-> | ||
+ | layers[0]-> | ||
+ | // | ||
+ | |||
+ | layers[1]-> | ||
+ | layers[1]-> | ||
+ | |||
+ | layers[2]-> | ||
+ | |||
+ | layers[0]-> | ||
+ | layers[0]-> | ||
+ | layers[0]-> | ||
+ | layers[0]-> | ||
+ | layers[1]-> | ||
+ | layers[1]-> | ||
+ | |||
+ | // Add connection between two neurons in same level 1 | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | |||
+ | |||
+ | printOutput(); | ||
+ | |||
+ | // | ||
+ | // | ||
+ | |||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | |||
+ | printOutput(); | ||
+ | std::cout << " | ||
+ | } | ||
+ | |||
+ | |||
+ | // TODO: Only works if 3 or more layers. | ||
+ | void Net:: | ||
+ | { | ||
+ | // std::cout << " | ||
+ | // std::cout << " | ||
+ | |||
+ | |||
+ | assert(layers[0]-> | ||
+ | |||
+ | // | ||
+ | |||
+ | // Setting input vals to input layer. | ||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | if (!layers[0]-> | ||
+ | continue; | ||
+ | |||
+ | layers[0]-> | ||
+ | } | ||
+ | |||
+ | | ||
+ | // Updating hidden layers. | ||
+ | for (unsigned int i = 1; i< | ||
+ | { | ||
+ | layers[i]-> | ||
+ | } | ||
+ | | ||
+ | |||
+ | // Updating output layer. | ||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | pLayerX& | ||
+ | layers[layers.size() - 1]-> | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | /* | ||
+ | void backward_pass(int pattern) | ||
+ | { | ||
+ | register int x, y; | ||
+ | register double temp = 0; | ||
+ | |||
+ | // COMPUTE ERRORSIGNAL FOR OUTPUT UNITS | ||
+ | for(x=0; x< | ||
+ | errorsignal_output[x] = (target[pattern][x] - output[pattern][x]); | ||
+ | } | ||
+ | |||
+ | // COMPUTE ERRORSIGNAL FOR HIDDEN UNITS | ||
+ | for(x=0; x< | ||
+ | for(y=0; y< | ||
+ | temp += (errorsignal_output[y] * weight_h_o[x][y]); | ||
+ | } | ||
+ | errorsignal_hidden[x] = hidden[x] * (1-hidden[x]) * temp; | ||
+ | temp = 0.0; | ||
+ | } | ||
+ | |||
+ | // ADJUST WEIGHTS OF CONNECTIONS FROM HIDDEN TO OUTPUT UNITS | ||
+ | double length = 0.0; | ||
+ | for (x=0; x< | ||
+ | length += hidden[x]*hidden[x]; | ||
+ | } | ||
+ | if (length< | ||
+ | for(x=0; x< | ||
+ | for(y=0; y< | ||
+ | weight_h_o[x][y] += (learning_rate * errorsignal_output[y] * | ||
+ | hidden[x]/ | ||
+ | } | ||
+ | } | ||
+ | |||
+ | // ADJUST BIASES OF HIDDEN UNITS | ||
+ | for(x=hidden_array_size; | ||
+ | bias[x] += (learning_rate * errorsignal_output[x] / length); | ||
+ | } | ||
+ | |||
+ | // ADJUST WEIGHTS OF CONNECTIONS FROM INPUT TO HIDDEN UNITS | ||
+ | length = 0.0; | ||
+ | for (x=0; x< | ||
+ | length += input[pattern][x]*input[pattern][x]; | ||
+ | } | ||
+ | if (length< | ||
+ | for(x=0; x< | ||
+ | for(y=0; y< | ||
+ | weight_i_h[x][y] += (learning_rate * errorsignal_hidden[y] * | ||
+ | input[pattern][x]/ | ||
+ | } | ||
+ | } | ||
+ | |||
+ | // ADJUST BIASES FOR OUTPUT UNITS | ||
+ | for(x=0; x< | ||
+ | bias[x] += (learning_rate * errorsignal_hidden[x] / length); | ||
+ | } | ||
+ | return; | ||
+ | } | ||
+ | */ | ||
+ | |||
+ | void Net:: | ||
+ | { | ||
+ | // COMPUTE ERRORSIGNAL FOR OUTPUT UNITS | ||
+ | pLayerX& | ||
+ | assert(targetVals.size() == outputLayer-> | ||
+ | |||
+ | // Traversing output layer. | ||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | if (!outputLayer-> | ||
+ | continue; | ||
+ | |||
+ | double outputValue = outputLayer-> | ||
+ | double gradient = (targetVals[i] - outputValue) * outputValue * (1.0 - outputValue); | ||
+ | outputLayer-> | ||
+ | } | ||
+ | |||
+ | // COMPUTE ERRORSIGNAL FOR HIDDEN UNITS | ||
+ | for (unsigned int i = layers.size() - 2; i>0; i--) // for every hidden layer | ||
+ | { | ||
+ | pLayerX& | ||
+ | pLayerX& | ||
+ | |||
+ | for (unsigned int j = 0; j< | ||
+ | { | ||
+ | double temp = 0.0; | ||
+ | pNeuronX& | ||
+ | if (!currentNeuron) | ||
+ | continue; | ||
+ | |||
+ | for (unsigned int k = 0; k< | ||
+ | { | ||
+ | pConnectionX & | ||
+ | if (!currentConnection) | ||
+ | continue; | ||
+ | if (!currentConnection-> | ||
+ | continue; | ||
+ | |||
+ | int currentIndex = currentNeuron-> | ||
+ | |||
+ | for (unsigned int l = 0; l< | ||
+ | { | ||
+ | pNeuronX& | ||
+ | if (!nextNeuron) | ||
+ | continue; | ||
+ | |||
+ | int nextIndex = nextNeuron-> | ||
+ | |||
+ | if (currentIndex == nextIndex) | ||
+ | { | ||
+ | temp += (nextNeuron-> | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | currentNeuron-> | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | // ADJUST WEIGHTS OF CONNECTIONS FROM HIDDEN TO OUTPUT UNITS | ||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | for (unsigned int j = 0; j< | ||
+ | { | ||
+ | pNeuronX& | ||
+ | if (!currentNeuron) | ||
+ | continue; | ||
+ | double currentValue = currentNeuron-> | ||
+ | |||
+ | for (unsigned int k = 0; k< | ||
+ | { | ||
+ | pConnectionX& | ||
+ | if (!currentConnection) | ||
+ | continue; | ||
+ | pNeuronX& | ||
+ | if (!nextNeuron) | ||
+ | continue; | ||
+ | |||
+ | double nextGradient = nextNeuron-> | ||
+ | // | ||
+ | double delta = 0.5 * nextGradient * currentValue; | ||
+ | |||
+ | // | ||
+ | currentConnection-> | ||
+ | currentConnection-> | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | void Net:: | ||
+ | { | ||
+ | pLayerX& | ||
+ | // | ||
+ | assert(targetVals.size() == outputLayer-> | ||
+ | |||
+ | // Starting with the output layer. | ||
+ | //for (unsigned int i=0; i< | ||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | pNeuronX& | ||
+ | |||
+ | double output = outputLayer-> | ||
+ | // COMPUTE ERRORSIGNAL FOR OUTPUT UNITS | ||
+ | double error = output * (1 - output) * (pow(targetVals[i] - output, 2)); // std::cout << " | ||
+ | // | ||
+ | |||
+ | // ADJUST WEIGHTS OF CONNECTIONS FROM HIDDEN TO OUTPUT UNITS | ||
+ | for (unsigned int j = 0; j< | ||
+ | { | ||
+ | outputLayer-> | ||
+ | double newWeight = outputLayer-> | ||
+ | newWeight += (error * outputLayer-> | ||
+ | outputLayer-> | ||
+ | } | ||
+ | } | ||
+ | |||
+ | for (unsigned int i = layers.size() - 2; i>0; i--) // Traversing hidden layers all the way to input layer. | ||
+ | { | ||
+ | pLayerX& | ||
+ | pLayerX& | ||
+ | |||
+ | // Traversing current layer | ||
+ | //for (unsigned int j=0; j< | ||
+ | for (unsigned int j = 0; j< | ||
+ | { | ||
+ | const double& output = currentLayer-> | ||
+ | double subSum = 0.0; // Initializing subsum. | ||
+ | |||
+ | // Traversing next layer. | ||
+ | for (unsigned int k = 0; k< | ||
+ | { | ||
+ | double error = nextLayer-> | ||
+ | double weight = nextLayer-> | ||
+ | |||
+ | |||
+ | // | ||
+ | subSum += pow(nextLayer-> | ||
+ | // | ||
+ | } | ||
+ | |||
+ | double error = output*(1 - output)*(subSum); | ||
+ | |||
+ | for (unsigned int k = 0; k< | ||
+ | { | ||
+ | currentLayer-> | ||
+ | double newWeight = currentLayer-> | ||
+ | newWeight += error * output; | ||
+ | currentLayer-> | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | |||
+ | //void Net:: | ||
+ | void Net:: | ||
+ | { | ||
+ | std::cout << " | ||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | pLayerX& | ||
+ | currentLayer-> | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | void Net:: | ||
+ | { | ||
+ | pLayerX& | ||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | std::cout << " | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | void Net:: | ||
+ | { | ||
+ | |||
+ | } | ||
+ | </ | ||
+ | |||
+ | net.h | ||
+ | |||
+ | <file h net.h> | ||
+ | #ifndef __SHAREWIZ_NET_H__ | ||
+ | #define __SHAREWIZ_NET_H__ | ||
+ | |||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | // A Net class. | ||
+ | // | ||
+ | // To handle neural networks. | ||
+ | |||
+ | // There are several things to keep in mind when applying this agent in practice : | ||
+ | // 1. If the rewards are very sparse in the environment the agent will have trouble learning. | ||
+ | // Right now there is no priority sweeping support, but one might imagine oversampling experience that have | ||
+ | // high TD errors. | ||
+ | // Similarly, there are no eligibility traces right now though this could be added with a few modifications | ||
+ | // in future versions. | ||
+ | // 2. The exploration is rather naive, since a random action is taken once in a while. | ||
+ | // If the environment requires longer sequences of precise actions to get a reward, the agent might have a | ||
+ | // lot of difficulty finding these by chance, and then also learning from them sufficiently. | ||
+ | // 3. DQN only supports a set number of discrete actions and it is not obvious how one can incorporate | ||
+ | // (high - dimensional) continuous action spaces. | ||
+ | |||
+ | class Layer; | ||
+ | |||
+ | typedef std:: | ||
+ | typedef std:: | ||
+ | |||
+ | |||
+ | class Neuron; | ||
+ | typedef std:: | ||
+ | typedef std:: | ||
+ | |||
+ | |||
+ | class Net | ||
+ | { | ||
+ | private: | ||
+ | // | ||
+ | |||
+ | double learning_rate; | ||
+ | // Controls how much the weights are changed during a weight update. | ||
+ | // The larger the value, the more the weights are changed. | ||
+ | // This must be a real value between 0.0 and 10.0. | ||
+ | // These values are commonly set from 0.5 to 0.7. | ||
+ | double max_error_tollerance; | ||
+ | |||
+ | double alpha = 0.1; // Learning rate. | ||
+ | // Set this by trial and error. | ||
+ | double gamma = 0.4; // Discount factor (0 - 1). | ||
+ | // If Gamma is closer to 0, the agent will tend to consider only | ||
+ | // immediate rewards. | ||
+ | // If Gamma is closer to 1, the agent will consider future rewards | ||
+ | // with greater weight, willing to delay the reward. | ||
+ | //double epsilon = 0.2; // Initial epsilon for epsilon-greedy policy (0 - 1). | ||
+ | // High epsilon(up to 1) will cause the agent to take more random actions. | ||
+ | // It is a good idea to start with a high epsilon(e.g. 0.2 or even a bit higher) | ||
+ | // and decay it over time to be lower(e.g. 0.05). | ||
+ | //double lambda = 0; // eligibility trace decay, [0,1). 0 = no eligibility traces. | ||
+ | |||
+ | double goal_amount; | ||
+ | |||
+ | pLayer layers; | ||
+ | |||
+ | public: | ||
+ | Net(); | ||
+ | Net(const std:: | ||
+ | |||
+ | double getLearningRate(void); | ||
+ | void setLearningRate(const double& learning_rate); | ||
+ | |||
+ | double getMaxErrorTollerance(void); | ||
+ | void setMaxErrorTollerance(const double& max_error_tollerance); | ||
+ | |||
+ | double getAlpha(void); | ||
+ | void setAlpha(const double& _alpha_amount); | ||
+ | |||
+ | double getGamma(void); | ||
+ | void setGamma(const double& _gamma_amount); | ||
+ | |||
+ | double getGoalAmount(void); | ||
+ | void setGoalAmount(const double& _goal_amount); | ||
+ | |||
+ | void setTarget(const std:: | ||
+ | |||
+ | void setTest(); | ||
+ | |||
+ | void connect(const std:: | ||
+ | void connect(int layerFrom, int neuronFrom, int layerTo, int neuronTo, double _R, int connection_idx = 1); | ||
+ | |||
+ | void connectAll(); | ||
+ | void connectForward(); | ||
+ | void connectForward2(); | ||
+ | void connectAllInLayer(const pLayerX& | ||
+ | |||
+ | void DQN(void); | ||
+ | double getMaxQ(pNeuronX state); | ||
+ | |||
+ | pNeuronX getPolicy(pNeuronX currentState); | ||
+ | void showPolicy(void); | ||
+ | |||
+ | void feedForward(const std:: | ||
+ | void backPropagate(const std:: | ||
+ | void backPropagate2(const std:: | ||
+ | |||
+ | |||
+ | int randomBetween(int lowestNumber, | ||
+ | |||
+ | void printOutput(void); | ||
+ | void printResult(void); | ||
+ | }; | ||
+ | |||
+ | |||
+ | #endif | ||
+ | </ | ||
+ | |||
+ | neuron.cpp | ||
+ | |||
+ | <file cpp neuron.cpp> | ||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | #include " | ||
+ | #include " | ||
+ | |||
+ | |||
+ | Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | index = -1; | ||
+ | gradient = 0; | ||
+ | value = 0; | ||
+ | |||
+ | connections_in.reserve(10); | ||
+ | connections_out.reserve(10); | ||
+ | |||
+ | pActivationX tmp(new Activation()); | ||
+ | // | ||
+ | this-> | ||
+ | // | ||
+ | |||
+ | randomizeValue(); | ||
+ | } | ||
+ | |||
+ | |||
+ | bool Neuron:: | ||
+ | { | ||
+ | //cout << " | ||
+ | if (this-> | ||
+ | //if (*this == rhs) | ||
+ | return true; | ||
+ | |||
+ | return false; | ||
+ | } | ||
+ | |||
+ | |||
+ | double Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return gradient; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | this-> | ||
+ | } | ||
+ | |||
+ | |||
+ | int Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return index; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | this-> | ||
+ | } | ||
+ | |||
+ | |||
+ | unsigned int Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return connections_in.size(); | ||
+ | } | ||
+ | |||
+ | |||
+ | unsigned int Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return connections_out.size(); | ||
+ | } | ||
+ | |||
+ | |||
+ | double Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return value; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | value = v; | ||
+ | } | ||
+ | |||
+ | |||
+ | void Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | connections_in.push_back(c); | ||
+ | //index++; | ||
+ | } | ||
+ | |||
+ | |||
+ | //void Neuron:: | ||
+ | void Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | connections_out.push_back(c); | ||
+ | //index++; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a specific input connection. | ||
+ | pConnectionX & | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | assert(connections_in.size() >= idx); | ||
+ | |||
+ | return connections_in[idx]; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a specific output connection. | ||
+ | pConnectionX & | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | assert(connections_out.size() >= idx); | ||
+ | |||
+ | return connections_out[idx]; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Remove all connections with a value below the indicated threshold. | ||
+ | // | ||
+ | // TODO: Should we consider abs value? | ||
+ | void Neuron:: | ||
+ | { | ||
+ | for (unsigned i = connections_in.size()-1; | ||
+ | { | ||
+ | if (connections_in[i]-> | ||
+ | { | ||
+ | // TODO. Do we need to also remove the " | ||
+ | // TODO. Does this retain the actual Connection, which we should potentially delete if not used? | ||
+ | connections_in.erase(connections_in.begin() + i); | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | // Remove all connections with a value below the indicated threshold. | ||
+ | // | ||
+ | // TODO: Should we consider abs value? | ||
+ | void Neuron:: | ||
+ | { | ||
+ | for (unsigned i = connections_out.size()-1; | ||
+ | { | ||
+ | if (connections_out[i]-> | ||
+ | { | ||
+ | // TODO. Do we need to also remove the " | ||
+ | // TODO. Does this retain the actual Connection, which we should potentially delete if not used? | ||
+ | connections_out.erase(connections_out.begin() + i); | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | void Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | assert(connections_in.size() >= idx); | ||
+ | | ||
+ | //for (unsigned i = 0; i < connections_in.size(); | ||
+ | for (unsigned i = connections_in.size()-1; | ||
+ | { | ||
+ | if (connections_in[i]-> | ||
+ | { | ||
+ | connections_in.erase(connections_in.begin() + i); | ||
+ | return; | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | void Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | assert(connections_out.size() >= idx); | ||
+ | |||
+ | //for (unsigned i = 0; i < connections_out.size(); | ||
+ | for (unsigned i = connections_out.size()-1; | ||
+ | { | ||
+ | if (connections_out[i]-> | ||
+ | { | ||
+ | connections_out.erase(connections_out.begin() + i); | ||
+ | return; | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | double Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | value = rand() / double(RAND_MAX); | ||
+ | |||
+ | return value; | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | pActivationX & | ||
+ | { | ||
+ | return activation; | ||
+ | } | ||
+ | |||
+ | |||
+ | Activation_Types Neuron:: | ||
+ | { | ||
+ | return activation-> | ||
+ | } | ||
+ | |||
+ | |||
+ | void Neuron:: | ||
+ | { | ||
+ | // | ||
+ | activation-> | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | |||
+ | |||
+ | |||
+ | |||
+ | |||
+ | |||
+ | |||
+ | |||
+ | |||
+ | /* | ||
+ | // Return random double between -0.5 and +0.5. | ||
+ | double Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | double r = ((double)rand()) / double(RAND_MAX); | ||
+ | return r - 0.5; | ||
+ | } | ||
+ | */ | ||
+ | |||
+ | |||
+ | // Returns a value between 0.0 and 1.0. | ||
+ | double Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return 1.0 / double((1.0 + exp(-weightedSum))); | ||
+ | } | ||
+ | |||
+ | |||
+ | double Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return sigmoid(x) * (1.0 - sigmoid(x)); | ||
+ | } | ||
+ | |||
+ | |||
+ | double Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | if (x < -45.0) | ||
+ | return 0.0; | ||
+ | else | ||
+ | if (x > 45.0) | ||
+ | return 1.0; | ||
+ | else | ||
+ | return 1.0 / (1.0 + exp(-x)); | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns a value between -1.0 and +1.0. | ||
+ | double Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | if (x < -10.0) | ||
+ | return -1.0; | ||
+ | else | ||
+ | if (x > 10.0) | ||
+ | return 1.0; | ||
+ | else | ||
+ | return tanh(x); | ||
+ | } | ||
+ | |||
+ | |||
+ | double Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | return (1.0 - tanh(x)) * (1.0 + tanh(x)); | ||
+ | } | ||
+ | |||
+ | /* | ||
+ | double Neuron:: | ||
+ | { | ||
+ | // tanh - output range [-1.0..1.0] | ||
+ | |||
+ | return tanh(x); | ||
+ | } | ||
+ | |||
+ | double Neuron:: | ||
+ | { | ||
+ | // tanh derivative | ||
+ | return 1.0 - x * x; | ||
+ | } | ||
+ | */ | ||
+ | |||
+ | void Neuron:: | ||
+ | { | ||
+ | #ifdef DEBUG | ||
+ | std::cout << " | ||
+ | #endif | ||
+ | |||
+ | std::cout << " | ||
+ | << connections_out.size() << " Connections-Out" | ||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | if (!connections_in[i]) | ||
+ | continue; | ||
+ | |||
+ | std::cout << " | ||
+ | } | ||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | if (!connections_out[i]) | ||
+ | continue; | ||
+ | |||
+ | std::cout << " | ||
+ | } | ||
+ | } | ||
+ | </ | ||
+ | |||
+ | neuron.h | ||
+ | |||
+ | <file h neuron.h> | ||
+ | #ifndef __SHAREWIZ_NEURON_H__ | ||
+ | #define __SHAREWIZ_NEURON_H__ | ||
+ | |||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | #include " | ||
+ | |||
+ | // Neuron class. | ||
+ | // | ||
+ | // Represents synapses within the brain. | ||
+ | |||
+ | |||
+ | class Connection; | ||
+ | |||
+ | typedef std:: | ||
+ | typedef std:: | ||
+ | |||
+ | |||
+ | class Neuron | ||
+ | { | ||
+ | private: | ||
+ | int index; | ||
+ | double value; | ||
+ | double gradient; | ||
+ | |||
+ | pConnection connections_in; | ||
+ | pConnection connections_out; | ||
+ | |||
+ | pActivationX activation; | ||
+ | |||
+ | public: | ||
+ | Neuron(); | ||
+ | |||
+ | bool operator==(Neuron& | ||
+ | |||
+ | unsigned int getSizeIn(void); | ||
+ | unsigned int getSizeOut(void); | ||
+ | |||
+ | void addConnectionIn(const pConnectionX& | ||
+ | void addConnectionOut(const pConnectionX& | ||
+ | |||
+ | pConnectionX & | ||
+ | pConnectionX & | ||
+ | |||
+ | void pruneConnectionIn(const double& threshold); | ||
+ | void pruneConnectionOut(const double& threshold); | ||
+ | |||
+ | void removeConnectionIn(const unsigned int& idx); | ||
+ | void removeConnectionOut(const unsigned int& idx); | ||
+ | | ||
+ | double getGradient(void); | ||
+ | void setGradient(const double& gradient); | ||
+ | |||
+ | int getIndex(void); | ||
+ | void setIndex(const int& index); | ||
+ | |||
+ | double getValue(void); | ||
+ | void setValue(const double& v); | ||
+ | |||
+ | double randomizeValue(void); | ||
+ | |||
+ | pActivationX & | ||
+ | Activation_Types getActivationType(); | ||
+ | void setActivationType(Activation_Types _activation_type); | ||
+ | |||
+ | double sigmoid(const double& weightedSum); | ||
+ | double sigmoid_derivative(const double& x); | ||
+ | double sigmoidX(double x); | ||
+ | |||
+ | double hyperTanFunction(double& | ||
+ | double tanh_derivative(const double& x); | ||
+ | |||
+ | void printOutput(void); | ||
+ | }; | ||
+ | |||
+ | |||
+ | #endif | ||
+ | </ | ||
+ | |||
+ | neuron_group.cpp | ||
+ | |||
+ | <file cpp neuron_group.cpp> | ||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | #include " | ||
+ | #include " | ||
+ | #include " | ||
+ | |||
+ | |||
+ | NeuronGroup:: | ||
+ | { | ||
+ | index = -1; | ||
+ | |||
+ | neurons.reserve(10); | ||
+ | } | ||
+ | |||
+ | |||
+ | NeuronGroup:: | ||
+ | { | ||
+ | index = -1; | ||
+ | |||
+ | neurons.reserve(num_neurons); | ||
+ | |||
+ | for (unsigned int i = 0; i < num_neurons; | ||
+ | { | ||
+ | pNeuronX tmp(new Neuron()); | ||
+ | tmp-> | ||
+ | neurons.push_back(tmp); | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | int NeuronGroup:: | ||
+ | { | ||
+ | return index; | ||
+ | } | ||
+ | |||
+ | |||
+ | void NeuronGroup:: | ||
+ | { | ||
+ | this-> | ||
+ | } | ||
+ | |||
+ | |||
+ | unsigned int NeuronGroup:: | ||
+ | { | ||
+ | return neurons.size(); | ||
+ | } | ||
+ | |||
+ | |||
+ | void NeuronGroup:: | ||
+ | { | ||
+ | neurons.push_back(n); | ||
+ | } | ||
+ | |||
+ | |||
+ | void NeuronGroup:: | ||
+ | { | ||
+ | assert(neurons.size() >= idx); | ||
+ | |||
+ | for (unsigned i = neurons.size() - 1; i > 0; i--) | ||
+ | { | ||
+ | if (neurons[i]-> | ||
+ | { | ||
+ | neurons.erase(neurons.begin() + i); | ||
+ | return; | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | pNeuronX & | ||
+ | { | ||
+ | assert(neurons.size() >= idx); | ||
+ | |||
+ | return neurons[idx]; | ||
+ | } | ||
+ | |||
+ | |||
+ | void NeuronGroup:: | ||
+ | { | ||
+ | std::cout << "Layer " << index << " has " << neurons.size() << " Neurons" | ||
+ | |||
+ | for (unsigned int i = 0; i< | ||
+ | { | ||
+ | if (!neurons[i]) | ||
+ | continue; | ||
+ | |||
+ | std::cout << " | ||
+ | |||
+ | for (unsigned int j = 0; j< | ||
+ | { | ||
+ | pConnectionX& | ||
+ | if (!currentConnection) | ||
+ | continue; | ||
+ | currentConnection-> | ||
+ | } | ||
+ | } | ||
+ | } | ||
+ | </ | ||
+ | |||
+ | neuron_group.h | ||
+ | |||
+ | <file h neuron_group.h> | ||
+ | #ifndef __SHAREWIZ_NEURON_GROUP_H__ | ||
+ | #define __SHAREWIZ_NEURON_GROUP_H__ | ||
+ | |||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | |||
+ | class NeuronGroup; | ||
+ | class Neuron; | ||
+ | |||
+ | |||
+ | typedef std:: | ||
+ | typedef std:: | ||
+ | |||
+ | typedef std:: | ||
+ | typedef std:: | ||
+ | |||
+ | |||
+ | |||
+ | class NeuronGroup | ||
+ | { | ||
+ | private: | ||
+ | int index; | ||
+ | | ||
+ | pNeuron neurons; | ||
+ | |||
+ | public: | ||
+ | NeuronGroup(); | ||
+ | NeuronGroup(unsigned int num_neurons); | ||
+ | |||
+ | unsigned int getSize(void); | ||
+ | |||
+ | int getIndex(void); | ||
+ | void setIndex(const int& index); | ||
+ | |||
+ | void addNeuron(const pNeuronX& | ||
+ | void removeNeuron(const int& idx); | ||
+ | |||
+ | pNeuronX& | ||
+ | |||
+ | //void feedForward(const pLayerX& | ||
+ | |||
+ | void printOutput(void); | ||
+ | }; | ||
+ | |||
+ | |||
+ | #endif | ||
+ | </ | ||
+ | |||
+ | string.cpp | ||
+ | |||
+ | <file cpp string.cpp> | ||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | #include " | ||
+ | |||
+ | |||
+ | // Example: | ||
+ | // | ||
+ | template < | ||
+ | std::string numberToString(T pNumber) | ||
+ | { | ||
+ | std:: | ||
+ | oOStrStream << pNumber; | ||
+ | return oOStrStream.str(); | ||
+ | } | ||
+ | |||
+ | |||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | |||
+ | // Returns all occurrences of the regex within the string. | ||
+ | // | ||
+ | // Example: | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // Returns: | ||
+ | // | ||
+ | // | ||
+ | std:: | ||
+ | { | ||
+ | std:: | ||
+ | std::regex reg(regex); | ||
+ | // | ||
+ | // | ||
+ | |||
+ | // The 4th param indicates: | ||
+ | // -1 would indicate to return all none-occurences. | ||
+ | // 0 indicates to return all occurences found. | ||
+ | // 1 would return all the 1st sub-expression occurences. | ||
+ | // 2 would return all the 2nd sub-expression occurences. | ||
+ | // 3... | ||
+ | std:: | ||
+ | std:: | ||
+ | for (int i=0; it != reg_end; ++it, i++) | ||
+ | { | ||
+ | //std::cout << " | ||
+ | //std::cout << " | ||
+ | result.push_back(*it); | ||
+ | } | ||
+ | |||
+ | return result; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Replaces all occurrences of the regex within the given string with the replacement string. | ||
+ | // | ||
+ | // Parameters: | ||
+ | // | ||
+ | // | ||
+ | // The replacement string may contain references of the form $n. Every such reference will be replaced by the | ||
+ | // text captured by the n'th parenthesized pattern. | ||
+ | // n can be from 0 to 99, and $0 refers to the text matched by the whole pattern. | ||
+ | // | ||
+ | // This may include format specifiers and escape sequences that are replaced by the characters they represent. | ||
+ | // | ||
+ | // For format_default, | ||
+ | // $n n-th backreference(i.e., | ||
+ | // n must be an integer value designating a valid backreference, | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // $$ A single $ character. | ||
+ | // | ||
+ | // | ||
+ | // One or more of these constants can be combined (using the bitwise OR operator, |) to | ||
+ | // form a valid bitmask value of type regex_constants:: | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // Sub-sequences must begin at the first character to match. | ||
+ | // | ||
+ | // | ||
+ | // This constant has a value of zero**. | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // Example: | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // Returns: | ||
+ | // This is a dogfish. | ||
+ | // | ||
+ | // Example2: | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // Example3: | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // Returns: | ||
+ | // | ||
+ | // | ||
+ | // NOTE: Isolated $1 backreferences. | ||
+ | // The $011 says to use $01, or the 1st regex match. | ||
+ | // If $11 was used, the system would try to use the 11th regex match. | ||
+ | // This only works because the limit of set to 99 maximum matches. | ||
+ | // | ||
+ | // Example4: | ||
+ | // | ||
+ | std::string string_replace(const std:: | ||
+ | std:: | ||
+ | { | ||
+ | std::string result = s; | ||
+ | std::regex reg(regex); | ||
+ | |||
+ | // using string/ | ||
+ | result = std:: | ||
+ | |||
+ | |||
+ | /* | ||
+ | // using string/ | ||
+ | std::cout << std:: | ||
+ | |||
+ | // using range/ | ||
+ | std::string result2; | ||
+ | std:: | ||
+ | std::cout << result2; | ||
+ | |||
+ | // with flags: | ||
+ | std::cout << std:: | ||
+ | std::cout << std::endl; | ||
+ | */ | ||
+ | |||
+ | |||
+ | return result; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Replaces all occurrences of the regex within the given string with the replacement string. | ||
+ | // | ||
+ | // Parameters: | ||
+ | // | ||
+ | // | ||
+ | // The replacement string may contain references of the form $n. Every such reference will be replaced by the | ||
+ | // text captured by the n'th parenthesized pattern. | ||
+ | // n can be from 0 to 99, and $0 refers to the text matched by the whole pattern. | ||
+ | // | ||
+ | // This may include format specifiers and escape sequences that are replaced by the characters they represent. | ||
+ | // | ||
+ | // For format_default, | ||
+ | // $n n-th backreference(i.e., | ||
+ | // n must be an integer value designating a valid backreference, | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // $$ A single $ character. | ||
+ | // | ||
+ | // retain: | ||
+ | // If false then the replacement string completely overwrites the previous string by the replacement. | ||
+ | // | ||
+ | // Example: | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // Returns: | ||
+ | // | ||
+ | // | ||
+ | // Example2: | ||
+ | // | ||
+ | // | ||
+ | // Returns: | ||
+ | // | ||
+ | // | ||
+ | // Example3: | ||
+ | // | ||
+ | // | ||
+ | // Returns: | ||
+ | // | ||
+ | std::string string_replace(const std:: | ||
+ | bool retain) | ||
+ | { | ||
+ | if (retain) | ||
+ | return string_replace(s, | ||
+ | else | ||
+ | return string_replace(s, | ||
+ | } | ||
+ | |||
+ | |||
+ | // Returns true if the string matches the regex. | ||
+ | // | ||
+ | // Example: | ||
+ | bool string_match(const std:: | ||
+ | { | ||
+ | std::smatch m; | ||
+ | std:: | ||
+ | if (m.empty()) { | ||
+ | return false; | ||
+ | } | ||
+ | else { | ||
+ | return true; | ||
+ | } | ||
+ | |||
+ | } | ||
+ | |||
+ | |||
+ | // Shows all matches of the regex within the string. | ||
+ | // | ||
+ | // Example: | ||
+ | // | ||
+ | // | ||
+ | // | ||
+ | // Match of the input against the left Alternative (a) followed by the remainder of the | ||
+ | // regex (c|bc) succeeds, with results: | ||
+ | // | ||
+ | // The skipped Alternatives (ab) and (c) leave their submatches | ||
+ | // m[3] and m[5] empty. | ||
+ | // | ||
+ | // show_matches(" | ||
+ | void show_matches(const std:: | ||
+ | { | ||
+ | std::smatch m; | ||
+ | std:: | ||
+ | if (m.empty()) { | ||
+ | std::cout << " | ||
+ | } | ||
+ | else { | ||
+ | std::cout << " | ||
+ | std::cout << " | ||
+ | for (std:: | ||
+ | std::cout << " m[" << n << " | ||
+ | std::cout << " | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | // Splits a string into separate tokens. | ||
+ | // | ||
+ | // Example: | ||
+ | // s = "0 HEAD"; | ||
+ | // regex = " | ||
+ | std:: | ||
+ | { | ||
+ | | ||
+ | std:: | ||
+ | std::smatch m; | ||
+ | std:: | ||
+ | if (m.empty()) { | ||
+ | return result; | ||
+ | } | ||
+ | else { | ||
+ | // | ||
+ | for (std:: | ||
+ | result.push_back(m[n]); | ||
+ | // | ||
+ | } | ||
+ | |||
+ | return result; | ||
+ | | ||
+ | /* | ||
+ | std:: | ||
+ | std::regex rgx(regex); | ||
+ | std:: | ||
+ | s.end(), | ||
+ | rgx, | ||
+ | -1); | ||
+ | std:: | ||
+ | for (; iter != end; ++iter) | ||
+ | result.push_back(*iter); | ||
+ | |||
+ | return result; | ||
+ | */ | ||
+ | |||
+ | /* | ||
+ | std:: | ||
+ | std::regex rgx(regex); | ||
+ | std:: | ||
+ | std:: | ||
+ | while (i != j) { | ||
+ | //std::cout << *i++ << " "; | ||
+ | result.push_back(*i++); | ||
+ | } | ||
+ | |||
+ | return result; | ||
+ | */ | ||
+ | } | ||
+ | </ | ||
+ | |||
+ | string.h | ||
+ | |||
+ | <file h string.h> | ||
+ | #ifndef __SHAREWIZ_STRING_H__ | ||
+ | #define __SHAREWIZ_STRING_H__ | ||
+ | |||
+ | #include < | ||
+ | #include < | ||
+ | #include < | ||
+ | // String class. | ||
+ | |||
+ | |||
+ | template < | ||
+ | std::string numberToString(T pNumber); | ||
+ | |||
+ | std:: | ||
+ | std::string string_replace(const std:: | ||
+ | std:: | ||
+ | std::string string_replace(const std:: | ||
+ | bool retain); | ||
+ | bool string_match(const std:: | ||
+ | std:: | ||
+ | void show_matches(const std:: | ||
+ | std:: | ||
+ | |||
+ | |||
+ | #endif | ||
+ | </ | ||
+ | |||
+ | verylong.cpp | ||
+ | |||
+ | <file cpp verylong.cpp> | ||
+ | /* | ||
+ | #include < | ||
+ | #include < | ||
+ | #include < | ||
+ | #include < | ||
+ | #include < | ||
+ | #include < | ||
+ | #include < | ||
+ | #include < | ||
+ | */ | ||
+ | #include < | ||
+ | #include < | ||
+ | #include < | ||
+ | #include < | ||
+ | |||
+ | #include " | ||
+ | |||
+ | // Class Data | ||
+ | const Verylong Verylong:: | ||
+ | const Verylong Verylong:: | ||
+ | const Verylong Verylong:: | ||
+ | |||
+ | |||
+ | // Constructors, | ||
+ | |||
+ | Verylong:: | ||
+ | { | ||
+ | std:: | ||
+ | |||
+ | vlsign = (s[0] == ' | ||
+ | if (ispunct(s[0])) | ||
+ | vlstr = s.substr(1, s.length() - 1); // is a punctuation mark. | ||
+ | else | ||
+ | vlstr = s; | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong:: | ||
+ | { | ||
+ | if (n < 0) // check for sign and convert the | ||
+ | { // number to positive if it is negative | ||
+ | vlsign = 1; | ||
+ | n = (-n); | ||
+ | } | ||
+ | else | ||
+ | vlsign = 0; | ||
+ | |||
+ | if (n > 0) | ||
+ | while (n >= 1) // extract the number digit by digit and store | ||
+ | { // internally | ||
+ | vlstr = char(n % 10 + ' | ||
+ | n /= 10; | ||
+ | } | ||
+ | else | ||
+ | vlstr = std:: | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong:: | ||
+ | { | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong:: | ||
+ | { | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong:: | ||
+ | { | ||
+ | int number, factor = 1; | ||
+ | static Verylong max0(INT_MAX); | ||
+ | static Verylong min0(INT_MIN + 1); | ||
+ | std:: | ||
+ | |||
+ | if (*this > max0) | ||
+ | { | ||
+ | std::cerr << "Error : Conversion Verylong-> | ||
+ | return INT_MAX; | ||
+ | } | ||
+ | else | ||
+ | if (*this < min0) | ||
+ | { | ||
+ | std::cerr << "Error : Conversion Verylong-> | ||
+ | return INT_MIN; | ||
+ | } | ||
+ | |||
+ | number = *j - ' | ||
+ | |||
+ | for (j++; j != vlstr.rend(); | ||
+ | { | ||
+ | factor *= 10; | ||
+ | number += (*j - ' | ||
+ | } | ||
+ | |||
+ | if (vlsign) | ||
+ | return -number; | ||
+ | |||
+ | return number; | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong:: | ||
+ | { | ||
+ | double sum, factor = 1.0; | ||
+ | std:: | ||
+ | |||
+ | sum = double(*i) - ' | ||
+ | |||
+ | for (i++; i != vlstr.rend(); | ||
+ | { | ||
+ | factor *= 10.0; | ||
+ | sum += double(*i - ' | ||
+ | } | ||
+ | |||
+ | if (vlsign) | ||
+ | return -sum; | ||
+ | |||
+ | return sum; | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong:: | ||
+ | { | ||
+ | if (vlstr.length() == 0) | ||
+ | return std:: | ||
+ | |||
+ | return vlstr; | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | // Various member operators | ||
+ | |||
+ | const Verylong & Verylong:: | ||
+ | { | ||
+ | if (this == & | ||
+ | return *this; | ||
+ | |||
+ | vlstr = rhs.vlstr; | ||
+ | vlsign = rhs.vlsign; | ||
+ | |||
+ | return *this; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Unary - operator | ||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | Verylong temp(*this); | ||
+ | |||
+ | if (temp != zero) | ||
+ | temp.vlsign = !vlsign; | ||
+ | |||
+ | return temp; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Prefix increment operator | ||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | return *this = *this + one; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Postfix increment operator | ||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | Verylong result(*this); | ||
+ | |||
+ | *this = *this + one; | ||
+ | return result; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Prefix decrement operator | ||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | return *this = *this - one; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Postfix decrement operator | ||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | Verylong result(*this); | ||
+ | |||
+ | *this = *this - one; | ||
+ | return result; | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | return *this = *this + v; | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | return *this = *this - v; | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | return *this = *this * v; | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | return *this = *this / v; | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | return *this = *this % v; | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | Verylong N(degree); | ||
+ | Verylong Y(" | ||
+ | |||
+ | if (N == Verylong:: | ||
+ | return Verylong:: | ||
+ | |||
+ | if (N < Verylong:: | ||
+ | return Verylong:: | ||
+ | |||
+ | while (1) | ||
+ | { | ||
+ | if (N == Verylong:: | ||
+ | { | ||
+ | *this = Y; | ||
+ | break; | ||
+ | } | ||
+ | |||
+ | Y = Y * *this; | ||
+ | N = N - Verylong:: | ||
+ | } | ||
+ | |||
+ | return *this; | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | // Various friendship operators and functions. | ||
+ | |||
+ | Verylong operator + (const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | char digitsum, d1, d2, carry = 0; | ||
+ | std:: | ||
+ | std:: | ||
+ | |||
+ | if (u.vlsign ^ v.vlsign) | ||
+ | { | ||
+ | if (u.vlsign == 0) | ||
+ | return u - abs(v); | ||
+ | else | ||
+ | return v - abs(u); | ||
+ | } | ||
+ | |||
+ | for (j = u.vlstr.rbegin(), | ||
+ | j != u.vlstr.rend() || k != v.vlstr.rend(); | ||
+ | { | ||
+ | d1 = (j == u.vlstr.rend()) ? 0 : *(j++) - ' | ||
+ | d2 = (k == v.vlstr.rend()) ? 0 : *(k++) - ' | ||
+ | digitsum = d1 + d2 + carry; | ||
+ | |||
+ | carry = (digitsum >= 10) ? 1 : 0; | ||
+ | digitsum -= 10 * carry; | ||
+ | |||
+ | temp = char(digitsum + ' | ||
+ | } | ||
+ | |||
+ | if (carry) // if carry at end, last digit is 1 | ||
+ | temp = ' | ||
+ | |||
+ | if (u.vlsign) | ||
+ | temp = ' | ||
+ | |||
+ | return Verylong(temp); | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong operator - (const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | char d, d1, d2, borrow = 0; | ||
+ | int negative; | ||
+ | std:: | ||
+ | std:: | ||
+ | |||
+ | if (u.vlsign ^ v.vlsign) | ||
+ | { | ||
+ | if (u.vlsign == 0) | ||
+ | return u + abs(v); | ||
+ | else | ||
+ | return -(v + abs(u)); | ||
+ | } | ||
+ | |||
+ | Verylong w, y; | ||
+ | |||
+ | if (u.vlsign == 0) // both u,v are positive | ||
+ | if (u< | ||
+ | { | ||
+ | w = v; | ||
+ | y = u; | ||
+ | negative = 1; | ||
+ | } | ||
+ | else | ||
+ | { | ||
+ | w = u; | ||
+ | y = v; | ||
+ | negative = 0; | ||
+ | } | ||
+ | else | ||
+ | if (u< | ||
+ | { | ||
+ | w = u; | ||
+ | y = v; | ||
+ | negative = 1; | ||
+ | } | ||
+ | else | ||
+ | { | ||
+ | w = v; | ||
+ | y = u; | ||
+ | negative = 0; | ||
+ | } | ||
+ | |||
+ | for (i = w.vlstr.rbegin(), | ||
+ | i != w.vlstr.rend() || j != y.vlstr.rend(); | ||
+ | { | ||
+ | d1 = (i == w.vlstr.rend()) ? 0 : *(i++) - ' | ||
+ | d2 = (j == y.vlstr.rend()) ? 0 : *(j++) - ' | ||
+ | |||
+ | d = d1 - d2 - borrow; | ||
+ | borrow = (d < 0) ? 1 : 0; | ||
+ | d += 10 * borrow; | ||
+ | |||
+ | temp = char(d + ' | ||
+ | } | ||
+ | |||
+ | while (temp[0] == ' | ||
+ | temp = temp.substr(1); | ||
+ | |||
+ | if (negative) | ||
+ | temp = ' | ||
+ | |||
+ | return Verylong(temp); | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong operator * (const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | Verylong pprod(" | ||
+ | std:: | ||
+ | |||
+ | for (int j = 0; r != v.vlstr.rend(); | ||
+ | { | ||
+ | int digit = *r - ' | ||
+ | |||
+ | pprod = u.multdigit(digit); | ||
+ | pprod = pprod.mult10(j); | ||
+ | tempsum = tempsum + pprod; | ||
+ | } | ||
+ | |||
+ | tempsum.vlsign = u.vlsign^v.vlsign; | ||
+ | return tempsum; | ||
+ | } | ||
+ | |||
+ | |||
+ | // This algorithm is the long division algorithm. | ||
+ | Verylong operator / (const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | int len = u.vlstr.length() - v.vlstr.length(); | ||
+ | std:: | ||
+ | Verylong w, y, b, c, d, quotient = Verylong:: | ||
+ | |||
+ | if (v == Verylong:: | ||
+ | { | ||
+ | std::cerr << "Error : division by zero" << std::endl; | ||
+ | return Verylong:: | ||
+ | } | ||
+ | |||
+ | w = abs(u); | ||
+ | y = abs(v); | ||
+ | |||
+ | if (w < y) | ||
+ | return Verylong:: | ||
+ | |||
+ | c = Verylong(w.vlstr.substr(0, | ||
+ | |||
+ | for (int i = 0; i <= len; i++) | ||
+ | { | ||
+ | quotient = quotient.mult10(1); | ||
+ | |||
+ | b = d = Verylong:: | ||
+ | |||
+ | while (b < c) | ||
+ | { | ||
+ | b = b + y; d = d + Verylong:: | ||
+ | } | ||
+ | |||
+ | if (c < b) // if b>c, then | ||
+ | { // we have added one count too many | ||
+ | b = b - y; | ||
+ | d = d - Verylong:: | ||
+ | } | ||
+ | |||
+ | quotient = quotient + d; // add to the quotient | ||
+ | |||
+ | if (i < len) | ||
+ | { | ||
+ | // partial remainder * 10 and add to next digit | ||
+ | c = (c - b).mult10(1); | ||
+ | c += Verylong(w.vlstr[w.vlstr.length() - len + i] - ' | ||
+ | } | ||
+ | } | ||
+ | |||
+ | quotient.vlsign = u.vlsign^v.vlsign; | ||
+ | |||
+ | return quotient; | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong operator % (const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | return (u - v*(u / v)); | ||
+ | } | ||
+ | |||
+ | |||
+ | Verylong operator ^ (const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | //return (u - v*(u / v)); | ||
+ | |||
+ | Verylong temp(u); | ||
+ | |||
+ | return temp ^= v; | ||
+ | } | ||
+ | |||
+ | |||
+ | |||
+ | int operator == (const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | return (u.vlsign == v.vlsign && u.vlstr == v.vlstr); | ||
+ | } | ||
+ | |||
+ | |||
+ | int operator != (const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | return !(u == v); | ||
+ | } | ||
+ | |||
+ | |||
+ | int operator < (const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | if (u.vlsign < v.vlsign) | ||
+ | return 0; | ||
+ | else | ||
+ | if (u.vlsign > v.vlsign) | ||
+ | return 1; | ||
+ | |||
+ | // exclusive or (^) to determine sign | ||
+ | if (u.vlstr.length() < v.vlstr.length()) | ||
+ | return (1 ^ u.vlsign); | ||
+ | else | ||
+ | if (u.vlstr.length() > v.vlstr.length()) | ||
+ | return (0 ^ u.vlsign); | ||
+ | |||
+ | return (u.vlstr < v.vlstr && !u.vlsign) || | ||
+ | (u.vlstr > v.vlstr && u.vlsign); | ||
+ | } | ||
+ | |||
+ | |||
+ | int operator <= (const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | return (u<v || u == v); | ||
+ | } | ||
+ | |||
+ | |||
+ | int operator >(const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | return (!(u<v) && u != v); | ||
+ | } | ||
+ | |||
+ | |||
+ | int operator >= (const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | return (u>v || u == v); | ||
+ | } | ||
+ | |||
+ | |||
+ | // Calculate the absolute value of a number | ||
+ | Verylong abs(const Verylong &v) | ||
+ | { | ||
+ | Verylong u(v); | ||
+ | |||
+ | if (u.vlsign) | ||
+ | u.vlsign = 0; | ||
+ | |||
+ | return u; | ||
+ | } | ||
+ | |||
+ | // Calculate the integer square root of a number | ||
+ | // based on the formula (a+b)^2 = a^2 + 2ab + b^2 | ||
+ | Verylong sqrt(const Verylong &v) | ||
+ | { | ||
+ | // if v is negative, error is reported | ||
+ | if (v.vlsign) | ||
+ | { | ||
+ | std::cerr << " | ||
+ | return Verylong:: | ||
+ | } | ||
+ | |||
+ | int j, k = v.vlstr.length() + 1, num = k >> 1; | ||
+ | Verylong y, z, sum, tempsum, digitsum; | ||
+ | |||
+ | std:: | ||
+ | |||
+ | k = 0; | ||
+ | j = 1; | ||
+ | |||
+ | // segment the number 2 digits by 2 digits | ||
+ | if (v.vlstr.length() % 2) | ||
+ | digitsum = Verylong(w[k++] - ' | ||
+ | else | ||
+ | { | ||
+ | digitsum = Verylong((w[k] - ' | ||
+ | k += 2; | ||
+ | } | ||
+ | |||
+ | // find the first digit of the integer square root | ||
+ | sum = z = Verylong(int(sqrt(double(digitsum)))); | ||
+ | |||
+ | // store partial result | ||
+ | temp = char(int(z) + ' | ||
+ | digitsum = digitsum - z*z; | ||
+ | |||
+ | for (; j<num; j++) | ||
+ | { | ||
+ | // get next digit from the number | ||
+ | digitsum = digitsum.mult10(1) + Verylong(w[k++] - ' | ||
+ | y = z + z; // 2*a | ||
+ | z = digitsum / y; | ||
+ | tempsum = digitsum.mult10(1) + Verylong(w[k++] - ' | ||
+ | digitsum = -y*z.mult10(1) + tempsum - z*z; | ||
+ | |||
+ | // decrease z by 1 and re-calculate when it is over-estimated. | ||
+ | while (digitsum < Verylong:: | ||
+ | { | ||
+ | --z; | ||
+ | digitsum = -y*z.mult10(1) + tempsum - z*z; | ||
+ | } | ||
+ | |||
+ | temp = temp + char(int(z) + ' | ||
+ | z = sum = sum.mult10(1) + z; // update value of the partial result | ||
+ | } | ||
+ | |||
+ | Verylong result(temp); | ||
+ | |||
+ | return result; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Raise a number X to a power of degree | ||
+ | Verylong pow(const Verylong &X, const Verylong & | ||
+ | { | ||
+ | Verylong N(degree), Y(" | ||
+ | |||
+ | if (N == Verylong:: | ||
+ | return Verylong:: | ||
+ | |||
+ | if (N < Verylong:: | ||
+ | return Verylong:: | ||
+ | |||
+ | while (1) | ||
+ | { | ||
+ | if (N%Verylong:: | ||
+ | { | ||
+ | Y = Y * x; | ||
+ | N = N / Verylong:: | ||
+ | if (N == Verylong:: | ||
+ | return Y; | ||
+ | } | ||
+ | else | ||
+ | N = N / Verylong:: | ||
+ | |||
+ | x = x * x; | ||
+ | } | ||
+ | } | ||
+ | |||
+ | |||
+ | // Double division function | ||
+ | double div(const Verylong &u, const Verylong &v) | ||
+ | { | ||
+ | double qq = 0.0, qqscale = 1.0; | ||
+ | Verylong w, y, b, c; | ||
+ | int d, count, | ||
+ | decno = std:: | ||
+ | |||
+ | if (v == Verylong:: | ||
+ | { | ||
+ | std::cerr << "ERROR : Division by zero" << std::endl; | ||
+ | return 0.0; | ||
+ | } | ||
+ | |||
+ | if (u == Verylong:: | ||
+ | return 0.0; | ||
+ | |||
+ | w = abs(u); | ||
+ | y = abs(v); | ||
+ | |||
+ | while (w< | ||
+ | { | ||
+ | w = w.mult10(1); | ||
+ | qqscale *= 0.1; | ||
+ | } | ||
+ | |||
+ | int len = w.vlstr.length() - y.vlstr.length(); | ||
+ | std:: | ||
+ | |||
+ | c = Verylong(temp); | ||
+ | |||
+ | for (int i = 0; i <= len; i++) | ||
+ | { | ||
+ | qq *= 10.0; | ||
+ | |||
+ | b = Verylong:: | ||
+ | |||
+ | while (b < c) | ||
+ | { | ||
+ | b += y; d += 1; | ||
+ | } | ||
+ | |||
+ | if (c < b) // if b>c, then | ||
+ | { // we have added one count too many | ||
+ | b -= y; | ||
+ | d -= 1; | ||
+ | } | ||
+ | |||
+ | qq += double(d); | ||
+ | |||
+ | c = (c - b).mult10(1); | ||
+ | |||
+ | if (i < len) // and add to next digit | ||
+ | c += Verylong(w.vlstr[w.vlstr.length() - len + i] - ' | ||
+ | } | ||
+ | |||
+ | qq *= qqscale; count = 0; | ||
+ | |||
+ | while (c != Verylong:: | ||
+ | { | ||
+ | qqscale *= 0.1; | ||
+ | |||
+ | b = Verylong:: | ||
+ | |||
+ | while (b < c) | ||
+ | { | ||
+ | b += y; d += 1; | ||
+ | } | ||
+ | |||
+ | if (c < b) // if b>c, then | ||
+ | { // we have added one count too many | ||
+ | b -= y; d -= 1; | ||
+ | } | ||
+ | |||
+ | qq += double(d)*qqscale; | ||
+ | |||
+ | c = (c - b).mult10(1); | ||
+ | count++; | ||
+ | } | ||
+ | |||
+ | if (u.vlsign^v.vlsign) | ||
+ | qq *= (-1.0); | ||
+ | |||
+ | return qq; | ||
+ | } | ||
+ | |||
+ | |||
+ | std:: | ||
+ | { | ||
+ | if (v.vlstr.length() > 0) | ||
+ | { | ||
+ | if (v.vlsign) s << " | ||
+ | s << v.vlstr; | ||
+ | } | ||
+ | else | ||
+ | s << " | ||
+ | |||
+ | return s; | ||
+ | } | ||
+ | |||
+ | |||
+ | std:: | ||
+ | { | ||
+ | std:: | ||
+ | |||
+ | s >> temp; | ||
+ | v = Verylong(temp); | ||
+ | |||
+ | return s; | ||
+ | } | ||
+ | |||
+ | |||
+ | // | ||
+ | // Private member functions: multdigit(), | ||
+ | // | ||
+ | |||
+ | // Multiply this Verylong number by num | ||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | int carry = 0; | ||
+ | std:: | ||
+ | |||
+ | if (num) | ||
+ | { | ||
+ | std:: | ||
+ | |||
+ | for (r = vlstr.rbegin(); | ||
+ | { | ||
+ | int d1 = *r - ' | ||
+ | digitprod = d1*num + carry; | ||
+ | |||
+ | if (digitprod >= 10) // if there' | ||
+ | { | ||
+ | carry = digitprod / 10; // carry is high digit | ||
+ | digitprod -= carry * 10; // result is low digit | ||
+ | } | ||
+ | else | ||
+ | carry = 0; // otherwise carry is 0 | ||
+ | |||
+ | temp = char(digitprod + ' | ||
+ | } | ||
+ | |||
+ | if (carry) //if carry at end, | ||
+ | temp = char(carry + ' | ||
+ | |||
+ | Verylong result(temp); | ||
+ | return result; | ||
+ | } | ||
+ | else | ||
+ | return zero; | ||
+ | } | ||
+ | |||
+ | |||
+ | // Multiply this Verylong number by 10*num | ||
+ | Verylong Verylong:: | ||
+ | { | ||
+ | int j; | ||
+ | |||
+ | if (*this != zero) | ||
+ | { | ||
+ | std:: | ||
+ | |||
+ | for (j = 0; j<num; j++) | ||
+ | temp = temp + ' | ||
+ | |||
+ | Verylong result(vlstr + temp); | ||
+ | |||
+ | if (vlsign) | ||
+ | result = -result; | ||
+ | |||
+ | return result; | ||
+ | } | ||
+ | else | ||
+ | return zero; | ||
+ | } | ||
+ | |||
+ | |||
+ | //template <> Verylong zero(Verylong) { return Verylong:: | ||
+ | //template <> Verylong one(Verylong) { return Verylong:: | ||
+ | |||
+ | </ | ||
+ | |||
+ | verylong.h | ||
+ | |||
+ | <file h verylong.h> | ||
+ | #ifndef __SHAREWIZ_VERYLONG_H__ | ||
+ | #define __SHAREWIZ_VERYLONG_H__ | ||
+ | |||
+ | //#include < | ||
+ | |||
+ | // Very Long Integer Class | ||
+ | |||
+ | |||
+ | class Verylong | ||
+ | { | ||
+ | private: | ||
+ | // Data Fields | ||
+ | std:: | ||
+ | int vlsign; | ||
+ | |||
+ | // Private member functions | ||
+ | Verylong multdigit(int) const; | ||
+ | Verylong mult10(int) const; | ||
+ | |||
+ | public: | ||
+ | // Constructors and destructor | ||
+ | Verylong(const std:: | ||
+ | Verylong(int); | ||
+ | Verylong(const Verylong &); | ||
+ | ~Verylong(); | ||
+ | |||
+ | // Conversion operators | ||
+ | operator int() const; | ||
+ | operator double() const; | ||
+ | operator std::string () const; | ||
+ | |||
+ | // Arithmetic operators and Relational operators | ||
+ | const Verylong & operator = (const Verylong & | ||
+ | Verylong operator - () const; | ||
+ | Verylong operator ++ (); // prefix | ||
+ | Verylong operator ++ (int); | ||
+ | Verylong operator -- (); // prefix | ||
+ | Verylong operator -- (int); | ||
+ | |||
+ | Verylong operator += (const Verylong &); | ||
+ | Verylong operator -= (const Verylong &); | ||
+ | Verylong operator *= (const Verylong &); | ||
+ | Verylong operator /= (const Verylong &); | ||
+ | Verylong operator %= (const Verylong &); | ||
+ | Verylong operator ^= (const Verylong &); | ||
+ | |||
+ | friend Verylong operator + (const Verylong &, const Verylong &); | ||
+ | friend Verylong operator - (const Verylong &, const Verylong &); | ||
+ | friend Verylong operator * (const Verylong &, const Verylong &); | ||
+ | friend Verylong operator / (const Verylong &, const Verylong &); | ||
+ | friend Verylong operator % (const Verylong &, const Verylong &); | ||
+ | friend Verylong operator ^ (const Verylong &, const Verylong &); | ||
+ | |||
+ | friend int operator == (const Verylong &, const Verylong &); | ||
+ | friend int operator != (const Verylong &, const Verylong &); | ||
+ | friend int operator < (const Verylong &, const Verylong &); | ||
+ | friend int operator <= (const Verylong &, const Verylong &); | ||
+ | friend int operator > (const Verylong &, const Verylong &); | ||
+ | friend int operator >= (const Verylong &, const Verylong &); | ||
+ | |||
+ | // Other functions | ||
+ | friend Verylong abs(const Verylong &); | ||
+ | friend Verylong sqrt(const Verylong &); | ||
+ | friend Verylong pow(const Verylong &, const Verylong &); | ||
+ | friend double div(const Verylong &, const Verylong &); | ||
+ | |||
+ | // Class Data | ||
+ | static const Verylong zero; | ||
+ | static const Verylong one; | ||
+ | static const Verylong two; | ||
+ | |||
+ | // I/O stream functions | ||
+ | friend std:: | ||
+ | friend std:: | ||
+ | }; | ||
+ | |||
+ | |||
+ | //template <> Verylong zero(Verylong) { return Verylong:: | ||
+ | //template <> Verylong one(Verylong) { return Verylong:: | ||
+ | |||
+ | |||
+ | #endif | ||
+ | </ | ||
+ |
brain/brain.1481541534.txt.gz · Last modified: 2020/07/15 09:30 (external edit)