brain.cpp
#include <iostream>
#include <chrono>       // std::chrono::system_clock
#include <string>
#include <vector>
#include <regex>
#include <cstdlib>      // srand, rand
#include <ctime>        // time
#include "net.h"
#include "verylong.h"
 
#include "string.h"
 
int main()
{
  std::vector<std::string> result;
 
  //std::string regex = "[\\s,]+";
  //std::string regex = "([A-Za-z])*";
  //std::string regex = "([\\d]+)";
  //std::string regex = "([\\d]+).*([\\d]+)";
//  std::string regex = "([\\d]+).+([\\d]+)";
  //std::string regex = "\\b([\\d])([^ ]*)";
  std::string regex = "([A-Z]+)([\\d]+)";
 
  std::regex reg(regex);
  std::cout << "RS=" << reg.mark_count() << std::endl;
 
 
  //std::string ss = "The1White2Rabbit,333is4very,late.";
  //std::string ss = "5yz0ab1cde2fgh";
  //std::string ss = "MAY14";
  std::string ss = "aaaMAY14bbbJUNE4";
 
  result = string_find(ss, regex);
 
  for (size_t i = 0; i < result.size(); i++)
  {
    std::cout << "[" << i << "]=" << result[i] << "#" << std::endl;
  }
 
  //   std::string s("This is a catfish");
  //   std::string regex("(cat)");
  //   std::string r("(dog)");
  ss = "This cat is a catfish";
  regex = "(cat)";
  std::string result2;
  result2 = string_replace(ss, regex, "dog", std::regex_constants::format_first_only);
  std::cout << result2 << std::endl;
  result2 = string_replace(ss, regex, "ha ha $$1 boo");
  std::cout << result2 << std::endl;
  result2 = string_replace(ss, regex, "ha ha $$1 boo", false);
  std::cout << result2 << std::endl;
 
 
  ss = "there is a subsequence in the string\n";
  regex = "(sub)";
  result2 = string_replace(ss, regex, "xyz");
  std::cout << result2 << std::endl;
 
  ss = "April 15, 2003";
  regex = "(\\w+) (\\d+), (\\d+)";
  result2 = string_replace(ss, regex, "$011,$3");
  std::cout << result2 << std::endl;
 
  ss = "  14MAY  15JUNE ";
  std::cout << "[" << ss << "]" << std::endl;
  //regex = "([\\d]+)([A+Z]+)";
  regex = "(\\d+)([A-Z]+)";
  result2 = string_replace(ss, regex, "$1 $2");
  std::cout << "[" << result2 << "]" << std::endl;
  result2 = string_replace(ss, regex, "$1 $2", std::regex_constants::format_no_copy);
  std::cout << "[" << result2 << "]" << std::endl;
  result2 = string_replace(ss, regex, "$1 $2", false);
  std::cout << "[" << result2 << "]" << std::endl;
 
 
  /*
  std::string s3("there is a subsequence in the string\n");
  std::regex e("\\b(sub)([^ ]*)");   // matches words beginning by "sub"
 
  // using string/c-string (3) version:
  std::cout << std::regex_replace(s3, e, "sub-$2");
 
  // using range/c-string (6) version:
  std::string result2;
  std::regex_replace(std::back_inserter(result2), s3.begin(), s3.end(), e, "$2");
  std::cout << result2;
 
  // with flags:
  std::cout << std::regex_replace(s3, e, "$1 and $2", std::regex_constants::format_no_copy);
  std::cout << std::endl;
  */
 
  std::cout << "HEAD" << std::endl;
  ss = "0 HEAD";
  regex = "([\\d]+)[\\s]+([A-Z]*)$";
  //regex = "([\\d]+)([A-Z]*)";
  // regex = "[\\d]+[A-Z]*";
 
  result.clear();
  result = string_tokenize(ss, regex);
  for (size_t i = 0; i < result.size(); i++)
  {
    std::cout << "[" << i << "]=" << result[i] << "#" << std::endl;
  }
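 
  /*
  The string_find / string_replace / string_tokenize helpers come from the
  local "string.h" header, which is not shown on this page.  As a rough
  sketch (an assumption, not the actual implementation), they can be built
  on std::regex along these lines:
 
    std::vector<std::string> string_find(const std::string& s, const std::string& rx)
    {
      std::vector<std::string> out;
      std::regex re(rx);
      for (std::sregex_iterator it(s.begin(), s.end(), re), end; it != end; ++it)
        for (unsigned g = 1; g < it->size(); g++)
          out.push_back((*it)[g].str());  // One entry per capture group per match.
      return out;
    }
 
    std::string string_replace(const std::string& s, const std::string& rx,
                               const std::string& fmt,
                               std::regex_constants::match_flag_type flags =
                                 std::regex_constants::match_default)
    {
      return std::regex_replace(s, std::regex(rx), fmt, flags);
    }
  */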
 
 
 
  /*
 
  show_matches("abcdef", "abc|def");
  show_matches("abc", "ab|abc"); // left Alernative matched first
  show_matches("abc", "((a)|(ab))((c)|(bc))");
  */
#ifdef _MSC_VER
  std::string s1;
  std::getline(std::cin, s1);
#endif
 
 
  //return 0;
 
 
 
 
  /*
	Verylong v1("12345678901234567890123456789");
	v1 = 2;
	std::cout << "V1=" << v1 << std::endl;
	Verylong v2(1);
	v1 += v2;
	std::cout << "V1=" << v1 << std::endl;
	v1 ^= 80;
	//v1 ^= 5;
	std::cout << "V1=" << v1 << std::endl;
 
 
	Verylong v3(0);
	v1 = 12;
	v2 = 2;
	v3 = v1 ^ v2;
	std::cout << "V3=" << v3 << std::endl;
 
	Verylong v4(0);
	v4 = v1;
	std::cout << "V4=" << v4 << std::endl;
 
	//return 0;
  */
 
  std::vector<int> myvector;
 
  // set some values (from 1 to 10)
  for (int i = 1; i <= 10; i++) myvector.push_back(i);
 
  // erase the 6th element
  myvector.erase(myvector.begin() + 5);
 
  // erase the first 3 elements:
  myvector.erase(myvector.begin(), myvector.begin() + 3);
 
  std::cout << "myvector contains:";
  for (unsigned i = 0; i<myvector.size(); ++i)
    std::cout << ' ' << myvector[i];
  std::cout << '\n';
 
  myvector.push_back(2);
 
  std::cout << "myvector now contains:";
  for (unsigned i = 0; i<myvector.size(); ++i)
    std::cout << ' ' << myvector[i];
  std::cout << '\n';
 
 
  std::cout << "****************DQN***********************" << std::endl;
 
  /*
  Q-learning example: learn to reach room 5 from any other room as quickly
  as possible.  The doors between the rooms:
 
  [2]---[3]---[1]
         |     |
  [0]---[4]---[5] <- goal
 
  Edges: 0-4, 1-3, 1-5, 2-3, 3-4, 4-5, plus a 5-5 self-loop.
  Actions leading directly into room 5 carry a reward of 100; all others 0.
  */
 
 
  srand((unsigned int)time(NULL));
  std::vector<unsigned int> myDQNTopology;
  myDQNTopology.push_back(6); // 6 neurons. (in layer 0).
 
  std::cout << myDQNTopology.size() << std::endl << std::endl; // myDQNTopology == {6}
 
  Net myDQN(myDQNTopology);
 
  myDQN.setGoalAmount(100);
 
  const std::vector< std::vector<double> > connections = {
    //{ 0, 0, 0, 0, 0 },
    { 0, 0, 0, 4, 0 }, // Each row is {layerFrom, neuronFrom, layerTo, neuronTo, reward R}: connect Layer 0, Neuron 0 to Layer 0, Neuron 4 with reward 0.
    { 0, 1, 0, 1, 0 },
    { 0, 1, 0, 3, 0 },
    { 0, 1, 0, 5, 100 },
    //{ 0, 2, 0, 2, 0 },
    { 0, 2, 0, 3, 0 },
    //{ 0, 3, 0, 3, 0 },
    { 0, 3, 0, 1, 0 },
    { 0, 3, 0, 2, 0 },
    { 0, 3, 0, 4, 0 },
    //{ 0, 4, 0, 4, 0 },
    { 0, 4, 0, 0, 0 },
    { 0, 4, 0, 3, 0 },
    { 0, 4, 0, 5, 100 },
    { 0, 5, 0, 5, 100 },
    { 0, 5, 0, 1, 0 },
    { 0, 5, 0, 4, 0 }
  };
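 
  // Net::DQN() below repeatedly applies the standard Q-learning update
  //
  //   Q(s,a) <- Q(s,a) + alpha * (R + gamma * max_a' Q(s',a') - Q(s,a))
  //
  // over random walks on this graph.  Illustrative step: with alpha = 1 and
  // gamma = 0.8, taking action 1->5 while all Q values are still zero gives
  // Q(1,5) = 100 + 0.8 * max(Q(5,1), Q(5,4), Q(5,5)) = 100.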
 
 
  /*
  const std::vector< std::vector<double> > connections = {
    { 0, 0, 0, 0, 0 },
    { 0, 0, 0, 1, 0 },
    { 0, 0, 0, 3, 0 },
    { 0, 1, 0, 1, 0 },
    { 0, 1, 0, 0, 0 },
    { 0, 1, 0, 2, 100 },
    { 0, 2, 0, 2, 100 },
    { 0, 2, 0, 1, 0 },
    { 0, 2, 0, 5, 0 },
    { 0, 3, 0, 3, 0 },
    { 0, 3, 0, 0, 0 },
    { 0, 2, 0, 4, 0 },
    { 0, 4, 0, 4, 0 },
    { 0, 4, 0, 1, 0 },
    { 0, 4, 0, 3, 0 },
    { 0, 4, 0, 5, 0 },
    { 0, 5, 0, 5, 0 },
    { 0, 5, 0, 2, 100 },
    { 0, 5, 0, 4, 0 }
  };
  */
 
  myDQN.connect(connections);
 
 
  /*
  unsigned int connection_idx = 0;
 
  myDQN.connect(0, 0, 0, 0, 0, connection_idx++);
  myDQN.connect(0, 0, 0, 4, 0, connection_idx++);
  myDQN.connect(0, 1, 0, 1, 0, connection_idx++);
  myDQN.connect(0, 1, 0, 3, 0, connection_idx++);
  myDQN.connect(0, 1, 0, 5, 100, connection_idx++);
  myDQN.connect(0, 2, 0, 2, 0, connection_idx++);
  myDQN.connect(0, 2, 0, 3, 0, connection_idx++);
  myDQN.connect(0, 3, 0, 3, 0, connection_idx++);
  myDQN.connect(0, 3, 0, 1, 0, connection_idx++);
  myDQN.connect(0, 3, 0, 2, 0, connection_idx++);
  myDQN.connect(0, 3, 0, 4, 0, connection_idx++);
  myDQN.connect(0, 4, 0, 4, 0, connection_idx++);
  myDQN.connect(0, 4, 0, 0, 0, connection_idx++);
  myDQN.connect(0, 4, 0, 3, 0, connection_idx++);
  myDQN.connect(0, 4, 0, 5, 100, connection_idx++);
  myDQN.connect(0, 5, 0, 5, 100, connection_idx++);
  myDQN.connect(0, 5, 0, 1, 0, connection_idx++);
  myDQN.connect(0, 5, 0, 4, 0, connection_idx++);
  */
 
  /*
  myDQN.connect(0, 0, 0, 0, 0, connection_idx++);
  myDQN.connect(0, 0, 0, 1, 0, connection_idx++);
  myDQN.connect(0, 0, 0, 3, 0, connection_idx++);
  myDQN.connect(0, 1, 0, 1, 0, connection_idx++);
  myDQN.connect(0, 1, 0, 0, 0, connection_idx++);
  myDQN.connect(0, 1, 0, 2, 100, connection_idx++);
  myDQN.connect(0, 2, 0, 2, 100, connection_idx++);
  myDQN.connect(0, 2, 0, 1, 0, connection_idx++);
  myDQN.connect(0, 2, 0, 5, 0, connection_idx++);
  myDQN.connect(0, 3, 0, 3, 0, connection_idx++);
  myDQN.connect(0, 3, 0, 0, 0, connection_idx++);
  myDQN.connect(0, 3, 0, 4, 0, connection_idx++);
  myDQN.connect(0, 4, 0, 4, 0, connection_idx++);
  myDQN.connect(0, 4, 0, 1, 0, connection_idx++);
  myDQN.connect(0, 4, 0, 3, 0, connection_idx++);
  myDQN.connect(0, 4, 0, 5, 0, connection_idx++);
  myDQN.connect(0, 5, 0, 5, 0, connection_idx++);
  myDQN.connect(0, 5, 0, 4, 0, connection_idx++);
  myDQN.connect(0, 5, 0, 2, 100, connection_idx++);
  */
 
 
  myDQN.printOutput();
 
  for (unsigned int i = 0; i<1000; ++i)
    myDQN.DQN();
 
  myDQN.printOutput();
  myDQN.printResult();
  myDQN.showPolicy();
 
#ifdef _MSC_VER
  std::string s2;
  std::getline(std::cin, s2);
#endif
 
  return 0; // NOTE: everything below this point is unreachable; it is kept for reference.
 
  std::cout << "****************BACKPROP***********************" << std::endl;
	srand((unsigned int)time(NULL));
	std::vector<unsigned int> myTopology;
	myTopology.push_back(2);
	//myTopology.push_back(3); 1 for bias.
	myTopology.push_back(4);
	//myTopology.push_back(8);
	//myTopology.push_back(2);
	myTopology.push_back(1);
 
	std::cout << myTopology.size() << std::endl << std::endl; // myTopology == {2, 4, 1}
	//Neural::Net myNet(myTopology);
	Net myNet(myTopology);
 
 
	std::cout << "****************SET VALS***********************" << std::endl;
 
  myNet.connectForward();
	//myNet.setTest();
 
	std::cout << "****************FORWARD***********************" << std::endl;
 
	/*
	for (unsigned int i = 0; i<50; ++i)
	{
		myNet.feedForward({ 1, 1 });
		myNet.backPropagate({ 0 });
	}
 
	for (unsigned int i = 0; i<50; ++i)
	{
		myNet.feedForward({ 0, 0 });
		myNet.backPropagate({ 1 });
	}
	*/
 
	for (unsigned int i = 0; i<50000; ++i)
  //for (unsigned int i = 0; i<100000; ++i)
  //for (unsigned int i = 0; i<500000; ++i)
	{
		myNet.feedForward({ 0, 0 });
		myNet.backPropagate({ 0 });
 
		myNet.feedForward({ 1, 0 });
		myNet.backPropagate({ 1 });
 
		myNet.feedForward({ 0, 1 });
		myNet.backPropagate({ 1 });
 
		myNet.feedForward({ 1, 1 });
		myNet.backPropagate({ 0 });
	}
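 
	// The four patterns above are the XOR truth table:
	//   (0,0) -> 0,  (1,0) -> 1,  (0,1) -> 1,  (1,1) -> 0.
	// Each iteration feeds all four patterns forward and backpropagates the
	// matching target, so 50000 iterations means 200000 weight updates.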
 
	/*
	std::cout << "Feeding 0,0" << std::endl;
	myNet.feedForward({ 0, 0 });
	myNet.printOutput();
 
	std::cout << "Feeding 1,1" << std::endl;
	myNet.feedForward({ 1, 1 });
	myNet.printOutput();
	*/
 
	std::cout << "****************BACK**************************" << std::endl;
 
	//myNet.backPropagate({0});
	std::cout << "Feeding 0,0" << std::endl;
	myNet.feedForward({ 0, 0 });
	//myNet.printOutput();
	myNet.printResult();
 
	//myNet.backPropagate({1});
	std::cout << "Feeding 1,0" << std::endl;
	myNet.feedForward({ 1, 0 });
	//myNet.printOutput();
	myNet.printResult();
 
	//myNet.backPropagate({1});
	std::cout << "Feeding 1,1" << std::endl;
	myNet.feedForward({ 1, 1 });
	//myNet.printOutput();
	myNet.printResult();
 
	//myNet.backPropagate({1});
	std::cout << "Feeding 0,1" << std::endl;
	myNet.feedForward({ 0, 1 });
	//myNet.printOutput();
	myNet.printResult();
 
 
#ifdef _MSC_VER
  std::string s;
  std::getline(std::cin, s);
#endif
 
 
	return 0;
}

activation.cpp
#include <cmath>
#include <algorithm>    // std::min, std::max
 
 
#include "activation.h"
 
 
Activation::Activation()
{
  activation_type = ACTIVATION_SIGMOID;
}
 
 
Activation::Activation(Activation_Types _activation_type)
{
  activation_type = _activation_type;
}
 
 
Activation::~Activation()
{
}
 
 
 
double Activation::activate(const double& value, const bool derivative)
{
  switch (activation_type)
  {
  case (ACTIVATION_ABS) :
    return (abs(value, derivative));
    break;
 
  case (ACTIVATION_ARCTAN) :
    return (arctan(value, derivative));
    break;
 
  case (ACTIVATION_BOUNDEDRELU) :
    return (boundedRelu(value, derivative));
    break;
 
  case (ACTIVATION_ELU) :
    return (elu(value, derivative));
    break;
 
  case (ACTIVATION_GAUSSIAN) :
    return (gaussian(value, derivative));
    break;
 
  case (ACTIVATION_LINEAR) :
    return (linear(value, derivative));
    break;
 
  case (ACTIVATION_LOG) :
    return (log(value, derivative));
    break;
 
  case (ACTIVATION_RELU) :
    return (relu(value, derivative));
    break;
 
  case (ACTIVATION_SCALED_TANH) :
    return (scaledTanh(value, derivative));
    break;
 
  case (ACTIVATION_SIGMOID) :
    return (sigmoid(value, derivative));
    break;
 
  case (ACTIVATION_SOFTRELU) :
    return (softRelu(value, derivative));
    break;
 
  case (ACTIVATION_SQRT) :
    return (sqrt(value, derivative));
    break;
 
  case (ACTIVATION_SQUARE) :
    return (square(value, derivative));
    break;
 
  case (ACTIVATION_SQUASH) :
    return (squash(value, derivative));
    break;
 
  case (ACTIVATION_STEP) :
    return (step(value, derivative));
    break;
 
  case (ACTIVATION_TANH) :
    return (tanh(value, derivative));
    break;
 
  default:
    return (sigmoid(value, derivative));
    break;
  }
}
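 
 
// Example usage (a sketch, not part of the original file):
//
//   Activation act(ACTIVATION_TANH);
//   double y  = act.activate(x);        // f(x)
//   double dy = act.activate(x, true);  // f'(x), as used during backpropagation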
 
 
// Returns a non-negative value.
//
// f(x) = abs(x)
double Activation::abs(const double& value, const bool derivative)
{
  if (derivative)
    return value < 0 ? -1 : 1;
  else
    return std::abs(value);
}
 
 
// Returns a value between -pi/2 and +pi/2.
double Activation::arctan(const double& value, const bool derivative)
{
  if (derivative)
    return 1.0 / (1.0 + value * value); // d/dx arctan(x) = 1 / (1 + x^2).
  else
    return std::atan(value);
}
 
 
// Returns a value between 0.0 and the bound a.
//
// f(x) = min(a, max(0, x))
double Activation::boundedRelu(const double& value, const bool derivative)
{
  const double a = 1.0; // Assumed bound of 1.0; the original left this as TODO.
  if (derivative)
    return (value > 0 && value < a) ? 1.0 : 0.0;
  else
    return std::min(a, std::max(0.0, value));
}
 
 
// Returns a value between -1.0 and +infinity.
//
// f(x) = x if x > 0, else e^x - 1
double Activation::elu(const double& value, const bool derivative)
{
  if (derivative)
  {
    double output = elu(value);
    return output > 0 ? 1.0 : output + 1;
  }
  else
    return value > 0 ? value : std::exp(value) - 1;
}
 
 
// Returns a value between 0.0 and 1.0.
//
// f(x) = e^(-x^2)
double Activation::gaussian(const double& value, const bool derivative)
{
  if (derivative)
    return -2.0 * value * std::exp(-value * value);
  else
    return std::exp(-value * value);
}
 
 
 
// Returns the input unchanged (unbounded).
//
// f(x) = x
double Activation::linear(const double& value, const bool derivative)
{
  if (derivative)
    return 1; 
  else
    return value; 
}
 
 
// Returns a value between 0.0 and 1.0.
//
// f(x) = 1 / (1 + e^-x)   (logistic)
double Activation::log(const double& value, const bool derivative)
{
  if (derivative)
  {
    double s = 1.0 / (1.0 + std::exp(-value));
    return s * (1.0 - s); // Logistic derivative: f(x) * (1 - f(x)).
  }
  else
    return 1.0 / (1.0 + std::exp(-value));
 
  /*
  if (value < -45.0)
    return 0.0;
  else
  if (value > 45.0)
    return 1.0;
  else
    return 1.0 / (1.0 + std::exp(-value));
  */
}
 
 
// Returns a non-negative value.
//
// f(x) = max(0, x)
double Activation::relu(const double& value, const bool derivative)
{
  if (derivative)
    return value > 0 ? 1.0 : 0.0;
  else
    return value > 0 ? value : 0;
}
 
 
 
// Returns a value between -1.7159 and +1.7159.
//
// f(x) = 1.7159 * tanh(0.66667 * x)
double Activation::scaledTanh(const double& value, const bool derivative)
{
  if (derivative)
  {
    double tanh_value = std::tanh(0.66667 * value);
    return 1.7159 * 0.66667 * (1.0 - tanh_value * tanh_value); // Chain rule on f(x).
  }
  else
    return 1.7159 * std::tanh(0.66667 * value);
}
 
 
// Returns a value between 0.0 and 1.0.
double Activation::sigmoid(const double& value, const bool derivative)
{
  if (derivative)
    return sigmoid(value) * (1.0 - sigmoid(value));
  else
    return 1.0 / double((1.0 + exp(-value)));
}
 
 
/*
// Returns a value between 0.0 and 1.0.
double Activation::sigmoid(const double& value)
{
	return 1.0 / double((1.0 + exp(-value)));
}
 
 
double Activation::sigmoid_derivative(const double& value)
{
	return sigmoid(value) * (1.0 - sigmoid(value));
}
*/
 
 
double Activation::sigmoid_limit(double value, double positive_limit, double negative_limit)
{
	if (value < negative_limit)
		return 0.0;
	else
	if (value > positive_limit)
		return 1.0;
	else
		return 1.0 / (1.0 + exp(-value));
}
 
 
// Returns a non-negative value.
//
// f(x) = log(1 + e^x)   (softplus)
double Activation::softRelu(const double& value, const bool derivative)
{
  if (derivative)
    return 1.0 / (1.0 + std::exp(-value)); // d/dx log(1 + e^x) = logistic(x).
  else
    return std::log(1.0 + std::exp(value));
}
 
 
// Returns a non-negative value (defined for x >= 0).
//
// f(x) = sqrt(x)
double Activation::sqrt(const double& value, const bool derivative)
{
  if (derivative)
    return 1.0 / (2.0 * std::sqrt(value)); // d/dx sqrt(x) = 1 / (2 * sqrt(x)).
  else
    return std::sqrt(value);
}
 
 
// Returns a non-negative value.
//
// f(x) = x^2
double Activation::square(const double& value, const bool derivative)
{
  if (derivative)
    return 2.0 * value; // d/dx x^2 = 2x.
  else
    return value * value;
}
 
 
// Returns a value between -1.0 and +1.0.
//
// f(x) = x / (1 + |x|)
double Activation::squash(const double& value, const bool derivative)
{
  if (derivative)
  {
    double d = 1.0 + std::abs(value);
    return 1.0 / (d * d); // d/dx x / (1 + |x|) = 1 / (1 + |x|)^2.
  }
  else
    return value / (1.0 + std::abs(value));
}
 
 
// Returns either 0.0 or 1.0.
//
// f(x) = 1 if x > 0, else 0
double Activation::step(const double& value, const bool derivative)
{
  if (derivative)
    return 0; // The step function has zero gradient wherever it is defined.
  else
  {
    if (value > 0)
      return 1;
    else
      return 0;
  }
}
 
 
 
// Returns a value between -1.0 and +1.0.
//
// f(x) = tanh(x)
double Activation::tanh(const double& value, const bool derivative)
{
  if (derivative)
  {
    double tanh_value = std::tanh(value);
    return (1.0 - tanh_value * tanh_value);
    //return (1.0 - std::tanh(value)) * (1.0 + std::tanh(value));
  }
  else
    return std::tanh(value);
 
  /*
  if (value < -45.0)
    return -1.0;
  else
  if (value > 45.0)
    return 1.0;
  else
    return std::tanh(value);
  */
 
}
 
 
// Returns a value between -1.0 and +1.0.
double Activation::tanh_limit(double& value, double positive_limit, double negative_limit)
{
	if (value < negative_limit)
		return -1.0;
	else
	if (value > positive_limit)
		return 1.0;
	else
		return tanh(value);
}
 
 
Activation_Types Activation::getActivationType()
{
  return activation_type;
}
 
 
void Activation::setActivationType(Activation_Types _activation_type)
{
  activation_type = _activation_type;
}
 
 
 
 
 
 
 
/*
public double SoftMax(double x, string layer)
{
  // Determine max
  double max = double.MinValue;
  if (layer == "ih")
    max = (ihSum0 > ihSum1) ? ihSum0 : ihSum1;
  else 
  if (layer == "ho")
    max = (hoSum0 > hoSum1) ? hoSum0 : hoSum1;
 
  // Compute scale
  double scale = 0.0;
  if (layer == "ih")
    scale = Math.Exp(ihSum0 - max) + Math.Exp(ihSum1 - max);
  else 
  if (layer == "ho")
    scale = Math.Exp(hoSum0 - max ) + Math.Exp(hoSum1 - max);
 
  return Math.Exp(x - max) / scale;
}
 
*/
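 
/*
A C++ translation of the C# SoftMax above might look like this (a sketch,
assuming the summed layer inputs are first collected into a vector; it would
need <vector> and <algorithm>):
 
  std::vector<double> softmax(const std::vector<double>& v)
  {
    double mx = *std::max_element(v.begin(), v.end()); // Subtract the max for numerical stability.
    double scale = 0.0;
    for (double x : v) scale += std::exp(x - mx);
    std::vector<double> out;
    for (double x : v) out.push_back(std::exp(x - mx) / scale);
    return out;
  }
*/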

activation.h
#ifndef __SHAREWIZ_ACTIVATION_H__
#define __SHAREWIZ_ACTIVATION_H__
 
#include <memory>
 
 
// Built-in activation functions.
 
enum Activation_Types
{
  ACTIVATION_ABS,                      // Absolute value.
  ACTIVATION_ARCTAN,                   // Arctan.
  ACTIVATION_BOUNDEDRELU,              // Bounded rectified linear.
  ACTIVATION_ELU,                      // Exponential linear unit.
  ACTIVATION_GAUSSIAN,                 // Gaussian.
  ACTIVATION_LINEAR,                   // Linear.
  ACTIVATION_LOG,                      // Logistic.
  ACTIVATION_RELU,                     // Rectified linear.
  ACTIVATION_SCALED_TANH,              // Scaled Tanh 1.7159 * tanh(0.66667 * x ).
  ACTIVATION_SIGMOID,                  // Sigmoid.
  ACTIVATION_SOFTRELU,                 // Soft rectified linear. 
  ACTIVATION_SQRT,                     // Square Root.
  ACTIVATION_SQUARE,                   // Square.
  ACTIVATION_SQUASH,                   // Squash.
  ACTIVATION_STEP,                     // Step.
  ACTIVATION_TANH                      // Hyperbolic tangent.
};
 
class Activation;
 
typedef std::shared_ptr<Activation> pActivationX;
//typedef std::vector<pActivationX> pActivation;
 
class Activation
{
private:
	//enum {SIGMOID, TANH, RELU, LINEAR} types;
	//types type;
  Activation_Types activation_type;
 
public:
	Activation();
  Activation(Activation_Types _activation_type);
	~Activation();
 
  double activate(const double& value, const bool derivative=false);
 
  double abs(const double& value, const bool derivative=false);
  double arctan(const double& value, const bool derivative=false);
  double boundedRelu(const double& value, const bool derivative=false);
  double elu(const double& value, const bool derivative = false);
  double gaussian(const double& value, const bool derivative = false);
  double linear(const double& value, const bool derivative=false);
  double log(const double& value, const bool derivative=false);
  double relu(const double& value, const bool derivative = false);
  double scaledTanh(const double& value, const bool derivative = false);
  double sigmoid(const double& value, const bool derivative=false);
  double sigmoid_limit(double value, double positive_limit=45.0, double negative_limit=-45.0);
  double softRelu(const double& value, const bool derivative=false);
  double sqrt(const double& value, const bool derivative = false);
  double square(const double& value, const bool derivative=false);
  double squash(const double& value, const bool derivative = false);
  double step(const double& value, const bool derivative = false);
  double tanh(const double& value, const bool derivative = false);
  double tanh_limit(double& value, double positive_limit=10.0, double negative_limit=-10.0);
 
 
  Activation_Types getActivationType();
  void setActivationType(Activation_Types _activation_type);
 
 
	//double sigmoid(const double& value);
	//double sigmoid_derivative(const double& value);
 
	//double tanh_derivative(const double& value);
};
 
 
 
/*
 
 
	// Built-in activation functions
	export class Activations {
		public static TANH: ActivationFunction = {
		output: x = > (<any>Math).tanh(x),
					der: x = > {
			let output = Activations.TANH.output(x);
			return 1 - output * output;
		}
		};
		public static RELU: ActivationFunction = {
		output: x = > Math.max(0, x),
					der: x = > x <= 0 ? 0 : 1
		};
		public static SIGMOID: ActivationFunction = {
		output: x = > 1 / (1 + Math.exp(-x)),
					der: x = > {
			let output = Activations.SIGMOID.output(x);
			return output * (1 - output);
		}
		};
		public static LINEAR: ActivationFunction = {
		output: x = > x,
					der: x = > 1
		};
	}
 
 
/* Built-in regularization functions.
export class RegularizationFunction {
	public static L1: RegularizationFunction = {
	output: w = > Math.abs(w),
				der: w = > w < 0 ? -1 : 1
	};
	public static L2: RegularizationFunction = {
	output: w = > 0.5 * w * w,
				der: w = > w
	};
}
*/
 
 
 
 
 
#endif

connection.cpp
#include <iostream>
#include <cassert>
#include <cstdlib>      // rand, RAND_MAX
 
#include "neuron.h"
#include "connection.h"
 
 
 
Connection::Connection()
{
#ifdef DEBUG
	std::cout << "Connection::Connection1" << std::endl;
#endif
 
	index = -1;
	deltaWeight = 0;
	weight = 0;
	error = 0;
	momentum = 0.4;
 
  Q = 0;
  R = -1;
 
	randomizeWeight();
}
 
 
Connection::Connection(const pNeuronX& from, const pNeuronX& to)
{
#ifdef DEBUG
	std::cout << "Connection::Connection2" << std::endl;
#endif
 
	index = -1;
	deltaWeight = 0;
	weight = 0;
	error = 0;
	momentum = 0.4;
 
  Q = 0;
  R = -1;
 
	this->from = from;
	this->to = to;
 
	randomizeWeight();
}
 
 
double Connection::getError(void)
{
#ifdef DEBUG
	std::cout << "Connection::getError" << std::endl;
#endif
 
	return error;
}
 
 
void Connection::setError(const double& e)
{
#ifdef DEBUG
	std::cout << "Connection::setError" << std::endl;
#endif
 
	error = e;
}
 
 
int Connection::getIndex(void)
{
#ifdef DEBUG
	std::cout << "Connection::getIndex" << std::endl;
#endif
 
	return index;
}
 
 
void Connection::setIndex(const int& index)
{
#ifdef DEBUG
	std::cout << "Connection::setIndex" << std::endl;
#endif
 
	this->index = index;
}
 
 
double Connection::getWeight(void)
{
#ifdef DEBUG
	std::cout << "Connection::getWeight" << std::endl;
#endif
 
	return weight;
}
 
 
void Connection::setWeight(const double& w)
{
#ifdef DEBUG
	std::cout << "Connection::setWeight" << std::endl;
#endif
 
	//deltaWeight = weight - w;
	weight = w;
}
 
 
double Connection::getDeltaWeight(void)
{
#ifdef DEBUG
	std::cout << "Connection::getDeltaWeight" << std::endl;
#endif
 
	return deltaWeight;
}
 
 
void Connection::setDeltaWeight(const double& dw)
{
#ifdef DEBUG
	std::cout << "Connection::setDeltaWeight" << std::endl;
#endif
 
	deltaWeight = dw;
}
 
 
// Controls how much the weights are changed during a weight update by factoring in previous weight updates.
// It acts as a smoothing parameter that reduces oscillation and helps attain convergence.
// This must be a real value between 0.0 and 1.0, a typical value for momentum is 0.9.
double Connection::getMomentum(void)
{
#ifdef DEBUG
	std::cout << "Connection::getMomentum" << std::endl;
#endif
 
	return momentum;
}
 
 
void  Connection::setMomentum(const double& momentum)
{
#ifdef DEBUG
	std::cout << "Connection::setMomentum" << std::endl;
#endif
 
	this->momentum = momentum;
}
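 
 
// Net::backPropagate applies this as
//
//   w += delta + momentum * previous_delta,  with  delta = rate * gradient * value
//
// so a fraction of the previous weight change is carried into the current
// update, smoothing successive steps.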
 
 
double Connection::getQ(void)
{
  return Q;
}
 
 
void Connection::setQ(const double& _Q)
{
  Q = _Q;
}
 
 
double Connection::getR(void)
{
  return R;
}
 
 
void Connection::setR(const double& _R)
{
  R = _R;
}
 
 
pNeuronX& Connection::getFrom(void)
{
#ifdef DEBUG
	std::cout << "Connection::getFrom" << std::endl;
#endif
 
	return from;
}
 
 
void Connection::setFrom(const pNeuronX& from)
{
#ifdef DEBUG
	std::cout << "Connection::setFrom" << std::endl;
#endif
 
	this->from = from;
}
 
 
pNeuronX& Connection::getTo(void)
{
#ifdef DEBUG
	std::cout << "Connection::getTo" << std::endl;
#endif
 
	return to;
}
 
 
void Connection::setTo(const pNeuronX& to)
{
#ifdef DEBUG
	std::cout << "Connection::setTo" << std::endl;
#endif
 
	this->to = to;
}
 
 
double Connection::randomizeWeight(void)
{
#ifdef DEBUG
	std::cout << "Connection::randomizeWeight" << std::endl;
#endif
 
	weight = rand() / double(RAND_MAX);
	//deltaWeight = weight;
 
	return weight;
}
 
 
void Connection::printOutput(void)
{
#ifdef DEBUG
	std::cout << "Connection::printOutput" << std::endl;
#endif
 
	if (!from)
		return;
	if (!to)
		return;
 
	int f = from->getIndex();
	int t = to->getIndex();
 
	//std::cout << "    Connection[" << index << "] w=" << weight << ", From=" << f << ", To=" << t << ", d=" << deltaWeight << std::endl;
  std::cout << "    Connection[" << index << "] w=" << weight << ", From=" << f << ", To=" << t << ", d=" << deltaWeight << ", Q=" << Q << ", R=" << R << std::endl;
}
 
 
/*
 
#include <algorithm>
#include <iostream>
#include <vector>
 
int random_number(int N) // random value in [0, N)
{
static std::random_device seed;
static std::mt19937 eng(seed());
std::uniform_int_distribution<> dist(0, N - 1);
return dist(eng);
}
 
std::vector<int> random_sample(int first, int last, int n)
{
std::vector<int> numbers;
int remaining = last - first + 1;
int m = std::min(n, remaining);
while (m > 0) {
if (random_number(remaining) < m) {
numbers.push_back(first);
--m;
}
--remaining;
++first;
}
return numbers;
}
 
int main()
{
auto numbers = random_sample(1, 100, 20);
for (int value : numbers) {
std::cout << value << " ";
}
std::cout << '\n';
}
*/
/*
A very simple pseudo-random sequence is 1 + ((power(r, x) - 1) mod p), which takes each value from 1 to p for x = 1..p, where r and p are primes and r <> p.
 
To shuffle an array a of n elements (Fisher-Yates):
for i from n - 1 downto 1 do
  j = random integer with 0 <= j <= i
  exchange a[j] and a[i]
 
 
for (int i = cards.Length - 1; i > 0; i--)
{
int n = rand.Next(i + 1);
Swap(ref cards[i], ref cards[n]);
}
*/
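 
/*
In modern C++ the same shuffle is available directly (a sketch, not used by
this file):
 
  #include <algorithm>
  #include <random>
 
  std::mt19937 eng{std::random_device{}()};
  std::shuffle(cards.begin(), cards.end(), eng);
*/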

connection.h
#ifndef __SHAREWIZ_CONNECTION_H__
#define __SHAREWIZ_CONNECTION_H__
 
#include <memory>
#include <vector>
 
// Connection class.
// 
// A Connection links two Neurons.
 
 
class Neuron;
 
typedef std::shared_ptr<Neuron> pNeuronX;
typedef std::vector<pNeuronX> pNeuron;
 
 
class Connection
{
private:
	int index;
	double weight;
	double deltaWeight;
	double error;
	double momentum;                      // alpha.
 
  // For DQN.  
  // Should we re-use weight as Q and deltaWeight as R.
  double Q;
  double R;
 
  double odds;                         // Odds of being chosen as next action for DQN.
 
 
	double randomizeWeight(void);
 
	pNeuronX from;
	pNeuronX to;
 
public:
	Connection();
	Connection(const pNeuronX& from, const pNeuronX& to);
 
	double getError(void);
	void setError(const double& e);
 
	int getIndex(void);
	void setIndex(const int& index);
 
	double getWeight(void);
	void setWeight(const double& w);
 
	double getDeltaWeight(void);
	void setDeltaWeight(const double& w);
 
	double getMomentum(void);
	void setMomentum(const double& momentum);
 
  double getQ(void);
  void setQ(const double& _Q);
 
  double getR(void);
  void setR(const double& _R);
 
  pNeuronX& getFrom(void);
	void setFrom(const pNeuronX& from);
 
	pNeuronX& getTo(void);
	void setTo(const pNeuronX& to);
 
	void printOutput(void);
};
 
 
#endif

layer.cpp
#include <iostream>
#include <cassert>
 
#include "layer.h"
#include "neuron.h"
#include "connection.h"
#include "activation.h"
 
 
//Layer::Layer() :
Layer::Layer()
//  index(-1),
//  neurons(10)
//  index(0)
//neurons(10)
{
	//idx++;
	//index++;  
	index = -1;
 
	neurons.reserve(10);
 
	//neurons = std::vector<Neuron>();
	//std::cout << "neurons size: " << neurons.size() << std::endl;
}
 
 
Layer::Layer(unsigned int num_neurons)
{
	index = -1;
 
	neurons.reserve(num_neurons);
 
	for (unsigned int i = 0; i<num_neurons; i++)
	{
		pNeuronX tmp(new Neuron());
		tmp->setIndex(i);
		neurons.push_back(tmp);
	}
 
 
	/*
	// Add a bias neuron in each layer.
	// Force the bias node's output to 1.0 (it was the last neuron pushed in this layer):
	pNeuronX tmp(new Neuron());
	tmp->setIndex(100);
	tmp->setValue(1);
	neurons.push_back(tmp);
 
	//neurons.back().back().setOutputVal(1.0);
	//neurons.back()->setValue(1.0);
	*/
}
 
 
int Layer::getIndex(void)
{
	return index;
}
 
 
void Layer::setIndex(const int& index)
{
	this->index = index;
}
 
 
unsigned int Layer::getSize(void)
{
	return neurons.size();
}
 
 
void Layer::addNeuron(const pNeuronX& n)
{
	neurons.push_back(n);
}
 
 
void Layer::removeNeuron(const int& idx)
{
	for (size_t i = neurons.size(); i-- > 0; )  // Count down so element 0 is also checked.
	{
		if (neurons[i]->getIndex() == idx)
		{
			neurons.erase(neurons.begin() + i);
			return;
		}
	}
}
 
 
pNeuronX &Layer::getNeuron(const int& idx)
{
	assert(idx >= 0 && (size_t)idx < neurons.size());
 
	return neurons[idx];
}
 
 
void Layer::feedForward(const pLayerX& prevLayer)
{
	/*
	// INPUT -> HIDDEN
	for(y=0; y<hidden_array_size; y++) {
	for(x=0; x<input_array_size; x++) {
	temp += (input[pattern][x] * weight_i_h[x][y]);
	}
	hidden[y] = (1.0 / (1.0 + exp(-1.0 * (temp + bias[y]))));
	temp = 0;
	}
 
	// HIDDEN -> OUTPUT
	for(y=0; y<output_array_size; y++) {
	for(x=0; x<hidden_array_size; x++) {
	temp += (hidden[x] * weight_h_o[x][y]);
	}
	output[pattern][y] = (1.0 / (1.0 + exp(-1.0 * (temp + bias[y + hidden_array_size]))));
	temp = 0;
	}
	return;
	}
	*/
 
	for (unsigned int i = 0; i<getSize(); i++) // How many Neurons in current layer.
	{
		// Weight sum of the previous layer's output values.
		double weightedSum = 0;
		//weightedSum += .5; // Add a 1 to act as a bias.
		weightedSum += 1.0; // Add a 1 to act as a bias.
 
		pNeuronX& currentNeuron = neurons[i];
		if (!currentNeuron)
			continue;
		unsigned int currentIndex = currentNeuron->getIndex();
 
		for (unsigned int j = 0; j<prevLayer->getSize(); j++) // How many Neurons in prev layer.
		{
			pNeuronX& prevNeuron = prevLayer->getNeuron(j);
			if (!prevNeuron)
				continue;
			//std::cout << "J=" << j << std::endl;
			for (unsigned int k = 0; k<prevNeuron->getSizeOut(); k++)
			{
				if (!prevNeuron->getConnectionOut(k))
					continue;
				if (!prevNeuron->getConnectionOut(k)->getTo())
					continue;
 
				// We are only interested in connections going into the currentNeuron.
				unsigned int prevIndex = prevNeuron->getConnectionOut(k)->getTo()->getIndex();
 
				//if (prevNeuron.getConnectionOut(k).getTo() == currentNeuron)
				if (prevIndex == currentIndex)
				{
					weightedSum += prevLayer->getNeuron(j)->getValue()*prevLayer->getNeuron(j)->getConnectionOut(k)->getWeight();
				}
			}
		}
 
		// Add in Bias?
		//weightedSum += .5; // Add a 1 to act as a bias. 
 
		//std::cout << "*************" << std::endl;
    if (currentNeuron)
    {
      pActivationX act = currentNeuron->getActivation();
      if (!act)
        continue;
 
      // Sigmoid function.  Activation function is applied to this intermediate value to yield the local value of the neuron.
      //currentNeuron->setValue(currentNeuron->sigmoid(weightedSum));
      //currentNeuron->setValue(act.activate(weightedSum));
      //currentNeuron->setValue(currentNeuron->getActivation()->activate(weightedSum));
      currentNeuron->setValue(act->activate(weightedSum));
 
      //std::cout << "------------------" << std::endl;      
    }
	}
 
	//  std::cout << "++++++++++++++++" << std::endl;  
}
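 
 
// In short, for every neuron j in this layer the loop above computes
//
//   value_j = f( 1.0 + sum_i value_i * w_ij )
//
// where i ranges over the previous layer's neurons connected into j, f is the
// neuron's activation function, and the constant 1.0 acts as the bias.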
 
 
void Layer::printOutput(void)
{
	std::cout << "Layer " << index << " has " << neurons.size() << " Neurons" << std::endl;
 
	for (unsigned int i = 0; i<neurons.size(); i++)
	{
		if (!neurons[i])
			continue;
 
		std::cout << "  Neuron[" << i << "] v=" << neurons[i]->getValue() << ", g=" << neurons[i]->getGradient() << std::endl;
 
		for (unsigned int j = 0; j<neurons[i]->getSizeOut(); j++)
		{
			pConnectionX& currentConnection = neurons[i]->getConnectionOut(j);
			if (!currentConnection)
				continue;
			currentConnection->printOutput();
		}
	}
}

layer.h
#ifndef __SHAREWIZ_LAYER_H__
#define __SHAREWIZ_LAYER_H__
 
#include <memory>
#include <vector>
 
//#include "neuron.h"
 
// Layer class.
//
// A Neural Network can have multiple Layers.
 
class Layer;
class Neuron;
 
 
typedef std::shared_ptr<Layer> pLayerX;
typedef std::vector<pLayerX> pLayer;
 
typedef std::shared_ptr<Neuron> pNeuronX;
typedef std::vector<pNeuronX> pNeuron;
 
 
 
class Layer
{
private:
	int index;
 
	pNeuron neurons;
 
public:
	Layer();
	Layer(unsigned int num_neurons);
 
	unsigned int getSize(void);  // Returns how many neurons.
 
	int getIndex(void);
	void setIndex(const int& index);
 
	void addNeuron(const pNeuronX& n);
	void removeNeuron(const int& idx);
 
	pNeuronX& getNeuron(const int& idx);
 
	void feedForward(const pLayerX& prevLayer);
 
	void printOutput(void);
};
 
 
#endif

net.cpp
#include <iostream>
#include <cassert>
#include <cfloat>       // DBL_MAX
#include <cmath>        // pow
#include <cstdlib>      // rand
 
#include "net.h"
#include "layer.h"
#include "neuron.h"
#include "connection.h"
 
typedef std::shared_ptr<Connection> pConnectionX;
typedef std::vector<pConnectionX> pConnection;
 
/*
It has been shown that the error surface of a backpropagation network with one hidden layer and hidden units
has no local minima if the network is trained with an arbitrary set containing different inputs (Yu, 1992).
In practice, however, other features of the error surface such as "ravines" and "plateaus" (Baldi and Hornik, 1988)
can present difficulty for optimisation: two error functions may both be free of local minima (Gori, 1996), yet
one can still be far harder to optimise with gradient descent than the other.  For the purposes of this paper,
the criterion of interest is "the best solution found in a given practical time limit".
*/
 
 
Net::Net() :
learning_rate(0.5),
max_error_tollerance(0.1),
goal_amount(100.0),
alpha(1.0),    // Q-learning rate; default is an assumption (see setAlpha).
gamma(0.8)     // Discount factor; default is an assumption (see setGamma).
{
	layers.reserve(10);
}
 
 
// Example:
//   std::vector<unsigned int> myTopology;
//   myTopology.push_back(2);
//   myTopology.push_back(4);
//   myTopology.push_back(1);
//   Net myNet(myTopology);
Net::Net(const std::vector<unsigned int>& topology) :
learning_rate(0.5),
max_error_tollerance(0.1),
goal_amount(100.0),
alpha(1.0),    // Q-learning rate; default is an assumption (see setAlpha).
gamma(0.8)     // Discount factor; default is an assumption (see setGamma).
{
  assert(topology.size() > 0);
 
  layers.reserve(topology.size());
 
	// obtain a time-based seed:
	//unsigned seed = std::chrono::system_clock::now().time_since_epoch().count();
	// using built-in random generator:
	//shuffle (topology.begin(), topology.end(), std::default_random_engine(seed));
	//auto engine = std::default_random_engine{};
	//std::shuffle(std::begin(topology), std::end(topology), engine);
	//std::random_shuffle ( topology.begin(), topology.end());
	//std::shuffle ( topology.begin(), topology.end() );
 
 
	for (unsigned int i = 0; i<topology.size(); i++)
	{
		pLayerX tmp(new Layer(topology[i]));
    tmp->setIndex(i);
		layers.push_back(tmp);
	}
 
 
	std::cout << "layers size: " << layers.size() << std::endl;
  for (unsigned int i = 0; i < layers.size(); i++)
  {
    std::cout << "layers " << i << " neurons size: " << layers[i]->getSize() << std::endl;
  }
 
 
	//printOutput();
 
	// Add Bias to input and hidden layers.
	//layers[1]->addNeuron(
 
  //connectAll();
  //connectForward();
 
	//printOutput();
}
 
 
// Creates every Connection listed; each row of the matrix is {layerFrom, neuronFrom, layerTo, neuronTo, reward R}.
void Net::connect(const std::vector< std::vector<double> > connections)
{
  //unsigned int connection_idx = 1;
  int connection_idx = 0;
 
  for (unsigned int i = 0; i < connections.size(); i++)
  {
    //for (unsigned int j = 0; j < connections[i].size(); j++)
    //{
      int layerFrom = (int)connections[i][0];
      int neuronFrom = (int)connections[i][1];
      int layerTo = (int)connections[i][2];
      int neuronTo = (int)connections[i][3];
      double _R = connections[i][4];
 
      pConnectionX tmp(new Connection(layers[layerFrom]->getNeuron(neuronFrom), layers[layerTo]->getNeuron(neuronTo)));
      tmp->setIndex(connection_idx++);
      tmp->setQ(0);
      tmp->setR(_R);
      layers[layerFrom]->getNeuron(neuronFrom)->addConnectionOut(tmp);
      layers[layerTo]->getNeuron(neuronTo)->addConnectionIn(tmp);
    //}
  }
}
 
 
// Connects the "From" Neuron to the "To" Neuron.
void Net::connect(int layerFrom, int neuronFrom, int layerTo, int neuronTo, double _R, int connection_idx)
{
  //unsigned int connection_idx = 1;
 
  pConnectionX tmp(new Connection(layers[layerFrom]->getNeuron(neuronFrom), layers[layerTo]->getNeuron(neuronTo)));
  tmp->setIndex(connection_idx);
  tmp->setQ(0);
  tmp->setR(_R);
  layers[layerFrom]->getNeuron(neuronFrom)->addConnectionOut(tmp);
  layers[layerTo]->getNeuron(neuronTo)->addConnectionIn(tmp);
}
 
 
 
// Connects all Neurons to each other.
void Net::connectAll()
{
  // assert(layer.size() > 1); // There must be more than 1 neuron to connect.
 
  int connection_idx = 0;
 
  for (unsigned int i = 0; i<layers.size(); i++) // For each Sending Layer.
  {
    for (unsigned int j = 0; j<layers[i]->getSize(); j++)  // For each neuron in Sending Layer.
    {
      for (unsigned int k = 0; k<layers.size(); k++)  // For each Receiving layer.
      {
        for (unsigned int l = 0; l < layers[k]->getSize(); l++)  // For each neuron in Receiving layer.
        {
          pConnectionX tmp(new Connection(layers[i]->getNeuron(j), layers[k]->getNeuron(l)));
          tmp->setIndex(connection_idx++);
          layers[i]->getNeuron(j)->addConnectionOut(tmp);
          layers[k]->getNeuron(l)->addConnectionIn(tmp);
 
          //std::cout << "I[" << j << "] connected to H[" << k << "] with Connection IDX=" << connection_idx-1 << std::endl;
        }
      }
    }
  }
}
 
 
// Connects all Neurons in a layer to all Neurons in the next layer.
void Net::connectForward()
{
  unsigned int connection_idx = 0;
 
  for (unsigned int i = 0; i<layers.size()-1; i++) // For each Sending Layer.
  {
    for (unsigned int j = 0; j<layers[i]->getSize(); j++)  // how many input neurons in input level.
    {
      for (unsigned int k = 0; k<layers[i+1]->getSize(); k++)  // how many neurons in next level.
      {
        pConnectionX tmp(new Connection(layers[i]->getNeuron(j), layers[i + 1]->getNeuron(k)));
        tmp->setIndex(connection_idx++);
        layers[i]->getNeuron(j)->addConnectionOut(tmp);
        layers[i + 1]->getNeuron(k)->addConnectionIn(tmp);
 
        //std::cout << "I[" << j << "] connected to H[" << k << "] with Connection IDX=" << connection_idx-1 << std::endl;
      }
    }
  }
}
 
 
// Same as connectForward() but code spread out between layers.
// Connects all Neurons to Neurons in next layer.
void Net::connectForward2()
{
  unsigned int connection_idx = 0;
 
  // Create the input to hidden connections.
  // assert(layers.size() > 1); // There must be more than 1 layers to connect.
 
  for (unsigned int i = 0; i<layers.size() - 1; i++) // get how many Layers
  {
    // Create the input to hidden connections.
    if (i == 0)
    {
      if ((layers.size()) > 1)  // there are other layers
      {
        for (unsigned int j = 0; j<layers[0]->getSize(); j++)  // how many input neurons in input level.
        {
          for (unsigned int k = 0; k<layers[1]->getSize(); k++)  // how many neurons in next level.
          {
            pConnectionX tmp(new Connection(layers[0]->getNeuron(j), layers[1]->getNeuron(k)));
            tmp->setIndex(connection_idx++);
            layers[0]->getNeuron(j)->addConnectionOut(tmp);
            layers[1]->getNeuron(k)->addConnectionIn(tmp);
 
            //std::cout << "I[" << j << "] connected to H[" << k << "] with Connection IDX=" << connection_idx-1 << std::endl;
          }
        }
      }
      else //  no other layers.  so no connections possible.
      {
 
      }
    }
 
 
    // Create the inside hidden connections...and hidden to output connection.
    if ((i>0) && (i <= layers.size() - 2))
    {
      for (unsigned int j = 0; j<layers[i]->getSize(); j++)  // how many input neurons.
      {
        for (unsigned int k = 0; k<layers[i + 1]->getSize(); k++)  // how many neurons in next level.
        {
          pConnectionX tmp(new Connection(layers[i]->getNeuron(j), layers[i + 1]->getNeuron(k)));
          tmp->setIndex(connection_idx++);
          layers[i]->getNeuron(j)->addConnectionOut(tmp);
          layers[i + 1]->getNeuron(k)->addConnectionIn(tmp);
 
          //std::cout << "HI[" << j << "] connected to O[" << k << "] with Connection IDX=" << connection_idx-1 << std::endl;
        }
      }
    }
  }
}
 
 
// Connects all Neurons to each other.
void Net::connectAllInLayer(const pLayerX& layer)
{
  // assert(layer.size() > 1); // There must be more than 1 neuron to connect.
 
  unsigned int connection_idx = 0;
 
  for (unsigned int i = 0; i<layer->getSize(); i++)  // For each "from" neuron in Layer.
  {
    for (unsigned int j = 0; j<layer->getSize(); j++)  // For each "to" neuron in layer.
    {
      pConnectionX tmp(new Connection(layer->getNeuron(i), layer->getNeuron(j)));
      tmp->setIndex(connection_idx++);
      layer->getNeuron(i)->addConnectionOut(tmp);
      layer->getNeuron(j)->addConnectionIn(tmp);
 
        //std::cout << "I[" << j << "] connected to H[" << k << "] with Connection IDX=" << connection_idx-1 << std::endl;
    }
  }
}
 
 
 
void Net::DQN(void)
{
  int numLayers = layers.size();
  int numNeurons = 0;
  int numConnections = 0;
 
  // Determine how many layers, neurons (states) and connections (actions) we have.
  for (unsigned int i = 0; i < layers.size(); i++) // get how many Layers
  {
    numNeurons += layers[i]->getSize();
 
    for (unsigned int j = 0; j < layers[i]->getSize(); j++)  // For each neuron in the Sending Layer.
    {
      pNeuronX& currentNeuron = layers[i]->getNeuron(j);  // Get current neuron.
      if (!currentNeuron)
        continue;
 
      numConnections += currentNeuron->getSizeOut();  // Get how many connections from the current neuron.
    }
  }
 
 
  // Select random initial neuron (state).
  int rnd_layer = randomBetween(0, numLayers-1);
  pLayerX currentLayer = layers[rnd_layer];
  int rnd_state = randomBetween(0, currentLayer->getSize()-1); // Index within the chosen layer, not across all numNeurons.
  pNeuronX currentState = currentLayer->getNeuron(rnd_state);
 
  // Set initial reward.
  double R = -1;
 
  // Loop until a reward matching the goal_amount has been found.
  while (R != goal_amount)
  {
    // Select one amongst all possible actions for the current state.
    // TODO: Simply using random treats all possible actions as equal.
    // TODO: Should cater for giving different actions different odds of being chosen.
    int rnd_action = randomBetween(0, currentState->getSizeOut()-1); 
    pConnectionX currentAction = currentState->getConnectionOut(rnd_action);
 
    // Action outcome is set to deterministic in this example
    // Transition probability is 1.
    pNeuronX nextState = currentAction->getTo();
 
    // Get reward.
    R = currentAction->getR();
 
    // Get Q.
    double Q = currentAction->getQ();
 
    // Determine the maximum Q.
    double maxQ = -DBL_MAX; // Start below any representable Q (DBL_MIN is the smallest *positive* double).
    for (unsigned int i = 0; i<nextState->getSizeOut(); i++)
    {
      double tmpQ = nextState->getConnectionOut(i)->getQ();
 
      if (maxQ < tmpQ)
        maxQ = tmpQ;
    }
    if (maxQ == -DBL_MAX) maxQ = 0; // No outgoing actions: treat the future value as 0.
 
    // Update the Q.
    //double v = Q + alpha * (R + gamma * maxQ - Q);
    double target = R + gamma * maxQ;
    //double error = R + gamma * maxQ - Q;
    double error = target - Q;
 
 
    // Experience Replay Memory.
    // To suggest an experience replay memory.
    // This is loosely inspired by the brain, and in particular the way it syncs memory traces in the hippocampus 
    // with the cortex. 
    // What this amounts to is that instead of performing an update and then throwing away the experience tuple, 
    // i.e. the original Q, we keep it around and effectively build up a training set of experiences. 
    // Then, we don't learn based on the new experience that comes in at time t, but instead sample random 
    // expriences from the replay memory and perform an update on each sample. 
    // This feature has the effect of removing correlations in the observed state,action,reward sequence and 
    // reduces gradual drift and forgetting.
    // If the size of the memory pool is greater than some threshold, start replacing old experiences.
    // or those further from the current Q, or randomly etc.
    int rnd_replay_memory = randomBetween(0, 100);
    if (rnd_replay_memory > 99) // if rnd > some value
    {
      //experience_add_every = 5; // number of time steps before we add another experience to replay memory.
      //experience_size = 10000;  // size of experience.
 
      // Record old Q value into array of stored memories.
      // Now select new Q value from randomly selecting one of the old Q memory values - perhaps by using odds.
      // i.e. most fresh Q value might have slightly greater chance of being selected etc.
    }
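 
    // A minimal replay-memory layout (an assumption; nothing like it exists in
    // this codebase yet) could look like:
    //
    //   struct Experience { int state, action; double reward; int nextState; };
    //   std::vector<Experience> replay;  // capped at experience_size
    //
    // On each step push {s, a, R, s'}; periodically sample a random element
    // and re-apply the Q update to it.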
 
    // Clamping TD Error.
    // Clamp the TD Error gradient at some fixed maximum value.
    // If the error is greater in magnitude then some threshold (tderror_clamp) then we cap it at that value.
    // This makes the learning more robust to outliers and has the interpretation of using Huber loss, which 
    // is an L2 penalty in a small region around the target value and an L1 penalty further away.
//    double tderror_clamp = 1.0; // for robustness
//    if (error > tderror_clamp)
//      error = tderror_clamp;
 
    // Periodic Target Q Value Updates.
    // Periodically freeze the Q where it is.
    // Aims to reduce correlations between updates and the immediately undertaken behavior.
    // The idea is to freeze the Q network once in a while into a frozen, copied network, which is used to 
    // only compute the targets.
    // This target network is once in a while updated to the actual current.
    int rnd_freeze = randomBetween(0, 100);
    if (rnd_freeze > 99)
    {
    }
 
    double v = Q + alpha * (error);
 
    currentAction->setQ(v);
 
    // Update the state.
    currentState = nextState;
  }
}
 
 
// Determine the maximum Q for the state.
double Net::getMaxQ(pNeuronX state)
{
  double maxQ = -DBL_MAX; // Start below any representable Q.
  for (unsigned int i = 0; i<state->getSizeOut(); i++)
  {
    double tmpQ = state->getConnectionOut(i)->getQ();
 
    if (maxQ < tmpQ)
      maxQ = tmpQ;
  }
 
  if (maxQ == -DBL_MAX)
    maxQ = 0;
 
  return maxQ;
}
 
 
// Get policy from state.
pNeuronX Net::getPolicy(pNeuronX currentState)
{
  double maxValue = -DBL_MAX; // Start below any representable Q.
  pNeuronX policyGotoState = currentState; // Default goto self if not found.
 
  for (unsigned int i = 0; i < currentState->getSizeOut(); i++)
  {
    pNeuronX nextState = currentState->getConnectionOut(i)->getTo();
    double value = currentState->getConnectionOut(i)->getQ();
 
    if (value > maxValue)
    {
      maxValue = value;
      policyGotoState = nextState;
    }
  }
 
  return policyGotoState;
}
 
 
// Policy is maxQ(states).
void Net::showPolicy(void) 
{
  for (unsigned int i = 0; i < layers.size(); i++)
  {
    for (unsigned int j = 0; j < layers[i]->getSize(); j++)
    {
      pNeuronX fromState = layers[i]->getNeuron(j);
      int from = fromState->getIndex(); 
 
      pNeuronX toState = getPolicy(fromState);
      int to = toState->getIndex(); 
 
      std::cout << "From " << from << " goto " << to << std::endl;
    }
  }
}
 
 
// This method has a bit of a bias towards the low end if the range of rand() isn't divisible 
// by highestNumber - lowestNumber + 1.
int Net::randomBetween(int lowestNumber, int highestNumber)
{
  assert(highestNumber >= lowestNumber);
 
  //return rand() % to + from;
 
  return rand() % (highestNumber - lowestNumber + 1) + lowestNumber;
}
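 
 
// An unbiased alternative (a sketch; it would move this class from rand() to
// the <random> engines):
//
//   #include <random>
//   static std::mt19937 eng{std::random_device{}()};
//   return std::uniform_int_distribution<int>(lowestNumber, highestNumber)(eng);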
 
 
 
// Controls how much the weights are changed during a weight update.
// The larger the value, the more the weights are changed.
// This must be a real value between 0.0 and 10.0.
// These values are commonly set from 0.5 to 0.7.
double Net::getLearningRate(void)
{
	return learning_rate;
}
 
 
void Net::setLearningRate(const double& learning_rate)
{
	this->learning_rate = learning_rate;
}
 
 
double Net::getMaxErrorTollerance(void)
{
	return max_error_tollerance;
}
 
 
void Net::setMaxErrorTollerance(const double& max_error_tollerance)
{
	this->max_error_tollerance = max_error_tollerance;
}
 
 
double Net::getAlpha(void)
{
  return alpha;
}
 
 
// Learning rate.
//
// The Alpha parameter has a range of 0 to 1 (0 <= Alpha <= 1).
//
// Set this by trial and error.  That's pretty much the best thing we have.
void Net::setAlpha(const double& _alpha_amount)
{
  alpha = _alpha_amount;
}
 
 
double Net::getGamma(void)
{
  return gamma;
}
 
 
// Discount factor.
//
// The Gamma parameter has a range of 0 to 1 (0 <= Gamma <= 1).
//
// If Gamma is closer to zero, the agent will tend to consider only immediate rewards.
//
// If Gamma is closer to one, the agent will consider future rewards with greater weight, 
// willing to delay the reward.
void Net::setGamma(const double& _gamma_amount)
{
  gamma = _gamma_amount;
}
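 
 
// Worked example (illustrative numbers): with R = 100, gamma = 0.8 and a best
// next-state Q of 50, the target in Net::DQN() is 100 + 0.8 * 50 = 140.
// With alpha = 0.5 and a current Q of 100, the update moves Q halfway to the
// target: 100 + 0.5 * (140 - 100) = 120.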
 
 
 
double Net::getGoalAmount(void)
{
  return goal_amount;
}
 
 
void Net::setGoalAmount(const double& _goal_amount)
{
  this->goal_amount = _goal_amount;
}
 
 
 
/*
void Net::load_data(char *arg)
{
int x, y;
ifstream in(arg);
if(!in)
{
cout << endl << "failed to load data file" << endl; file_loaded = 0;
return;
}
 
in >> input_array_size;
in >> hidden_array_size;
in >> output_array_size;
in >> learning_rate;
in >> number_of_input_patterns;
bias_array_size = hidden_array_size + output_array_size;
initialize_net();
for (x=0; x<bias_array_size; x++)
in >> bias[x];
for(x=0; x<input_array_size; x++)
{
for(y=0; y<hidden_array_size; y++)
in >> weight_i_h[x][y];
}
for(x=0; x<hidden_array_size; x++)
{
for(y=0; y<output_array_size; y++) in >> weight_h_o[x][y];
}
for(x=0; x<number_of_input_patterns; x++)
{
for(y=0; y<input_array_size; y++)
in >> input[x][y];
}
for(x=0; x<number_of_input_patterns; x++)
{
for(y=0; y<output_array_size; y++)
in >> target[x][y];
}
in.close();
cout << endl << "data loaded" << endl;
return;
}
 
 
 
void Net::save_data(char *argres)
{
int x, y;
ofstream out;
out.open(argres);
if(!out)
{
cout << endl << "failed to save file" << endl;
return;
}
out << input_array_size << endl;
out << hidden_array_size << endl;
out << output_array_size << endl;
out << learning_rate << endl;
out << number_of_input_patterns << endl << endl;
for(x=0; x<bias_array_size; x++)
out << bias[x] << ' ';
out << endl << endl;
for(x=0; x<input_array_size; x++)
{
for(y=0; y<hidden_array_size; y++)
out << weight_i_h[x][y] << ' ';
}
out << endl << endl;
for(x=0; x<hidden_array_size; x++)
{
for(y=0; y<output_array_size; y++) out << weight_h_o[x][y] << ' ';
}
out << endl << endl;
for(x=0; x<number_of_input_patterns; x++)
{
for(y=0; y<input_array_size; y++)
out << input[x][y] << ' ';
out << endl;
}
out << endl;
for(x=0; x<number_of_input_patterns; x++)
{
for(y=0; y<output_array_size; y++)
out << target[x][y] << ' ';
out << endl;
}
out.close();
cout << endl << "data saved" << endl;
return;
}
 
*/
 
void Net::setTest(void)
{
	layers[0]->getNeuron(0)->setValue(1);
	layers[0]->getNeuron(1)->setValue(1);
	//layers[0]->getNeuron(2)->setValue(1);
 
	layers[1]->getNeuron(0)->setValue(123);
	layers[1]->getNeuron(1)->setValue(456);
 
	layers[2]->getNeuron(0)->setValue(0);
 
	layers[0]->getNeuron(0)->getConnectionOut(0)->setWeight(0.1);
	layers[0]->getNeuron(0)->getConnectionOut(1)->setWeight(0.2);
	layers[0]->getNeuron(1)->getConnectionOut(0)->setWeight(0.3);
	layers[0]->getNeuron(1)->getConnectionOut(1)->setWeight(0.4);
	layers[1]->getNeuron(0)->getConnectionOut(0)->setWeight(0.5);
	layers[1]->getNeuron(1)->getConnectionOut(0)->setWeight(0.6);
 
	// Add connection between two neurons in same level 1
	//pConnectionX tmp(new Connection(layers[1]->getNeuron(0), layers[1]->getNeuron(1)));    
	//tmp->setIndex(100);
	//layers[1]->getNeuron(0)->addConnectionOut(tmp);
 
 
	printOutput();
 
	//layers[1]->getNeuron(0)->removeConnectionOut(0)->setWeight(0.5);
	//layers[1]->getNeuron(0)->getConnectionOut(0) = nullptr;
 
	//layers[1]->getNeuron(0)->getConnectionIn(0) = nullptr;
	//layers[0]->getNeuron(0)->getConnectionOut(0) = nullptr;
	//layers[0]->getNeuron(0)->getConnectionOut(1) = nullptr;
	//layers[0]->getNeuron(0) = nullptr;
 
	printOutput();
	std::cout << "**************************************************************" << std::endl;
}
 
 
// TODO: Only works with 3 or more layers.  Perhaps we should cater for the case of zero hidden layers?
void Net::feedForward(const std::vector<double>& inputVals)
{
	//  std::cout << "inputVals.size=" << inputVals.size() << std::endl;
	//  std::cout << "layers[0]->getSize()=" << layers[0]->getSize() << std::endl;
 
 
	assert(layers[0]->getSize() == inputVals.size());
 
	//std::cout << "inputVals.size=" << inputVals.size() << std::endl;
 
	// Setting input vals to input layer.
	for (unsigned int i = 0; i<inputVals.size(); i++)
	{
		if (!layers[0]->getNeuron(i))
			continue;
 
		layers[0]->getNeuron(i)->setValue(inputVals[i]); // layers[0] is the input layer.
	}
 
 
	// Updating hidden layers.
	for (unsigned int i = 1; i<layers.size() - 1; i++)
	{
		layers[i]->feedForward(layers[i - 1]); // Updating the neurons output based on the neurons of the previous layer.
	}
 
 
	// Updating output layer.
	pLayerX& prevLayer = layers[layers.size() - 2];
	layers.back()->feedForward(prevLayer); // Feed the output layer once from the last hidden layer (the old loop re-fed it once per output neuron).
}
 
 
/*
void backward_pass(int pattern)
{
register int x, y;
register double temp = 0;
 
// COMPUTE ERRORSIGNAL FOR OUTPUT UNITS
for(x=0; x<output_array_size; x++) {
errorsignal_output[x] = (target[pattern][x] - output[pattern][x]);
}
 
// COMPUTE ERRORSIGNAL FOR HIDDEN UNITS
for(x=0; x<hidden_array_size; x++) {
for(y=0; y<output_array_size; y++) {
temp += (errorsignal_output[y] * weight_h_o[x][y]);
}
errorsignal_hidden[x] = hidden[x] * (1-hidden[x]) * temp;
temp = 0.0;
}
 
// ADJUST WEIGHTS OF CONNECTIONS FROM HIDDEN TO OUTPUT UNITS
double length = 0.0;
for (x=0; x<hidden_array_size; x++) {
length += hidden[x]*hidden[x];
}
if (length<=0.1) length = 0.1;
for(x=0; x<hidden_array_size; x++) {
for(y=0; y<output_array_size; y++) {
weight_h_o[x][y] += (learning_rate * errorsignal_output[y] *
hidden[x]/length);
}
}
 
// ADJUST BIASES OF HIDDEN UNITS
for(x=hidden_array_size; x<bias_array_size; x++) {
bias[x] += (learning_rate * errorsignal_output[x] / length);
}
 
// ADJUST WEIGHTS OF CONNECTIONS FROM INPUT TO HIDDEN UNITS
length = 0.0;
for (x=0; x<input_array_size; x++) {
length += input[pattern][x]*input[pattern][x];
}
if (length<=0.1) length = 0.1;
for(x=0; x<input_array_size; x++) {
for(y=0; y<hidden_array_size; y++) {
weight_i_h[x][y] += (learning_rate * errorsignal_hidden[y] *
input[pattern][x]/length);
}
}
 
// ADJUST BIASES FOR OUTPUT UNITS
for(x=0; x<hidden_array_size; x++) {
bias[x] += (learning_rate * errorsignal_hidden[x] / length);
}
return;
}
*/
 
void Net::backPropagate(const std::vector<double>& targetVals)
{
	// COMPUTE ERRORSIGNAL FOR OUTPUT UNITS
	pLayerX& outputLayer = layers.back();
	assert(targetVals.size() == outputLayer->getSize());
 
	// Traversing output layer.
	for (unsigned int i = 0; i<outputLayer->getSize(); i++) // For every output Neuron.
	{
		if (!outputLayer->getNeuron(i))
			continue;
 
    double outputValue = outputLayer->getNeuron(i)->getValue();
    double gradient = (targetVals[i] - outputValue) * outputValue * (1.0 - outputValue);
		outputLayer->getNeuron(i)->setGradient(gradient);
	}
 
	// COMPUTE ERRORSIGNAL FOR HIDDEN UNITS
	for (unsigned int i = layers.size() - 2; i>0; i--) // for every hidden layer (assumes at least 3 layers; see the TODO above feedForward)
	{
		pLayerX& currentLayer = layers[i];
		pLayerX& nextLayer = layers[i + 1];
 
		for (unsigned int j = 0; j<currentLayer->getSize(); j++) // for every neuron
		{
			double temp = 0.0;
			pNeuronX& currentNeuron = layers[i]->getNeuron(j);  // current neuron.
			if (!currentNeuron)
				continue;
 
			for (unsigned int k = 0; k<currentNeuron->getSizeOut(); k++)  // for every connection in current layer.
			{
				pConnectionX &currentConnection = currentNeuron->getConnectionOut(k);
				if (!currentConnection)
					continue;
				if (!currentConnection->getTo())
					continue;
 
				int currentIndex = currentConnection->getTo()->getIndex();
 
				for (unsigned int l = 0; l<nextLayer->getSize(); l++) // for every neuron in next layer
				{
					pNeuronX& nextNeuron = nextLayer->getNeuron(l);  // next layers neuron.
					if (!nextNeuron)
						continue;
 
					int nextIndex = nextNeuron->getIndex();
 
					if (currentIndex == nextIndex)
					{
						temp += (nextNeuron->getGradient() * currentConnection->getWeight());    // next layer's error signal scaled by the connecting weight (cf. errorsignal_output * weight_h_o)
					}
				}
			}
 
			currentNeuron->setGradient(currentNeuron->getValue() * (1.0 - currentNeuron->getValue()) * temp);
		}
	}
 
 
	// ADJUST WEIGHTS OF ALL FORWARD CONNECTIONS (input->hidden and hidden->output alike; the loop walks every layer)
	for (unsigned int i = 0; i<layers.size() - 1; i++)  // for every layer.
	{
		for (unsigned int j = 0; j<layers[i]->getSize(); j++)  // for every neuron.
		{
			pNeuronX& currentNeuron = layers[i]->getNeuron(j);
			if (!currentNeuron)
				continue;
			double currentValue = currentNeuron->getValue();
 
			for (unsigned int k = 0; k<layers[i]->getNeuron(j)->getSizeOut(); k++) // for every connection.
			{
				pConnectionX& currentConnection = currentNeuron->getConnectionOut(k);
				if (!currentConnection)
					continue;
				pNeuronX& nextNeuron = currentConnection->getTo();
				if (!nextNeuron)
					continue;
 
				double nextGradient = nextNeuron->getGradient();
				//double delta = 0.5 * nextGradient * currentNeuron->getValue();
				double delta = 0.5 * nextGradient * currentValue; // NOTE: 0.5 is a hard-coded learning rate; the learning_rate member is not consulted here.
 
				//currentConnection->setWeight(currentConnection->getWeight() + delta + (0.4 * currentConnection->getDeltaWeight()));
				currentConnection->setWeight(currentConnection->getWeight() + delta + (currentConnection->getMomentum() * currentConnection->getDeltaWeight()));
				currentConnection->setDeltaWeight(delta);
			}
		}
	}
}
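 
// Example (hedged training-loop sketch; illustrative only, continuing the
// hypothetical 2-3-1 net from the feedForward example above; the XOR-style
// data is purely for illustration):
//
//   std::vector<std::vector<double>> inputs  = { {0,0}, {0,1}, {1,0}, {1,1} };
//   std::vector<std::vector<double>> targets = { {0},   {1},   {1},   {0}   };
//
//   for (int epoch = 0; epoch < 10000; epoch++)
//     for (unsigned int i = 0; i < inputs.size(); i++)
//     {
//       net.feedForward(inputs[i]);
//       net.backPropagate(targets[i]); // one gradient pass + weight update per sample
//     }
//   net.printResult();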
 
 
void Net::backPropagate2(const std::vector<double>& targetVals)
{
	pLayerX& outputLayer = layers.back();
	//assert(targetVals.size() == outputLayer.size());
	assert(targetVals.size() == outputLayer->getSize());
 
	// Starting with the output layer.
	//for (unsigned int i=0; i<outputLayer.size(); i++)
	for (unsigned int i = 0; i<outputLayer->getSize(); i++) // How many neurons in output layer.
	{
		pNeuronX& currentNeuron = outputLayer->getNeuron(i);
		if (!currentNeuron)
			continue;
 
		double output = currentNeuron->getValue();
		// COMPUTE ERRORSIGNAL FOR OUTPUT UNITS
		double error = output * (1 - output) * (pow(targetVals[i] - output, 2)); // NOTE: uses the squared difference here, unlike the plain (target - output) in backPropagate.
		//std::cout << "Error Output=" << error << std::endl;
 
		// ADJUST WEIGHTS OF CONNECTIONS FROM HIDDEN TO OUTPUT UNITS
		for (unsigned int j = 0; j<currentNeuron->getSizeIn(); j++)
		{
			currentNeuron->getConnectionIn(j)->setError(error); // Set error against each connection into the output layer.
			double newWeight = currentNeuron->getConnectionIn(j)->getWeight();
			newWeight += (error * output);
			currentNeuron->getConnectionIn(j)->setWeight(newWeight); // Setting new weight of each connection into the output layer.
		}
		}
	}
 
	for (unsigned int i = layers.size() - 2; i>0; i--) // Traversing hidden layers all the way to input layer.
	{
		pLayerX& currentLayer = layers[i];
		pLayerX& nextLayer = layers[i + 1];
 
		// Traversing current layer
		//for (unsigned int j=0; j<currentLayer.size(); j++)
		for (unsigned int j = 0; j<currentLayer->getSize(); j++)  // for every neuron in current layer.
		{
			const double& output = currentLayer->getNeuron(j)->getValue(); // get its value.
			double subSum = 0.0; // Initializing subsum.
 
			// Traversing next layer.
			for (unsigned int k = 0; k<nextLayer->getSize(); k++) // for every neuron in next layer.
			{
				// Connection j into next-layer neuron k comes from the current neuron j
				// (the layers are assumed to be fully connected in order).
				double error = nextLayer->getNeuron(k)->getConnectionIn(j)->getError();
				double weight = nextLayer->getNeuron(k)->getConnectionIn(j)->getWeight();
 
				subSum += pow(error * weight, 2); // Accumulating the backpropagated error and weight.
			}
 
			double error = output*(1 - output)*(subSum);
 
			for (unsigned int k = 0; k<currentLayer->getNeuron(j)->getSizeIn(); k++)
			{
				currentLayer->getNeuron(j)->getConnectionIn(k)->setError(error);
				double newWeight = currentLayer->getNeuron(j)->getConnectionIn(k)->getWeight();
				newWeight += error * output;
				currentLayer->getNeuron(j)->getConnectionIn(k)->setWeight(newWeight);
			}
		}
	}
}
 
 
 
 
//void Net::printOutput(void) const 
void Net::printOutput(void)
{
	std::cout << "***Net has [" << layers.size() << "] layers" << std::endl;
 
	for (unsigned int i = 0; i<layers.size(); i++)
	{
		pLayerX& currentLayer = layers[i];
		currentLayer->printOutput();
	}
}
 
 
void Net::printResult(void)
{
	pLayerX& outputLayer = layers.back();
 
	for (unsigned int i = 0; i<outputLayer->getSize(); i++)
	{
		std::cout << "Result=" << outputLayer->getNeuron(i)->getValue() << std::endl;
	}
}
 
 
void Net::setTarget(const std::vector<double>& targetVals)
{
	// TODO: not yet implemented.
}

net.h
#ifndef __SHAREWIZ_NET_H__
#define __SHAREWIZ_NET_H__
 
#include <memory>
#include <vector>
 
// A Net class.
//
// To handle neural networks.
 
// There are several things to keep in mind when applying this agent in practice:
//   1. If the rewards are very sparse in the environment the agent will have trouble learning.
//      Right now there is no prioritized sweeping support, but one might imagine oversampling experiences that
//      have high TD errors.  It's not clear how this can be done in the most principled way.
//      Similarly, there are no eligibility traces right now, though these could be added with a few modifications
//      in future versions.
//   2. The exploration is rather naive, since a random action is taken once in a while.
//      If the environment requires longer sequences of precise actions to get a reward, the agent might have a
//      lot of difficulty finding these by chance, and then also learning from them sufficiently.
//   3. DQN only supports a set number of discrete actions and it is not obvious how one can incorporate
//      (high-dimensional) continuous action spaces.
 
class Layer;
 
typedef std::shared_ptr<Layer> pLayerX;
typedef std::vector<pLayerX> pLayer;
 
 
class Neuron;
typedef std::shared_ptr<Neuron> pNeuronX;
typedef std::vector<pNeuronX> pNeuron;
 
 
class Net
{
private:
	//std::vector<double>& targetVals;
 
	double learning_rate; // eta.
                                       // Controls how much the weights are changed during a weight update.
                                       // The larger the value, the more the weights are changed.
                                       // This must be a real value between 0.0 and 10.0.
                                       // These values are commonly set from 0.5 to 0.7.
	double max_error_tollerance;
 
  double alpha = 0.1;                  // Learning rate.
                                       // Set this by trial and error.  That's pretty much the best approach we have.
  double gamma = 0.4;                  // Discount factor (0 - 1).
                                       // If Gamma is closer to 0, the agent will tend to consider only 
                                       // immediate rewards.
                                       // If Gamma is closer to 1, the agent will consider future rewards 
                                       // with greater weight, willing to delay the reward.
  //double epsilon = 0.2;                // Initial epsilon for epsilon-greedy policy (0 - 1).
                                       // High epsilon(up to 1) will cause the agent to take more random actions.
                                       // It is a good idea to start with a high epsilon(e.g. 0.2 or even a bit higher) 
                                       // and decay it over time to be lower(e.g. 0.05).
  //double lambda = 0;                   // eligibility trace decay, [0,1). 0 = no eligibility traces.
 
  double goal_amount;                  // Used by DQN networks.  The goal amount to try to obtain.
 
	pLayer layers;
 
public:
	Net();
	Net(const std::vector<unsigned int>& topology);
 
	double getLearningRate(void);
	void setLearningRate(const double& learning_rate);
 
	double getMaxErrorTollerance(void);
	void setMaxErrorTollerance(const double& max_error_tollerance);
 
  double getAlpha(void);
  void setAlpha(const double& _alpha_amount);
 
  double getGamma(void);
  void setGamma(const double& _gamma_amount);
 
  double getGoalAmount(void);
  void setGoalAmount(const double& _goal_amount);
 
	void setTarget(const std::vector<double>& targetVals);
 
	void setTest();
 
  void connect(const std::vector< std::vector<double> > connections);
  void connect(int layerFrom, int neuronFrom, int layerTo, int neuronTo, double _R, int connection_idx = 1);
 
  void connectAll();
  void connectForward();
  void connectForward2();
  void connectAllInLayer(const pLayerX& layer);
 
  void DQN(void);
  double getMaxQ(pNeuronX state);
 
  pNeuronX getPolicy(pNeuronX currentState);
  void showPolicy(void);
 
	void feedForward(const std::vector<double>& inputVals);
	void backPropagate(const std::vector<double>& targetVals);
	void backPropagate2(const std::vector<double>& targetVals);
 
 
  int randomBetween(int lowestNumber, int highestNumber);
 
	void printOutput(void);
	void printResult(void);
};
 
 
#endif

neuron.cpp
#include <iostream>
#include <cassert>
#include <cmath>    // exp, tanh
#include <cstdlib>  // rand, RAND_MAX
 
#include "neuron.h"
#include "connection.h"
 
 
Neuron::Neuron()
{
#ifdef DEBUG
	std::cout << "Neuron::Neuron" << std::endl;
#endif
 
	index = -1;
	gradient = 0;
	value = 0;
 
	connections_in.reserve(10);
	connections_out.reserve(10);
 
  pActivationX tmp(new Activation());
  //pActivationX tmp(new Activation(ACTIVATION_SQRT));
  this->activation = tmp;
  //activation_type = ACTIVATION_SIGMOID;
 
	randomizeValue();
}
 
 
bool Neuron::operator==(Neuron& rhs) const
{
	//cout << "operator overloaded == " << rhs.name;
	if (this->index==rhs.index)
	//if (*this == rhs)
		return true;
 
	return false;
}
 
 
double Neuron::getGradient(void)
{
#ifdef DEBUG
	std::cout << "Neuron::getGradient" << std::endl;
#endif
 
	return gradient;
}
 
 
void Neuron::setGradient(const double& gradient)
{
#ifdef DEBUG
	std::cout << "Neuron::setGradient" << std::endl;
#endif
 
	this->gradient = gradient;
}
 
 
int Neuron::getIndex(void)
{
#ifdef DEBUG
	std::cout << "Neuron::getIndex" << std::endl;
#endif
 
	return index;
}
 
 
void Neuron::setIndex(const int& index)
{
#ifdef DEBUG
	std::cout << "Neuron::setIndex" << std::endl;
#endif
 
	this->index = index;
}
 
 
unsigned int Neuron::getSizeIn(void)
{
#ifdef DEBUG
	std::cout << "Neuron::getSizeIn" << std::endl;
#endif
 
	return connections_in.size();
}
 
 
unsigned int Neuron::getSizeOut(void)
{
#ifdef DEBUG
	std::cout << "Neuron::getSizeOut" << std::endl;
#endif
 
	return connections_out.size();
}
 
 
double Neuron::getValue(void)
{
#ifdef DEBUG
	std::cout << "Neuron::getValue" << std::endl;
#endif
 
	return value;
}
 
 
void Neuron::setValue(const double& v)
{
#ifdef DEBUG
	std::cout << "Neuron::setValue" << std::endl;
#endif
 
	value = v;
}
 
 
void Neuron::addConnectionIn(const pConnectionX& c)
{
#ifdef DEBUG
	std::cout << "Neuron::addConnectionIn" << std::endl;
#endif
 
	connections_in.push_back(c);
	//index++;
}
 
 
//void Neuron::addConnectionOut(const shared_ptr<Connection>& c)
void Neuron::addConnectionOut(const pConnectionX& c)
{
#ifdef DEBUG
	std::cout << "Neuron::addConnectionOut" << std::endl;
#endif
 
	connections_out.push_back(c);
	//index++;
}
 
 
// Returns a specific input connection.
pConnectionX &Neuron::getConnectionIn(const unsigned int& idx)
{
#ifdef DEBUG
  std::cout << "Neuron::getConnectionIn" << std::endl;
#endif
 
  assert(idx < connections_in.size());
 
  return connections_in[idx];
}
 
 
// Returns a specific output connection.
pConnectionX &Neuron::getConnectionOut(const unsigned int& idx)
{
#ifdef DEBUG
  std::cout << "Neuron::getConnectionOut" << std::endl;
#endif
 
  assert(idx < connections_out.size());
 
  return connections_out[idx];
}
 
 
// Remove all input connections with a weight below the indicated threshold.
//
// TODO: Should we consider abs value?
void Neuron::pruneConnectionIn(const double& threshold)
{
  // Iterate backwards so erasing does not shift the elements still to be visited.
  // A signed index avoids unsigned wrap-around when the vector is empty, and lets element 0 be checked.
  for (int i = (int)connections_in.size() - 1; i >= 0; i--)
  {
    if (connections_in[i]->getWeight() < threshold)
    {
      // TODO: Do we need to also remove the "From" and "To" references manually, or will the shared_ptrs clean up?
      connections_in.erase(connections_in.begin() + i);
    }
  }
}
 
 
// Remove all output connections with a weight below the indicated threshold.
//
// TODO: Should we consider abs value?
void Neuron::pruneConnectionOut(const double& threshold)
{
  // Iterate backwards with a signed index (see pruneConnectionIn).
  for (int i = (int)connections_out.size() - 1; i >= 0; i--)
  {
    if (connections_out[i]->getWeight() < threshold)
    {
      // TODO: Do we need to also remove the "From" and "To" references manually, or will the shared_ptrs clean up?
      connections_out.erase(connections_out.begin() + i);
    }
  }
}
 
 
// Remove the input connection whose index property equals idx.
void Neuron::removeConnectionIn(const unsigned int& idx)
{
#ifdef DEBUG
	std::cout << "Neuron::removeConnectionIn" << std::endl;
#endif
 
	// NOTE: idx is matched against each connection's getIndex(), not used as a vector position.
	for (int i = (int)connections_in.size() - 1; i >= 0; i--)
	{
		if (connections_in[i]->getIndex() == idx)
		{
			connections_in.erase(connections_in.begin() + i);
			return;
		}
	}
}
 
 
// Remove the output connection whose index property equals idx.
void Neuron::removeConnectionOut(const unsigned int& idx)
{
#ifdef DEBUG
	std::cout << "Neuron::removeConnectionOut" << std::endl;
#endif
 
	// NOTE: idx is matched against each connection's getIndex(), not used as a vector position.
	for (int i = (int)connections_out.size() - 1; i >= 0; i--)
	{
		if (connections_out[i]->getIndex() == idx)
		{
			connections_out.erase(connections_out.begin() + i);
			return;
		}
	}
}
 
 
double Neuron::randomizeValue(void)
{
#ifdef DEBUG
	std::cout << "Neuron::randomizeValue" << std::endl;
#endif
 
	value = rand() / double(RAND_MAX);
 
	return value;
}
 
 
 
pActivationX &Neuron::getActivation(void)
{
  return activation;
}
 
 
Activation_Types Neuron::getActivationType()
{
  return activation->getActivationType();
}
 
 
void Neuron::setActivationType(Activation_Types _activation_type)
{
  //activation_type = _activation_type;
  activation->setActivationType(_activation_type);
}
 
 
 
 
 
 
 
 
 
 
 
 
/*
// Return random double between -0.5 and +0.5.
double Neuron::randf()
{
#ifdef DEBUG
std::cout << "Neuron::randf" << std::endl;
#endif
 
double r = ((double)rand()) / double(RAND_MAX);
return r - 0.5;
}
*/
 
 
// Returns a value between 0.0 and 1.0.
double Neuron::sigmoid(const double& weightedSum)
{
#ifdef DEBUG
	std::cout << "Neuron::sigmoid" << std::endl;
#endif
 
	return 1.0 / double((1.0 + exp(-weightedSum)));
}
 
 
double Neuron::sigmoid_derivative(const double& x)
{
#ifdef DEBUG
	std::cout << "Neuron::sigmoid_derivative" << std::endl;
#endif
 
	return sigmoid(x) * (1.0 - sigmoid(x));
}
 
 
double Neuron::sigmoidX(double x)
{
#ifdef DEBUG
	std::cout << "Neuron::sigmoidX" << std::endl;
#endif
 
	if (x < -45.0)
		return 0.0;
	else
	if (x > 45.0)
		return 1.0;
	else
		return 1.0 / (1.0 + exp(-x));
}
 
 
// Returns a value between -1.0 and +1.0.
double Neuron::hyperTanFunction(double& x)
{
#ifdef DEBUG
	std::cout << "Neuron::hyperTanFunction" << std::endl;
#endif
 
	if (x < -10.0)
		return -1.0;
	else
	if (x > 10.0)
		return 1.0;
	else
		return tanh(x);
}
 
 
double Neuron::tanh_derivative(const double& x)
{
#ifdef DEBUG
	std::cout << "Neuron::tanh_derivative" << std::endl;
#endif
 
	return (1.0 - tanh(x)) * (1.0 + tanh(x));
}
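 
// Example (hedged numerical sanity check of both derivative helpers against a
// central finite difference; standalone sketch, not part of the class itself):
//
//   Neuron n;
//   const double x = 0.7, h = 1e-5;
//   double numeric = (n.sigmoid(x + h) - n.sigmoid(x - h)) / (2 * h);
//   double closed  = n.sigmoid_derivative(x);  // sigmoid(x) * (1 - sigmoid(x))
//   // numeric and closed should agree to roughly 10 significant digits.
//
//   numeric = (tanh(x + h) - tanh(x - h)) / (2 * h);
//   closed  = n.tanh_derivative(x);            // (1 - tanh(x)) * (1 + tanh(x))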
 
/*
double Neuron::transferFunction(double x)
{
// tanh - output range [-1.0..1.0]
 
return tanh(x);
}
 
double Neuron::transferFunctionDerivative(double x)
{
// tanh derivative
return 1.0 - x * x;
}
*/
 
void Neuron::printOutput(void)
{
#ifdef DEBUG
	std::cout << "Neuron::printOutput" << std::endl;
#endif
 
	std::cout << "Neuron[" << index << "] = " << value << " . It has " << connections_in.size() << " Connections-In, and "
		<< connections_out.size() << " Connections-Out" << std::endl;
 
	for (unsigned int i = 0; i<connections_in.size(); i++)
	{
		if (!connections_in[i])
			continue;
 
		std::cout << "  Connection-In[" << i << "] w=" << connections_in[i]->getWeight() << ", d=" << connections_in[i]->getDeltaWeight() << std::endl;
	}
 
	for (unsigned int i = 0; i<connections_out.size(); i++)
	{
		if (!connections_out[i])
			continue;
 
		std::cout << "  Connection-Out[" << i << "] w=" << connections_out[i]->getWeight() << ", d=" << connections_out[i]->getDeltaWeight() << std::endl;
	}
}

neuron.h
#ifndef __SHAREWIZ_NEURON_H__
#define __SHAREWIZ_NEURON_H__
 
#include <memory>
#include <vector>
 
#include "activation.h"
 
// Neuron class.
//
// Represents a neuron within the brain; its Connections play the role of synapses.
 
 
class Connection;
 
typedef std::shared_ptr<Connection> pConnectionX;
typedef std::vector<pConnectionX> pConnection;
 
 
class Neuron
{
private:
	int index;
	double value;
	double gradient;  // How far off, and in which direction (positive or negative), the local value is relative to the target output.
 
	pConnection connections_in;
	pConnection connections_out;
 
  pActivationX activation;
 
public:
	Neuron();
 
	bool operator==(Neuron& rhs) const;
 
	unsigned int getSizeIn(void);  // Returns how many connections.
	unsigned int getSizeOut(void);  // Returns how many connections.
 
	void addConnectionIn(const pConnectionX& c);
	void addConnectionOut(const pConnectionX& c);
 
	pConnectionX &getConnectionIn(const unsigned int& idx);
	pConnectionX &getConnectionOut(const unsigned int& idx);
 
	void pruneConnectionIn(const double& threshold);  // Remove all synapses with a value below the indicated threshold.
	void pruneConnectionOut(const double& threshold);  // Remove all synapses with a value below the indicated threshold.
 
  void removeConnectionIn(const unsigned int& idx);
  void removeConnectionOut(const unsigned int& idx);
 
  double getGradient(void);
	void setGradient(const double& gradient);
 
	int getIndex(void);
	void setIndex(const int& index);
 
	double getValue(void);
	void setValue(const double& v);
 
	double randomizeValue(void);
 
  pActivationX &getActivation(void);
  Activation_Types getActivationType();
  void setActivationType(Activation_Types _activation_type);
 
	double sigmoid(const double& weightedSum);
	double sigmoid_derivative(const double& x);
	double sigmoidX(double x);
 
	double hyperTanFunction(double& x);
	double tanh_derivative(const double& x);
 
	void printOutput(void);
};
 
 
#endif

neuron_group.cpp
#include <iostream>
#include <cassert>
 
#include "neuron_group.h"
#include "neuron.h"
#include "connection.h"
 
 
NeuronGroup::NeuronGroup()
{
  index = -1;
 
  neurons.reserve(10);
}
 
 
NeuronGroup::NeuronGroup(unsigned int num_neurons)
{
  index = -1;
 
  neurons.reserve(num_neurons);
 
  for (unsigned int i = 0; i < num_neurons; i++)
  {
    pNeuronX tmp(new Neuron());
    tmp->setIndex(i);
    neurons.push_back(tmp);
  }
}
 
 
int NeuronGroup::getIndex(void)
{
  return index;
}
 
 
void NeuronGroup::setIndex(const int& index)
{
  this->index = index;
}
 
 
unsigned int NeuronGroup::getSize(void)
{
  return neurons.size();
}
 
 
void NeuronGroup::addNeuron(const pNeuronX& n)
{
  neurons.push_back(n);
}
 
 
void NeuronGroup::removeNeuron(const int& idx)
{
  // NOTE: idx is matched against each neuron's index property, not used as a vector position.
  for (int i = (int)neurons.size() - 1; i >= 0; i--)
  {
    if (neurons[i]->getIndex() == idx)
    {
      neurons.erase(neurons.begin() + i);
      return;
    }
  }
}
 
 
pNeuronX &NeuronGroup::getNeuron(const int& idx)
{
  assert(idx >= 0 && idx < (int)neurons.size());
 
  return neurons[idx];
}
 
 
void NeuronGroup::printOutput(void)
{
  std::cout << "Layer " << index << " has " << neurons.size() << " Neurons" << std::endl;
 
  for (unsigned int i = 0; i<neurons.size(); i++)
  {
    if (!neurons[i])
      continue;
 
    std::cout << "  Neuron[" << i << "] v=" << neurons[i]->getValue() << ", g=" << neurons[i]->getGradient() << std::endl;
 
    for (unsigned int j = 0; j<neurons[i]->getSizeOut(); j++)
    {
      pConnectionX& currentConnection = neurons[i]->getConnectionOut(j);
      if (!currentConnection)
        continue;
      currentConnection->printOutput();
    }
  }
}

neuron_group.h
#ifndef __SHAREWIZ_NEURON_GROUP_H__
#define __SHAREWIZ_NEURON_GROUP_H__
 
#include <memory>
#include <vector>
 
 
class NeuronGroup;
class Neuron;
 
 
typedef std::shared_ptr<NeuronGroup> pNeuronGroupX;
typedef std::vector<pNeuronGroupX> pNeuronGroup;
 
typedef std::shared_ptr<Neuron> pNeuronX;
typedef std::vector<pNeuronX> pNeuron;
 
 
 
class NeuronGroup
{
private:
  int index;
 
  pNeuron neurons;
 
public:
  NeuronGroup();
  NeuronGroup(unsigned int num_neurons);
 
  unsigned int getSize(void);  // Returns how many neurons.
 
  int getIndex(void);
  void setIndex(const int& index);
 
  void addNeuron(const pNeuronX& n);
  void removeNeuron(const int& idx);
 
  pNeuronX& getNeuron(const int& idx);
 
  //void feedForward(const pLayerX& prevLayer);
 
  void printOutput(void);
};
 
 
#endif

string.cpp
#include <string>
#include <sstream>
#include <iostream>
#include <regex>
 
#include "string.h"
 
 
// Example:
//   numberToString(69);
//
// NOTE: Because this is a template, the definition normally belongs in the header
// so that it is visible at each point of use.
template <typename T>
std::string numberToString(T pNumber)
{
	std::ostringstream oOStrStream;
	oOStrStream << pNumber;
	return oOStrStream.str();
}
 
 
 
 
// Returns all occurrences of the regex within the string.
//
// Example:
//   std::string regex = "([A-Z]+)([\\d]+)";
//   std::string ss = "aaaMAY14bbbJUNE4";
//
// Returns:
//   [0]=MAY14#
//   [1]=JUNE4#
std::vector<std::string> string_find(const std::string& s, const std::string& regex)
{
  std::vector<std::string> result;
  std::regex reg(regex);
  //std::sregex_token_iterator it(s.begin(), s.end(), reg, { 1, 2, 3, 4, 5, 6, 7, 8, 9 });
  //std::sregex_token_iterator it(s.begin(), s.end(), reg, { 1, 0 });
 
  // The 4th param selects which token(s) each iteration returns:
  //   -1 returns the non-matched parts (the text between occurrences).
  //   0 returns each full occurrence found.
  //   1 would return the 1st sub-expression of each occurrence.
  //   2 would return the 2nd sub-expression of each occurrence.
  //   3...
  std::sregex_token_iterator it(s.begin(), s.end(), reg, 0); 
  std::sregex_token_iterator reg_end;
  for (int i=0; it != reg_end; ++it, i++) 
  {
    //std::cout << "[" << i << "]=" << it->str() << "#" << std::endl;
    //std::cout << "[" << i << "]=" << *it << "#" << std::endl;
    result.push_back(*it);
  }
 
  return result;
}
 
 
// Replaces all occurrences of the regex within the string with the replacement.
//
// Parameters:
//
//   replacement:
//     The replacement string may contain references of the form $n. Every such reference will be replaced by the 
//     text captured by the n'th parenthesized pattern. 
//     n can be from 0 to 99, and $0 refers to the text matched by the whole pattern.
//            
//     This may include format specifiers and escape sequences that are replaced by the characters they represent.
//               
//     For format_default, the possible specifiers are:
//       $n n-th backreference(i.e., a copy of the n-th matched group specified with parentheses in the regex pattern).
//          n must be an integer value designating a valid backreference, greater than 0, and of two digits at most.
//       $&	A copy of the entire match
//       $`	The prefix(i.e., the part of the target sequence that precedes the match).
//       $'	The suffix(i.e., the part of the target sequence that follows the match).
//       $$ A single $ character.
//      
//   flags:
//     One or more of these constants can be combined (using the bitwise OR operator, |) to 
//     form a valid bitmask value of type regex_constants::match_flag_type:
//
//     flag	effects	notes
//     ------------------
//     match_default	Default	Default matching behavior. This constant has a value of zero**.
//     match_not_bol	Not Beginning-Of-Line	The first character is not considered a beginning of line("^" does not match).
//     match_not_eol	Not End-Of-Line	The last character is not considered an end of line("$" does not match).
//     match_not_bow	Not Beginning-Of - Word	The escape sequence "\b" does not match as a beginning-of-word.
//     match_not_eow	Not End-Of-Word	The escape sequence "\b" does not match as an end-of-word.
//     match_any	Any match	Any match is acceptable if more than one match is possible.
//     match_not_null	Not null	Empty sequences do not match.
//     match_continuous	Continuous	The expression must match a sub-sequence that begins at the first character.
//                                  Sub-sequences must begin at the first character to match.
//     match_prev_avail	Previous Available	One or more characters exist before the first one. (match_not_bol and match_not_bow are ignored).
//     format_default	Default formatting	Uses the standard formatting rules to replace matches(those used by ECMAScript's replace method).
//                                        This constant has a value of zero**.
//     format_sed	sed formatting	Uses the same rules as the sed utility in POSIX to replace matches.
//     format_no_copy	No copy	The sections in the target sequence that do not match the regular expression are not copied when replacing matches.
//     format_first_only	First only	Only the first occurrence of a regular expression is replaced.
//
//     NOTE:  ** Constants with a value of zero are ignored if some other flag is set.
//
// Example:
//   std::string s("This is a catfish");
//   std::string regex("(cat)");
//   std::string replacement("(dog)");
//
//   result = string_replace(ss, regex, "dog");
//
// Returns:
//   This is a dogfish.
//
// Example2:
//   std::string regex("([A-Za-z]+)&([A-Za-z]+)");  // Find word&word
//   std::string replacement = "$2&$1";             // Switch order.
//
//   result = string_replace(s, regex, replacement);
//
// Example3:
//   std::string s = "April 15, 2003";
//   std::string regex = "(\\w+) (\\d+), (\\d+)";
//   std::string result = string_replace(ss, regex, "$011,$3");
//
// Returns:
//   April1,2003.
//
//  NOTE:  Isolated $1 backreferences.
//         The $011 says to use $01, i.e. the 1st regex match.
//         If $11 were used, the system would try to use the 11th regex match.
//         This works because backreferences are limited to two digits (at most 99 groups).
//
// Example4:
//   result = string_replace(ss, regex, "dog", std::regex_constants::format_first_only);
std::string string_replace(const std::string& s, const std::string& regex, const std::string& replacement,
  std::regex_constants::match_flag_type flags)
{
  std::string result = s;
  std::regex reg(regex);
 
  // using string/c-string (3) version:
  result = std::regex_replace(result, reg, replacement, flags);
 
 
  /*
  // using string/c-string (3) version:
  std::cout << std::regex_replace(s3, e, "sub-$2");
 
  // using range/c-string (6) version:
  std::string result2;
  std::regex_replace(std::back_inserter(result2), s3.begin(), s3.end(), e, "$2");
  std::cout << result2;
 
  // with flags:
  std::cout << std::regex_replace(s3, e, "$1 and $2", std::regex_constants::format_no_copy);
  std::cout << std::endl;
  */
 
 
  return result;
}
 
 
// Replaces all occurrences of the regex within the string with the replacement.
//
// Parameters:
//
//   replacement:
//     The replacement string may contain references of the form $n. Every such reference will be replaced by the 
//     text captured by the n'th parenthesized pattern. 
//     n can be from 0 to 99, and $0 refers to the text matched by the whole pattern.
//            
//     This may include format specifiers and escape sequences that are replaced by the characters they represent.
//               
//     For format_default, the possible specifiers are:
//       $n n-th backreference(i.e., a copy of the n-th matched group specified with parentheses in the regex pattern).
//          n must be an integer value designating a valid backreference, greater than 0, and of two digits at most.
//       $&	A copy of the entire match
//       $`	The prefix(i.e., the part of the target sequence that precedes the match).
//       $'	The suffix(i.e., the part of the target sequence that follows the match).
//       $$ A single $ character.
//
//    retain:
//      If false, the parts of the string that do not match the regex are dropped from
//      the output (std::regex_constants::format_no_copy); only the replacements remain.
//
// Example:
//   std::string s = "  14MAY  15JUNE ";
//   result = string_replace(ss, regex, "$1 $2");
// 
// Returns:
//   std::string s = "  14 MAY  15 JUNE ";
//
// Example2:
//   result = string_replace(ss, regex, "$1 $2", std::regex_constants::format_no_copy);
//
// Returns:
//   std::string s = "14 MAY15 JUNE ";
//
// Example3:
//   result = string_replace(ss, regex, "$1 $2", false);
//
// Returns:
//   std::string s = "14 MAY15 JUNE ";
std::string string_replace(const std::string& s, const std::string& regex, const std::string& replacement,
  bool retain)
{
  if (retain)
    return string_replace(s, regex, replacement);
  else
    return string_replace(s, regex, replacement, std::regex_constants::format_no_copy);
}
 
 
// Returns true if the string matches the regex.
//
// Example:
//   string_match("aaaMAY14", "[A-Z]+\\d+");  // true
//   string_match("aaabbb", "[A-Z]+\\d+");    // false
bool string_match(const std::string& s, const std::string& regex, std::regex_constants::match_flag_type flags)
{
  std::smatch m;
  std::regex_search(s, m, std::regex(regex), flags);
 
  return !m.empty();
}
 
 
// Shows all matches of the regex within the string.
//
// Example:
//   show_matches("abcdef", "abc|def");
//   show_matches("abc", "ab|abc"); // left Alernative matched first
//
//   Match of the input against the left Alternative (a) followed by the remainder of the 
//   regex (c|bc) succeeds, with results:
//     m[1]="a" and m[4]="bc".
//   The skipped Alternatives (ab) and (c) leave their submatches
//     m[3] and m[5] empty.
//
//  show_matches("abc", "((a)|(ab))((c)|(bc))");
void show_matches(const std::string& s, const std::string& regex)
{
  std::smatch m;
  std::regex_search(s, m, std::regex(regex));
  if (m.empty()) {
    std::cout << "input=[" << s << "], regex=[" << regex << "]: NO MATCH\n";
  }
  else {
    std::cout << "input=[" << s << "], regex=[" << regex << "]: ";
    std::cout << "prefix=[" << m.prefix() << "] ";
    for (std::size_t n = 0; n < m.size(); ++n)
      std::cout << " m[" << n << "]=[" << m[n] << "] ";
    std::cout << "suffix=[" << m.suffix() << "]\n";
  }
}
 
 
 
// Splits a string into separate tokens.
//
// Example:
//   s = "0 HEAD";
//   regex = "([\\d]+)[\\s]+([A-Z]*)";
std::vector<std::string> string_tokenize(const std::string& s, const std::string& regex)
{
 
  std::vector<std::string> result;
  std::smatch m;
  std::regex_search(s, m, std::regex(regex));
  if (m.empty()) {
    return result;
  }
  else {
    //result.push_back(m.prefix());
    for (std::size_t n = 0; n < m.size(); ++n)
      result.push_back(m[n]);
    //result.push_back(m.suffix());
  }
 
  return result;
 
  /*
  std::vector<std::string> result;
  std::regex rgx(regex);
  std::sregex_token_iterator iter(s.begin(),
    s.end(),
    rgx,
    -1);
  std::sregex_token_iterator end;
  for (; iter != end; ++iter)
    result.push_back(*iter);
 
  return result;
  */
 
  /*
  std::vector<std::string> result;
  std::regex rgx(regex);
  std::sregex_token_iterator i(s.begin(), s.end(), rgx, -1);
  std::sregex_token_iterator j;
  while (i != j) {
    //std::cout << *i++ << " ";
    result.push_back(*i++);
  }
 
  return result;
  */
}

string.h
#ifndef __SHAREWIZ_STRING_H__
#define __SHAREWIZ_STRING_H__
 
#include <string>
#include <vector>
#include <regex>
// String helper functions.
 
 
template <typename T>
std::string numberToString(T pNumber);
 
std::vector<std::string> string_find(const std::string& s, const std::string& regex);
std::string string_replace(const std::string& s, const std::string& regex, const std::string& replacement,
  std::regex_constants::match_flag_type flags = std::regex_constants::match_default);
std::string string_replace(const std::string& s, const std::string& regex, const std::string& replacement,
  bool retain);
bool string_match(const std::string& s, const std::string& regex, 
  std::regex_constants::match_flag_type flags = std::regex_constants::match_default);
void show_matches(const std::string& s, const std::string& regex);
std::vector<std::string> string_tokenize(const std::string& s, const std::string& regex);
 
 
#endif

verylong.cpp
#include <iostream>
#include <cassert>
#include <cctype>   // ispunct
#include <climits>  // INT_MAX, INT_MIN
#include <cmath>
#include <limits>   // std::numeric_limits
#include <string>
 
#include "verylong.h"
 
// Class Data
const Verylong Verylong::zero = Verylong("0");
const Verylong Verylong::one = Verylong("1");
const Verylong Verylong::two = Verylong("2");
 
 
// Constructors, Destructors and Conversion operators.
 
// NOTE: A default argument on the definition only takes effect within this
// translation unit; it normally belongs on the declaration in verylong.h.
Verylong::Verylong(const std::string &value = "0")
{
	std::string s = (value == "") ? "0" : value;
 
	vlsign = (s[0] == '-') ? 1 : 0;        // check for negative sign
	if (ispunct(s[0]))                     // if the first character
		vlstr = s.substr(1, s.length() - 1); // is a punctuation mark.
	else 
		vlstr = s;
}
 
 
Verylong::Verylong(int n)
{
	if (n < 0)                           // check for sign and convert the
	{                                    // number to positive if it is negative
		vlsign = 1;
		n = (-n);
	}
	else
		vlsign = 0;
 
	if (n > 0)
	{
		while (n >= 1)                     // extract the number digit by digit and store
		{                                  // internally
			vlstr = char(n % 10 + '0') + vlstr;
			n /= 10;
		}
	}
	else
		vlstr = std::string("0");          // else the number is zero
}
 
 
Verylong::Verylong(const Verylong &x) : vlstr(x.vlstr), vlsign(x.vlsign) 
{ 
}
 
 
Verylong::~Verylong() 
{ 
}
 
 
Verylong::operator int() const
{
	int number, factor = 1;
	static Verylong max0(INT_MAX);
	static Verylong min0(INT_MIN + 1);
	std::string::const_reverse_iterator j = vlstr.rbegin();
 
	if (*this > max0)
	{
		std::cerr << "Error : Conversion Verylong->integer is not possible" << std::endl;
		return INT_MAX;
	}
	else 
	if (*this < min0)
	{
		std::cerr << "Error : Conversion Verylong->integer is not possible" << std::endl;
		return INT_MIN;
	}
 
	number = *j - '0';
 
	for (j++; j != vlstr.rend(); j++)
	{
		factor *= 10;
		number += (*j - '0') * factor;
	}
 
	if (vlsign) 
		return -number;
 
	return number;
}
 
 
Verylong::operator double() const
{
	double sum, factor = 1.0;
	std::string::const_reverse_iterator i = vlstr.rbegin();
 
	sum = double(*i) - '0';
 
	for (i++; i != vlstr.rend(); i++)
	{
		factor *= 10.0;
		sum += double(*i - '0') * factor;
	}
 
	if (vlsign) 
		return -sum;
 
	return sum;
}
 
 
Verylong::operator std::string() const
{
	if (vlstr.length() == 0) 
		return std::string("0");
 
	return vlstr;
}
 
 
 
// Various member operators
 
const Verylong & Verylong::operator = (const Verylong &rhs)
{
	if (this == &rhs) 
		return *this;
 
	vlstr = rhs.vlstr;
	vlsign = rhs.vlsign;
 
	return *this;
}
 
 
// Unary - operator
Verylong Verylong::operator -() const
{
	Verylong temp(*this);
 
	if (temp != zero)  
		temp.vlsign = !vlsign;
 
	return temp;
}
 
 
// Prefix increment operator
Verylong Verylong::operator ++ ()
{
	return *this = *this + one;
}
 
 
// Postfix increment operator
Verylong Verylong::operator ++ (int)
{
	Verylong result(*this);
 
	*this = *this + one;
	return result;
}
 
 
// Prefix decrement operator
Verylong Verylong::operator -- ()
{
	return *this = *this - one;
}
 
 
// Postfix decrement operator
Verylong Verylong::operator -- (int)
{
	Verylong result(*this);
 
	*this = *this - one;
	return result;
}
 
 
Verylong Verylong::operator += (const Verylong &v)
{
	return *this = *this + v;
}
 
 
Verylong Verylong::operator -= (const Verylong &v)
{
	return *this = *this - v;
}
 
 
Verylong Verylong::operator *= (const Verylong &v)
{
	return *this = *this * v;
}
 
 
Verylong Verylong::operator /= (const Verylong &v)
{
	return *this = *this / v;
}
 
 
Verylong Verylong::operator %= (const Verylong &v)
{
	return *this = *this % v;
}
 
 
Verylong Verylong::operator ^= (const Verylong &degree)
{
	Verylong N(degree);
	Verylong Y("1");
 
	if (N == Verylong::zero)
		return Verylong::one;
 
	if (N < Verylong::zero)
		return Verylong::zero;
 
	while (1)
	{
		if (N == Verylong::zero)
		{
			*this = Y;
			break;
		}
 
		Y = Y * *this;
		N = N - Verylong::one;
	}
 
	return *this;
}
 
 
 
// Various friendship operators and functions.
 
Verylong operator + (const Verylong &u, const Verylong &v)
{
	char digitsum, d1, d2, carry = 0;
	std::string temp;
	std::string::const_reverse_iterator j, k;
 
	if (u.vlsign ^ v.vlsign)
	{
		if (u.vlsign == 0) 
			return u - abs(v);
		else               
			return v - abs(u);
	}
 
	for (j = u.vlstr.rbegin(), k = v.vlstr.rbegin();
		j != u.vlstr.rend() || k != v.vlstr.rend();)
	{
		d1 = (j == u.vlstr.rend()) ? 0 : *(j++) - '0'; // get digit
		d2 = (k == v.vlstr.rend()) ? 0 : *(k++) - '0'; // get digit
		digitsum = d1 + d2 + carry;                    // add digits
 
		carry = (digitsum >= 10) ? 1 : 0;
		digitsum -= 10 * carry;
 
		temp = char(digitsum + '0') + temp;
	}
 
	if (carry) // if carry at end, last digit is 1
		temp = '1' + temp;  
 
	if (u.vlsign) 
		temp = '-' + temp;
 
	return Verylong(temp);
}
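 
// Example (hedged carry-propagation sketch; illustrative driver):
//
//   Verylong a("99999999999999999999");  // 20 nines, beyond 64-bit range
//   Verylong b("1");
//   std::cout << a + b << std::endl;     // expected: 100000000000000000000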
 
 
Verylong operator - (const Verylong &u, const Verylong &v)
{
	char d, d1, d2, borrow = 0;
	int negative;
	std::string temp, temp2;
	std::string::reverse_iterator i, j;
 
	if (u.vlsign ^ v.vlsign)
	{
		if (u.vlsign == 0) 
			return u + abs(v);
		else
			return -(v + abs(u));
	}
 
	Verylong w, y;
 
	if (u.vlsign == 0)                   // both u,v are positive
	{
		if (u < v)
		{
			w = v;
			y = u;
			negative = 1;
		}
		else
		{
			w = u;
			y = v;
			negative = 0;
		}
	}
	else                                 // both u,v are negative
	{
		if (u < v)
		{
			w = u;
			y = v;
			negative = 1;
		}
		else
		{
			w = v;
			y = u;
			negative = 0;
		}
	}
 
	for (i = w.vlstr.rbegin(), j = y.vlstr.rbegin();
		i != w.vlstr.rend() || j != y.vlstr.rend();)
	{
		d1 = (i == w.vlstr.rend()) ? 0 : *(i++) - '0';
		d2 = (j == y.vlstr.rend()) ? 0 : *(j++) - '0';
 
		d = d1 - d2 - borrow;
		borrow = (d < 0) ? 1 : 0;
		d += 10 * borrow;
 
		temp = char(d + '0') + temp;
	}
 
	while (temp[0] == '0') 
		temp = temp.substr(1);
 
	if (negative) 
		temp = '-' + temp;
 
	return Verylong(temp);
}
 
 
Verylong operator * (const Verylong &u, const Verylong &v)
{
	Verylong pprod("1"), tempsum("0");
	std::string::const_reverse_iterator r = v.vlstr.rbegin();
 
	for (int j = 0; r != v.vlstr.rend(); j++, r++)
	{
		int digit = *r - '0';              // extract a digit
 
		pprod = u.multdigit(digit);        // multiplied by the digit
		pprod = pprod.mult10(j);           // "adds" suitable zeros behind
		tempsum = tempsum + pprod;         // result added to tempsum
	}
 
	tempsum.vlsign = u.vlsign^v.vlsign;  // to determine sign
	return tempsum;
}
 
 
//  This algorithm is the long division algorithm.
Verylong operator / (const Verylong &u, const Verylong &v)
{
	int len = u.vlstr.length() - v.vlstr.length();
	std::string temp;
	Verylong w, y, b, c, d, quotient = Verylong::zero;
 
	if (v == Verylong::zero)
	{
		std::cerr << "Error : division by zero" << std::endl;
		return Verylong::zero;
	}
 
	w = abs(u); 
	y = abs(v);
 
	if (w < y) 
		return Verylong::zero;
 
	c = Verylong(w.vlstr.substr(0, w.vlstr.length() - len));
 
	for (int i = 0; i <= len; i++)
	{
		quotient = quotient.mult10(1);
 
		b = d = Verylong::zero;            // initialize b and d to 0
 
		while (b < c)
		{
			b = b + y; d = d + Verylong::one;
		}
 
		if (c < b)                           // if b>c, then
		{                                    // we have added one count too many 
			b = b - y;
			d = d - Verylong::one;
		}
 
		quotient = quotient + d;             // add to the quotient
 
		if (i < len)
		{
			// partial remainder * 10 and add to next digit
			c = (c - b).mult10(1);
			c += Verylong(w.vlstr[w.vlstr.length() - len + i] - '0');
		}
	}
 
	quotient.vlsign = u.vlsign^v.vlsign;   // to determine sign
 
	return quotient;
}
 
 
Verylong operator % (const Verylong &u, const Verylong &v)
{
	return (u - v*(u / v));
}
 
 
Verylong operator ^ (const Verylong &u, const Verylong &v)
{
	//return (u - v*(u / v));
 
	Verylong temp(u);
 
	return temp ^= v;
}
 
 
 
int operator == (const Verylong &u, const Verylong &v)
{
	return (u.vlsign == v.vlsign && u.vlstr == v.vlstr);
}
 
 
int operator != (const Verylong &u, const Verylong &v)
{
	return !(u == v);
}
 
 
int operator < (const Verylong &u, const Verylong &v)
{
	if (u.vlsign < v.vlsign) 
		return 0;
	else 
	if (u.vlsign > v.vlsign) 
		return 1;
 
	// exclusive or (^) to determine sign
	if (u.vlstr.length() < v.vlstr.length()) 
		return (1 ^ u.vlsign);
	else 
	if (u.vlstr.length() > v.vlstr.length()) 
		return (0 ^ u.vlsign);
 
	return (u.vlstr < v.vlstr && !u.vlsign) ||
		(u.vlstr > v.vlstr && u.vlsign);
}
 
 
int operator <= (const Verylong &u, const Verylong &v)
{
	return (u<v || u == v);
}
 
 
int operator >(const Verylong &u, const Verylong &v)
{
	return (!(u<v) && u != v);
}
 
 
int operator >= (const Verylong &u, const Verylong &v)
{
	return (u>v || u == v);
}
 
 
// Calculate the absolute value of a number
Verylong abs(const Verylong &v)
{
	Verylong u(v);
 
	if (u.vlsign) 
		u.vlsign = 0;
 
	return u;
}
 
// Calculate the integer square root of a number
//    based on the formula (a+b)^2 = a^2 + 2ab + b^2
Verylong sqrt(const Verylong &v)
{
	// if v is negative, error is reported
	if (v.vlsign) 
	{ 
		std::cerr << "NaN" << std::endl; 
		return Verylong::zero; 
	}
 
	int j, k = v.vlstr.length() + 1, num = k >> 1;
	Verylong y, z, sum, tempsum, digitsum;
 
	std::string temp, w(v.vlstr);
 
	k = 0;
	j = 1;
 
	// segment the number 2 digits by 2 digits
	if (v.vlstr.length() % 2) 
		digitsum = Verylong(w[k++] - '0');
	else
	{
		digitsum = Verylong((w[k] - '0') * 10 + w[k + 1] - '0');
		k += 2;
	}
 
	// find the first digit of the integer square root
	sum = z = Verylong(int(sqrt(double(digitsum))));
 
	// store partial result
	temp = char(int(z) + '0');
	digitsum = digitsum - z*z;
 
	for (; j<num; j++)
	{
		// get next digit from the number
		digitsum = digitsum.mult10(1) + Verylong(w[k++] - '0');
		y = z + z;        // 2*a
		z = digitsum / y;
		tempsum = digitsum.mult10(1) + Verylong(w[k++] - '0');
		digitsum = -y*z.mult10(1) + tempsum - z*z;
 
		// decrease z by 1 and re-calculate when it is over-estimated.
		while (digitsum < Verylong::zero)
		{
			--z;
			digitsum = -y*z.mult10(1) + tempsum - z*z;
		}
 
		temp = temp + char(int(z) + '0');  // store partial result
		z = sum = sum.mult10(1) + z;       // update value of the partial result
	}
 
	Verylong result(temp);
 
	return result;
}
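 
// Example (hedged usage sketch): 12345 * 12345 = 152399025, so:
//
//   std::cout << sqrt(Verylong("152399025")) << std::endl;  // expected: 12345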
 
 
// Raise a number X to a power of degree
Verylong pow(const Verylong &X, const Verylong &degree)
{
	Verylong N(degree), Y("1"), x(X);
 
	if (N == Verylong::zero) 
		return Verylong::one;
 
	if (N < Verylong::zero) 
		return Verylong::zero;
 
	while (1)
	{
		if (N%Verylong::two != Verylong::zero)
		{
			Y = Y * x;
			N = N / Verylong::two;
			if (N == Verylong::zero) 
				return Y;
		}
		else  
			N = N / Verylong::two;
 
		x = x * x;
	}
}
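 
// Unlike operator ^=, which multiplies degree times, pow() halves N each pass
// (square-and-multiply), so it needs only O(log degree) Verylong multiplications.
//
// Example (hedged usage sketch):
//
//   std::cout << pow(Verylong("2"), Verylong("100")) << std::endl;
//   // expected: 1267650600228229401496703205376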
 
 
// Double division function
double div(const Verylong &u, const Verylong &v)
{
	double qq = 0.0, qqscale = 1.0;
	Verylong w, y, b, c;
	int d, count,
		decno = std::numeric_limits<double>::digits; // number of significant (binary) digits in a double
 
	if (v == Verylong::zero)
	{
		std::cerr << "ERROR : Division by zero" << std::endl;
		return 0.0;
	}
 
	if (u == Verylong::zero) 
		return 0.0;
 
	w = abs(u); 
	y = abs(v);
 
	while (w<y) 
	{ 
		w = w.mult10(1); 
		qqscale *= 0.1; 
	}
 
	int len = w.vlstr.length() - y.vlstr.length();
	std::string temp = w.vlstr.substr(0, w.vlstr.length() - len);
 
	c = Verylong(temp);
 
	for (int i = 0; i <= len; i++)
	{
		qq *= 10.0;
 
		b = Verylong::zero; d = 0;         // initialize b and d to 0
 
		while (b < c)
		{
			b += y; d += 1;
		}
 
		if (c < b)                         // if b>c, then
		{                                  // we have added one count too many
			b -= y; 
			d -= 1;
		}                                
 
		qq += double(d);                   // add to the quotient
 
		c = (c - b).mult10(1);             // the partial remainder * 10
 
		if (i < len)                       // and add to next digit
			c += Verylong(w.vlstr[w.vlstr.length() - len + i] - '0');
	}
 
	qq *= qqscale; count = 0;
 
	while (c != Verylong::zero && count < decno)
	{
		qqscale *= 0.1;
 
		b = Verylong::zero; d = 0;         // initialize b and d to 0
 
		while (b < c)
		{
			b += y; d += 1;
		}
 
		if (c < b)                         // if b>c, then
		{                                  // we have added one count too many
			b -= y; d -= 1;
		}           
 
		qq += double(d)*qqscale;
 
		c = (c - b).mult10(1);
		count++;
	}
 
	if (u.vlsign^v.vlsign)               // check for the sign
		qq *= (-1.0); 
 
	return qq;
}
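 
// Example (hedged usage sketch): div() returns a double approximation rather than
// a truncated Verylong, so fractional quotients are representable:
//
//   std::cout << div(Verylong("1"), Verylong("3")) << std::endl;  // ~0.333333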
 
 
std::ostream & operator << (std::ostream &s, const Verylong &v)
{
	if (v.vlstr.length() > 0)
	{
		if (v.vlsign) s << "-";
		s << v.vlstr;
	}
	else 
		s << "0";
 
	return s;
}
 
 
std::istream & operator >> (std::istream &s, Verylong &v)
{
	std::string temp(10000, ' ');
 
	s >> temp;
	v = Verylong(temp);
 
	return s;
}
 
 
//
// Private member functions: multdigit(), mult10().
//
 
// Multiply this Verylong number by num
Verylong Verylong::multdigit(int num) const
{
	int carry = 0;
	std::string::const_reverse_iterator r;
 
	if (num)
	{
		std::string temp;
 
		for (r = vlstr.rbegin(); r != vlstr.rend(); r++)
		{
			int d1 = *r - '0',               // get digit and multiplied by
				digitprod = d1*num + carry;    // that digit plus carry
 
			if (digitprod >= 10)             // if there's a new carry,
			{
				carry = digitprod / 10;        // carry is high digit
				digitprod -= carry * 10;       // result is low digit
			}
			else 
				carry = 0;                     // otherwise carry is 0
 
			temp = char(digitprod + '0') + temp;   // insert char in string
		}
 
		if (carry) //if carry at end,
			temp = char(carry + '0') + temp; 
 
		Verylong result(temp);
		return result;
	}
	else 
		return zero;
}
 
 
// Multiply this Verylong number by 10*num
Verylong Verylong::mult10(int num) const
{
	int j;
 
	if (*this != zero)
	{
		std::string temp;
 
		for (j = 0; j<num; j++) 
			temp = temp + '0';
 
		Verylong result(vlstr + temp);
 
		if (vlsign) 
			result = -result;
 
		return result;
	}
	else 
		return zero;
}
 
 
//template <> Verylong zero(Verylong) { return Verylong::zero; }
//template <> Verylong one(Verylong) { return Verylong::one; }

verylong.h
#ifndef __SHAREWIZ_VERYLONG_H__
#define __SHAREWIZ_VERYLONG_H__
 
#include <string>
#include <iosfwd>  // std::ostream / std::istream declarations for the stream operators
 
// Very Long Integer Class
 
 
class Verylong
{
private:
	// Data Fields
	std::string vlstr;     // The digit string, most significant digit first.
	int vlsign;            // Sign of Verylong: + => 0; - => 1
 
	// Private member functions
	Verylong multdigit(int) const;
	Verylong mult10(int) const;
 
public:
	// Constructors and destructor
	Verylong(const std::string&);
	Verylong(int);
	Verylong(const Verylong &);
	~Verylong();
 
	// Conversion operators
	operator int() const;
	operator double() const;
	operator std::string () const;
 
	// Arithmetic operators and Relational operators
	const Verylong & operator = (const Verylong &);  // assignment operator
	Verylong operator - () const;     // negate  operator
	Verylong operator ++ ();          // prefix  increment operator
	Verylong operator ++ (int);       // postfix increment operator
	Verylong operator -- ();          // prefix  decrement operator
	Verylong operator -- (int);       // postfix decrement operator
 
	Verylong operator += (const Verylong &);
	Verylong operator -= (const Verylong &);
	Verylong operator *= (const Verylong &);
	Verylong operator /= (const Verylong &);
	Verylong operator %= (const Verylong &);
	Verylong operator ^= (const Verylong &);
 
	friend Verylong operator + (const Verylong &, const Verylong &);
	friend Verylong operator - (const Verylong &, const Verylong &);
	friend Verylong operator * (const Verylong &, const Verylong &);
	friend Verylong operator / (const Verylong &, const Verylong &);
	friend Verylong operator % (const Verylong &, const Verylong &);
	friend Verylong operator ^ (const Verylong &, const Verylong &);
 
	friend int operator == (const Verylong &, const Verylong &);
	friend int operator != (const Verylong &, const Verylong &);
	friend int operator <  (const Verylong &, const Verylong &);
	friend int operator <= (const Verylong &, const Verylong &);
	friend int operator >  (const Verylong &, const Verylong &);
	friend int operator >= (const Verylong &, const Verylong &);
 
	// Other functions
	friend Verylong abs(const Verylong &);
	friend Verylong sqrt(const Verylong &);
	friend Verylong pow(const Verylong &, const Verylong &);
	friend double div(const Verylong &, const Verylong &);
 
	// Class Data
	static const Verylong zero;
	static const Verylong one;
	static const Verylong two;
 
	// I/O stream functions
	friend std::ostream & operator << (std::ostream &, const Verylong &);
	friend std::istream & operator >> (std::istream &, Verylong &);
};
 
 
//template <> Verylong zero(Verylong) { return Verylong::zero; }
//template <> Verylong one(Verylong) { return Verylong::one; }
 
 
#endif