// layer.cpp

#include <iostream>
#include <cassert>
 
#include "layer.h"
#include "neuron.h"
#include "connection.h"
#include "activation.h"
 
 
// Default constructor.
// The layer starts unindexed (-1) with a zero global bias, and reserves
// capacity for a typical neuron count up front to avoid early reallocations.
Layer::Layer()
{
	index = -1;
	global_bias = 0;

	neurons.reserve(10);
}
 
 
// Constructs a layer pre-populated with num_neurons neurons.
// Each neuron is given a sequential index (0..num_neurons-1); the layer's
// own index stays unassigned (-1) and the global bias starts at zero.
Layer::Layer(unsigned int num_neurons)
{
	index = -1;
	global_bias = 0.0;

	neurons.reserve(num_neurons);

	for (unsigned int n = 0; n < num_neurons; ++n)
	{
		pNeuronX neuron(new Neuron());
		neuron->setIndex(n);
		neurons.push_back(neuron);
	}
}
 
 
int Layer::getIndex(void)
{
	return index;
}
 
 
// Assigns this layer's index within the network.
void Layer::setIndex(const int& index)
{
	this->index = index;
}
 
 
double Layer::getGlobalBias(void)
{
  return global_bias;
}
 
 
// Sets the bias term added to every neuron's weighted sum in feedForward.
void Layer::setGlobalBias(const double& _global_bias)
{
	this->global_bias = _global_bias;
}
 
 
// Returns the number of neurons currently in this layer.
unsigned int Layer::getSize(void)
{
	// vector::size() returns size_t; cast explicitly to the declared
	// return type instead of relying on an implicit narrowing conversion.
	return static_cast<unsigned int>(neurons.size());
}
 
 
// Appends a neuron (shared handle) to this layer.
void Layer::addNeuron(const pNeuronX& n)
{
	neurons.emplace_back(n);
}
 
 
// Removes the first neuron whose getIndex() equals idx, if any.
// NOTE: idx is matched against the neuron's own index attribute, not its
// position in the vector. A non-matching idx is a harmless no-op.
void Layer::removeNeuron(const int& idx)
{
	// The original loop started at neurons.size()-1 (which underflows to a
	// huge unsigned value when the layer is empty, causing an out-of-bounds
	// access) and stopped at i > 0, so the neuron at position 0 was never
	// examined. Iterate forward over every element instead, skipping nulls.
	for (unsigned int i = 0; i < neurons.size(); i++)
	{
		if (neurons[i] && neurons[i]->getIndex() == idx)
		{
			neurons.erase(neurons.begin() + i);
			return;
		}
	}
}
 
 
// Returns a reference to the neuron stored at position idx.
// Precondition: 0 <= idx < neurons.size().
pNeuronX &Layer::getNeuron(const int& idx)
{
	// The original check (neurons.size() >= idx) allowed idx == size() —
	// one past the end — and mixed signed/unsigned so a negative idx could
	// also slip through after conversion. Check exact bounds instead.
	assert(idx >= 0 && static_cast<size_t>(idx) < neurons.size());

	return neurons[idx];
}
 
 
void Layer::feedForward(const pLayerX& prevLayer)
{
	/*
	// INPUT -> HIDDEN
	for(y=0; y<hidden_array_size; y++) {
	for(x=0; x<input_array_size; x++) {
	temp += (input[pattern][x] * weight_i_h[x][y]);
	}
	hidden[y] = (1.0 / (1.0 + exp(-1.0 * (temp + bias[y]))));
	temp = 0;
	}
 
	// HIDDEN -> OUTPUT
	for(y=0; y<output_array_size; y++) {
	for(x=0; x<hidden_array_size; x++) {
	temp += (hidden[x] * weight_h_o[x][y]);
	}
	output[pattern][y] = (1.0 / (1.0 + exp(-1.0 * (temp + bias[y + hidden_array_size]))));
	temp = 0;
	}
	return;
	}
	*/
 
	for (unsigned int i = 0; i<getSize(); i++) // How many Neurons in current layer.
	{
		// Weight sum of the previous layer's output values.
		double weightedSum = 0;
		//weightedSum += .5; // Add a 1 to act as a bias.
		//weightedSum += 1.0; // Add a 1 to act as a bias.
    weightedSum += global_bias;  // Add an amount to act as a bias.
 
		pNeuronX& currentNeuron = neurons[i];
		if (!currentNeuron)
			continue;
		unsigned int currentIndex = currentNeuron->getIndex();
 
		for (unsigned int j = 0; j<prevLayer->getSize(); j++) // How many Neurons in prev layer.
		{
			pNeuronX& prevNeuron = prevLayer->getNeuron(j);
			if (!prevNeuron)
				continue;
			//std::cout << "J=" << j << std::endl;
			for (unsigned int k = 0; k<prevNeuron->getSizeOut(); k++)
			{
				if (!prevNeuron->getConnectionOut(k))
					continue;
				if (!prevNeuron->getConnectionOut(k)->getTo())
					continue;
 
				// We are only interested in connections going into the currentNeuron.
				unsigned int prevIndex = prevNeuron->getConnectionOut(k)->getTo()->getIndex();
 
				//if (prevNeuron.getConnectionOut(k).getTo() == currentNeuron)
				if (prevIndex == currentIndex)
				{
					weightedSum += prevLayer->getNeuron(j)->getValue()*prevLayer->getNeuron(j)->getConnectionOut(k)->getWeight();
				}
			}
		}
 
		// Add in Bias?
		//weightedSum += .5; // Add a 1 to act as a bias. 
 
		//std::cout << "*************" << std::endl;
    if (currentNeuron)
    {
      pActivationX act = currentNeuron->getActivation();
      if (!act)
        continue;
 
      // Sigmoid function.  Activation function is applied to this intermediate value to yield the local value of the neuron.
      //currentNeuron->setValue(currentNeuron->sigmoid(weightedSum));
      //currentNeuron->setValue(act.activate(weightedSum));
      //currentNeuron->setValue(currentNeuron->getActivation()->activate(weightedSum));
      currentNeuron->setValue(act->activate(weightedSum));
 
      //std::cout << "------------------" << std::endl;      
    }
	}
 
	//  std::cout << "++++++++++++++++" << std::endl;  
}
 
 
void Layer::printOutput(void)
{
	std::cout << "Layer " << index << " has " << neurons.size() << " Neurons" << std::endl;
 
	for (unsigned int i = 0; i<neurons.size(); i++)
	{
		if (!neurons[i])
			continue;
 
		std::cout << "  Neuron[" << i << "] v=" << neurons[i]->getValue() << ", g=" << neurons[i]->getGradient() << std::endl;
 
		for (unsigned int j = 0; j<neurons[i]->getSizeOut(); j++)
		{
			pConnectionX& currentConnection = neurons[i]->getConnectionOut(j);
			if (!currentConnection)
				continue;
			currentConnection->printOutput();
		}
	}
}