26 activationFunction = LINEAR;
30 previousBiasUpdate = 0;
35 bool Neuron::init(
const UINT numInputs,
const UINT activationFunction){
37 if( !validateActivationFunction(activationFunction) ){
41 this->numInputs = numInputs;
42 this->activationFunction = activationFunction;
44 weights.resize(numInputs);
45 previousUpdate.resize(numInputs);
49 random.setSeed( (
unsigned long long)time(NULL) );
53 for(
unsigned int i=0; i<numInputs; i++){
54 weights[i] = random.getRandomNumberUniform(-0.1,0.1);
55 previousUpdate[i] = 0;
59 bias = random.getRandomNumberUniform(-0.1,0.1);
67 previousBiasUpdate = 0;
69 previousUpdate.clear();
72 double Neuron::fire(
const VectorDouble &x){
76 switch( activationFunction ){
79 for(UINT i=0; i<numInputs; i++){
80 y += x[i] * weights[i];
85 for(UINT i=0; i<numInputs; i++){
86 y += x[i] * weights[i];
90 if( y < -45.0 ){ y = 0; }
91 else if( y > 45.0 ){ y = 1.0; }
93 y = 1.0/(1.0+exp(-y));
96 case(BIPOLAR_SIGMOID):
98 for(UINT i=0; i<numInputs; i++){
99 y += x[i] * weights[i];
102 if( y < -45.0 ){ y = 0; }
103 else if( y > 45.0 ){ y = 1.0; }
105 y = (2.0 / (1.0 + exp(-gamma * y))) - 1.0;
113 double Neuron::getDerivative(
const double &y){
116 switch( activationFunction ){
123 case(BIPOLAR_SIGMOID):
124 yy = (gamma * (1.0 - (y*y))) / 2.0;
130 bool Neuron::validateActivationFunction(
const UINT actvationFunction){
131 if( actvationFunction >= LINEAR && actvationFunction < NUMBER_OF_ACTIVATION_FUNCTIONS )
return true;
// This class implements a Neuron that is used by the Multilayer Perceptron.