Tuesday, August 18, 2009

A Novel Generic Multi-Layer Neural Network :: Part 1: The Neuron Class

A Novel Generic Multi-Layer Neural Network
Part 1: The Neuron Class


This post presents the neuron model that I created for an NN (neural network) program I developed to solve general numeric problems (prediction, estimation, interpolation, extrapolation, etc.).

/// <summary>
/// A single neuron in a multi-layer network: holds a bias, one weight per input,
/// and a pluggable activation function. Weights and bias are nudged heuristically
/// by <see cref="ChangeWeights"/> based on whether the last error improved.
/// </summary>
/// <remarks>
/// NOTE(review): the original listing was corrupted by HTML extraction — the loop
/// headers in ComputeOutput/ChangeWeights and one comparison were stripped. The
/// control flow below is a reconstruction from the surviving structure; confirm
/// against the author's original source if available.
/// Depends on project types <c>Delegates.ActivationFunction</c> and <c>GlobalModule</c>
/// (random-value helpers), which are defined elsewhere.
/// </remarks>
public class Neuron
{
    private double __lastOutput;        // output of the most recent ComputeOutput call
    private double __lastError;         // error from the previous ChangeWeights call
    private double __lastBias;          // bias value before the most recent adjustment
    private double[] __lastWeights;     // weight values before the most recent adjustment
    private int __siblingCount;         // number of neurons in this neuron's layer
    private int __layerInputCount;      // total inputs feeding the layer (inputs * siblings)

    #region Properties

    private Delegates.ActivationFunction _ActivationFunction;
    /// <summary>The activation function applied to the weighted input sum.</summary>
    public Delegates.ActivationFunction ActivationFunction
    { get { return _ActivationFunction; } }

    private int _InputCount;
    /// <summary>Number of inputs (and therefore weights) of this neuron.</summary>
    public int InputCount
    { get { return _InputCount; } }

    private double _Bias;
    /// <summary>Current bias term, added to the weighted input sum.</summary>
    public double Bias
    { get { return _Bias; } }

    private double[] _Weights;
    /// <summary>Current per-input weights. Length equals <see cref="InputCount"/>.</summary>
    public double[] Weights
    { get { return _Weights; } }

    #endregion

    #region Constructor

    /// <summary>
    /// Creates a neuron with the given activation function and input arity.
    /// </summary>
    /// <param name="activationFunction">Function mapping total input to output.</param>
    /// <param name="inputCount">Number of inputs this neuron receives.</param>
    /// <param name="initialBias">Starting bias, or null to pick a random one.</param>
    /// <param name="initialWeights">Starting weights, or null to pick random ones.
    /// When supplied, its length must equal <paramref name="inputCount"/>.</param>
    /// <param name="siblingCount">Number of neurons in this neuron's layer; used to
    /// scale weight adjustments in <see cref="ChangeWeights"/>.</param>
    /// <exception cref="ArgumentException">Thrown when <paramref name="initialWeights"/>
    /// is non-null but its length differs from <paramref name="inputCount"/>.</exception>
    public Neuron(Delegates.ActivationFunction activationFunction, int inputCount, double? initialBias, double[] initialWeights, int siblingCount)
    {
        _ActivationFunction = activationFunction;
        _InputCount = inputCount;
        __siblingCount = siblingCount;
        __layerInputCount = _InputCount * __siblingCount;
        __lastError = 0.0;

        if (initialBias == null)
            _Bias = GlobalModule.GetRandomValue();
        else
            _Bias = initialBias.Value;
        __lastBias = _Bias;

        // BUGFIX: the original called CopyTo on unallocated arrays (NullReferenceException)
        // and left __lastWeights null when random weights were generated.
        _Weights = new double[inputCount];
        __lastWeights = new double[inputCount];

        if (initialWeights == null)
        {
            _Weights = GlobalModule.GetRandomValue(inputCount);
            _Weights.CopyTo(__lastWeights, 0);
        }
        else if (initialWeights.Length == inputCount)
        {
            initialWeights.CopyTo(_Weights, 0);
            initialWeights.CopyTo(__lastWeights, 0);
        }
        else
        {
            // More specific than the original bare Exception; still caught by
            // any caller catching Exception.
            throw new ArgumentException("Invalid number of initial weights!", "initialWeights");
        }
    }

    #endregion

    #region Public Methods

    /// <summary>
    /// Computes this neuron's output: activation(bias + sum(weight[i] * input[i])).
    /// The result is also cached internally as the last output.
    /// </summary>
    /// <param name="input">Input vector; must have at least <see cref="InputCount"/> elements.</param>
    /// <returns>The activated output value.</returns>
    public double ComputeOutput(double[] input)
    {
        double totalInput = _Bias;
        // Reconstructed loop header (original was garbled to "i <>").
        for (int i = 0; i < _InputCount; i++)
            totalInput += _Weights[i] * input[i];

        __lastOutput = _ActivationFunction.Invoke(totalInput);
        return __lastOutput;
    }

    /// <summary>
    /// Heuristically adjusts the weights (and the bias) in proportion to the
    /// given error, distributing a rough share of it across the layer's inputs.
    /// If the error got worse since the last call, the adjustment direction flips.
    /// </summary>
    /// <param name="error">The current error signal for this neuron.</param>
    public void ChangeWeights(double error) // it also modifies the bias
    {
        double roughSharePlus = Math.Abs(error) / __layerInputCount;
        double roughShareMinus = -roughSharePlus;
        double curBias = _Bias;

        if (Math.Abs(error) > Math.Abs(__lastError)) // worse (weak result)
        {
            roughSharePlus = -roughSharePlus;
            roughShareMinus = -roughShareMinus;
        }

        // NOTE(review): reconstructed from a garbled listing. The surviving text
        // shows a loop over the weights with a comparison of __lastWeights[i]
        // against _Weights[i] choosing the adjustment direction; the 'else'
        // branch (roughSharePlus) is inferred by symmetry — confirm if possible.
        for (int i = 0; i < _InputCount; i++)
        {
            double curWeight = _Weights[i];
            if (__lastWeights[i] > _Weights[i])
                _Weights[i] += roughShareMinus * GlobalModule.GetRandomValue();
            else
                _Weights[i] += roughSharePlus * GlobalModule.GetRandomValue();
            __lastWeights[i] = curWeight;
        } // next i

        // Original condition was garbled ("<>"); interpreted as inequality.
        if (__lastBias != curBias)
            _Bias += roughShareMinus * GlobalModule.GetRandomValue() * (__lastBias - curBias);
        __lastBias = curBias;

        // BUGFIX: the original never updated __lastError, so the improved/worse
        // test above always compared against the constructor's 0.0.
        __lastError = error;
    }

    /// <summary>
    /// Discards the current weights and bias and replaces them with fresh
    /// random values (same arity as before).
    /// </summary>
    public void ReinitializeWeights() // it also reinitialize the bias
    {
        _Weights = GlobalModule.GetRandomValue(_InputCount);
        _Bias = GlobalModule.GetRandomValue();
    }

    #endregion

} // end of class Neuron

No comments:

Post a Comment