import java.util.Arrays;
import java.util.ArrayList;

/**
 * Base Neuron class for use in Neural Networks.
 * A model of a basic Perceptron with a sigmoidal activation function
 * which returns a value between 0 and 1, with a steep activation curve
 * biasing towards the extremes.
 *
 * @author Jeff Chen
 * @version %I%,%G%
 */
public class Neuron{

    private double[] weights;
    private double output;
    private int numInputs;
    private int index;
    private ArrayList<Neuron> parents=new ArrayList<Neuron>();
    private ArrayList<Neuron> children=new ArrayList<Neuron>();

    public static final int RANDOM_WEIGHTS=1;
    public static final int POSITIVE_WEIGHTS=2;
    public static final int NORMAL_WEIGHTS=3;
    public static final int ZERO_WEIGHTS=4;

    private double lastDelta=0;
    private double[] lastWChange;
    private double moment=0.5;
    private double rate=0.8;

    /**
     * Constructs the Neuron with default bias weights of 1.0.
     * Note that the Neuron has one weight for each input, as well as an
     * extra free bias weight.
     *
     * @param numInputs the number of inputs the Neuron will receive.
     */
    public Neuron(int numInputs){
        this.numInputs=numInputs;
        weights=new double[numInputs+1];
        Arrays.fill(weights,1.0);
    }

    /**
     * Constructs the Neuron with the specified bias weights.
     * Note that the Neuron has one weight for each input, as well as an
     * extra free bias weight.
     *
     * @param weights the weights for the neurons
     */
    public Neuron(double[] weights){
        this.weights=weights;
        this.numInputs=weights.length-1;
    }

    /**
     * Adds a child neuron.
     *
     * Children are added in numerical order.
     *
     * @param n the child neuron to be added
     */
    public void addChild(Neuron n){
        children.add(n);
    }

    /**
     * Adds a parent to the list of parents.
     *
     * Parents are added in numerical order.
     *
     * @param n the parent neuron to be added
     */
    public void addParent(Neuron n){
        parents.add(n);
        weights=new double[parents.size()+1];
        lastWChange=new double[parents.size()+1];
    }

    /**
     * Trains the OUTPUT neuron with the specified Case and error.
     *
     * @param a the Case to train the neuron on
     * @param e the error
     */
    public void backprop(Case a, double e){
        lastDelta=e*output*(1-output);
        // Assumed update rule: delta rule with momentum, using the parents' cached
        // outputs as this neuron's inputs (the Case argument's API is not shown in
        // this file). The last weight is the free bias weight, whose input is 1.
        for(int n=0;n<weights.length-1;n++){
            lastWChange[n]=rate*lastDelta*parents.get(n).getOutput()+moment*lastWChange[n];
            weights[n]+=lastWChange[n];
        }
        lastWChange[weights.length-1]=rate*lastDelta+moment*lastWChange[weights.length-1];
        weights[weights.length-1]+=lastWChange[weights.length-1];
    }
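    // Sketch of the weight update performed by backprop() above, written out as an
    // assumption consistent with the fields of this class rather than as the
    // author's exact formula: with learning rate "rate" and momentum "moment",
    //
    //     delta      = e * output * (1 - output)
    //     dw[n]      = rate * delta * input[n] + moment * dwPrev[n]
    //     weights[n] = weights[n] + dw[n]
    //
    // where input[n] is the n-th parent's output and the final weight acts as the
    // bias with a fixed input of 1.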

    /**
     * Returns the children of this Neuron.
     *
     * @return the children of this Neuron
     */
    public ArrayList<Neuron> getChildren(){
        return children;
    }

    /**
     * Returns the position of the Neuron in its layer.
     *
     * @return the position of the Neuron in its layer, if applicable. If not applicable, returns -1.
     */
    public int getIndex(){
        return index;
    }

    /**
     * Returns the number of inputs for this Neuron.
     *
     * @return number of inputs
     */
    public int getNumInputs(){
        return this.weights.length-1;
    }

    /**
     * Returns the output of the Neuron.
     * <p>
     * Note: this should only be used after running runActivation(double[] input)
     * at least once.
     *
     * @return the output of the Neuron
     */
    public double getOutput(){
        return output;
    }

    /**
     * Returns the parents of this Neuron.
     *
     * @return the parents of this Neuron
     */
    public ArrayList<Neuron> getParents(){
        return parents;
    }

    /**
     * Returns the weights used in the Neuron.
     * Note that there will be one weight for each input as well as an
     * extra bias weight.
     *
     * @return the weights used to bias the inputs
     */
    public double[] getWeights(){
        return weights;
    }

    /**
     * Initializes the weights of the array according to 4 schemes.
     * MUST be called after every call to setNumInputs(), or whenever the weights array is cleared.
     * <p>
     * RANDOM_WEIGHTS assigns all weights a value in [-1,1)
     * <p>
     * POSITIVE_WEIGHTS assigns all weights a value in [0,1)
     * <p>
     * NORMAL_WEIGHTS assigns all weights the value of 0.5
     * <p>
     * ZERO_WEIGHTS assigns all weights the value of 0
     *
     * @param i the initialization scheme to use
     */
    public void init(int i){
        switch(i){
            case RANDOM_WEIGHTS:
                // assumed fill matching the documented range [-1,1)
                for(int n=0;n<weights.length;n++){
                    weights[n]=Math.random()*2-1;
                }
                break;
            case POSITIVE_WEIGHTS:
                // assumed fill matching the documented range [0,1)
                for(int n=0;n<weights.length;n++){
                    weights[n]=Math.random();
                }
                break;
            case NORMAL_WEIGHTS:
                Arrays.fill(weights,0.5);
                break;
            case ZERO_WEIGHTS:
                Arrays.fill(weights,0);
                break;
        }
    }

    /**
     * Runs the Neuron as a pass-through input neuron, storing the raw input as its output.
     * <p>
     * NOTE: the activation function is NOT used. This is a dummy neuron.
     *
     * @param input the input to the neuron
     * @return the input. only purpose is to store the output
     */
    public double run(double input){
        return output=input;
    }
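    // Activation sketch for run() below. The class documentation only says the
    // activation is sigmoidal and returns a value between 0 and 1; the standard
    // logistic form assumed here is
    //
    //     net    = sum_n weights[n] * parents.get(n).getOutput() + weights[weights.length-1]
    //     output = 1 / (1 + e^(-net))
    //
    // A gain factor inside the exponent would steepen the curve toward the
    // extremes, as the class documentation describes, but no value is given.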
    /**
     * Runs the actual activation function.
     * <p>
     * This takes no input, since it gets all its data from the
     * lists it has of its parents. This means that layers MUST
     * be run in a sequential fashion for any meaningful result.
     *
     * @return the output of the activation function
     */
    public double run(){
        output=0;
        // assumed body: weighted sum of the parents' outputs plus the trailing
        // bias weight, squashed by the logistic sigmoid
        for(int n=0;n<parents.size();n++){
            output+=parents.get(n).getOutput()*weights[n];
        }
        output+=weights[weights.length-1];
        output=1.0/(1.0+Math.exp(-output));
        return output;
    }
}
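// A minimal usage sketch; this demo class is an addition, not part of the original
// API. Input neurons are driven through run(double), the output neuron reads its
// parents through run(), and layers are run in sequential order as the Javadoc
// above requires. Training via backprop(Case, double) is omitted because the
// Case class is defined elsewhere.
class NeuronDemo{
    public static void main(String[] args){
        Neuron in1=new Neuron(1);
        Neuron in2=new Neuron(1);
        Neuron out=new Neuron(2);

        // wire the network: the output neuron reads from its two parents
        out.addParent(in1);
        out.addParent(in2);
        in1.addChild(out);
        in2.addChild(out);

        // randomize the output neuron's weights in [-1,1)
        out.init(Neuron.RANDOM_WEIGHTS);

        // run the layers in sequential order: inputs first, then the output neuron
        in1.run(0.2);
        in2.run(0.9);
        System.out.println("network output: "+out.run());
    }
}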