// Java MLP Backprop Code
/* MLP neural network in Java by Phil Brierley
   www.philbrierley.com

   This code may be freely used and modified at will.

   Tanh hidden neurons, linear output neuron.
   To include an input bias create an extra input in the training
   data and set to 1.

   Routines included:
     calcNet() WeightChangesHO() WeightChangesIH() initWeights()
     initData() tanh(double x) displayResults() calcOverallError()

   Originally compiled and tested on Symantec Cafe Lite.

   NOTE(review): the copy of this file under review was corrupted by
   HTML tag-stripping (every "<...>" span was deleted), which removed
   most method bodies. They have been reconstructed below from the
   surviving skeleton (field declarations, routine list above, the
   tanh clamp tail, loop fragments) and the well-known published
   original — verify against the upstream source. */

import java.lang.Math;

public class JavaMLP
{
    // ----- user defineable variables -----
    public static int numEpochs = 500;     // number of training cycles
    public static int numInputs = 3;       // number of inputs - includes the input bias
    public static int numHidden = 4;       // number of hidden units
    public static int numPatterns = 4;     // number of training patterns
    public static double LR_IH = 0.7;      // learning rate, input -> hidden
    public static double LR_HO = 0.07;     // learning rate, hidden -> output

    // ----- process variables -----
    public static int patNum;              // index of the pattern currently presented
    public static double errThisPat;       // outPred - trainOutput[patNum] for current pattern
    public static double outPred;          // network output for current pattern
    public static double RMSerror;         // root-mean-square error over all patterns

    // ----- training data -----
    public static double[][] trainInputs = new double[numPatterns][numInputs];
    public static double[] trainOutput = new double[numPatterns];

    // the outputs of the hidden neurons
    public static double[] hiddenVal = new double[numHidden];

    // the weights
    public static double[][] weightsIH = new double[numInputs][numHidden];
    public static double[] weightsHO = new double[numHidden];

    //==============================================================
    //********** THIS IS THE MAIN PROGRAM **************************
    //==============================================================

    /**
     * Trains the network on the fixed pattern set, printing the RMS
     * error each epoch, then displays the final per-pattern results.
     */
    public static void main(String[] args)
    {
        // initiate the weights
        initWeights();

        // load in the data
        initData();

        // train the network
        for (int j = 0; j <= numEpochs; j++)
        {
            for (int i = 0; i < numPatterns; i++)
            {
                // select a pattern at random (stochastic presentation)
                patNum = (int) (Math.random() * numPatterns);

                // calculate the current network output and error for this pattern
                calcNet();

                // adjust network weights
                WeightChangesHO();
                WeightChangesIH();
            }

            // display the overall network error after each epoch
            calcOverallError();
            System.out.println("epoch = " + j + "  RMS Error = " + RMSerror);
        }

        // display the results
        displayResults();
    }

    //************************************
    /**
     * Forward pass: computes hidden activations (tanh) and the linear
     * output for pattern patNum, and sets errThisPat = outPred - target.
     */
    public static void calcNet()
    {
        // calculate the outputs of the hidden neurons
        for (int i = 0; i < numHidden; i++)
        {
            hiddenVal[i] = 0.0;
            for (int j = 0; j < numInputs; j++)
            {
                hiddenVal[i] += trainInputs[patNum][j] * weightsIH[j][i];
            }
            hiddenVal[i] = tanh(hiddenVal[i]);
        }

        // calculate the output of the network (linear output neuron)
        outPred = 0.0;
        for (int i = 0; i < numHidden; i++)
        {
            outPred += hiddenVal[i] * weightsHO[i];
        }

        // calculate the error: network output minus target
        errThisPat = outPred - trainOutput[patNum];
    }

    //************************************
    /**
     * Updates the hidden-to-output weights by gradient descent for the
     * current pattern; weights are clipped to [-5, 5] as a crude
     * regularisation (as in the published original).
     */
    public static void WeightChangesHO()
    {
        for (int k = 0; k < numHidden; k++)
        {
            double weightChange = LR_HO * errThisPat * hiddenVal[k];
            weightsHO[k] -= weightChange;

            // regularisation of the output weights
            if (weightsHO[k] < -5)
            {
                weightsHO[k] = -5;
            }
            else if (weightsHO[k] > 5)
            {
                weightsHO[k] = 5;
            }
        }
    }

    //************************************
    /**
     * Updates the input-to-hidden weights by backpropagation for the
     * current pattern; (1 - h^2) is the derivative of tanh.
     */
    public static void WeightChangesIH()
    {
        for (int i = 0; i < numHidden; i++)
        {
            for (int k = 0; k < numInputs; k++)
            {
                double x = 1 - (hiddenVal[i] * hiddenVal[i]); // tanh'
                x = x * weightsHO[i] * errThisPat * LR_IH;
                double weightChange = x * trainInputs[patNum][k];
                weightsIH[k][i] -= weightChange;
            }
        }
    }

    //************************************
    /**
     * Initialises all weights to small random values centred on zero.
     */
    public static void initWeights()
    {
        for (int j = 0; j < numHidden; j++)
        {
            weightsHO[j] = (Math.random() - 0.5) / 2;
            for (int i = 0; i < numInputs; i++)
            {
                weightsIH[i][j] = (Math.random() - 0.5) / 5;
            }
        }
    }

    //************************************
    /**
     * Loads the training data: XOR in -1/+1 encoding, with the third
     * input held at 1 to act as the bias (see header comment).
     */
    public static void initData()
    {
        trainInputs[0][0] =  1;
        trainInputs[0][1] = -1;
        trainInputs[0][2] =  1;  // bias
        trainOutput[0] = 1;

        trainInputs[1][0] = -1;
        trainInputs[1][1] =  1;
        trainInputs[1][2] =  1;  // bias
        trainOutput[1] = 1;

        trainInputs[2][0] =  1;
        trainInputs[2][1] =  1;
        trainInputs[2][2] =  1;  // bias
        trainOutput[2] = -1;

        trainInputs[3][0] = -1;
        trainInputs[3][1] = -1;
        trainInputs[3][2] =  1;  // bias
        trainOutput[3] = -1;
    }

    //************************************
    /**
     * Hyperbolic tangent, clamped to ±1 for |x| > 20 to avoid
     * overflow in Math.exp (this clamp tail survived in the corrupted
     * source verbatim).
     */
    public static double tanh(double x)
    {
        if (x > 20)
        {
            return 1;
        }
        else if (x < -20)
        {
            return -1;
        }
        else
        {
            double a = Math.exp(x);
            double b = Math.exp(-x);
            return (a - b) / (a + b);
        }
    }

    //************************************
    /**
     * Prints the target and network output for each training pattern.
     */
    public static void displayResults()
    {
        for (int i = 0; i < numPatterns; i++)
        {
            patNum = i;
            calcNet();
            System.out.println("pat = " + (patNum + 1)
                    + " actual = " + trainOutput[patNum]
                    + " neural model = " + outPred);
        }
    }

    //************************************
    /**
     * Computes RMSerror = sqrt(mean over patterns of errThisPat^2),
     * running a forward pass for every pattern.
     */
    public static void calcOverallError()
    {
        RMSerror = 0.0;
        for (int i = 0; i < numPatterns; i++)
        {
            patNum = i;
            calcNet();
            RMSerror += errThisPat * errThisPat;
        }
        RMSerror /= numPatterns;
        RMSerror = Math.sqrt(RMSerror);
    }
}