/* SAS MLP Backprop Code */
/* MLP neural network in SAS.                                       */
/* Original source code by Phil Brierley, www.philbrierley.com;     */
/* rewritten for SAS 9.1 by Stan Dylnicki, June 29, 2007.           */
/* Architecture: tanh hidden neurons, one linear output neuron.     */
/* To include an input bias, add an extra input column to the       */
/* training data and set it to 1.                                   */

/* ---------------- user settings --------------------------------- */
%LET numEpochs = 500;    /* passes over the training set            */
%LET numHidden = 4;      /* hidden-layer size                       */
%LET LR_IH = 0.7;        /* learning rate, input -> hidden          */
%LET LR_HO = 0.07;       /* learning rate, hidden -> output         */

/* ---------------- data-dependent settings ----------------------- */
%LET numInputs = 3;      /* inputs per pattern (incl. bias input)   */
%LET numPatterns = 4;    /* number of training patterns             */

/* ================================================================ */
/* ********** MAIN PROGRAM **************************************** */
/* ================================================================ */
DATA _NULL_;
   FILE PRINT; /* route PUT output to the listing, not the log */

   ARRAY hiddenVal{&numHidden.} _temporary_;
   ARRAY trainInputs{&numPatterns.,&numInputs.} _temporary_;
   ARRAY weightsIH{&numInputs.,&numHidden.} _temporary_;
   ARRAY weightsHO{&numHidden.} _temporary_;
   ARRAY trainOutput{&numPatterns.} _temporary_;

   LINK initWeights;           /* random starting weights           */
   LINK initData;              /* load the XOR training set         */

   /* stochastic gradient descent over the training patterns */
   DO j2 = 1 TO &numEpochs.;
      DO i2 = 1 TO &numPatterns.;
         /* pick one pattern at random (RANUNI in (0,1) -> 1..n)    */
         patNum = CEIL(RANUNI(0)*&numPatterns.);
         LINK calcNet;          /* forward pass + error             */
         LINK WeightChangesHO;  /* update hidden -> output weights  */
         LINK WeightChangesIH;  /* update input -> hidden weights   */
      END;
      /* report the whole-set error once per epoch */
      LINK calcOverallError;
      PUT "epoch = " j2 " RMS Error = " RMSerror;
   END;

   /* training finished: show target vs. model per pattern */
   LINK displayResults;
   RETURN;

/* ================================================================ */
/* ********** SUBROUTINES ***************************************** */
/* ================================================================ */

calcNet:
   /* Forward pass for pattern patNum: tanh hidden layer feeding a  */
   /* linear output; leaves outPred and errThisPat set.             */
   DO h = 1 TO &numHidden.;
      hiddenVal{h} = 0.0;
      DO n = 1 TO &numInputs.;
         hiddenVal{h} = hiddenVal{h} + (trainInputs{patNum,n} * weightsIH{n,h});
      END;
      hiddenVal{h} = TANH(hiddenVal{h});
   END;
   outPred = 0.0;
   DO h = 1 TO &numHidden.;
      outPred = outPred + hiddenVal{h} * weightsHO{h};
   END;
   errThisPat = outPred - trainOutput{patNum};
   RETURN;

WeightChangesHO:
   /* Gradient step on the hidden -> output weights. */
   DO h = 1 TO &numHidden.;
      weightsHO{h} = weightsHO{h} - (&LR_HO. * errThisPat * hiddenVal{h});
      /* regularisation: clamp each output weight to [-5, 5] */
      weightsHO{h} = MAX(-5, MIN(5, weightsHO{h}));
   END;
   RETURN;

WeightChangesIH:
   /* Gradient step on the input -> hidden weights. Deliberately    */
   /* uses the hidden -> output weights as already updated by       */
   /* WeightChangesHO, matching the original implementation.        */
   DO h = 1 TO &numHidden.;
      /* tanh derivative times the backpropagated error signal */
      grad = 1 - (hiddenVal{h} * hiddenVal{h});
      grad = grad * weightsHO{h} * errThisPat * &LR_IH.;
      DO n = 1 TO &numInputs.;
         weightsIH{n,h} = weightsIH{n,h} - (grad * trainInputs{patNum,n});
      END;
   END;
   RETURN;

initWeights:
   /* Small random starting weights; RANUNI(0) seeds from the clock. */
   DO h = 1 TO &numHidden.;
      weightsHO{h} = (RANUNI(0) - 0.5)/2;
      DO n = 1 TO &numInputs.;
         weightsIH{n,h} = (RANUNI(0) - 0.5)/5;
      END;
   END;
   RETURN;

initData:
   PUT "initialising data";
   /* XOR truth table rescaled to [-1, 1]; the third input is the   */
   /* constant 1 acting as the bias; targets lie in [-1, 1].        */
   trainInputs{1,1} =  1;  trainInputs{1,2} = -1;  trainInputs{1,3} = 1;  trainOutput{1} =  1;
   trainInputs{2,1} = -1;  trainInputs{2,2} =  1;  trainInputs{2,3} = 1;  trainOutput{2} =  1;
   trainInputs{3,1} =  1;  trainInputs{3,2} =  1;  trainInputs{3,3} = 1;  trainOutput{3} = -1;
   trainInputs{4,1} = -1;  trainInputs{4,2} = -1;  trainInputs{4,3} = 1;  trainOutput{4} = -1;
   RETURN;

displayResults:
   /* Print target vs. model output for every pattern. */
   DO i2 = 1 TO &numPatterns.;
      patNum = i2;
      LINK calcNet;
      PUT "pat = " patNum " actual = " trainOutput{patNum} " neural model = " outPred;
   END;
   RETURN;

calcOverallError:
   /* Root-mean-square error over the whole training set. */
   RMSerror = 0.0;
   DO i2 = 1 TO &numPatterns.;
      patNum = i2;
      LINK calcNet;
      RMSerror = RMSerror + (errThisPat * errThisPat);
   END;
   RMSerror = SQRT(RMSerror/&numPatterns.);
   RETURN;

RUN;