AI-NNFlex

 view release on metacpan or  search on metacpan

examples/bp.pl  view on Meta::CPAN

#MLP neural network, originally written in Java
#by Phil Brierley
#www.philbrierley.com
#This code may be freely used and modified at will
###########################
#Translated into perl - ccolbourn oct 2004

# NOTE(review): strictures were missing entirely; all variables below are
# already lexically declared with my(), so strict/warnings should be safe.
# Confirm against the rest of the file (subs not visible here).
use strict;
use warnings;

# --- network / training hyper-parameters ---
my $numEpochs   = 500;     # number of training epochs
my $numInputs   = 3;       # input units per pattern (see @trainInputs)
my $numHidden   = 4;       # hidden (tanh) units
my $numPatterns = 4;       # number of training patterns
my $LR_IH       = 0.7;     # learning rate, input -> hidden layer
my $LR_HO       = 0.07;    # learning rate, hidden -> output layer

# --- per-pattern training state (shared by the subs below) ---
my $patNum;       # index of the currently selected training pattern
my $errThisPat;   # error on the current pattern
my $outPred;      # network output for the current pattern
my $RMSerror;     # RMS error over all patterns, set by calcOverallError()

# training data: inputs are indexed [pattern][input]
my @trainInputs;
my @trainOutput;

# the outputs of the hidden neurons
my @hiddenVal;

# the weights: @weightsIH is [input][hidden], @weightsHO is [hidden]
my @weightsIH;
my @weightsHO;


main();


#==============================================================
#********** THIS IS THE MAIN PROGRAM **************************
#==============================================================

# Train the MLP by stochastic backpropagation: for each epoch, visit
# $numPatterns randomly chosen patterns, doing a forward pass (calcNet)
# and weight updates for both layers, then report the epoch's RMS error.
# Reads/writes the file-scoped globals ($patNum, $RMSerror, weights).
sub main
 {

  # initiate the weights
   initWeights();

  # load in the data
   initData();

  # train the network.
  # FIX: original loop used "$j <= $numEpochs", which ran 501 epochs
  # when $numEpochs is 500 — classic off-by-one.
    for(my $j = 0;$j < $numEpochs;$j++)
    {

        for(my $i = 0;$i<$numPatterns;$i++)
        {

            # select a pattern at random.
            # FIX: int(rand(n)) gives a uniform integer in 0..n-1,
            # replacing the original "(rand()*n)-0.001" truncation hack,
            # whose index 0 was drawn slightly more often than the rest.
            $patNum = int(rand($numPatterns));

            # calculate the current network output
            # and error for this pattern (forward pass)
            calcNet();

            # change network weights (backward pass, both layers)
            WeightChangesHO();
            WeightChangesIH();
        }

        # display the overall network error
        # after each epoch
        calcOverallError();

        print "epoch = ".$j."  RMS Error = ".$RMSerror."\n";

    }

    # training has finished
    # display the results
    displayResults();

 }

#============================================================
#********** END OF THE MAIN PROGRAM **************************
#=============================================================






#***********************************
# Forward pass: compute the network output $outPred for the pattern
# selected by $patNum. Hidden layer uses tanh activation; the single
# output neuron is linear. Reads @trainInputs/@weightsIH/@weightsHO,
# writes @hiddenVal and $outPred.
# NOTE(review): $patNum may be fractional (set via rand() in main);
# Perl truncates it toward zero when used as an array index.
sub calcNet()
 {
    #calculate the outputs of the hidden neurons
    #the hidden neurons are tanh
    for(my $i = 0;$i<$numHidden;$i++)
    {
	$hiddenVal[$i] = 0.0;

        # weighted sum of all inputs into hidden unit $i
        for(my $j = 0;$j<$numInputs;$j++)
	{
        $hiddenVal[$i] = $hiddenVal[$i] + ($trainInputs[$patNum][$j] * $weightsIH[$j][$i]);
	}

        # squash through tanh (helper presumably defined elsewhere in
        # this file — Perl has no built-in tanh; TODO confirm)
        $hiddenVal[$i] = tanh($hiddenVal[$i]);
    }

   #calculate the output of the network
   #the output neuron is linear (weighted sum, no activation)
   $outPred = 0.0;

   for(my $i = 0;$i<$numHidden;$i++)
   {
    $outPred = $outPred + $hiddenVal[$i] * $weightsHO[$i];
   }



( run in 1.292 second using v1.01-cache-2.11-cpan-39bf76dae61 )