Fixed a bug that allowed activation to flow through a node
even if it was inactive
Altered the syntax for output to be param=>value instead of
an anonymous hash
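For illustration, the call style changed roughly like this (method and parameter as documented in the output perldoc below):
# new style:
my $out = $network->output(layer=>1);
# old style (anonymous hash), no longer used:
# my $out = $network->output({layer=>1});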
As per Scott Fahlman's comments about neural net benchmarking
(Fahlman, S.E. (1988) 'An empirical study of learning speed in back-propagation networks'. Tech. Rep. CMU-CS-88-162, Carnegie Mellon University, Pittsburgh, PA.), I've started using a more realistic benchmark than xor.
The 'cars' subfolder in examples contains the learning code
for this, drawn from
ftp://ftp.ics.uci.edu/pub/machine-learning-databases/car/
#############################################################
0.16
20050218
Makefile.PL
use ExtUtils::MakeMaker;
# See lib/ExtUtils/MakeMaker.pm for details of how to influence
# the contents of the Makefile that is written.
WriteMakefile(
'NAME' => 'AI::NNFlex',
'VERSION_FROM' => 'lib/AI/NNFlex.pm', # finds $VERSION
'PREREQ_PM' => {'Math::Matrix'=>0},
'MAN3PODS' => { },
);
# Example demonstrating XOR with momentum backprop learning
use strict;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
# Create the network
my $network = AI::NNFlex::Backprop->new(
learningrate=>.2,
bias=>1,
fahlmanconstant=>0.1,
momentum=>0.6,
round=>1);
$network->add_layer( nodes=>2,
activationfunction=>"tanh");
$network->add_layer( nodes=>2,
activationfunction=>"tanh");
$network->add_layer( nodes=>1,
activationfunction=>"linear");
$network->init();
my $dataset = AI::NNFlex::Dataset->new([
[0,0],[0],
[0,1],[1],
[1,0],[1],
[1,1],[0]]);
my $counter=0;
my $err = 10;
while ($err >.001)
{
$err = $dataset->learn($network);
print "Epoch = $counter error = $err\n";
$counter++;
}
foreach (@{$dataset->run($network)})
{
foreach (@$_){print $_}
print "\n";
}
examples/add.pl
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
# train the network to do addition. Adapted from code posted to perlmonks
# by tlpriest on 13/05/05
my $network = AI::NNFlex::Backprop->new(
learningrate=>.00001,
fahlmanconstant=>0,
fixedweights=>1,
momentum=>0.3,
bias=>0);
$network->add_layer( nodes=>2,
activationfunction=>"linear");
$network->add_layer( nodes=>2,
activationfunction=>"linear");
$network->add_layer( nodes=>1,
activationfunction=>"linear");
$network->init();
# Taken from Mesh ex_add.pl
my $dataset = AI::NNFlex::Dataset->new([
[ 1, 1 ], [ 2 ],
[ 1, 2 ], [ 3 ],
[ 2, 2 ], [ 4 ],
[ 20, 20 ], [ 40 ],
[ 10, 10 ], [ 20 ],
[ 15, 15 ], [ 30 ],
[ 12, 8 ], [ 20 ],
]);
my $err = 10;
# Stop after 4096 epochs -- don't want to wait more than that
for ( my $i = 0; ($err > 0.0001) && ($i < 4096); $i++ ) {
$err = $dataset->learn($network);
print "Epoch = $i error = $err\n";
}
foreach (@{$dataset->run($network)})
{
foreach (@$_){print $_}
print "\n";
}
print "this should be 4000 - ";
$network->run([2000,2000]);
foreach ( @{$network->output}){print $_."\n";}
foreach my $a ( 1..10 ) {
foreach my $b ( 1..10 ) {
my($ans) = $a+$b;
my($nnans) = @{$network->run([$a,$b])};
print "[$a] [$b] ans=$ans but nnans=$nnans\n" unless $ans == $nnans;
}
}
examples/bp.pl
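The settings block at the top of bp.pl is not shown in this listing; a minimal sketch of the file-scoped variables the code below assumes, with illustrative values loosely following Phil Brierley's original, would be:
# network geometry and training settings (illustrative values;
# the original header of bp.pl is not reproduced here)
my $numInputs   = 3;     # 2 XOR inputs plus 1 bias input
my $numHidden   = 4;
my $numPatterns = 4;
my $numEpochs   = 500;
my $LR_IH       = 0.7;   # learning rate, input->hidden
my $LR_HO       = 0.07;  # learning rate, hidden->output
my ($patNum, $errThisPat, $outPred, $RMSerror);
my (@trainInputs, @trainOutput, @hiddenVal, @weightsIH, @weightsHO);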
main();
#==============================================================
#********** THIS IS THE MAIN PROGRAM **************************
#==============================================================
sub main
{
# initiate the weights
initWeights();
# load in the data
initData();
# train the network
for(my $j = 0;$j <= $numEpochs;$j++)
{
for(my $i = 0;$i<$numPatterns;$i++)
{
#select a pattern at random
$patNum = (rand()*$numPatterns)-0.001;
#calculate the current network output
#and error for this pattern
calcNet();
#change network weights
WeightChangesHO();
WeightChangesIH();
}
#display the overall network error
#after each epoch
calcOverallError();
print "epoch = ".$j." RMS Error = ".$RMSerror."\n";
}
#training has finished
#display the results
displayResults();
}
#============================================================
#********** END OF THE MAIN PROGRAM **************************
#=============================================================
#***********************************
sub calcNet()
{
#calculate the outputs of the hidden neurons
#the hidden neurons are tanh
for(my $i = 0;$i<$numHidden;$i++)
{
$hiddenVal[$i] = 0.0;
for(my $j = 0;$j<$numInputs;$j++)
{
$hiddenVal[$i] = $hiddenVal[$i] + ($trainInputs[$patNum][$j] * $weightsIH[$j][$i]);
}
$hiddenVal[$i] = tanh($hiddenVal[$i]);
}
#calculate the output of the network
#the output neuron is linear
$outPred = 0.0;
for(my $i = 0;$i<$numHidden;$i++)
{
$outPred = $outPred + $hiddenVal[$i] * $weightsHO[$i];
}
#calculate the error
$errThisPat = $outPred - $trainOutput[$patNum];
}
#************************************
sub WeightChangesHO()
#adjust the weights hidden-output
{
for(my $k = 0;$k<$numHidden;$k++)
{
$weightChange = $LR_HO * $errThisPat * $hiddenVal[$k];
$weightsHO[$k] = $weightsHO[$k] - $weightChange;
#regularisation on the output weights
if ($weightsHO[$k] < -5)
{
$weightsHO[$k] = -5;
}
elsif ($weightsHO[$k] > 5)
{
$weightsHO[$k] = 5;
}
}
}
#************************************
sub WeightChangesIH()
#adjust the weights input-hidden
{
for(my $i = 0;$i<$numHidden;$i++)
{
for(my $k = 0;$k<$numInputs;$k++)
{
my $x = 1 - ($hiddenVal[$i] * $hiddenVal[$i]);
$x = $x * $weightsHO[$i] * $errThisPat * $LR_IH;
$x = $x * $trainInputs[$patNum][$k];
my $weightChange = $x;
$weightsIH[$k][$i] = $weightsIH[$k][$i] - $weightChange;
}
}
}
#************************************
sub initWeights()
{
for(my $j = 0;$j<$numHidden;$j++)
{
$weightsHO[$j] = (rand() - 0.5)/2;
for(my $i = 0;$i<$numInputs;$i++)
{
$weightsIH[$i][$j] = (rand() - 0.5)/5;
}
}
}
#************************************
sub initData()
{
print "initialising data\n";
# the data here is the XOR data
# it has been rescaled to the range
# [-1][1]
# an extra input valued 1 is also added
# to act as the bias
$trainInputs[0][0] = 1;
$trainInputs[0][1] = -1;
$trainInputs[0][2] = 1; #bias
$trainOutput[0] = 1;
$trainInputs[1][0] = -1;
$trainInputs[1][1] = 1;
$trainInputs[1][2] = 1; #bias
$trainOutput[1] = 1;
$trainInputs[2][0] = 1;
$trainInputs[2][1] = 1;
$trainInputs[2][2] = 1; #bias
$trainOutput[2] = -1;
$trainInputs[3][0] = -1;
$trainInputs[3][1] = -1;
$trainInputs[3][2] = 1; #bias
$trainOutput[3] = -1;
}
#************************************
sub tanh()
{
my $x = shift;
if ($x > 20){ return 1;}
elsif ($x < -20){ return -1;}
else
{
my $a = exp($x);
my $b = exp(-$x);
return ($a-$b)/($a+$b);
}
}
#************************************
sub displayResults()
{
for(my $i = 0;$i<$numPatterns;$i++)
{
$patNum = $i;
calcNet();
print "pat = ".($patNum+1)." actual = ".$trainOutput[$patNum]." neural model = ".$outPred."\n";
}
}
#************************************
sub calcOverallError()
{
$RMSerror = 0.0;
for(my $i = 0;$i<$numPatterns;$i++)
{
$patNum = $i;
calcNet();
$RMSerror = $RMSerror + ($errThisPat * $errThisPat);
}
$RMSerror = $RMSerror/$numPatterns;
$RMSerror = sqrt($RMSerror);
}
examples/cars/cars.pl
#
#buying: vhigh, high, med, low.
#maint: vhigh, high, med, low.
#doors: 2, 3, 4, 5more.
#persons: 2, 4, more.
#lug_boot: small, med, big.
#safety: low, med, high.
my @dataArray;
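# bidirectional lookup table: each categorical attribute value maps
# to a two-bit string ('0 0'..'1 1') and each bit string maps back,
# so raw rows can be encoded for the net and outputs decoded again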
my %translate = (
'accept'=>{'0 0'=>'unacc',
'0 1'=>'acc',
'1 0'=>'good',
'1 1'=>'vgood',
'unacc'=>'0 0',
'acc'=>'0 1',
'good'=>'1 0',
'vgood'=>'1 1'},
'buying'=>{'1 1'=>'vhigh',
'1 0'=>'high',
'0 1'=>'med',
'0 0'=>'low',
'vhigh'=>'1 1',
'high'=>'1 0',
'med'=>'0 1',
'low'=>'0 0'},
'maint'=>{'1 1'=>'vhigh',
'1 0'=>'high',
'0 1'=>'med',
'0 0'=>'low',
'vhigh'=>'1 1',
'high'=>'1 0',
'med'=>'0 1',
'low'=>'0 0'},
'doors'=>{'1 1'=>'2',
'1 0'=>'3',
'0 1'=>'4',
'0 0'=>'5more',
'2'=>'1 1',
'3'=>'1 0',
'4'=>'0 1',
'5more'=>'0 0'},
'persons'=>{'0 0'=>'2',
'1 0'=>'4',
'1 1'=>'more',
'2'=>'0 0',
'4'=>'1 0',
'more'=>'1 1'},
'lug_boot'=>{'0 0'=>'small',
'1 0'=>'med',
'1 1'=>'big',
'small'=>'0 0',
'med'=>'1 0',
'big'=>'1 1'},
'safety'=>{'0 0'=>'low',
'1 0'=>'med',
'1 1'=>'high',
'low'=>'0 0',
'med'=>'1 0',
'high'=>'1 1'});
open (CARS,"car_data.txt") or die "Can't open file";
while (<CARS>)
{
chomp $_;
if ($_ !~ /\w+/){next} # skip blank lines
my ($buying,$maint,$doors,$persons,$lug_boot,$safety,$accept) = split /,/,$_;
my $inputString = $translate{'buying'}->{$buying}. " "
.$translate{'maint'}->{$maint}. " "
.$translate{'doors'}->{$doors}. " "
.$translate{'persons'}->{$persons}. " "
.$translate{'lug_boot'}->{$lug_boot}. " "
.$translate{'safety'}->{$safety};
my $outputString = $translate{'accept'}->{$accept};
my @inputArray = split / /,$inputString;
my @outputArray = split / /,$outputString;
if (scalar @inputArray != 12 || scalar @outputArray != 2)
{
print "--$inputString $outputString\n";
}
push @dataArray,\@inputArray,\@outputArray;
}
close CARS;
######################################################################
# data now constructed, we can do the NN thing
######################################################################
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
my $dataset = AI::NNFlex::Dataset->new(\@dataArray);
my $network = AI::NNFlex::Backprop->new( learningrate=>.1,
fahlmanconstant=>0.1,
bias=>1,
momentum=>0.6);
$network->add_layer( nodes=>12,
activationfunction=>"tanh");
$network->add_layer( nodes=>12,
activationfunction=>"tanh");
$network->add_layer( nodes=>2,
activationfunction=>"linear");
$network->init();
$network->connect(fromlayer=>2,tolayer=>2);
my $counter=0;
my $err = 10;
while ($err >.001)
{
$err = $dataset->learn($network);
print "Epoch $counter: Error = $err\n";
$counter++;
}
foreach (@{$dataset->run($network)})
{
foreach (@$_){print $_}
print "\n";
}
examples/lesion.pl
# Example demonstrating XOR with momentum backprop learning
# and node lesioning
use strict;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
# Create the network
my $network = AI::NNFlex::Backprop->new(randomconnections=>0,
randomweights=>1,
learningrate=>.1,
debug=>[],bias=>1,
momentum=>0.6,
round=>1);
$network->add_layer( nodes=>2,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"tanh",
randomweights=>1);
$network->add_layer( nodes=>2,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"tanh",
randomweights=>1);
$network->add_layer( nodes=>1,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"linear",
randomweights=>1);
$network->init();
my $dataset = AI::NNFlex::Dataset->new([
[0,0],[0],
[0,1],[1],
[1,0],[1],
[1,1],[0]]);
my $counter=0;
my $err = 10;
while ($err >.001)
{
$err = $dataset->learn($network);
print "Epoch $counter: Error = $err\n";
$counter++;
}
$network->lesion(nodes=>0.5,connections=>0.5);
$network->dump_state(filename=>"weights-learned.wts",activations=>1);
foreach (@{$dataset->run($network)})
{
foreach (@$_){print $_}
print "\n";
}
examples/reinforceTest.pl
# this is /really/ experimental - see perldoc NNFlex::reinforce
use AI::NNFlex;
my $object = AI::NNFlex->new([{"nodes"=>2,"persistent activation"=>0,"decay"=>0.0,"random activation"=>0,"threshold"=>0.0,"activation function"=>"tanh","random weights"=>1},
{"nodes"=>2,"persistent activation"=>0,"decay"=>0.0,"random activation"=>0,"threshold"=>0.0,"activation function"=>"tanh","random weights"=>1},
{"nodes"=>1,"persistent activation"=>0,"decay"=>0.0,"random activation"=>0,"threshold"=>0.0,"activation function"=>"linear","random weights"=>1}],{'random connections'=>0,'networktype'=>'feedforward', 'random weights'=>1,'learn...
$object->run([1,0]);
$output = $object->output();
foreach (@$output)
{
print "1,0 - $_ ";
}
print "\n";
$object->run([0,1]);
$err = $object->learn([1]);
$output = $object->output();
foreach (@$output)
{
print "0,1 - $_ ";
}
print "\n";
$object->run([0,1]);
$err = $object->learn([1]);
$output = $object->output();
foreach (@$output)
{
print "0,1 - $_ ";
}
print "\n";
$object->run([0,1]);
$output = $object->output();
foreach (@$output)
{
print "0,1 - $_ ";
}
print "\n";
$object->run([1,0]);
$output = $object->output();
foreach (@$output)
{
print "1,0 - $_ ";
}
print "\n";
examples/test.pl
use strict;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
# create the numbers
my %numbers;
for (0..255)
{
my @array = split //,sprintf("%08b",$_);
$numbers{$_} = \@array;
}
my @data;
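# pair each even number 0..12 with its square, both as 8-bit patterns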
for (my $counter=0;$counter < 14;$counter+=2)
{
push @data,$numbers{$counter};
push @data,$numbers{$counter*$counter};
}
# Create the network
my $network = AI::NNFlex::Backprop->new(
learningrate=>.05,
bias=>1,
fahlmanconstant=>0.1,
momentum=>0.6,
round=>1);
$network->add_layer( nodes=>8,
activationfunction=>"tanh");
$network->add_layer( nodes=>8,
errorfunction=>'atanh',
activationfunction=>"tanh");
$network->add_layer( nodes=>8,
activationfunction=>"linear");
$network->init();
my $dataset = AI::NNFlex::Dataset->new(\@data);
my $counter=0;
my $err = 10;
while ($err >.01)
{
$err = $dataset->learn($network);
print "Epoch = $counter error = $err\n";
$counter++;
}
$network->run([0,0,0,0,0,1,0,1]);
my $output = $network->output();
foreach (@$output){print $_}
print "\n";
examples/xor.pl
# Example demonstrating XOR with momentum backprop learning
use strict;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
# Create the network
my $network = AI::NNFlex::Backprop->new(
learningrate=>.2,
bias=>1,
fahlmanconstant=>0.1,
momentum=>0.6,
round=>1);
$network->add_layer( nodes=>2,
activationfunction=>"tanh");
$network->add_layer( nodes=>2,
activationfunction=>"tanh");
$network->add_layer( nodes=>1,
activationfunction=>"linear");
$network->init();
my $dataset = AI::NNFlex::Dataset->new([
[0,0],[0],
[0,1],[1],
[1,0],[1],
[1,1],[0]]);
$dataset->save(filename=>'xor.pat');
$dataset->load(filename=>'xor.pat');
my $counter=0;
my $err = 10;
while ($err >.001)
#for (1..1500)
{
$err = $dataset->learn($network);
print "Epoch = $counter error = $err\n";
$counter++;
}
foreach (@{$dataset->run($network)})
{
foreach (@$_){print $_}
print "\n";
}
print "this should be 1 - ".@{$network->run([0,1])}."\n";
examples/xor_minimal.pl
# Example demonstrating XOR with momentum backprop learning
# and minimal set of parameters (using default values)
use strict;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
# Create the network
my $network = AI::NNFlex::Backprop->new( learningrate=>.1,
bias=>1,
momentum=>0.6,
fahlmanconstant=>0.1,
round=>1);
$network->add_layer( nodes=>2,
activationfunction=>"tanh");
$network->add_layer( nodes=>2,
activationfunction=>"tanh");
$network->add_layer( nodes=>1,
activationfunction=>"linear");
$network->init();
my $dataset = AI::NNFlex::Dataset->new([
[0,0],[0],
[0,1],[1],
[1,0],[1],
[1,1],[0]]);
my $counter=0;
my $err = 10;
while ($err >.001)
{
$err = $dataset->learn($network);
print "Epoch $counter: Error = $err\n";
$counter++;
}
foreach (@{$dataset->run($network)})
{
foreach (@$_){print $_}
print "\n";
}
examples/xorminus.pl
# Example demonstrating XOR with momentum backprop learning
use strict;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
# Create the network
my $network = AI::NNFlex::Backprop->new(
learningrate=>.2,
bias=>1,
fahlmanconstant=>0.1,
momentum=>0.6,
round=>1);
$network->add_layer( nodes=>2,
activationfunction=>"tanh");
$network->add_layer( nodes=>2,
activationfunction=>"tanh");
$network->add_layer( nodes=>1,
activationfunction=>"linear");
$network->init();
my $dataset = AI::NNFlex::Dataset->new([
[-1,-1],[-1],
[-1,1],[1],
[1,-1],[1],
[1,1],[-1]]);
$dataset->save(filename=>'xor.pat');
$dataset->load(filename=>'xor.pat');
my $counter=0;
my $err = 10;
while ($err >.001)
#for (1..1500)
{
$err = $dataset->learn($network);
print "Epoch = $counter error = $err\n";
$counter++;
}
foreach (@{$dataset->run($network)})
{
foreach (@$_){print $_}
print "\n";
}
print "this should be 1 - ".@{$network->run([-1,1])}."\n";
lib/AI/NNFlex.pm
# single network with 2 layers unsupervised and 2 layers supervised
#
# Clean up the perldocs
#
###############################################################################
$VERSION = "0.24";
###############################################################################
my @DEBUG; # a single, solitary, shameful global variable. Couldn't
#avoid it really. It allows correct control of debug
#information before the $network object is created
# (in ::layer->new & ::node->new for example).
###############################################################################
###############################################################################
# package NNFlex
###############################################################################
###############################################################################
package AI::NNFlex;
use AI::NNFlex::Mathlib;
use base qw(AI::NNFlex::Mathlib);
###############################################################################
# AI::NNFlex::new
###############################################################################
sub new
{
my $class = shift;
my $network={};
bless $network,$class;
# intercept the new style 'empty network' constructor call
# Maybe I should deprecate the old one, but it's convenient, provided you
# can follow the mess of hashes
if (!grep /HASH/,@_)
{
my %config = @_;
foreach (keys %config)
{
$network->{$_} = $config{$_};
}
return $network;
}
# Otherwise, continue assuming that the whole network is defined in
# a pair of anonymous hashes
my $params = shift;
my $netParams = shift;
my @layers;
dbug ($netParams,"Entered AI::NNFlex::new with params $params $netParams",2);
# clean up case & spaces in layer defs from pre 0.14 constructor calls:
my $cleanParams;
foreach my $layer(@{$params})
{
my %cleanLayer;
foreach (keys %$layer)
{
my $key = lc($_);
$key =~ s/\s//g;
$cleanLayer{$key} = $$layer{$_};
}
push @$cleanParams,\%cleanLayer;
}
# Network wide parameters (e.g. random weights)
foreach (keys %$netParams)
{
my $key = lc($_);
$key =~ s/\s//g; # up to 0.14 we had params with spaces in, now deprecated
$network->{$key} = ${$netParams}{$_};
}
if( $network->{'debug'})
{
@DEBUG = @{$network->{'debug'}};
}
# build the network
foreach (@$cleanParams)
{
if (!($$_{'nodes'})){next}
my %layer = %{$_};
push @layers,AI::NNFlex::layer->new(\%layer);
}
$$network{'layers'} = \@layers;
$network->init;
return $network;
}
###############################################################################
# AI::NNFlex::add_layer
###############################################################################
#
# Adds a layer of given node definitions to the $network object
# syntax
#
# $network->add_layer(nodes=>4,activationfunction=>tanh);
#
# returns bool success or failure
#
###############################################################################
sub add_layer
{
my $network = shift;
my %config = @_;
my $layer = AI::NNFlex::layer->new(\%config);
if ($layer)
{
push @{$network->{'layers'}},$layer;
return 1;
}
else
{
return 0;
}
}
###############################################################################
# AI::NNFlex::output
###############################################################################
sub output
{
my $network = shift;
my %params = @_;
my $finalLayer = ${$$network{'layers'}}[-1];
my $outputLayer;
if (defined $params{'layer'})
{
$outputLayer = ${$$network{'layers'}}[$params{'layer'}]
}
else
{
$outputLayer = $finalLayer
}
my $output = AI::NNFlex::layer::layer_output($outputLayer);
# Round outputs if required
if ($network->{'round'})
{
foreach (@$output)
{
if ($_ > 0.5)
{
$_ = 1;
}
elsif ($_ < -0.5)
{
$_=-1;
}
else
{
$_=0;
}
}
}
return $output;
}
################################################################################
# sub init
################################################################################
sub init
{
#Revised version of init for NNFlex
my $network = shift;
my @layers = @{$network->{'layers'}};
# if network debug state not set, set it to null
if (!$network->{'debug'})
{
$network->{'debug'} = [];
}
my @debug = @{$network->{'debug'}};
# implement the bias node
if ($network->{'bias'})
{
my $biasNode = AI::NNFlex::node->new({'activation function'=>'linear'});
$$network{'biasnode'} = $biasNode;
$$network{'biasnode'}->{'activation'} = 1;
$$network{'biasnode'}->{'nodeid'} = "bias";
}
my $nodeid = 1;
my $currentLayer=0;
# foreach layer, we need to examine each node
foreach my $layer (@layers)
{
# Foreach node we need to make connections east and west
foreach my $node (@{$layer->{'nodes'}})
{
$node->{'nodeid'} = $nodeid;
# only initialise to the west if layer > 0
if ($currentLayer > 0 )
{
foreach my $westNodes (@{$network->{'layers'}->[$currentLayer -1]->{'nodes'}})
{
foreach my $connectionFromWest (@{$westNodes->{'connectedNodesEast'}->{'nodes'}})
{
if ($connectionFromWest eq $node)
{
my $weight = $network->calcweight;
push @{$node->{'connectedNodesWest'}->{'nodes'}},$westNodes;
push @{$node->{'connectedNodesWest'}->{'weights'}},$weight;
if (scalar @debug > 0)
{$network->dbug ("West to east Connection - ".$westNodes->{'nodeid'}." to ".$node->{'nodeid'},2);}
}
}
}
}
# Now initialise connections to the east (if not last layer)
if ($currentLayer < (scalar @layers)-1)
{
foreach my $eastNodes (@{$network->{'layers'}->[$currentLayer+1]->{'nodes'}})
{
if (!$network->{'randomconnections'} || $network->{'randomconnections'} > rand(1))
{
my $weight = $network->calcweight;
push @{$node->{'connectedNodesEast'}->{'nodes'}},$eastNodes;
push @{$node->{'connectedNodesEast'}->{'weights'}}, $weight;
if (scalar @debug > 0)
{$network->dbug ("East to west Connection ".$node->{'nodeid'}." to ".$eastNodes->{'nodeid'},2);}
}
}
}
$nodeid++;
}
$currentLayer++;
}
# add bias node to westerly connections
if ($network->{'bias'})
{
foreach my $layer (@{$network->{'layers'}})
{
foreach my $node (@{$layer->{'nodes'}})
{
push @{$node->{'connectedNodesWest'}->{'nodes'}},$network->{'biasnode'};
my $weight = $network->calcweight;
push @{$node->{'connectedNodesWest'}->{'weights'}},$weight;
if (scalar @debug > 0)
{$network->dbug ("West to east Connection - bias to ".$node->{'nodeid'}." weight = $weight",2);}
}
}
}
return 1; # return success if we get to here
}
###############################################################################
# sub $network->dbug
###############################################################################
sub dbug
{
my $network = shift;
my $message = shift;
my $level = shift;
my @DEBUGLEVELS;
# cover for debug calls before the network is created
if (!$network->{'debug'})
{
@DEBUGLEVELS=@DEBUG;
}
else
{
@DEBUGLEVELS = @{$network->{'debug'}};
}
# 0 is error so ALWAYS display
if (!(grep {$_ == 0} @DEBUGLEVELS)){push @DEBUGLEVELS,0}
foreach (@DEBUGLEVELS)
{
if ($level == $_)
{
print "$message\n";
}
}
}
###############################################################################
# AI::NNFlex::dump_state
###############################################################################
sub dump_state
{
my $network = shift;
my %params =@_;
my $filename = $params{'filename'};
my $activations = $params{'activations'};
open (OFILE,">$filename") or return "Can't create weights file $filename";
foreach my $layer (@{$network->{'layers'}})
{
foreach my $node (@{$layer->{'nodes'}})
{
if ($activations)
{
print OFILE $node->{'nodeid'}." activation = ".$node->{'activation'}."\n";
}
my $connectedNodeCounter=0;
foreach my $connectedNode (@{$node->{'connectedNodesEast'}->{'nodes'}})
{
my $weight = ${$node->{'connectedNodesEast'}->{'weights'}}[$connectedNodeCounter];
print OFILE $node->{'nodeid'}." <- ".$connectedNode->{'nodeid'}." = ".$weight."\n";
$connectedNodeCounter++;
}
if ($node->{'connectedNodesWest'})
{
my $connectedNodeCounter=0;
foreach my $connectedNode (@{$node->{'connectedNodesWest'}->{'nodes'}})
{
#FIXME - a more easily read format would be connectedNode first in the file
my $weight = ${$node->{'connectedNodesWest'}->{'weights'}}[$connectedNodeCounter];
print OFILE $node->{'nodeid'}." -> ".$connectedNode->{'nodeid'}." = ".$weight."\n";
$connectedNodeCounter++;
}
}
}
}
close OFILE;
}
###############################################################################
# sub load_state
###############################################################################
sub load_state
{
my $network = shift;
my %config = @_;
my $filename = $config{'filename'};
open (IFILE,$filename) or return "Error: unable to open $filename because $!";
# we have to build a map of nodeids to objects
my %nodeMap;
foreach my $layer (@{$network->{'layers'}})
{
foreach my $node (@{$layer->{'nodes'}})
{
$nodeMap{$node->{'nodeid'}} = $node;
}
}
# Add the bias node into the map
if ($network->{'bias'})
{
$nodeMap{'bias'} = $network->{'biasnode'};
}
my %stateFromFile;
while (<IFILE>)
{
chomp $_;
my ($activation,$nodeid,$destNode,$weight);
if ($_ =~ /(.*) activation = (.*)/)
{
$nodeid = $1;
$activation = $2;
$stateFromFile{$nodeid}->{'activation'} = $activation;
$network->dbug("Loading $nodeid = $activation",2);
}
elsif ($_ =~ /(.*) -> (.*) = (.*)/)
{
$nodeid = $1;
$destNode = $2;
$weight = $3;
$network->dbug("Loading $nodeid -> $destNode = $weight",2);
push @{$stateFromFile{$nodeid}->{'connectedNodesWest'}->{'weights'}},$weight;
push @{$stateFromFile{$nodeid}->{'connectedNodesWest'}->{'nodes'}},$nodeMap{$destNode};
}
elsif ($_ =~ /(.*) <- (.*) = (.*)/)
{
$nodeid = $1;
$destNode = $2;
$weight = $3;
push @{$stateFromFile{$nodeid}->{'connectedNodesEast'}->{'weights'}},$weight;
push @{$stateFromFile{$nodeid}->{'connectedNodesEast'}->{'nodes'}},$nodeMap{$destNode};
$network->dbug("Loading $nodeid <- $destNode = $weight",2);
}
}
close IFILE;
my $nodeCounter=1;
foreach my $layer (@{$network->{'layers'}})
{
foreach my $node (@{$layer->{'nodes'}})
{
$node->{'activation'} = $stateFromFile{$nodeCounter}->{'activation'};
$node->{'connectedNodesEast'} = $stateFromFile{$nodeCounter}->{'connectedNodesEast'};
$node->{'connectedNodesWest'} = $stateFromFile{$nodeCounter}->{'connectedNodesWest'};
$nodeCounter++;
}
}
return 1;
}
##############################################################################
# sub lesion
##############################################################################
sub lesion
{
my $network = shift;
my %params = @_;
my $return;
$network->dbug("Entered AI::NNFlex::lesion with %params",2);
my $nodeLesion = $params{'nodes'};
my $connectionLesion = $params{'connections'};
# go through the layers & node inactivating random nodes according
# to probability
foreach my $layer (@{$network->{'layers'}})
{
$return = $layer->lesion(%params);
}
return $return;
}
########################################################################
# AI::NNFlex::connect
########################################################################
#
# Joins layers or nodes together.
#
# takes fromlayer=>INDEX, tolayer=>INDEX or
# fromnode=>[LAYER,NODE],tonode=>[LAYER,NODE]
#
# returns success or failure
#
#
#########################################################################
sub connect
{
my $network = shift;
my %params = @_;
my $result = 0;
if ($params{'fromnode'})
{
$result = $network->connectnodes(%params);
}
elsif ($params{'fromlayer'})
{
$result = $network->connectlayers(%params);
}
return $result;
}
########################################################################
# AI::NNFlex::connectlayers
########################################################################
sub connectlayers
{
my $network=shift;
my %params = @_;
my $fromlayerindex = $params{'fromlayer'};
my $tolayerindex = $params{'tolayer'};
foreach my $node (@{$network->{'layers'}->[$tolayerindex]->{'nodes'}})
{
foreach my $connectedNode ( @{$network->{'layers'}->[$fromlayerindex]->{'nodes'}})
{
my $weight1 = $network->calcweight;
my $weight2 = $network->calcweight;
push @{$node->{'connectedNodesWest'}->{'nodes'}},$connectedNode;
push @{$connectedNode->{'connectedNodesEast'}->{'nodes'}},$node;
push @{$node->{'connectedNodesWest'}->{'weights'}},$weight1;
push @{$connectedNode->{'connectedNodesEast'}->{'weights'}},$weight2;
}
}
return 1;
}
##############################################################
# sub AI::NNFlex::connectnodes
##############################################################
sub connectnodes
{
my $network = shift;
my %params = @_;
$params{'tonode'} =~ s/\'//g;
$params{'fromnode'} =~ s/\'//g;
my @tonodeindex = split /,/,$params{'tonode'};
my @fromnodeindex = split /,/,$params{'fromnode'};
#make the connections
my $node = $network->{'layers'}->[$tonodeindex[0]]->{'nodes'}->[$tonodeindex[1]];
my $connectedNode = $network->{'layers'}->[$fromnodeindex[0]]->{'nodes'}->[$fromnodeindex[1]];
my $weight1 = $network->calcweight;
my $weight2 = $network->calcweight;
push @{$node->{'connectedNodesWest'}->{'nodes'}},$connectedNode;
push @{$connectedNode->{'connectedNodesEast'}->{'nodes'}},$node;
push @{$node->{'connectedNodesWest'}->{'weights'}},$weight1;
push @{$connectedNode->{'connectedNodesEast'}->{'weights'}},$weight2;
return 1;
}
##############################################################
# AI::NNFlex::calcweight
##############################################################
#
# calculate an initial weight appropriate for the network
# settings.
# takes no parameters, returns weight
##############################################################
sub calcweight
{
my $network= shift;
my $weight;
if ($network->{'fixedweights'})
{
$weight = $network->{'fixedweights'};
}
elsif ($network->{'randomweights'})
{
$weight = (rand(2*$network->{'randomweights'}))-$network->{'randomweights'};
}
else
{
$weight = (rand(2))-1;
}
return $weight;
}
###############################################################################
###############################################################################
# Package AI::NNFlex::layer
###############################################################################
###############################################################################
package AI::NNFlex::layer;
###############################################################################
# AI::NNFlex::layer::new
###############################################################################
sub new
{
my $class = shift;
my $params = shift;
my $layer ={};
foreach (keys %{$params})
{
$$layer{$_} = $$params{$_}
}
bless $layer,$class;
my $numNodes = $$params{'nodes'};
my @nodes;
for (1..$numNodes)
{
push @nodes, AI::NNFlex::node->new($params);
}
$$layer{'nodes'} = \@nodes;
AI::NNFlex::dbug($params,"Created layer $layer",2);
return $layer;
}
###############################################################################
# AI::NNFlex::layer::layer_output
##############################################################################
sub layer_output
{
my $layer = shift;
my $params = shift;
my @outputs;
foreach my $node (@{$$layer{'nodes'}})
{
push @outputs,$$node{'activation'};
}
return \@outputs;
}
##############################################################################
# sub lesion
##############################################################################
sub lesion
{
my $layer = shift;
my %params = @_;
my $return;
my $nodeLesion = $params{'nodes'};
my $connectionLesion = $params{'connections'};
# go through the layers & node inactivating random nodes according
# to probability
foreach my $node (@{$layer->{'nodes'}})
{
$return = $node->lesion(%params);
}
return $return;
}
###############################################################################
###############################################################################
# package AI::NNFlex::node
###############################################################################
###############################################################################
package AI::NNFlex::node;
###############################################################################
# AI::NNFlex::node::new
###############################################################################
sub new
{
my $class = shift;
my $params = shift;
my $node = {};
foreach (keys %{$params})
{
$$node{$_} = $$params{$_}
}
if ($$params{'randomactivation'})
{
$$node{'activation'} =
rand($$params{'random'});
AI::NNFlex::dbug($params,"Randomly activated at ".$$node{'activation'},2);
}
else
{
$$node{'activation'} = 0;
}
$$node{'active'} = 1;
$$node{'error'} = 0;
bless $node,$class;
AI::NNFlex::dbug($params,"Created node $node",2);
return $node;
}
##############################################################################
# sub lesion
##############################################################################
sub lesion
{
my $node = shift;
my %params = @_;
my $nodeLesion = $params{'nodes'};
my $connectionLesion = $params{'connections'};
# go through the layers & node inactivating random nodes according
# to probability
if ($nodeLesion)
{
my $probability = rand(1);
if ($probability < $nodeLesion)
{
$node->{'active'} = 0;
}
}
if ($connectionLesion)
{
# init works from west to east, so we should here too
my $nodeCounter=0;
foreach my $connectedNode (@{$node->{'connectedNodesEast'}->{'nodes'}})
{
my $probability = rand(1);
if ($probability < $connectionLesion)
{
my $reverseNodeCounter=0; # maybe should have done this differently in init, but too late now!
${$node->{'connectedNodesEast'}->{'nodes'}}[$nodeCounter] = undef;
foreach my $reverseConnection (@{$connectedNode->{'connectedNodesWest'}->{'nodes'}})
{
if ($reverseConnection == $node)
{
# sever the matching west-side reference on the connected node
${$connectedNode->{'connectedNodesWest'}->{'nodes'}}[$reverseNodeCounter] = undef;
}
$reverseNodeCounter++;
}
}
$nodeCounter++;
}
}
return 1;
}
1;
=pod
=head1 NAME
AI::NNFlex - A base class for implementing neural networks
=head1 SYNOPSIS
use AI::NNFlex;
my $network = AI::NNFlex->new(config parameter=>value);
$network->add_layer( nodes=>x,
activationfunction=>'function');
$network->init();
$network->lesion( nodes=>PROBABILITY,
connections=>PROBABILITY);
$network->dump_state (filename=>'badgers.wts');
$network->load_state (filename=>'badgers.wts');
my $outputsRef = $network->output(layer=>2,round=>1);
=head1 DESCRIPTION
AI::NNFlex is a base class for constructing your own neural network modules. To implement a neural network, start with the documentation for AI::NNFlex::Backprop, included in this distribution
=head1 CONSTRUCTOR
=head2 AI::NNFlex->new ( parameter => value );
randomweights=>MAXIMUM VALUE FOR INITIAL WEIGHT
fixedweights=>WEIGHT TO USE FOR ALL CONNECTIONS
debug=>[LIST OF CODES FOR MODULES TO DEBUG]
round=>0 or 1, a true value sets the network to round output values to nearest of 1, -1 or 0
The constructor implements a fairly generalised network object with a number of parameters.
The following parameters are optional:
randomweights
fixedweights
debug
round
Note: if randomweights is not specified, the network will default to random starting weights between -1 and 1 (see calcweight).
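A minimal concrete constructor call, with illustrative values:
my $network = AI::NNFlex->new( randomweights=>1,
debug=>[],
round=>1);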
=head1 METHODS
This is a short list of the main methods implemented in AI::NNFlex.
=head2 AI::NNFlex
=head3 add_layer
Syntax:
$network->add_layer( nodes=>NUMBER OF NODES IN LAYER,
persistentactivation=>RETAIN ACTIVATION BETWEEN PASSES,
decay=>RATE OF ACTIVATION DECAY PER PASS,
randomactivation=>MAXIMUM STARTING ACTIVATION,
threshold=>NYI,
activationfunction=>"ACTIVATION FUNCTION",
randomweights=>MAX VALUE OF STARTING WEIGHTS);
Add layer adds whatever parameters you specify as attributes of the layer, so if you want to implement additional parameters simply use them in your calling code.
Add layer returns success or failure, and if successful adds a layer object to the $network->{'layers'} array. This layer object contains an attribute $layer->{'nodes'}, which is an array of nodes in the layer.
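Since unrecognised keys are simply stored on the layer object, a made-up attribute (here the hypothetical 'mylabel') travels along for free:
$network->add_layer( nodes=>3,
activationfunction=>"tanh",
mylabel=>"hidden-1");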
=head3 init
Syntax:
$network->init();
Initialises connections between nodes, sets initial weights. The base AI::NNFlex init method implements connections backwards and forwards from each node in each layer to each node in the preceding and following layers.
init adds the following attributes to each node:
=over
=item *
{'connectedNodesWest'}->{'nodes'} - an array of node objects connected to this node on the west/left
{'connectedNodesEast'}->{'weights'} - an array of scalar numeric weights for the connections to these nodes
=back
The connections to easterly nodes are not used in feedforward networks.
Init also implements the Bias node if specified in the network config.
=head3 connect
Syntax:
$network->connect(fromlayer=>1,tolayer=>0);
$network->connect(fromnode=>'1,1',tonode=>'0,0');
Connect allows you to manually create connections between layers or nodes, including recurrent connections back to the same layer/node. Node indices must be LAYER,NODE, numbered from 0.
Weight assignments for the connection are calculated based on the network wide weight policy (see INIT).
=head3 lesion
$network->lesion (nodes=>PROBABILITY,connections=>PROBABILITY)
Damages the network.
B<PROBABILITY>
A value between 0 and 1, denoting the probability of a given node or connection being damaged.
Note: this method may be called on a per network, per node or per layer basis using the appropriate object.
=head1 EXAMPLES
See the code in ./examples. For any given version of NNFlex, xor.pl will contain the latest functionality.
Dr Martin Le Voi, for help with concepts of NN in the early stages
Dr David Plaut, for help with the project that this code was originally intended for.
Graciliano M.Passos for suggestions & improved code (see SEE ALSO).
Dr Scott Fahlman, whose very readable paper 'An empirical study of learning speed in backpropagation networks' (1988) has driven many of the improvements made so far.
=head1 SEE ALSO
AI::NNFlex::Backprop
AI::NNFlex::Feedforward
AI::NNFlex::Mathlib
AI::NNFlex::Dataset
AI::NNEasy - Developed by Graciliano M.Passos
(Shares some common code with NNFlex)
=head1 TODO
Lots of things:
clean up the perldocs some more
write gamma modules
write BPTT modules
write a perceptron learning module
speed it up
write a tk gui
=head1 CHANGES
v0.11 introduces the lesion method, png support in the draw module and datasets.
v0.12 fixes a bug in reinforce.pm & adds a reflector in feedforward->run to make $network->run($dataset) work.
v0.13 introduces the momentum learning algorithm and fixes a bug that allowed training to proceed even if the node activation function module can't be loaded
v0.14 fixes momentum and backprop so they are no longer nailed to tanh hidden units only.
v0.23 includes a Hopfield module in the distribution.
v0.24 fixes a bug in the bias weight calculations
=head1 COPYRIGHT
Copyright (c) 2004-2005 Charles Colbourn. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself.
=head1 CONTACT
charlesc@nnflex.g0n.net
=cut
lib/AI/NNFlex/Backprop.pm
package AI::NNFlex::Backprop;
use AI::NNFlex;
use AI::NNFlex::Feedforward;
use base qw(AI::NNFlex::Feedforward AI::NNFlex);
use strict;
sub calc_error
{
my $network = shift;
my $outputPatternRef = shift;
my @outputPattern = @$outputPatternRef;
my @debug = @{$network->{'debug'}};
if (scalar @debug > 0)
{$network->dbug ("Output pattern @outputPattern received by Backprop",4);}
my $outputLayer = $network->{'layers'}->[-1]->{'nodes'};
if (scalar @$outputLayer != scalar @outputPattern)
{
$network->dbug ("Wrong number of output values, net has ".scalar @$outputLayer." nodes",0);
return 0;
}
# Now calculate the error
my $counter=0;
foreach (@$outputLayer)
{
my $value = $_->{'activation'} - $outputPattern[$counter];
if ($_->{'errorfunction'})
{
my $errorfunction = $_->{'errorfunction'};
$value = $network->$errorfunction($value);
}
$_->{'error'} = $value;
$counter++;
if (scalar @debug > 0)
{$network->dbug ("Error on output node $_ = ".$_->{'error'},4);}
}
}
########################################################
# AI::NNFlex::Backprop::learn
########################################################
sub learn
{
my $network = shift;
my $outputPatternRef = shift;
# if this is an incorrect dataset call translate it
if ($outputPatternRef =~/Dataset/)
{
return ($outputPatternRef->learn($network))
}
# Set a default value on the Fahlman constant, without
# clobbering an explicit fahlmanconstant=>0
if (!defined $network->{'fahlmanconstant'})
{
$network->{'fahlmanconstant'} = 0.1;
}
my @outputPattern = @$outputPatternRef;
$network->calc_error($outputPatternRef);
#calculate & apply dWs
$network->hiddenToOutput;
if (scalar @{$network->{'layers'}} > 2)
{
$network->hiddenOrInputToHidden;
}
# calculate network sqErr
my $Err = $network->RMSErr($outputPatternRef);
return $Err;
}
#########################################################
# AI::NNFlex::Backprop::hiddenToOutput
#########################################################
sub hiddenToOutput
{
my $network = shift;
my @debug = @{$network->{'debug'}};
my $outputLayer = $network->{'layers'}->[-1]->{'nodes'};
foreach my $node (@$outputLayer)
{
my $connectedNodeCounter=0;
foreach my $connectedNode (@{$node->{'connectedNodesWest'}->{'nodes'}})
{
my $momentum = 0;
if ($network->{'momentum'})
{
if ($node->{'connectedNodesWest'}->{'lastdelta'}->[$connectedNodeCounter])
{
$momentum = ($network->{'momentum'})*($node->{'connectedNodesWest'}->{'lastdelta'}->[$connectedNodeCounter]);
}
}
if (scalar @debug > 0)
{$network->dbug("Learning rate is ".$network->{'learningrate'},4);}
my $deltaW = (($network->{'learningrate'}) * ($node->{'error'}) * ($connectedNode->{'activation'}));
$deltaW = $deltaW+$momentum;
$node->{'connectedNodesWest'}->{'lastdelta'}->[$connectedNodeCounter] = $deltaW;
if (scalar @debug > 0)
{$network->dbug("Applying delta $deltaW on hiddenToOutput $connectedNode to $node",4);}
#
$node->{'connectedNodesWest'}->{'weights'}->[$connectedNodeCounter] -= $deltaW;
$connectedNodeCounter++;
}
}
}
######################################################
# AI::NNFlex::Backprop::hiddenOrInputToHidden
######################################################
sub hiddenOrInputToHidden
{
my $network = shift;
my @layers = @{$network->{'layers'}};
my @debug = @{$network->{'debug'}};
# remove the last element (The output layer) from the stack
# because we've already calculated dW on that
pop @layers;
if (scalar @debug > 0)
{$network->dbug("Starting Backprop of error on ".scalar @layers." hidden layers",4);}
foreach my $layer (reverse @layers)
{
foreach my $node (@{$layer->{'nodes'}})
{
my $connectedNodeCounter=0;
if (!$node->{'connectedNodesWest'}) {last}
# the weight from this node to an eastern node is indexed by this
# node's position within its own layer in that node's west list
my $nodeIndex=0;
foreach my $layerNode (@{$layer->{'nodes'}})
{
if ($layerNode eq $node){last}
$nodeIndex++;
}
my $nodeError;
foreach my $connectedNode (@{$node->{'connectedNodesEast'}->{'nodes'}})
{
$nodeError += ($connectedNode->{'error'}) * ($connectedNode->{'connectedNodesWest'}->{'weights'}->[$nodeIndex]);
}
if (scalar @debug > 0)
{$network->dbug("Hidden node $node error = $nodeError",4);}
# Apply error function
if ($node->{'errorfunction'})
{
my $functioncall = $node->{'errorfunction'};
$nodeError = $network->$functioncall($nodeError);
}
$node->{'error'} = $nodeError;
# update the weights from nodes inputting to here
$connectedNodeCounter=0;
foreach my $westNodes (@{$node->{'connectedNodesWest'}->{'nodes'}})
{
my $momentum = 0;
if ($network->{'momentum'})
{
if($node->{'connectedNodesWest'}->{'lastdelta'}->{$westNodes})
{
$momentum = ($network->{'momentum'})*($node->{'connectedNodesWest'}->{'lastdelta'}->{$westNodes});
}
}
# get the slope from the activation function component
my $value = $node->{'activation'};
my $functionSlope = $node->{'activationfunction'}."_slope";
$value = $network->$functionSlope($value);
# Add the Fahlman constant
$value += $network->{'fahlmanconstant'};
$value = $value * $node->{'error'} * $network->{'learningrate'} * $westNodes->{'activation'};
my $dW = $value;
$dW = $dW + $momentum;
if (scalar @debug > 0)
{$network->dbug("Applying deltaW $dW to inputToHidden connection from $westNodes to $node",4);}
$node->{'connectedNodesWest'}->{'lastdelta'}->{$westNodes} = $dW;
$node->{'connectedNodesWest'}->{'weights'}->[$connectedNodeCounter] -= $dW;
if (scalar @debug > 0)
{$network->dbug("Weight now ".$node->{'connectedNodesWest'}->{'weights'}->[$connectedNodeCounter],4);}
$connectedNodeCounter++;
}
}
}
}
#########################################################
# AI::NNFlex::Backprop::RMSErr
#########################################################
sub RMSErr
{
my $network = shift;
my $outputPatternRef = shift;
my @outputPattern = @$outputPatternRef;
my @debug = @{$network->{'debug'}};
my $sqrErr;
my $outputLayer = $network->{'layers'}->[-1]->{'nodes'};
if (scalar @$outputLayer != scalar @outputPattern)
{
$network->dbug("Wrong number of output values, net has ".scalar @$outputLayer." nodes",0);
return 0;
}
# Now calculate the error
my $counter=0;
foreach (@$outputLayer)
{
my $value = $_->{'activation'} - $outputPattern[$counter];
$sqrErr += $value *$value;
$counter++;
if (scalar @debug > 0)
{$network->dbug("Error on output node $_ = ".$_->{'error'},4);}
}
my $error = sqrt($sqrErr);
return $error;
}
1;
=pod
=head1 NAME
AI::NNFlex::Backprop - a fast, pure perl backprop Neural Net simulator
=head1 SYNOPSIS
use AI::NNFlex::Backprop;
my $network = AI::NNFlex::Backprop->new(config parameter=>value);
$network->add_layer(nodes=>x,activationfunction=>'function');
$network->init();
use AI::NNFlex::Dataset;
my $dataset = AI::NNFlex::Dataset->new([
[INPUTARRAY],[TARGETOUTPUT],
[INPUTARRAY],[TARGETOUTPUT]]);
my $sqrError = 10;
while ($sqrError >0.01)
{
$sqrError = $dataset->learn($network);
}
$network->lesion({'nodes'=>PROBABILITY,'connections'=>PROBABILITY});
$network->dump_state(filename=>'badgers.wts');
$network->load_state(filename=>'badgers.wts');
my $outputsRef = $dataset->run($network);
my $outputsRef = $network->output(layer=>2,round=>1);
=head1 DESCRIPTION
AI::NNFlex::Backprop is a class to generate feedforward, backpropagation neural nets. It inherits various constructs from AI::NNFlex & AI::NNFlex::Feedforward, but is documented here as a standalone.
The code should be simple enough to use for teaching purposes, but a simpler implementation of a simple backprop network is included in the example file bp.pl. This is derived from Phil Brierley's freely available Java code at www.philbrierley.com.
AI::NNFlex::Backprop leans towards teaching NN and cognitive modelling applications. Future modules are likely to include more biologically plausible nets like DeVries & Principe's Gamma model.
Full documentation for AI::NNFlex::Dataset can be found in the module's own perldoc. It's documented here for convenience only.
=head1 CONSTRUCTOR
=head2 AI::NNFlex::Backprop->new( parameter => value );
Parameters:
randomweights=>MAXIMUM VALUE FOR INITIAL WEIGHT
fixedweights=>WEIGHT TO USE FOR ALL CONNECTIONS
debug=>[LIST OF CODES FOR MODULES TO DEBUG]
learningrate=>the learning rate of the network
momentum=>the momentum value (momentum learning only)
round=>0 or 1 - 1 sets the network to round output values to
nearest of 1, -1 or 0
fahlmanconstant=>0.1
The following parameters are optional:
randomweights
fixedweights
debug
round
momentum
fahlmanconstant
If randomweights is not specified the network will default to random starting weights between -1 and 1.
If momentum is not specified the network will default to vanilla (non momentum) backprop.
The Fahlman constant modifies the slope of the error curve. 0.1 is the standard value, and speeds the network up immensely. If no Fahlman constant is set, the network will default to 0.1.
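Concretely, in the weight update code (see hiddenOrInputToHidden above) the constant is simply added to the derivative of the activation function before the delta is computed, roughly:
# slope of the activation function, nudged by the Fahlman constant
my $slope = $network->tanh_slope($activation) + $network->{'fahlmanconstant'};
(tanh_slope here stands in for whichever ACTIVATIONFUNCTION_slope method the layer actually uses.)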
=head2 AI::NNFlex::Dataset
new ( [[INPUT VALUES],[OUTPUT VALUES],
[INPUT VALUES],[OUTPUT VALUES],..])
=head2 INPUT VALUES
These should be comma separated values. They can be applied to the network with ::run or ::learn
=head2 OUTPUT VALUES
These are the intended or target output values. Comma separated. These will be used by ::learn
=head1 METHODS
This is a short list of the main methods implemented in AI::NNFlex::Backprop.
=head2 AI::NNFlex::Backprop
=head2 add_layer
Syntax:
$network->add_layer( nodes=>NUMBER OF NODES IN LAYER,
persistentactivation=>RETAIN ACTIVATION BETWEEN PASSES,
decay=>RATE OF ACTIVATION DECAY PER PASS,
randomactivation=>MAXIMUM STARTING ACTIVATION,
threshold=>NYI,
activationfunction=>"ACTIVATION FUNCTION",
errorfunction=>'ERROR TRANSFORMATION FUNCTION',
randomweights=>MAX VALUE OF STARTING WEIGHTS);
The activation function must be defined in AI::NNFlex::Mathlib. Valid predefined activation functions are tanh & linear.
The error transformation function defines a transform that is done on the error value. It must be a valid function in AI::NNFlex::Mathlib. Using a non-linear transformation function on the error value can sometimes speed up training.
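examples/test.pl in this distribution uses exactly this on its hidden layer:
$network->add_layer( nodes=>8,
errorfunction=>'atanh',
activationfunction=>"tanh");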
The following parameters are optional:
persistentactivation
decay
randomactivation
threshold
errorfunction
randomweights
=head2 init
Syntax:
$network->init();
Initialises connections between nodes, sets initial weights and loads external components. Implements connections backwards and forwards from each node in each layer to each node in the preceding and following layers, and initialises weights values ...
=head2 lesion
$network->lesion ({'nodes'=>PROBABILITY,'connections'=>PROBABILITY})
Damages the network.
B<PROBABILITY>
A value between 0 and 1, denoting the probability of a given node or connection being damaged.
Note: this method may be called on a per network, per node or per layer basis using the appropriate object.
=head2 AI::NNFlex::Dataset
=head2 learn
$dataset->learn($network)
'Teaches' the network the dataset using the network's defined learning algorithm. Returns sqrError.
=head2 run
$dataset->run($network)
Runs the dataset through the network and returns a reference to an array of output patterns.
=head1 EXAMPLES
See the code in ./examples. For any given version of NNFlex, xor.pl will contain the latest functionality.
=head1 PREREQs
Dr Martin Le Voi, for help with concepts of NN in the early stages
Dr David Plaut, for help with the project that this code was originally intended for.
Graciliano M.Passos for suggestions & improved code (see SEE ALSO).
Dr Scott Fahlman, whose very readable paper 'An empirical study of learning speed in backpropagation networks' (1988) has driven many of the improvements made so far.
=head1 SEE ALSO
AI::NNFlex
AI::NNEasy - Developed by Graciliano M.Passos
Shares some common code with NNFlex.
=head1 TODO
=head1 CHANGES
=head1 COPYRIGHT
Copyright (c) 2004-2005 Charles Colbourn. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself.
=head1 CONTACT
charlesc@nnflex.g0n.net
=cut
lib/AI/NNFlex/Dataset.pm
#
use strict;
package AI::NNFlex::Dataset;
###########################################################
# AI::NNFlex::Dataset::new
###########################################################
sub new
{
my $class = shift;
my $params = shift;
my $dataset;
if ($class =~ /HASH/)
{
$dataset = $class;
$dataset->{'data'} = $params;
return 1;
}
my %attributes;
$attributes{'data'} = $params;
$dataset = \%attributes;
bless $dataset,$class;
return $dataset;
}
###########################################################
# AI::NNFlex::Dataset::run
###########################################################
sub run
{
my $self = shift;
my $network = shift;
my @outputs;
my $counter=0;
for (my $itemCounter=0;$itemCounter<(scalar @{$self->{'data'}});$itemCounter +=2)
{
$network->run(@{$self->{'data'}}[$itemCounter]);
$outputs[$counter] = $network->output();
$counter++;
}
return \@outputs;
}
###############################################################
# AI::NNFlex::Dataset::learn
###############################################################
sub learn
{
my $self = shift;
my $network = shift;
my $error;
for (my $itemCounter=0;$itemCounter<(scalar @{$self->{'data'}});$itemCounter +=2)
{
$network->run(@{$self->{'data'}}[$itemCounter]);
$error += $network->learn(@{$self->{'data'}}[$itemCounter+1]);
}
$error = $error*$error;
return $error;
}
#################################################################
# AI::NNFlex::Dataset::save
#################################################################
# save a dataset in an snns .pat file
#################################################################
sub save
{
my $dataset = shift;
my %config = @_;
open (OFILE,">".$config{'filename'}) or return "File error $!";
print OFILE "No. of patterns : ".((scalar @{$dataset->{'data'}})/2)."\n";
print OFILE "No. of input units : ".(scalar @{$dataset->{'data'}->[0]})."\n";
print OFILE "No. of output units : ".(scalar @{$dataset->{'data'}->[1]})."\n\n";
my $counter = 1;
my @values = @{$dataset->{'data'}};
while (@values)
{
print OFILE "# Input pattern $counter:\n";
my $input = shift (@values);
my @array = join " ",@$input;
print OFILE @array;
print OFILE "\n";
print OFILE "# Output pattern $counter:\n";
my $output = shift(@values);
@array = join " ",@$output;
print OFILE @array;
print OFILE "\n";
$counter++;
}
close OFILE;
return 1;
}
#############################################################
# AI::NNFlex::Dataset::load
#############################################################
sub load
{
my $dataset = shift;
my %params = @_;
my @data;
my $filename = $params{'filename'};
if (!$filename)
{
return "No filename specified";
}
open (IFILE,"$filename") or return "Unable to load $filename - $!";
my %config;
# snns pat files have a 3 line header, defining number of patterns &
# number of input and output units
my $counter =0;
while ($counter <3)
{
my $line = <IFILE>;
if ($line =~/^\n/ || $line =~/^#/){next}
my ($tag,$value) = split/:/,$line;
$tag=lc($tag);
$tag =~s/ //g;
$config{lc($tag)} = $value;
$counter++;
}
my $filecontent;
while (<IFILE>)
{
if($_ =~ /^#/ || $_ =~ /^\n/){next}
$filecontent .= $_;
}
my @individualvals = split /\s+/s,$filecontent;
for (my $offset=0;$offset<(scalar @individualvals);$offset+=($config{'no.ofinputunits'} + $config{'no.ofoutputunits'}))
{
my @input=@individualvals[$offset..($offset+$config{'no.ofinputunits'}-1)];
push @data,\@input;
if ($config{'no.ofoutputunits'} > 0)
{
my @output=@individualvals[($offset+$config{'no.ofinputunits'})..($offset+$config{'no.ofinputunits'}+$config{'no.ofoutputunits'}-1)];
push @data,\@output;
}
}
$dataset->new(\@data);
return 1;
}
##########################################################
# AI::NNFlex::Dataset::add
##########################################################
# add an input/output pair to the dataset
##########################################################
sub add
{
my $dataset= shift;
my $params = shift;
if (!$params){return "Nothing to add"};
if ($params !~/ARRAY/){return "Need a reference to an array"}
# support adding single patterns (for Hopfield type nets)
if ($$params[0] !~ /ARRAY/)
{
push @{$dataset->{'data'}},$params;
}
else
{
push @{$dataset->{'data'}},$$params[0];
push @{$dataset->{'data'}},$$params[1];
}
return 1;
}
##################################################################
# AI::NNFlex::Dataset::delete
##################################################################
# delete an item from the dataset by index
##################################################################
sub delete
{
my $dataset = shift;
my $index = shift;
my @indexarray;
if (!defined $index){return 0}
if ($index =~ /ARRAY/)
{
@indexarray = @$index;
}
else
{
$indexarray[0] = $index;
}
# copy every item whose index is not marked for deletion
my %toDelete = map {$_ => 1} @indexarray;
my @newarray;
my $counter=0;
foreach my $item (@{$dataset->{'data'}})
{
push @newarray,$item unless $toDelete{$counter};
$counter++;
}
$dataset->{'data'} = \@newarray;
return 1;
}
1;
=pod
=head1 NAME
AI::NNFlex::Dataset - support for creating/loading/saving datasets for NNFlex nets
=head1 SYNOPSIS
use AI::NNFlex::Dataset;
my $dataset = AI::NNFlex::Dataset->new([[0,1,1,0],[0,0,1,1]]);
$dataset->add([[0,1,0,1],[1,1,0,0]]);
$dataset->add([0,1,0,0]);
$dataset->save(filename=>'test.pat');
$dataset->load(filename=>'test.pat');
=head1 DESCRIPTION
This module allows you to construct, load, save and maintain datasets for use with neural nets implemented using the AI::NNFlex classes. The dataset consists of an array of references to arrays of data. Items may be added in pairs (useful for feedfor...
=head1 CONSTRUCTOR
=head2 AI::NNFlex::Dataset->new([[INPUT],[TARGET]]);
Parameters:
The return value is an AI::NNFlex::Dataset object.
=head1 METHODS
This is a short list of the main methods implemented in AI::NNFlex::Dataset
=head2 add
Syntax:
$dataset->add([[INPUT],[OUTPUT]]);
or
$dataset->add([VALUE]);
This method adds new values to the end of the dataset. You can specify the values as pairs or individually.
=head2 load
Syntax:
$dataset->load(filename=>'filename.pat');
Loads an SNNS type .pat file into a blank dataset. If called on an existing dataset IT WILL OVERWRITE IT!
=head2 save
$dataset->save(filename=>'filename.pat');
Save the existing dataset as an SNNS .pat file. If the file already exists it will be overwritten.
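Going by the save code above, the xor.pl dataset would come out along these lines:
No. of patterns : 4
No. of input units : 2
No. of output units : 1
# Input pattern 1:
0 0
# Output pattern 1:
0
# Input pattern 2:
0 1
# Output pattern 2:
1
and so on for the remaining patterns.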
=head2 delete
$dataset->delete(INDEX);
or
$dataset->delete([ARRAY OF INDICES]);
Deletes 1 or more items from the dataset by their index (counting from 0). Note that if you are using pairs of values (in a backprop net for example) you MUST delete in pairs - otherwise you will delete only the input/target, and the indices will be ...
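For instance, to drop the second input/target pair of a backprop dataset (data items 2 and 3, counting from 0):
$dataset->delete([2,3]);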
=head1 EXAMPLES
See the code in ./examples.
=head1 PREREQs
None.
=head1 SEE ALSO
AI::NNFlex
=head1 TODO
Method to validate linear separability of a dataset.
=head1 CHANGES
=head1 COPYRIGHT
Copyright (c) 2004-2005 Charles Colbourn. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself.
=head1 CONTACT
charlesc@nnflex.g0n.net
=cut
lib/AI/NNFlex/Feedforward.pm view on Meta::CPAN
#This class is internal to the NNFlex package, and is included
#in the NNFlex namespace by a require on the networktype parameter.
#
#syntax:
# $network->run([0,1,1,1,0,1,1]);
#
#
###########################################################
sub run
{
my $network = shift;
my $inputPatternRef = shift;
# if run was called with a Dataset object instead of an input
# arrayref (i.e. $network->run($dataset)), hand off to the dataset
if ($inputPatternRef =~/Dataset/)
{
return ($inputPatternRef->run($network))
}
my @inputPattern = @$inputPatternRef;
my @debug = @{$network->{'debug'}};
if (scalar @debug> 0)
{$network->dbug ("Input pattern @inputPattern received by Feedforward",3);}
# First of all apply the activation pattern to the input units (checking
# that the pattern has the right number of values)
my $inputLayer = $network->{'layers'}->[0]->{'nodes'};
if (scalar @$inputLayer != scalar @inputPattern)
{
$network->dbug("Wrong number of input values",0);
return 0;
}
# Now apply the activation
my $counter=0;
foreach (@$inputLayer)
{
if ($_->{'active'})
{
if ($_->{'persistentactivation'})
{
$_->{'activation'} +=$inputPattern[$counter];
if (scalar @debug> 0)
{$network->dbug("Applying ".$inputPattern[$counter]." to $_",3);}
}
else
{
$_->{'activation'} =$inputPattern[$counter];
if (scalar @debug> 0)
{$network->dbug("Applying ".$inputPattern[$counter]." to $_",3);}
}
}
$counter++;
}
# Now flow activation through the network starting with the second layer
foreach my $layer (@{$network->{'layers'}})
{
if ($layer eq $network->{'layers'}->[0]){next}
foreach my $node (@{$layer->{'nodes'}})
{
my $totalActivation=0;
# Set the node to 0 if not persistent
if (!($node->{'persistentactivation'}))
{
$node->{'activation'} =0;
}
# Decay the node (note that if decay is not set this
# will have no effect, hence no if).
$node->{'activation'} -= $node->{'decay'};
my $nodeCounter=0;
foreach my $connectedNode (@{$node->{'connectedNodesWest'}->{'nodes'}})
{
if (scalar @debug> 0)
{$network->dbug("Flowing from ".$connectedNode->{'nodeid'}." to ".$node->{'nodeid'},3);}
my $weight = ${$node->{'connectedNodesWest'}->{'weights'}}[$nodeCounter];
my $activation = $connectedNode->{'activation'};
if (scalar @debug> 0)
{$network->dbug("Weight & activation: $weight - $activation",3);}
$totalActivation += $weight*$activation;
$nodeCounter++;
}
if ($node->{'active'})
{
my $value = $totalActivation;
my $function = $node->{'activationfunction'};
$value = $network->$function($value);
$node->{'activation'} = $value;
}
if (scalar @debug> 0)
{$network->dbug("Final activation of ".$node->{'nodeid'}." = ".$node->{'activation'},3);}
}
}
return $network->output;
}
1;
=pod
=head1 NAME
AI::NNFlex::Feedforward - methods for feedforward neural networks
=head1 SYNOPSIS
use AI::NNFlex::Feedforward;
$network->run([array of inputs]);
=head1 DESCRIPTION
AI::NNFlex::Feedforward provides a run method to flow activation through an NNFlex network in west to east feedforward style.
=head1 CONSTRUCTOR
None
=head1 METHODS
=head2 AI::NNFlex::Feedforward::run
Takes a reference to an array of input values. Activation is applied to the input layer and flowed through to the output layer; the return value is the network output (a reference to an array of output values), or 0 if the input pattern does not match the number of input nodes.
=head1 SEE ALSO
AI::NNFlex
AI::NNFlex::Backprop
AI::NNFlex::Dataset
=head1 CHANGES
=head1 COPYRIGHT
Copyright (c) 2004-2005 Charles Colbourn. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself.
=head1 CONTACT
charlesc@nnflex.g0n.net
=cut
lib/AI/NNFlex/Hopfield.pm view on Meta::CPAN
# node to every other node, rather than being
# arranged in distinct layers like a feedforward
# network. We can retain the layer architecture to
# give us blocks of nodes, but need to overload init
# to perform full connections
#
#####################################################
sub init
{
my $network = shift;
my @nodes;
# Get a list of all the nodes in the network
foreach my $layer (@{$network->{'layers'}})
{
foreach my $node (@{$layer->{'nodes'}})
{
# cover the assumption that some inherited code
# will require an activation function
if (!$node->{'activationfunction'})
{
$node->{'activationfunction'}= 'hopfield_threshold';
$node->{'activation'} =0;
$node->{'lastactivation'} = 0;
}
push @nodes,$node;
}
}
# we'll probably need this later
$network->{'nodes'} = \@nodes;
foreach my $node (@nodes)
{
my @connectedNodes;
foreach my $connectedNode (@nodes)
{
push @connectedNodes,$connectedNode;
}
my @weights;
$node->{'connectednodes'}->{'nodes'} = \@connectedNodes;
# one weight per connected node plus a trailing bias weight -
# run() reads the bias from weights->[-1], and learn() writes
# the threshold into that same trailing position
for (0..(scalar @nodes))
{
push @weights,$network->calcweight();
}
$node->{'connectednodes'}->{'weights'} = \@weights;
}
return 1;
}
##########################################################
# AI::NNFlex::Hopfield::run
##########################################################
# apply activation patterns & calculate activation
# through the network
##########################################################
sub run
{
my $network = shift;
my $inputPatternRef = shift;
my @inputpattern = @$inputPatternRef;
if (scalar @inputpattern != scalar @{$network->{'nodes'}})
{
return "Error: input pattern does not match number of nodes"
}
# apply the pattern to the network
my $counter=0;
foreach my $node (@{$network->{'nodes'}})
{
$node->{'activation'} = $inputpattern[$counter];
$counter++;
}
# Now update the network with activation flow. Nodes are updated
# in sequence, so later nodes already see the new activations of
# earlier nodes (asynchronous update)
foreach my $node (@{$network->{'nodes'}})
{
$node->{'activation'}=0;
my $counter=0;
foreach my $connectedNode (@{$node->{'connectednodes'}->{'nodes'}})
{
# hopfield nodes don't have recursive connections
unless ($node == $connectedNode)
{
$node->{'activation'} += $connectedNode->{'activation'} * $node->{'connectednodes'}->{'weights'}->[$counter];
}
$counter++;
}
# bias
$node->{'activation'} += 1 * $node->{'connectednodes'}->{'weights'}->[-1];
my $activationfunction = $node->{'activationfunction'};
$node->{'activation'} = $network->$activationfunction($node->{'activation'});
}
return $network->output;
}
#######################################################
# AI::NNFlex::Hopfield::output
#######################################################
# This needs to be overloaded, because the default
# nnflex output method returns only the rightmost layer
#######################################################
sub output
{
my $network = shift;
my @array;
foreach my $node (@{$network->{'nodes'}})
{
unshift @array,$node->{'activation'};
}
return \@array;
}
########################################################
# AI::NNFlex::Hopfield::learn
########################################################
sub learn
{
my $network = shift;
my $dataset = shift;
# calculate the weights in a single Hebbian step:
# W = X'X - pI, where X is the (patterns x nodes) matrix and p
# is the number of patterns; subtracting pI zeroes the diagonal
# so no node connects to itself
# turn the dataset into a matrix
my @matrix;
foreach (@{$dataset->{'data'}})
{
push @matrix,$_;
}
my $patternmatrix = Math::Matrix->new(@matrix);
my $inversepattern = $patternmatrix->transpose; # despite the name, this is the transpose X'
my @minusmatrix;
for (my $rows=0;$rows <(scalar @{$network->{'nodes'}});$rows++)
{
my @temparray;
for (my $cols=0;$cols <(scalar @{$network->{'nodes'}});$cols++)
{
if ($rows == $cols)
{
my $numpats = scalar @{$dataset->{'data'}};
push @temparray,$numpats;
}
else
{
push @temparray,0;
}
}
push @minusmatrix,\@temparray;
}
my $minus = Math::Matrix->new(@minusmatrix);
my $product = $inversepattern->multiply($patternmatrix);
my $weights = $product->subtract($minus);
# the node thresholds (bias weights) are the column sums of the
# pattern matrix: multiply by a 1 x p row vector of ones
my @truearray;
for (1..scalar @{$dataset->{'data'}}){push @truearray,"1"}
my $truematrix = Math::Matrix->new(\@truearray);
my $thresholds = $truematrix->multiply($patternmatrix);
my $counter=0;
foreach (@{$network->{'nodes'}})
{
my @slice;
foreach (@{$weights->slice($counter)})
{
push @slice,$$_[0];
}
push @slice,${$thresholds->slice($counter)}[0][0];
$_->{'connectednodes'}->{'weights'} = \@slice;
$counter++;
}
return 1;
}
1;
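As a worked sketch of the calculation in learn() above (the pattern values are illustrative): for p patterns over n nodes the weights are W = X'X - pI, which zeroes the diagonal so that no node feeds back to itself:
use Math::Matrix;
# two 4-node patterns, one per row (p=2, n=4)
my $X = Math::Matrix->new([1,-1,1,-1],[-1,1,-1,1]);
my $pI = Math::Matrix->new([2,0,0,0],[0,2,0,0],[0,0,2,0],[0,0,0,2]);
my $W = $X->transpose->multiply($X)->subtract($pI);
$W->print("W =\n"); # symmetric, with a zero diagonal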
=pod
=head1 NAME
AI::NNFlex::Hopfield - a fast, pure perl Hopfield network simulator
=head1 SYNOPSIS
use AI::NNFlex::Hopfield;
my $network = AI::NNFlex::Hopfield->new(config parameter=>value);
$network->add_layer(nodes=>x);
$network->init();
use AI::NNFlex::Dataset;
my $dataset = AI::NNFlex::Dataset->new([
[INPUTARRAY],
[INPUTARRAY]]);
$network->learn($dataset);
my $outputsRef = $dataset->run($network);
my $outputsRef = $network->output();
=head1 DESCRIPTION
AI::NNFlex::Hopfield is a Hopfield network simulator derived from the AI::NNFlex class. THIS IS THE FIRST ALPHA CUT OF THIS MODULE! Any problems, let me know and I'll fix them.
Hopfield networks differ from feedforward networks in that they are effectively a single layer, with all nodes connected to all other nodes (except themselves), and are trained in a single operation. They are particularly useful for recognising corru...
Full documentation for AI::NNFlex::Dataset can be found in the module's own perldoc. It's documented here for convenience only.
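A minimal sketch of that usage (the pattern values and constructor parameters are illustrative): store one 4-node pattern, then present a corrupted copy and let run() settle it:
use AI::NNFlex::Hopfield;
use AI::NNFlex::Dataset;
my $network = AI::NNFlex::Hopfield->new(randomweights=>1,debug=>[]);
$network->add_layer(nodes=>4);
$network->init();
my $dataset = AI::NNFlex::Dataset->new([[1,-1,1,-1]]);
$network->learn($dataset);
my $outputsRef = $network->run([1,-1,1,1]); # last element corrupted
print "@$outputsRef\n"; # compare against the stored pattern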
=head1 CONSTRUCTOR
=head2 AI::NNFlex::Hopfield->new();
=head2 AI::NNFlex::Dataset
new ( [[INPUT VALUES],[INPUT VALUES],
[INPUT VALUES],[INPUT VALUES],..])
=head2 INPUT VALUES
These should be comma separated values. They can be applied to the network with ::run or ::learn
=head2 OUTPUT VALUES
These are the intended or target output values. Comma separated. These will be used by ::learn
=head1 METHODS
This is a short list of the main methods implemented in AI::NNFlex::Hopfield.
=head2 AI::NNFlex::Hopfield
=head2 add_layer
Syntax:
$network->add_layer( nodes=>NUMBER OF NODES IN LAYER );
=head2 init
Syntax:
$network->init();
Initialises connections between nodes.
=head2 run
$network->run($dataset)
Runs the dataset through the network and returns a reference to an array of output patterns.
=head1 EXAMPLES
See the code in ./examples.
=head1 PREREQs
Math::Matrix
=head1 ACKNOWLEDGEMENTS
=head1 SEE ALSO
AI::NNFlex
AI::NNFlex::Backprop
=head1 TODO
More detailed documentation. Better tests. More examples.
=head1 CHANGES
v0.1 - new module
=head1 COPYRIGHT
Copyright (c) 2004-2005 Charles Colbourn. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself.
=head1 CONTACT
charlesc@nnflex.g0n.net
=cut
lib/AI/NNFlex/Mathlib.pm view on Meta::CPAN
package AI::NNFlex::Mathlib;
use strict;
#######################################################
# tanh activation function
#######################################################
sub tanh
{
my $network = shift;
my $value = shift;
my @debug = @{$network->{'debug'}};
# clamp extreme inputs to avoid overflow in exp()
if ($value > 20){ $value=1;}
elsif ($value < -20){ $value= -1;}
else
{
my $a = exp($value);
my $b = exp(-$value);
$value = ($a-$b)/($a+$b);
}
if (scalar @debug > 0)
{$network->dbug("Tanh activation returning $value",5)};
return $value;
}
sub tanh_slope
{
my $network = shift;
my $value = shift;
my @debug = @{$network->{'debug'}};
my $return = 1-($value*$value);
if (scalar @debug > 0)
{$network->dbug("Tanh_slope returning $value",5);}
return $return;
}
#################################################################
# Linear activation function
#################################################################
sub linear
{
my $network = shift;
my $value = shift;
my @debug = @{$network->{'debug'}};
if (scalar @debug >0)
{$network->dbug("Linear activation returning $value",5)};
return $value;
}
sub linear_slope
{
my $network = shift;
my $value = shift;
my @debug = @{$network->{'debug'}};
# the derivative of f(x)=x is the constant 1 (returning $value
# here would wrongly scale weight changes by the activation)
my $return = 1;
if (scalar @debug >0)
{$network->dbug("Linear slope returning $return",5)};
return $return;
}
############################################################
# P&B sigmoid activation (needs slope)
############################################################
sub sigmoid2
{
my $network = shift;
my $value = shift;
$value = (1+exp(-$value))**-1;
my @debug = @{$network->{'debug'}};
if (scalar @debug > 0)
{$network->dbug("Sigmoid2 activation returning $value",5);}
return $value;
}
sub sigmoid2_slope
{
my $network = shift;
my $value = shift;
my @debug = @{$network->{'debug'}};
my $return = exp(-$value) * ((1 + exp(-$value)) ** -2);
if (scalar @debug > 0)
{$network->dbug("sigmoid_slope returning $value",5);}
return $return;
}
############################################################
# standard sigmoid activation
############################################################
sub sigmoid
{
my $network = shift;
my $value = shift;
$value = 1/(1+exp(-$value));
my @debug = @{$network->{'debug'}};
if (scalar @debug > 0)
{$network->dbug("Sigmoid activation returning $value",5);}
return $value;
}
sub sigmoid_slope
{
my $network = shift;
my $value = shift;
my @debug = @{$network->{'debug'}};
my $return = $value * (1-$value);
if (scalar @debug > 0)
{$network->dbug("sigmoid_slope returning $value",5);}
return $return;
}
############################################################
# hopfield_threshold
# standard hopfield threshold activation - doesn't need a
# slope (because hopfield networks don't use them!)
############################################################
sub hopfield_threshold
{
my $network = shift;
my $value = shift;
if ($value <0){return -1}
if ($value >0){return 1}
return $value;
}
############################################################
# atanh error function
############################################################
sub atanh
{
my $network = shift;
my $value = shift;
if ($value >-0.5 && $value <0.5)
{
$value = log((1+$value)/(1-$value))/2;
}
return $value;
}
1;
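A small sketch of the convention these functions follow, assuming a network object that inherits Mathlib (as AI::NNFlex::Backprop does; the constructor parameters here are illustrative): each _slope function expects the node's activation, i.e. the output of the matching forward function, not the raw net input:
use AI::NNFlex::Backprop;
my $net = AI::NNFlex::Backprop->new(learningrate=>0.1,debug=>[]);
my $activation = $net->tanh(0.5);          # forward pass value
my $slope = $net->tanh_slope($activation); # 1 - activation**2
print "activation=$activation slope=$slope\n";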
=pod
=head1 NAME
AI::NNFlex::Mathlib - miscellaneous mathematical functions for the AI::NNFlex NN package
lib/AI/NNFlex/Mathlib.pm view on Meta::CPAN
=head1 CHANGES
v1.2 includes hopfield_threshold
=head1 COPYRIGHT
Copyright (c) 2004-2005 Charles Colbourn. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself.
=head1 CONTACT
charlesc@nnflex.g0n.net
=cut
lib/AI/NNFlex/Reinforce.pm view on Meta::CPAN
use base qw(AI::NNFlex AI::NNFlex::Feedforward);
use strict;
###########################################################
#AI::NNFlex::Reinforce::learn
###########################################################
sub learn
{
my $network = shift;
my @layers = @{$network->{'layers'}};
# no connections westwards from input, so no weights to adjust
shift @layers;
# reverse to start with the last layer first
foreach my $layer (reverse @layers)
{
my @nodes = @{$layer->{'nodes'}};
foreach my $node (@nodes)
{
my @westNodes = @{$node->{'connectedNodesWest'}->{'nodes'}};
my @westWeights = @{$node->{'connectedNodesWest'}->{'weights'}};
my $connectedNodeCounter=0;
foreach my $westNode (@westNodes)
{
my $dW = $westNode->{'activation'} * $westWeights[$connectedNodeCounter] * $network->{'learningrate'};
$node->{'connectedNodesWest'}->{'weights'}->[$connectedNodeCounter] += $dW;
$connectedNodeCounter++;
}
}
}
}
1;
=pod
=head1 NAME
AI::NNFlex::Reinforce - A very simple experimental NN module
=head1 SYNOPSIS
use AI::NNFlex::Reinforce;
my $network = AI::NNFlex::Reinforce->new(config parameter=>value);
$network->add_layer(nodes=>x,activationfunction=>'function');
$network->init();
use AI::NNFlex::Dataset;
my $dataset = AI::NNFlex::Dataset->new([
[INPUTARRAY],[TARGETOUTPUT],
[INPUTARRAY],[TARGETOUTPUT]]);
my $sqrError = 10;
for (1..100)
{
$dataset->learn($network);
}
$network->lesion({'nodes'=>PROBABILITY,'connections'=>PROBABILITY});
$network->dump_state(filename=>'badgers.wts');
$network->load_state(filename=>'badgers.wts');
my $outputsRef = $dataset->run($network);
my $outputsRef = $network->output(layer=>2,round=>1);
=head1 DESCRIPTION
Reinforce is a very simple NN module. It's mainly included in this distribution to provide an example of how to subclass AI::NNFlex to write your own NN modules. The training method strengthens any connections that are active during the run pass.
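Concretely, each learning pass applies the following update to every west-side connection (the numbers are illustrative):
# dW = activation of west node * current weight * learningrate
# e.g. activation 0.8, weight 0.5, learningrate 0.1:
# dW = 0.8 * 0.5 * 0.1 = 0.04, so the weight grows to 0.54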
=head1 CONSTRUCTOR
=head2 AI::NNFlex::Reinforce
new ( parameter => value );
randomweights=>MAXIMUM VALUE FOR INITIAL WEIGHT
fixedweights=>WEIGHT TO USE FOR ALL CONNECTIONS
debug=>[LIST OF CODES FOR MODULES TO DEBUG]
learningrate=>the learning rate of the network
round=>0 or 1 - if set to 1, the network rounds output values
to the nearest of 1, -1 or 0
The following parameters are optional:
randomweights
fixedweights
debug
round
(Note: if randomweights is not specified, the network will default to random initial weights between 0 and 1.)
=head1 METHODS
This is a short list of the main methods implemented in AI::NNFlex. Subclasses may implement other methods.
=head2 AI::NNFlex
=head3 add_layer
Syntax:
$network->add_layer( nodes=>NUMBER OF NODES IN LAYER,
persistentactivation=>RETAIN ACTIVATION BETWEEN PASSES,
decay=>RATE OF ACTIVATION DECAY PER PASS,
randomactivation=>MAXIMUM STARTING ACTIVATION,
threshold=>NYI,
activationfunction=>"ACTIVATION FUNCTION",
randomweights=>MAX VALUE OF STARTING WEIGHTS);
=head3 init
Syntax:
$network->init();
Initialises connections between nodes, sets initial weights and loads external components. The base AI::NNFlex init method implements connections backwards and forwards from each node in each layer to each node in the preceding and following layers...
=head3 lesion
$network->lesion ({'nodes'=>PROBABILITY,'connections'=>PROBABILITY})
Damages the network.
B<PROBABILITY>
A value between 0 and 1, denoting the probability of a given node or connection being damaged.
Note: this method may be called on a per network, per node or per layer basis using the appropriate object.
=head2 AI::NNFlex::Dataset
=head3 learn
$dataset->learn($network)
'Teaches' the network the dataset using the network's defined learning algorithm. Returns sqrError.
=head3 run
$dataset->run($network)
Runs the dataset through the network and returns a reference to an array of output patterns.
=head1 EXAMPLES
See the code in ./examples. For any given version of NNFlex, xor.pl will contain the latest functionality.
=head1 PREREQs
lib/AI/NNFlex/Reinforce.pm view on Meta::CPAN
Dr Martin Le Voi, for help with concepts of NN in the early stages
Dr David Plaut, for help with the project that this code was originally intended for.
Graciliano M.Passos for suggestions & improved code (see SEE ALSO).
Dr Scott Fahlman, whose very readable paper 'An empirical study of learning speed in backpropagation networks' (1988) has driven many of the improvements made so far.
=head1 SEE ALSO
AI::NNFlex
AI::NNFlex::Backprop
AI::NNFlex::Dataset
=head1 COPYRIGHT
Copyright (c) 2004-2005 Charles Colbourn. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself.
=head1 CONTACT
charlesc@nnflex.g0n.net
=cut
t/Backprop.t view on Meta::CPAN
use strict;
use Test;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
BEGIN{
plan tests=>10}
# test create network
my $network = AI::NNFlex::Backprop->new(randomconnections=>0,
randomweights=>1,
learningrate=>.1,
debug=>[],bias=>1,
momentum=>0.6);
ok($network); #test 1
##
# test add layer
my $result = $network->add_layer( nodes=>2,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"tanh",
randomweights=>1);
ok($result); #test 2
##
# add an extra layer to test out connect
$result = $network->add_layer( nodes=>2,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"sigmoid",
randomweights=>1);
# Test initialise network
$result = $network->init();
ok($result); #test 3
##
# test connect layer
$result = $network->connect(fromlayer=>1,tolayer=>1);
t/Backprop.t view on Meta::CPAN
$result = $network->connect(fromnode=>'1,0',tonode=>'1,1');
ok($result);
# test create dataset
my $dataset = AI::NNFlex::Dataset->new([
[0,0],[1,1],
[0,1],[1,0],
[1,0],[0,1],
[1,1],[0,0]]);
ok ($dataset); #test 4
##
# Test a learning pass
my $err = $dataset->learn($network);
ok($err); #test 5
##
t/Dataset.t view on Meta::CPAN
use strict;
use Test;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
BEGIN{
plan tests=>12}
# we need a basic network in place to test the dataset functionality against
# test create network
my $network = AI::NNFlex::Backprop->new(randomconnections=>0,
randomweights=>1,
learningrate=>.1,
debug=>[],bias=>1,
momentum=>0.6);
ok($network); #test 1
##
# test add layer
my $result = $network->add_layer( nodes=>2,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"tanh",
randomweights=>1);
ok($result); #test 2
##
# Test initialise network
$result = $network->init();
ok($result); #test 3
##
# test create dataset
my $dataset = AI::NNFlex::Dataset->new([
[0,0],[1,1],
[0,1],[1,0],
[1,0],[0,1],
[1,1],[0,0]]);
ok ($dataset); #test 4
##
# test adding an entry
$result = $dataset->add([[1,1],[0,1]]);
ok($result);
# test save
$result = $dataset->save(filename=>'test.pat');
ok ($result);
t/backprop.t view on Meta::CPAN
use strict;
use Test;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
BEGIN{
plan tests=>8}
# test create network
my $network = AI::NNFlex::Backprop->new(randomconnections=>0,
randomweights=>1,
learningrate=>.1,
debug=>[],bias=>1,
momentum=>0.6);
ok($network); #test 1
##
# test add layer
my $result = $network->add_layer( nodes=>2,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"tanh",
randomweights=>1);
ok($result); #test 2
##
# add an extra layer to test out connect
$result = $network->add_layer( nodes=>2,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"tanh",
randomweights=>1);
# Test initialise network
$result = $network->init();
ok($result); #test 3
##
# test connect layer
$result = $network->connect(fromlayer=>1,tolayer=>1);
t/backprop.t view on Meta::CPAN
$result = $network->connect(fromnode=>'1,0',tonode=>'1,1');
ok($result);
# test create dataset
my $dataset = AI::NNFlex::Dataset->new([
[0,0],[1,1],
[0,1],[1,0],
[1,0],[0,1],
[1,1],[0,0]]);
ok ($dataset); #test 4
##
# Test a learning pass
my $err = $dataset->learn($network);
ok($err); #test 5
##
t/reinforce.t view on Meta::CPAN
use strict;
use Test;
use AI::NNFlex::Reinforce;
use AI::NNFlex::Dataset;
BEGIN{
plan tests=>5}
# test create network
my $network = AI::NNFlex::Reinforce->new(randomconnections=>0,
randomweights=>1,
learningrate=>.1,
debug=>[],bias=>1);
ok($network); #test 1
##
# test add layer
my $result = $network->add_layer( nodes=>2,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"tanh",
randomweights=>1);
ok($result); #test 2
##
# Test initialise network
$result = $network->init();
ok($result); #test 3
##
# test create dataset
my $dataset = AI::NNFlex::Dataset->new([
[0,0],[1,1],
[0,1],[1,0],
[1,0],[0,1],
[1,1],[0,0]]);
ok ($dataset); #test 4
##
# Test a run pass
$result = $dataset->run($network);
ok($result); #test 5
##