AI-NeuralNet-BackProp
BackProp.pm view on Meta::CPAN
return sprintf("%.10f",($diff/$a1s)); # tail of pdiff(): the accumulated difference averaged over the first array's size
}
# Returns the absolute difference between $fa and $fb as a percentage of $fa
sub p {
shift if(substr($_[0],0,4) eq 'AI::');
my ($fa,$fb)=(shift,shift);
sprintf("%.3f",((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100);
}
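# A minimal usage sketch with hypothetical values: p(10, 12) takes the
# difference |12-10| = 2 as a percentage of 10 and returns the string
# "20.000" (assuming p() lives in the AI::NeuralNet::BackProp package,
# as this file suggests):
#
# print AI::NeuralNet::BackProp::p(10, 12), "\n"; # prints 20.000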
# This sub will take an array ref of a data set, which it expects in this format:
# my @data_set = ( [ ...inputs... ], [ ...outputs... ],
# ... rows ...
# );
#
# This sub returns the percentage of 'forgetfulness' when the net learns all the
# data in the set in order. Usage:
#
# learn_set(\@data,[ options ]);
#
# Options are options in hash form. They can be of any form that $net->learn takes,
# plus 'pattern' (index of the pair to re-test after learning, default 0) and
# 'flag' (true to report a percentage difference via pdiff(), false to report the
# raw difference of the first output element; default 1).
#
# It returns a percentage string.
#
sub learn_set {
my $self = shift if(substr($_[0],0,4) eq 'AI::');
my $data = shift;
my %args = @_;
my $len = int($#{$data}/2); # index of the last input/output pair
my $inc = $args{inc};
my $max = $args{max};
my $error = $args{error};
my $p   = (defined $args{flag})    ? $args{flag}        : 1;
my $row = (defined $args{pattern}) ? $args{pattern}*2+1 : 1;
my ($fa,$fb);
for my $x (0..$len) {
print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG);
my $str = $self->learn( $data->[$x*2], # The list of data to input to the net
$data->[$x*2+1], # The output desired
inc=>$inc, # The starting learning gradient
max=>$max, # The maximum num of loops allowed
error=>$error); # The maximum (%) error allowed
print $str if($AI::NeuralNet::BackProp::DEBUG);
}
my $res;
# If the test row is still an uncrunched string (a string numifies to 0, an array ref does not), crunch it first
$data->[$row] = $self->crunch($data->[$row]) if($data->[$row] == 0);
if ($p) {
$res=pdiff($data->[$row],$self->run($data->[$row-1]));
} else {
$res=$data->[$row]->[0]-$self->run($data->[$row-1])->[0];
}
return $res;
}
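# A minimal usage sketch (option values hypothetical; inc/max/error are the
# same options learn() takes, pattern/flag control the forgetfulness test):
#
# my $forgetfulness = $net->learn_set(\@data_set,
# inc => 0.2, # learning gradient
# max => 2000, # max loops per pair
# error => -1, # max % error allowed
# pattern => 0, # re-test the first pair
# flag => 1); # report a percentage difference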
# This sub will take an array ref of a data set, which it expects in this format:
# my @data_set = ( [ ...inputs... ], [ ...outputs... ],
# ... rows ...
# );
#
# This sub makes the net learn all the data in the set in RANDOM order. Unlike
# learn_set(), it does not measure forgetfulness. Usage:
#
# learn_set_rand(\@data,[ options ]);
#
# Options are options in hash form. They can be of any form that $net->learn takes.
#
# It returns a true value.
#
sub learn_set_rand {
my $self = shift if(substr($_[0],0,4) eq 'AI::');
my $data = shift;
my %args = @_;
my $len = int($#{$data}/2); # index of the last input/output pair
my $inc = $args{inc};
my $max = $args{max};
my $error = $args{error};
my @learned;
my $remaining = $len+1; # number of input/output pairs not yet learned
while($remaining) {
# Pick a pair we have not learned yet
my $x;
do { $x = $self->intr(rand()*$len) } while($learned[$x]);
$learned[$x]=1;
$remaining--;
print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG);
my $str = $self->learn($data->[$x*2], # The list of data to input to the net
$data->[$x*2+1], # The output desired
inc=>$inc, # The starting learning gradient
max=>$max, # The maximum num of loops allowed
error=>$error); # The maximum (%) error allowed
print $str if($AI::NeuralNet::BackProp::DEBUG);
}
return 1;
}
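# A minimal usage sketch (option values hypothetical) -- same options as
# learn_set(), but the pairs are visited in random order and a true value
# is returned instead of a forgetfulness measure:
#
# $net->learn_set_rand(\@data_set, inc => 0.2, max => 2000, error => -1);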
# Create a new network with 1 layer, 5 neurons per layer, and 5 outputs
my $net = new AI::NeuralNet::BackProp(1,5,5);
# Add a small amount of randomness to the network
$net->random(0.001);
# Demonstrate a simple learn() call
my @inputs = ( 0,0,1,1,1 );
my @outputs = ( 1,0,1,0,1 );
print $net->learn(\@inputs, \@outputs),"\n";
# Create a data set to learn
my @set = (
[ 2,2,3,4,1 ], [ 1,1,1,1,1 ],
[ 1,1,1,1,1 ], [ 0,0,0,0,0 ],
[ 1,1,1,0,0 ], [ 0,0,0,1,1 ]
);
# Demo learn_set()
my $f = $net->learn_set(\@set);
print "Forgetfulness: $f unit\n";
# Crunch a bunch of strings and return array refs
my $phrase1 = $net->crunch("I love neural networks!");
my $phrase2 = $net->crunch("Jay Leno is weird.");
my $phrase3 = $net->crunch("The rain in Spain...");
my $phrase4 = $net->crunch("Tired of word crunching yet?");
# Make a data set from the array refs
my @phrases = (
$phrase1, $phrase2,
$phrase3, $phrase4
);
# Learn the data set
$net->learn_set(\@phrases);
# Run a test phrase through the network
my $test_phrase = $net->crunch("I love neural networking!");
my $result = $net->run($test_phrase);
# Get this, it prints "Jay Leno is networking!" ... LOL!
print $net->uncrunch($result),"\n";
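# crunch() maps each word of a string to an internal word index and returns
# an array ref; uncrunch() maps an output array back into words, so a round
# trip over a hypothetical phrase should reproduce the original text:
#
# print $net->uncrunch($net->crunch("red green blue")), "\n"; # "red green blue"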
examples/ex_alpha.pl view on Meta::CPAN
then it inputs a never-before-seen bitmap and displays the classification the network
gives for the unknown bitmap.
=cut
use AI::NeuralNet::BackProp;
# Create a new network with 2 layers and 35 neurons in each layer, with 1 output neuron
my $net = new AI::NeuralNet::BackProp(2,35,1);
# Debug level of 4 gives JUST learn loop iteration benchmark and comparison data
# as learning progresses.
$net->debug(4);
my $letters = [ # All prototype inputs
[
2,1,1,1,2, # Inputs are
1,2,2,2,1, # 5*7 digitized characters
1,2,2,2,1,
1,1,1,1,1,
1,2,2,2,1, # This is the alphabet of the
examples/ex_bmp.pl view on Meta::CPAN
# Set resolution
my $xres=5;
my $yres=5;
# Create a new net with 2 layers, $xres*$yres inputs, and 1 output
my $net = AI::NeuralNet::BackProp->new(2,$xres*$yres,1);
# Debug level 4: print just the learn-loop benchmark data
$net->debug(4);
# Create datasets.
my @data = (
[ 2,1,1,2,2,
2,2,1,2,2,
2,2,1,2,2,
2,2,1,2,2,
2,1,1,1,2 ], [ 1 ],
[ 1,1,1,2,2,
2,2,2,1,2,
2,1,1,1,2,
1,2,2,2,2,
examples/ex_dow.pl view on Meta::CPAN
use AI::NeuralNet::BackProp;
use Benchmark;
# Create a new net with 2 layers, 9 inputs, and 1 output
my $net = AI::NeuralNet::BackProp->new(2,9,1);
# Debug level 4: print just the learn-loop benchmark data
$net->debug(4);
# Create datasets.
# Note that these are fictitious values shown for illustration purposes
# only. In the example, CPI is a certain month's consumer price
# index, CPI-1 is the index one month before, CPI-3 is the index 3
# months before, etc.
my @data = (
# Mo CPI CPI-1 CPI-3 Oil Oil-1 Oil-3 Dow Dow-1 Dow-3 Dow Ave (output)
[ 1, 229, 220, 146, 20.0, 21.9, 19.5, 2645, 2652, 2597], [ 2647 ],
[ 2, 235, 226, 155, 19.8, 20.0, 18.3, 2633, 2645, 2585], [ 2637 ],
[ 3, 244, 235, 164, 19.6, 19.8, 18.1, 2627, 2633, 2579], [ 2630 ],
[ 4, 261, 244, 181, 19.6, 19.6, 18.1, 2611, 2627, 2563], [ 2620 ],
[ 5, 276, 261, 196, 19.5, 19.6, 18.0, 2630, 2611, 2582], [ 2638 ],
[ 6, 287, 276, 207, 19.5, 19.5, 18.0, 2637, 2630, 2589], [ 2635 ],
[ 7, 296, 287, 212, 19.3, 19.5, 17.8, 2640, 2637, 2592], [ 2641 ]
);
# If we haven't saved the net already, do the learning
if(!$net->load('dow.dat')) {
print "\nLearning started...\n";
# Make it learn the whole data set $top+1 times
my @list;
my $top=1;
for my $a (0..$top) {
my $t1=new Benchmark;
print "\n\nOuter Loop: $a\n";
# Test forgetfulness
my $f = $net->learn_set(\@data, inc => 0.2,
max => 2000,
error => -1);
# Print it
print "\n\nForgetfullness: $f%\n";
# Save net to disk
$net->save('dow.dat');
my $t2=new Benchmark;
my $td=timediff($t2,$t1);
print "\nLoop $a took ",timestr($td),"\n";
}
}
# Run a prediction using fake data
# Month CPI CPI-1 CPI-3 Oil Oil-1 Oil-3 Dow Dow-1 Dow-3
my @set=( 10, 352, 309, 203, 18.3, 18.7, 16.1, 2592, 2641, 2651 );
# Dow Ave (output)
my $fb=$net->run(\@set)->[0];
# Print output
print "\nTest Factors: (",join(',',@set),")\n";
print "DOW Prediction for Month #11: $fb\n";
examples/ex_synop.pl view on Meta::CPAN
# Add a small amount of randomness to the network
$net->random(0.001);
# Demonstrate a simple learn() call
my @inputs = ( 0,0,1,1,1 );
my @outputs = ( 1,0,1,0,1 );
print $net->learn(\@inputs, \@outputs),"\n";
# Create a data set to learn
my @set = (
[ 2,2,3,4,1 ], [ 1,1,1,1,1 ],
[ 1,1,1,1,1 ], [ 0,0,0,0,0 ],
[ 1,1,1,0,0 ], [ 0,0,0,1,1 ]
);
# Demo learn_set()
my $f = $net->learn_set(\@set);
print "Forgetfulness: $f unit\n";
# Crunch a bunch of strings and return array refs
my $phrase1 = $net->crunch("I love neural networks!");
my $phrase2 = $net->crunch("Jay Leno is weird.");
my $phrase3 = $net->crunch("The rain in Spain...");
my $phrase4 = $net->crunch("Tired of word crunching yet?");
# Make a data set from the array refs
my @phrases = (
$phrase1, $phrase2,
$phrase3, $phrase4
);
# Learn the data set
$net->learn_set(\@phrases);
# Run a test phrase through the network
my $test_phrase = $net->crunch("I love neural networking!");
my $result = $net->run($test_phrase);
# Get this, it prints "Jay Leno is networking!" ... LOL!
print $net->uncrunch($result),"\n";