# learning implemented via a generalization of Hebb's rule and
# several principles of Hopfield networks.
# online: http://www.josiah.countystart.com/modules/AI/cgi-bin/rec.pl
#
package AI::NeuralNet::BackProp::neuron;
use strict;
# Dummy constructor
sub new {
bless {}, shift
}
# Rounds floats to ints
sub intr {
shift if(substr($_[0],0,4) eq 'AI::');
my $n = shift;
my $i = eval { int(sprintf("%.0f",$n)) };
return (defined $i) ? $i : 0;
}
# Receives input from another neuron. The sender must be
# registered as a synapse of this neuron for its input
# to be weighted and counted.
sub input {
my $self = shift;
my $sid = shift;
my $value = shift;
# We simply weight the value sent by the neuron. The neuron identifies itself to us
# using the code we gave it when it registered itself with us. The code is in $sid,
# (synapse ID) and we use that to track the weight of the connection.
# This line simply multiplies the value by its weight and gets the integer from it.
$self->{SYNAPSES}->{LIST}->[$sid]->{VALUE} = intr($value * $self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT});
$self->{SYNAPSES}->{LIST}->[$sid]->{FIRED} = 1;
AI::NeuralNet::BackProp::out1((($self->input_complete())?"All synapses have fired":"Not all synapses have fired"));
AI::NeuralNet::BackProp::out1(" for $self.\n");
# Check and see if all synapses have fired that are connected to this one.
# If they have, then generate the output value for this synapse.
$self->output() if($self->input_complete());
}
# Loops through and outputs to every neuron that this
# neuron is registered as a synapse of.
sub output {
my $self = shift;
my $size = $self->{OUTPUTS}->{SIZE} || 0;
my $value = $self->get_output();
for (0..$size-1) {
AI::NeuralNet::BackProp::out1("Outputing to $self->{OUTPUTS}->{LIST}->[$_]->{PKG}, index $_, a value of $value with ID $self->{OUTPUTS}->{LIST}->[$_]->{ID}.\n");
$self->{OUTPUTS}->{LIST}->[$_]->{PKG}->input($self->{OUTPUTS}->{LIST}->[$_]->{ID},$value);
}
}
# Used internally by output().
sub get_output {
my $self = shift;
my $size = $self->{SYNAPSES}->{SIZE} || 0;
my $value = 0;
my $state = 0;
my (@map,@weight);
# We loop through all the synapses connected to this one and add the weighted
# values together, saving each in a debugging list.
for (0..$size-1) {
$value += $self->{SYNAPSES}->{LIST}->[$_]->{VALUE};
}
# Average the weighted inputs to get this neuron's state.
$state = intr($value/$size) if($size);
AI::NeuralNet::BackProp::out1("From get_output, value is $value, so state is $state.\n");
# Possible future expansion for self-excitation. Not currently used.
$self->{LAST_VALUE} = $value;
# Just return the $state
return $state;
}
# Used by input() to check if all registered synapses have fired.
sub input_complete {
my $self = shift;
my $size = $self->{SYNAPSES}->{SIZE} || 0;
my $retvalue = 1;
# Very simple loop. Doesn't need explaining.
for (0..$size-1) {
$retvalue = 0 if(!$self->{SYNAPSES}->{LIST}->[$_]->{FIRED});
}
return $retvalue;
}
# Used to recursively adjust the weights of synapse input channels
# to give a desired value. Designed to be called via
# AI::NeuralNet::BackProp::NeuralNetwork::learn().
sub weight {
my $self = shift;
my $ammount = shift;
my $what = shift;
my $size = $self->{SYNAPSES}->{SIZE} || 0;
my $value;
AI::NeuralNet::BackProp::out1("Weight: ammount is $ammount, what is $what with size at $size.\n");
# Now this sub is the main cog in the learning wheel. It is called recursively on
# each neuron that has been bad (given incorrect output).
for my $i (0..$size-1) {
$value = $self->{SYNAPSES}->{LIST}->[$i]->{VALUE};
if(0) {
# Formula by Steve Purkis
# Converges very fast for low-value inputs. Has trouble converging on high-value
# inputs. Feel free to play and try to get to work for high values.
my $delta = $ammount * ($what - $value) * $self->{SYNAPSES}->{LIST}->[$i]->{INPUT};
# Recursively apply
$self->{SYNAPSES}->{LIST}->[$i]->{WEIGHT} += $delta;
$self->{SYNAPSES}->{LIST}->[$i]->{PKG}->weight($ammount,$what);
}
}
}
# Registers some neuron as a synapse of this neuron.
# This is called exclusively by connect(), except in
# initialize_group() to connect the _map package.
sub register_synapse {
my $self = shift;
my $synapse = shift;
my $sid = $self->{SYNAPSES}->{SIZE} || 0;
$self->{SYNAPSES}->{LIST}->[$sid]->{PKG} = $synapse;
$self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT} = 1.00 if(!$self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT});
$self->{SYNAPSES}->{LIST}->[$sid]->{FIRED} = 0;
AI::NeuralNet::BackProp::out1("$self: Registering sid $sid with weight $self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT}, package $self->{SYNAPSES}->{LIST}->[$sid]->{PKG}.\n");
$self->{SYNAPSES}->{SIZE} = ++$sid;
return ($sid-1);
}
# Called via AI::NeuralNet::BackProp::NeuralNetwork::initialize_group() to
# form the neuron grids.
# This registers another neuron as an output of this one, then asks
# that neuron to register us as an input connection, and saves the
# sid that the output neuron returns.
sub connect {
my $self = shift;
my $to = shift;
my $oid = $self->{OUTPUTS}->{SIZE} || 0;
AI::NeuralNet::BackProp::out1("Connecting $self to $to at $oid...\n");
$self->{OUTPUTS}->{LIST}->[$oid]->{PKG} = $to;
$self->{OUTPUTS}->{LIST}->[$oid]->{ID} = $to->register_synapse($self);
$self->{OUTPUTS}->{SIZE} = ++$oid;
return $self->{OUTPUTS}->{LIST}->[$oid-1]->{ID};
}
1;
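# Wiring sketch (illustrative only, not part of the module): two neurons,
# $src feeding $dst. $src is given one dummy input channel so that a single
# input() call completes it and propagates a value through to $dst:
#
#   my $src = AI::NeuralNet::BackProp::neuron->new();
#   my $dst = AI::NeuralNet::BackProp::neuron->new();
#   $src->connect($dst);                      # $dst registers $src as a synapse
#   my $sid = $src->register_synapse($dst);   # dummy input channel, weight 1.00
#   $src->input($sid, 3);                     # fires $src, which outputs 3 to $dst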
package AI::NeuralNet::BackProp;
use Benchmark;
use strict;
# Returns the number of elements in an array ref, undef on error
sub _FETCHSIZE {
my $a=$_[0];
return undef if(substr($a,0,5) ne "ARRAY");
return scalar(@{$a});
}
# Debugging subs
$AI::NeuralNet::BackProp::DEBUG = 0;
sub whowasi { (caller(1))[3] . '()' }
sub debug { shift; $AI::NeuralNet::BackProp::DEBUG = shift || 0; }
sub out1 { print shift() if ($AI::NeuralNet::BackProp::DEBUG eq 1) }
sub out2 { print shift() if (($AI::NeuralNet::BackProp::DEBUG eq 1) || ($AI::NeuralNet::BackProp::DEBUG eq 2)) }
sub out3 { print shift() if ($AI::NeuralNet::BackProp::DEBUG) }
sub out4 { print shift() if ($AI::NeuralNet::BackProp::DEBUG eq 4) }
# Rounds a floating-point to an integer with int() and sprintf()
sub intr {
shift if(substr($_[0],0,4) eq 'AI::');
my $n = shift;
my $i = eval { int(sprintf("%.0f",$n)) };
return (defined $i) ? $i : 0;
}
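# For example (illustrative): intr(0.51) == 1, intr(1.49) == 1, intr(-1.2) == -1.
# Note that sprintf "%.0f" rounds exact halves to even on most platforms, so
# intr(0.5) is 0 while intr(1.5) is 2.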
# Used to format array ref into columns
# Usage:
# join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
# Can also be called as method of your neural net.
# If $high_state_character is null, prints actual numerical values of each element.
sub join_cols {
no strict 'refs';
shift if(substr($_[0],0,4) eq 'AI::');
my $map = shift;
my $break = shift;
my $a = shift;
my $b = shift;
my $x;
foreach my $el (@{$map}) {
my $str = ((int($el))?$a:$b);
$str=$el."\0" if(!$a);
print $str;
$x++;
if($x>$break-1) {
print "\n";
$x=0;
}
}
print "\n";
}
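# For example (illustrative), join_cols([1,0,1,1], 2, '#', '.') prints:
#   #.
#   ##
# followed by a blank line.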
# Returns percentage difference between all elements of two
# array refs of exact same length (in elements).
# Now calculates actual difference in numerical value.
sub pdiff {
no strict 'refs';
shift if(substr($_[0],0,4) eq 'AI::');
my $a1 = shift;
my $a2 = shift;
my $a1s = $#{$a1}; #AI::NeuralNet::BackProp::_FETCHSIZE($a1);
my $a2s = $#{$a2}; #AI::NeuralNet::BackProp::_FETCHSIZE($a2);
my ($a,$b,$diff,$t);
$diff=0;
#return undef if($a1s ne $a2s); # must be same length
for my $x (0..$a1s) {
$a = $a1->[$x];
$b = $a2->[$x];
if($a!=$b) {
if($a<$b){$t=$a;$a=$b;$b=$t;}
$a=1 if(!$a);
$diff+=(($a-$b)/$a)*100;
}
}
$a1s = 1 if(!$a1s);
return sprintf("%.10f",($diff/$a1s));
}
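# Worked example (illustrative): pdiff([10,20],[11,18]) accumulates
# ((11-10)/11)*100 + ((20-18)/20)*100 = 19.0909..., then divides by $#{$a1}
# (here 1), returning the string "19.0909090909".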
# Returns the absolute difference between $fa and $fb as a percentage of $fa
sub p {
shift if(substr($_[0],0,4) eq 'AI::');
my ($fa,$fb)=(shift,shift);
sprintf("%.3f",((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100);
}
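# For example (illustrative): p(10,15) returns "50.000", since |15-10|/10*100 = 50.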
# This sub will take an array ref of a data set, which it expects in this format:
# my @data_set = ( [ ...inputs... ], [ ...outputs ... ],
# ... rows ...
# );
#
# This sub returns the percentage of 'forgetfulness' when the net learns all the
# data in the set in order. Usage:
#
# learn_set(\@data,[ options ]);
#
# Options are options in hash form. They can be of any form that $net->learn takes.
#
# It returns a percentage string.
#
sub learn_set {
my $self = shift if(substr($_[0],0,4) eq 'AI::');
my $data = shift;
my %args = @_;
my $len = $#{$data}/2-1;
my $inc = $args{inc};
my $max = $args{max};
my $error = $args{error};
my $p = (defined $args{flag}) ?$args{flag} :1;
my $row = (defined $args{pattern})?$args{pattern}*2+1:1;
my ($fa,$fb);
for my $x (0..$len) {
my $str = $self->learn($data->[$x*2], # The list of data to input to the net
$data->[$x*2+1], # The output desired
inc=>$inc, # The starting learning gradient
max=>$max, # The maximum num of loops allowed
error=>$error); # The maximum (%) error allowed
print $str if($AI::NeuralNet::BackProp::DEBUG);
}
my $res;
$data->[$row] = $self->crunch($data->[$row]) if($data->[$row] == 0);
if ($p) {
$res=pdiff($data->[$row],$self->run($data->[$row-1]));
} else {
$res=$data->[$row]->[0]-$self->run($data->[$row-1])->[0];
}
return $res;
}
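# Usage sketch (illustrative, assumes a net built with 2 inputs and 1 output):
#
#   my $net = new AI::NeuralNet::BackProp(1,2,1);
#   my $forgetfulness = $net->learn_set([
#       [ 1, 1 ], [ 2 ],
#       [ 2, 2 ], [ 4 ],
#   ]);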
# This sub will take an array ref of a data set, which it expects in this format:
# my @data_set = ( [ ...inputs... ], [ ...outputs ... ],
# ... rows ...
# );
#
# This sub returns the percentage of 'forgetfulness' when the net learns all the
# data in the set in RANDOM order. Usage:
#
# learn_set_rand(\@data,[ options ]);
#
# Options are options in hash form. They can be of any form that $net->learn takes.
#
# It returns a true value.
#
sub learn_set_rand {
my $self = shift if(substr($_[0],0,4) eq 'AI::');
my $data = shift;
my %args = @_;
my $len = $#{$data}/2-1;
my $inc = $args{inc};
my $max = $args{max};
my $error = $args{error};
my @learned;
while(1) {
_GET_X:
my $x=$self->intr(rand()*$len);
last if(scalar(grep {$_} @learned) > $len);
goto _GET_X if($learned[$x]);
$learned[$x]=1;
my $str = $self->learn($data->[$x*2], # The list of data to input to the net
$data->[$x*2+1], # The output desired
inc=>$inc, # The starting learning gradient
max=>$max, # The maximum num of loops allowed
error=>$error); # The maximum (%) error allowed
print $str if($AI::NeuralNet::BackProp::DEBUG);
}
return 1;
}
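# Usage sketch (illustrative): same data format as learn_set(), above:
#
#   $net->learn_set_rand([ [ 1, 1 ], [ 2 ],
#                          [ 2, 2 ], [ 4 ] ]);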
# Returns the index of the element in array REF passed with the highest comparative value
sub high {
shift if(substr($_[0],0,4) eq 'AI::');
my $ref1 = shift;
my $tmp = 0;
for my $x (0..$#{$ref1}) {
$tmp = $x if($ref1->[$x] > $ref1->[$tmp]);
}
return $tmp;
}
# Returns the index of the element in array REF passed with the lowest comparative value
sub low {
shift if(substr($_[0],0,4) eq 'AI::');
my $ref1 = shift;
my $tmp = 0;
for my $x (0..$#{$ref1}) {
$tmp = $x if($ref1->[$x] < $ref1->[$tmp]);
}
return $tmp;
}
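# For example (illustrative): high([2,9,4]) returns 1 (the index of 9), and
# low([2,9,4]) returns 0 (the index of 2).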
# Returns a pcx object
sub load_pcx {
my $self = shift;
return AI::NeuralNet::BackProp::PCX->new($self,shift);
}
# Crunch a string of words into a map
sub crunch {
my $self = shift;
my (@map,$ic);
my @ws = split(/[\s\t]/,shift);
for my $a (0..$#ws) {
$ic=$self->crunched($ws[$a]);
if(!defined $ic) {
$self->{_CRUNCHED}->{LIST}->[$self->{_CRUNCHED}->{_LENGTH}++]=$ws[$a];
$map[$a]=$self->{_CRUNCHED}->{_LENGTH};
} else {
$map[$a]=$ic;
}
}
return \@map;
}
# Finds if a word has been crunched.
# Returns undef on failure, word index for success.
sub crunched {
my $self = shift;
for my $a (0..$self->{_CRUNCHED}->{_LENGTH}-1) {
return $a+1 if($self->{_CRUNCHED}->{LIST}->[$a] eq $_[0]);
}
return undef;
}
# Alias for crunched(), above
sub word { crunched(@_) }
# Uncrunches a map (array ref) into an array of words (not an array ref) and returns array
sub uncrunch {
my $self = shift;
my $map = shift;
my ($c,$el,$x);
foreach $el (@{$map}) {
$c .= $self->{_CRUNCHED}->{LIST}->[$el-1].' ';
}
return $c;
}
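# Round-trip sketch (illustrative, assumes a constructed net in $net):
#
#   my $map = $net->crunch("How are you?");   # e.g. [1,2,3]
#   print $net->uncrunch($map);               # "How are you? " (trailing space)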
# Sets/gets the randomness factor in the network. Setting a value of 0 disables random factors.
sub random {
my $self = shift;
my $rand = shift;
return $self->{random} if(!(defined $rand));
$self->{random} = $rand;
}
# Sets/gets column width for printing lists in debug modes 1,3, and 4.
sub col_width {
my $self = shift;
my $width = shift;
return $self->{col_width} if(!$width);
$self->{col_width} = $width;
}
# Sets/Removes value ranging
sub range {
my $self = shift;
my $ref = shift;
my $b = shift;
if(substr($ref,0,5) ne "ARRAY") {
if(($ref == 0) && (!defined $b)) {
$ref = $self->crunch($ref);
#print "\$ref is a string, crunching to ",join(',',@{$ref}),"\n";
} else {
my $a = $ref;
$a = $self->crunch($a)->[0] if($a == 0);
$b = $self->crunch($b)->[0] if($b == 0);
$_[++$#_] = $a;
$_[++$#_] = $b;
$ref = \@_;
}
if($rB<$rA){my $t=$rA;$rA=$rB;$rB=$t};
$self->{rA}=$rA;
$self->{rB}=$rB;
$self->{rS}=$rS if($rS);
$self->{rRef} = $ref;
return $ref;
}
# Used internally to scale outputs to fit range
sub _range {
my $self = shift;
my $in = shift;
my $rA = $self->{rA};
my $rB = $self->{rB};
my $rS = $self->{rS};
my $r = $rB;#-$rA+1;
return $in if(!$rA && !$rB);
my $l = $self->{OUT}-1;
my $out = [];
# Adjust for a maximum outside what we have seen so far
$out->[$i] = $self->{rRef}->[$t];
}
$self->{rS}=$rS;
return $out;
}
# Initializes the base for a new neural network.
# It is recommended that you call learn() before run()ing a pattern.
# See documentation above for usage.
sub new {
no strict;
my $type = shift;
my $self = {};
my $layers = shift;
my $size = shift;
my $out = shift || $size;
my $flag = shift || 0;
bless $self, $type;
$self->{SIZE} = $layers*$size;
$self->{DIV} = $size;
$self->{OUT} = $out;
$self->{FLAG} = $flag;
$self->{col_width}= 5;
$self->{random} = 0.001;
$self->initialize_group();
return $self;
}
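# Constructor sketch (illustrative): two layers of three neurons each,
# grouped down to one output:
#
#   my $net = new AI::NeuralNet::BackProp(2, 3, 1);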
# Save entire network state to disk.
sub save {
my $self = shift;
my $file = shift;
my $size = $self->{SIZE};
my $div = $self->{DIV};
my $out = $self->{OUT};
my $flag = $self->{FLAG};
open(FILE,">$file");
print FILE "size=$size\n";
print FILE "div=$div\n";
print FILE "out=$out\n";
print FILE "flag=$flag\n";
for my $a (0..$size-1) {
my $w;
for my $b (0..$div-1) {
$w .= $self->{NET}->[$a]->{SYNAPSES}->{LIST}->[$b]->{WEIGHT}.",";
}
chop($w);
print FILE "n$a=$w\n";
}
close(FILE);
return $self;
}
# Load entire network state from disk.
sub load {
my $self = shift;
my $file = shift;
my $load_flag = shift || 0;
return undef if(!(-f $file));
open(FILE,"$file");
my @lines=<FILE>;
close(FILE);
@l=split /\,/, $w;
for my $b (0..$self->{DIV}-1) {
$self->{NET}->[$a]->{SYNAPSES}->{LIST}->[$b]->{WEIGHT}=$l[$b];
}
}
return $self;
}
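# Persistence sketch (illustrative, assumes a trained net in $net):
#
#   $net->save("my.net");
#   $net->load("my.net") or print "no saved net found\n";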
# Dumps the complete weight matrix of the network to STDOUT
sub show {
my $self = shift;
for my $a (0..$self->{SIZE}-1) {
print "Neuron $a: ";
for my $b (0..$self->{DIV}-1) {
print $self->{NET}->[$a]->{SYNAPSES}->{LIST}->[$b]->{WEIGHT},"\t";
}
print "\n";
}
}
# Used internally by new() and learn().
# This is the sub block that actually creates
# the connections between the synapse chains and
# also connects the run packages and the map packages
# to the appropriate ends of the neuron grids.
sub initialize_group {
my $self = shift;
my $size = $self->{SIZE};
my $div = $self->{DIV};
my $out = $self->{OUT};
my $flag = $self->{FLAG};
my $x = 0;
my $y = 0;
# Reset map and run synapse counters.
$self->{RUN}->{REGISTRATION} = $self->{MAP}->{REGISTRATION} = 0;
$self->{_tmp_synapse} = $y;
$self->{NET}->[$y]->connect($self->{MAP});
}
# And the group is done!
}
# When called with an array reference to a pattern, returns a reference
# to an array associated with that pattern. See usage in documentation.
sub run {
my $self = shift;
my $map = shift;
my $t0 = new Benchmark;
$self->{RUN}->run($map);
$self->{LAST_TIME}=timestr(timediff(new Benchmark, $t0));
return $self->map();
}
# This automatically uncrunches a response after running it
sub run_uc {
$_[0]->uncrunch(run(@_));
}
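# Usage sketch (illustrative): crunch a string, run it, and uncrunch the result:
#
#   my $reply = $net->run_uc("How are you?");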
# Returns the benchmark time and loops run or learned
# for the last run() or learn()
# operation performed.
#
sub benchmarked {
my $self = shift;
return $self->{LAST_TIME};
}
# Used to retrieve map from last internal run operation.
sub map {
my $self = shift;
$self->{MAP}->map();
}
# Forces network to learn pattern passed and give desired
# results. See usage in POD.
sub learn {
my $self = shift;
my $omap = shift;
my $res = shift;
my %args = @_;
my $inc = $args{inc} || 0.20;
my $max = $args{max} || 1024;
my $_mx = intr($max/10);
my $_mi = 0;
my $error = (defined $args{error} && $args{error}>-1) ? $args{error} : -1;
my $div = $self->{DIV};
}
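# Usage sketch (illustrative): teach a single pattern with explicit options:
#
#   $net->learn([ 1, 1 ], [ 2 ], inc=>0.20, max=>1024, error=>-1);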
1;
# Internal input class. Not to be used directly.
package AI::NeuralNet::BackProp::_run;
use strict;
# Dummy constructor.
sub new {
bless { PARENT => $_[1] }, $_[0]
}
# This is so we comply with the neuron interface.
sub weight {}
sub input {}
# Again, compliance with neuron interface.
sub register_synapse {
my $self = shift;
my $sid = $self->{REGISTRATION} || 0;
$self->{REGISTRATION} = ++$sid;
$self->{RMAP}->{$sid-1} = $self->{PARENT}->{_tmp_synapse};
return $sid-1;
}
# Here is the real meat of this package.
# run() does one thing: It fires values
# into the first layer of the network.
sub run {
my $self = shift;
my $map = shift;
my $x = 0;
$map = $self->{PARENT}->crunch($map) if($map == 0);
return undef if(substr($map,0,5) ne "ARRAY");
foreach my $el (@{$map}) {
# Catch ourself if we try to run more inputs than neurons
return $x if($x>$self->{PARENT}->{DIV}-1);
# Here we add a small amount of randomness to the network.
# This is to keep the network from getting stuck on a 0 value internally.
$self->{PARENT}->{NET}->[$x]->input(0,$el+(rand()*$self->{PARENT}->{random}));
$x++;
};
# In case we tried to run fewer inputs than neurons, run a constant 1 into the extra neurons
1;
# Internal output class. Not to be used directly.
package AI::NeuralNet::BackProp::_map;
use strict;
# Dummy constructor.
sub new {
bless { PARENT => $_[1] }, $_[0]
}
# Compliance with neuron interface
sub weight {}
# Compliance with neuron interface
sub register_synapse {
my $self = shift;
my $sid = $self->{REGISTRATION} || 0;
$self->{REGISTRATION} = ++$sid;
$self->{RMAP}->{$sid-1} = $self->{PARENT}->{_tmp_synapse};
return $sid-1;
}
# This acts just like a regular neuron by receiving
# values from input synapses. Yet, unlike a regular
# neuron, it doesn't weight the values; it just stores
# them to be retrieved by a call to map().
sub input {
no strict 'refs';
my $self = shift;
my $sid = shift;
my $value = shift;
my $size = $self->{PARENT}->{DIV};
my $flag = 1;
$self->{OUTPUT}->[$sid]->{VALUE} = $self->{PARENT}->intr($value);
$self->{OUTPUT}->[$sid]->{FIRED} = 1;
AI::NeuralNet::BackProp::out1 "Received value $self->{OUTPUT}->[$sid]->{VALUE} and sid $sid, self $self.\n";
}
# Here we simply collect the value of every neuron connected to this
# one from the layer below us and return an array ref to the final map.
sub map {
my $self = shift;
my $size = $self->{PARENT}->{DIV};
my $out = $self->{PARENT}->{OUT};
my $divide = AI::NeuralNet::BackProp->intr($size/$out);
my @map = ();
my $value;
AI::NeuralNet::BackProp::out1 "Num output neurons: $out, Input neurons: $size, Division: $divide\n";
for(0..$out-1) {
$value=0;
for my $a (0..$divide-1) {
$value += $self->{OUTPUT}->[$_*$divide+$a]->{VALUE};
}
$map[$_] = AI::NeuralNet::BackProp::intr($value/$divide);
}
my $ret=\@map;
return $self->{PARENT}->_range($ret);
}
1;
# load_pcx() wrapper package
package AI::NeuralNet::BackProp::PCX;
# Called by load_pcx in AI::NeuralNet::BackProp;
sub new {
my $type = shift;
my $self = {
parent => $_[0],
file => $_[1]
};
my (@a,@b)=load_pcx($_[1]);
$self->{image}=\@a;
$self->{palette}=\@b;
bless \%{$self}, $type;
}
# Returns a rectangular block defined by an array ref in the form of
# [$x1,$y1,$x2,$y2]
# Return value is an array ref
sub get_block {
my $self = shift;
my $ref = shift;
my ($x1,$y1,$x2,$y2) = @{$ref};
my @block = ();
my $count = 0;
for my $x ($x1..$x2-1) {
for my $y ($y1..$y2-1) {
$block[$count++] = $self->get($x,$y);
}
}
return \@block;
}
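# For example (illustrative), fetch the 10x10 block with corners (0,0)-(10,10):
#
#   my $block = $pcx->get_block([0,0,10,10]);   # 100 palette indexes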
# Returns pixel at $x,$y
sub get {
my $self = shift;
my ($x,$y) = (shift,shift);
return $self->{image}->[$y*320+$x];
}
# Returns an array of the (r,g,b) values for the palette index passed
sub rgb {
my $self = shift;
my $color = shift;
return ($self->{palette}->[$color]->{red},$self->{palette}->[$color]->{green},$self->{palette}->[$color]->{blue});
}
# Returns the mean of the (r,g,b) values of the palette index passed
sub avg {
my $self = shift;
my $color = shift;
return $self->{parent}->intr(($self->{palette}->[$color]->{red}+$self->{palette}->[$color]->{green}+$self->{palette}->[$color]->{blue})/3);
}
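# For example (illustrative), the color and mean intensity of palette entry 7:
#
#   my ($r,$g,$b) = $pcx->rgb(7);
#   my $mean      = $pcx->avg(7);   # intr(($r+$g+$b)/3)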
# Loads and decompresses a PCX-format 320x200, 8-bit image file and returns
# two arrays: the first is a 64000-element array, each element containing a palette
# index, and the second is a 256-element array, each element a hash
# ref with the keys 'red', 'green', and 'blue', each key containing the respective color
# component for that color index in the palette.
sub load_pcx {
shift if(substr($_[0],0,4) eq 'AI::');
# open the file
open(FILE, "$_[0]");
binmode(FILE);
my $tmp;
my @image;
my @palette;
my $data;
=item $net->crunch($string);
UPDATED: Now you can use a variable instead of using qw(). Strings will be split internally.
Do not use qw() to pass strings to crunch().
This splits a string passed with /[\s\t]/ into an array ref containing unique indexes
to the words. The words are stored in an internal array and preserved across load() and save()
calls. This is designed to be used to generate unique maps suitable for passing to learn() and
run() directly. It returns an array ref.
The words are not duplicated internally. For example:
$net->crunch("How are you?");
Will probably return an array ref containing 1,2,3. A subsequent call of:
$net->crunch("How is Jane?");
Will probably return an array ref containing 1,4,5. Notice, the first element stayed
the same. That is because it already stored the word "How". So, each word is stored
only once internally and the returned array ref reflects that.
=item $net->uncrunch($array_ref);
Uncrunches a map (array ref) into a scalar string of words separated by ' ' and returns the
string. This is meant to be used as a counterpart to the crunch() method, above.
=head1 MAILING LIST
A mailing list has been set up for AI::NeuralNet::BackProp for discussion of AI and
neural net related topics as they pertain to AI::NeuralNet::BackProp. I will also
announce in the group each time a new release of AI::NeuralNet::BackProp is available.
The list address is at: ai-neuralnet-backprop@egroups.com
To subscribe, send a blank email to: ai-neuralnet-backprop-subscribe@egroups.com
=cut
BackProp.pm
Changes
Makefile.PL
MANIFEST
test.pl
docs.htm
README
examples/ex_add.pl
examples/ex_add2.pl
examples/ex_sub.pl
examples/ex_bmp.pl
examples/ex_bmp2.pl
examples/ex_pcx.pl
examples/ex_pcxl.pl
examples/ex_alpha.pl
examples/ex_dow.pl
examples/ex_pat.pl
examples/ex_crunch.pl
<H1><A NAME="download">DOWNLOAD</A></H1>
<P>You can always download the latest copy of AI::NeuralNet::BackProp
from <A HREF="http://www.josiah.countystart.com/modules/AI/cgi-bin/rec.pl">http://www.josiah.countystart.com/modules/AI/cgi-bin/rec.pl</A></P>
<P>
<HR SIZE=1 COLOR=BLACK>
<H1><A NAME="mailing list">MAILING LIST</A></H1>
<P>A mailing list has been setup for AI::NeuralNet::BackProp for discussion of AI and
neural net related topics as they pertain to AI::NeuralNet::BackProp. I will also
announce in the group each time a new release of AI::NeuralNet::BackProp is available.</P>
<P>The list address is at: <A HREF="mailto:ai-neuralnet-backprop@egroups.com">ai-neuralnet-backprop@egroups.com</A></P>
<P>To subscribe, send a blank email to: <A HREF="mailto:ai-neuralnet-backprop-subscribe@egroups.com">ai-neuralnet-backprop-subscribe@egroups.com</A></P>
<P>
<HR SIZE=1 COLOR=BLACK>
<H1><A NAME="what can it do">WHAT CAN IT DO?</A></H1>
<P>Rodin Porrata asked on the ai-neuralnet-backprop mailing list,
"What can they [Neural Networks] do?". In regards to that question,
consider the following:</P>
<P>Neural nets are formed by simulated neurons connected together much the same
way the brain's neurons are. Neural networks are able to associate and
# examples/ex_add2.pl
\%diff4\n";
printf "%d %.3f %d %g %s %f %f %f %f\n",
$layers, $inc, $top, $forgetfulness, timestr($runtime),
$percent_diff[0],
$percent_diff[1], $percent_diff[2], $percent_diff[3];
}
}
}
#....................................................
sub addnet
{
print "\nCreate a new net with $layers layers, 3 inputs, and 1 output\n";
my $net = AI::NeuralNet::BackProp->new($layers,3,1);
# Disable debugging
$net->debug(0);
my @data = (
[ 2633, 2665, 2685], [ 2633 + 2665 + 2685 ],
);
my @input = ( [ 2222, 3333, 3200 ],
[ 1111, 1222, 3211 ],
[ 2345, 2543, 3000 ],
[ 2654, 2234, 2534 ] );
test_net( $net, @input );
}
#.....................................................................
sub test_net {
my @set;
my $fb;
my $net = shift;
my @data = @_;
undef @percent_diff; #@answers; undef @predictions;
for( $i=0; defined( $data[$i] ); $i++ ){
@set = @{ $data[$i] };
$fb = $net->run(\@set)->[0];
# Print output
# examples/ex_pcx.pl
}
print "Testing random block...\n";
print "Result: ",$net->run($blocks[rand()*$b])->[0],"\n";
print "Bencmark for run: ", $net->benchmarked(), "\n";
$net->save("pcx2.net");
sub print_ref {
no strict 'refs';
shift if(substr($_[0],0,4) eq 'AI::');
my $map = shift;
my $break = shift;
my $x;
my @els = (' ','.',',',':',';','%','#');
foreach my $el (@{$map}) {
$str=$el/255*6;
print $els[$str];
$x++;
if($x>$break-1) {
print "\n";
# examples/ex_sub.pl
=begin
File: examples/ex_sub.pl
Author: Josiah Bryan, <jdb@wcoil.com>
Desc:
This demonstrates the ability of a neural net to generalize and predict what the correct
result is for inputs that it has never seen before.
This teaches a network to subtract using 6 example pairs, then asks the user for
two numbers to subtract and displays the result of running them through the net.
=cut
use AI::NeuralNet::BackProp;
my $subtract = new AI::NeuralNet::BackProp(2,2,1);
if(!$subtract->load('sub.dat')) {
$subtract->learn_set([
[ 1, 1 ], [ 0 ] ,
[ 2, 1 ], [ 1 ],
[ 10, 5 ], [ 5 ],
[ 20, 10 ], [ 10 ],
[ 100, 50 ], [ 50 ],
[ 500, 200 ], [ 300 ],
]);
$subtract->save('sub.dat');
}
print "Enter first number to subtract : "; chomp(my $a = <>);
print "Enter second number to subtract : "; chomp(my $b = <>);
print "Result: ",$subtract->run([$a,$b])->[0],"\n";
# Before `make install' is performed this script should be runnable with
# `make test'. After `make install' it should work as `perl test.pl'
BEGIN { $| = 1; print "1..13\n"; }
END {print "not ok 1\n" unless $loaded;}
sub t { my $f=shift;$t++;my $str=($f)?"ok $t":"not ok $t";print $str,"\n";}
use AI::NeuralNet::BackProp;
$loaded = 1;
t 1;
my $net = new AI::NeuralNet::BackProp(2,2,1);
t $net;
t ($net->intr(0.51) eq 1);
t ($net->intr(0.00001) eq 0);
t ($net->intr(0.50001) eq 1);
t $net->learn_set([
[ 1, 1 ], [ 2 ] ,