AI-NeuralNet-BackProp
view release on metacpan or search on metacpan
BackProp.pm view on Meta::CPAN
# NOTE(review): the opening "sub ... {" line of this handler is not visible in
# this chunk of the file; these lines are the tail of a regular neuron's input
# method. ($self is the receiving neuron, $sid the synapse ID, $value the raw
# input sent by the firing neuron.)
my $self = shift;
my $sid = shift;
my $value = shift;
# We simply weight the value sent by the neuron. The neuron identifies itself to us
# using the code we gave it when it registered itself with us. The code is in $sid,
# (synapse ID) and we use that to track the weight of the connection.
# This line simply multiplies the value by its weight and gets the integer from it.
$self->{SYNAPSES}->{LIST}->[$sid]->{VALUE} = intr($value * $self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT});
$self->{SYNAPSES}->{LIST}->[$sid]->{FIRED} = 1;
# Keep the unweighted input too; the learning code uses it when adjusting weights.
$self->{SYNAPSES}->{LIST}->[$sid]->{INPUT} = $value;
# Debugger output (note: "Recieved" typo is in the runtime string and is left as-is here).
AI::NeuralNet::BackProp::out1("\nRecieved input of $value, weighted to $self->{SYNAPSES}->{LIST}->[$sid]->{VALUE}, synapse weight is $self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT} (sid is $sid for $self).\n");
AI::NeuralNet::BackProp::out1((($self->input_complete())?"All synapses have fired":"Not all synapses have fired"));
AI::NeuralNet::BackProp::out1(" for $self.\n");
# Check and see if all synapses have fired that are connected to this one.
# If they have, then generate the output value for this synapse.
$self->output() if($self->input_complete());
}
# Loops through and outputs to every neuron that this
# neuron is registered as a synapse of.
# Push this neuron's computed output value to every downstream neuron
# that this neuron was registered with via connect().
sub output {
	my $self  = shift;
	my $count = $self->{OUTPUTS}->{SIZE} || 0;   # number of registered output connections
	my $value = $self->get_output();             # value to propagate to each of them
	for (0..$count-1) {
		# One connection record: {PKG} is the receiving neuron, {ID} is the
		# synapse ID it handed back to us when we registered with it.
		my $link = $self->{OUTPUTS}->{LIST}->[$_];
		AI::NeuralNet::BackProp::out1("Outputing to $self->{OUTPUTS}->{LIST}->[$_]->{PKG}, index $_, a value of $value with ID $self->{OUTPUTS}->{LIST}->[$_]->{ID}.\n");
		$link->{PKG}->input($link->{ID}, $value);
	}
}
# Used internally by output().
# Used internally by output(): derives this neuron's output value from the
# weighted values stored on its input synapses.
sub get_output {
my $self = shift;
# Number of input synapses registered with this neuron.
my $size = $self->{SYNAPSES}->{SIZE} || 0;
my $value = 0;
my $state = 0;
my (@map,@weight);
BackProp.pm view on Meta::CPAN
# NOTE(review): the line above is archive-scraper residue, not code, and the
# archive view has elided source lines at this point. The statements below
# reference $ammount and $what, which are not declared in get_output() --
# they appear to belong to the weight-adjustment sub that the elision has
# spliced into this view. Treat this block as incomplete; verify against the
# full BackProp.pm before changing it.
# Now this sub is the main cog in the learning wheel. It is called recursively on
# each neuron that has been bad (given incorrect output.)
for my $i (0..$size-1) {
$value = $self->{SYNAPSES}->{LIST}->[$i]->{VALUE};
# if(0) permanently disables the alternative update formula below.
if(0) {
# Formula by Steve Purkis
# Converges very fast for low-value inputs. Has trouble converging on high-value
# inputs. Feel free to play and try to get to work for high values.
my $delta = $ammount * ($what - $value) * $self->{SYNAPSES}->{LIST}->[$i]->{INPUT};
$self->{SYNAPSES}->{LIST}->[$i]->{WEIGHT} += $delta;
$self->{SYNAPSES}->{LIST}->[$i]->{PKG}->weight($ammount,$what);
}
# This formula in use by default is original by me (Josiah Bryan) as far as I know.
# If it is equal, then don't adjust
#
### Disabled because this sometimes causes
### infinite loops when learning with range limits enabled
BackProp.pm view on Meta::CPAN
}
# Called via AI::NeuralNet::BackProp::NeuralNetwork::initialize_group() to
# form the neuron grids.
# This just registers another synapse as a synapse to output to from this one, and
# then we ask that synapse to let us register as an input connection and we
# save the sid that the output synapse returns.
# Registers $to as a downstream output connection of this neuron, registers
# this neuron as an input synapse of $to, and records the synapse ID that $to
# assigns to us. Returns that synapse ID.
sub connect {
	my $self = shift;
	my $to   = shift;
	# Next free slot in our outputs list.
	my $oid  = $self->{OUTPUTS}->{SIZE} || 0;
	AI::NeuralNet::BackProp::out1("Connecting $self to $to at $oid...\n");
	$self->{OUTPUTS}->{LIST}->[$oid]->{PKG} = $to;
	$self->{OUTPUTS}->{LIST}->[$oid]->{ID}  = $to->register_synapse($self);
	$self->{OUTPUTS}->{SIZE} = $oid + 1;
	# BUG FIX: the original wrote "$self->{OUTPUTS}->{SIZE} = ++$oid;" and then
	# returned ->[$oid]->{ID}, i.e. it read the ID from the empty slot PAST the
	# one just filled -- always returning undef and autovivifying a spurious
	# entry. Return the ID actually stored for this connection instead.
	return $self->{OUTPUTS}->{LIST}->[$oid]->{ID};
}
1;
package AI::NeuralNet::BackProp;
use Benchmark;
use strict;
# Returns the number of elements in an array ref, undef on error
sub _FETCHSIZE {
BackProp.pm view on Meta::CPAN
# values from input synapses. Yet, unlike a regular
# neuron, it doesn't weight the values, just stores
# them to be retrieved by a call to map().
# Input handler for the output-storage layer: records the (rounded) value sent
# by input synapse $sid without applying any weight, and marks that slot as
# fired. The stored values are later collected by map().
sub input {
	no strict 'refs';
	my $self  = shift;
	my $sid   = shift;
	my $value = shift;
	# CLEANUP: the original also declared "my $size = $self->{PARENT}->{DIV};"
	# and "my $flag = 1;" -- both were never read, so they are removed here.
	$self->{OUTPUT}->[$sid]->{VALUE} = $self->{PARENT}->intr($value);
	$self->{OUTPUT}->[$sid]->{FIRED} = 1;
	AI::NeuralNet::BackProp::out1 "Received value $self->{OUTPUT}->[$sid]->{VALUE} and sid $sid, self $self.\n";
}
# Here we simply collect the value of every neuron connected to this
# one from the layer below us and return an array ref to the final map.
# Collapses the $size stored input values down to $out output values by
# averaging each consecutive group of ($size/$out) inputs, then passes the
# resulting map through the parent network's _range() filter and returns it.
sub map {
my $self = shift;
# $size: number of input-layer storage slots; $out: number of network outputs.
my $size = $self->{PARENT}->{DIV};
my $out = $self->{PARENT}->{OUT};
# Integer group width. NOTE(review): assumes $size is an exact multiple of
# $out -- any trailing inputs beyond $out*$divide are silently ignored, and
# $divide==0 (out > size) would skip the inner loop and divide by zero below.
# TODO confirm the parent network guarantees $size >= $out and divisibility.
my $divide = AI::NeuralNet::BackProp->intr($size/$out);
my @map = ();
my $value;
AI::NeuralNet::BackProp::out1 "Num output neurons: $out, Input neurons: $size, Division: $divide\n";
for(0..$out-1) {
$value=0;
# Sum the $divide stored values belonging to output slot $_.
for my $a (0..$divide-1) {
$value += $self->{OUTPUT}->[($_*$divide)+$a]->{VALUE};
AI::NeuralNet::BackProp::out1 "\$a is $a, index is ".(($_*$divide)+$a).", value is $self->{OUTPUT}->[($_*$divide)+$a]->{VALUE}\n";
}
# Integer average for this output slot.
$map[$_] = AI::NeuralNet::BackProp->intr($value/$divide);
AI::NeuralNet::BackProp::out1 "Map position $_ is $map[$_] in @{[\@map]} with self set to $self.\n";
# NOTE(review): only FIRED flags 0..$out-1 are cleared here, not all $size
# input slots -- looks intentional in the original, but verify against callers.
$self->{OUTPUT}->[$_]->{FIRED} = 0;
}
my $ret=\@map;
# _range() clamps/filters the map per the network's configured output range.
return $self->{PARENT}->_range($ret);
}
1;
# load_pcx() wrapper package
package AI::NeuralNet::BackProp::PCX;
# Called by load_pcx in AI::NeuralNet::BackProp;
examples/ex_add2.pl view on Meta::CPAN
# Experiment bookkeeping for the add2 example: variables for the sweep over
# layer counts / learning parameters, with results logged to OUTFP.
# BUG FIX: the original my() list declared $top twice ("my ( ..., $top, $inc,
# $top, ... )"), which masks the first declaration and warns under
# "my variable masks earlier declaration" -- it is declared once here.
my ( $layers, $inputs, $outputs, $top, $inc, $runtime,
$forgetfulness );
my @answers;
my @predictions;
my @percent_diff;
$inputs = 3;
$outputs = 1;
my ($maxinc,$maxtop,$incstep);
# Enable autoflush on the results file. NOTE(review): $OUTPUT_AUTOFLUSH is the
# English-module alias for $| -- this requires "use English" to be in effect
# earlier in the script; verify against the elided top of this example.
select OUTFP; $OUTPUT_AUTOFLUSH = 1; select STDOUT;
# Column header for the results table.
print OUTFP "layers inc top forgetfulness time \%diff1 \%diff2 \%diff3
\%diff4\n\n";
for( $layers = 1; $layers <= 3; $layers++ ){
if( $layers <= 2 ){
$incstep = 0.025;
}
else{
$incstep = 0.05;
}
( run in 0.308 second using v1.01-cache-2.11-cpan-d6f9594c0a5 )