AI-NeuralNet-Mesh
view release on metacpan or search on metacpan
$self->{_inputs}->[$i]->{value} = 0;
$self->{_inputs}->[$i]->{weight} = 1; #rand()*1;
$self->{_inputs}->[$i]->{fired} = 0;
$self->{_inputs_size} = ++$i;
return $i-1;
}
# Connect this node's output to $node. Registers $node as a downstream
# target and records which of $node's input slots we will feed into
# (obtained by registering ourselves as one of $node's inputs).
# Returns the index of the new entry in our output list.
sub add_output_node {
    my ($self, $node) = @_;
    my $index = $self->{_outputs_size} || 0;
    my $entry = $self->{_outputs}->[$index] ||= {};
    $entry->{node}    = $node;
    $entry->{from_id} = $node->add_input_node($self);
    $self->{_outputs_size} = $index + 1;
    return $index;
}
# Scale every input connection's weight by (1 + $delta), then recurse
# into each upstream node so the adjustment propagates back through
# the mesh. Inputs without a source node (raw inputs) are left alone.
sub adjust_weight {
    my ($self, $delta) = @_;
    foreach my $connection (@{ $self->{_inputs} }) {
        $connection->{weight} += $delta * $connection->{weight};
        $connection->{node}->adjust_weight($delta) if $connection->{node};
    }
}
1;
# Internal usage, prevents recursion on empty nodes.
# A "cap" is an inert stub node placed at the edges of the mesh so that
# recursive walks (input propagation, weight adjustment, wiring) stop
# cleanly instead of recursing into undefined nodes.
package AI::NeuralNet::Mesh::cap;
use strict;
use warnings;

# Construct an empty cap object; it carries no state.
sub new { return bless {}, shift }

# No-op implementations of the node interface: a cap accepts any call
# and does nothing, returning an empty list / undef.
sub input           { return }
sub adjust_weight   { return }
sub add_output_node { return }
sub add_input_node  { return }
1;
# Internal usage, collects data from output layer.
package AI::NeuralNet::Mesh::output;
use strict;

# Constructor. Takes the parent mesh object and prepares storage for
# the values produced by the mesh's output-layer nodes.
sub new {
    my $type = shift;
    my $self = {
        _parent      => shift,   # back-reference to the owning mesh
        _inputs      => [],      # one slot per connected output node
        _inputs_size => 0,       # explicit init (was left to autovivify)
    };
    return bless $self, $type;
}

# Allocate the next input slot for a connecting node and return its
# zero-based id; the caller stores this id and passes it back to input().
sub add_input_node {
    my $self = shift;
    return (++$self->{_inputs_size}) - 1;
}

# Receive a value from the output node registered at slot $from_id and
# store it, passed through the parent's intr() (its number-conditioning
# helper), for later retrieval via get_outputs().
sub input {
    my $self    = shift;
    my $input   = shift;
    my $from_id = shift;
    $self->{_parent}->d("GOT INPUT [$input] FROM [$from_id]\n", 1);
    $self->{_inputs}->[$from_id] = $self->{_parent}->intr($input);
}

# Return the array ref of collected output values, indexed by slot id.
sub get_outputs {
    my $self = shift;
    return $self->{_inputs};
}
1;
__END__
=head1 NAME
AI::NeuralNet::Mesh - An optimized, accurate neural network Mesh.
=head1 SYNOPSIS
use AI::NeuralNet::Mesh;
# Create a mesh with 2 layers, 2 nodes/layer, and one output node.
my $net = new AI::NeuralNet::Mesh(2,2,1);
# Teach the network the AND function
$net->learn([0,0],[0]);
$net->learn([0,1],[0]);
$net->learn([1,0],[0]);
$net->learn([1,1],[1]);
# Present it with two test cases
my $result_bit_1 = $net->run([0,1])->[0];
my $result_bit_2 = $net->run([1,1])->[0];
# Display the results
print "AND test with inputs (0,1): $result_bit_1\n";
print "AND test with inputs (1,1): $result_bit_2\n";
=head1 VERSION & UPDATES
This is version B<0.44>, an update release for version 0.43.
This release fixes a usage conflict with Perl 5.3.3.
With this version I have gone through and tuned up many areas
of this module, including the descent algorithm in learn(),
as well as adding four custom activation functions and several export
tag sets. With this release, I have also included a few
new and more practical example scripts. (See ex_wine.pl) This release
also includes a simple example of an ALN (Adaptive Logic Network) made
with this module. See ex_aln.pl. Also in this release is support for
loading data sets from simple CSV-like files. See the load_set() method
for details. This version also fixes a big bug that I never knew about
until writing some demos for this version - that is, when trying to use
more than one output node, the mesh would freeze in learning. But, that
is fixed now, and you can have as many outputs as you want (how do 3
inputs and 50 outputs sound? :-)
=head1 DESCRIPTION
( run in 0.695 second using v1.01-cache-2.11-cpan-39bf76dae61 )