AI-NeuralNet-FastSOM
view release on metacpan or search on metacpan
Revision history for Perl extension AI::NeuralNet::FastSOM.
0.19 Sat Dec 3 14:52:39 EST 2016
- fix some errant sprintf's
0.18 Sat Dec 3 14:36:03 EST 2016
- force all tests serially, not just test_dynamic
0.17 Sat Dec 3 02:43:38 EST 2016
- force test harness to test serially
- update copyright notice
- clean up tests
0.16 Sat Jan 3 05:53:12 EST 2015
*
* References
* ==========
*
* Each of Rect, Map, Array, and Vector contains a member 'ref' which is
* an SV* pointing to an RV. The RV can be returned directly to perl-land
* after being blessed into its respective class.
*
* The RV references an SV containing an IV. The IV is set to the base
* address of its component structure. This is so the class code can know
* which instance of the class is being referred to on callback.
*
* The SV has its initial reference count set to one, representing its
* parent's ownership. If a parent dies, or a perl-land reference is
* taken of any component, its reference count should be adjusted
* accordingly.
*
* When the count reaches zero perl will call the class's DESTROY method,
* at which point we can decrease the reference count on each child and
* free the component structure.
*
examples/eigenvector_initialization.pl view on Meta::CPAN
my $epsilon = 0.001;
my $epochs = 400;
{ # random initialisation
    # Build a 5x6 rectangular SOM; arrow-call syntax replaces the
    # fragile indirect-object form `new Class (...)`.
    my $nn = AI::NeuralNet::FastSOM::Rect->new(
        output_dim => "5x6",
        input_dim  => $dim,
    );
    $nn->initialize; # random
    my @mes = $nn->train ($epochs, @vs);
    warn "random: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # constant initialisation
    # Same map geometry, but primed with the last training vector;
    # arrow-call syntax replaces the indirect-object form `new Class (...)`.
    my $nn = AI::NeuralNet::FastSOM::Rect->new(
        output_dim => "5x6",
        input_dim  => $dim,
    );
    $nn->initialize ($vs[-1]);
    my @mes = $nn->train ($epochs, @vs);
    warn "constant: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # eigenvector initialisation
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
my @training_vectors; # find these training vectors
{ # and prime them with this eigenvector stuff;
use PDL;
my $A = pdl \@vs;
examples/eigenvector_initialization.pl view on Meta::CPAN
}
for (@es_idx) { # from the highest values downwards, take the index
push @training_vectors, [ list $E->dice($_) ] ; # get the corresponding vector
}
}
$nn->initialize (@training_vectors[0..0]); # take only the biggest ones (the eigenvalues are big, actually)
#warn $nn->as_string;
my @mes = $nn->train ($epochs, @vs);
warn "eigen: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
__END__
examples/load_save.pl view on Meta::CPAN
#my @vs = ([1,-0.5], [0,1]);
#my $dim = 2;
my $epsilon = 0.001;
$nn->initialize; # random
my @mes = $nn->train ($epochs, @vs);
warn "random: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # constant initialisation
    # Prime the map with the last training vector; arrow-call syntax
    # replaces the indirect-object form `new Class (...)`.
    my $nn = AI::NeuralNet::FastSOM::Rect->new(
        output_dim => "5x6",
        input_dim  => $dim,
    );
    $nn->initialize ($vs[-1]);
    my @mes = $nn->train ($epochs, @vs);
    warn "constant: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # eigenvector initialisation
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
my @training_vectors; # find these training vectors
{ # and prime them with this eigenvector stuff;
use PDL;
my $A = pdl \@vs;
examples/load_save.pl view on Meta::CPAN
}
for (@es_idx) { # from the highest values downwards, take the index
push @training_vectors, [ list $E->dice($_) ] ; # get the corresponding vector
}
}
$nn->initialize (@training_vectors[0..0]); # take only the biggest ones (the eigenvalues are big, actually)
#warn $nn->as_string;
my @mes = $nn->train ($epochs, @vs);
warn "eigen: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
__END__
lib/AI/NeuralNet/FastSOM.pm view on Meta::CPAN
: $self->{labels}->[$x]->[$y];
}
# Accessor for the neuron vector stored at map position ($x, $y).
# With a fourth argument it stores that value first; it always returns
# whatever now lives at $self->{map}[$x][$y].
sub value {
    my ($self, $x, $y, $v) = @_;
    if (defined $v) {
        return $self->{map}[$x][$y] = $v;
    }
    return $self->{map}[$x][$y];
}
# Return the mean BMU distance over a list of data vectors.
#
# For each vector passed in, asks the map for its best-matching unit
# and takes the distance (third element of the bmu() return list),
# then averages those distances. As before, an empty vector list dies
# with a division-by-zero error.
sub mean_error {
    my $self = shift;          # the remaining @_ is the list of vectors
    my $error = 0;
    # Plain accumulation loop; the original used a map-in-void-context
    # chain purely for its side effect, which is a misuse of map.
    $error += ( $self->bmu($_) )[2] for @_;
    return $error / scalar @_; # the mean value
}
XSLoader::load(__PACKAGE__);
1;
__END__
=pod
t/orig/som.t view on Meta::CPAN
is ($nn->label ( 1, 0), undef, 'label set/get');
}
{
    # Train a 5x6 rectangular SOM on three vectors and check that the
    # mean error is non-increasing; arrow-call syntax replaces the
    # indirect-object form `new Class (...)`.
    my $nn = AI::NeuralNet::FastSOM::Rect->new(
        output_dim => "5x6",
        input_dim  => 3,
    );
    $nn->initialize;
    my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
    my $me = $nn->mean_error (@vs);
    for (1 .. 40) {
        $nn->train (50, @vs);
        ok ($me >= $nn->mean_error (@vs), 'mean error getting smaller');
        $me = $nn->mean_error (@vs);
    }
    foreach (1..3) {
        # train() returns one error per (vector, epoch) pair.
        my @mes = $nn->train (20, @vs);
        is (scalar @mes, 3 * 20, 'errors while training, nr');
        ok ((!grep { $_ > 10 * $me } @mes), 'errors while training, none significantly bigger');
    }
}
__END__
# randomized pick
@vectors = ...;
my $get = sub {
return $vectors[ int (rand (scalar @vectors) ) ];
{
    # Train a 5x6 rectangular SOM on three sample vectors and verify
    # that the mean error never grows between training rounds.
    my $som = AI::NeuralNet::FastSOM::Rect->new(
        output_dim => "5x6",
        input_dim  => 3,
    );
    $som->initialize;
    my @samples  = ( [ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3 ] );
    my $previous = $som->mean_error(@samples);
    foreach my $round (1 .. 40) {
        $som->train(50, @samples);
        ok ($previous >= $som->mean_error(@samples), 'mean error getting smaller');
        $previous = $som->mean_error(@samples);
    }
    foreach my $round (1..3) {
        my @errors = $som->train(20, @samples);
        is (scalar @errors, 3 * 20, 'errors while training, nr');
        ok ((!grep { $_ > 10 * $previous } @errors), 'errors while training, none significantly bigger');
    }
}
__END__
( run in 1.691 second using v1.01-cache-2.11-cpan-49f99fa48dc )