AI-NeuralNet-FastSOM
examples/eigenvector_initialization.pl
my $epsilon = 0.001;
my $epochs = 400;
{ # random initialisation
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
$nn->initialize; # random
my @mes = $nn->train ($epochs, @vs);
warn "random: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # constant initialisation
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
$nn->initialize ($vs[-1]);
my @mes = $nn->train ($epochs, @vs);
warn "constant: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # eigenvector initialisation
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
my @training_vectors; # find these training vectors
{ # and prime them with this eigenvector stuff;
use PDL;
my $A = pdl \@vs;
# ... (lines elided in this excerpt: they derive $E, the matrix of eigenvectors, and @es_idx, the eigenvalue indices ordered from largest to smallest)
}
for (@es_idx) { # from the highest values downwards, take the index
push @training_vectors, [ list $E->dice($_) ] ; # get the corresponding vector
}
}
$nn->initialize (@training_vectors[0..0]); # prime the map with only the dominant eigenvector (the one with the largest eigenvalue)
#warn $nn->as_string;
my @mes = $nn->train ($epochs, @vs);
warn "eigen: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
__END__
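The eigen-decomposition itself is elided from the excerpt above. For reference, here is a minimal sketch of one way to obtain such priming vectors with PDL; it is not the original file's code: the scatter-matrix construction, sample data and variable names are illustrative, and it only reuses the same dice() slicing convention as the example.

use strict;
use warnings;
use PDL;    # brings in eigens_sym (PDL::MatrixOps) and qsorti (PDL::Ufunc)

my @vs = ([1, -0.5, 0.3], [0, 1, -0.2], [0.5, 0.2, 1]);   # illustrative sample vectors
my $A  = pdl \@vs;                                        # one training vector per row
my $C  = $A->transpose x $A;                              # symmetric scatter matrix (input_dim x input_dim)
my ($E, $e) = eigens_sym($C);                             # eigenvectors of $C, plus their eigenvalues
my @es_idx = list(qsorti(-$e));                           # indices ordered by descending eigenvalue

my @training_vectors = map { [ list $E->dice($_) ] } @es_idx;
# $nn->initialize($training_vectors[0]);                  # prime with the dominant eigenvector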
examples/load_save.pl
#my @vs = ([1,-0.5], [0,1]);
#my $dim = 2;
my $epsilon = 0.001;
$nn->initialize; # random
my @mes = $nn->train ($epochs, @vs);
warn "random: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # constant initialisation
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
$nn->initialize ($vs[-1]);
my @mes = $nn->train ($epochs, @vs);
warn "constant: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # eigenvector initialisation
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
my @training_vectors; # find these training vectors
{ # and prime them with this eigenvector stuff;
use PDL;
my $A = pdl \@vs;
# ... (lines elided in this excerpt: they derive $E, the matrix of eigenvectors, and @es_idx, the eigenvalue indices ordered from largest to smallest)
}
for (@es_idx) { # from the highest values downwards, take the index
push @training_vectors, [ list $E->dice($_) ] ; # get the corresponding vector
}
}
$nn->initialize (@training_vectors[0..0]); # prime the map with only the dominant eigenvector (the one with the largest eigenvalue)
#warn $nn->as_string;
my @mes = $nn->train ($epochs, @vs);
warn "eigen: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
__END__
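The portion of examples/load_save.pl shown here overlaps the initialization code above and does not include the part that presumably saves and reloads the map. As a hedged sketch only: assuming the trained object can be serialized with Storable (an assumption suggested by the file name, not confirmed by this excerpt), saving and restoring could look like the following.

use strict;
use warnings;
use Storable qw(store retrieve);
use AI::NeuralNet::FastSOM::Rect;

my @vs = ([1, -0.5, 0.3], [0, 1, -0.2], [0.5, 0.2, 1]);   # illustrative vectors
my $nn = AI::NeuralNet::FastSOM::Rect->new(output_dim => '5x6', input_dim => 3);
$nn->initialize;
$nn->train(400, @vs);

store $nn, 'som.store';                  # assumes the object is Storable-compatible
my $copy = retrieve 'som.store';
print $copy->mean_error(@vs), "\n";      # the restored map should reproduce the same mean error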
lib/AI/NeuralNet/FastSOM.pm
sub label {
my ($self, $x, $y, $l) = @_;
return defined $l
? $self->{labels}->[$x]->[$y] = $l
: $self->{labels}->[$x]->[$y];
}
sub value {
my ($self, $x, $y, $v) = @_;
return defined $v
? $self->{map}[$x][$y] = $v
: $self->{map}[$x][$y];
}
sub mean_error {
my $self = shift;
my $error = 0;
map { $error += $_ } # then add them all up
map { ( $self->bmu($_) )[2] } # then find the distance
@_; # take all data vectors
return ($error / scalar @_); # return the mean value
}
XSLoader::load(__PACKAGE__);
1;
__END__
=pod
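The accessors above follow a simple get/set pattern, and mean_error averages the best-matching-unit (BMU) distance over a list of data vectors. Below is a short usage sketch built only from calls that appear in these excerpts; the data and the label text are illustrative.

use strict;
use warnings;
use AI::NeuralNet::FastSOM::Rect;

my $nn = AI::NeuralNet::FastSOM::Rect->new(output_dim => '5x6', input_dim => 3);
$nn->initialize;
my @vs = ([3, 2, 4], [-1, -1, -1], [0, 4, -3]);
$nn->train(400, @vs);

my ($x, $y, $dist) = $nn->bmu([3, 2, 4]);   # best matching unit and its distance
$nn->label($x, $y, 'cluster A');            # attach a label to that cell
print $nn->label($x, $y), "\n";             # read the label back
printf "mean error: %.6f\n", $nn->mean_error(@vs);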
t/orig/som.t
is ($nn->label ( 1, 0), undef, 'label set/get');
}
{
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => 3);
$nn->initialize;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
my $me = $nn->mean_error (@vs);
for (1 .. 40) {
$nn->train (50, @vs);
ok ($me >= $nn->mean_error (@vs), 'mean error getting smaller');
$me = $nn->mean_error (@vs);
# warn $me;
}
foreach (1..3) {
my @mes = $nn->train (20, @vs);
is (scalar @mes, 3 * 20, 'errors while training, nr');
ok ((!grep { $_ > 10 * $me } @mes), 'errors while training, none significantly bigger');
}
}
__END__
# randomized pick
@vectors = ...;
my $get = sub {
    return $vectors[ int(rand(scalar @vectors)) ];
};
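The closure above only defines a random picker; the code that consumes it is not part of this excerpt. Since train() is called elsewhere in the distribution with a plain list of vectors, one way to use such a picker is to draw a fresh sample for each training round. The sampling loop below is an illustration, not the module's API, and $nn is assumed to be an already initialized map.

for my $round (1 .. 10) {
    my @sample = map { $get->() } 1 .. 100;   # 100 randomly picked vectors
    $nn->train(1, @sample);                   # one epoch over the sample
}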
{
my $nn = AI::NeuralNet::FastSOM::Rect->new(
output_dim => "5x6",
input_dim => 3,
);
$nn->initialize;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
my $me = $nn->mean_error(@vs);
for (1 .. 40) {
$nn->train(50, @vs);
ok ($me >= $nn->mean_error(@vs), 'mean error getting smaller');
$me = $nn->mean_error(@vs);
}
for (1..3) {
my @mes = $nn->train(20, @vs);
is (scalar @mes, 3 * 20, 'errors while training, nr');
ok ((!grep { $_ > 10 * $me } @mes), 'errors while training, none significantly bigger');
}
}
__END__
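Both test blocks rely on two behaviours visible in the excerpts: train() returns one error value per vector per epoch (hence the expected 3 * 20 entries), and the mean error over the training set should not grow across successive rounds. Outside a test harness the same trend can be inspected directly; this is a minimal sketch using only calls shown above, with illustrative data.

use strict;
use warnings;
use AI::NeuralNet::FastSOM::Rect;

my $nn = AI::NeuralNet::FastSOM::Rect->new(output_dim => '5x6', input_dim => 3);
$nn->initialize;
my @vs = ([3, 2, 4], [-1, -1, -1], [0, 4, -3]);

for my $round (1 .. 40) {
    $nn->train(50, @vs);
    printf "round %2d: mean error %.6f\n", $round, $nn->mean_error(@vs);
}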