view release on metacpan or search on metacpan
Revision history for Perl extension AI::NeuralNet::FastSOM.
0.19 Sat Dec 3 14:52:39 EST 2016
- fix some errant sprintf's
0.18 Sat Dec 3 14:36:03 EST 2016
- force all tests serially, not just test_dynamic
0.17 Sat Dec 3 02:43:38 EST 2016
- force test harness to test serially
- update copyright notice
- clean up tests
0.16 Sat Jan 3 05:53:12 EST 2015
examples/eigenvector_initialization.pl view on Meta::CPAN
input_dim => $dim);
my @training_vectors; # find these training vectors
{ # and prime them with this eigenvector stuff;
use PDL;
my $A = pdl \@vs;
while ($A->getdim(0) < $A->getdim(1)) { # make the beast quadratic
$A = append ($A, zeroes (1)); # by padding zeroes
}
my ($E, $e) = eigens_sym $A;
# print $E;
# print $e;
my @es = list $e; # eigenvalues
# warn "es : ".Dumper \@es;
my @es_desc = sort { $b <=> $a } @es; # eigenvalues sorted desc
# warn "desc: ".Dumper \@es_desc;
my @es_idx = map { _find_num ($_, \@es) } @es_desc; # eigenvalue indices sorted by eigenvalue (desc)
# warn "idx: ".Dumper \@es_idx;
sub _find_num {
my $v = shift;
examples/load_save.pl view on Meta::CPAN
input_dim => $dim);
my @training_vectors; # find these training vectors
{ # and prime them with this eigenvector stuff;
use PDL;
my $A = pdl \@vs;
while ($A->getdim(0) < $A->getdim(1)) { # make the beast quadratic
$A = append ($A, zeroes (1)); # by padding zeroes
}
my ($E, $e) = eigens_sym $A;
# print $E;
# print $e;
my @es = list $e; # eigenvalues
# warn "es : ".Dumper \@es;
my @es_desc = sort { $b <=> $a } @es; # eigenvalues sorted desc
# warn "desc: ".Dumper \@es_desc;
my @es_idx = map { _find_num ($_, \@es) } @es_desc; # eigenvalue indices sorted by eigenvalue (desc)
# warn "idx: ".Dumper \@es_idx;
sub _find_num {
my $v = shift;
lib/AI/NeuralNet/FastSOM/Rect.pm view on Meta::CPAN
}
}
}
# Render the SOM grid as a human-readable table: a header row of
# zero-padded y indices, a dashed separator, then one text row per
# (x, vector-component) pair containing the formatted weight values.
sub as_string {
    my $self = shift;

    my ($rows, $cols) = ($self->{_X}, $self->{_Y});
    my $vec_len = scalar @{ $self->{map}->[0]->[0] };

    # Column header: one zero-padded index per y coordinate.
    my $out = " ";
    $out .= sprintf(" %02d ", $_) for 0 .. $cols - 1;
    $out .= "\n" . "-" x 107 . "\n";

    for my $row (0 .. $rows - 1) {
        for my $component (0 .. $vec_len - 1) {
            # Each printed line is labelled with its x coordinate.
            $out .= sprintf("%02d | ", $row);
            $out .= sprintf("% 2.2f ", $self->{map}->[$row]->[$_]->[$component])
                for 0 .. $cols - 1;
            $out .= "\n";
        }
        $out .= "\n";    # blank line between grid rows
    }
    return $out;
}
# Dump the raw vector data, tab-separated, one map cell per line —
# suitable for feeding straight into gnuplot.
sub as_data {
    my $self = shift;

    my $vec_len = scalar @{ $self->{map}->[0]->[0] };
    my $out = '';
    for my $row (0 .. $self->{_X} - 1) {
        for my $col (0 .. $self->{_Y} - 1) {
            my $cell = $self->{map}->[$row]->[$col];
            $out .= sprintf("\t%f", $cell->[$_]) for 0 .. $vec_len - 1;
            $out .= "\n";
        }
    }
    return $out;
}
1;
__END__
lib/AI/NeuralNet/FastSOM/Rect.pm view on Meta::CPAN
use AI::NeuralNet::FastSOM::Rect;
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => 3);
$nn->initialize;
$nn->train (30,
[ 3, 2, 4 ],
[ -1, -1, -1 ],
[ 0, 4, -3]);
print $nn->as_data;
=head1 INTERFACE
=head2 Constructor
The constructor takes the following arguments (in addition to those in
the base class):
=over
lib/AI/NeuralNet/FastSOM/Rect.pm view on Meta::CPAN
my $m = $nn->map;
for my $x (0 .. 5) {
for my $y (0 .. 4){
warn "vector at $x, $y: ". Dumper $m->[$x]->[$y];
}
}
=item I<as_data>
print I<$nn>->as_data
This method creates a string containing the raw vector data, row by
row. This can be fed into gnuplot, for instance.
=back
=head1 SEE ALSO
L<http://www.ai-junkie.com/ann/som/som1.html>
#########################
# change 'tests => 1' to 'tests => last_test_to_print';
use Test::More qw(no_plan);
BEGIN { use_ok('AI::NeuralNet::FastSOM::Hexa') };
######

use Storable qw/store/;

# Train a small hexagonal SOM, then persist the BMU coordinates (plain
# text) and the trained object (Storable) for the companion
# t/hexa_retrieve.t to read back.
{
    my $nn = AI::NeuralNet::FastSOM::Hexa->new(
        output_dim => 6,
        input_dim  => 3,
    );
    $nn->initialize;

    my @vs = ( [ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3 ] );
    $nn->train( 400, @vs );

    my ($bmu_x, $bmu_y) = $nn->bmu( [ 3, 2, 4 ] );

    # 3-arg open with a lexical handle instead of the original bareword
    # FILE + 2-arg open (mode embedded in the filename expression).
    ok( open( my $fh, '>', 't/save_hexa_bmu.bin' ), 'hexa save' );
    print {$fh} "$bmu_x\n$bmu_y\n";
    # Check close on a write handle: buffered write errors surface here.
    close $fh or die "close t/save_hexa_bmu.bin: $!";

    store( $nn, 't/save_hexa.bin' );
}
__END__
t/hexa_retrieve.t view on Meta::CPAN
#########################
# change 'tests => 1' to 'tests => last_test_to_print';
use Test::More qw(no_plan);
BEGIN { use_ok('AI::NeuralNet::FastSOM::Hexa') };
######
use Storable 'retrieve';
ok( open(FILE, '< t/save_hexa_bmu.bin'), 'hexa open' );
my ( $bmu_x, $bmu_y ) = <FILE>;
t/orig/hexa.t view on Meta::CPAN
#########################
# change 'tests => 1' to 'tests => last_test_to_print';
use Test::More qw(no_plan);
BEGIN { use_ok('AI::NeuralNet::FastSOM::Hexa') };
######
#use Data::Dumper;
{
my $nn = new AI::NeuralNet::FastSOM::Hexa (output_dim => 6,
input_dim => 3);
t/orig/rect.t view on Meta::CPAN
#########################
# change 'tests => 1' to 'tests => last_test_to_print';
use Test::More qw(no_plan);
BEGIN { use_ok('AI::NeuralNet::FastSOM::Rect') };
######
#use Data::Dumper;
{
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => 3);
t/orig/rect.t view on Meta::CPAN
is ($nn->{_Y}, 6, 'Y');
is ($nn->{_Z}, 3, 'Z');
is ($nn->radius, 2.5, 'radius');
is ($nn->output_dim, "5x6", 'output dim');
}
{
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => 3);
$nn->initialize;
# print Dumper $nn;
# exit;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train (400, @vs);
foreach my $v (@vs) {
ok (_find ($v, $nn->map), 'found learned vector '. join (",", @$v));
}
sub _find {
t/orig/rect.t view on Meta::CPAN
use AI::NeuralNet::FastSOM::Utils;
foreach my $x ( 0 .. 4 ) {
foreach my $y ( 0 .. 5 ) {
return 1 if AI::NeuralNet::FastSOM::Utils::vector_distance ($m->[$x]->[$y], $v) < 0.01;
}
}
return 0;
}
ok ($nn->as_string, 'pretty print');
ok ($nn->as_data, 'raw format');
# print $nn->as_string;
}
{
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => 3);
$nn->initialize;
foreach my $x (0 .. 5 -1) {
foreach my $y (0 .. 6 -1 ) {
ok ( (!grep { $_ > 0.5 || $_ < -0.5 } @{ $nn->value ( $x, $y ) }) , "$x, $y: random vectors in [-0.5, 0.5]");
t/orig/som.t view on Meta::CPAN
#########################
# Change 'tests => 1' to 'tests => last_test_to_print';
use Test::More qw(no_plan);
BEGIN { use_ok('AI::NeuralNet::FastSOM') };
######
#use Data::Dumper;
{
use AI::NeuralNet::FastSOM::Rect; # any non-abstract subclass should do
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
t/orig/torus.t view on Meta::CPAN
#########################
# change 'tests => 1' to 'tests => last_test_to_print';
use Test::More qw(no_plan);
BEGIN { use_ok('AI::NeuralNet::FastSOM::Torus') };
######
#use Data::Dumper;
{
my $nn = new AI::NeuralNet::FastSOM::Torus (output_dim => "5x6",
input_dim => 3);
t/orig/torus.t view on Meta::CPAN
[ 3, 2, '0' ],
[ 3, 3, '1' ],
[ 4, 2, '1' ]
]), 'neighbors 4+1');
}
{
my $nn = new AI::NeuralNet::FastSOM::Torus (output_dim => "5x6",
input_dim => 3);
$nn->initialize;
# print Dumper $nn;
# exit;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train (400, @vs);
foreach my $v (@vs) {
ok (_find ($v, $nn->map), 'found learned vector '. join (",", @$v));
}
sub _find {
t/orig/torus.t view on Meta::CPAN
use AI::NeuralNet::FastSOM::Utils;
foreach my $x ( 0 .. 4 ) {
foreach my $y ( 0 .. 5 ) {
return 1 if AI::NeuralNet::FastSOM::Utils::vector_distance ($m->[$x]->[$y], $v) < 0.01;
}
}
return 0;
}
ok ($nn->as_string, 'pretty print');
ok ($nn->as_data, 'raw format');
# print $nn->as_string;
}
__END__
# randomized pick
@vectors = ...;
my $get = sub {
return @vectors [ int (rand (scalar @vectors) ) ];
}
#########################
# change 'tests => 1' to 'tests => last_test_to_print';
use Test::More qw(no_plan);
BEGIN { use_ok('AI::NeuralNet::FastSOM::Rect') };
######
use AI::NeuralNet::FastSOM::Utils;
use Storable qw/store/;
{
my $nn = AI::NeuralNet::FastSOM::Rect->new(
);
$nn->initialize;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train(400, @vs);
for my $v (@vs) {
ok(_find($v,$nn->map),'found learned vector '.join(",", @$v));
}
ok ($nn->as_string, 'pretty print');
ok ($nn->as_data, 'raw format');
}
{
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => 3);
$nn->initialize;
for my $x (0 .. 5 -1) {
for my $y (0 .. 6 -1 ) {
input_dim => 3
);
$nn->initialize;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train(400, @vs);
my ($bmu_x,$bmu_y) = $nn->bmu([3,2,4]);
ok( open(FILE, '> t/save_rect_bmu.bin'), 'rect save' );
print FILE "$bmu_x\n$bmu_y\n";
close FILE;
store( $nn, 't/save_rect.bin' );
}
__END__
t/rect_retrieve.t view on Meta::CPAN
#########################
# change 'tests => 1' to 'tests => last_test_to_print';
use Test::More qw(no_plan);
BEGIN { use_ok('AI::NeuralNet::FastSOM::Rect') };
######
use Storable 'retrieve';
ok( open(FILE, '< t/save_rect_bmu.bin'), 'rect open' );
my ( $bmu_x, $bmu_y ) = <FILE>;
#########################
# Change 'tests => 1' to 'tests => last_test_to_print';
use Test::More qw(no_plan);
#BEGIN { use_ok('AI::NeuralNet::FastSOM') };
######
use AI::NeuralNet::FastSOM::Rect; # any non-abstract subclass should do
{
my $nn = AI::NeuralNet::FastSOM::Rect->new(
output_dim => "5x6",
$nn->initialize;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train(400, @vs);
for my $v (@vs) {
ok( _find($v, $nn->map), 'found learned vector '. join (",", @$v) );
}
ok( $nn->as_string, 'pretty print' );
ok( $nn->as_data, 'raw format' );
}
{
my $nn = AI::NeuralNet::FastSOM::Torus->new(
output_dim => '5x6',
input_dim => 3,
);
$nn->initialize;
input_dim => 3
);
$nn->initialize;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train(400, @vs);
my ($bmu_x,$bmu_y) = $nn->bmu([3,2,4]);
ok( open(FILE, '> t/save_torus_bmu.bin'), 'torus save' );
print FILE "$bmu_x\n$bmu_y\n";
close FILE;
store( $nn, 't/save_torus.bin' );
}
__END__
t/torus_retrieve.t view on Meta::CPAN
#########################
# change 'tests => 1' to 'tests => last_test_to_print';
use Test::More qw(no_plan);
BEGIN { use_ok('AI::NeuralNet::FastSOM::Torus') };
######
use Storable 'retrieve';
ok( open(FILE, '< t/save_torus_bmu.bin'), 'torus open' );
my ( $bmu_x, $bmu_y ) = <FILE>;