AI-NeuralNet-SOM
================
Yet another implementation of Kohonen's SOMs (self-organizing maps):
multidimensional vector samples go in, a 2-dimensional map comes out, and
clusters become visible. Nice.
INSTALLATION
To install this module type the following:

   perl Makefile.PL
   make
   make test
   make install
examples/eigenvector_initialization.pl
$A = append ($A, zeroes (1)); # by padding zeroes
}
my ($E, $e) = eigens_sym $A;
# print $E;
# print $e;
my @es = list $e; # eigenvalues
# warn "es : ".Dumper \@es;
my @es_desc = sort { $b <=> $a } @es; # eigenvalues sorted desc
# warn "desc: ".Dumper \@es_desc;
my @es_idx = map { _find_num ($_, \@es) } @es_desc; # eigenvalue indices sorted by eigenvalue (desc)
# warn "idx: ".Dumper \@es_idx;
sub _find_num {
my $v = shift;
my $l = shift;
for my $i (0..$#$l) {
return $i if $v == $l->[$i];
}
return undef;
}
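# Note: the descending sort plus the linear lookup in _find_num above can
# be collapsed into a single index sort; a minimal alternative sketch,
# equivalent as long as the eigenvalues are distinct:
#
#   my @es_idx = sort { $es[$b] <=> $es[$a] } 0 .. $#es;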
lib/AI/NeuralNet/SOM.pm
$nn->value (3, 2, [ 1, 1, 1, 1 ]); # change value for a neuron
print $nn->value (3, 2);
$nn->label (3, 2, 'Danger'); # add a label to the neuron
print $nn->label (3, 2);
=head1 DESCRIPTION
This package is a stripped-down implementation of Kohonen Maps
(self-organizing maps). It is B<NOT> meant as a demonstration or for use
together with some visualisation software. And while it is not (yet)
optimized for speed, some consideration has been given to keeping it from
being overly slow.

Particular emphasis has been placed on having the package play nicely with
others. So there is no use of files, no arcane dependencies, etc.
=head2 Scenario
The basic idea is that the neural network consists of a 2-dimensional
array of N-dimensional vectors. When the training starts these vectors may
be completely random, but over time the network learns from the sample
data, which is a set of N-dimensional vectors.
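To make this concrete, here is a minimal end-to-end sketch (constructor
and training values borrowed from the test suite quoted at the end of this
page):

   use AI::NeuralNet::SOM::Rect;
   my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
                                          input_dim  => 3);
   $nn->initialize;                                  # random start vectors
   my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3 ]);
   $nn->train (400, @vs);                            # 400 epochs over the samples
   print $nn->as_string;                             # textual dump of the grid

The constructor accepts a number of options, among them: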
=item C<learning_rate>: (optional, default C<0.1>)

This is a magic number which controls how strongly the vectors in the grid
are pulled towards each sample. Stronger movement can mean faster learning
if the clusters are very pronounced. If they are not, then the movement is
more like noise and convergence suffers. To mitigate that effect, the
learning rate is reduced over the iterations (see the sketch after this
list).
=item C<sigma0>: (optional, defaults to radius)

A non-negative number representing the start value for the learning radius.
Practically, the value should be chosen so that it covers a larger part of
the map. During the learning process this value is narrowed down, so that
the learning radius affects fewer and fewer neurons.

B<NOTE>: Do not choose C<1> as the C<log> function is used on this value.
=back
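Both numbers decay exponentially while the training proceeds: early epochs
make large, far-reaching corrections, later epochs only fine-tune. A
minimal sketch of the decay schedule, modeled on the C<train> internals
quoted further below (the time constant is presumably
C<$epochs / log ($sigma0)>, which would also explain the B<NOTE> above
about not choosing C<1>):

   my ($l0, $sigma0, $epochs) = (0.1, 3, 400);        # example start values
   my $lambda = $epochs / log ($sigma0);              # assumed time constant
   for my $t (0 .. $epochs - 1) {
       my $sigma = $sigma0 * exp ( - $t / $lambda );  # learning radius shrinks
       my $l     = $l0     * exp ( - $t / $epochs );  # learning rate shrinks
   }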
Subclasses will (re)define some of these parameters and add others:
Example:
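For the rectangular grid, as used in the test suite quoted at the end of
this page:

   my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
                                          input_dim  => 3);

(C<Hexa>, by contrast, takes a single number for C<output_dim>, the
diameter of the hexagonal grid.)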
lib/AI/NeuralNet/SOM.pm view on Meta::CPAN
=pod
=head2 Methods
=over
=item I<initialize>
I<$nn>->initialize
You need to initialize all vectors in the map before training. There are
several options for how this is done:
=over
=item providing data vectors
If you provide a list of vectors, these will be used in turn to seed the neurons. If the list is
shorter than the number of neurons, the list will be started over. That way it is trivial to
zero everything:
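A minimal sketch, assuming a 3-dimensional input:

   $nn->initialize ( [ 0, 0, 0 ] );   # every neuron seeded with the zero vector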
my $sigma = $self->{_Sigma0} * exp ( - $self->{T} / $self->{LAMBDA} ); # compute current radius
my $l = $self->{_L0} * exp ( - $self->{T} / $epochs ); # current learning rate
my @veggies = @_; # make a local copy, that will be destroyed in the loop
while (@veggies) {
my $sample = splice @veggies, int (rand (scalar @veggies) ), 1; # find (and take out)
my @bmu = $self->bmu ($sample); # find the best matching unit
push @mes, $bmu[2] if wantarray;
my $neighbors = $self->neighbors ($sigma, @bmu); # find its neighbors
map { _adjust ($self, $l, $sigma, $_, $sample) } @$neighbors; # bend them like Beckham
}
}
return @mes;
}
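# Usage sketch: because of the wantarray branch above, calling train() in
# list context yields the stream of BMU distances collected while training:
#
#   my @vs     = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3 ]);
#   my @errors = $nn->train (400, @vs);   # list context: collect errors
#   $nn->train (400, @vs);                # void context: just train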
sub _adjust { # http://www.ai-junkie.com/ann/som/som4.html
my $self = shift;
my $l = shift; # the learning rate
my $sigma = shift; # the current radius
my $unit = shift; # which unit to change
my ($x, $y, $d) = @$unit; # it contains the distance
my $v = shift; # the vector which makes the impact
my $w = $self->{map}->[$x]->[$y]; # find the data behind the unit
my $theta = exp ( - ($d ** 2) / (2 * $sigma ** 2)); # gaussian impact (using distance and current radius)
foreach my $i (0 .. $#$w) { # adjusting values
$w->[$i] = $w->[$i] + $theta * $l * ( $v->[$i] - $w->[$i] );
}
}
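# Written out, the update rule above is the classical SOM formula, with
# d the grid distance to the BMU, sigma the current radius and l the
# current learning rate:
#
#   theta = exp( -d^2 / (2 * sigma^2) )
#   w_i   = w_i + theta * l * ( v_i - w_i )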
=pod
=item I<bmu>
I<($x, $y, $distance)> = I<$nn>->bmu (I<$sample>)

This method finds the I<best matching unit>, i.e. the neuron with the
smallest distance to the sample vector passed in, and returns its
coordinates together with that distance.

=item I<mean_error>

I<$me> = I<$nn>->mean_error (I<@vectors>)
This method takes a number of vectors and produces the I<mean distance>, i.e. the average I<error>
which the SOM makes when finding the C<bmu>s for the vectors. At least one vector must be passed in.
Obviously, the longer you let your SOM be trained, the smaller the error should become.
=cut
sub mean_error {
my $self = shift;
my $error = 0;
map { $error += $_ } # then add them all up
map { ( $self->bmu($_) )[2] } # then find the distance
@_; # take all data vectors
return ($error / scalar @_); # return the mean value
}
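# Usage sketch:
#
#   my $me = $nn->mean_error (@vs);   # average distance to the respective BMUs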
=pod
=item I<neighbors>
I<$ns> = I<$nn>->neighbors (I<$sigma>, I<$x>, I<$y>)
Returns a reference to a list of all neurons within distance I<$sigma> of
the given coordinates, each as a triple C<[$x, $y, $distance]>.
=cut
sub neighbors { die "neighbors must be implemented by a subclass"; }
=pod
=item I<output_dim> (read-only)
I<$dim> = I<$nn>->output_dim
Returns the output dimensions of the map as passed in at constructor time.
=cut
sub output_dim {
my $self = shift;
return $self->{output_dim};
}
=pod
=item I<radius> (read-only)
I<$radius> = I<$nn>->radius
Returns the I<radius> of the map. Different topologies interpret this differently.
=item I<map>
I<$m> = I<$nn>->map
This method returns a reference to the map data. See the appropriate
subclass for the concrete data representation.
=cut
sub map {
my $self = shift;
return $self->{map};
}
=pod
=item I<value>
I<$val> = I<$nn>->value (I<$x>, I<$y>)
I<$nn>->value (I<$x>, I<$y>, I<$val>)
Set or get the current vector value for a particular neuron. The neuron is addressed via its
coordinates.
=cut
sub value {
my $self = shift;
my ($x, $y) = (shift, shift);
my $v = shift;
return defined $v ? $self->{map}->[$x]->[$y] = $v : $self->{map}->[$x]->[$y];
}
=pod
=item I<label>
I<$label> = I<$nn>->label (I<$x>, I<$y>)
I<$nn>->label (I<$x>, I<$y>, I<$label>)
__END__
sub bmu {
my $self = shift;
my $sample = shift;
my $closest; # [x,y, distance] value and co-ords of closest match
foreach my $coor ($self->_get_coordinates) { # generate all coord pairs, not overly happy with that
my ($x, $y) = @$coor;
my $distance = _vector_distance ($self->{map}->[$x]->[$y], $sample); # || Vi - Sample ||
$closest = [0, 0, $distance] unless $closest;
$closest = [$x, $y, $distance] if $distance < $closest->[2];
}
return @$closest;
}
lib/AI/NeuralNet/SOM/Hexa.pm
sub initialize {
    my $self = shift;
    my @data = @_;

    my $i = 0;
    my $get_from_stream;
    if (@data) {
	$get_from_stream = sub {                            # cycle through the provided vectors
	    $i = 0 if $i > $#data;
	    return [ @{ $data[$i++] } ];                    # cloning !
	};
    } else {
	$get_from_stream = sub {                            # random components in [-0.5, 0.5)
	    return [ map { rand( 1 ) - 0.5 } 1..$self->{_Z} ];
	};
    }
    for my $x (0 .. $self->{_D}-1) {
	for my $y (0 .. $self->{_D}-1) {
	    $self->{map}->[$x]->[$y] = $get_from_stream->();
	}
    }
}
sub bmu {
my $self = shift;
my $sample = shift;
my $closest; # [x,y, distance] value and co-ords of closest match
for my $x (0 .. $self->{_D}-1) {
for my $y (0 .. $self->{_D}-1){
my $distance = AI::NeuralNet::SOM::Utils::vector_distance ($self->{map}->[$x]->[$y], $sample); # || Vi - Sample ||
#warn "distance to $x, $y : $distance";
$closest = [0, 0, $distance] unless $closest;
$closest = [$x, $y, $distance] if $distance < $closest->[2];
}
}
return @$closest;
}
sub neighbors { # http://www.ai-junkie.com/ann/som/som3.html
my $self = shift;
if ($dx < 0 || $dy < 0) {
return abs ($dx) + abs ($dy);
} else {
return $dx < $dy ? $dy : $dx;
}
}
=pod
=item I<map>
I<$m> = I<$nn>->map
This method returns the 2-dimensional array of vectors in the grid (as a reference to an array of
references to arrays of vectors).
Example:
my $m = $nn->map;
for my $x (0 .. $nn->diameter -1) {
for my $y (0 .. $nn->diameter -1){
warn "vector at $x, $y: ". Dumper $m->[$x]->[$y];
}
}
This array represents a hexagon like this (ASCII drawing is so cool):
<0,0>
<0,1> <1,0>
1;
__END__
sub _get_coordinates {
my $self = shift;
my $D1 = $self->{_D}-1;
my $t;
return map { $t = $_ ; map { [ $t, $_ ] } (0 .. $D1) } (0 .. $D1)
}
sqrt ( ($x - $X) ** 2 + ($y - $Y) ** 2 );
lib/AI/NeuralNet/SOM/Rect.pm
} else {
die "output dimension does not have format MxN";
}
if ($self->{input_dim} > 0) {
    $self->{_Z} = $self->{input_dim};
} else {
    die "input dimension must be a positive integer";
}
($self->{_R}) = map { $_ / 2 } sort { $b <=> $a } ($self->{_X}, $self->{_Y}); # radius: half of the larger dimension
$self->{_Sigma0} = $options{sigma0} || $self->{_R}; # impact distance, start value
$self->{_L0} = $options{learning_rate} || 0.1; # learning rate, start value
return $self;
}
=pod
=head2 Methods
=cut
sub initialize {
    my $self = shift;
    my @data = @_;

    my $i = 0;
    my $get_from_stream;
    if (@data) {
	$get_from_stream = sub {                            # cycle through the provided vectors
	    $i = 0 if $i > $#data;
	    return [ @{ $data[$i++] } ];                    # cloning !
	};
    } else {
	$get_from_stream = sub {                            # random components in [-0.5, 0.5)
	    return [ map { rand( 1 ) - 0.5 } 1..$self->{_Z} ];
	};
    }
    for my $x (0 .. $self->{_X}-1) {
	for my $y (0 .. $self->{_Y}-1) {
	    $self->{map}->[$x]->[$y] = $get_from_stream->();
	}
    }
}
sub bmu {
my $self = shift;
my $sample = shift;
my $closest; # [x,y, distance] value and co-ords of closest match
for my $x (0 .. $self->{_X}-1) {
for my $y (0 .. $self->{_Y}-1){
my $distance = AI::NeuralNet::SOM::Utils::vector_distance ($self->{map}->[$x]->[$y], $sample); # || Vi - Sample ||
#warn "distance to $x, $y : $distance";
$closest = [0, 0, $distance] unless $closest;
$closest = [$x, $y, $distance] if $distance < $closest->[2];
}
}
return @$closest;
}
sub neighbors { # http://www.ai-junkie.com/ann/som/som3.html
sub radius {
my $self = shift;
return $self->{_R};
}
=pod
=over
=item I<map>
I<$m> = I<$nn>->map
This method returns the 2-dimensional array of vectors in the grid (as a reference to an array of
references to arrays of vectors). The representation of the 2-dimensional array is straightforward.
Example:
my $m = $nn->map;
for my $x (0 .. 4) {        # with output_dim "5x6", x runs 0 .. 4
    for my $y (0 .. 5) {    # and y runs 0 .. 5
warn "vector at $x, $y: ". Dumper $m->[$x]->[$y];
}
}
=cut
sub as_string {
my $self = shift;
my $s = '';
$s .= " ";
for my $y (0 .. $self->{_Y}-1){
$s .= sprintf (" %02d ",$y);
}
$s .= sprintf "\n%s\n", "-" x 107;
my $dim = scalar @{ $self->{map}->[0]->[0] };
for my $x (0 .. $self->{_X}-1) {
for my $w ( 0 .. $dim-1 ){
$s .= sprintf ("%02d | ",$x);
for my $y (0 .. $self->{_Y}-1){
$s .= sprintf ("% 2.2f ", $self->{map}->[$x]->[$y]->[$w]);
}
$s .= sprintf "\n";
}
$s .= sprintf "\n";
}
return $s;
}
=pod
This method creates a string containing the raw vector data, row by
row. This can be fed into gnuplot, for instance.
=cut
sub as_data {
my $self = shift;
my $s = '';
my $dim = scalar @{ $self->{map}->[0]->[0] };
for my $x (0 .. $self->{_X}-1) {
for my $y (0 .. $self->{_Y}-1){
for my $w ( 0 .. $dim-1 ){
$s .= sprintf ("\t%f", $self->{map}->[$x]->[$y]->[$w]);
}
$s .= sprintf "\n";
}
}
return $s;
}
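# Usage sketch (the file name is arbitrary):
#
#   open my $fh, '>', 'som.dat' or die "cannot write: $!";
#   print $fh $nn->as_data;
#   close $fh;
#   # ...then visualize, e.g. with "splot 'som.dat'" inside gnuplot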
=pod
=back
lib/AI/NeuralNet/SOM/Utils.pm
sub vector_distance {
my ($V, $W) = (shift,shift);
#
# Euclidean distance:  distance = sqrt ( sum_{i=0..n-1} ( V_i - W_i )^2 )
#
#warn "bef dist ".Dumper ($V, $W);
my $d2 = 0;
map { $d2 += $_ }
map { $_ * $_ }
map { $V->[$_] - $W->[$_] }
(0 .. $#$W);
#warn "d2 $d2";
return sqrt($d2);
}
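# Quick sanity check:
#
#   print AI::NeuralNet::SOM::Utils::vector_distance ([ 0, 0, 1 ], [ 0, 1, 0 ]);
#   # prints 1.4142... ( = sqrt(2) )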
1;
{
my $nn = new AI::NeuralNet::SOM::Hexa (output_dim => 2,
input_dim => 3);
$nn->initialize ( [ 0, 0, 1 ], [ 0, 1, 0 ] );
my $d = $nn->diameter;
for my $x (0 .. $d-1) {
for my $y (0 .. $d-1) {
ok (eq_array ($nn->{map}->[$x]->[$y],
$y == 0 ? [ 0, 0, 1 ] : [ 0, 1, 0 ]), 'value init');
}
}
# warn Dumper $nn;
}
{
my $nn = new AI::NeuralNet::SOM::Hexa (output_dim => 2,
input_dim => 3);
$nn->initialize;
}

eval "use Test::Pod 1.00";
plan skip_all => "Test::Pod 1.00 required for testing POD" if $@;
my @PODs = qw(
lib/AI/NeuralNet/SOM.pm
lib/AI/NeuralNet/SOM/Rect.pm
lib/AI/NeuralNet/SOM/Hexa.pm
lib/AI/NeuralNet/SOM/Torus.pm
);
plan tests => scalar @PODs;
map {
pod_file_ok ( $_, "$_ pod ok" )
} @PODs;
my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
input_dim => 3);
$nn->initialize;
# print Dumper $nn;
# exit;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train (400, @vs);
foreach my $v (@vs) {
ok (_find ($v, $nn->map), 'found learned vector '. join (",", @$v));
}
sub _find {
my $v = shift;
my $m = shift;
use AI::NeuralNet::SOM::Utils;
foreach my $x ( 0 .. 4 ) {
foreach my $y ( 0 .. 5 ) {
            return 1 if AI::NeuralNet::SOM::Utils::vector_distance ($m->[$x]->[$y], $v) < 0.01;
        }
    }
    return 0;
}
my $nn = new AI::NeuralNet::SOM::Torus (output_dim => "5x6",
input_dim => 3);
$nn->initialize;
# print Dumper $nn;
# exit;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train (400, @vs);
foreach my $v (@vs) {
ok (_find ($v, $nn->map), 'found learned vector '. join (",", @$v));
}
sub _find {
my $v = shift;
my $m = shift;
use AI::NeuralNet::SOM::Utils;
foreach my $x ( 0 .. 4 ) {
foreach my $y ( 0 .. 5 ) {
            return 1 if AI::NeuralNet::SOM::Utils::vector_distance ($m->[$x]->[$y], $v) < 0.01;
        }
    }
    return 0;
}