Makefile.PL
PREREQ_PM => {},
NORECURS => 1,
($] >= 5.005 ? ## Add these new keywords supported since 5.005
(ABSTRACT_FROM => 'lib/AI/NeuralNet/SOM.pm', # retrieve abstract from module
AUTHOR => 'Robert Barta <rho@devc.at>') : ()),
);
package MY;
sub depend {
return <<MAKE
inject:
mcpani --add --module AI::NeuralNet::SOM --authorid DRRHO --modversion \$(VERSION) --file AI-NeuralNet-SOM-\$(VERSION).tar.gz
sudo mcpani --inject
debian:
(cd /usr/local/share/packages ; rm -rf libai-neuralnet-som-perl* AI-NeuralNet-SOM* ; dh-make-perl --cpan-mirror file:/usr/local/share/minicpan --build -cpan AI::NeuralNet::SOM )
upload:
examples/eigenvector_initialization.pl
# print $E;
# print $e;
my @es = list $e; # eigenvalues
# warn "es : ".Dumper \@es;
my @es_desc = sort { $b <=> $a } @es; # eigenvalues sorted desc
# warn "desc: ".Dumper \@es_desc;
my @es_idx = map { _find_num ($_, \@es) } @es_desc; # eigenvalue indices sorted by eigenvalue (desc)
# warn "idx: ".Dumper \@es_idx;
sub _find_num {
my $v = shift;
my $l = shift;
for my $i (0..$#$l) {
return $i if $v == $l->[$i];
}
return undef;
}
for (@es_idx) { # from the highest values downwards, take the index
push @training_vectors, [ list $E->dice($_) ] ; # get the corresponding vector
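# (the excerpt ends here; once the loop closes, the collected vectors would
#  typically seed the map, e.g. via $nn->initialize (@training_vectors))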
examples/load_save.pl
# print $E;
# print $e;
my @es = list $e; # eigenvalues
# warn "es : ".Dumper \@es;
my @es_desc = sort { $b <=> $a } @es; # eigenvalues sorted desc
# warn "desc: ".Dumper \@es_desc;
my @es_idx = map { _find_num ($_, \@es) } @es_desc; # eigenvalue indices sorted by eigenvalue (desc)
# warn "idx: ".Dumper \@es_idx;
sub _find_num {
my $v = shift;
my $l = shift;
for my $i (0..$#$l) {
return $i if $v == $l->[$i];
}
return undef;
}
for (@es_idx) { # from the highest values downwards, take the index
push @training_vectors, [ list $E->dice($_) ] ; # get the corresponding vector
lib/AI/NeuralNet/SOM.pm
Subclasses will (re)define some of these parameters and add others:
Example:
my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
input_dim => 3);
=cut
sub new { die; }
=pod
=head2 Methods
=over
=item I<initialize>
I<$nn>->initialize
lib/AI/NeuralNet/SOM.pm
Example:
$nn->train (30,
[ 3, 2, 4 ],
[ -1, -1, -1 ],
[ 0, 4, -3]);
=cut
sub train {
my $self = shift;
my $epochs = shift || 1;
die "no data to learn" unless @_;
$self->{LAMBDA} = $epochs / log ($self->{_Sigma0}); # educated guess: with this lambda the radius sigma decays from _Sigma0 to 1 by the last epoch
my @mes = (); # this will contain the errors during the epochs
for my $epoch (1..$epochs) {
$self->{T} = $epoch;
my $sigma = $self->{_Sigma0} * exp ( - $self->{T} / $self->{LAMBDA} ); # compute current radius
lib/AI/NeuralNet/SOM.pm
my @bmu = $self->bmu ($sample); # find the best matching unit
push @mes, $bmu[2] if wantarray;
my $neighbors = $self->neighbors ($sigma, @bmu); # find its neighbors
map { _adjust ($self, $l, $sigma, $_, $sample) } @$neighbors; # bend them like Beckham
}
}
return @mes;
}
sub _adjust { # http://www.ai-junkie.com/ann/som/som4.html
my $self = shift;
my $l = shift; # the learning rate
my $sigma = shift; # the current radius
my $unit = shift; # which unit to change
my ($x, $y, $d) = @$unit; # it contains the distance
my $v = shift; # the vector which makes the impact
my $w = $self->{map}->[$x]->[$y]; # find the data behind the unit
my $theta = exp ( - ($d ** 2) / (2 * $sigma ** 2)); # gaussian impact (using distance and current radius)
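# (the excerpt ends here; the standard SOM update then pulls the unit's
#  vector towards the sample, scaled by impact and learning rate, e.g.
#      $w->[$_] += $theta * $l * ( $v->[$_] - $w->[$_] ) for 0 .. $#$w;  )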
lib/AI/NeuralNet/SOM.pm
=item I<bmu>
(I<$x>, I<$y>, I<$distance>) = I<$nn>->bmu (I<$vector>)
This method finds the I<best matching unit>, i.e. that neuron which is closest to the vector passed
in. The method returns the coordinates and the actual distance.
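Example (with a hypothetical 3-dimensional sample):
my ($x, $y, $d) = $nn->bmu ([ 3, 2, 4 ]);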
=cut
sub bmu { die; }
=pod
=item I<mean_error>
I<$me> = I<$nn>->mean_error (I<@vectors>)
This method takes a number of vectors and produces the I<mean distance>, i.e. the average I<error>
which the SOM makes when finding the C<bmu>s for the vectors. At least one vector must be passed in.
Obviously, the longer you let your SOM be trained, the smaller the error should become.
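Example (averaging the error over a few made-up vectors):
my $me = $nn->mean_error ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3 ]);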
=cut
sub mean_error {
my $self = shift;
my $error = 0;
map { $error += $_ } # then add them all up
map { ( $self->bmu($_) )[2] } # then find the distance
@_; # take all data vectors
return ($error / scalar @_); # return the mean value
}
=pod
=item I<neighbors>
I<$ns> = I<$nn>->neighbors (I<$sigma>, I<$x>, I<$y>)
Finds all neighbors of (X, Y) whose distance from it does not exceed SIGMA. Returns a list
reference of (X, Y, distance) triples.
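Example (sigma and the coordinates are chosen arbitrarily):
my $ns = $nn->neighbors (2, 0, 0); # e.g. [ [ 0, 0, 0 ], [ 0, 1, 1 ], [ 1, 0, 1 ], ... ]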
=cut
sub neighbors { die; }
=pod
=item I<output_dim> (read-only)
I<$dim> = I<$nn>->output_dim
Returns the output dimensions of the map as passed in at constructor time.
=cut
sub output_dim {
my $self = shift;
return $self->{output_dim};
}
=pod
=item I<radius> (read-only)
I<$radius> = I<$nn>->radius
lib/AI/NeuralNet/SOM.pm
=item I<map>
I<$m> = I<$nn>->map
This method returns a reference to the map data. See the appropriate subclass for details of
the data representation.
=cut
sub map {
my $self = shift;
return $self->{map};
}
=pod
=item I<value>
I<$val> = I<$nn>->value (I<$x>, I<$y>)
I<$nn>->value (I<$x>, I<$y>, I<$val>)
Set or get the current vector value for a particular neuron. The neuron is addressed via its
coordinates.
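Example (coordinates and vector are made up):
$nn->value (1, 1, [ 1, 1, 1 ]); # set the vector at (1, 1)
my $v = $nn->value (1, 1); # get it back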
=cut
sub value {
my $self = shift;
my ($x, $y) = (shift, shift);
my $v = shift;
return defined $v ? $self->{map}->[$x]->[$y] = $v : $self->{map}->[$x]->[$y];
}
=pod
=item I<label>
I<$label> = I<$nn>->label (I<$x>, I<$y>)
I<$nn>->label (I<$x>, I<$y>, I<$label>)
Set or get the label for a particular neuron. The neuron is addressed via its coordinates.
The label can be anything; it is simply attached to the position.
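Example (the label itself is arbitrary):
$nn->label (1, 0, 'cluster A');
my $l = $nn->label (1, 0);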
=cut
sub label {
my $self = shift;
my ($x, $y) = (shift, shift);
my $l = shift;
return defined $l ? $self->{labels}->[$x]->[$y] = $l : $self->{labels}->[$x]->[$y];
}
=pod
=item I<as_string>
print I<$nn>->as_string
This method creates a pretty-print version of the current vectors.
=cut
sub as_string { die; }
=pod
=item I<as_data>
print I<$nn>->as_data
This method creates a string containing the raw vector data, row by
row. This can be fed into gnuplot, for instance.
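Example (dumping the map into a file for gnuplot; the file name is made up):
open my $fh, '>', 'som.dat' or die $!;
print $fh $nn->as_data;
close $fh;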
=cut
sub as_data { die; }
=pod
=back
=head1 HOWTOs
=over
=item I<using Eigenvectors to initialize the SOM>
lib/AI/NeuralNet/SOM.pm
=cut
our $VERSION = '0.07';
1;
__END__
sub bmu {
my $self = shift;
my $sample = shift;
my $closest; # [x,y, distance] value and co-ords of closest match
foreach my $coor ($self->_get_coordinates) { # generate all coord pairs, not overly happy with that
my ($x, $y) = @$coor;
my $distance = _vector_distance ($self->{map}->[$x]->[$y], $sample); # || Vi - Sample ||
$closest = [0, 0, $distance] unless $closest;
$closest = [$x, $y, $distance] if $distance < $closest->[2];
}
lib/AI/NeuralNet/SOM/Hexa.pm
=back
Example:
my $nn = new AI::NeuralNet::SOM::Hexa (output_dim => 6,
input_dim => 3);
=cut
sub new {
my $class = shift;
my %options = @_;
my $self = bless { %options }, $class;
if ($self->{output_dim} > 0) {
$self->{_D} = $self->{output_dim};
} else {
die "output dimension must be positive integer";
}
if ($self->{input_dim} > 0) {
lib/AI/NeuralNet/SOM/Hexa.pm
=head2 Methods
=over
=item I<radius>
Returns the radius (half the diameter).
=cut
sub radius {
my $self = shift;
return $self->{_R};
}
=pod
=item I<diameter>
Returns the diameter (= dimension) of the hexagon.
=cut
sub diameter {
my $self = shift;
return $self->{_D};
}
=pod
=cut
sub initialize {
my $self = shift;
my @data = @_;
my $i = 0; # cursor into the provided data
my $get_from_stream; # declared first: 'my $x = ... if ...' has undefined behavior
$get_from_stream = sub {
$i = 0 if $i > $#data; # wrap around when the data is exhausted
return [ @{ $data[$i++] } ]; # cloning !
} if @data;
$get_from_stream ||= sub { # fallback: random vectors in [-0.5, 0.5)
return [ map { rand( 1 ) - 0.5 } 1..$self->{_Z} ];
};
for my $x (0 .. $self->{_D}-1) {
for my $y (0 .. $self->{_D}-1) {
$self->{map}->[$x]->[$y] = &$get_from_stream;
}
}
}
sub bmu {
my $self = shift;
my $sample = shift;
my $closest; # [x,y, distance] value and co-ords of closest match
for my $x (0 .. $self->{_D}-1) {
for my $y (0 .. $self->{_D}-1){
my $distance = AI::NeuralNet::SOM::Utils::vector_distance ($self->{map}->[$x]->[$y], $sample); # || Vi - Sample ||
#warn "distance to $x, $y : $distance";
$closest = [0, 0, $distance] unless $closest;
$closest = [$x, $y, $distance] if $distance < $closest->[2];
}
}
return @$closest;
}
sub neighbors { # http://www.ai-junkie.com/ann/som/som3.html
my $self = shift;
my $sigma = shift;
my $X = shift;
my $Y = shift;
my @neighbors;
for my $x (0 .. $self->{_D}-1) {
for my $y (0 .. $self->{_D}-1){
my $distance = _hexa_distance ($X, $Y, $x, $y);
##warn "$X, $Y, $x, $y: distance: $distance";
next if $distance > $sigma;
push @neighbors, [ $x, $y, $distance ]; # we keep the distances
}
}
return \@neighbors;
}
sub _hexa_distance {
my ($x1, $y1) = (shift, shift); # one point
my ($x2, $y2) = (shift, shift); # another
($x1, $y1, $x2, $y2) = ($x2, $y2, $x1, $y1) # swapping
if ( $x1+$y1 > $x2+$y2 );
my $dx = $x2 - $x1;
my $dy = $y2 - $y1;
if ($dx < 0 || $dy < 0) {
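# (the excerpt ends here; the remaining branches combine $dx and $dy into
#  the usual axial hexagon distance, equivalent to (|dx| + |dy| + |dx+dy|) / 2)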
lib/AI/NeuralNet/SOM/Hexa.pm
=item I<as_string>
Not implemented.
=cut
## TODO: pretty printing of this as hexagon ?
sub as_string { die "not implemented"; }
=pod
=item I<as_data>
Not implemented.
=cut
sub as_data { die "not implemented"; }
=pod
=back
=head1 AUTHOR
Robert Barta, E<lt>rho@devc.atE<gt>
=head1 COPYRIGHT AND LICENSE
lib/AI/NeuralNet/SOM/Hexa.pm
=cut
our $VERSION = '0.02';
1;
__END__
sub _get_coordinates {
my $self = shift;
my $D1 = $self->{_D}-1;
my $t;
return map { $t = $_ ; map { [ $t, $_ ] } (0 .. $D1) } (0 .. $D1)
}
sqrt ( ($x - $X) ** 2 + ($y - $Y) ** 2 );
lib/AI/NeuralNet/SOM/Rect.pm
=back
Example:
my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
input_dim => 3);
=cut
sub new {
my $class = shift;
my %options = @_;
my $self = bless { %options }, $class;
if ($self->{output_dim} =~ /(\d+)x(\d+)/) {
($self->{_X}, $self->{_Y}) = ($1, $2);
} else {
die "output dimension does not have format MxN";
}
if ($self->{input_dim} > 0) {
lib/AI/NeuralNet/SOM/Rect.pm
$self->{_L0} = $options{learning_rate} || 0.1; # learning rate, start value
return $self;
}
=pod
=head2 Methods
=cut
sub initialize {
my $self = shift;
my @data = @_;
my $i = 0; # cursor into the provided data
my $get_from_stream; # declared first: 'my $x = ... if ...' has undefined behavior
$get_from_stream = sub {
$i = 0 if $i > $#data; # wrap around when the data is exhausted
return [ @{ $data[$i++] } ]; # cloning !
} if @data;
$get_from_stream ||= sub { # fallback: random vectors in [-0.5, 0.5)
return [ map { rand( 1 ) - 0.5 } 1..$self->{_Z} ];
};
for my $x (0 .. $self->{_X}-1) {
for my $y (0 .. $self->{_Y}-1) {
$self->{map}->[$x]->[$y] = &$get_from_stream;
}
}
}
sub bmu {
my $self = shift;
my $sample = shift;
my $closest; # [x,y, distance] value and co-ords of closest match
for my $x (0 .. $self->{_X}-1) {
for my $y (0 .. $self->{_Y}-1){
my $distance = AI::NeuralNet::SOM::Utils::vector_distance ($self->{map}->[$x]->[$y], $sample); # || Vi - Sample ||
#warn "distance to $x, $y : $distance";
$closest = [0, 0, $distance] unless $closest;
$closest = [$x, $y, $distance] if $distance < $closest->[2];
}
}
return @$closest;
}
sub neighbors { # http://www.ai-junkie.com/ann/som/som3.html
my $self = shift;
my $sigma = shift;
my $X = shift;
my $Y = shift;
my @neighbors;
for my $x (0 .. $self->{_X}-1) {
for my $y (0 .. $self->{_Y}-1){
my $distance = sqrt ( ($x - $X) * ($x - $X) + ($y - $Y) * ($y - $Y) );
next if $distance > $sigma;
push @neighbors, [ $x, $y, $distance ]; # we keep the distances
}
}
return \@neighbors;
}
=pod
=cut
sub radius {
my $self = shift;
return $self->{_R};
}
=pod
=over
=item I<map>
lib/AI/NeuralNet/SOM/Rect.pm
my $m = $nn->map;
for my $x (0 .. 5) {
for my $y (0 .. 4){
warn "vector at $x, $y: ". Dumper $m->[$x]->[$y];
}
}
=cut
sub as_string {
my $self = shift;
my $s = '';
$s .= " ";
for my $y (0 .. $self->{_Y}-1){
$s .= sprintf (" %02d ",$y);
}
$s .= "\n" . ( "-" x 107 ) . "\n"; # separator line
my $dim = scalar @{ $self->{map}->[0]->[0] };
lib/AI/NeuralNet/SOM/Rect.pm
=item I<as_data>
print I<$nn>->as_data
This method creates a string containing the raw vector data, row by
row. This can be fed into gnuplot, for instance.
=cut
sub as_data {
my $self = shift;
my $s = '';
my $dim = scalar @{ $self->{map}->[0]->[0] };
for my $x (0 .. $self->{_X}-1) {
for my $y (0 .. $self->{_Y}-1){
for my $w ( 0 .. $dim-1 ){
$s .= sprintf ("\t%f", $self->{map}->[$x]->[$y]->[$w]);
}
$s .= sprintf "\n";
lib/AI/NeuralNet/SOM/Torus.pm
This SOM is very similar to that with a rectangular topology, except that the rectangle is connected
on the top edge and the bottom edge to first form a cylinder; and that cylinder is then formed into
a torus by connecting the rectangle's left and right border (L<http://en.wikipedia.org/wiki/Torus>).
=head1 INTERFACE
It exposes the same interface as the base class.
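The practical difference lies solely in how distances between units are computed: a
coordinate difference may wrap around either edge. A minimal sketch of such a wrapped
(squared) distance, as an illustration only (this is not the module's literal code):
sub _torus_distance2 { # hypothetical helper
my ($x1, $y1, $x2, $y2, $X, $Y) = @_;
my $dx = abs ($x1 - $x2); $dx = $X - $dx if $dx > $X / 2; # wrap horizontally
my $dy = abs ($y1 - $y2); $dy = $Y - $dy if $dy > $Y / 2; # wrap vertically
return $dx * $dx + $dy * $dy; # squared distance
}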
=cut
sub neighbors { # http://www.ai-junkie.com/ann/som/som3.html
my $self = shift;
my $sigma = shift;
my $sigma2 = $sigma * $sigma; # need the square more often
my $X = shift;
my $Y = shift;
my ($_X, $_Y) = ($self->{_X}, $self->{_Y});
my @neighbors;
for my $x (0 .. $self->{_X}-1) {
lib/AI/NeuralNet/SOM/Utils.pm
package AI::NeuralNet::SOM::Utils;
sub vector_distance {
my ($V, $W) = (shift,shift);
# Distance = sqrt( sum_{i=0}^{n-1} ( V_i - W_i )^2 )
#warn "bef dist ".Dumper ($V, $W);
my $d2 = 0;
map { $d2 += $_ }
map { $_ * $_ }
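# (the excerpt ends here; presumably the chain closes over the component
#  differences and the Euclidean distance is returned, e.g.
#      map { $V->[$_] - $W->[$_] } 0 .. $#$V;
#      return sqrt ($d2);  )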
# print Dumper $nn;
# exit;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train (400, @vs);
foreach my $v (@vs) {
ok (_find ($v, $nn->map), 'found learned vector '. join (",", @$v));
}
sub _find {
my $v = shift;
my $m = shift;
use AI::NeuralNet::SOM::Utils;
foreach my $x ( 0 .. 4 ) {
foreach my $y ( 0 .. 5 ) {
return 1 if AI::NeuralNet::SOM::Utils::vector_distance ($m->[$x]->[$y], $v) < 0.01;
}
}
return 0;
foreach my $y (0 .. 6 -1 ) {
ok ( (!grep { $_ > 0.5 || $_ < -0.5 } @{ $nn->value ( $x, $y ) }) , "$x, $y: random vectors in [-0.5, 0.5]");
}
}
}
__END__
# randomized pick
@vectors = ...;
my $get = sub {
return $vectors[ int (rand (scalar @vectors)) ];
};
$nn->train ($get);
# take exactly 500, round robin, in order
our $i = 0;
my $get = sub {
return undef unless $i < 500;
return $vectors[ $i++ % scalar @vectors ];
};
my @mes = $nn->train (20, @vs);
is (scalar @mes, 3 * 20, 'errors while training, nr');
ok ((!grep { $_ > 10 * $me } @mes), 'errors while training, none significantly bigger');
}
}
__END__
# randomized pick
@vectors = ...;
my $get = sub {
return $vectors[ int (rand (scalar @vectors)) ];
};
$nn->train ($get);
# take exactly 500, round robin, in order
our $i = 0;
my $get = sub {
return undef unless $i < 500;
return $vectors[ $i++ % scalar @vectors ];
};
# print Dumper $nn;
# exit;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train (400, @vs);
foreach my $v (@vs) {
ok (_find ($v, $nn->map), 'found learned vector '. join (",", @$v));
}
sub _find {
my $v = shift;
my $m = shift;
use AI::NeuralNet::SOM::Utils;
foreach my $x ( 0 .. 4 ) {
foreach my $y ( 0 .. 5 ) {
return 1 if AI::NeuralNet::SOM::Utils::vector_distance ($m->[$x]->[$y], $v) < 0.01;
}
}
return 0;
ok ($nn->as_string, 'pretty print');
ok ($nn->as_data, 'raw format');
# print $nn->as_string;
}
__END__
# randomized pick
@vectors = ...;
my $get = sub {
return $vectors[ int (rand (scalar @vectors)) ];
};
$nn->train ($get);
# take exactly 500, round robin, in order
our $i = 0;
my $get = sub {
return undef unless $i < 500;
return $vectors[ $i++ % scalar @vectors ];
};