0.16 Sat Jan 3 05:53:12 EST 2015
- version bump - hasn't been tested in a while...
- added auto-README generation to Makefile.PL
- update copyright notice
0.15 Wed Jul 11 00:13:02 2012
- tidy up build a bit
- fixed warnings from CODE blocks using RETVAL without OUTPUT
blocks in newer perls
- yet another typemap workaround. this time we have a 5.6.2 with
a new ParseXS and an old xsubpp. I won't even mention the problem
I found in old Test::More while tracking this down. I hope it
never becomes an issue. (Note: since this is an almost 3-year-old
issue and we haven't seen any more cases, we'll assume it was
isolated to a single user to start with and the whole mess is
fixed now.)
0.14 Fri Aug 21 12:52:32 2009
- work around some sort of ExtUtils::ParseXS bug in 5.6.2,
not picking up typemap files unless specifically named "typemap"
0.13 Mon Aug 17 08:42:37 2009
- fixed perl version check in Makefile.PL
0.12 Sat Aug 15 14:24:50 2009
- will now pass -Wall -Wextra -ansi -Wdeclaration-after-statement
(locally anyway)
- wrapped newSVpvs in INT2PTR to hopefully satisfy some platforms
- bumped perl require back up to 5.8.0 for now
- defined PERL_MAGIC_tied for older perls
- changed hv_fetchs() to hv_fetch() for older perls
- hacked in defines for Newx() and friends for older perls
- changed newSVpvs() to newSVpvn() for older perls
- created separate typemap for older perls, along with Makefile.PL
modification to use it before 5.8.0
- added requirement for Storable which is non-core in older perls
- moved perl require back down to 5.6.2
0.11 Sun Aug 9 10:04:19 2009
- casting newSVpvs() to SV* to satisfy at least one platform
- added 'const char *' to typemap for older perls
- removed a few unneeded casts to internal types
- moved DESTROY methods to superclass, thus fixing missing
Hexa::DESTROY and consolidating common code
- consolidated neighbors code
- general housekeeping
0.10 Fri Aug 7 09:11:39 2009
- no longer relying on sizeof(void)
- removed a bit of old test code
- one more PTR2INT conversion
/*
* SOM_Rect : holds a ptr to a single SOM_Map thingy
*
* should be allocated:
* sizeof(SOM_Rect)
*
* this struct is the main object.
*
* 'X', 'Y', and 'Z' are held here for propagation down to the structs
* that make up our grid map.
*
* '_R' = initial SOM radius
* '_Sigma0' = ???
* '_L0' = initial SOM learning rate
*
* 'output_dim' is kept from instantiation simply because the perl interface
* already provides access to it.
*/
typedef struct {
SV *ref;
IV X;
IV Y;
IV Z;
NV R;
NV Sigma0;
NV L0;
NV LAMBDA;
NV T;
int type;
SV *output_dim;
AV *labels;
SOM_Map *map;
} SOM_GENERIC;
typedef SOM_GENERIC SOM_Rect;
typedef SOM_GENERIC SOM_Torus;
typedef SOM_GENERIC SOM_Hexa;
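All three public classes are thin typedefs over this one generic struct, and its fields surface in Perl as keys of the tied object hash (the test suite reads them exactly this way, via the XS _som_FETCH below). A usage sketch, assuming a Rect SOM:

    use AI::NeuralNet::FastSOM::Rect;
    my $nn = AI::NeuralNet::FastSOM::Rect->new(
        output_dim => '5x6',
        input_dim  => 3,
    );
    # hash reads dispatch through the tie magic into the C struct
    print "$nn->{_X} x $nn->{_Y} grid, vector length $nn->{_Z}\n";
    print "initial radius: $nn->{_R}\n";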
enum SOMType {
SOMType_Hexa,
SOMType_Rect,
SOMType_Torus
};
NV _vector_distance(AV* V1, AV* V2) {
I32 w_ptr;
NV diff, sum;
sum = 0;
for ( w_ptr = av_len(V1) ; w_ptr >= 0 ; w_ptr-- ) {
diff = SvNV(*av_fetch(V1, w_ptr, FALSE))
- SvNV(*av_fetch(V2, w_ptr, FALSE));
sum += diff * diff;
}
return sqrt(sum);
}
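For reference, this is a plain Euclidean distance; a pure-Perl sketch of the same computation (essentially what the tests call via AI::NeuralNet::FastSOM::Utils::vector_distance):

    sub vector_distance {
        my ($v1, $v2) = @_;    # two array refs of equal length
        my $sum = 0;
        for my $i (0 .. $#$v1) {
            my $diff = $v1->[$i] - $v2->[$i];
            $sum += $diff * $diff;
        }
        return sqrt $sum;
    }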
void _bmuguts(SOM_GENERIC *som,AV *sample,IV *bx,IV *by,NV *bd) {
IV x,y,z,X,Y,Z;
NV sum,diff,distance;
SOM_Map *map;
SOM_Array *array;
SOM_Vector *vector;
map = som->map;
X = som->X;
Y = som->Y;
Z = som->Z;
*bx = -1;
*by = 0;
*bd = 0.0;
for ( x=0 ; x<X ; x++ ) {
array = (&map->array)[x];
for ( y=0 ; y<Y ; y++ ) {
vector = (&array->vector)[y];
sum = 0;
for ( z=0 ; z<Z ; z++ ) {
diff = SvNV(*av_fetch(sample,z,0))
- (&vector->element)[z];
sum += diff * diff;
}
distance = sqrt(sum);
if ( *bx < 0 || distance < *bd ) {
*bx = x;
*by = y;
*bd = distance;
}
}
}
}
/* http://www.ai-junkie.com/ann/som/som4.html */
void _adjust(SV* self,NV l,NV sigma,AV* unit,AV* v) {
IV x,y;
I32 z,Z;
NV d,theta,vold,wold;
MAGIC *mg;
SOM_Map *map;
SOM_Array *array;
SOM_Vector *vector;
SOM_GENERIC *som;
x = SvIV(*av_fetch(unit, 0, FALSE));
y = SvIV(*av_fetch(unit, 1, FALSE));
d = SvNV(*av_fetch(unit, 2, FALSE));
theta = exp( -d*d/2/sigma/sigma );
if ( !(mg = selfmagic(self)) )
croak("self has no magic!\n");
som = self2somptr(self,mg);
map = som->map;
array = (&map->array)[x];
vector = (&array->vector)[y];
/* hmm.. casting IV to I32.. is that sane? */
Z = (I32)som->Z;
for ( z=0 ; z<Z ; z++ ) {
wold = (&vector->element)[z];
vold = SvNV(*av_fetch(v,z,FALSE));
(&vector->element)[z] = (vold - wold) * l * theta + wold;
}
}
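The loop above applies the standard SOM update: each weight moves toward the sample by the fraction l*theta, where theta = exp(-d^2 / (2*sigma^2)) decays with grid distance d from the best matching unit. A pure-Perl sketch of one such step, with all names assumed:

    sub adjust_vector {
        my ($w, $v, $l, $sigma, $d) = @_;   # $w: weight vector, $v: sample (array refs)
        my $theta = exp( -$d * $d / (2 * $sigma * $sigma) );
        for my $z (0 .. $#$w) {
            $w->[$z] += ( $v->[$z] - $w->[$z] ) * $l * $theta;
        }
    }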
void _adjustn(SOM_GENERIC* som,NV l,NV sigma,NV* n,AV* v) {
IV x,y,X,Y;
I32 z,Z;
NV d,theta,vold,wold;
SOM_Map *map;
SOM_Array *array;
SOM_Vector *vector;
map = som->map;
X = som->X;
Y = som->Y;
for ( x=0 ; x<X ; x++ ) {
array = (&map->array)[x];
for ( y=0 ; y<Y ; y++ ) {
d = n[x*Y+y];
if (d < 0) continue;
theta = exp( -d*d/2/sigma/sigma );
vector = (&array->vector)[y];
/* hmm.. casting IV to I32.. is that sane? */
Z = (I32)som->Z;
for ( z=0 ; z<Z ; z++ ) {
wold = (&vector->element)[z];
vold = SvNV(*av_fetch(v,z,FALSE));
(&vector->element)[z] = (vold - wold) * l * theta + wold;
}
}
}
}
SOM_Vector* _make_vector(SOM_Array* array) {
IV z,len;
AV *thingy;
SV *tie;
HV *stash;
SOM_Vector *vector;
z = array->Z;
len = sizeof(SOM_Vector)+z*sizeof(NV);
Newxc(vector, len, char, SOM_Vector);
Zero(vector, len, char);
thingy = newAV();
tie = newRV_noinc(newSViv(PTR2IV(vector)));
stash = gv_stashpv("AI::NeuralNet::FastSOM::VECTOR", GV_ADD);
sv_bless(tie, stash);
hv_magic((HV*)thingy, (GV*)tie, PERL_MAGIC_tied);
vector->ref = newRV_noinc((SV*)thingy);
(&vector->element)[z] = 0.0;
for ( z-=1 ; z>=0 ; z-- ) {
(&vector->element)[z] = 0.0;
}
return vector;
}
SOM_Array* _make_array(SOM_Map* map) {
IV y,len;
AV *thingy;
SV *tie;
HV *stash;
SOM_Array *array;
y = map->Y;
len = sizeof(SOM_Array)+y*sizeof(SOM_Vector*);
Newxc(array, len, char, SOM_Array);
Zero(array, len, char);
array->Y = y;
array->Z = map->Z;
thingy = newAV();
tie = newRV_noinc(newSViv(PTR2IV(array)));
stash = gv_stashpv("AI::NeuralNet::FastSOM::ARRAY", GV_ADD);
sv_bless(tie, stash);
hv_magic((HV*)thingy, (GV*)tie, PERL_MAGIC_tied);
array->ref = newRV_noinc((SV*)thingy);
(&array->vector)[y] = NULL;
for ( y-=1 ; y>=0 ; y-- )
(&array->vector)[y] = _make_vector( array );
return array;
}
SOM_Map* _make_map(SOM_GENERIC *som) {
IV x,len;
AV *thingy;
SV *tie;
HV *stash;
SOM_Map *map;
x = som->X;
len = sizeof(SOM_Map)+x*sizeof(SOM_Array*);
Newxc(map, len, char, SOM_Map);
Zero(map, len, char);
map->X = x;
map->Y = som->Y;
map->Z = som->Z;
thingy = newAV();
tie = newRV_noinc(newSViv(PTR2IV(map)));
stash = gv_stashpv("AI::NeuralNet::FastSOM::MAP", GV_ADD);
sv_bless(tie, stash);
hv_magic((HV*)thingy, (GV*)tie, PERL_MAGIC_tied);
map->ref = newRV_noinc((SV*)thingy);
(&map->array)[x] = NULL;
for ( x-=1 ; x>=0 ; x-- )
(&map->array)[x] = _make_array( map );
return map;
}
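_make_map, _make_array and _make_vector all follow one pattern: allocate a single C block, then hand Perl an array tied to a blessed pointer scalar, so subscripting from Perl lands in the XS FETCH/STORE/FETCHSIZE methods of the MAP/ARRAY/VECTOR packages. A rough pure-Perl analogue of that tie plumbing, with a hypothetical class standing in for the XS-backed ones:

    package My::TiedRow;    # hypothetical; the real classes keep a C pointer instead
    sub TIEARRAY  { my ($class, $ptr) = @_; bless { ptr => $ptr, data => [] }, $class }
    sub FETCH     { $_[0]{data}[ $_[1] ] }
    sub STORE     { $_[0]{data}[ $_[1] ] = $_[2] }
    sub FETCHSIZE { scalar @{ $_[0]{data} } }

    package main;
    tie my @row, 'My::TiedRow', 0;    # the XS version stores PTR2IV(vector) here
    $row[0] = 3.14;                   # dispatches to STORE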
/*
* som functions
*/
void _som_bmu(SV* self, AV* sample) {
IV cx,cy;
NV cd;
MAGIC *mg;
SOM_GENERIC *som;
if ( !(mg = selfmagic(self)) )
croak("self has no magic!\n");
som = self2somptr(self,mg);
_bmuguts(som,sample,&cx,&cy,&cd);
PERL_UNUSED_VAR(items); /* -W */
sp = mark;
XPUSHs(sv_2mortal(newSViv(cx)));
XPUSHs(sv_2mortal(newSViv(cy)));
XPUSHs(sv_2mortal(newSVnv(cd)));
PUTBACK;
}
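From Perl, then, bmu() returns a three-element list: the winning grid coordinates and their distance, as the tests use it:

    my ($x, $y, $d) = $nn->bmu([3, 2, 4]);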
SV* _som_map(SV* self) {
MAGIC *mg;
SOM_GENERIC *som;
if ( !(mg = selfmagic(self)) )
croak("self has no magic!\n");
som = self2somptr(self,mg);
SvREFCNT_inc(som->map->ref);
return som->map->ref;
}
SV* _som_output_dim(SV* self) {
MAGIC *mg;
SOM_GENERIC *som;
if ( !(mg = selfmagic(self)) )
croak("self has no magic!\n");
som = self2somptr(self,mg);
SvREFCNT_inc(som->output_dim);
return som->output_dim;
}
void _som_FREEZE(SV* self, SV* cloning) {
IV x,y,z;
MAGIC *mg;
SOM_GENERIC *som;
SOM_Map *m;
SOM_Array *a;
SOM_Vector *v;
if ( !(mg = selfmagic(self)) )
croak("self has no magic!\n");
som = self2somptr(self,mg);
PERL_UNUSED_VAR(items); /* -W */
sp = mark;
XPUSHs( newRV_noinc(newSViv(som->Y)) );
XPUSHs( newRV_noinc(newSViv(som->Z)) );
XPUSHs( newRV_noinc(newSVnv(som->R)) );
XPUSHs( newRV_noinc(newSVnv(som->Sigma0)) );
XPUSHs( newRV_noinc(newSVnv(som->L0)) );
XPUSHs( newRV_noinc(newSVnv(som->LAMBDA)) );
XPUSHs( newRV_noinc(newSVnv(som->T)) );
XPUSHs( newRV_noinc(som->output_dim) );
XPUSHs( newRV_noinc((SV*)som->labels) );
m = som->map;
for ( x=som->X-1 ; x>=0 ; x-- ) {
a = (&m->array)[x];
for ( y=som->Y-1 ; y>=0 ; y-- ) {
v = (&a->vector)[y];
for ( z=som->Z-1 ; z>=0 ; z-- ) {
XPUSHs(newRV_noinc(newSVnv(
(&v->element)[z])));
}
}
}
PUTBACK;
}
void _som_THAW(SV* self, SV* cloning, SV* serialized) {
som->Y = SvIV(SvRV(ST(5)));
som->Z = SvIV(SvRV(ST(6)));
som->R = SvNV(SvRV(ST(7)));
som->Sigma0 = SvNV(SvRV(ST(8)));
som->L0 = SvNV(SvRV(ST(9)));
som->LAMBDA = SvNV(SvRV(ST(10)));
som->T = SvNV(SvRV(ST(11)));
som->output_dim = newSVsv(SvRV(ST(12)));
som->labels = (AV*)SvRV(ST(13));
som->map = _make_map( som );
i = 14;
m = som->map;
for ( x=som->X-1 ; x>=0 ; x-- ) {
a = (&m->array)[x];
for ( y=som->Y-1 ; y>=0 ; y-- ) {
v = (&a->vector)[y];
for ( z=som->Z-1 ; z>=0 ; z-- ) {
/*
(&v->element)[z] =
SvNV(SvRV(ST(i++)));
*/
rrr = SvRV(ST(i++));
croak("you'll put an eye out!");
} /* cloning */
PERL_UNUSED_VAR(items); /* -W */
sp = mark;
PUTBACK;
}
SV* _som_FETCH(SV* self,SV* key) {
if ( !sv_cmp( key, INT2PTR(SV*,newSVpvn("map",3) ) ) ) {
SOM_GENERIC *som = INT2PTR(SOM_Rect*,self2iv(self));
SvREFCNT_inc(som->map->ref);
return som->map->ref;
}
if ( !sv_cmp( key, INT2PTR(SV*,newSVpvn("_X",2) ) ) )
return newSViv(tied2ptr(self)->X);
if ( !sv_cmp( key, INT2PTR(SV*,newSVpvn("_Y",2) ) ) )
return newSViv(tied2ptr(self)->Y);
if ( !sv_cmp( key, INT2PTR(SV*,newSVpvn("_Z",2) ) ) )
return newSViv(tied2ptr(self)->Z);
if ( !sv_cmp( key, INT2PTR(SV*,newSVpvn("_R",2) ) ) )
return newSVnv(tied2ptr(self)->R);
if ( !sv_cmp( key, INT2PTR(SV*,newSVpvn("_L0",3) ) ) )
return newSVnv(tied2ptr(self)->L0);
return &PL_sv_undef;
}
SV* _som_STORE(SV* self,SV* key,SV* val) {
if ( !sv_cmp( key, INT2PTR(SV*,newSVpvn("_L0",3) ) ) )
tied2ptr(self)->L0 = SvNV(val);
else if ( !sv_cmp( key, INT2PTR(SV*,newSVpvn("_Sigma0",7) ) ) )
tied2ptr(self)->Sigma0 = SvNV(val);
else if ( !sv_cmp( key, INT2PTR(SV*,newSVpvn("output_dim",10) ) ) )
tied2ptr(self)->output_dim = newSVsv(val);
else if ( !sv_cmp( key, INT2PTR(SV*,newSVpvn("LAMBDA",6) ) ) )
tied2ptr(self)->LAMBDA = SvNV(val);
else if ( !sv_cmp( key, INT2PTR(SV*,newSVpvn("T",1) ) ) )
tied2ptr(self)->T = SvNV(val);
else if ( !sv_cmp( key, INT2PTR(SV*,newSVpvn("map",3) ) ) )
croak("cant assign to map");
else
croak("%s not accessible for write", SvPV_nolen(key));
return val;
}
SV* _som_FIRSTKEY() {
return INT2PTR(SV*,newSVpvn("_X",2));
}
SV* _som_NEXTKEY(SV* prev) {
return INT2PTR(SV*,newSVpvn("_Sigma0",7));
else if ( strEQ( SvPVX(prev), "_Sigma0" ) )
return INT2PTR(SV*,newSVpvn("_L0",3));
else if ( strEQ( SvPVX(prev), "_L0" ) )
return INT2PTR(SV*,newSVpvn("LAMBDA",6));
else if ( strEQ( SvPVX(prev), "LAMBDA" ) )
return INT2PTR(SV*,newSVpvn("T",1));
else if ( strEQ( SvPVX(prev), "T" ) )
return INT2PTR(SV*,newSVpvn("labels",6));
else if ( strEQ( SvPVX(prev), "labels" ) )
return INT2PTR(SV*,newSVpvn("map",3));
return &PL_sv_undef;
}
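FIRSTKEY/NEXTKEY give the tied hash a fixed key order, so plain hash iteration works on a SOM object; a small sketch:

    for my $k (keys %$nn) {    # walks _X .. labels, map via the chain above
        print "$k\n";
    }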
void _som_DESTROY(SV* self) {
IV iv;
SV *ref;
SOM_Map *map;
SOM_GENERIC *som;
if ( !SvROK(self) )
return;
ref = SvRV(self);
if ( !SvIOK(ref) )
return;
iv = SvIV(ref);
som = INT2PTR(SOM_GENERIC*,iv);
if ( !som )
return;
map = som->map;
/* more to do here ? */
}
/*
* rect functions
*/
void _rect_neiguts(SOM_Rect* som,NV sigma,IV X0,IV Y0,NV* n) {
if ( hv_exists( options, "learning_rate", 13 ) ) {
rate = SvNV(*hv_fetch(options,"learning_rate",13,0));
if ( rate )
som->L0 = rate;
else
som->L0 = 0.1;
}
else
som->L0 = 0.1;
som->map = _make_map(som);
som->labels = newAV();
sclass = sv_2mortal(newSVpvf("%s",class));
if (!sv_cmp(sclass,INT2PTR(
SV*,newSVpvn("AI::NeuralNet::FastSOM::Rect",28))))
som->type = SOMType_Rect;
/*
else if (!sv_cmp(sclass,INT2PTR(
SV*,newSVpvn("AI::NeuralNet::FastSOM::Hexa",28))))
som->type = SOMType_Hexa;
*/
if ( hv_exists( options, "learning_rate", 13 ) ) {
rate = SvNV(*hv_fetch(options,"learning_rate",13,0));
if ( rate )
hexa->L0 = rate;
else
hexa->L0 = 0.1;
}
else
hexa->L0 = 0.1;
hexa->map = _make_map( hexa );
hexa->labels = newAV();
hexa->type = SOMType_Hexa;
hash = (HV*)sv_2mortal((SV*)newHV());
tie = newRV_noinc(newSViv(PTR2IV(hexa)));
stash = gv_stashpv(class, GV_ADD);
sv_bless(tie, stash);
hv_magic(hash, (GV*)tie, PERL_MAGIC_tied);
rv = sv_bless(newRV_noinc((SV*)hash),stash);
for ( y=0 ; y<Y ; y++ ) {
d = _hexa_distance(X0,Y0,x,y);
if (d <= sigma) n[x*Y+y] = d;
}
}
}
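The neiguts helpers fill a flat X*Y mask of grid distances from the winning unit, leaving entries negative for units outside the sigma radius (which is what the `if (d < 0) continue;` in _adjustn skips). A pure-Perl sketch for the plain rectangular geometry, with all names assumed:

    sub rect_neighbor_mask {
        my ($X, $Y, $x0, $y0, $sigma) = @_;
        my @n = (-1) x ($X * $Y);    # -1 marks "outside the radius"
        for my $x (0 .. $X - 1) {
            for my $y (0 .. $Y - 1) {
                my $d = sqrt( ($x - $x0)**2 + ($y - $y0)**2 );
                $n[ $x * $Y + $y ] = $d if $d <= $sigma;
            }
        }
        return \@n;
    }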
/*
* map functions
*/
SV* _map_FETCH(SV* self,I32 x) {
SOM_Map *map;
SOM_Array *array;
map = INT2PTR(SOM_Map*,self2iv(self));
array = (&map->array)[x];
SvREFCNT_inc(array->ref);
return array->ref;
}
void _map_DESTROY(SV* self) {
SOM_Map *map;
map = INT2PTR(SOM_Map*,self2iv(self));
/* need more done here ? */
Safefree( map );
}
/*
* array functions
*/
void _array_STORE(SV* self,IV y,SV* aref) {
I32 len;
MODULE = AI::NeuralNet::FastSOM PACKAGE = AI::NeuralNet::FastSOM

PROTOTYPES: DISABLE

void
bmu (self, sample)
SV * self
AV * sample
PREINIT:
I32* temp;
PPCODE:
temp = PL_markstack_ptr++;
_som_bmu(self,sample);
if (PL_markstack_ptr != temp) {
PL_markstack_ptr = temp;
XSRETURN_EMPTY;
}
return;
SV *
map (self)
SV * self
PREINIT:
SV* rv;
CODE:
rv = _som_map(self);
ST(0) = rv;
sv_2mortal(ST(0));
SV *
output_dim (self)
SV * self
PREINIT:
SV* rv;
CODE:
rv = _som_output_dim(self);
ST(0) = rv;
sv_2mortal(ST(0));
MODULE = AI::NeuralNet::FastSOM PACKAGE = AI::NeuralNet::FastSOM::MAP
PROTOTYPES: DISABLE
SV *
FETCH (self, x)
SV * self
I32 x
PREINIT:
SV* rv;
CODE:
rv = _map_FETCH(self, x);
ST(0) = rv;
sv_2mortal(ST(0));
IV
FETCHSIZE (self)
SV * self
PREINIT:
IV rv;
CODE:
rv = (INT2PTR(SOM_Map*,self2iv(self)))->X;
XSprePUSH; PUSHi((IV)rv);
void
DESTROY (obj)
SV * obj
PREINIT:
I32* temp;
PPCODE:
temp = PL_markstack_ptr++;
_map_DESTROY(obj);
if (PL_markstack_ptr != temp) {
PL_markstack_ptr = temp;
XSRETURN_EMPTY;
}
return;
MODULE = AI::NeuralNet::FastSOM PACKAGE = AI::NeuralNet::FastSOM::ARRAY
t/orig/rect.t
t/orig/som.t
t/orig/torus.t
t/pods.t
t/rect.t
t/rect_retrieve.t
t/som.t
t/torus.t
t/torus_retrieve.t
TODO
typemap.v1
typemap.v2
META.json Module JSON meta-data (added by MakeMaker)
Makefile.PL
use 5.006002;
use ExtUtils::MakeMaker;
WriteMakefile(
NAME => 'AI::NeuralNet::FastSOM',
VERSION_FROM => 'lib/AI/NeuralNet/FastSOM.pm',
ABSTRACT_FROM => 'lib/AI/NeuralNet/FastSOM.pm',
AUTHOR => 'Rick Myers <jrm@cpan.org>',
LICENSE => 'perl',
PREREQ_PM => { Storable => 0 },
TYPEMAPS => [ $] < 5.008000 ? 'typemap.v1' : 'typemap.v2' ],
test => { TESTS => 't/*.t t/orig/*.t' },
clean => { FILES => 't/*.bin typemap' },
);
#
# everything below is a work-around for some sort of bug in ExtUtils::ParseXS
# not picking up typemap files unless named "typemap" in perl5.6.2
#
# note, however, that the TYPEMAPS entry above is still needed for 5.6.2
# installs that are still using the old xsubpp
#
package MY;
sub xs_c {
my $t = shift->SUPER::xs_c(@_);
$t =~ s/:/:\n\t\$(MAKE) typemap/;
$t;
}
sub test {
my $t = shift->SUPER::test(@_);
$t =~ s/(PERL_DL_NONLAZY=)/HARNESS_OPTIONS=j1 $1/g;
$t;
}
sub postamble {
my $out = <<'README';
readme:
pod2text lib/AI/NeuralNet/FastSOM.pm README
perl -i -pe's{\*(\S+)\*}{\1}g' README
README
if ( $] < 5.008000 ) {
$out .= <<'EOP';
typemap:
$(CP) typemap.v1 typemap
EOP
}
else {
$out .= <<'EOP';
typemap:
$(CP) typemap.v2 typemap
EOP
}
return $out;
}
exit 0;
examples/eigenvector_initialization.pl
$A = append ($A, zeroes (1)); # by padding zeroes
}
my ($E, $e) = eigens_sym $A;
# print $E;
# print $e;
my @es = list $e; # eigenvalues
# warn "es : ".Dumper \@es;
my @es_desc = sort { $b <=> $a } @es; # eigenvalues sorted desc
# warn "desc: ".Dumper \@es_desc;
my @es_idx = map { _find_num ($_, \@es) } @es_desc; # eigenvalue indices sorted by eigenvalue (desc)
# warn "idx: ".Dumper \@es_idx;
sub _find_num {
my $v = shift;
my $l = shift;
for my $i (0..$#$l) {
return $i if $v == $l->[$i];
}
return undef;
}
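Assuming the eigenvalues are distinct, the per-value search above can be replaced by sorting indices directly:

    my @es_idx = sort { $es[$b] <=> $es[$a] } 0 .. $#es;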
examples/load_save.pl
$A = append ($A, zeroes (1)); # by padding zeroes
}
my ($E, $e) = eigens_sym $A;
# print $E;
# print $e;
my @es = list $e; # eigenvalues
# warn "es : ".Dumper \@es;
my @es_desc = sort { $b <=> $a } @es; # eigenvalues sorted desc
# warn "desc: ".Dumper \@es_desc;
my @es_idx = map { _find_num ($_, \@es) } @es_desc; # eigenvalue indices sorted by eigenvalue (desc)
# warn "idx: ".Dumper \@es_idx;
sub _find_num {
my $v = shift;
my $l = shift;
for my $i (0..$#$l) {
return $i if $v == $l->[$i];
}
return undef;
}
lib/AI/NeuralNet/FastSOM.pm
sub label {
my ($self, $x, $y, $l) = @_;
return defined $l
? $self->{labels}->[$x]->[$y] = $l
: $self->{labels}->[$x]->[$y];
}
sub value {
my ($self, $x, $y, $v) = @_;
return defined $v
? $self->{map}[$x][$y] = $v
: $self->{map}[$x][$y];
}
sub mean_error {
my $self = shift;
my $error = 0;
map { $error += $_ } # then add them all up
map { ( $self->bmu($_) )[2] } # then find the distance
@_; # take all data vectors
return ($error / scalar @_); # return the mean value
}
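mean_error is thus the mean BMU distance over a data set, handy as a rough convergence check. For example:

    my @vs = ([3, 2, 4], [-1, -1, -1], [0, 4, -3]);
    $nn->train(400, @vs);
    printf "mean quantization error: %f\n", $nn->mean_error(@vs);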
XSLoader::load(__PACKAGE__);
1;
__END__
lib/AI/NeuralNet/FastSOM/Hexa.pm
sub initialize {
my $self = shift;
my @data = @_;
our $i = 0;
my $get_from_stream = sub {
$i = 0 if $i > $#data;
return [ @{ $data[$i++] } ]; # cloning !
} if @data;
$get_from_stream ||= sub {
return [ map { rand( 1 ) - 0.5 } 1..$self->{_Z} ];
};
for my $x (0 .. $self->{_X}-1) {
for my $y (0 .. $self->{_X}-1) {
$self->{map}->[$x]->[$y] = &$get_from_stream;
}
}
}
1;
__END__
=pod
lib/AI/NeuralNet/FastSOM/Hexa.pm
=over
=item I<radius>
Returns the radius (half the diameter).
=item I<diameter>
Returns the diameter (= dimension) of the hexagon.
=item I<map>
I<$m> = I<$nn>->map
This method returns the 2-dimensional array of vectors in the grid
(as a reference to an array of references to arrays of vectors).
Example:
my $m = $nn->map;
for my $x (0 .. $nn->diameter -1) {
for my $y (0 .. $nn->diameter -1){
warn "vector at $x, $y: ". Dumper $m->[$x]->[$y];
}
}
This array represents a hexagon like this (ASCII drawing is so cool):
<0,0>
<0,1> <1,0>
lib/AI/NeuralNet/FastSOM/Hexa.pm
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself, either Perl version 5.10.0 or,
at your option, any later version of Perl 5 you may have available.
=cut
sub _get_coordinates {
my $self = shift;
my $D1 = $self->{_D}-1;
my $t;
return map { $t = $_ ; map { [ $t, $_ ] } (0 .. $D1) } (0 .. $D1)
}
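Unrolled, that nested map builds every [$x, $y] pair of the hexagon's grid in row-major order; the equivalent loops:

    my @coords;
    for my $t (0 .. $D1) {
        for my $y (0 .. $D1) {
            push @coords, [ $t, $y ];
        }
    }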
sqrt ( ($x - $X) ** 2 + ($y - $Y) ** 2 );
lib/AI/NeuralNet/FastSOM/Rect.pm
sub initialize {
my $self = shift;
my @data = @_;
my $i = 0;
my $get_from_stream = sub {
$i = 0 if $i > $#data;
return [ @{ $data[$i++] } ]; # cloning !
} if @data;
$get_from_stream ||= sub {
return [ map { rand( 1 ) - 0.5 } 1..$self->{_Z} ];
};
for my $x (0 .. $self->{_X}-1) {
for my $y (0 .. $self->{_Y}-1) {
$self->{map}->[$x]->[$y] = &$get_from_stream;
}
}
}
sub as_string {
my $self = shift;
my $s = '';
$s .= " ";
for my $y (0 .. $self->{_Y}-1){
$s .= sprintf(" %02d ",$y);
}
$s .= "\n" . "-"x107 . "\n";
my $dim = scalar @{ $self->{map}->[0]->[0] };
for my $x (0 .. $self->{_X}-1) {
for my $w ( 0 .. $dim-1 ){
$s .= sprintf("%02d | ",$x);
for my $y (0 .. $self->{_Y}-1){
$s .= sprintf("% 2.2f ", $self->{map}->[$x]->[$y]->[$w]);
}
$s .= "\n";
}
$s .= "\n";
}
return $s;
}
sub as_data {
my $self = shift;
my $s = '';
my $dim = scalar @{ $self->{map}->[0]->[0] };
for my $x (0 .. $self->{_X}-1) {
for my $y (0 .. $self->{_Y}-1){
for my $w ( 0 .. $dim-1 ){
$s .= sprintf("\t%f", $self->{map}->[$x]->[$y]->[$w]);
}
$s .= "\n";
}
}
return $s;
}
1;
__END__
lib/AI/NeuralNet/FastSOM/Rect.pm
Example:
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => 3);
=head2 Methods
=over
=item I<map>
I<$m> = I<$nn>->map
This method returns the 2-dimensional array of vectors in the grid
(as a reference to an array of references to arrays of vectors). The
representation of the 2-dimensional array is straightforward.
Example:
my $m = $nn->map;
for my $x (0 .. 5) {
for my $y (0 .. 4){
warn "vector at $x, $y: ". Dumper $m->[$x]->[$y];
}
}
=item I<as_data>
print I<$nn>->as_data
void _adjust(SV* self,NV l,NV sigma,AV* unit,AV* v);
void _adjustn(SOM_GENERIC* som,NV l,NV sigma,NV* n,AV* v);
void _bmuguts(SOM_GENERIC *generic,AV *sample,IV *bx,IV *by,NV *bd);
SOM_Map* _make_map(SOM_GENERIC *generic);
SOM_Array* _make_array(SOM_Map* map);
SOM_Vector* _make_vector(SOM_Array* array);
AV* _neighbors(SV* self,NV sigma,IV X0,IV Y0,...);
void _map_DESTROY(SV* self);
SV* _map_FETCH(SV* self,I32 x);
IV _map_FETCHSIZE(SV* self);
void _array_DESTROY(SV* self);
SV* _array_FETCH(SV* self,I32 y);
IV _array_FETCHSIZE(SV* self);
void _array_STORE(SV* self,IV y,SV* aref);
void _vector_DESTROY(SV* self);
SV* _vector_FETCH(SV* self,I32 z);
IV _vector_FETCHSIZE(SV* self);
void _vector_STORE(SV* self,I32 z,NV val);
NV _vector_distance(AV* V1,AV* V2);
void _som_bmu(SV* self,AV* sample);
SV* _som_map(SV* self);
SV* _som_output_dim(SV* self);
void _som_train(SV* self,IV epochs);
SV* _som_FETCH(SV* self,SV* key);
SV* _som_STORE(SV* self,SV* key,SV* val);
SV* _som_FIRSTKEY();
SV* _som_NEXTKEY(SV* prev);
void _som_FREEZE(SV* self,SV* cloning);
void _som_THAW(SV* self,SV* cloning,SV* serialized);
void _som_DESTROY(SV* self);
my $nn = AI::NeuralNet::FastSOM::Hexa->new(
output_dim => 2,
input_dim => 3,
);
$nn->initialize( [ 0, 0, 1 ], [ 0, 1, 0 ] );
my $d = $nn->diameter;
for my $x ( 0 .. $d-1 ) {
for my $y (0 .. $d-1) {
ok(
eq_array(
$nn->{map}->[$x]->[$y],
$y == 0 ? [ 0, 0, 1 ] : [ 0, 1, 0 ]
), 'value init'
);
}
}
# warn Dumper $nn;
}
{
my $nn = AI::NeuralNet::FastSOM::Hexa->new(
t/hexa_retrieve.t
isa_ok( $nn, 'AI::NeuralNet::FastSOM::Hexa', 'retrieve hexa' );
is($nn->{_X}, 3, '_X');
#is($nn->{_Y}, 6, '_Y');
is($nn->{_Z}, 3, '_Z');
my ($x,$y) = $nn->bmu([3,2,4]);
is( $x, $bmu_x, 'stored x' );
is( $y, $bmu_y, 'stored y' );
my $m = $nn->map;
isa_ok( $m, 'ARRAY', 'stored map' );
isa_ok( $m->[0], 'ARRAY', 'stored array' );
isa_ok( $m->[0][0], 'ARRAY', 'stored vector' );
ok( $m->[0][0][0], 'stored scalar' );
}
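The *_retrieve.t tests read back a SOM that an earlier test stored; a minimal sketch of the round trip, assuming Storable and a scratch file name (serialization itself goes through the XS FREEZE/THAW hooks):

    use Storable qw(store retrieve);
    store $nn, 't/hexa.bin';              # file name hypothetical
    my $nn2 = retrieve('t/hexa.bin');
    my ($x, $y) = $nn2->bmu([3, 2, 4]);   # same winner as before the round trip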
__END__
t/orig/hexa.t
}
{
my $nn = new AI::NeuralNet::FastSOM::Hexa (output_dim => 2,
input_dim => 3);
$nn->initialize ( [ 0, 0, 1 ], [ 0, 1, 0 ] );
my $d = $nn->diameter;
for my $x (0 .. $d-1) {
for my $y (0 .. $d-1) {
ok (eq_array ($nn->{map}->[$x]->[$y],
$y == 0 ? [ 0, 0, 1 ] : [ 0, 1, 0 ]), 'value init');
}
}
# warn Dumper $nn;
}
{
my $nn = new AI::NeuralNet::FastSOM::Hexa (output_dim => 2,
input_dim => 3);
$nn->initialize;
t/orig/pods.t
plan skip_all => "Test::Pod 1.00 required for testing POD" if $@;
my @PODs = qw(
lib/AI/NeuralNet/FastSOM.pm
lib/AI/NeuralNet/FastSOM/Rect.pm
lib/AI/NeuralNet/FastSOM/Hexa.pm
lib/AI/NeuralNet/FastSOM/Torus.pm
);
plan tests => scalar @PODs;
map {
pod_file_ok ( $_, "$_ pod ok" )
} @PODs;
t/orig/rect.t
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => 3);
$nn->initialize;
# print Dumper $nn;
# exit;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train (400, @vs);
foreach my $v (@vs) {
ok (_find ($v, $nn->map), 'found learned vector '. join (",", @$v));
}
sub _find {
my $v = shift;
my $m = shift;
use AI::NeuralNet::FastSOM::Utils;
foreach my $x ( 0 .. 4 ) {
foreach my $y ( 0 .. 5 ) {
return 1 if AI::NeuralNet::FastSOM::Utils::vector_distance ($m->[$x]->[$y], $v) < 0.01;
}
}
return 0;
}
t/orig/torus.t
my $nn = new AI::NeuralNet::FastSOM::Torus (output_dim => "5x6",
input_dim => 3);
$nn->initialize;
# print Dumper $nn;
# exit;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train (400, @vs);
foreach my $v (@vs) {
ok (_find ($v, $nn->map), 'found learned vector '. join (",", @$v));
}
sub _find {
my $v = shift;
my $m = shift;
use AI::NeuralNet::FastSOM::Utils;
foreach my $x ( 0 .. 4 ) {
foreach my $y ( 0 .. 5 ) {
return 1 if AI::NeuralNet::FastSOM::Utils::vector_distance ($m->[$x]->[$y], $v) < 0.01;
}
}
return 0;
}
t/pods.t
plan skip_all => "Test::Pod 1.00 required for testing POD" if $@;
my @PODs = qw(
lib/AI/NeuralNet/FastSOM.pm
lib/AI/NeuralNet/FastSOM/Rect.pm
lib/AI/NeuralNet/FastSOM/Hexa.pm
lib/AI/NeuralNet/FastSOM/Torus.pm
);
plan tests => scalar @PODs;
map {
pod_file_ok( $_, "$_ pod ok" )
} @PODs;
__END__
t/rect.t
my $nn = AI::NeuralNet::FastSOM::Rect->new(
output_dim => '5x6',
input_dim => 3
);
ok( $nn->isa( 'AI::NeuralNet::FastSOM::Rect' ), 'rect class' );
my $nn2 = $nn;
my $nn3 = $nn2;
is( $nn, $nn3, 'rect eq' );
my $m1 = $nn->map;
isa_ok( $m1, 'ARRAY', 'map array' );
my $m2 = $m1;
my $m3 = $nn2->map;
my $m4 = $m3;
is( $m2, $m4, 'map eq' );
my $a = $m1->[0];
isa_ok( $a, 'ARRAY', 'array array' );
ok( $a != $m1, 'array unique' );
my $a2 = $m4->[0];
is( $a, $a2, 'array eq' );
my $v = $a->[0];
isa_ok( $v, 'ARRAY', 'vector array' );
ok( $v != $a, 'vector unique' );
my $v2 = $nn3->map->[0]->[0];
is( $v, $v2, 'vector eq' );
my $v3 = $nn2->map->[0][0];
is( $v, $v3, 'vector shorter' );
my $m = $nn->map;
$m->[0][0][0] = 3.245;
is( $m->[0][0][0], 3.245, 'element set' );
$m->[0][0][0] = 1.25;
is( $m->[0][0][0], 1.25, 'element reset' );
$m->[0][0][1] = 4.8;
is( $m->[0][0][1], 4.8, 'element set z' );
$m->[0][0][1] = 2.6;
is( $m->[0][0][1], 2.6, 'element reset z' );
$m->[0][1][0] = 8.9;
is( $m->[0][1][0], 8.9, 'element set y' );
my $nn = new AI::NeuralNet::FastSOM::Rect(
output_dim => "5x6",
input_dim => 3
);
$nn->initialize;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train(400, @vs);
for my $v (@vs) {
ok(_find($v,$nn->map),'found learned vector '.join(",", @$v));
}
ok ($nn->as_string, 'pretty print');
ok ($nn->as_data, 'raw format');
}
{
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => 3);
$nn->initialize;
t/rect_retrieve.t
isa_ok( $nn, 'AI::NeuralNet::FastSOM::Rect', 'retrieve rect' );
is($nn->{_X}, 5, '_X');
is($nn->{_Y}, 6, '_Y');
is($nn->{_Z}, 3, '_Z');
my ($x,$y) = $nn->bmu([3,2,4]);
is( $x, $bmu_x, 'stored x' );
is( $y, $bmu_y, 'stored y' );
my $m = $nn->map;
isa_ok( $m, 'ARRAY', 'stored map' );
isa_ok( $m->[0], 'ARRAY', 'stored array' );
isa_ok( $m->[0][0], 'ARRAY', 'stored vector' );
ok( $m->[0][0][0], 'stored scalar' );
}
__END__
t/torus.t
my $nn = AI::NeuralNet::FastSOM::Torus->new(
output_dim => "5x6",
input_dim => 3,
);
$nn->initialize;
my @vs = ([ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3]);
$nn->train(400, @vs);
for my $v (@vs) {
ok( _find($v, $nn->map), 'found learned vector '. join (",", @$v) );
}
ok( $nn->as_string, 'pretty print' );
ok( $nn->as_data, 'raw format' );
}
{
my $nn = AI::NeuralNet::FastSOM::Torus->new(
output_dim => '5x6',
t/torus_retrieve.t
isa_ok( $nn, 'AI::NeuralNet::FastSOM::Torus', 'retrieve torus' );
is($nn->{_X}, 5, '_X');
is($nn->{_Y}, 6, '_Y');
is($nn->{_Z}, 3, '_Z');
my ($x,$y) = $nn->bmu([3,2,4]);
is( $x, $bmu_x, 'stored x' );
is( $y, $bmu_y, 'stored y' );
my $m = $nn->map;
isa_ok( $m, 'ARRAY', 'stored map' );
isa_ok( $m->[0], 'ARRAY', 'stored array' );
isa_ok( $m->[0][0], 'ARRAY', 'stored vector' );
ok( $m->[0][0][0], 'stored scalar' );
}
__END__