lib/AI/NNEasy.hploo
#define FETCH_ATTR_AV(hv,k) (AV*) FETCH_ATTR(hv,k)
#define FETCH_ATTR_SV_REF(hv,k) SvRV( FETCH_ATTR(hv,k) )
#define FETCH_ATTR_HV_REF(hv,k) (HV*) SvRV( FETCH_ATTR(hv,k) )
#define FETCH_ATTR_AV_REF(hv,k) (AV*) SvRV( FETCH_ATTR(hv,k) )
#define FETCH_ELEM(av,i) *av_fetch(av,i,0)
#define FETCH_ELEM_HV_REF(av,i) (HV*) SvRV( FETCH_ELEM(av,i) )
#define FETCH_ELEM_AV_REF(av,i) (AV*) SvRV( FETCH_ELEM(av,i) )
__[C]__
sub NNEasy ($file , \@out_types , $error_ok , $in , $out , \@layers , $conf) {
$file ||= 'nneasy.nne' ;
if ( $this->load($file) ) {
return $this ;
}
my $in_sz = ref $in ? $in->{nodes} : $in ;
my $out_sz = ref $out ? $out->{nodes} : $out ;
@layers = ($in_sz+$out_sz) if !@layers ;
lib/AI/NNEasy.hploo
}
$error_ok = $min_dif / 2 ;
$error_ok -= $error_ok*0.1 ;
}
$this->{ERROR_OK} = $error_ok ;
return $this ;
}
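A worked sketch of the default $error_ok above ($min_dif is computed in the code elided at the page break; it is taken to be the smallest gap between two output types):

    ## out_types [0,1]  =>  $min_dif = 1
    my $error_ok = 1 / 2 ;          ## 0.5
    $error_ok -= $error_ok * 0.1 ;  ## 0.45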
sub _layer_conf ($def,$conf) {
$def ||= {} ;
$conf = { nodes=>$conf } if !ref($conf) ;
foreach my $Key ( keys %$def ) { $$conf{$Key} = $$def{$Key} if !exists $$conf{$Key} ;}
my $layer_conf = {nodes=>1 , persistent_activation=>0 , decay=>0 , random_activation=>0 , threshold=>0 , activation_function=>'tanh' , random_weights=>1} ;
foreach my $Key ( keys %$layer_conf ) { $$layer_conf{$Key} = $$conf{$Key} if exists $$conf{$Key} ;}
return $layer_conf ;
}
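A sketch of what _layer_conf() returns: a plain scalar is promoted to {nodes => N}, missing keys are filled from $def, and only the known layer keys survive:

    my $layer = $this->_layer_conf( undef , 3 ) ;
    ## { nodes=>3 , persistent_activation=>0 , decay=>0 , random_activation=>0 ,
    ##   threshold=>0 , activation_function=>'tanh' , random_weights=>1 }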
sub reset_nn {
$this->{NN} = AI::NNEasy::NN->new( @{ $this->{NN_ARGS} } ) ;
}
sub load ($file) {
$file ||= $this->{FILE} ;
if ( -s $file ) {
open (my $fh, '<', $file) or return ;
my $dump = join '' , <$fh> ;
close ($fh) ;
my $restored = thaw($dump) ;
if ($restored) {
my $fl = $this->{FILE} ;
%$this = %$restored ;
$this->{FILE} = $fl if $fl ;
return 1 ;
}
}
return ;
}
sub save ($file) {
$file ||= $this->{FILE} ;
my $dump = freeze( {%$this} ) ;
open (my $fh,">$this->{FILE}") ;
print $fh $dump ;
close ($fh) ;
}
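A save/load usage sketch ($nn and the file name are illustrative):

    $nn->save('xor.nne') ;         ## freeze()s a copy of %$nn to the file
    $nn->load('xor.nne') or die ;  ## thaw()s it back; returns 1 on success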
sub learn ($in,$out,$n) {
$n ||= 100 ;
my $err ;
for (1..$n) {
$this->{NN}->run($in) ;
$err = $this->{NN}->learn($out) ;
}
$err *= -1 if $err < 0 ;
return $err ;
}
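learn() runs $n forward/backward passes over a single input/output pair and returns the absolute error of the last pass. A usage sketch (values are illustrative):

    my $err = $nn->learn( [0,1] , [1] , 100 ) ;  ## input, expected output, passes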
*_learn_set_get_output_error = \&_learn_set_get_output_error_c ;
sub _learn_set_get_output_error_pl ($set , $error_ok , $ins_ok , $verbose) {
for (my $i = 0 ; $i < @$set ; $i+=2) {
$this->{NN}->run($$set[$i]) ;
$this->{NN}->learn($$set[$i+1]) ;
}
my ($err,$learn_ok,$print) ;
for (my $i = 0 ; $i < @$set ; $i+=2) {
$this->{NN}->run($$set[$i]) ;
my $er = $this->{NN}->RMSErr($$set[$i+1]) ;
$er *= -1 if $er < 0 ;
lib/AI/NNEasy.hploo
XSRETURN(3) ;
}
else {
EXTEND(SP , 2) ;
ST(0) = sv_2mortal(newSVnv(err)) ;
ST(1) = sv_2mortal(newSViv(learn_ok)) ;
XSRETURN(2) ;
}
}
sub learn_set (\@set,$ins_ok,$limit,$verbose) {
my $ins_sz = @set / 2 ;
$ins_ok ||= $ins_sz ;
my $err_static_limit = 15 ;
my $err_static_limit_positive ;
if ( ref($limit) eq 'ARRAY' ) {
($limit,$err_static_limit,$err_static_limit_positive) = @$limit ;
}
lib/AI/NNEasy.hploo
print $print ;
}
}
print "epoch $counter : error = $err : ok = $learn_ok\n" if $verbose > 1 ;
}
}
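As the $i += 2 loops above show, a set is a flat list alternating inputs and expected outputs. A usage sketch for XOR (argument values are illustrative):

    my @set = (
      [0,0] => [0] ,
      [0,1] => [1] ,
      [1,0] => [1] ,
      [1,1] => [0] ,
    ) ;
    $nn->learn_set( \@set , 4 , 30000 , 1 ) ;  ## set, ins_ok, limit, verbose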
sub get_set_error (\@set,$ins_ok) {
my $ins_sz = @set / 2 ;
$ins_ok ||= $ins_sz ;
my $err = 0 ;
for (my $i = 0 ; $i < @set ; $i+=2) {
$this->{NN}->run($set[$i]) ;
my $er = $this->{NN}->RMSErr($set[$i+1]) ;
$er *= -1 if $er < 0 ;
$err += $er ;
}
$err /= $ins_ok ;
return $err ;
}
sub run ($in) {
$this->{NN}->run($in) ;
my $out = $this->{NN}->output() ;
return $out ;
}
sub run_get_winner {
my $out = $this->run(@_) ;
foreach my $out_i ( @$out ) {
$out_i = $this->out_type_winner($out_i) ;
}
return $out ;
}
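A usage sketch (output values are illustrative): run() returns the raw activations, while run_get_winner() maps each of them to the nearest value in OUT_TYPES via out_type_winner() below:

    my $out = $nn->run([0,1]) ;             ## e.g. [0.93]
    my $win = $nn->run_get_winner([0,1]) ;  ## e.g. [1]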
sub out_type_winner ($val) {
my ($out_type , %err) ;
foreach my $types_i ( @{ $this->{OUT_TYPES} } ) {
my $er = $types_i - $val ;
$er *= -1 if $er < 0 ;
$err{$types_i} = $er ;
}
my $min_type_err = (sort { $err{$a} <=> $err{$b} } keys %err)[0] ;
$out_type = $min_type_err ;
lib/AI/NNEasy.hploo
be able to create flexible NNs.
=> Class::HPLOO
I used L<Class::HPLOO> to write the module quickly, especially the XS support.
L<Class::HPLOO> enables this kind of syntax for Perl classes:
class Foo {
sub bar($x , $y) {
$this->add($x , $y) ;
}
sub[C] int add( int x , int y ) {
int res = x + y ;
return res ;
}
}
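So a Foo object mixes the Perl and the compiled C methods transparently (a sketch, assuming Class::HPLOO::Base's default new()):

    my $foo = Foo->new ;
    print $foo->bar(3 , 4) ;  ## bar() calls the XS add(); prints 7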
lib/AI/NNEasy.pm
use vars qw(%CLASS_HPLOO @ISA $VERSION) ;
$VERSION = '0.06' ;
@ISA = qw(Class::HPLOO::Base UNIVERSAL) ;
my $CLASS = 'AI::NNEasy' ; sub __CLASS__ { 'AI::NNEasy' } ;
use Class::HPLOO::Base ;
use AI::NNEasy::NN ;
use Storable qw(freeze thaw) ;
use Data::Dumper ;
sub NNEasy {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $file = shift(@_) ;
my @out_types = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
my $error_ok = shift(@_) ;
my $in = shift(@_) ;
my $out = shift(@_) ;
my @layers = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
my $conf = shift(@_) ;
lib/AI/NNEasy.pm
}
$error_ok = $min_dif / 2 ;
$error_ok -= $error_ok*0.1 ;
}
$this->{ERROR_OK} = $error_ok ;
return $this ;
}
sub _layer_conf {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $def = shift(@_) ;
my $conf = shift(@_) ;
$def ||= {} ;
$conf = { nodes=>$conf } if !ref($conf) ;
foreach my $Key ( keys %$def ) { $$conf{$Key} = $$def{$Key} if !exists $$conf{$Key} ;}
my $layer_conf = {nodes=>1 , persistent_activation=>0 , decay=>0 , random_activation=>0 , threshold=>0 , activation_function=>'tanh' , random_weights=>1} ;
foreach my $Key ( keys %$layer_conf ) { $$layer_conf{$Key} = $$conf{$Key} if exists $$conf{$Key} ;}
return $layer_conf ;
}
sub reset_nn {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
$this->{NN} = AI::NNEasy::NN->new( @{ $this->{NN_ARGS} } ) ;
}
sub load {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $file = shift(@_) ;
$file ||= $this->{FILE} ;
if ( -s $file ) {
open (my $fh, '<', $file) or return ;
my $dump = join '' , <$fh> ;
close ($fh) ;
lib/AI/NNEasy.pm
if ($restored) {
my $fl = $this->{FILE} ;
%$this = %$restored ;
$this->{FILE} = $fl if $fl ;
return 1 ;
}
}
return ;
}
sub save {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $file = shift(@_) ;
$file ||= $this->{FILE} ;
my $dump = freeze( {%$this} ) ;
open (my $fh,">$this->{FILE}") ;
print $fh $dump ;
close ($fh) ;
}
sub learn {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $in = shift(@_) ;
my $out = shift(@_) ;
my $n = shift(@_) ;
$n ||= 100 ;
my $err ;
for (1..$n) {
$this->{NN}->run($in) ;
$err = $this->{NN}->learn($out) ;
}
$err *= -1 if $err < 0 ;
return $err ;
}
*_learn_set_get_output_error = \&_learn_set_get_output_error_c ;
sub _learn_set_get_output_error_pl {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $set = shift(@_) ;
my $error_ok = shift(@_) ;
my $ins_ok = shift(@_) ;
my $verbose = shift(@_) ;
for (my $i = 0 ; $i < @$set ; $i+=2) {
$this->{NN}->run($$set[$i]) ;
$this->{NN}->learn($$set[$i+1]) ;
lib/AI/NNEasy.pm
$err /= $ins_ok ;
return ( $err , $learn_ok , $print ) ;
}
sub learn_set {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
my $ins_ok = shift(@_) ;
my $limit = shift(@_) ;
my $verbose = shift(@_) ;
my $ins_sz = @set / 2 ;
$ins_ok ||= $ins_sz ;
lib/AI/NNEasy.pm
print $print ;
}
}
print "epoch $counter : error = $err : ok = $learn_ok\n" if $verbose > 1 ;
}
}
sub get_set_error {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
my $ins_ok = shift(@_) ;
my $ins_sz = @set / 2 ;
$ins_ok ||= $ins_sz ;
my $err = 0 ;
lib/AI/NNEasy.pm
$this->{NN}->run($set[$i]) ;
my $er = $this->{NN}->RMSErr($set[$i+1]) ;
$er *= -1 if $er < 0 ;
$err += $er ;
}
$err /= $ins_ok ;
return $err ;
}
sub run {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $in = shift(@_) ;
$this->{NN}->run($in) ;
my $out = $this->{NN}->output() ;
return $out ;
}
sub run_get_winner {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $out = $this->run(@_) ;
foreach my $out_i ( @$out ) {
$out_i = $this->out_type_winner($out_i) ;
}
return $out ;
}
sub out_type_winner {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $val = shift(@_) ;
my ($out_type , %err) ;
foreach my $types_i ( @{ $this->{OUT_TYPES} } ) {
my $er = $types_i - $val ;
$er *= -1 if $er < 0 ;
$err{$types_i} = $er ;
lib/AI/NNEasy.pm
be able to create flexible NNs.
=head1 Class::HPLOO
I used L<Class::HPLOO> to write the module quickly, especially the XS support.
L<Class::HPLOO> enables this kind of syntax for Perl classes:
class Foo {
sub bar($x , $y) {
$this->add($x , $y) ;
}
sub[C] int add( int x , int y ) {
int res = x + y ;
return res ;
}
}
lib/AI/NNEasy/NN.hploo
use Class::HPLOO qw(base alloo) ;
class AI::NNEasy::NN[0.06] {
use AI::NNEasy::NN::layer ;
use AI::NNEasy::NN::feedforward ;
use AI::NNEasy::NN::backprop ;
vars($AUTOLOAD) ;
sub NN ($params , $netParams) {
my @layers ;
foreach my $i (keys %$netParams) {
$this->{$i} = $$netParams{$i};
}
$this->{networktype} ||= 'feedforward' ;
$this->{learning_algorithm} ||= 'backprop' ;
$this->{learning_algorithm_class} = "AI::NNEasy::NN::" . $this->{learning_algorithm} ;
lib/AI/NNEasy/NN.hploo
if ( $this->{bias} ) {
$this->{biasNode} = AI::NNEasy::NN::node->new( {activation_function => 'linear'} ) ;
$this->{biasNode}->{activation} = 1;
}
$this->init ;
return $this ;
}
sub init {
my @layers = @{$this->{layers}} ;
my $currentLayer = 0 ;
foreach my $layer (@layers) {
# Foreach node we need to make connections east and west
foreach my $node ( @{$layer->{nodes}} ) {
# only initialise to the west if layer > 0
if ($currentLayer > 0 ) {
foreach my $westNodes ( @{ $this->{layers}->[$currentLayer -1]->{nodes} } ) {
lib/AI/NNEasy/NN.hploo
foreach my $layer (@{$this->{layers}}) {
foreach my $node (@{$layer->{nodes}}) {
push @{$node->{connectedNodesWest}->{nodes}},$this->{biasNode};
my $weight = $this->{random_weights} ? rand(1) : 0 ;
$node->{connectedNodesWest}->{weights}{ $this->{biasNode}->{nodeid} } = $weight ;
}
}
}
}
sub learn {
&{$this->{learning_algorithm_class} . '::learn'}($this , @_) ;
}
sub output ($params) {
my $outputLayer = defined $$params{layer} ? $this->{layers}[$$params{layer}] : $this->{layers}[-1] ;
return AI::NNEasy::NN::layer::layer_output($outputLayer) ;
}
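A usage sketch of output(): with no arguments it returns the activations of the last layer; {layer => $i} selects any other layer:

    my $final  = $nn->output() ;
    my $hidden = $nn->output( {layer => 1} ) ;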
sub linear ($value) { return $value ;}
*tanh = \&tanh_c ;
sub[C] double tanh_c ( SV* self , double value ) {
if ( value > 20 ) { return 1 ;}
else if ( value < -20 ) { return -1 ;}
else {
double x = Perl_exp(value) ;
double y = Perl_exp(-value) ;
double ret = (x-y)/(x+y) ;
return ret ;
}
}
sub tanh_pl ($value) {
if ($value > 20) { return 1 ;}
elsif ($value < -20) { return -1 ;}
else {
my $x = exp($value) ;
my $y = exp(-$value) ;
return ($x-$y)/($x+$y) ;
}
}
*sigmoid = \&sigmoid_c ;
sub sigmoid_pl ($value) {
return (1+exp(-$value))**-1 ;
}
sub[C] double sigmoid_c ( SV* self , double value ) {
double ret = 1 + Perl_exp( -value ) ;
ret = Perl_pow(ret , -1) ;
return ret ;
}
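In math form the two activations above are

    \tanh(x) = \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}} \qquad \sigma(x) = \frac{1}{1 + e^{-x}}

and the C tanh clamps to ±1 for |x| > 20 only to keep Perl_exp() from overflowing.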
sub AUTOLOAD {
my ($name) = ( $AUTOLOAD =~ /(\w+)$/ ) ;
my $sub = $this->{learning_algorithm_class} . '::' . $name ;
return &$sub($this,@_) if defined &$sub ;
my @call = caller ;
die("Can't find $AUTOLOAD or $sub at @call\n") ;
}
}
class AI::NNEasy::NN::feedforward_backprop extends AI::NNEasy::NN::backprop , AI::NNEasy::NN::feedforward , AI::NNEasy::NN {
## Just define a class with this @ISA.
}
lib/AI/NNEasy/NN.pm
{ package AI::NNEasy::NN ;
use strict qw(vars) ; no warnings ;
use vars qw(%CLASS_HPLOO @ISA $VERSION) ;
$VERSION = '0.06' ;
@ISA = qw(Class::HPLOO::Base UNIVERSAL) ;
my $CLASS = 'AI::NNEasy::NN' ; sub __CLASS__ { 'AI::NNEasy::NN' } ;
use Class::HPLOO::Base ;
use AI::NNEasy::NN::layer ;
use AI::NNEasy::NN::feedforward ;
use AI::NNEasy::NN::backprop ;
use vars qw($AUTOLOAD) ;
sub NN {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $params = shift(@_) ;
my $netParams = shift(@_) ;
my @layers ;
foreach my $i (keys %$netParams) {
$this->{$i} = $$netParams{$i};
}
lib/AI/NNEasy/NN.pm
if ( $this->{bias} ) {
$this->{biasNode} = AI::NNEasy::NN::node->new( {activation_function => 'linear'} ) ;
$this->{biasNode}->{activation} = 1;
}
$this->init ;
return $this ;
}
sub init {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my @layers = @{$this->{layers}} ;
my $currentLayer = 0 ;
foreach my $layer (@layers) {
# Foreach node we need to make connections east and west
foreach my $node ( @{$layer->{nodes}} ) {
lib/AI/NNEasy/NN.pm
foreach my $layer (@{$this->{layers}}) {
foreach my $node (@{$layer->{nodes}}) {
push @{$node->{connectedNodesWest}->{nodes}},$this->{biasNode};
my $weight = $this->{random_weights} ? rand(1) : 0 ;
$node->{connectedNodesWest}->{weights}{ $this->{biasNode}->{nodeid} } = $weight ;
}
}
}
}
sub learn {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
&{$this->{learning_algorithm_class} . '::learn'}($this , @_) ;
}
sub output {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $params = shift(@_) ;
my $outputLayer = defined $$params{layer} ? $this->{layers}[$$params{layer}] : $this->{layers}[-1] ;
return AI::NNEasy::NN::layer::layer_output($outputLayer) ;
}
sub linear { my $this = ref($_[0]) ? shift : undef ;my $CLASS = ref($this) || __PACKAGE__ ;my $value = shift(@_) ; return $value ;}
*tanh = \&tanh_c ;
sub tanh_pl {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $value = shift(@_) ;
if ($value > 20) { return 1 ;}
elsif ($value < -20) { return -1 ;}
else {
my $x = exp($value) ;
my $y = exp(-$value) ;
return ($x-$y)/($x+$y) ;
}
}
*sigmoid = \&sigmoid_c ;
sub sigmoid_pl {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $value = shift(@_) ;
return (1+exp(-$value))**-1 ;
}
sub AUTOLOAD {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my ($name) = ( $AUTOLOAD =~ /(\w+)$/ ) ;
my $sub = $this->{learning_algorithm_class} . '::' . $name ;
return &$sub($this,@_) if defined &$sub ;
my @call = caller ;
die("Can't find $AUTOLOAD or $sub at @call\n") ;
}
my $INLINE_INSTALL ; BEGIN { use Config ; my @installs = ($Config{installarchlib} , $Config{installprivlib} , $Config{installsitelib}) ; foreach my $i ( @installs ) { $i =~ s/[\\\/]/\//gs ;} $INLINE_INSTALL = 1 if ( __FILE__ =~ /\.pm$/ && ( join(" ",...
use Inline C => <<'__INLINE_C_SRC__' , ( $INLINE_INSTALL ? (NAME => 'AI::NNEasy::NN' , VERSION => '0.06') : () ) ;
double tanh_c ( SV* self , double value ) {
if ( value > 20 ) { return 1 ;}
else if ( value < -20 ) { return -1 ;}
else {
lib/AI/NNEasy/NN.pm
{ package AI::NNEasy::NN::feedforward_backprop ;
use strict qw(vars) ; no warnings ;
use vars qw(%CLASS_HPLOO @ISA) ;
push(@ISA , qw(AI::NNEasy::NN::backprop AI::NNEasy::NN::feedforward AI::NNEasy::NN Class::HPLOO::Base UNIVERSAL)) ;
my $CLASS = 'AI::NNEasy::NN::feedforward_backprop' ; sub __CLASS__ { 'AI::NNEasy::NN::feedforward_backprop' } ;
use Class::HPLOO::Base ;
## Just define a class with this @ISA.
}
1;
lib/AI/NNEasy/NN/backprop.hploo
#define FETCH_ATTR_HV(hv,k) (HV*) FETCH_ATTR(hv,k)
#define FETCH_ATTR_AV(hv,k) (AV*) FETCH_ATTR(hv,k)
#define FETCH_ATTR_HV_REF(hv,k) (HV*) SvRV( FETCH_ATTR(hv,k) )
#define FETCH_ATTR_AV_REF(hv,k) (AV*) SvRV( FETCH_ATTR(hv,k) )
#define FETCH_ELEM(av,i) *av_fetch(av,i,0)
#define FETCH_ELEM_HV_REF(av,i) (HV*) SvRV( FETCH_ELEM(av,i) )
#define FETCH_ELEM_AV_REF(av,i) (AV*) SvRV( FETCH_ELEM(av,i) )
__[C]__
sub calc_error ($outputPatternRef) {
my @outputPattern = @$outputPatternRef;
my $outputLayer = $this->{layers}->[-1]->{nodes} ;
return 0 if @$outputLayer != @outputPattern ;
my $counter = 0 ;
foreach my $node (@$outputLayer) {
$node->{error} = $node->{activation} - $outputPattern[$counter] ;
++$counter ;
}
}
sub learn ($outputPatternRef) {
$this->calc_error($outputPatternRef) ;
$this->hiddenToOutput ;
$this->hiddenOrInputToHidden if @{$this->{layers}} > 2 ;
return $this->RMSErr($outputPatternRef) ;
}
*hiddenToOutput = \&hiddenToOutput_c ;
sub hiddenToOutput_pl {
foreach my $node ( @{ $this->{layers}->[-1]->{nodes} } ) {
  my $weights = $node->{connectedNodesWest}->{weights} ;
  foreach my $connectedNode ( @{ $node->{connectedNodesWest}->{nodes} } ) {
    my $w = $weights->{ $connectedNode->{nodeid} }
          - $this->{learning_rate} * $node->{error} * $connectedNode->{activation} ;
    $w = 5 if $w > 5 ;    ## clamp the updated weight to [-5,5]
    $w = -5 if $w < -5 ;
    $weights->{ $connectedNode->{nodeid} } = $w ;
}
}
}
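The update above is the delta rule for the output layer, with every weight clamped to [-5, 5]: with learning rate \eta, output-node error \delta_j and west-node activation a_i,

    w_{ij} \leftarrow \mathrm{clamp}\!\left( w_{ij} - \eta \, \delta_j \, a_i ,\ -5 ,\ 5 \right)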
sub[C] void hiddenToOutput_c( SV* self ) {
lib/AI/NNEasy/NN/backprop.hploo
else if ( val < -5 ) { val = -5 ;}
sv_setnv(weight , val) ;
}
}
}
*hiddenOrInputToHidden = \&hiddenOrInputToHidden_c ;
sub hiddenOrInputToHidden_pl {
my ( $nodeid , $nodeError , $nodeActivation ) ;
my $learningRate = $this->{learning_rate} ;
foreach my $layer ( reverse @{$this->{layers}}[0 .. $#{$this->{layers}}-1 ] ) {
foreach my $node ( @{$layer->{nodes}} ) {
last if !$node->{connectedNodesWest} ;
$nodeid = $node->{nodeid} ;
lib/AI/NNEasy/NN/backprop.hploo
double weight = SvNV(weight_prev) - ( (1 - (nodeActivation*nodeActivation)) * nodeError * learningRate * FETCH_ATTR_NV(connectedNode , "activation") ) ;
sv_setnv(weight_prev , weight) ;
}
}
}
}
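The factor (1 - a^2) in the C fragment above is the derivative of tanh, so the hidden-layer update is ordinary backpropagation through tanh units:

    w_{ij} \leftarrow w_{ij} - \eta \, (1 - a_j^2) \, \delta_j \, a_i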
*RMSErr = \&RMSErr_c ;
sub RMSErr_pl ($outputPatternRef) {
my $outputLayer = $this->{layers}->[-1]->{nodes} ;
my $sqrErr = 0 ;
my $counter = 0 ;
foreach my $node (@$outputLayer) {
$sqrErr += ($node->{activation} - $$outputPatternRef[$counter])**2 ;
++$counter ;
}
my $error = sqrt($sqrErr) ;
lib/AI/NNEasy/NN/backprop.pm
use vars qw(%CLASS_HPLOO @ISA $VERSION) ;
$VERSION = '0.06' ;
@ISA = qw(Class::HPLOO::Base UNIVERSAL) ;
my $CLASS = 'AI::NNEasy::NN::backprop' ; sub __CLASS__ { 'AI::NNEasy::NN::backprop' } ;
use Class::HPLOO::Base ;
sub calc_error {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $outputPatternRef = shift(@_) ;
my @outputPattern = @$outputPatternRef;
my $outputLayer = $this->{layers}->[-1]->{nodes} ;
return 0 if @$outputLayer != @outputPattern ;
my $counter = 0 ;
foreach my $node (@$outputLayer) {
$node->{error} = $node->{activation} - $outputPattern[$counter] ;
++$counter ;
}
}
sub learn {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $outputPatternRef = shift(@_) ;
$this->calc_error($outputPatternRef) ;
$this->hiddenToOutput ;
$this->hiddenOrInputToHidden if @{$this->{layers}} > 2 ;
return $this->RMSErr($outputPatternRef) ;
}
*hiddenToOutput = \&hiddenToOutput_c ;
sub hiddenToOutput_pl {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
foreach my $node ( @{ $this->{layers}->[-1]->{nodes} } ) {
  my $weights = $node->{connectedNodesWest}->{weights} ;
  foreach my $connectedNode ( @{ $node->{connectedNodesWest}->{nodes} } ) {
    my $w = $weights->{ $connectedNode->{nodeid} }
          - $this->{learning_rate} * $node->{error} * $connectedNode->{activation} ;
    $w = 5 if $w > 5 ;    ## clamp the updated weight to [-5,5]
    $w = -5 if $w < -5 ;
    $weights->{ $connectedNode->{nodeid} } = $w ;
}
}
}
*hiddenOrInputToHidden = \&hiddenOrInputToHidden_c ;
sub hiddenOrInputToHidden_pl {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my ( $nodeid , $nodeError , $nodeActivation ) ;
my $learningRate = $this->{learning_rate} ;
foreach my $layer ( reverse @{$this->{layers}}[0 .. $#{$this->{layers}}-1 ] ) {
foreach my $node ( @{$layer->{nodes}} ) {
last if !$node->{connectedNodesWest} ;
lib/AI/NNEasy/NN/backprop.pm
}
}
}
*RMSErr = \&RMSErr_c ;
sub RMSErr_pl {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $outputPatternRef = shift(@_) ;
my $outputLayer = $this->{layers}->[-1]->{nodes} ;
my $sqrErr = 0 ;
my $counter = 0 ;
foreach my $node (@$outputLayer) {
$sqrErr += ($node->{activation} - $$outputPatternRef[$counter])**2 ;
lib/AI/NNEasy/NN/feedforward.hploo
#define FETCH_ATTR_HV_REF(hv,k) (HV*) SvRV( FETCH_ATTR(hv,k) )
#define FETCH_ATTR_AV_REF(hv,k) (AV*) SvRV( FETCH_ATTR(hv,k) )
#define FETCH_ELEM(av,i) *av_fetch(av,i,0)
#define FETCH_ELEM_HV_REF(av,i) (HV*) SvRV( FETCH_ELEM(av,i) )
#define FETCH_ELEM_AV_REF(av,i) (AV*) SvRV( FETCH_ELEM(av,i) )
__[C]__
*run = \&run_c ;
sub run_pl ($inputPatternRef) {
# Now apply the activation
my $counter = 0 ;
foreach my $node ( @{ $this->{layers}->[0]->{nodes} } ) {
if ( $node->{active} ) {
if ( $node->{persistent_activation} ) {
$node->{activation} += $$inputPatternRef[$counter] ;
}
else {
$node->{activation} = $$inputPatternRef[$counter] ;
}
lib/AI/NNEasy/NN/feedforward.pm
use vars qw(%CLASS_HPLOO @ISA $VERSION) ;
$VERSION = '0.06' ;
push(@ISA , qw(AI::NNEasy::NN Class::HPLOO::Base UNIVERSAL)) ;
my $CLASS = 'AI::NNEasy::NN::feedforward' ; sub __CLASS__ { 'AI::NNEasy::NN::feedforward' } ;
use Class::HPLOO::Base ;
*run = \&run_c ;
sub run_pl {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $inputPatternRef = shift(@_) ;
# Now apply the activation
my $counter = 0 ;
foreach my $node ( @{ $this->{layers}->[0]->{nodes} } ) {
if ( $node->{active} ) {
if ( $node->{persistent_activation} ) {
$node->{activation} += $$inputPatternRef[$counter] ;
lib/AI/NNEasy/NN/layer.hploo
## Licence: This program is free software; you can redistribute it and/or
## modify it under the same terms as Perl itself
#############################################################################
use Class::HPLOO qw(base alloo) ;
class AI::NNEasy::NN::layer {
use AI::NNEasy::NN::node ;
sub layer ($params) {
$this->{nodes} = [] ;
for (1 .. $$params{nodes}) { push( @{$this->{nodes}} , AI::NNEasy::NN::node->new($params) ) ;}
return $this ;
}
sub layer_output ($params) {
my @outputs ;
foreach my $node ( @{$this->{nodes}} ) {
push(@outputs , $$node{activation}) ;
}
return \@outputs;
}
}
lib/AI/NNEasy/NN/layer.pm
{ package AI::NNEasy::NN::layer ;
use strict qw(vars) ; no warnings ;
use vars qw(%CLASS_HPLOO @ISA) ;
@ISA = qw(Class::HPLOO::Base UNIVERSAL) ;
my $CLASS = 'AI::NNEasy::NN::layer' ; sub __CLASS__ { 'AI::NNEasy::NN::layer' } ;
use Class::HPLOO::Base ;
use AI::NNEasy::NN::node ;
sub layer {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $params = shift(@_) ;
$this->{nodes} = [] ;
for (1 .. $$params{nodes}) { push( @{$this->{nodes}} , AI::NNEasy::NN::node->new($params) ) ;}
return $this ;
}
sub layer_output {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $params = shift(@_) ;
my @outputs ;
foreach my $node ( @{$this->{nodes}} ) {
push(@outputs , $$node{activation}) ;
}
return \@outputs;
lib/AI/NNEasy/NN/node.hploo
## Licence: This program is free software; you can redistribute it and/or
## modify it under the same terms as Perl itself
#############################################################################
use Class::HPLOO qw(base alloo) ;
class AI::NNEasy::NN::node {
my $NODEID ;
sub node ($params) {
$this->{nodeid} = ++$NODEID ;
$this->{activation} = $$params{random_activation} ? rand($$params{random}) : 0 ;
$this->{random_weights} = $$params{random_weights} ;
$this->{decay} = $$params{decay} ;
$this->{adjust_error} = $$params{adjust_error} ;
$this->{persistent_activation} = $$params{persistent_activation} ;
$this->{threshold} = $$params{threshold} ;
$this->{activation_function} = $$params{activation_function} ;
lib/AI/NNEasy/NN/node.pm
{ package AI::NNEasy::NN::node ;
use strict qw(vars) ; no warnings ;
use vars qw(%CLASS_HPLOO @ISA) ;
@ISA = qw(Class::HPLOO::Base UNIVERSAL) ;
my $CLASS = 'AI::NNEasy::NN::node' ; sub __CLASS__ { 'AI::NNEasy::NN::node' } ;
use Class::HPLOO::Base ;
my $NODEID ;
sub node {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $params = shift(@_) ;
$this->{nodeid} = ++$NODEID ;
$this->{activation} = $$params{random_activation} ? rand($$params{random}) : 0 ;
$this->{random_weights} = $$params{random_weights} ;
$this->{decay} = $$params{decay} ;
lib/AI/NNEasy/NN/reinforce.hploo
## RCS-ID:
## Copyright: (c) 2005 Graciliano M. P.
## Licence: This program is free software; you can redistribute it and/or
## modify it under the same terms as Perl itself
#############################################################################
use Class::HPLOO qw(base alloo) ;
class AI::NNEasy::NN::reinforce {
sub learn {
foreach my $layer ( reverse @{$this->{'layers'}}[ 1 .. $#{$this->{'layers'}} ] ) {
foreach my $node ( @{$layer->{nodes}} ) {
foreach my $westNode ( @{$node->{connectedNodesWest}->{nodes}} ) {
my $dW = $westNode->{activation} * $node->{connectedNodesWest}->{weights}->{ $westNode->{nodeid} } * $this->{learning_rate} ;
$node->{connectedNodesWest}->{weights}->{ $westNode->{nodeid} } += $dW ;
}
}
}
}
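The loop above is a simple Hebbian-style reinforcement rule: each weight grows in proportion to its current value and its source node's activation,

    w_{ij} \leftarrow w_{ij} + \eta \, a_i \, w_{ij}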
lib/AI/NNEasy/NN/reinforce.pm
{ package AI::NNEasy::NN::reinforce ;
use strict qw(vars) ; no warnings ;
use vars qw(%CLASS_HPLOO @ISA) ;
@ISA = qw(Class::HPLOO::Base UNIVERSAL) ;
my $CLASS = 'AI::NNEasy::NN::reinforce' ; sub __CLASS__ { 'AI::NNEasy::NN::reinforce' } ;
use Class::HPLOO::Base ;
sub learn {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
foreach my $layer ( reverse @{$this->{'layers'}}[ 1 .. $#{$this->{'layers'}} ] ) {
foreach my $node ( @{$layer->{nodes}} ) {
foreach my $westNode ( @{$node->{connectedNodesWest}->{nodes}} ) {
my $dW = $westNode->{activation} * $node->{connectedNodesWest}->{weights}->{ $westNode->{nodeid} } * $this->{learning_rate} ;
$node->{connectedNodesWest}->{weights}->{ $westNode->{nodeid} } += $dW ;
}
}