lib/AI/NNEasy.hploo
}
}
return ;
}
sub save ($file) {
$file ||= $this->{FILE} ;
my $dump = freeze( {%$this} ) ;
open (my $fh , '>' , $file) ;
print $fh $dump ;
close ($fh) ;
}
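## Reading the dump back is the reverse of save() above: slurp the file and thaw the hash.
## A minimal sketch, assuming a hypothetical load($file) method and Storable::thaw();
## the module's real loading code is not shown in this excerpt.
sub load ($file) {
$file ||= $this->{FILE} ;
open (my $fh , '<' , $file) or return ;
my $dump = do { local $/ ; <$fh> } ;
close ($fh) ;
my $restored = Storable::thaw($dump) ;
%$this = %$restored if $restored ;
return 1 ;
}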
sub learn ($in,$out,$n) {
$n ||= 100 ;
my $err ;
for (1..$n) {
$this->{NN}->run($in) ;
$err = $this->{NN}->learn($out) ;
lib/AI/NNEasy.hploo
}
*_learn_set_get_output_error = \&_learn_set_get_output_error_c ;
sub _learn_set_get_output_error_pl ($set , $error_ok , $ins_ok , $verbose) {
for (my $i = 0 ; $i < @$set ; $i+=2) {
$this->{NN}->run($$set[$i]) ;
$this->{NN}->learn($$set[$i+1]) ;
}
my ($err,$learn_ok,$print) ;
for (my $i = 0 ; $i < @$set ; $i+=2) {
$this->{NN}->run($$set[$i]) ;
my $er = $this->{NN}->RMSErr($$set[$i+1]) ;
$er *= -1 if $er < 0 ;
++$learn_ok if $er < $error_ok ;
$err += $er ;
$print .= join(' ',@{$$set[$i]}) ." => ". join(' ',@{$$set[$i+1]}) ." > $er\n" if $verbose ;
}
$err /= $ins_ok ;
return ( $err , $learn_ok , $print ) ;
}
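## Summary of the helper above: one learning pass is made over every in/out pair in the set,
## then each input is re-run and its absolute RMS error accumulated. The return value is the
## triple ( total error divided by $ins_ok , count of samples already below $error_ok ,
## verbose report string or undef ).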
sub[C] SV* _av_join( AV* av ) {
SV* ret = sv_2mortal(newSVpv("",0)) ;
int i ;
for (i = 0 ; i <= av_len(av) ; ++i) {
SV* elem = *av_fetch(av, i ,0) ;
if (i > 0) sv_catpv(ret , " ") ;
sv_catsv(ret , elem) ;
}
lib/AI/NNEasy.hploo
}
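/* _av_join() above is the C-side analogue of Perl's join(' ', @array); it is used below to
   format the input and output vectors for the verbose report lines. */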
sub[C] void _learn_set_get_output_error_c( SV* self , SV* set , double error_ok , int ins_ok , bool verbose ) {
dXSARGS;
STRLEN len;
int i ;
HV* self_hv = OBJ_HV( self );
AV* set_av = OBJ_AV( set ) ;
SV* nn = FETCH_ATTR(self_hv , "NN") ;
SV* print_verbose = verbose ? sv_2mortal(newSVpv("",0)) : NULL ;
SV* ret ;
double err = 0 ;
double er = 0 ;
int learn_ok = 0 ;
for (i = 0 ; i <= av_len(set_av) ; i+=2) {
SV* set_in = *av_fetch(set_av, i ,0) ;
SV* set_out = *av_fetch(set_av, i+1 ,0) ;
PUSHMARK(SP) ;
lib/AI/NNEasy.hploo
PUTBACK ;
call_method("RMSErr", G_SCALAR) ;
SPAGAIN ;
ret = POPs ;
er = SvNV(ret) ;
if (er < 0) er *= -1 ;
if (er < error_ok) ++learn_ok ;
err += er ;
if ( verbose ) sv_catpvf(print_verbose , "%s => %s > %f\n" ,
SvPV( _av_join( OBJ_AV(set_in) ) , len) ,
SvPV( _av_join( OBJ_AV(set_out) ) , len) ,
er
) ;
}
err /= ins_ok ;
if (verbose) {
EXTEND(SP , 3) ;
ST(0) = sv_2mortal(newSVnv(err)) ;
ST(1) = sv_2mortal(newSViv(learn_ok)) ;
ST(2) = print_verbose ;
XSRETURN(3) ;
}
else {
EXTEND(SP , 2) ;
ST(0) = sv_2mortal(newSVnv(err)) ;
ST(1) = sv_2mortal(newSViv(learn_ok)) ;
XSRETURN(2) ;
}
}
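## The C function above mirrors _learn_set_get_output_error_pl and is the variant bound to
## _learn_set_get_output_error by the glob alias earlier in this file. A hedged sketch of
## selecting the pure-Perl variant instead (the module's actual fallback logic is not shown here):
## *_learn_set_get_output_error = \&_learn_set_get_output_error_pl ;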
lib/AI/NNEasy.hploo
($limit,$err_static_limit,$err_static_limit_positive) = @$limit ;
}
$limit ||= 30000 ;
$err_static_limit_positive ||= $err_static_limit/2 ;
my $error_ok = $this->{ERROR_OK} ;
my $check_diff_count = 1000 ;
my ($learn_ok,$counter,$err,$err_last,$err_count,$err_static, $reset_count1 , $reset_count2 ,$print) ;
$learn_ok = $counter = $err_last = $err_count = $err_static = $reset_count1 = $reset_count2 = 0 ; ## initialize to avoid uninitialized-value warnings
while ( ($learn_ok < $ins_ok) && ($counter < $limit) ) {
($err , $learn_ok , $print) = $this->_learn_set_get_output_error(\@set , $error_ok , $ins_ok , $verbose) ;
++$counter ;
if ( !($counter % 100) || $learn_ok == $ins_ok ) {
my $err_diff = $err_last - $err ;
$err_diff *= -1 if $err_diff < 0 ;
$err_count += $err_diff ;
++$err_static if $err_diff <= 0.00001 || $err > 1 ;
print "err_static = $err_static\n" if $verbose && $err_static ;
$err_last = $err ;
my $reseted ;
if ( $err_static >= $err_static_limit || ($err > 1 && $err_static >= $err_static_limit_positive) ) {
$err_static = 0 ;
$counter -= 2000 ;
$reseted = 1 ;
++$reset_count1 ;
if ( ( $reset_count1 + $reset_count2 ) > 2 ) {
$reset_count1 = $reset_count2 = 0 ;
print "** Reseting NN...\n" if $verbose ;
$this->reset_nn ;
}
else {
print "** Reseting weights due NULL diff...\n" if $verbose ;
$this->{NN}->init ;
}
}
if ( !($counter % $check_diff_count) ) {
$err_count /= ($check_diff_count/100) ;
print "ERR COUNT> $err_count\n" if $verbose ;
if ( !$reseted && $err_count < 0.001 ) {
$err_static = 0 ;
$counter -= 1000 ;
++$reset_count2 ;
if ( ($reset_count1 + $reset_count2) > 2 ) {
$reset_count1 = $reset_count2 = 0 ;
print "** Reseting NN...\n" if $verbose ;
$this->reset_nn ;
}
else {
print "** Reseting weights due LOW diff...\n" if $verbose ;
$this->{NN}->init ;
}
}
$err_count = 0 ;
}
if ( $verbose ) {
print "\nepoch $counter : error_ok = $error_ok : error = $err : err_diff = $err_diff : err_static = $err_static : ok = $learn_ok\n" ;
print $print ;
}
}
print "epoch $counter : error = $err : ok = $learn_ok\n" if $verbose > 1 ;
}
}
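## A hedged usage sketch for learn_set(): the bundled samples call it as
##   $nn->learn_set( \@set , undef , undef , 1 ) ;
## and, per the destructuring near the top of this method, the limit argument may also be an
## arrayref [ $limit , $err_static_limit , $err_static_limit_positive ]. The argument order
## (set, ok-outputs, limit, verbose) is assumed from the samples, not restated in this excerpt:
##   $nn->learn_set( \@set , undef , [ 30000 , 200 , 100 ] , 1 ) ;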
sub get_set_error (\@set,$ins_ok) {
my $ins_sz = @set / 2 ;
$ins_ok ||= $ins_sz ;
lib/AI/NNEasy.hploo
## If the set error is bigger than the maximum acceptable error, let's learn this set:
if ( $set_err > $ERR_OK ) {
$nn->learn_set( \@set ) ;
## Save the NN:
$nn->save ;
}
## Use the NN:
my $out = $nn->run_get_winner([0,0]) ;
print "0 0 => @$out\n" ; ## 0 0 => 0
$out = $nn->run_get_winner([0,1]) ;
print "0 1 => @$out\n" ; ## 0 1 => 1
$out = $nn->run_get_winner([1,0]) ;
print "1 0 => @$out\n" ; ## 1 0 => 1
$out = $nn->run_get_winner([1,1]) ;
print "1 1 => @$out\n" ; ## 1 1 => 0
## or just iterate through the @set:
for (my $i = 0 ; $i < @set ; $i+=2) {
my $out = $nn->run_get_winner($set[$i]) ;
print "@{$set[$i]}) => @$out\n" ;
}
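The construction of $nn is elided from the excerpt above; a minimal sketch that follows the parameter order documented for new() below, with illustrative values only (a 2-input, 1-output network with outputs 0/1 and acceptable error 0.1):
my $nn = AI::NNEasy->new( 'xor.nne' , [0,1] , 0.1 , 2 , 1 ) ;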
=> METHODS
==> new ( FILE , @OUTPUT_TYPES , ERROR_OK , IN_SIZE , OUT_SIZE , @HIDDEN_LAYERS , %CONF )
*> FILE
The file path to save the NN. Default: 'nneasy.nne'.
*> @OUTPUT_TYPES
lib/AI/NNEasy.pm
sub save {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $file = shift(@_) ;
$file ||= $this->{FILE} ;
my $dump = freeze( {%$this} ) ;
open (my $fh , '>' , $file) ;
print $fh $dump ;
close ($fh) ;
}
sub learn {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $in = shift(@_) ;
my $out = shift(@_) ;
my $n = shift(@_) ;
lib/AI/NNEasy.pm
my $set = shift(@_) ;
my $error_ok = shift(@_) ;
my $ins_ok = shift(@_) ;
my $verbose = shift(@_) ;
for (my $i = 0 ; $i < @$set ; $i+=2) {
$this->{NN}->run($$set[$i]) ;
$this->{NN}->learn($$set[$i+1]) ;
}
my ($err,$learn_ok,$print) ;
for (my $i = 0 ; $i < @$set ; $i+=2) {
$this->{NN}->run($$set[$i]) ;
my $er = $this->{NN}->RMSErr($$set[$i+1]) ;
$er *= -1 if $er < 0 ;
++$learn_ok if $er < $error_ok ;
$err += $er ;
$print .= join(' ',@{$$set[$i]}) ." => ". join(' ',@{$$set[$i+1]}) ." > $er\n" if $verbose ;
}
$err /= $ins_ok ;
return ( $err , $learn_ok , $print ) ;
}
sub learn_set {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
lib/AI/NNEasy.pm
($limit,$err_static_limit,$err_static_limit_positive) = @$limit ;
}
$limit ||= 30000 ;
$err_static_limit_positive ||= $err_static_limit/2 ;
my $error_ok = $this->{ERROR_OK} ;
my $check_diff_count = 1000 ;
my ($learn_ok,$counter,$err,$err_last,$err_count,$err_static, $reset_count1 , $reset_count2 ,$print) ;
$learn_ok = $counter = $err_last = $err_count = $err_static = $reset_count1 = $reset_count2 = 0 ; ## initialize to avoid uninitialized-value warnings
while ( ($learn_ok < $ins_ok) && ($counter < $limit) ) {
($err , $learn_ok , $print) = $this->_learn_set_get_output_error(\@set , $error_ok , $ins_ok , $verbose) ;
++$counter ;
if ( !($counter % 100) || $learn_ok == $ins_ok ) {
my $err_diff = $err_last - $err ;
$err_diff *= -1 if $err_diff < 0 ;
$err_count += $err_diff ;
++$err_static if $err_diff <= 0.00001 || $err > 1 ;
print "err_static = $err_static\n" if $verbose && $err_static ;
$err_last = $err ;
my $reseted ;
if ( $err_static >= $err_static_limit || ($err > 1 && $err_static >= $err_static_limit_positive) ) {
$err_static = 0 ;
$counter -= 2000 ;
$reseted = 1 ;
++$reset_count1 ;
if ( ( $reset_count1 + $reset_count2 ) > 2 ) {
$reset_count1 = $reset_count2 = 0 ;
print "** Reseting NN...\n" if $verbose ;
$this->reset_nn ;
}
else {
print "** Reseting weights due NULL diff...\n" if $verbose ;
$this->{NN}->init ;
}
}
if ( !($counter % $check_diff_count) ) {
$err_count /= ($check_diff_count/100) ;
print "ERR COUNT> $err_count\n" if $verbose ;
if ( !$reseted && $err_count < 0.001 ) {
$err_static = 0 ;
$counter -= 1000 ;
++$reset_count2 ;
if ( ($reset_count1 + $reset_count2) > 2 ) {
$reset_count1 = $reset_count2 = 0 ;
print "** Reseting NN...\n" if $verbose ;
$this->reset_nn ;
}
else {
print "** Reseting weights due LOW diff...\n" if $verbose ;
$this->{NN}->init ;
}
}
$err_count = 0 ;
}
if ( $verbose ) {
print "\nepoch $counter : error_ok = $error_ok : error = $err : err_diff = $err_diff : err_static = $err_static : ok = $learn_ok\n" ;
print $print ;
}
}
print "epoch $counter : error = $err : ok = $learn_ok\n" if $verbose > 1 ;
}
}
sub get_set_error {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
my $ins_ok = shift(@_) ;
lib/AI/NNEasy.pm
}
void _learn_set_get_output_error_c( SV* self , SV* set , double error_ok , int ins_ok , bool verbose ) {
dXSARGS;
STRLEN len;
int i ;
HV* self_hv = OBJ_HV( self );
AV* set_av = OBJ_AV( set ) ;
SV* nn = FETCH_ATTR(self_hv , "NN") ;
SV* print_verbose = verbose ? sv_2mortal(newSVpv("",0)) : NULL ;
SV* ret ;
double err = 0 ;
double er = 0 ;
int learn_ok = 0 ;
for (i = 0 ; i <= av_len(set_av) ; i+=2) {
SV* set_in = *av_fetch(set_av, i ,0) ;
SV* set_out = *av_fetch(set_av, i+1 ,0) ;
PUSHMARK(SP) ;
lib/AI/NNEasy.pm
PUTBACK ;
call_method("RMSErr", G_SCALAR) ;
SPAGAIN ;
ret = POPs ;
er = SvNV(ret) ;
if (er < 0) er *= -1 ;
if (er < error_ok) ++learn_ok ;
err += er ;
if ( verbose ) sv_catpvf(print_verbose , "%s => %s > %f\n" ,
SvPV( _av_join( OBJ_AV(set_in) ) , len) ,
SvPV( _av_join( OBJ_AV(set_out) ) , len) ,
er
) ;
}
err /= ins_ok ;
if (verbose) {
EXTEND(SP , 3) ;
ST(0) = sv_2mortal(newSVnv(err)) ;
ST(1) = sv_2mortal(newSViv(learn_ok)) ;
ST(2) = print_verbose ;
XSRETURN(3) ;
}
else {
EXTEND(SP , 2) ;
ST(0) = sv_2mortal(newSVnv(err)) ;
ST(1) = sv_2mortal(newSViv(learn_ok)) ;
XSRETURN(2) ;
}
}
lib/AI/NNEasy.pm
## If the set error is bigger than the maximum acceptable error, let's learn this set:
if ( $set_err > $ERR_OK ) {
$nn->learn_set( \@set ) ;
## Save the NN:
$nn->save ;
}
## Use the NN:
my $out = $nn->run_get_winner([0,0]) ;
print "0 0 => @$out\n" ; ## 0 0 => 0
$out = $nn->run_get_winner([0,1]) ;
print "0 1 => @$out\n" ; ## 0 1 => 1
$out = $nn->run_get_winner([1,0]) ;
print "1 0 => @$out\n" ; ## 1 0 => 1
$out = $nn->run_get_winner([1,1]) ;
print "1 1 => @$out\n" ; ## 1 1 => 0
## or just iterate through the @set:
for (my $i = 0 ; $i < @set ; $i+=2) {
my $out = $nn->run_get_winner($set[$i]) ;
print "@{$set[$i]}) => @$out\n" ;
}
=head1 METHODS
=head2 new ( FILE , @OUTPUT_TYPES , ERROR_OK , IN_SIZE , OUT_SIZE , @HIDDEN_LAYERS , %CONF )
=over 4
=item FILE
samples/test-nn-nonbool.pl
[0,0] => [0],
[0,0.5] => [0.2],
[0,1] => [0.4],
[0.5,0] => [0.6],
[0.5,0.5] => [0.8],
[0.5,1] => [1],
);
my $set_err = $nn->get_set_error(\@set) ;
print "SET ERROR NOW: $set_err\n" ;
while ( $set_err > $nn->{ERROR_OK} ) {
$nn->learn_set( \@set , undef , undef , 1) ;
$set_err = $nn->get_set_error(\@set) ;
}
$nn->save ;
print "-------------------------------------------\n" ;
print "ERR_OK: $nn->{ERROR_OK}\n" ;
print "-------------------------------------------\n" ;
my @in = ( 0.3 , 0.5 ) ;
my $out = $nn->run(\@in) ;
my $out_win = $nn->run_get_winner(\@in) ;
print "@in => @$out_win > @$out\n" ;
print "-------------------------------------------\n" ;
for (my $i = 0 ; $i < @set ; $i+=2) {
my $out = $nn->run($set[$i]) ;
my $out_win = $nn->run_get_winner($set[$i]) ;
print "@{$set[$i]}) => @$out_win > @$out\n" ;
}
samples/test-nn-xor.pl
my @set = (
[0,0] => [0],
[0,1] => [1],
[1,0] => [1],
[1,1] => [0],
);
my $set_err = $nn->get_set_error(\@set) ;
print "SET ERROR NOW: $set_err\n" ;
while ( $set_err > $nn->{ERROR_OK} ) {
$nn->learn_set( \@set , undef , undef , 1) ;
$set_err = $nn->get_set_error(\@set) ;
}
$nn->save ;
print "-------------------------------------------\n" ;
print "ERR_OK: $nn->{ERROR_OK}\n" ;
print "-------------------------------------------\n" ;
my @in = ( 0.9 , 1 ) ;
my $out = $nn->run(\@in) ;
my $out_win = $nn->run_get_winner(\@in) ;
print "@in => @$out_win > @$out\n" ;
print "-------------------------------------------\n" ;
for (my $i = 0 ; $i < @set ; $i+=2) {
my $out = $nn->run($set[$i]) ;
my $out_win = $nn->run_get_winner($set[$i]) ;
print "@{$set[$i]}) => @$out_win > @$out\n" ;
}
#########################
###use Data::Dumper ; print Dumper( ) ;
use Test;
BEGIN { plan tests => 10 } ;
use AI::NNEasy ;
use strict ;
use warnings qw'all' ;
#########################
ok( $$out_ok[0] , $$out_win[0] ) ;
}
$set_err = $nn->get_set_error(\@set) ;
ok( $set_err < $ERR_OK ) ;
}
#########################
print "\nThe End! By!\n" ;
1 ;