AI-NNEasy


Changes

Revision history for Perl extension AI::NNEasy.

0.06  2005-01-16
      - Added reinforcement learning algorithm.
      - Added check for errors bigger than 1 at learn_set().
      - Fixed some memory leaks for non-mortal SV*.

0.05  2005-01-15
      - Fixed default values for layers, especially the activation
        function for the output layer, which works better as linear.
      - Added samples.
      - Changed some internal values for learn_set() to learn faster.
      - More XS support: AI::NNEasy::NN::backprop::RMSErr_c

0.04  2005-01-15

lib/AI/NNEasy.hploo

#define FETCH_ATTR_AV(hv,k)	(AV*) FETCH_ATTR(hv,k)
#define FETCH_ATTR_SV_REF(hv,k)	SvRV( FETCH_ATTR(hv,k) )
#define FETCH_ATTR_HV_REF(hv,k)	(HV*) SvRV( FETCH_ATTR(hv,k) )
#define FETCH_ATTR_AV_REF(hv,k)	(AV*) SvRV( FETCH_ATTR(hv,k) )

#define FETCH_ELEM(av,i)		*av_fetch(av,i,0)
#define FETCH_ELEM_HV_REF(av,i)	(HV*) SvRV( FETCH_ELEM(av,i) )
#define FETCH_ELEM_AV_REF(av,i)	(AV*) SvRV( FETCH_ELEM(av,i) )
__[C]__

  sub NNEasy ($file , \@out_types , $error_ok , $in , $out , \@layers , $conf) {
    $file ||= 'nneasy.nne' ;
  
    if ( $this->load($file) ) {
      return $this ;
    }
  
    my $in_sz  = ref $in  ? $in->{nodes}  : $in ;
    my $out_sz = ref $out ? $out->{nodes} : $out ;
  
    @layers = ($in_sz+$out_sz) if !@layers ; ## default: a single hidden layer with IN+OUT nodes

lib/AI/NNEasy.hploo

    $this->{NN} = AI::NNEasy::NN->new( @{$this->{NN_ARGS}} ) ;
    
    $this->{FILE} = $file ;
    
    @out_types = (0,1) if !@out_types ;
    
    @out_types = sort {$a <=> $b} @out_types ;
    
    $this->{OUT_TYPES} = \@out_types ;
    
    ## Default ERROR_OK: half of the minimal gap between 2 output types, minus 10%:
    if ( $error_ok <= 0 ) {
      my ($min_dif , $last) ;
      my $i = -1 ;
      foreach my $out_types_i ( @out_types ) {
        ++$i ;
        if ($i > 0) {
          my $dif = $out_types_i - $last ;
          $min_dif = $dif if !defined $min_dif || $dif < $min_dif ;
        }
        $last = $out_types_i ;
      }
      $error_ok = $min_dif / 2 ;
      $error_ok -= $error_ok*0.1 ;
    }
    
    $this->{ERROR_OK} = $error_ok ;
    
    return $this ;
  }
  
  sub _layer_conf ($def,$conf) {
    $def ||= {} ;
    $conf = { nodes=>$conf } if !ref($conf) ;
    
    foreach my $Key ( keys %$def ) { $$conf{$Key} = $$def{$Key} if !exists $$conf{$Key} ;}
  

lib/AI/NNEasy.hploo

        
    my $dump = freeze( {%$this} ) ;
    open (my $fh , '>' , $this->{FILE}) or die "Can't save NN to $this->{FILE}: $!" ;
    print $fh $dump ;
    close ($fh) ;
  }
  
  sub learn ($in,$out,$n) {
    $n ||= 100 ;
    
    my $err ;
    for (1..$n) {
      $this->{NN}->run($in) ;
      $err = $this->{NN}->learn($out) ;
    }
    
    $err *= -1 if $err < 0 ;
    return $err ;
  }
  
  *_learn_set_get_output_error = \&_learn_set_get_output_error_c ; ## XS version; _learn_set_get_output_error_pl below is the pure-Perl fallback
  
  sub _learn_set_get_output_error_pl ($set , $error_ok , $ins_ok , $verbose) {
    for (my $i = 0 ; $i < @$set ; $i+=2) {
      $this->{NN}->run($$set[$i]) ;
      $this->{NN}->learn($$set[$i+1]) ;
    }

    my ($err,$learn_ok,$print) ;
    for (my $i = 0 ; $i < @$set ; $i+=2) {
      $this->{NN}->run($$set[$i]) ;
      my $er = $this->{NN}->RMSErr($$set[$i+1]) ;
      $er *= -1 if $er < 0 ;
      ++$learn_ok if $er < $error_ok ;
      $err += $er ;
      $print .= join(' ',@{$$set[$i]}) ." => ". join(' ',@{$$set[$i+1]}) ." > $er\n" if $verbose ;
    }
    
    $err /= $ins_ok ;
    
    return ( $err , $learn_ok , $print ) ;
  }
  
  sub[C] SV* _av_join( AV* av ) {
    SV* ret = sv_2mortal(newSVpv("",0)) ;
    int i ;
    for (i = 0 ; i <= av_len(av) ; ++i) {
      SV* elem = *av_fetch(av, i ,0) ;
      if (i > 0) sv_catpv(ret , " ") ;
      sv_catsv(ret , elem) ;
    }
    return ret ;
  }
  
  sub[C] void _learn_set_get_output_error_c( SV* self , SV* set , double error_ok , int ins_ok , bool verbose ) {
    dXSARGS;
    
    STRLEN len;
    int i ;
    HV* self_hv = OBJ_HV( self );
    AV* set_av = OBJ_AV( set ) ;
    SV* nn = FETCH_ATTR(self_hv , "NN") ;
    SV* print_verbose = verbose ? sv_2mortal(newSVpv("",0)) : NULL ;
    SV* ret ;
    double err = 0 ;
    double er = 0 ;
    int learn_ok = 0 ;
        
    for (i = 0 ; i <= av_len(set_av) ; i+=2) {
      SV* set_in = *av_fetch(set_av, i ,0) ;
      SV* set_out = *av_fetch(set_av, i+1 ,0) ;

      PUSHMARK(SP) ;
        XPUSHs( nn );
        XPUSHs( set_in );

lib/AI/NNEasy.hploo

      PUSHMARK(SP) ;
        XPUSHs( nn );
        XPUSHs( set_out );
      PUTBACK ;
      call_method("RMSErr", G_SCALAR) ;
      
      SPAGAIN ;
      ret = POPs ;
      er = SvNV(ret) ;
      if (er < 0) er *= -1 ;
      if (er < error_ok) ++learn_ok ;
      err += er ;
      
      if ( verbose ) sv_catpvf(print_verbose , "%s => %s > %f\n" ,
                       SvPV( _av_join( OBJ_AV(set_in) ) , len) ,
                       SvPV( _av_join( OBJ_AV(set_out) ) , len) ,
                       er
                     ) ;

    }
    
    err /= ins_ok ;

    if (verbose) {
      EXTEND(SP , 3) ;
        ST(0) = sv_2mortal(newSVnv(err)) ;
        ST(1) = sv_2mortal(newSViv(learn_ok)) ;
        ST(2) = print_verbose ;
      XSRETURN(3) ;
    }
    else {
      EXTEND(SP , 2) ;
        ST(0) = sv_2mortal(newSVnv(err)) ;
        ST(1) = sv_2mortal(newSViv(learn_ok)) ;
      XSRETURN(2) ;
    }
  }
    
  sub learn_set (\@set,$ins_ok,$limit,$verbose) {
    my $ins_sz = @set / 2 ;

    $ins_ok ||= $ins_sz ;
    
    my $err_static_limit = 15 ;
    my $err_static_limit_positive ;

    if ( ref($limit) eq 'ARRAY' ) {
      ($limit,$err_static_limit,$err_static_limit_positive) = @$limit ;
    }
    
    $limit ||= 30000 ;
    $err_static_limit_positive ||= $err_static_limit/2 ;
  
    my $error_ok = $this->{ERROR_OK} ;
    
    my $check_diff_count = 1000 ;
    
    my ($learn_ok,$counter,$err,$err_last,$err_count,$err_static, $reset_count1 , $reset_count2 ,$print) ;
    
    $err_static = 0 ;
    
    while ( ($learn_ok < $ins_ok) && ($counter < $limit) ) {
      ($err , $learn_ok , $print) = $this->_learn_set_get_output_error(\@set , $error_ok , $ins_ok , $verbose) ;
      
      ++$counter ;
      
      if ( !($counter % 100) || $learn_ok == $ins_ok ) {
        my $err_diff = $err_last - $err ;
        $err_diff *= -1 if $err_diff < 0 ;
        
        $err_count += $err_diff ;
        
        ++$err_static if $err_diff <= 0.00001 || $err > 1 ;
        
        print "err_static = $err_static\n" if $verbose && $err_static ;

        $err_last = $err ;
        
        my $reseted ;
        ## Error static for too long (or error stuck above 1): rewind the epoch
        ## counter and reinitialize the weights, or reset the whole NN:
        if ( $err_static >= $err_static_limit || ($err > 1 && $err_static >= $err_static_limit_positive) ) {
          $err_static = 0 ;
          $counter -= 2000 ;
          $reseted = 1 ;
          ++$reset_count1 ;
          
          if ( ( $reset_count1 + $reset_count2 ) > 2 ) {
            $reset_count1 = $reset_count2 = 0 ;
            print "** Resetting NN...\n" if $verbose ;
            $this->reset_nn ;
          }
          else {
            print "** Resetting weights due to NULL diff...\n" if $verbose ;
            $this->{NN}->init ;
          }
        }
        
        if ( !($counter % $check_diff_count) ) {
          $err_count /= ($check_diff_count/100) ;
          
          print "ERR COUNT> $err_count\n" if $verbose ;
          
          if ( !$reseted && $err_count < 0.001 ) {
            $err_static = 0 ;
            $counter -= 1000 ;
            ++$reset_count2 ;
            
            if ( ($reset_count1 + $reset_count2) > 2 ) {
              $reset_count1 = $reset_count2 = 0 ;
              print "** Resetting NN...\n" if $verbose ;
              $this->reset_nn ;
            }
            else {
              print "** Resetting weights due to LOW diff...\n" if $verbose ;
              $this->{NN}->init ;
            }
          }

          $err_count = 0 ;
        }
        
        if ( $verbose ) {
          print "\nepoch $counter : error_ok = $error_ok : error = $err : err_diff = $err_diff : err_static = $err_static : ok = $learn_ok\n" ;
          print $print ;
        }
      }

      print "epoch $counter : error = $err : ok = $learn_ok\n" if $verbose > 1 ;
      
    }
    
  }
  
  sub get_set_error (\@set,$ins_ok) {
    my $ins_sz = @set / 2 ;

    $ins_ok ||= $ins_sz ;
  
    my $err ;
    for (my $i = 0 ; $i < @set ; $i+=2) {
      $this->{NN}->run($set[$i]) ;
      my $er = $this->{NN}->RMSErr($set[$i+1]) ;
      $er *= -1 if $er < 0 ;
      $err += $er ;
    }
    
    $err /= $ins_ok ;
    return $err ;
  }
  
  sub run ($in) {
    $this->{NN}->run($in) ;
    my $out = $this->{NN}->output() ;
    return $out ;
  }
  
  sub run_get_winner {
    my $out = $this->run(@_) ;
    
    foreach my $out_i ( @$out ) {
      $out_i = $this->out_type_winner($out_i) ;
    }
    
    return $out ;
  }
  
  sub out_type_winner ($val) {
    my ($out_type , %err) ;
    
    foreach my $types_i ( @{ $this->{OUT_TYPES} } ) {
      my $er = $types_i - $val ;
      $er *= -1 if $er < 0 ;
      $err{$types_i} = $er ;
    }
    
    ## pick the output type with the smallest absolute difference from $val:
    my $min_type_err = (sort { $err{$a} <=> $err{$b} } keys %err)[0] ;
    $out_type = $min_type_err ;

    return $out_type ;
  }


}

1;

__END__

lib/AI/NNEasy.hploo


With this module you don't need to know much about NNs to be able to build one: you just
define the structure of the NN, teach it your set of inputs, and use it.

=> USAGE

Here's an example of a NN to compute XOR:

  use AI::NNEasy ;
  
  ## Our maximal error for the output calculation.
  my $ERR_OK = 0.1 ;

  ## Create the NN:
  my $nn = AI::NNEasy->new(
  'xor.nne' , ## file to save the NN.
  [0,1] ,     ## Output types of the NN.
  $ERR_OK ,   ## Maximal error for output.
  2 ,         ## Number of inputs.
  1 ,         ## Number of outputs.
  [3] ,       ## Hidden layers. (this sets 1 hidden layer with 3 nodes).
  ) ;
  
  
  ## Our set of inputs and outputs to learn:
  my @set = (
  [0,0] => [0],
  [0,1] => [1],
  [1,0] => [1],
  [1,1] => [0],
  );
  
  ## Calculate the actual error for the set:
  my $set_err = $nn->get_set_error(\@set) ;
  
  ## If the set error is bigger than the maximal error, let's learn this set:
  if ( $set_err > $ERR_OK ) {
    $nn->learn_set( \@set ) ;
    ## Save the NN:
    $nn->save ;
  }
  
  ## Use the NN:
  
  my $out = $nn->run_get_winner([0,0]) ;
  print "0 0 => @$out\n" ; ## 0 0 => 0
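
To check the whole set after learning, you can loop over the input/output pairs
the same way test.pl (included in this distribution) does:

  for (my $i = 0 ; $i < @set ; $i+=2) {
    my $out = $nn->run_get_winner( $set[$i] ) ;
    print "@{$set[$i]} => @$out\n" ; ## e.g.: 0 0 => 0
  }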
  

lib/AI/NNEasy.hploo

==> new ( FILE , @OUTPUT_TYPES , ERROR_OK , IN_SIZE , OUT_SIZE , @HIDDEN_LAYERS , %CONF )

*> FILE
The file path to save the NN. Default: 'nneasy.nne'.

*> @OUTPUT_TYPES
An array of outputs that the NN can have, so the NN can find the nearest number in this
list to give you the right output.

*> ERROR_OK
The maximal error of the calculated output.

If not defined, ERROR_OK will be calculated as the minimal difference between 2 types
in @OUTPUT_TYPES divided by 2, minus a 10% margin (see the constructor code above):

  @OUTPUT_TYPES = (0 , 0.5 , 1) ;
  
  ERROR_OK = (1 - 0.5) / 2 = 0.25 ;  ## minus 10% => 0.225

*> IN_SIZE
The input size (number of nodes in the input layer).

lib/AI/NNEasy.hploo

**> bias
If true, a BIAS node will be created. Useful when you have NULL inputs, like [0,0].

/*>

Here's a complete example of use:

  my $nn = AI::NNEasy->new(
  'xor.nne' , ## file to save the NN.
  [0,1] ,     ## Output types of the NN.
  0.1 ,       ## Maximal error for output.
  2 ,         ## Number of inputs.
  1 ,         ## Number of outputs.
  [3] ,       ## Hidden layers. (this sets 1 hidden layer with 3 nodes).
  {random_connections=>0 , networktype=>'feedforward' , random_weights=>1 , learning_algorithm=>'backprop' , learning_rate=>0.1 , bias=>1} ,
  ) ;

And a simple example that will create a NN equal to the one above:

  my $nn = AI::NNEasy->new('xor.nne' , [0,1] , 0.1 , 2 , 1 ) ;

lib/AI/NNEasy.hploo


*> N
Number of times that this input should be learned. Default: 100

Example:

  $nn->learn( [0,1] , [1] , 10 ) ;

==> learn_set (@SET , OK_OUTPUTS , LIMIT , VERBOSE)

Learn a set of inputs until the outputs reach the accepted error. See the example
after the parameter list below.

*> @SET
A list of inputs and outputs.

*> OK_OUTPUTS
Minimal number of outputs that should be OK when calculating the errors.

By default I<OK_OUTPUTS> is the number of different
inputs in @SET.

*> LIMIT
Limit of iterations when learning. Default: 30000

*> VERBOSE
If TRUE, turns verbose mode ON while learning.
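
Here's a sketch of a call with all arguments; note that, as the learn_set() source
above shows, LIMIT also accepts an ARRAY ref carrying the static-error limits:

  ## Learn until all 4 outputs are OK, for at most 30000 epochs, verbose ON:
  $nn->learn_set( \@set , 4 , 30000 , 1 ) ;
  
  ## LIMIT as [limit , err_static_limit , err_static_limit_positive]:
  $nn->learn_set( \@set , 4 , [30000 , 15 , 7] , 1 ) ;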

==> get_set_error (@SET , OK_OUTPUTS)
Get the actual error of a set in the NN. If the returned error is bigger than
I<ERROR_OK> defined on I<new()>, you should learn or relearn the set.
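
A minimal sketch, following the same pattern as the scripts in ./samples:

  my $set_err = $nn->get_set_error(\@set) ;
  $nn->learn_set(\@set) if $set_err > $nn->{ERROR_OK} ;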

==> run (@INPUT)
Run an input and return the output calculated by the NN, based on what the NN has already learned.

==> run_get_winner (@INPUT)
Same as I<run()>, but returns the nearest output value based on the
I<@OUTPUT_TYPES> defined at I<new()>.

For example, an input I<[0,1]> that was learned with the output I<[1]> will
actually return something like 0.98324 as output, and not 1, since the error
should never be 0. So, with I<run_get_winner()> we take the output of I<run()>,
say 0.98324, and find the output type nearest to this number, which in this case
is 1. An output of [0] will be returned by I<run()> as something like 0.078964,
and I<run_get_winner()> returns 0.
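
A short sketch (the raw output values are illustrative):

  my $out = $nn->run([0,1]) ;            ## e.g. [0.98324]
  my $win = $nn->run_get_winner([0,1]) ; ## [1]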

=> Samples

Inside the release sources you can find the directory ./samples, with some
examples of code using this module.

=> INLINE C

Some functions of this module have I<Inline> versions written in C.

I have made a C version only for the functions that are called intensively, like:

  AI::NNEasy::_learn_set_get_output_error

  AI::NNEasy::NN::tanh

  AI::NNEasy::NN::feedforward::run
  
  AI::NNEasy::NN::backprop::hiddenToOutput
  AI::NNEasy::NN::backprop::hiddenOrInputToHidden
  AI::NNEasy::NN::backprop::RMSErr

This gives us the speed that we need to learn the inputs fast, but at the same time

lib/AI/NNEasy.pm

  use Storable qw(freeze thaw) ;
  use Data::Dumper ;
  


  sub NNEasy { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $file = shift(@_) ;
    my @out_types = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $error_ok = shift(@_) ;
    my $in = shift(@_) ;
    my $out = shift(@_) ;
    my @layers = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $conf = shift(@_) ;
    
    $file ||= 'nneasy.nne' ;
  
    if ( $this->load($file) ) {
      return $this ;
    }

lib/AI/NNEasy.pm

    $this->{NN} = AI::NNEasy::NN->new( @{$this->{NN_ARGS}} ) ;
    
    $this->{FILE} = $file ;
    
    @out_types = (0,1) if !@out_types ;
    
    @out_types = sort {$a <=> $b} @out_types ;
    
    $this->{OUT_TYPES} = \@out_types ;
    
    if ( $error_ok <= 0 ) {
      my ($min_dif , $last) ;
      my $i = -1 ;
      foreach my $out_types_i ( @out_types ) {
        ++$i ;
        if ($i > 0) {
          my $dif = $out_types_i - $last ;
          $min_dif = $dif if !defined $min_dif || $dif < $min_dif ;
        }
        $last = $out_types_i ;
      }
      $error_ok = $min_dif / 2 ;
      $error_ok -= $error_ok*0.1 ;
    }
    
    $this->{ERROR_OK} = $error_ok ;
    
    return $this ;
  }
  
  sub _layer_conf { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $def = shift(@_) ;
    my $conf = shift(@_) ;
    

lib/AI/NNEasy.pm

  
  sub learn { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $in = shift(@_) ;
    my $out = shift(@_) ;
    my $n = shift(@_) ;
    
    $n ||= 100 ;
    
    my $err ;
    for (1..$n) {
      $this->{NN}->run($in) ;
      $err = $this->{NN}->learn($out) ;
    }
    
    $err *= -1 if $err < 0 ;
    return $err ;
  }
  
  *_learn_set_get_output_error = \&_learn_set_get_output_error_c ; ## XS version; _learn_set_get_output_error_pl below is the pure-Perl fallback
  
  sub _learn_set_get_output_error_pl { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $set = shift(@_) ;
    my $error_ok = shift(@_) ;
    my $ins_ok = shift(@_) ;
    my $verbose = shift(@_) ;
    
    for (my $i = 0 ; $i < @$set ; $i+=2) {
      $this->{NN}->run($$set[$i]) ;
      $this->{NN}->learn($$set[$i+1]) ;
    }

    my ($err,$learn_ok,$print) ;
    for (my $i = 0 ; $i < @$set ; $i+=2) {
      $this->{NN}->run($$set[$i]) ;
      my $er = $this->{NN}->RMSErr($$set[$i+1]) ;
      $er *= -1 if $er < 0 ;
      ++$learn_ok if $er < $error_ok ;
      $err += $er ;
      $print .= join(' ',@{$$set[$i]}) ." => ". join(' ',@{$$set[$i+1]}) ." > $er\n" if $verbose ;
    }
    
    $err /= $ins_ok ;
    
    return ( $err , $learn_ok , $print ) ;
  }
  
  
  
  
    
  sub learn_set { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $ins_ok = shift(@_) ;
    my $limit = shift(@_) ;
    my $verbose = shift(@_) ;
    
    my $ins_sz = @set / 2 ;

    $ins_ok ||= $ins_sz ;
    
    my $err_static_limit = 15 ;
    my $err_static_limit_positive ;

    if ( ref($limit) eq 'ARRAY' ) {
      ($limit,$err_static_limit,$err_static_limit_positive) = @$limit ;
    }
    
    $limit ||= 30000 ;
    $err_static_limit_positive ||= $err_static_limit/2 ;
  
    my $error_ok = $this->{ERROR_OK} ;
    
    my $check_diff_count = 1000 ;
    
    my ($learn_ok,$counter,$err,$err_last,$err_count,$err_static, $reset_count1 , $reset_count2 ,$print) ;
    
    $err_static = 0 ;
    
    while ( ($learn_ok < $ins_ok) && ($counter < $limit) ) {
      ($err , $learn_ok , $print) = $this->_learn_set_get_output_error(\@set , $error_ok , $ins_ok , $verbose) ;
      
      ++$counter ;
      
      if ( !($counter % 100) || $learn_ok == $ins_ok ) {
        my $err_diff = $err_last - $err ;
        $err_diff *= -1 if $err_diff < 0 ;
        
        $err_count += $err_diff ;
        
        ++$err_static if $err_diff <= 0.00001 || $err > 1 ;
        
        print "err_static = $err_static\n" if $verbose && $err_static ;

        $err_last = $err ;
        
        my $reseted ;
        if ( $err_static >= $err_static_limit || ($err > 1 && $err_static >= $err_static_limit_positive) ) {
          $err_static = 0 ;
          $counter -= 2000 ;
          $reseted = 1 ;
          ++$reset_count1 ;
          
          if ( ( $reset_count1 + $reset_count2 ) > 2 ) {
            $reset_count1 = $reset_count2 = 0 ;
            print "** Resetting NN...\n" if $verbose ;
            $this->reset_nn ;
          }
          else {
            print "** Resetting weights due to NULL diff...\n" if $verbose ;
            $this->{NN}->init ;
          }
        }
        
        if ( !($counter % $check_diff_count) ) {
          $err_count /= ($check_diff_count/100) ;
          
          print "ERR COUNT> $err_count\n" if $verbose ;
          
          if ( !$reseted && $err_count < 0.001 ) {
            $err_static = 0 ;
            $counter -= 1000 ;
            ++$reset_count2 ;
            
            if ( ($reset_count1 + $reset_count2) > 2 ) {
              $reset_count1 = $reset_count2 = 0 ;
              print "** Resetting NN...\n" if $verbose ;
              $this->reset_nn ;
            }
            else {
              print "** Resetting weights due to LOW diff...\n" if $verbose ;
              $this->{NN}->init ;
            }
          }

          $err_count = 0 ;
        }
        
        if ( $verbose ) {
          print "\nepoch $counter : error_ok = $error_ok : error = $err : err_diff = $err_diff : err_static = $err_static : ok = $learn_ok\n" ;
          print $print ;
        }
      }

      print "epoch $counter : error = $err : ok = $learn_ok\n" if $verbose > 1 ;
      
    }
    
  }
  
  sub get_set_error { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $ins_ok = shift(@_) ;
    
    my $ins_sz = @set / 2 ;

    $ins_ok ||= $ins_sz ;
  
    my $err ;
    for (my $i = 0 ; $i < @set ; $i+=2) {
      $this->{NN}->run($set[$i]) ;
      my $er = $this->{NN}->RMSErr($set[$i+1]) ;
      $er *= -1 if $er < 0 ;
      $err += $er ;
    }
    
    $err /= $ins_ok ;
    return $err ;
  }
  
  sub run { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $in = shift(@_) ;
    
    $this->{NN}->run($in) ;
    my $out = $this->{NN}->output() ;
    return $out ;

lib/AI/NNEasy.pm

    }
    
    return $out ;
  }
  
  sub out_type_winner { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $val = shift(@_) ;
    
    my ($out_type , %err) ;
    
    foreach my $types_i ( @{ $this->{OUT_TYPES} } ) {
      my $er = $types_i - $val ;
      $er *= -1 if $er < 0 ;
      $err{$types_i} = $er ;
    }
    
    my $min_type_err = (sort { $err{$a} <=> $err{$b} } keys %err)[0] ;
    $out_type = $min_type_err ;

    return $out_type ;
  }


my $INLINE_INSTALL ; BEGIN { use Config ; my @installs = ($Config{installarchlib} , $Config{installprivlib} , $Config{installsitelib}) ; foreach my $i ( @installs ) { $i =~ s/[\\\/]/\//gs ;} $INLINE_INSTALL = 1 if ( __FILE__ =~ /\.pm$/ && ( join(" ",...

use Inline C => <<'__INLINE_C_SRC__' , ( $INLINE_INSTALL ? (NAME => 'AI::NNEasy' , VERSION => '0.06') : () ) ;


lib/AI/NNEasy.pm

    SV* ret = sv_2mortal(newSVpv("",0)) ;
    int i ;
    for (i = 0 ; i <= av_len(av) ; ++i) {
      SV* elem = *av_fetch(av, i ,0) ;
      if (i > 0) sv_catpv(ret , " ") ;
      sv_catsv(ret , elem) ;
    }
    return ret ;
}

void _learn_set_get_output_error_c( SV* self , SV* set , double error_ok , int ins_ok , bool verbose ) {
    dXSARGS;
    
    STRLEN len;
    int i ;
    HV* self_hv = OBJ_HV( self );
    AV* set_av = OBJ_AV( set ) ;
    SV* nn = FETCH_ATTR(self_hv , "NN") ;
    SV* print_verbose = verbose ? sv_2mortal(newSVpv("",0)) : NULL ;
    SV* ret ;
    double err = 0 ;
    double er = 0 ;
    int learn_ok = 0 ;
        
    for (i = 0 ; i <= av_len(set_av) ; i+=2) {
      SV* set_in = *av_fetch(set_av, i ,0) ;
      SV* set_out = *av_fetch(set_av, i+1 ,0) ;

      PUSHMARK(SP) ;
        XPUSHs( nn );
        XPUSHs( set_in );

lib/AI/NNEasy.pm

      PUSHMARK(SP) ;
        XPUSHs( nn );
        XPUSHs( set_out );
      PUTBACK ;
      call_method("RMSErr", G_SCALAR) ;
      
      SPAGAIN ;
      ret = POPs ;
      er = SvNV(ret) ;
      if (er < 0) er *= -1 ;
      if (er < error_ok) ++learn_ok ;
      err += er ;
      
      if ( verbose ) sv_catpvf(print_verbose , "%s => %s > %f\n" ,
                       SvPV( _av_join( OBJ_AV(set_in) ) , len) ,
                       SvPV( _av_join( OBJ_AV(set_out) ) , len) ,
                       er
                     ) ;

    }
    
    err /= ins_ok ;

    if (verbose) {
      EXTEND(SP , 3) ;
        ST(0) = sv_2mortal(newSVnv(err)) ;
        ST(1) = sv_2mortal(newSViv(learn_ok)) ;
        ST(2) = print_verbose ;
      XSRETURN(3) ;
    }
    else {
      EXTEND(SP , 2) ;
        ST(0) = sv_2mortal(newSVnv(err)) ;
        ST(1) = sv_2mortal(newSViv(learn_ok)) ;
      XSRETURN(2) ;
    }
}

__INLINE_C_SRC__


}

lib/AI/NNEasy.pm


With this module you don't need to know much about NNs to be able to build one: you just
define the structure of the NN, teach it your set of inputs, and use it.

=head1 USAGE

Here's an example of a NN to compute XOR:

  use AI::NNEasy ;
  
  ## Our maximal error for the output calculation.
  my $ERR_OK = 0.1 ;

  ## Create the NN:
  my $nn = AI::NNEasy->new(
  'xor.nne' , ## file to save the NN.
  [0,1] ,     ## Output types of the NN.
  $ERR_OK ,   ## Maximal error for output.
  2 ,         ## Number of inputs.
  1 ,         ## Number of outputs.
  [3] ,       ## Hidden layers. (this sets 1 hidden layer with 3 nodes).
  ) ;
  
  
  ## Our set of inputs and outputs to learn:
  my @set = (
  [0,0] => [0],
  [0,1] => [1],
  [1,0] => [1],
  [1,1] => [0],
  );
  
  ## Calculate the actual error for the set:
  my $set_err = $nn->get_set_error(\@set) ;
  
  ## If the set error is bigger than the maximal error, let's learn this set:
  if ( $set_err > $ERR_OK ) {
    $nn->learn_set( \@set ) ;
    ## Save the NN:
    $nn->save ;
  }
  
  ## Use the NN:
  
  my $out = $nn->run_get_winner([0,0]) ;
  print "0 0 => @$out\n" ; ## 0 0 => 0
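
To check the whole set after learning, you can loop over the input/output pairs
the same way test.pl (included in this distribution) does:

  for (my $i = 0 ; $i < @set ; $i+=2) {
    my $out = $nn->run_get_winner( $set[$i] ) ;
    print "@{$set[$i]} => @$out\n" ; ## e.g.: 0 0 => 0
  }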
  

lib/AI/NNEasy.pm


The file path to save the NN. Default: 'nneasy.nne'.

=item @OUTPUT_TYPES

An array of outputs that the NN can have, so the NN can find the nearest number in this
list to give you the right output.

=item ERROR_OK

The maximal error of the calculated output.

If not defined, ERROR_OK will be calculated as the minimal difference between 2 types
in @OUTPUT_TYPES divided by 2, minus a 10% margin (see the constructor code above):

  @OUTPUT_TYPES = (0 , 0.5 , 1) ;
  
  ERROR_OK = (1 - 0.5) / 2 = 0.25 ;  ## minus 10% => 0.225

=item IN_SIZE

lib/AI/NNEasy.pm


=back

=back

Here's a complete example of use:

  my $nn = AI::NNEasy->new(
  'xor.nne' , ## file to save the NN.
  [0,1] ,     ## Output types of the NN.
  0.1 ,       ## Maximal error for output.
  2 ,         ## Number of inputs.
  1 ,         ## Number of outputs.
  [3] ,       ## Hidden layers. (this sets 1 hidden layer with 3 nodes).
  {random_connections=>0 , networktype=>'feedforward' , random_weights=>1 , learning_algorithm=>'backprop' , learning_rate=>0.1 , bias=>1} ,
  ) ;

And a simple example that will create a NN equal to the one above:

  my $nn = AI::NNEasy->new('xor.nne' , [0,1] , 0.1 , 2 , 1 ) ;

lib/AI/NNEasy.pm

Number of times that this input should be learned. Default: 100

Example:

  $nn->learn( [0,1] , [1] , 10 ) ;

=back

=head2 learn_set (@SET , OK_OUTPUTS , LIMIT , VERBOSE)

Learn a set of inputs until the outputs reach the accepted error. See the example
after the parameter list below.

=over 4

=item @SET

A list of inputs and outputs.

=item OK_OUTPUTS

Minimal number of outputs that should be OK when calculating the errors.

By default I<OK_OUTPUTS> is the number of different
inputs in @SET.

=item LIMIT

Limit of iterations when learning. Default: 30000

=item VERBOSE

If TRUE, turns verbose mode ON while learning.

=back
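
Here's a sketch of a call with all arguments; note that, as the learn_set() source
above shows, LIMIT also accepts an ARRAY ref carrying the static-error limits:

  ## Learn until all 4 outputs are OK, for at most 30000 epochs, verbose ON:
  $nn->learn_set( \@set , 4 , 30000 , 1 ) ;
  
  ## LIMIT as [limit , err_static_limit , err_static_limit_positive]:
  $nn->learn_set( \@set , 4 , [30000 , 15 , 7] , 1 ) ;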

=head2 get_set_error (@SET , OK_OUTPUTS)

Get the actual error of a set in the NN. If the returned error is bigger than
I<ERROR_OK> defined on I<new()>, you should learn or relearn the set.
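
A minimal sketch, following the same pattern as the scripts in ./samples:

  my $set_err = $nn->get_set_error(\@set) ;
  $nn->learn_set(\@set) if $set_err > $nn->{ERROR_OK} ;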

=head2 run (@INPUT)

Run an input and return the output calculated by the NN, based on what the NN has already learned.

=head2 run_get_winner (@INPUT)

Same as I<run()>, but returns the nearest output value based on the
I<@OUTPUT_TYPES> defined at I<new()>.

For example, an input I<[0,1]> that was learned with the output I<[1]> will
actually return something like 0.98324 as output, and not 1, since the error
should never be 0. So, with I<run_get_winner()> we take the output of I<run()>,
say 0.98324, and find the output type nearest to this number, which in this case
is 1. An output of [0] will be returned by I<run()> as something like 0.078964,
and I<run_get_winner()> returns 0.
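
A short sketch (the raw output values are illustrative):

  my $out = $nn->run([0,1]) ;            ## e.g. [0.98324]
  my $win = $nn->run_get_winner([0,1]) ; ## [1]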

=head1 Samples

Inside the release sources you can find the directory ./samples, with some
examples of code using this module.

=head1 INLINE C

Some functions of this module have I<Inline> versions written in C.

I have made a C version only for the functions that are called intensively, like:

  AI::NNEasy::_learn_set_get_output_error

  AI::NNEasy::NN::tanh

  AI::NNEasy::NN::feedforward::run
  
  AI::NNEasy::NN::backprop::hiddenToOutput
  AI::NNEasy::NN::backprop::hiddenOrInputToHidden
  AI::NNEasy::NN::backprop::RMSErr

This gives us the speed that we need to learn the inputs fast, but at the same time

lib/AI/NNEasy/NN/backprop.hploo

#define FETCH_ATTR_HV(hv,k)	(HV*) FETCH_ATTR(hv,k)
#define FETCH_ATTR_AV(hv,k)	(AV*) FETCH_ATTR(hv,k)
#define FETCH_ATTR_HV_REF(hv,k)	(HV*) SvRV( FETCH_ATTR(hv,k) )
#define FETCH_ATTR_AV_REF(hv,k)	(AV*) SvRV( FETCH_ATTR(hv,k) )

#define FETCH_ELEM(av,i)		*av_fetch(av,i,0)
#define FETCH_ELEM_HV_REF(av,i)	(HV*) SvRV( FETCH_ELEM(av,i) )
#define FETCH_ELEM_AV_REF(av,i)	(AV*) SvRV( FETCH_ELEM(av,i) )
__[C]__

  sub calc_error ($outputPatternRef) {
    my @outputPattern = @$outputPatternRef;

    my $outputLayer = $this->{layers}->[-1]->{nodes} ;

    return 0 if @$outputLayer != @outputPattern ;

    my $counter = 0 ;
    foreach my $node (@$outputLayer) {
      $node->{error} = $node->{activation} - $outputPattern[$counter] ;
      ++$counter ;
    }
  }
  
  sub learn ($outputPatternRef) {
    $this->calc_error($outputPatternRef) ;
    $this->hiddenToOutput ;
    $this->hiddenOrInputToHidden if @{$this->{layers}} > 2 ;
    return $this->RMSErr($outputPatternRef) ;    
  }
  
  *hiddenToOutput = \&hiddenToOutput_c ;
  
  sub hiddenToOutput_pl {
    foreach my $node ( @{ $this->{layers}->[-1]->{nodes} } ) {
      foreach my $connectedNode ( @{$node->{connectedNodesWest}->{nodes}} ) {
        $node->{connectedNodesWest}->{weights}->{ $connectedNode->{nodeid} } -= $this->{learning_rate} * $node->{error} * $connectedNode->{activation} ;
        ## clamp the weight to the [-5 , 5] range:
        $node->{connectedNodesWest}->{weights}->{ $connectedNode->{nodeid} } = 5 if $node->{connectedNodesWest}->{weights}->{ $connectedNode->{nodeid} } > 5 ;
        $node->{connectedNodesWest}->{weights}->{ $connectedNode->{nodeid} } = -5 if $node->{connectedNodesWest}->{weights}->{ $connectedNode->{nodeid} } < -5 ;
      }
    }
  }
  
  sub[C] void hiddenToOutput_c( SV* self ) {
    STRLEN len;
    int i , j , k ;
    HV* self_hv = OBJ_HV( self );
        
    AV* nodes = FETCH_ATTR_AV_REF( FETCH_ELEM_HV_REF( FETCH_ATTR_AV_REF(self_hv , "layers") , -1) , "nodes") ;
    for (i = 0 ; i <= av_len(nodes) ; ++i) {
      HV* node = OBJ_HV( *av_fetch(nodes, i ,0) ) ;

      AV* westNodes = FETCH_ATTR_AV_REF( FETCH_ATTR_HV_REF(node , "connectedNodesWest") , "nodes") ;
      for (j = 0 ; j <= av_len(westNodes) ; ++j) {
        HV* connectedNode = OBJ_HV( *av_fetch(westNodes, j ,0) ) ;
        SV* weight = FETCH_ATTR( FETCH_ATTR_HV_REF( FETCH_ATTR_HV_REF(node , "connectedNodesWest") , "weights" ) , FETCH_ATTR_PV(connectedNode , "nodeid") );

        double val = FETCH_ATTR_NV(self_hv , "learning_rate") * FETCH_ATTR_NV(node , "error") * FETCH_ATTR_NV(connectedNode , "activation") ;
        val = SvNV(weight) - val ;

        if      ( val > 5 ) { val = 5 ;}
        else if ( val < -5 ) { val = -5 ;}
        
        sv_setnv(weight , val) ;
      }
      
    }
  }

lib/AI/NNEasy/NN/backprop.hploo

    my $learningRate = $this->{learning_rate} ;

    foreach my $layer ( reverse @{$this->{layers}}[0 .. $#{$this->{layers}}-1 ] ) {
      foreach my $node ( @{$layer->{nodes}} ) {
        last if !$node->{connectedNodesWest} ;
        
        $nodeid = $node->{nodeid} ;

        $nodeError = 0 ;
        foreach my $connectedNode ( @{$node->{connectedNodesEast}->{nodes}} ) {
          my $noderr = $connectedNode->{error} * $connectedNode->{connectedNodesWest}->{weights}->{$nodeid} ;
          $nodeError += $noderr ;
        }
        $node->{error} = $nodeError ;
        
        $nodeActivation = $node->{activation} ;

        # update the weights from nodes inputting to here
        foreach my $westNodes ( @{$node->{connectedNodesWest}->{nodes}} ) {
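          ## ( 1 - activation*activation ) is tanh'(activation), the derivative of the tanh activation: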
          $node->{connectedNodesWest}->{weights}->{ $westNodes->{nodeid} } -= ( 1 - ($nodeActivation*$nodeActivation) ) * $nodeError * $learningRate * $westNodes->{activation} ;
        }
        
      }
    }

lib/AI/NNEasy/NN/backprop.hploo

        
        if (!SvTRUE( FETCH_ATTR(node , "connectedNodesWest") ) ) break ;
        
        nodeid = FETCH_ATTR_PV(node , "nodeid") ;
        
        nodeError = 0 ;
        
        eastNodes = FETCH_ATTR_AV_REF( FETCH_ATTR_HV_REF(node , "connectedNodesEast") , "nodes") ;
        for (k = 0 ; k <= av_len(eastNodes) ; ++k) {
          HV* connectedNode = OBJ_HV( *av_fetch(eastNodes, k ,0) ) ;
          nodeError += FETCH_ATTR_NV(connectedNode , "error") * FETCH_ATTR_NV( FETCH_ATTR_HV_REF( FETCH_ATTR_HV_REF(connectedNode , "connectedNodesWest") , "weights") , nodeid) ;
        }
        
        hv_store(node , "error" , 5 , newSVnv(nodeError) , 0) ;
        
        nodeActivation = FETCH_ATTR_NV(node , "activation") ;
        
        westNodes = FETCH_ATTR_AV_REF( FETCH_ATTR_HV_REF(node , "connectedNodesWest") , "nodes") ;
        for (k = 0 ; k <= av_len(westNodes) ; ++k) {
          HV* connectedNode = OBJ_HV( *av_fetch(westNodes, k ,0) ) ;
          char* connectedNode_id = FETCH_ATTR_PV(connectedNode , "nodeid") ;
  
          HV* hv = FETCH_ATTR_HV_REF( FETCH_ATTR_HV_REF(node , "connectedNodesWest") , "weights") ;
          SV* weight_prev = FETCH_ATTR(hv , connectedNode_id) ;

lib/AI/NNEasy/NN/backprop.hploo

  sub RMSErr_pl ($outputPatternRef) {
    my $outputLayer = $this->{layers}->[-1]->{nodes} ;

    my $sqrErr ;
    my $counter = 0 ;
    foreach my $node (@$outputLayer) {
      $sqrErr += ($node->{activation} - $$outputPatternRef[$counter])**2 ;
      ++$counter ;
    }

    my $error = sqrt($sqrErr) ; ## root of the summed squared error over the output nodes

    return $error;
  }
  
  sub[C] double RMSErr_c( SV* self , SV* outputPatternRef ) {
    STRLEN len;
    int i ;
    HV* self_hv = OBJ_HV( self );
    
    AV* outputLayer = FETCH_ATTR_AV_REF( FETCH_ELEM_HV_REF( FETCH_ATTR_AV_REF(self_hv , "layers") , -1) , "nodes") ;
    
    AV* outputPattern = OBJ_AV(outputPatternRef) ;
    int outputPattern_len = av_len(outputPattern) ;
        
    double sqrErr = 0 ;
    double error = 0 ;
    
    for (i = 0 ; i <= av_len(outputLayer) ; ++i) {
      HV* node = OBJ_HV( *av_fetch(outputLayer, i ,0) ) ;
      double val ;

      /* missing pattern elements count as 0, as in RMSErr_pl */
      val = i <= outputPattern_len ? SvNV(*av_fetch(outputPattern, i ,0)) : 0 ;
      val = FETCH_ATTR_NV(node , "activation") - val ;

      /* accumulate the squared error over the output nodes */
      sqrErr += val * val ;
    }
    
    error = Perl_sqrt(sqrErr) ;

    return error ;
  }

}

1;


lib/AI/NNEasy/NN/backprop.pm

@ISA = qw(Class::HPLOO::Base UNIVERSAL) ;


my $CLASS = 'AI::NNEasy::NN::backprop' ; sub __CLASS__ { 'AI::NNEasy::NN::backprop' } ;


use Class::HPLOO::Base ;



  sub calc_error { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $outputPatternRef = shift(@_) ;
    
    my @outputPattern = @$outputPatternRef;

    my $outputLayer = $this->{layers}->[-1]->{nodes} ;

    return 0 if @$outputLayer != @outputPattern ;

    my $counter = 0 ;
    foreach my $node (@$outputLayer) {
      $node->{error} = $node->{activation} - $outputPattern[$counter] ;
      ++$counter ;
    }
  }
  
  sub learn { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $outputPatternRef = shift(@_) ;
    
    $this->calc_error($outputPatternRef) ;
    $this->hiddenToOutput ;
    $this->hiddenOrInputToHidden if @{$this->{layers}} > 2 ;
    return $this->RMSErr($outputPatternRef) ;    
  }
  
  *hiddenToOutput = \&hiddenToOutput_c ;
  
  sub hiddenToOutput_pl { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    
    foreach my $node ( @{ $this->{layers}->[-1]->{nodes} } ) {
      foreach my $connectedNode ( @{$node->{connectedNodesWest}->{nodes}} ) {
        $node->{connectedNodesWest}->{weights}->{ $connectedNode->{nodeid} } -= $this->{learning_rate} * $node->{error} * $connectedNode->{activation} ;
        $node->{connectedNodesWest}->{weights}->{ $connectedNode->{nodeid} } = 5 if $node->{connectedNodesWest}->{weights}->{ $connectedNode->{nodeid} } > 5 ;
        $node->{connectedNodesWest}->{weights}->{ $connectedNode->{nodeid} } = -5 if $node->{connectedNodesWest}->{weights}->{ $connectedNode->{nodeid} } < -5 ;
      }
    }
  }
  
  
  
  *hiddenOrInputToHidden = \&hiddenOrInputToHidden_c ;
  

lib/AI/NNEasy/NN/backprop.pm

    my $learningRate = $this->{learning_rate} ;

    foreach my $layer ( reverse @{$this->{layers}}[0 .. $#{$this->{layers}}-1 ] ) {
      foreach my $node ( @{$layer->{nodes}} ) {
        last if !$node->{connectedNodesWest} ;
        
        $nodeid = $node->{nodeid} ;

        $nodeError = 0 ;
        foreach my $connectedNode ( @{$node->{connectedNodesEast}->{nodes}} ) {
          my $noderr = $connectedNode->{error} * $connectedNode->{connectedNodesWest}->{weights}->{$nodeid} ;
          $nodeError += $noderr ;
        }
        $node->{error} = $nodeError ;
        
        $nodeActivation = $node->{activation} ;

        # update the weights from nodes inputting to here
        foreach my $westNodes ( @{$node->{connectedNodesWest}->{nodes}} ) {
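          ## ( 1 - activation*activation ) is tanh'(activation), the derivative of the tanh activation: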
          $node->{connectedNodesWest}->{weights}->{ $westNodes->{nodeid} } -= ( 1 - ($nodeActivation*$nodeActivation) ) * $nodeError * $learningRate * $westNodes->{activation} ;
        }
        
      }
    }

lib/AI/NNEasy/NN/backprop.pm

    
    my $outputLayer = $this->{layers}->[-1]->{nodes} ;

    my $sqrErr ;
    my $counter = 0 ;
    foreach my $node (@$outputLayer) {
      $sqrErr += ($node->{activation} - $$outputPatternRef[$counter])**2 ;
      ++$counter ;
    }

    my $error = sqrt($sqrErr) ;

    return $error;
  }
  
  

my $INLINE_INSTALL ; BEGIN { use Config ; my @installs = ($Config{installarchlib} , $Config{installprivlib} , $Config{installsitelib}) ; foreach my $i ( @installs ) { $i =~ s/[\\\/]/\//gs ;} $INLINE_INSTALL = 1 if ( __FILE__ =~ /\.pm$/ && ( join(" ",...

use Inline C => <<'__INLINE_C_SRC__' , ( $INLINE_INSTALL ? (NAME => 'AI::NNEasy::NN::backprop' , VERSION => '0.06') : () ) ;


#define OBJ_HV(self)		(HV*) SvRV( self )

lib/AI/NNEasy/NN/backprop.pm

        
    AV* nodes = FETCH_ATTR_AV_REF( FETCH_ELEM_HV_REF( FETCH_ATTR_AV_REF(self_hv , "layers") , -1) , "nodes") ;
    for (i = 0 ; i <= av_len(nodes) ; ++i) {
      HV* node = OBJ_HV( *av_fetch(nodes, i ,0) ) ;

      AV* westNodes = FETCH_ATTR_AV_REF( FETCH_ATTR_HV_REF(node , "connectedNodesWest") , "nodes") ;
      for (j = 0 ; j <= av_len(westNodes) ; ++j) {
        HV* connectedNode = OBJ_HV( *av_fetch(westNodes, j ,0) ) ;
        SV* weight = FETCH_ATTR( FETCH_ATTR_HV_REF( FETCH_ATTR_HV_REF(node , "connectedNodesWest") , "weights" ) , FETCH_ATTR_PV(connectedNode , "nodeid") );

        double val = FETCH_ATTR_NV(self_hv , "learning_rate") * FETCH_ATTR_NV(node , "error") * FETCH_ATTR_NV(connectedNode , "activation") ;
        val = SvNV(weight) - val ;

        if      ( val > 5 ) { val = 5 ;}
        else if ( val < -5 ) { val = -5 ;}
        
        sv_setnv(weight , val) ;
      }
      
    }
}

lib/AI/NNEasy/NN/backprop.pm

        
        if (!SvTRUE( FETCH_ATTR(node , "connectedNodesWest") ) ) break ;
        
        nodeid = FETCH_ATTR_PV(node , "nodeid") ;
        
        nodeError = 0 ;
        
        eastNodes = FETCH_ATTR_AV_REF( FETCH_ATTR_HV_REF(node , "connectedNodesEast") , "nodes") ;
        for (k = 0 ; k <= av_len(eastNodes) ; ++k) {
          HV* connectedNode = OBJ_HV( *av_fetch(eastNodes, k ,0) ) ;
          nodeError += FETCH_ATTR_NV(connectedNode , "error") * FETCH_ATTR_NV( FETCH_ATTR_HV_REF( FETCH_ATTR_HV_REF(connectedNode , "connectedNodesWest") , "weights") , nodeid) ;
        }
        
        hv_store(node , "error" , 5 , newSVnv(nodeError) , 0) ;
        
        nodeActivation = FETCH_ATTR_NV(node , "activation") ;
        
        westNodes = FETCH_ATTR_AV_REF( FETCH_ATTR_HV_REF(node , "connectedNodesWest") , "nodes") ;
        for (k = 0 ; k <= av_len(westNodes) ; ++k) {
          HV* connectedNode = OBJ_HV( *av_fetch(westNodes, k ,0) ) ;
          char* connectedNode_id = FETCH_ATTR_PV(connectedNode , "nodeid") ;
  
          HV* hv = FETCH_ATTR_HV_REF( FETCH_ATTR_HV_REF(node , "connectedNodesWest") , "weights") ;
          SV* weight_prev = FETCH_ATTR(hv , connectedNode_id) ;

lib/AI/NNEasy/NN/backprop.pm

    STRLEN len;
    int i ;
    HV* self_hv = OBJ_HV( self );
    
    AV* outputLayer = FETCH_ATTR_AV_REF( FETCH_ELEM_HV_REF( FETCH_ATTR_AV_REF(self_hv , "layers") , -1) , "nodes") ;
    
    AV* outputPattern = OBJ_AV(outputPatternRef) ;
    int outputPattern_len = av_len(outputPattern) ;
        
    double sqrErr = 0 ;
    double error = 0 ;
    
    for (i = 0 ; i <= av_len(outputLayer) ; ++i) {
      HV* node = OBJ_HV( *av_fetch(outputLayer, i ,0) ) ;
      double val ;

      /* missing pattern elements count as 0, as in RMSErr_pl */
      val = i <= outputPattern_len ? SvNV(*av_fetch(outputPattern, i ,0)) : 0 ;
      val = FETCH_ATTR_NV(node , "activation") - val ;

      /* accumulate the squared error over the output nodes */
      sqrErr += val * val ;
    }
    
    error = Perl_sqrt(sqrErr) ;

    return error ;
}

__INLINE_C_SRC__


}


1;

lib/AI/NNEasy/NN/node.hploo


  my $NODEID ;

  sub node ($params) {
    $this->{nodeid} = ++$NODEID ;
    
    $this->{activation} = $$params{random_activation} ? rand($$params{random}) : 0 ;

    $this->{random_weights} = $$params{random_weights} ;
    $this->{decay} = $$params{decay} ;
    $this->{adjust_error} = $$params{adjust_error} ;
    $this->{persistent_activation} = $$params{persistent_activation} ;
    $this->{threshold} = $$params{threshold} ;
    $this->{activation_function} = $$params{activation_function} ;
    $this->{active} = 1 ;
    
    $this->{error} = 0 ;

    return $this ;
  }

}

1;


lib/AI/NNEasy/NN/node.pm

    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $params = shift(@_) ;
    
    $this->{nodeid} = ++$NODEID ;
    
    $this->{activation} = $$params{random_activation} ? rand($$params{random}) : 0 ;

    $this->{random_weights} = $$params{random_weights} ;
    $this->{decay} = $$params{decay} ;
    $this->{adjust_error} = $$params{adjust_error} ;
    $this->{persistent_activation} = $$params{persistent_activation} ;
    $this->{threshold} = $$params{threshold} ;
    $this->{activation_function} = $$params{activation_function} ;
    $this->{active} = 1 ;
    
    $this->{error} = 0 ;

    return $this ;
  }


}


1;

samples/test-nn-nonbool.pl


  my @set = (
  [0,0] => [0],
  [0,0.5] => [0.2],
  [0,1] => [0.4],
  [0.5,0] => [0.6],
  [0.5,0.5] => [0.8],
  [0.5,1] => [1],
  );

  my $set_err = $nn->get_set_error(\@set) ;
  
  print "SET ERROR NOW: $set_err\n" ; 

  while ( $set_err > $nn->{ERROR_OK} ) {
    $nn->learn_set( \@set , undef , undef , 1) ;
    $set_err = $nn->get_set_error(\@set) ;
  }
  
  $nn->save ;
  
  print "-------------------------------------------\n" ;
  
  print "ERR_OK: $nn->{ERROR_OK}\n" ;
  
  print "-------------------------------------------\n" ;
  

samples/test-nn-xor.pl

  [3] ,
  ) ;

  my @set = (
  [0,0] => [0],
  [0,1] => [1],
  [1,0] => [1],
  [1,1] => [0],
  );

  my $set_err = $nn->get_set_error(\@set) ;
  
  print "SET ERROR NOW: $set_err\n" ; 

  while ( $set_err > $nn->{ERROR_OK} ) {
    $nn->learn_set( \@set , undef , undef , 1) ;
    $set_err = $nn->get_set_error(\@set) ;
  }
  
  $nn->save ;
  
  print "-------------------------------------------\n" ;
  
  print "ERR_OK: $nn->{ERROR_OK}\n" ;
  
  print "-------------------------------------------\n" ;
  

test.pl


  my $nn = AI::NNEasy->new($file , [0,1] , $ERR_OK , 2 , 1 ) ;
  
  my @set = (
  [0,0],[0],
  [0,1],[1],
  [1,0],[1],
  [1,1],[0],
  );
  
  my $set_err = $nn->get_set_error(\@set) ;
  
  while ( $set_err > $ERR_OK ) {
    $nn->learn_set( \@set , 4 , 30000 , 0 ) ;
    $set_err = $nn->get_set_error(\@set) ;
  }
  
  for (my $i = 0 ; $i < @set ; $i+=2) {
    my $out_ok = $set[$i+1] ;
    
    my $out = $nn->run($set[$i]) ;
    my $out_win = $nn->run_get_winner($set[$i]) ;
    
    my $er = $$out_ok[0] - $$out[0] ;
    $er *= -1 if $er < 0 ;
    
    ok( $er < $ERR_OK ) ;

    ok( $$out_ok[0] , $$out_win[0] ) ;
  }
  
  $set_err = $nn->get_set_error(\@set) ;
  ok( $set_err < $ERR_OK ) ;

}
#########################

print "\nThe End! Bye!\n" ;

1 ;


