AI-FANN-Evolving


LICENSE

The MIT License (MIT)
 
Copyright (c) 2014 Naturalis Biodiversity Center
 
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
 
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR

META.json

   },
   "name" : "AI-FANN-Evolving",
   "no_index" : {
      "directory" : [
         "t",
         "inc"
      ]
   },
   "prereqs" : {
      "build" : {
         "requires" : {
            "ExtUtils::MakeMaker" : "0"
         }
      },
      "configure" : {
         "requires" : {
            "ExtUtils::MakeMaker" : "0"
         }
      },
      "runtime" : {
         "requires" : {
            "AI::FANN" : "0",
            "Algorithm::Genetic::Diploid" : "0"
         }
      }
   },
   "release_status" : "stable",
   "version" : "0.4"
}

META.yml

---
abstract: 'artificial neural network that evolves'
author:
  - 'Rutger Vos <rutger.vos@naturalis.nl>'
build_requires:
  ExtUtils::MakeMaker: 0
configure_requires:
  ExtUtils::MakeMaker: 0
dynamic_config: 1
generated_by: 'ExtUtils::MakeMaker version 6.8, CPAN::Meta::Converter version 2.132830'
license: unknown
meta-spec:
  version: 1.4
name: AI-FANN-Evolving
no_index:
  directory:
    - t
    - inc
requires:
  AI::FANN: 0
  Algorithm::Genetic::Diploid: 0
version: 0.4

MYMETA.json

   },
   "name" : "AI-FANN-Evolving",
   "no_index" : {
      "directory" : [
         "t",
         "inc"
      ]
   },
   "prereqs" : {
      "build" : {
         "requires" : {
            "ExtUtils::MakeMaker" : "0"
         }
      },
      "configure" : {
         "requires" : {
            "ExtUtils::MakeMaker" : "0"
         }
      },
      "runtime" : {
         "requires" : {
            "AI::FANN" : "0",
            "Algorithm::Genetic::Diploid" : "0"
         }
      }
   },
   "release_status" : "stable",
   "version" : "0.4"
}

MYMETA.yml

---
abstract: 'artificial neural network that evolves'
author:
  - 'Rutger Vos <rutger.vos@naturalis.nl>'
build_requires:
  ExtUtils::MakeMaker: 0
configure_requires:
  ExtUtils::MakeMaker: 0
dynamic_config: 0
generated_by: 'ExtUtils::MakeMaker version 6.8, CPAN::Meta::Converter version 2.132830'
license: unknown
meta-spec:
  version: 1.4
name: AI-FANN-Evolving
no_index:
  directory:
    - t
    - inc
requires:
  AI::FANN: 0
  Algorithm::Genetic::Diploid: 0
version: 0.4

lib/AI/FANN/Evolving.pm

=head1 NAME
 
AI::FANN::Evolving - artificial neural network that evolves
 
=head1 METHODS
 
=over
 
=item new
 
Constructor requires either a 'file' argument, or 'data' and 'neurons' arguments. Optionally takes a
'connection_rate' argument for sparse topologies. Returns a wrapper around L<AI::FANN>.
 
=cut
 
sub new {
        my $class = shift;
        my %args  = @_;
        my $self  = {};
        bless $self, $class;
        $self->_init(%args);

lib/AI/FANN/Evolving.pm

=item error
 
Getter/setter for the error threshold. Default is 0.0001.
 
=cut
 
sub error {
        my $self = shift;
        if ( @_ ) {
                my $value = shift;
                $log->debug("setting error threshold to $value");
                return $self->{'error'} = $value;
        }
        else {
                $log->debug("getting error threshold");
                return $self->{'error'};
        }
}
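For illustration, given an object constructed as above (the value is arbitrary):

 # tighten the error threshold before training, then read it back
 $ann->error( 0.00001 );
 my $threshold = $ann->error;   # 0.00001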
 
=item epochs
 
Getter/setter for the number of training epochs. Default is 500000.
 
=cut

lib/AI/FANN/Evolving.pm

                return $self->{'epochs'} = $value;
        }
        else {
                $log->debug("getting training epochs");
                return $self->{'epochs'};
        }
}
 
=item epoch_printfreq
 
Getter/setter for the number of epochs after which progress is printed. Default is 1000.
 
=cut
 
sub epoch_printfreq {
        my $self = shift;
        if ( @_ ) {
                my $value = shift;
                $log->debug("setting epoch printfreq to $value");
                return $self->{'epoch_printfreq'} = $value;
        }

lib/AI/FANN/Evolving.pm

                return $self->{'neurons'} = $value;
        }
        else {
                $log->debug("getting neurons");
                return $self->{'neurons'};
        }
}
 
=item neuron_printfreq
 
Getter/setter for the number of cascading neurons after which progress is printed.
Default is 10.
 
=cut
 
sub neuron_printfreq {
        my $self = shift;
        if ( @_ ) {
                my $value = shift;
                $log->debug("setting neuron printfreq to $value");
                return $self->{'neuron_printfreq'} = $value;

lib/AI/FANN/Evolving.pm

        else {
                $log->debug("getting activation function");
                return $self->{'activation_function'};
        }
}
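For illustration only; whether the setter expects one of the activation constants
exported by L<AI::FANN> (as assumed here) or some other token is an assumption:

 use AI::FANN ':all';   # exports constants such as FANN_SIGMOID_SYMMETRIC
 $ann->activation_function( FANN_SIGMOID_SYMMETRIC );
 my $af = $ann->activation_function;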
 
# this is here so that we can trap method calls that need to be
# delegated to the FANN object. at this point we're not even
# going to care whether the FANN object implements these methods:
# if it doesn't we get the normal error for unknown methods, which
# the user then will have to resolve.
sub AUTOLOAD {
        my $self = shift;
        my $method = $AUTOLOAD;
        $method =~ s/.+://;
         
        # ignore all caps methods
        if ( $method !~ /^[A-Z]+$/ ) {
         
                # determine whether to invoke on an object or a package
                my $invocant;
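In practice this delegation means FANN-level methods can be called directly on the
wrapper object. A hedged sketch, assuming the underlying L<AI::FANN> object provides
C<num_inputs>, C<num_outputs> and C<run> as its published API does:

 # calls the wrapper does not implement itself are forwarded to the FANN object
 my $inputs  = $ann->num_inputs;
 my $outputs = $ann->num_outputs;
 my $result  = $ann->run( [ 0.5, -0.25 ] );   # arrayref in, arrayref out; values are placeholders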

lib/AI/FANN/Evolving/Experiment.pm

Runs the experiment!
 
=cut
 
sub run {
        my $self = shift;
        my $log = $self->logger;
         
        $log->info("going to run experiment");
        my @results;
        for my $i ( 1 .. $self->ngens ) {
         
                # modify workdir
                my $wd = $self->{'workdir'};
                $wd =~ s/\d+$/$i/;
                $self->{'workdir'} = $wd;
                mkdir $wd;
                 
                my $optimum = $self->optimum($i);
                 
                $log->debug("optimum at generation $i is $optimum");
                my ( $fittest, $fitness ) = $self->population->turnover($i,$self->env,$optimum);
                push @results, [ $fittest, $fitness ];
        }
        # sort ascending on the fitness score (error), so the list assignment picks the lowest-error individual
        my ( $fittest, $fitness ) = map { @{ $_ } } sort { $a->[1] <=> $b->[1] } @results;
        return $fittest, $fitness;
}
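Given a fully configured experiment object (its construction is not shown here), the
return value can be captured directly:

 my ( $fittest, $fitness ) = $experiment->run;
 printf "best individual reached fitness (error) %f\n", $fitness;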
 
=item optimum
 
The optimal fitness is zero error in the ANN's classification. This method returns
that value: 0.
 
=cut

lib/AI/FANN/Evolving/Gene.pm

        }
        else {
                $log->debug("getting ANN");
                return $self->{'ann'};
        }
}
 
=item make_function
 
Returns a code reference to the fitness function which, when executed, returns a fitness
value and writes the corresponding ANN to a file.
 
=cut
 
sub make_function {
        my $self = shift;
        my $ann = $self->ann;
        my $error_func = $self->experiment->error_func;
        $log->debug("making fitness function");
         
        # build the fitness function

lib/AI/FANN/Evolving/Gene.pm

                         
                        use Data::Dumper;
                        $log->debug("Observed: ".Dumper($observed));
                        $log->debug("Expected: ".Dumper($expected));
                         
                        # invoke the error_func provided by the experiment
                        $fitness += $error_func->($observed,$expected);
                }
                $fitness /= $env->length;
                 
                # store result
                $self->{'fitness'} = $fitness;
 
                # store the AI         
                my $outfile = $self->experiment->workdir . "/${fitness}.ann";
                $self->ann->save($outfile);
                return $self->{'fitness'};
        }
}
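The error function itself comes from the experiment object. As an illustration only, a
mean-squared-error callback compatible with the C<< $error_func->($observed,$expected) >>
call above might look like this:

 # hypothetical error function: mean squared difference between the
 # observed and expected output arrayrefs
 my $error_func = sub {
     my ( $observed, $expected ) = @_;
     my $sum = 0;
     $sum += ( $observed->[$_] - $expected->[$_] ) ** 2 for 0 .. $#{ $observed };
     return $sum / scalar @{ $observed };
 };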
 
=item fitness
 
Returns the fitness value that was stored when the fitness function was expressed.
 
=cut
 
sub fitness { shift->{'fitness'} }
 
=item clone
 
Clones the object
 
=cut

lib/AI/FANN/Evolving/TrainData.pm

=head1 NAME
 
AI::FANN::Evolving::TrainData - wrapper class for FANN data
 
=head1 METHODS
 
=over
 
=item new
 
Constructor takes named arguments. By default, it ignores the column named ID and
treats the column named CLASS as the classifier (dependent) column.
 
=cut
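A hedged usage sketch; whether the constructor reads a file directly (the 'file'
argument below) is an assumption, and the feature column names are made up:

 # illustrative tab-separated table, with ID ignored and CLASS as classifier:
 #
 #   ID      length  width   CLASS
 #   1       0.12    3.40    1
 #   2       0.98    1.10    -1

 use AI::FANN::Evolving::TrainData;

 my $data = AI::FANN::Evolving::TrainData->new( 'file' => 'train.tsv' );
 my $size = $data->size;      # number of records, as used in the test suite
 my $fann = $data->to_fann;   # converted form that the wrapper can train on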
 
sub new {
        my $self = shift->SUPER::new(
                'ignore'    => [ 'ID'    ],
                'dependent' => [ 'CLASS' ],
                'header'    => {},
                'table'     => [],

lib/AI/FANN/Evolving/TrainData.pm

my %seen;
for my $dep ( @dependents ) {
        my $key = join '/', @{ $dep };
        $seen{$key}++;
}
 
# adjust counts to sample size
for my $key ( keys %seen ) {
        $log->debug("counts: $key => $seen{$key}");
        $seen{$key} = int( $seen{$key} * $sample );
        $log->debug("rescaled: $key => $seen{$key}");
}
 
# start the sampling   
my @dc = map { $self->{'header'}->{$_} } $self->dependent_columns;
my @new_table; # we will populate this
my @table = @{ $clone1->{'table'} }; # work on cloned instance
 
# as long as there is still sampling to do
SAMPLE: while( grep { !!$_ } values %seen ) {
        for my $i ( 0 .. $#table ) {

script/aivolver

Output directory.
 
=back
 
=back
 
=head1 DESCRIPTION
 
Artificial neural networks (ANNs) are decision-making machines that develop their
capabilities by training on input data. During this training, the ANN builds a
topology of input neurons, hidden neurons, and output neurons that respond to signals
in ways (and with sensitivities) that are determined by a variety of parameters. How
these parameters will interact to give rise to the final functionality of the ANN is
hard to predict I<a priori>, but can be optimized in a variety of ways.
 
C<aivolver> is a program that does this by evolving parameter settings using a genetic
algorithm that runs for a number of generations determined by C<ngens>. During this
process it writes the intermediate ANNs into the C<workdir> until the best result is
written to the C<outfile>.
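As a hedged sketch of what can be done with that result (the file name and input values
are placeholders), the evolved network can presumably be loaded back through the
wrapper's 'file' constructor and used for classification:

 use AI::FANN::Evolving;

 my $best   = AI::FANN::Evolving->new( 'file' => 'evolved.ann' );
 my $output = $best->run( [ 0.12, 3.40 ] );   # one value per feature column
 print "predicted class value: $output->[0]\n";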
 
The genetic algorithm proceeds by simulating a population of C<individual_count> diploid
individuals that each have C<chromosome_count> chromosomes whose C<gene_count> genes
encode the parameters of the ANN. During each generation, each individual is trained
on a sample data set, and the individual's fitness is then calculated by testing its
predictive abilities on an out-of-sample data set. The fittest individuals (whose
fraction of the total is determined by C<reproduction_rate>) are selected for breeding
in proportion to their fitness.
 
Before breeding, each individual undergoes a process of mutation, where a fraction of
the ANN parameters is randomly perturbed. Both the size of the fraction and the
maximum extent of the perturbation are determined by C<mutation_rate>. Subsequently, the
homologous chromosomes recombine (i.e. exchange parameters) at a rate determined by
C<crossover_rate>, which then results in (haploid) gametes. These gametes are fused with
those of other individuals to give rise to the next generation.
 
=head1 TRAINING AND TEST DATA
 
The data used for training the ANNs and for subsequently testing their predictive
abilities are provided as tab-separated tables. An example of an input data set is here:
 
 
The tables have a header row, with at least the following columns:

t/01-run.t

#!/usr/bin/perl
use Test::More 'no_plan';
use strict;
use FindBin qw($Bin);
use File::Temp 'tempdir';
 
# attempt to load the classes of interest
BEGIN {
        use_ok('AI::FANN::Evolving::Factory');
        use_ok('AI::FANN::Evolving::TrainData');
        use_ok('AI::FANN::Evolving');
        use_ok('Algorithm::Genetic::Diploid::Logger');
}
 
# create and configure logger
my $log = new_ok('Algorithm::Genetic::Diploid::Logger');
$log->level( 'level' => 4 );

t/03-fann-wrapper.t

ok( $data->size == 4, "instantiate data correctly" );
 
##########################################################################################
# train the FANN object on trivial data
my $ann = AI::FANN::Evolving->new( 'data' => $data, 'epoch_printfreq' => 0 );
$ann->train($data->to_fann);
 
# run the network
# this is the xor example
my @result = ( -1, +1, +1, -1 );
my @input  = ( [ -1, -1 ], [ -1, +1 ], [ +1, -1 ], [ +1, +1 ] );
for my $i ( 0 .. $#input ) {
        my $output = $ann->run($input[$i]);
        ok( ! ( $result[$i] < 0 xor $output->[0] < 0 ), "observed and expected signs match" );
}


