AI-FANN-Evolving

 view release on metacpan or  search on metacpan

lib/AI/FANN/Evolving.pm  view on Meta::CPAN

35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
                'FANN_GAUSSIAN_STEPWISE'          => FANN_GAUSSIAN_STEPWISE,
#               'FANN_ELLIOT'                     => FANN_ELLIOT, # range is between 0 and 1
                'FANN_ELLIOT_SYMMETRIC'           => FANN_ELLIOT_SYMMETRIC,
#               'FANN_LINEAR_PIECE'               => FANN_LINEAR_PIECE, # range is between 0 and 1
                'FANN_LINEAR_PIECE_SYMMETRIC'     => FANN_LINEAR_PIECE_SYMMETRIC,
                'FANN_SIN_SYMMETRIC'              => FANN_SIN_SYMMETRIC,
                'FANN_COS_SYMMETRIC'              => FANN_COS_SYMMETRIC,
#               'FANN_SIN'                        => FANN_SIN, # range is between 0 and 1
#               'FANN_COS'                        => FANN_COS, # range is between 0 and 1
        },
        'errorfunc' => {
                'FANN_ERRORFUNC_LINEAR' => FANN_ERRORFUNC_LINEAR,
                'FANN_ERRORFUNC_TANH'   => FANN_ERRORFUNC_TANH,     
        },
        'stopfunc' => {
                'FANN_STOPFUNC_MSE' => FANN_STOPFUNC_MSE,
#               'FANN_STOPFUNC_BIT' => FANN_STOPFUNC_BIT,
        }      
);
 
# Flatten every enum's name => value pairs into one lookup table, so that a
# constant supplied by name (as a string) can be resolved to its numeric value.
my %constant;
for my $enum_hash ( values %enum ) {
        @constant{ keys %{ $enum_hash } } = values %{ $enum_hash };
}
 
# Default property values applied by _init() when the caller omits an argument.
my %default = (
        error               => 0.0001,                  # MSE threshold at which training stops
        epochs              => 5000,                    # maximum number of training epochs
        train_type          => 'ordinary',              # 'ordinary' or cascade training
        epoch_printfreq     => 100,                     # progress report frequency (epochs)
        neuron_printfreq    => 0,                       # progress report frequency (neurons)
        neurons             => 15,                      # number of neurons
        activation_function => FANN_SIGMOID_SYMMETRIC,  # symmetric range (-1..1)
);
 
=head1 NAME

lib/AI/FANN/Evolving.pm  view on Meta::CPAN

327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
# Returns the set of mutable layer-level properties. Currently none are
# enabled; the candidates below are retained for reference but disabled.
#   neuron_activation_function  => 'activationfunc',
#   neuron_activation_steepness => \&_mutate_double,
sub _layer_properties {
        return ();
}
 
sub _scalar_properties {
        (
                training_algorithm                   => 'train',
                train_error_function                 => 'errorfunc',
                train_stop_function                  => 'stopfunc',
                learning_rate                        => \&_mutate_double,
                learning_momentum                    => \&_mutate_double,
                quickprop_decay                      => \&_mutate_double,
                quickprop_mu                         => \&_mutate_double,
                rprop_increase_factor                => \&_mutate_double,
                rprop_decrease_factor                => \&_mutate_double,
                rprop_delta_min                      => \&_mutate_double,
                rprop_delta_max                      => \&_mutate_double,
                cascade_output_change_fraction       => \&_mutate_double,

lib/AI/FANN/Evolving.pm  view on Meta::CPAN

372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
                        $args{$key} = $constant{$args{$key}};
                }
                $default{$key} = $args{$key};
        }
        return %default;
}
 
# Initializes the object's training properties from the supplied named
# arguments, falling back to the values in %default for anything the
# caller did not define. Returns the object to allow chaining.
sub _init {
        my ( $self, %args ) = @_;
        my @props = qw(error epochs train_type epoch_printfreq neuron_printfreq neurons activation_function);
        for my $prop ( @props ) {
                $self->{$prop} = defined $args{$prop} ? $args{$prop} : $default{$prop};
        }
        return $self;
}
 
=item clone
 
Clones the object
 
=cut

lib/AI/FANN/Evolving.pm  view on Meta::CPAN

429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
                $log->debug("cascade training");
         
                # set learning curve
                $self->cascade_activation_functions( $self->activation_function );
                 
                # train
                $self->{'ann'}->cascadetrain_on_data(
                        $data,
                        $self->neurons,
                        $self->neuron_printfreq,
                        $self->error,
                );
        }
        else {
                $log->debug("normal training");
         
                # set learning curves
                $self->hidden_activation_function( $self->activation_function );
                $self->output_activation_function( $self->activation_function );
                 
                # train
                $self->{'ann'}->train_on_data(
                        $data,
                        $self->epochs,
                        $self->epoch_printfreq,
                        $self->error,
                );     
        }
}
 
=item enum_properties
 
Returns a hash whose keys are names of enums and values the possible states for the
enum
 
=cut
 
=item error
 
Getter/setter for the error rate. Default is 0.0001
 
=cut
 
# Getter/setter for the error threshold at which training stops.
# With an argument: stores and returns the new value. Without: returns
# the current value. Default is 0.0001 (see %default).
sub error {
        my $self = shift;
        unless ( @_ ) {
                $log->debug("getting error threshold");
                return $self->{'error'};
        }
        my $value = shift;
        $log->debug("setting error threshold to $value");
        return $self->{'error'} = $value;
}
 
=item epochs
 
Getter/setter for the number of training epochs, default is 5000
 
=cut
 
sub epochs {

lib/AI/FANN/Evolving.pm  view on Meta::CPAN

596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
        }
        else {
                $log->debug("getting activation function");
                return $self->{'activation_function'};
        }
}
 
# this is here so that we can trap method calls that need to be
# delegated to the FANN object. at this point we're not even
# going to care whether the FANN object implements these methods:
# if it doesn't we get the normal error for unknown methods, which
# the user then will have to resolve.
sub AUTOLOAD {
        my $self = shift;
        my $method = $AUTOLOAD;
        $method =~ s/.+://;
         
        # ignore all caps methods
        if ( $method !~ /^[A-Z]+$/ ) {
         
                # determine whether to invoke on an object or a package

lib/AI/FANN/Evolving/Experiment.pm  view on Meta::CPAN

21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
Constructor takes named arguments, sets default factory to L<AI::FANN::Evolving::Factory>
 
=cut
 
# Constructor: delegates to the superclass, injecting a default 'factory'
# argument (an AI::FANN::Evolving::Factory) that caller-supplied named
# arguments may override.
sub new {
        my $class = shift;
        return $class->SUPER::new( 'factory' => AI::FANN::Evolving::Factory->new, @_ );
}
 
=item workdir
 
Getter/Setter for the workdir where L<AI::FANN> artificial neural networks will be
written during the experiment. The files will be named after the ANN's error, which
needs to be minimized.
 
=cut
 
sub workdir {
        my $self = shift;
        if ( @_ ) {
                my $value = shift;
                $log->info("assigning new workdir $value");
                $self->{'workdir'} = $value;

lib/AI/FANN/Evolving/Experiment.pm  view on Meta::CPAN

90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
                $log->debug("optimum at generation $i is $optimum");
                my ( $fittest, $fitness ) = $self->population->turnover($i,$self->env,$optimum);
                push @results, [ $fittest, $fitness ];
        }
        my ( $fittest, $fitness ) = map { @{ $_ } } sort { $a->[1] <=> $b->[1] } @results;
        return $fittest, $fitness;
}
 
=item optimum
 
The optimal fitness is zero error in the ANN's classification. This method returns
that value: 0.
 
=cut
 
# The optimal fitness: zero classification error in the ANN.
sub optimum {
        return 0;
}
 
sub _sign {
        my ( $obs, $exp ) = @_;
        my $fitness = 0;
        for my $i ( 0 .. $#{ $obs } ) {

lib/AI/FANN/Evolving/Experiment.pm  view on Meta::CPAN

115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
# Computes the mean squared error between two equal-length array references
# of observed and expected values. Returns 0 for empty input.
sub _mse {
        my ( $obs, $exp ) = @_;

        # guard against empty input, which previously caused an
        # "Illegal division by zero" fatal error
        return 0 unless @{ $obs };

        my $fitness = 0;
        for my $i ( 0 .. $#{ $obs } ) {
                # compute the difference directly: the previous
                # (1+$obs) - (1+$exp) form is algebraically identical but
                # loses floating point precision for small magnitudes
                $fitness += ( $obs->[$i] - $exp->[$i] ) ** 2;
        }
        return $fitness / scalar @{ $obs };
}
 
=item error_func
 
Returns a function to compute the error. Given an argument, the following can happen:
 'sign' => error is the average number of times observed and expected have different signs
 'mse'  => error is the mean squared difference between observed and expected
 CODE   => error function is the provided code reference
 
=back
 
=cut
 
# Getter/setter for the experiment's error function. Accepts a CODE
# reference, or one of the symbolic names 'sign' / 'mse', which are mapped
# to the built-in implementations. Also lazily maps a symbolic name that
# was stored directly by the constructor. Defaults to \&_mse.
sub error_func {
        my $self = shift;
         
        # process the argument
        if ( @_ ) {
                my $arg = shift;
                if ( ref $arg eq 'CODE' ) {
                        $self->{'error_func'} = $arg;
                        $log->info("using custom error function");
                }
                elsif ( $arg eq 'sign' ) {
                        $self->{'error_func'} = \&_sign;
                        $log->info("using sign test error function");
                }
                elsif ( $arg eq 'mse' ) {
                        $self->{'error_func'} = \&_mse;
                        $log->info("using MSE error function");
                }
                else {
                        $log->warn("don't understand error func '$arg'");
                }
        }
         
        # map the constructor-supplied argument: the constructor may have
        # stored a symbolic name rather than a code reference
        my $func = $self->{'error_func'};
        if ( defined $func and not ref $func ) {
                if ( $func eq 'sign' ) {
                        $self->{'error_func'} = \&_sign;
                        $log->info("using error function 'sign'");
                }
                elsif ( $func eq 'mse' ) {
                        $self->{'error_func'} = \&_mse;
                        $log->info("using error function 'mse'");
                }
                else {
                        # BUGFIX: an unrecognized string used to be returned
                        # verbatim and crashed when invoked as a code reference;
                        # fall back to the MSE default instead
                        $log->warn("unknown error func '$func', falling back to MSE");
                        $self->{'error_func'} = \&_mse;
                }
        }
         
        return $self->{'error_func'} || \&_mse;
}
 
1;

lib/AI/FANN/Evolving/Gene.pm  view on Meta::CPAN

58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
=item make_function
 
Returns a code reference to the fitness function, which when executed returns a fitness
value and writes the corresponding ANN to file
 
=cut
 
sub make_function {
        my $self = shift;
        my $ann = $self->ann;
        my $error_func = $self->experiment->error_func;
        $log->debug("making fitness function");
         
        # build the fitness function
        return sub {           
         
                # train the AI
                $ann->train( $self->experiment->traindata );
         
                # isa TrainingData object, this is what we need to use
                # to make our prognostications. It is a different data

lib/AI/FANN/Evolving/Gene.pm  view on Meta::CPAN

86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
# iterate over the list of input/output pairs
for my $i ( 0 .. ( $env->length - 1 ) ) {
        my ( $input, $expected ) = $env->data($i);
        my $observed = $ann->run($input);
         
        use Data::Dumper;
        $log->debug("Observed: ".Dumper($observed));
        $log->debug("Expected: ".Dumper($expected));
         
        # invoke the error_func provided by the experiment
        $fitness += $error_func->($observed,$expected);
}
$fitness /= $env->length;
 
# store result
$self->{'fitness'} = $fitness;
 
# store the AI         
my $outfile = $self->experiment->workdir . "/${fitness}.ann";
$self->ann->save($outfile);
return $self->{'fitness'};



( run in 0.905 second using v1.01-cache-2.11-cpan-55f5a4728d2 )