# lib/AI/FANN/Evolving.pm
'FANN_GAUSSIAN_STEPWISE' => FANN_GAUSSIAN_STEPWISE,
# 'FANN_ELLIOT' => FANN_ELLIOT, # range is between 0 and 1
'FANN_ELLIOT_SYMMETRIC' => FANN_ELLIOT_SYMMETRIC,
# 'FANN_LINEAR_PIECE' => FANN_LINEAR_PIECE, # range is between 0 and 1
'FANN_LINEAR_PIECE_SYMMETRIC' => FANN_LINEAR_PIECE_SYMMETRIC,
'FANN_SIN_SYMMETRIC' => FANN_SIN_SYMMETRIC,
'FANN_COS_SYMMETRIC' => FANN_COS_SYMMETRIC,
# 'FANN_SIN' => FANN_SIN, # range is between 0 and 1
# 'FANN_COS' => FANN_COS, # range is between 0 and 1
},
'errorfunc' => {
'FANN_ERRORFUNC_LINEAR' => FANN_ERRORFUNC_LINEAR,
'FANN_ERRORFUNC_TANH' => FANN_ERRORFUNC_TANH,
},
'stopfunc' => {
'FANN_STOPFUNC_MSE' => FANN_STOPFUNC_MSE,
# 'FANN_STOPFUNC_BIT' => FANN_STOPFUNC_BIT,
}
);
my %constant;
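# flatten the per-enum lookup tables into a single name-to-value map, so
# that constructor arguments can refer to constants by their string names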
for my $hashref ( values %enum ) {
while( my ( $k, $v ) = each %{ $hashref } ) {
$constant{$k} = $v;
}
}
my %default = (
'error' => 0.0001,
'epochs' => 5000,
'train_type' => 'ordinary',
'epoch_printfreq' => 100,
'neuron_printfreq' => 0,
'neurons' => 15,
'activation_function' => FANN_SIGMOID_SYMMETRIC,
);
=head1 NAME

AI::FANN::Evolving

=cut
sub _layer_properties {
(
# neuron_activation_function => 'activationfunc',
# neuron_activation_steepness => \&_mutate_double,
)
}
sub _scalar_properties {
(
training_algorithm => 'train',
train_error_function => 'errorfunc',
train_stop_function => 'stopfunc',
learning_rate => \&_mutate_double,
learning_momentum => \&_mutate_double,
quickprop_decay => \&_mutate_double,
quickprop_mu => \&_mutate_double,
rprop_increase_factor => \&_mutate_double,
rprop_decrease_factor => \&_mutate_double,
rprop_delta_min => \&_mutate_double,
rprop_delta_max => \&_mutate_double,
cascade_output_change_fraction => \&_mutate_double,
# (further cascade_* properties elided)
)
}

# assumed context: defaults() translates enum constant names to their
# numeric values and records each argument in %default
sub defaults {
my %args = @_;
for my $key ( keys %args ) {
if ( exists $constant{ $args{$key} } ) {
$args{$key} = $constant{$args{$key}};
}
$default{$key} = $args{$key};
}
return %default;
}
sub _init {
my $self = shift;
my %args = @_;
for ( qw(error epochs train_type epoch_printfreq neuron_printfreq neurons activation_function) ) {
$self->{$_} = $args{$_} // $default{$_};
}
return $self;
}
=item clone
Clones the object
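
A usage sketch:

    # work on a copy so the original network is untouched
    my $copy = $ann->clone;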
=cut
# (sub clone omitted in this listing)

# assumed context: train() dispatches on the configured train_type
# ('cascade' vs ordinary), inferred from the two branches below
sub train {
my ( $self, $data ) = @_;
if ( $self->{'train_type'} eq 'cascade' ) {
$log->debug("cascade training");
# set the cascade activation functions
$self->cascade_activation_functions( $self->activation_function );
# train
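# AI::FANN's cascade training grows the network one candidate neuron at a
# time; arguments: ( $data, $max_neurons, $neurons_between_reports, $desired_error )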
$self->{'ann'}->cascadetrain_on_data(
$data,
$self->neurons,
$self->neuron_printfreq,
$self->error,
);
}
else {
$log->debug("normal training");
# set the hidden and output activation functions
$self->hidden_activation_function( $self->activation_function );
$self->output_activation_function( $self->activation_function );
# train
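# fixed-topology training; arguments:
# ( $data, $max_epochs, $epochs_between_reports, $desired_error )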
$self->{'ann'}->train_on_data(
$data,
$self->epochs,
$self->epoch_printfreq,
$self->error,
);
}
}
=item enum_properties
Returns a hash whose keys are enum names and whose values are hash references
mapping the possible states of each enum to their constants
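
For example (a sketch):

    my %enum = AI::FANN::Evolving->enum_properties;
    # $enum{'errorfunc'} maps 'FANN_ERRORFUNC_LINEAR' and
    # 'FANN_ERRORFUNC_TANH' to their numeric constants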
=cut
=item error
Getter/setter for the error threshold at which training stops. Default is 0.0001
=cut
sub error {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->debug("setting error threshold to $value");
return $self->{'error'} = $value;
}
else {
$log->debug("getting error threshold");
return $self->{'error'};
}
}
=item epochs
Getter/setter for the number of training epochs, default is 5000
=cut
sub epochs {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->debug("setting epochs to $value");
return $self->{'epochs'} = $value;
}
else {
$log->debug("getting epochs");
return $self->{'epochs'};
}
}

# (accessors for epoch_printfreq, neuron_printfreq and neurons follow the
# same pattern in the full source)

=item activation_function

Getter/setter for the activation function, default is FANN_SIGMOID_SYMMETRIC

=cut

# assumed reconstruction, following the same accessor pattern as error()
sub activation_function {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->debug("setting activation function to $value");
return $self->{'activation_function'} = $value;
}
else {
$log->debug("getting activation function");
return $self->{'activation_function'};
}
}
# This is here so that we can trap method calls that need to be
# delegated to the FANN object. At this point we're not even going
# to care whether the FANN object implements these methods: if it
# doesn't, we get the normal error for unknown methods, which the
# user will then have to resolve.
sub AUTOLOAD {
my $self = shift;
my $method = $AUTOLOAD;
$method =~ s/.+://;
# ignore all-caps methods such as DESTROY
if ( $method !~ /^[A-Z]+$/ ) {
# determine whether to invoke on an object or a package
# (AUTOLOAD continues in the full source)

# lib/AI/FANN/Evolving/Experiment.pm

=item new
Constructor. Takes named arguments and sets the default factory to
L<AI::FANN::Evolving::Factory>.
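
A hypothetical construction sketch (the C<workdir> argument is per the
accessor below):

    my $exp = AI::FANN::Evolving::Experiment->new( 'workdir' => 'results' );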
=cut
sub new { shift->SUPER::new( 'factory' => AI::FANN::Evolving::Factory->new, @_ ) }
=item workdir
Getter/setter for the workdir where L<AI::FANN> artificial neural networks will be
written during the experiment. The files will be named after the ANN's error, which
needs to be minimized.
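
For example, a network that reaches an error of 0.0125 is written as:

    <workdir>/0.0125.ann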
=cut
sub workdir {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->info("assigning new workdir $value");
$self->{'workdir'} = $value;
# (tail of workdir() and head of run() omitted; below is run()'s generation
# loop, which collects the fittest individual of each generation)
$log->debug("optimum at generation $i is $optimum");
my ( $fittest, $fitness ) = $self->population->turnover($i,$self->env,$optimum);
push @results, [ $fittest, $fitness ];
}
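# sort the [fittest, fitness] pairs by fitness, ascending; flattening the
# sorted list and taking the first two values yields the overall best pair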
my ( $fittest, $fitness ) = map { @{ $_ } } sort { $a->[1] <=> $b->[1] } @results;
return $fittest, $fitness;
}
=item optimum
The optimal fitness is zero error in the ANN's classification. This method returns
that value: 0.
=cut
sub optimum { 0 }
sub _sign {
my ( $obs, $exp ) = @_;
my $fitness = 0;
for my $i ( 0 .. $#{ $obs } ) {
# assumed loop body: count how often observed and expected outputs
# differ in sign, then average (per the error_func docs below)
$fitness += ( ( $obs->[$i] > 0 ) != ( $exp->[$i] > 0 ) ) ? 1 : 0;
}
return $fitness / scalar(@{$obs});
}
sub _mse {
my ( $obs, $exp ) = @_;
my $fitness = 0;
for my $i ( 0 .. $#{ $obs } ) {
# the +1 offsets on both terms cancel out; this is simply the squared difference
$fitness += ( $obs->[$i] - $exp->[$i] ) ** 2;
}
return $fitness / scalar(@{$obs});
}
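
# note: identical observed and expected outputs yield an error of 0, the
# optimum that the experiment minimizes towards (see optimum() above)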
=item error_func

Returns a function to compute the error. Given an argument, the following can happen:

=over

=item 'sign'

Error is the average number of times observed and expected have different signs.

=item 'mse'

Error is the mean squared difference between observed and expected.

=item CODE

The error function is the provided code reference.

=back
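
A usage sketch (C<$exp> is an experiment object; C<\&my_error> stands for
any custom function of observed and expected array references):

    $exp->error_func('mse');        # built-in mean squared error
    $exp->error_func( \&my_error ); # or any custom code reference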
=cut
sub error_func {
my $self = shift;
# process the argument
if ( @_ ) {
my $arg = shift;
if ( ref $arg eq 'CODE' ) {
$self->{'error_func'} = $arg;
$log->info("using custom error function");
}
elsif ( $arg eq 'sign' ) {
$self->{'error_func'} = \&_sign;
$log->info("using sign test error function");
}
elsif ( $arg eq 'mse' ) {
$self->{'error_func'} = \&_mse;
$log->info("using MSE error function");
}
else {
$log->warn("don't understand error func '$arg'");
}
}
# map the constructor-supplied argument
if ( $self->{'error_func'} and $self->{'error_func'} eq 'sign' ) {
$self->{'error_func'} = \&_sign;
$log->info("using error function 'sign'");
}
elsif ( $self->{'error_func'} and $self->{'error_func'} eq 'mse' ) {
$self->{'error_func'} = \&_mse;
$log->info("using error function 'mse'");
}
return $self->{'error_func'} || \&_mse;
}
1;
# lib/AI/FANN/Evolving/Gene.pm
=item make_function
Returns a code reference to the fitness function, which when executed returns a fitness
value and writes the corresponding ANN to file
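
A usage sketch:

    my $fitness_func = $gene->make_function;
    my $fitness = $fitness_func->();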
=cut
sub make_function {
my $self = shift;
my $ann = $self->ann;
my $error_func = $self->experiment->error_func;
$log->debug("making fitness function");
# build the fitness function
return sub {
# train the AI
$ann->train( $self->experiment->traindata );
# isa TrainingData object, this is what we need to use
# to make our prognostications. It is a different data
# set than the one used for training (out of sample)
my $env = $self->experiment->env; # assumed accessor, cf. run() in Experiment.pm
my $fitness = 0;
# iterate over the list of input/output pairs
for my $i ( 0 .. ( $env->length - 1 ) ) {
my ( $input, $expected ) = $env->data($i);
my $observed = $ann->run($input);
use Data::Dumper;
$log->debug("Observed: ".Dumper($observed));
$log->debug("Expected: ".Dumper($expected));
# invoke the error_func provided by the experiment
$fitness += $error_func->($observed,$expected);
}
$fitness /= $env->length;
# store result
$self->{'fitness'} = $fitness;
# store the AI
my $outfile = $self->experiment->workdir . "/${fitness}.ann";
$self->ann->save($outfile);
return $self->{'fitness'};
};
}

# script/aivolver
B<***NO LONGER ACCURATE, CONSULT THE YAML CONFIG FILES***>
=over
=item B<E<lt>config.ymlE<gt>>
If the first command line argument is a file location, this will be interpreted as the
location of a configuration file in YAML syntax structured as in this
example: L<https://raw.github.com/naturalis/ai-fann-evolving/master/examples/conf.yml>.
Subsequent command line arguments can then be provided that override the defaults in this
configuration file.
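
For example (a sketch; the override option shown is illustrative only):

    aivolver conf.yml --workdir=/tmp/run1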
=item B<-h/--help/-?>
Prints help message and exits.
=item B<-m/--manual>
Prints manual page and exits.