AI-FANN-Evolving

lib/AI/FANN/Evolving.pm
	'FANN_GAUSSIAN_STEPWISE'        => FANN_GAUSSIAN_STEPWISE,
	# 'FANN_ELLIOT'                 => FANN_ELLIOT, # range is between 0 and 1
	'FANN_ELLIOT_SYMMETRIC'         => FANN_ELLIOT_SYMMETRIC,
	# 'FANN_LINEAR_PIECE'           => FANN_LINEAR_PIECE, # range is between 0 and 1
	'FANN_LINEAR_PIECE_SYMMETRIC'   => FANN_LINEAR_PIECE_SYMMETRIC,
	'FANN_SIN_SYMMETRIC'            => FANN_SIN_SYMMETRIC,
	'FANN_COS_SYMMETRIC'            => FANN_COS_SYMMETRIC,
	# 'FANN_SIN'                    => FANN_SIN, # range is between 0 and 1
	# 'FANN_COS'                    => FANN_COS, # range is between 0 and 1
	},
	'errorfunc' => {
		'FANN_ERRORFUNC_LINEAR' => FANN_ERRORFUNC_LINEAR,
		'FANN_ERRORFUNC_TANH'   => FANN_ERRORFUNC_TANH,
	},
	'stopfunc' => {
		'FANN_STOPFUNC_MSE' => FANN_STOPFUNC_MSE,
		# 'FANN_STOPFUNC_BIT' => FANN_STOPFUNC_BIT,
	}
);
# flatten the nested enum tables into a single name => constant lookup
my %constant;
for my $hashref ( values %enum ) {
	while ( my ( $k, $v ) = each %{ $hashref } ) {
		$constant{$k} = $v;
	}
}
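# The flattened %constant table lets a caller-supplied string be resolved to
# its numeric FANN constant, which the defaults-handling code further down
# relies on. An illustrative sketch (keys are the enum names listed above):
my $example_stopfunc = $constant{'FANN_STOPFUNC_MSE'};
my $example_actfunc  = $constant{'FANN_SIGMOID_SYMMETRIC'};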
my %default = (
	'error'               => 0.0001,
	'epochs'              => 5000,
	'train_type'          => 'ordinary',
	'epoch_printfreq'     => 100,
	'neuron_printfreq'    => 0,
	'neurons'             => 15,
	'activation_function' => FANN_SIGMOID_SYMMETRIC,
);
=head1 NAME
lib/AI/FANN/Evolving.pm

sub _layer_properties {
	(
		# neuron_activation_function  => 'activationfunc',
		# neuron_activation_steepness => \&_mutate_double,
	)
}
sub _scalar_properties {
	(
		training_algorithm             => 'train',
		train_error_function           => 'errorfunc',
		train_stop_function            => 'stopfunc',
		learning_rate                  => \&_mutate_double,
		learning_momentum              => \&_mutate_double,
		quickprop_decay                => \&_mutate_double,
		quickprop_mu                   => \&_mutate_double,
		rprop_increase_factor          => \&_mutate_double,
		rprop_decrease_factor          => \&_mutate_double,
		rprop_delta_min                => \&_mutate_double,
		rprop_delta_max                => \&_mutate_double,
		cascade_output_change_fraction => \&_mutate_double,
lib/AI/FANN/Evolving.pm
			$args{$key} = $constant{ $args{$key} };
		}
		$default{$key} = $args{$key};
	}
	return %default;
}

sub _init {
	my $self = shift;
	my %args = @_;
	for ( qw(error epochs train_type epoch_printfreq neuron_printfreq neurons activation_function) ) {
		$self->{$_} = $args{$_} // $default{$_};
	}
	return $self;
}
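# Illustrative construction sketch (an assumption, not code from this module:
# it presumes new() forwards its named arguments to _init() above). Any key
# that is omitted keeps its value from %default, so 'error' stays 0.0001 here.
my $example_ann = AI::FANN::Evolving->new(
	'epochs'     => 1000,        # override the default of 5000
	'train_type' => 'cascade',   # selects cascade training, see below
);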
=item clone
Clones the object
=cut
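# Illustrative sketch: clone() is assumed to return an independent copy, so
# mutating the clone leaves the original untouched.
my $example_copy = $example_ann->clone;
$example_copy->error(0.01);   # $example_ann->error is unchanged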
lib/AI/FANN/Evolving.pm
		$log->debug("cascade training");

		# set learning curve
		$self->cascade_activation_functions( $self->activation_function );

		# train
		$self->{'ann'}->cascadetrain_on_data(
			$data,
			$self->neurons,
			$self->neuron_printfreq,
			$self->error,
		);
	}
	else {
		$log->debug("normal training");

		# set learning curves
		$self->hidden_activation_function( $self->activation_function );
		$self->output_activation_function( $self->activation_function );

		# train
		$self->{'ann'}->train_on_data(
			$data,
			$self->epochs,
			$self->epoch_printfreq,
			$self->error,
		);
	}
}
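# Illustrative sketch of selecting the branch above via the train_type
# property (assumes this code lives in a public train() method, as the call
# in Gene.pm below suggests, and that $data isa AI::FANN::TrainData):
my $cascade_ann  = AI::FANN::Evolving->new( 'train_type' => 'cascade'  );
my $ordinary_ann = AI::FANN::Evolving->new( 'train_type' => 'ordinary' );
$cascade_ann->train($data);    # grows up to ->neurons hidden neurons
$ordinary_ann->train($data);   # runs up to ->epochs training epochs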
=item enum_properties

Returns a hash whose keys are the names of the enums and whose values are the
possible states for each enum

=cut
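# Illustrative sketch: drawing a random state from one of the enums, e.g.
# during mutation (assumes the returned values are hash references shaped
# like the %enum entries near the top of this file):
my %example_enums  = $example_ann->enum_properties;
my @example_states = values %{ $example_enums{'errorfunc'} };
my $random_state   = $example_states[ int rand @example_states ];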
=item error
Getter/setter for the error rate. Default is 0.0001
=cut
sub error {
	my $self = shift;
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting error threshold to $value");
		return $self->{'error'} = $value;
	}
	else {
		$log->debug("getting error threshold");
		return $self->{'error'};
	}
}
=item epochs
Getter/setter for the number of training epochs, default is 5000
=cut
sub epochs {

lib/AI/FANN/Evolving.pm
	}
	else {
		$log->debug("getting activation function");
		return $self->{'activation_function'};
	}
}
# this is here so that we can trap method calls that need to be
# delegated to the FANN object. at this point we're not even
# going to care whether the FANN object implements these methods:
# if it doesn't we get the normal error for unknown methods, which
# the user then will have to resolve.
sub AUTOLOAD {
	my $self = shift;
	my $method = $AUTOLOAD;
	$method =~ s/.+://;

	# ignore all caps methods
	if ( $method !~ /^[A-Z]+$/ ) {

		# determine whether to invoke on an object or a package
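# In effect, any AI::FANN accessor this class does not define itself can be
# called straight on the wrapper. Illustrative sketch (learning_rate is a
# real AI::FANN accessor; that it is reached via this AUTOLOAD is an
# assumption based on the comment above):
my $rate = $example_ann->learning_rate;   # not defined here, so AUTOLOAD
$example_ann->learning_rate(0.7);         # forwarded to the inner AI::FANN object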
lib/AI/FANN/Evolving/Experiment.pm

=item new

Constructor takes named arguments, sets default factory to L<AI::FANN::Evolving::Factory>

=cut
sub new { shift->SUPER::new( 'factory' => AI::FANN::Evolving::Factory->new, @_ ) }
=item workdir
Getter/Setter for the workdir where L<AI::FANN> artificial neural networks will be
written during the experiment. The files will be named after the ANN's error, which
needs to be minimized.
=cut
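# Because each network is saved under its error value (see make_function in
# Gene.pm below), the best network of a finished run can be recovered by
# sorting the saved file names numerically. An illustrative sketch, assuming
# files named "<error>.ann" and an experiment constructed with a workdir:
my $example_experiment = AI::FANN::Evolving::Experiment->new( 'workdir' => 'out' );
opendir my $dh, $example_experiment->workdir or die $!;
my @errors = sort { $a <=> $b } map { /^(.+)\.ann$/ ? $1 : () } readdir $dh;
my $fittest_ann = AI::FANN->new_from_file( $example_experiment->workdir . "/$errors[0].ann" );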
sub workdir {
	my $self = shift;
	if ( @_ ) {
		my $value = shift;
		$log->info("assigning new workdir $value");
		$self->{'workdir'} = $value;
lib/AI/FANN/Evolving/Experiment.pm
		$log->debug("optimum at generation $i is $optimum");
		my ( $fittest, $fitness ) = $self->population->turnover( $i, $self->env, $optimum );
		push @results, [ $fittest, $fitness ];
	}
	my ( $fittest, $fitness ) = map { @{ $_ } } sort { $a->[1] <=> $b->[1] } @results;
	return $fittest, $fitness;
}
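# Illustrative end-to-end sketch (the run() signature and constructor keys
# are assumptions based on the fragments shown here):
my ( $example_fittest, $example_fitness ) = $example_experiment->run;
print "best gene: $example_fittest, error: $example_fitness\n";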
=item optimum
The optimal fitness is zero error in the ANN's classification. This method returns
that value: 0.
=cut
sub optimum { 0 }
sub _sign {
	my ( $obs, $exp ) = @_;
	my $fitness = 0;
	for my $i ( 0 .. $#{ $obs } ) {
lib/AI/FANN/Evolving/Experiment.pm

sub _mse {
	my ( $obs, $exp ) = @_;
	my $fitness = 0;
	for my $i ( 0 .. $#{ $obs } ) {
		$fitness += ( ( (1+$obs->[$i]) - (1+$exp->[$i]) ) ** 2 );
	}
	return $fitness / scalar(@{ $obs });
}
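# Note that the (1+...) shifts cancel algebraically, so _mse computes the
# plain mean squared error. A worked example:
#   _mse( [ 0.5, -0.5 ], [ 1.0, -1.0 ] )
#     = ( (0.5 - 1.0)**2 + (-0.5 - -1.0)**2 ) / 2
#     = ( 0.25 + 0.25 ) / 2
#     = 0.25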
=item error_func

Returns a function to compute the error. Given an argument, the following can happen:

 'sign' => error is the average number of times observed and expected have different signs
 'mse'  => error is the mean squared difference between observed and expected
 CODE   => error function is the provided code reference

=back

=cut
sub error_func {
	my $self = shift;

	# process the argument
	if ( @_ ) {
		my $arg = shift;
		if ( ref $arg eq 'CODE' ) {
			$self->{'error_func'} = $arg;
			$log->info("using custom error function");
		}
		elsif ( $arg eq 'sign' ) {
			$self->{'error_func'} = \&_sign;
			$log->info("using sign test error function");
		}
		elsif ( $arg eq 'mse' ) {
			$self->{'error_func'} = \&_mse;
			$log->info("using MSE error function");
		}
		else {
			$log->warn("don't understand error func '$arg'");
		}
	}

	# map the constructor-supplied argument
	if ( $self->{'error_func'} and $self->{'error_func'} eq 'sign' ) {
		$self->{'error_func'} = \&_sign;
		$log->info("using error function 'sign'");
	}
	elsif ( $self->{'error_func'} and $self->{'error_func'} eq 'mse' ) {
		$self->{'error_func'} = \&_mse;
		$log->info("using error function 'mse'");
	}
	return $self->{'error_func'} || \&_mse;
}
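# Illustrative sketch of the three argument forms documented above:
$example_experiment->error_func('sign');   # built-in sign test
$example_experiment->error_func('mse');    # built-in mean squared error
$example_experiment->error_func( sub {     # custom: mean absolute error (hypothetical)
	my ( $obs, $exp ) = @_;
	my $e = 0;
	$e += abs( $obs->[$_] - $exp->[$_] ) for 0 .. $#{ $obs };
	return $e / scalar @{ $obs };
} );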
1;
lib/AI/FANN/Evolving/Gene.pm

=item make_function
Returns a code reference to the fitness function, which when executed returns a fitness
value and writes the corresponding ANN to file
=cut
sub make_function {
	my $self = shift;
	my $ann = $self->ann;
	my $error_func = $self->experiment->error_func;
	$log->debug("making fitness function");

	# build the fitness function
	return sub {

		# train the AI
		$ann->train( $self->experiment->traindata );

		# isa TrainingData object, this is what we need to use
		# to make our prognostications. It is a different data
lib/AI/FANN/Evolving/Gene.pm

		# iterate over the list of input/output pairs
		for my $i ( 0 .. ( $env->length - 1 ) ) {
			my ( $input, $expected ) = $env->data($i);
			my $observed = $ann->run($input);
			$log->debug( "Observed: " . Dumper($observed) );
			$log->debug( "Expected: " . Dumper($expected) );

			# invoke the error_func provided by the experiment
			$fitness += $error_func->( $observed, $expected );
		}
		$fitness /= $env->length;

		# store result
		$self->{'fitness'} = $fitness;

		# store the AI
		my $outfile = $self->experiment->workdir . "/${fitness}.ann";
		$self->ann->save($outfile);
		return $self->{'fitness'};