AI-ANN
view release on metacpan or search on metacpan
lib/AI/ANN/Evolver.pm view on Meta::CPAN
# tan() is presumably used as/in an activation function elsewhere in this
# module — it is not referenced in the visible chunk (TODO confirm).
use Math::Libm qw(tan);
# Clamp bounds applied to a weight after mutation in mutate().
has 'max_value' => (is => 'rw', isa => 'Num', default => 1);
has 'min_value' => (is => 'rw', isa => 'Num', default => 0);
# Per-weight probability that mutate() perturbs a given existing weight.
has 'mutation_chance' => (is => 'rw', isa => 'Num', default => 0);
# Size generator for perturbations: a coderef, by default uniform in (-1, 1).
# A plain number passed to the constructor is wrapped into such a coderef by
# BUILDARGS below.
# NOTE(review): mutate() below uses $self->{'mutation_amount'} as a plain
# number (multiplication/comparison), which conflicts with this CodeRef
# declaration — confirm which representation is intended.
has 'mutation_amount' => (is => 'rw', isa => 'CodeRef', default => sub{sub{2 * rand() - 1}});
# Probability of creating a previously-absent link during mutate().
has 'add_link_chance' => (is => 'rw', isa => 'Num', default => 0);
# Probability of deleting a near-zero link during mutate().
has 'kill_link_chance' => (is => 'rw', isa => 'Num', default => 0);
# Per-neuron probability that crossover() mixes the two parents' weights
# element-by-element instead of copying one parent's neuron wholesale.
has 'sub_crossover_chance' => (is => 'rw', isa => 'Num', default => 0);
# Evolution-strategy style learning-rate functions tau(n) and tau'(n),
# each taking a single numeric argument n.
has 'gaussian_tau' => (is => 'rw', isa => 'CodeRef', default => sub{sub{1/sqrt(2*sqrt(shift))}});
has 'gaussian_tau_prime' => (is => 'rw', isa => 'CodeRef', default => sub{sub{1/sqrt(2*shift)}});
# Normalize constructor arguments.  Accepts either a single hashref or a
# flat key/value list.  For backward compatibility, a plain number supplied
# as 'mutation_amount' is wrapped into a coderef returning a uniform value
# in (-$n, $n), matching the CodeRef type of the attribute.
around BUILDARGS => sub {
    my $orig = shift;
    my $class = shift;
    my %data;
    if ( @_ == 1 && ref $_[0] eq 'HASH' ) {
        %data = %{$_[0]};
    } else {
        %data = @_;
    }
    # Bug fix: only wrap when the caller actually supplied a value.  The old
    # condition ((not ref ...) || ref ... ne 'CODE') also fired when the key
    # was absent, installing sub { undef * (...) } and silently clobbering
    # the attribute's declared default of sub { 2 * rand() - 1 }.
    if (exists $data{'mutation_amount'}
            && ref $data{'mutation_amount'} ne 'CODE') {
        my $range = $data{'mutation_amount'};
        $data{'mutation_amount'} = sub { $range * (rand() * 2 - 1) };
    }
    return $class->$orig(%data);
};
# crossover($parent_a, $parent_b): recombine two parent networks into a new
# child network of the same class.  Per neuron, with probability
# sub_crossover_chance the child's weights are mixed element-by-element
# (each input weight and neuron weight drawn from a random parent);
# otherwise one parent's whole neuron is deep-copied.  Returns the child
# network, or -1 if the parents are structurally incompatible.
sub crossover {
    my ($self, $parent_a, $parent_b) = @_;
    my $class      = ref($parent_a);
    my $inputcount = $parent_a->input_count();
    my $minvalue   = $parent_a->minvalue();
    my $maxvalue   = $parent_a->maxvalue();
    my $afunc      = $parent_a->afunc();
    my $dafunc     = $parent_a->dafunc();
    # Parents must agree on input count ...
    return -1 unless $inputcount == $parent_b->input_count();
    my $data_a = $parent_a->get_internals();
    my $data_b = $parent_b->get_internals();
    my $last_index = $#{$data_a};
    # ... and on neuron count.
    return -1 unless $last_index == $#{$data_b};
    my $child_data = [];
    for my $i (0 .. $last_index) {
        if (rand() < $self->{'sub_crossover_chance'}) {
            # Fine-grained mix: choose each weight independently.
            my $neuron = { 'inputs' => [], 'neurons' => [] };
            $neuron->{'iamanoutput'} = $data_a->[$i]->{'iamanoutput'};
            for my $j (0 .. $inputcount - 1) {
                $neuron->{'inputs'}->[$j] = (rand() > 0.5)
                    ? $data_a->[$i]->{'inputs'}->[$j]
                    : $data_b->[$i]->{'inputs'}->[$j];
                # Deliberately NOT dclone()d here — the elements are plain
                # scalars, and cloning them has caused debugging pain before.
            }
            for my $j (0 .. $last_index) {
                $neuron->{'neurons'}->[$j] = (rand() > 0.5)
                    ? $data_a->[$i]->{'neurons'}->[$j]
                    : $data_b->[$i]->{'neurons'}->[$j];
            }
            $child_data->[$i] = $neuron;
        } else {
            # Coarse: deep-copy one randomly-chosen parent's whole neuron.
            $child_data->[$i] = dclone(
                (rand() > 0.5) ? $data_a->[$i] : $data_b->[$i] );
        }
    }
    return $class->new(
        'inputs'   => $inputcount,
        'data'     => $child_data,
        'minvalue' => $minvalue,
        'maxvalue' => $maxvalue,
        'afunc'    => $afunc,
        'dafunc'   => $dafunc,
    );
}
# mutate($network): build a mutated deep copy of $network's internals,
# leaving the original network's data untouched (dclone below).  Existing
# weights are perturbed with probability mutation_chance and clamped to
# [min_value, max_value]; near-zero links may be killed and absent links
# created.  NOTE(review): this function continues past the end of the
# visible excerpt; the tail (and its return value) is not shown here.
sub mutate {
my $self = shift;
my $network = shift;
my $class = ref($network);
my $networkdata = $network->get_internals();
my $inputcount = $network->input_count();
my $minvalue = $network->minvalue();
my $maxvalue = $network->maxvalue();
my $afunc = $network->afunc();
my $dafunc = $network->dafunc();
# "neuroncount" is really the LAST INDEX ($#) of the neuron array, not the
# number of neurons — the original author's comment below acknowledges this.
my $neuroncount = $#{$networkdata}; # BTW did you notice that this
# isn't what it says it is?
$networkdata = dclone($networkdata); # For safety.
for (my $i = 0; $i <= $neuroncount; $i++) {
# First each input/neuron pair
for (my $j = 0; $j < $inputcount; $j++) {
my $weight = $networkdata->[$i]->{'inputs'}->[$j];
# A defined, non-zero weight means the link currently exists.
if (defined $weight && $weight != 0) {
if (rand() < $self->{'mutation_chance'}) {
# NOTE(review): 'mutation_amount' is declared as a CodeRef attribute
# above, but is used here (and below) as a plain number; if a coderef
# is stored, numeric context yields its address — confirm intended type.
$weight += (rand() * 2 - 1) * $self->{'mutation_amount'};
# Clamp into [min_value, max_value]; the small epsilon keeps a live
# link from landing exactly on min_value.
if ($weight > $self->{'max_value'}) {
$weight = $self->{'max_value'};
}
if ($weight < $self->{'min_value'}) {
$weight = $self->{'min_value'} + 0.000001;
}
}
# Weak links may be pruned entirely (undef = no link).
if (abs($weight) < $self->{'mutation_amount'}) {
if (rand() < $self->{'kill_link_chance'}) {
$weight = undef;
}
}
} else {
# No existing link: possibly create one with a small random weight.
if (rand() < $self->{'add_link_chance'}) {
$weight = rand() * $self->{'mutation_amount'};
# We want to Do The Right Thing. Here, that means to
( run in 1.401 second using v1.01-cache-2.11-cpan-39bf76dae61 )