lib/Algorithm/Evolutionary/Hash_Wheel.pm view on Meta::CPAN
use strict;
use warnings;
=head1 NAME
Algorithm::Evolutionary::Hash_Wheel - Random selector of things depending on probabilities
=head1 SYNOPSIS
my $wheel = new Algorithm::Evolutionary::Hash_Wheel( \%probs );
print $wheel->spin(); #Returns an element according to probabilities;
=head1 DESCRIPTION
Creates a "roulette wheel" for spinning and selecting stuff. It is
used in several places, mainly in
L<Algorithm::Evolutionary::Op::CanonicalGA>. It's similar to
L<Algorithm::Evolutionary::Wheel>, but uses a hash instead of an
array. Both should probably be unified.
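For instance, a minimal sketch (assuming, as with the array-based wheel, that the hash values are positive weights proportional to the probability of each key being returned):

  use Algorithm::Evolutionary::Hash_Wheel;

  my %probs = ( mutation => 0.7, crossover => 0.3 );
  my $wheel = Algorithm::Evolutionary::Hash_Wheel->new( \%probs );

  my %count;
  $count{ $wheel->spin() }++ for 1 .. 1000;
  print "$_ => $count{$_}\n" for keys %count;  # 'mutation' roughly 70% of the time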
=head1 METHODS
=cut
package Algorithm::Evolutionary::Hash_Wheel;
lib/Algorithm/Evolutionary/Op/Breeder.pm view on Meta::CPAN
my $replacement_rate = 0.5;
for ( 1..$population_size ) {
my $indi = new Algorithm::Evolutionary::Individual::BitString $number_of_bits ; #Creates random individual
$indi->evaluate( $onemax );
push( @pop, $indi );
}
my $m = new Algorithm::Evolutionary::Op::Mutation 0.5;
my $c = new Algorithm::Evolutionary::Op::Crossover; #Classical 2-point crossover
my $selector = new Algorithm::Evolutionary::Op::RouletteWheel $population_size; #One of the possible selectors
my $generation =
new Algorithm::Evolutionary::Op::Breeder( [$m, $c], $selector );
my @sortPop = sort { $b->Fitness() <=> $a->Fitness() } @pop;
my $bestIndi = $sortPop[0];
my $previous_average = average( \@sortPop );
$generation->apply( \@sortPop );
=head1 Base Class
L<Algorithm::Evolutionary::Op::Base>
lib/Algorithm/Evolutionary/Op/Breeder.pm view on Meta::CPAN
use base 'Algorithm::Evolutionary::Op::Base';
use Algorithm::Evolutionary qw(Wheel
Op::Tournament_Selection);
# Class-wide constants
our $APPLIESTO = 'ARRAY';
our $ARITY = 1;
=head2 new( $ref_to_operator_array[, $selector = new Algorithm::Evolutionary::Op::Tournament_Selection 2 ] )
Creates a breeder from a reference to an array of operators and an
optional selector, which defaults to binary tournament selection.
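For example, a sketch reusing the $m and $c operators from the SYNOPSIS; note that the operator array ref comes first and the selector may be omitted:

  my $breeder = Algorithm::Evolutionary::Op::Breeder->new( [ $m, $c ] ); # default binary tournament
  my $picky_breeder = Algorithm::Evolutionary::Op::Breeder->new(
      [ $m, $c ],
      Algorithm::Evolutionary::Op::Tournament_Selection->new( 4 ) );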
=cut
sub new {
my $class = shift;
my $self = {};
$self->{'_ops'} = shift || croak "No operators found";
$self->{'_selector'} = shift
|| new Algorithm::Evolutionary::Op::Tournament_Selection 2;
bless $self, $class;
return $self;
}
=head2 apply( $population[, $how_many || $population_size] )
Applies the algorithm to the population, which should have
been evaluated first; checks that it receives a
ref-to-array as input and croaks if it does not. Returns a reference
to the new population of offspring.
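A usage sketch, assuming an already evaluated @pop and the $onemax fitness from the SYNOPSIS, and the $breeder created above:

  my $new_pop = $breeder->apply( \@pop );      # as many offspring as parents
  my $a_few   = $breeder->apply( \@pop, 10 );  # just 10 offspring
  # Offspring come back unevaluated; evaluate them before the next round
  $_->evaluate( $onemax ) for @$new_pop;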
lib/Algorithm/Evolutionary/Op/Breeder.pm view on Meta::CPAN
=cut
sub apply {
my $self = shift;
my $pop = shift || croak "No population here";
my $output_size = shift || @$pop; # Defaults to pop size
my @ops = @{$self->{'_ops'}};
#Select for breeding
my $selector = $self->{'_selector'};
my @genitors = $selector->apply( $pop );
#Reproduce
my $totRate = 0;
my @rates;
for ( @ops ) {
push( @rates, $_->{'rate'});
}
my $opWheel = new Algorithm::Evolutionary::Wheel @rates;
my @new_population;
for ( my $i = 0; $i < $output_size; $i++ ) {
my @offspring;
my $selectedOp = $ops[ $opWheel->spin()];
for ( my $j = 0; $j < $selectedOp->arity(); $j ++ ) {
my $chosen = $genitors[ rand( @genitors )];
# print "Chosen ", $chosen->asString(), "\n";
push( @offspring, $chosen->clone() );
}
my $mutante = $selectedOp->apply( @offspring );
push( @new_population, $mutante );
}
return \@new_population;
}
=head1 SEE ALSO
More or less in the same ballpark, alternatives to this one
lib/Algorithm/Evolutionary/Op/Breeder_Diverser.pm view on Meta::CPAN
my $replacement_rate = 0.5;
for ( 1..$population_size ) {
my $indi = new Algorithm::Evolutionary::Individual::BitString $number_of_bits ; #Creates random individual
$indi->evaluate( $onemax );
push( @pop, $indi );
}
my $m = new Algorithm::Evolutionary::Op::Mutation 0.5;
my $c = new Algorithm::Evolutionary::Op::Crossover; #Classical 2-point crossover
my $selector = new Algorithm::Evolutionary::Op::RouletteWheel $population_size; #One of the possible selectors
my $generation =
new Algorithm::Evolutionary::Op::Breeder_Diverser( [$m, $c], $selector );
my @sortPop = sort { $b->Fitness() <=> $a->Fitness() } @pop;
my $bestIndi = $sortPop[0];
my $previous_average = average( \@sortPop );
$generation->apply( \@sortPop );
=head1 Base Class
L<Algorithm::Evolutionary::Op::Base>
lib/Algorithm/Evolutionary/Op/Breeder_Diverser.pm view on Meta::CPAN
use base 'Algorithm::Evolutionary::Op::Base';
use Algorithm::Evolutionary qw(Wheel
Op::Tournament_Selection);
# Class-wide constants
our $APPLIESTO = 'ARRAY';
our $ARITY = 1;
=head2 new( $ref_to_operator_array[, $selector = new Algorithm::Evolutionary::Op::Tournament_Selection 2 ] )
Creates a breeder from a reference to an array of operators and an
optional selector, which defaults to binary tournament selection.
=cut
sub new {
my $class = shift;
my $self = {};
$self->{_ops} = shift || croak "No operators found";
$self->{_selector} = shift
|| new Algorithm::Evolutionary::Op::Tournament_Selection 2;
bless $self, $class;
return $self;
}
=head2 apply( $population[, $how_many || $population_size] )
Applies the algorithm to the population, which should have
been evaluated first; checks that it receives a
ref-to-array as input, croaks if it does not. Returns a sorted,
lib/Algorithm/Evolutionary/Op/Breeder_Diverser.pm view on Meta::CPAN
=cut
sub apply ($) {
my $self = shift;
my $pop = shift || croak "No population here";
my $output_size = shift || @$pop; # Defaults to pop size
my @ops = @{$self->{_ops}};
#Select for breeding
my $selector = $self->{_selector};
my @genitors = $selector->apply( $pop );
#Reproduce
my $totRate = 0;
my @rates;
for ( @ops ) {
push( @rates, $_->{rate});
}
my $op_wheel = new Algorithm::Evolutionary::Wheel @rates;
my @new_population;
my $i = 0;
while ( @new_population < $output_size ) {
my @offspring;
my $selected_op = $ops[ $op_wheel->spin()];
my $chosen = $genitors[ $i++ % @genitors]; #Chosen in turn
push( @offspring, $chosen->clone() );
if( $selected_op->{'_arity'} == 2 ) {
my $another_one;
do {
$another_one = $genitors[ rand( @genitors )];
} until ( $another_one->{'_str'} ne $chosen->{'_str'} );
push( @offspring, $another_one );
} elsif ( $selected_op->{'_arity'} > 2 ) {
for ( my $j = 1; $j < $selected_op->arity(); $j ++ ) {
my $chosen = $genitors[ rand( @genitors )];
push( @offspring, $chosen->clone() );
}
}
my $mutant = $selected_op->apply( @offspring );
my $equal;
for my $o (@offspring) {
$equal += $o->{'_str'} eq $mutant->{'_str'};
}
if ( !$equal ) {
push( @new_population, $mutant );
}
}
return \@new_population;
lib/Algorithm/Evolutionary/Op/CanonicalGA.pm view on Meta::CPAN
use Algorithm::Evolutionary qw(Wheel
Op::Bitflip
Op::QuadXOver );
use base 'Algorithm::Evolutionary::Op::Easy';
# Class-wide constants
our $APPLIESTO = 'ARRAY';
our $ARITY = 1;
=head2 new( $fitness[, $selection_rate][,$operators_ref_to_array] )
Creates an algorithm, with the usual operators. Includes a default mutation
and crossover, in case they are not passed as parameters. The first
element in the array ref should be a unary operator, and the second a
binary one.
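For instance, a sketch; it assumes the ONEMAX fitness used in the tests and that both operators accept their default constructors:

  use Algorithm::Evolutionary qw( Fitness::ONEMAX Op::CanonicalGA
                                  Op::Bitflip Op::QuadXOver );

  my $onemax = Algorithm::Evolutionary::Fitness::ONEMAX->new;
  my $canonical = Algorithm::Evolutionary::Op::CanonicalGA->new( $onemax ); # default rate and operators
  my $custom = Algorithm::Evolutionary::Op::CanonicalGA->new(
      $onemax, 0.4,
      [ Algorithm::Evolutionary::Op::Bitflip->new,        # unary
        Algorithm::Evolutionary::Op::QuadXOver->new ] );  # binary
  $canonical->apply( \@pop );  # one generation over an evaluated population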
=cut
sub new {
my $class = shift;
lib/Algorithm/Evolutionary/Op/Canonical_GA_NN.pm view on Meta::CPAN
bitstrings until they reach the optimum fitness. It performs mutation
on the bitstrings by flipping a single bit, while crossover interchanges
a part of the two parents.
The first operator should be unary (a la mutation) and the second
binary (a la crossover); they will be applied in turn to couples of the
population.
This is a fast version of the canonical GA, useful for large
populations, since it avoids the expensive rank operation. Roulette
wheel selection, however, is still somewhat slow.
=head1 METHODS
=cut
package Algorithm::Evolutionary::Op::Canonical_GA_NN;
use lib qw(../../..);
our $VERSION = "3.6";
lib/Algorithm/Evolutionary/Op/Canonical_GA_NN.pm view on Meta::CPAN
use Algorithm::Evolutionary qw(Wheel
Op::Bitflip
Op::QuadXOver );
use base 'Algorithm::Evolutionary::Op::Easy';
# Class-wide constants
our $APPLIESTO = 'ARRAY';
our $ARITY = 1;
=head2 new( [ $selection_rate][,$operators_ref_to_array] )
Creates an algorithm, with the usual operators. Includes a default
mutation and crossover, in case they are not passed as parameters. The
first element in the array ref should be a unary operator, and the second a
binary one. This binary operator must accept its parameters by
reference, not by value, since it will modify them in place. For the time
being, only L<Algorithm::Evolutionary::Op::QuadXOver> works that way.
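A sketch; note there is no fitness argument, so the population handed to apply() must arrive already evaluated (here with the $onemax fitness used elsewhere in these docs):

  my $nn_ga = Algorithm::Evolutionary::Op::Canonical_GA_NN->new(
      0.4,                                                # selection rate
      [ Algorithm::Evolutionary::Op::Bitflip->new,        # unary
        Algorithm::Evolutionary::Op::QuadXOver->new ] );  # binary, modifies in place
  $_->evaluate( $onemax ) for @pop;  # evaluate first...
  $nn_ga->apply( \@pop );            # ...then run a single generation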
=cut
lib/Algorithm/Evolutionary/Op/Canonical_GA_NN.pm view on Meta::CPAN
my @newPop;
@$pop = sort { $b->{_fitness} <=> $a->{_fitness} } @$pop;
my @rates = map( $_->Fitness(), @$pop );
#Creates a roulette wheel from the population fitnesses, which
#might have changed since the last generation
my $popWheel= new Algorithm::Evolutionary::Wheel @rates;
my $popSize = scalar @$pop;
my @ops = @{$self->{_ops}};
for ( my $i = 0; $i < $popSize*(1-$self->{'_selrate'})/2; $i ++ ) {
my @selected = $popWheel->spin(2);
my @clones;
# This should be a mutation-like op which does not modify arg
for my $c (0..1) {
$clones[$c] = $ops[0]->apply( $pop->[$selected[$c]] );
}
$ops[1]->apply( @clones ); #This should be a
#crossover-like op
push @newPop, @clones;
}
#Re-sort
@{$pop}[$popSize*$self->{_selrate}..$popSize-1] = @newPop;
}
lib/Algorithm/Evolutionary/Op/Combined.pm view on Meta::CPAN
croak "Need operator array" if (!@_) ;
my $hash = { ops => shift };
my $rate = shift || 1;
my $self = Algorithm::Evolutionary::Op::Base::new( $class, $rate, $hash );
return $self;
}
=head2 apply( @operands )
Applies the operator to the set of operands. All are passed, as such,
to whatever operator is selected
=cut
sub apply ($$$){
my $self = shift;
my @victims = @_; # No need to clone, any operator will also clone.
my $op_wheel = new Algorithm::Evolutionary::Wheel map( $_->{'rate'}, @{$self->{'_ops'}} );
my $selected_op = $self->{'_ops'}->[ $op_wheel->spin()];
return $selected_op->apply(@victims);
}
=head1 SEE ALSO
=over 4
=item L<Algorithm::Evolutionary::Op::Mutation> a mutation operator.
=item L<Algorithm::Evolutionary::Op::Uniform_Crossover> another, more mutation-like, crossover. These two operators can be combined using this one; a similar combination, using the bitflip and 2-point crossover operators, is sketched below.
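A sketch of such a combination. The internal wheel is driven by each operator's rate; $indi and $another_indi stand for two previously created individuals, and presumably only the first operand is used when the unary operator is drawn:

  my $m = Algorithm::Evolutionary::Op::Bitflip->new;           # rate 1
  my $c = Algorithm::Evolutionary::Op::Crossover->new( 2, 4 ); # 2 points, rate 4
  my $either = Algorithm::Evolutionary::Op::Combined->new( [ $m, $c ] );
  # Roughly 1 time in 5 this mutates, 4 times in 5 it crosses over
  my $offspring = $either->apply( $indi, $another_indi );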
lib/Algorithm/Evolutionary/Op/EDA_step.pm view on Meta::CPAN
my @pop;
my $number_of_bits = 20;
my $population_size = 20;
my $replacement_rate = 0.5;
for ( 1..$population_size ) {
my $indi = new Algorithm::Evolutionary::Individual::BitString $number_of_bits ; #Creates random individual
$indi->evaluate( $onemax );
push( @pop, $indi );
}
my $selector = new Algorithm::Evolutionary::Op::RouletteWheel $population_size; #One of the possible selectors
my $generation =
new Algorithm::Evolutionary::Op::EDA_step( $onemax, $selector, $replacement_rate );
my @sortPop = sort { $b->Fitness() <=> $a->Fitness() } @pop;
my $bestIndi = $sortPop[0];
my $previous_average = average( \@sortPop );
$generation->apply( \@sortPop );
=head1 Base Class
L<Algorithm::Evolutionary::Op::Base>
lib/Algorithm/Evolutionary/Op/Easy.pm view on Meta::CPAN
for ( my $i = 0; $i < $max_generations; $i++ ) {
print "<", "="x 20, "Generation $i", "="x 20, ">\n";
$easy_EA->apply(\@pop );
for ( @pop ) {
print $_->asString, "\n";
}
}
#Define a default algorithm with predefined evaluation function,
#Mutation and crossover. Default selection rate is 0.4
my $algo = new Algorithm::Evolutionary::Op::Easy( $eval );
#Define an easy single-generation algorithm with predefined mutation and crossover
my $m = new Algorithm::Evolutionary::Op::Bitflip; #Changes a single bit
my $c = new Algorithm::Evolutionary::Op::Crossover; #Classical 2-point crossover
my $generation = new Algorithm::Evolutionary::Op::Easy( $rr, 0.2, [$m, $c] );
=head1 Base Class
L<Algorithm::Evolutionary::Op::Base>
lib/Algorithm/Evolutionary/Op/Easy.pm view on Meta::CPAN
splice @popsort, -$pringaos;
#Reproduce
my @rates = map( $_->{'rate'}, @ops );
my $opWheel = new Algorithm::Evolutionary::Wheel @rates;
#Generate offspring
my $originalSize = $#popsort; # Just for random choice
for ( my $i = 0; $i < $pringaos; $i ++ ) {
my @offspring;
my $selectedOp = $ops[ $opWheel->spin()];
croak "Problems with selected operator" if !$selectedOp;
for ( my $j = 0; $j < $selectedOp->arity(); $j ++ ) {
my $chosen = $popsort[ int ( rand( $originalSize ) )];
push( @offspring, $chosen ); #No need to clone, it's not changed in ops
}
# p rint "Op ", ref $selectedOp, "\n";
# if ( (ref $selectedOp ) =~ /ssover/ ) {
# print map( $_->{'_str'}."\n", @offspring );
# }
my $mutante = $selectedOp->apply( @offspring );
croak "Error applying operator" if !$mutante;
# print "Mutante ", $mutante->{'_str'}, "\n";
push( @popsort, $mutante );
}
#Return
@$pop = @popsort;
}
lib/Algorithm/Evolutionary/Op/Easy_MO.pm view on Meta::CPAN
use lib qw( ../../.. );
=head1 NAME
Algorithm::Evolutionary::Op::Easy_MO - Multiobjective evolutionary algorithm, single generation, with
variable operators
=head1 SYNOPSIS
#Mutation and crossover. Default selection rate is 0.4
my $algo = new Algorithm::Evolutionary::Op::Easy_MO( $eval );
#Define an easy single-generation algorithm with predefined mutation and crossover
my $m = new Algorithm::Evolutionary::Op::Bitflip; #Changes a single bit
my $c = new Algorithm::Evolutionary::Op::Crossover; #Classical 2-point crossover
my $generation = new Algorithm::Evolutionary::Op::Easy_MO( $rr, 0.2, [$m, $c] );
=head1 Base Class
L<Algorithm::Evolutionary::Op::Base>
lib/Algorithm/Evolutionary/Op/Easy_MO.pm view on Meta::CPAN
use Algorithm::Evolutionary qw( Wheel Op::Bitflip
Op::Crossover
Op::Eval::MO_Rank );
use base 'Algorithm::Evolutionary::Op::Base';
# Class-wide constants
our $APPLIESTO = 'ARRAY';
=head2 new( $eval_func, [$selection_rate,] [$operators_arrayref] )
Creates an algorithm that optimizes the handled fitness function using a
reference to an array of operators. If this reference is null, an
array consisting of bitflip mutation and 2-point crossover is
generated, which, of course, might not be what you need if you don't
have a binary chromosome. Take into account that in this case the
fitness function should return a reference to an array.
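For instance, a sketch with a hypothetical bi-objective fitness; any code ref returning a reference to an array of objective values should do, and the individuals are assumed to be bit strings with their genome in '_str', as elsewhere in these docs:

  my $mo_eval = sub {                 # hypothetical: maximize ones and zeroes
      my $indi = shift;
      my $ones = grep { $_ } split //, $indi->{'_str'};
      return [ $ones, length( $indi->{'_str'} ) - $ones ];
  };
  my $mo_generation = Algorithm::Evolutionary::Op::Easy_MO->new( $mo_eval, 0.4 );
  $mo_generation->apply( \@pop );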
=cut
lib/Algorithm/Evolutionary/Op/Easy_MO.pm view on Meta::CPAN
# print "Population ", scalar @popsort, "\n";
#Reproduce
my @rates = map( $_->{'rate'}, @ops );
my $opWheel = new Algorithm::Evolutionary::Wheel @rates;
#Generate offspring
my $originalSize = $#popsort; # Just for random choice
for ( my $i = 0; $i < $pringaos; $i ++ ) {
my @offspring;
my $selectedOp = $ops[ $opWheel->spin()];
for ( my $j = 0; $j < $selectedOp->arity(); $j ++ ) {
my $chosen = $popsort[ int ( rand( $originalSize ) )];
push( @offspring, $chosen ); #No need to clone, it's not changed in ops
}
my $mutante = $selectedOp->apply( @offspring );
push( @popsort, $mutante );
}
#Return
for ( my $i = 0; $i <= $#popsort; $i++ ) {
# print $i, "->", $popsort[$i]->asString, "\n";
$pop->[$i] = $popsort[$i];
}
lib/Algorithm/Evolutionary/Op/FullAlgorithm.pm view on Meta::CPAN
# Using the base class as factory
my $easyEA = Algorithm::Evolutionary::Op::Base->fromXML( $ref->{$xml} );
$easyEA->apply(\@pop );
#Or using the constructor
my $m = new Algorithm::Evolutionary::Op::Bitflip; #Changes a single bit
my $c = new Algorithm::Evolutionary::Op::Crossover; #Classical 2-point crossover
my $replacementRate = 0.3; #Replacement rate
my $popSize = 20;
my $selector = new Algorithm::Evolutionary::Op::RouletteWheel $popSize; #One of the possible selectors
my $onemax = sub {
my $indi = shift;
my $total = 0;
my $len = $indi->length();
my $i = 0;
while ($i < $len ) {
$total += substr($indi->{'_str'}, $i, 1);
$i++;
}
return $total;
};
my $generation =
new Algorithm::Evolutionary::Op::GeneralGeneration( $onemax, $selector, [$m, $c], $replacementRate );
my $g100 = new Algorithm::Evolutionary::Op::GenerationalTerm 10;
my $f = new Algorithm::Evolutionary::Op::FullAlgorithm $generation, $g100;
print $f->asXML();
$f->apply( $pop ); # Pop should be defined else where
=head1 Base Class
L<Algorithm::Evolutionary::Op::Base>
lib/Algorithm/Evolutionary/Op/GeneralGeneration.pm view on Meta::CPAN
Algorithm::Evolutionary::Op::GeneralGeneration - Customizable single generation for an evolutionary algorithm.
=head1 SYNOPSIS
#Taken from the t/general.t file, verbatim
my $m = new Algorithm::Evolutionary::Op::Bitflip; #Changes a single bit
my $c = new Algorithm::Evolutionary::Op::Crossover; #Classical 2-point crossover
my $replacementRate = 0.3; #Replacement rate
use Algorithm::Evolutionary::Op::RouletteWheel;
my $popSize = 20;
my $selector = new Algorithm::Evolutionary::Op::RouletteWheel $popSize; #One of the possible selectors
use Algorithm::Evolutionary::Op::GeneralGeneration;
my $onemax = sub {
my $indi = shift;
my $total = 0;
for ( my $i = 0; $i < $indi->length(); $i ++ ) {
$total += substr( $indi->{_str}, $i, 1 );
}
return $total;
};
my @pop;
my $numBits = 10;
for ( 0..$popSize ) {
my $indi = new Algorithm::Evolutionary::Individual::BitString $numBits ; #Creates random individual
my $fitness = $onemax->( $indi );
$indi->Fitness( $fitness );
push( @pop, $indi );
}
my $generation =
new Algorithm::Evolutionary::Op::GeneralGeneration( $onemax, $selector, [$m, $c], $replacementRate );
my @sortPop = sort { $b->Fitness() <=> $a->Fitness() } @pop;
my $bestIndi = $sortPop[0];
$generation->apply( \@sortPop );
=head1 Base Class
L<Algorithm::Evolutionary::Op::Base|Algorithm::Evolutionary::Op::Base>
=head1 DESCRIPTION
lib/Algorithm/Evolutionary/Op/GeneralGeneration.pm view on Meta::CPAN
use Carp;
use base 'Algorithm::Evolutionary::Op::Base';
use Algorithm::Evolutionary::Wheel;
# Class-wide constants
our $APPLIESTO = 'ARRAY';
our $ARITY = 1;
=head2 new( $evaluation_function, $selector, $ref_to_operator_array, $replacement_rate )
Creates a single generation of the algorithm from an evaluation
function, a selector, a reference to an array of operators and a
replacement rate, which defaults to 1 (the whole population is replaced).
=cut
sub new {
my $class = shift;
my $self = {};
$self->{'_eval'} = shift || croak "No eval function found";
$self->{'_selector'} = shift || croak "No selector found";
$self->{'_ops'} = shift || croak "No operator found";
$self->{'_replacementRate'} = shift || 1; #Default to all replaced
bless $self, $class;
return $self;
}
=head2 set( $ref_to_params_hash, $ref_to_code_hash, $ref_to_operators_hash )
Sets the instance variables. Takes a ref-to-hash as
lib/Algorithm/Evolutionary/Op/GeneralGeneration.pm view on Meta::CPAN
sub apply ($) {
my $self = shift;
my $pop = shift || croak "No population here";
croak "Incorrect type ".(ref $pop) if ref( $pop ) ne $APPLIESTO;
#Evaluate only the new ones
my $eval = $self->{_eval};
my @ops = @{$self->{_ops}};
#Breed
my $selector = $self->{_selector};
my @genitors = $selector->apply( @$pop );
#Reproduce
my $totRate = 0;
my @rates;
for ( @ops ) {
push( @rates, $_->{rate});
}
my $opWheel = new Algorithm::Evolutionary::Wheel @rates;
my @newpop;
my $pringaos = @$pop * $self->{'_replacementRate'} ;
for ( my $i = 0; $i < $pringaos; $i++ ) {
my @offspring;
my $selectedOp = $ops[ $opWheel->spin()];
# print $selectedOp->asXML;
for ( my $j = 0; $j < $selectedOp->arity(); $j ++ ) {
my $chosen = $genitors[ rand( @genitors )];
# print "Chosen ", $chosen->asString(), "\n";
push( @offspring, $chosen->clone() );
}
my $mutante = $selectedOp->apply( @offspring );
push( @newpop, $mutante );
}
#Eliminate and substitute
splice( @$pop, -$pringaos );
for ( @newpop ) {
$_->evaluate( $eval );
}
push @$pop, @newpop;
my @sortPop = sort { $b->{'_fitness'} <=> $a->{'_fitness'}; } @$pop;
lib/Algorithm/Evolutionary/Op/Generation_Skeleton.pm view on Meta::CPAN
my $replacement_rate = 0.5;
for ( 1..$population_size ) {
my $indi = new Algorithm::Evolutionary::Individual::BitString $number_of_bits ; #Creates random individual
$indi->evaluate( $onemax );
push( @pop, $indi );
}
my $m = new Algorithm::Evolutionary::Op::Mutation 0.5;
my $c = new Algorithm::Evolutionary::Op::Crossover; #Classical 2-point crossover
my $selector = new Algorithm::Evolutionary::Op::RouletteWheel $population_size; #One of the possible selectors
my $generation =
new Algorithm::Evolutionary::Op::Generation_Skeleton( $onemax, $selector, [$m, $c], $replacement_rate );
my @sortPop = sort { $b->Fitness() <=> $a->Fitness() } @pop;
my $bestIndi = $sortPop[0];
my $previous_average = average( \@sortPop );
$generation->apply( \@sortPop );
=head1 Base Class
L<Algorithm::Evolutionary::Op::Base>
lib/Algorithm/Evolutionary/Op/Generation_Skeleton.pm view on Meta::CPAN
use base 'Algorithm::Evolutionary::Op::Base';
use Algorithm::Evolutionary qw(Wheel Op::Replace_Worst);
use Sort::Key qw( rnkeysort);
# Class-wide constants
our $APPLIESTO = 'ARRAY';
our $ARITY = 1;
=head2 new( $evaluation_function, $selector, $ref_to_operator_array[, $replacement_rate][, $replacement_operator] )
Creates an algorithm, with no defaults except for the replacement rate
(defaults to 1) and the replacement operator (defaults to
L<Algorithm::Evolutionary::Op::Replace_Worst>).
=cut
sub new {
my $class = shift;
my $self = {};
$self->{_eval} = shift || croak "No eval function found";
$self->{_selector} = shift || croak "No selector found";
$self->{_ops} = shift || croak "No operators found";
$self->{_replacementRate} = shift || 1; #Default to all replaced
$self->{_replacement_op} = shift || new Algorithm::Evolutionary::Op::Replace_Worst;
bless $self, $class;
return $self;
}
=head2 set( $ref_to_params_hash, $ref_to_code_hash, $ref_to_operators_hash )
lib/Algorithm/Evolutionary/Op/Generation_Skeleton.pm view on Meta::CPAN
culled, evaluated population for next generation.
=cut
sub apply ($) {
my $self = shift;
my $pop = shift || croak "No population here";
croak "Incorrect type ".(ref $pop) if ref( $pop ) ne $APPLIESTO;
#Breed
my $selector = $self->{'_selector'};
my @genitors = $selector->apply( @$pop );
#Reproduce
my $totRate = 0;
my @rates;
my @ops = @{$self->{'_ops'}};
for ( @ops ) {
push( @rates, $_->{'rate'});
}
my $opWheel = new Algorithm::Evolutionary::Wheel @rates;
my @newpop;
my $pringaos = @$pop * $self->{'_replacementRate'} ;
for ( my $i = 0; $i < $pringaos; $i++ ) {
my @offspring;
my $selectedOp = $ops[ $opWheel->spin()];
# print $selectedOp->asXML;
for ( my $j = 0; $j < $selectedOp->arity(); $j ++ ) {
my $chosen = $genitors[ rand( @genitors )];
# print "Chosen ", $chosen->asString(), "\n";
push( @offspring, $chosen->clone() );
}
my $mutante = $selectedOp->apply( @offspring );
push( @newpop, $mutante );
}
my $eval = $self->{'_eval'};
map( $_->evaluate( $eval), @newpop );
#Eliminate and substitute
my $pop_hash = $self->{'_replacement_op'}->apply( $pop, \@newpop );
@$pop = rnkeysort { $_->{'_fitness'} } @$pop_hash ;
}
lib/Algorithm/Evolutionary/Op/RouletteWheel.pm view on Meta::CPAN
use strict; #-*-cperl-*-
use warnings;
use lib qw( ../../../../lib );
=head1 NAME
Algorithm::Evolutionary::Op::RouletteWheel - Fitness-proportional selection, using a roulette wheel.
=head1 SYNOPSIS
use Algorithm::Evolutionary::Op::RouletteWheel;
my $popSize = 100;
my $selector = new Algorithm::Evolutionary::Op::RouletteWheel $popSize;
=head1 Base Class
L<Algorithm::Evolutionary::Op::Selector>
=head1 DESCRIPTION
Roulette wheel selection tries to select a number of copies of each
individual proportional to its fitness. It is used in the
canonical GA. Some information on this method of selection can be
found in
L<this GA tutorial|http://www.geatbx.com/docu/algselct.html#nameselectionrws>
=head1 METHODS
=cut
package Algorithm::Evolutionary::Op::RouletteWheel;
use Carp;
our $VERSION = '3.1';
use base 'Algorithm::Evolutionary::Op::Selector';
use Algorithm::Evolutionary::Wheel;
# Class-wide constants
#our $APPLIESTO = 'ARRAY';
#our $ARITY = 2; #Needs an array for input, a reference for output
=head2 new( $output_population_size )
Creates a new roulette wheel selector
=cut
sub new {
my $class = shift;
my $self = Algorithm::Evolutionary::Op::Selector::new($class,shift );
return $self;
}
=head2 apply
Applies roulette wheel selection to a population, returning
another one of the preset output size.
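For instance, a sketch over an already evaluated population:

  my $selector = Algorithm::Evolutionary::Op::RouletteWheel->new( 50 );
  my @parents  = $selector->apply( @pop );  # 50 parents, fitness-proportional;
                                            # note the list (not a reference)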
=cut
sub apply (@) {
my $self = shift;
my @pop = @_;
croak "Small population size" if ! @_;
my @output;
#Create the value array
my $sum = 0;
my @rates;
for ( @pop ) {
$sum += $_->Fitness() if defined $_->Fitness();
push @rates, $_->Fitness();
}
my $popWheel=new Algorithm::Evolutionary::Wheel @rates;
#Select
for ( my $i = 0; $i < $self->{_outputSize}; $i++ ) {
#Randomly select a few guys
push @output, $pop[$popWheel->spin()];
}
return @output;
}
=head1 See Also
L<Algorithm::Evolutionary::Op::Tournament_Selection> is another option for
selecting a pool of individuals
=head1 Copyright
This file is released under the GPL. See the LICENSE file included in this distribution,
or go to http://www.fsf.org/licenses/gpl.txt
=cut
"The truth is in there";
lib/Algorithm/Evolutionary/Op/Selector.pm view on Meta::CPAN
use strict; #-*-cperl-*-
use warnings;
=head1 NAME
Algorithm::Evolutionary::Op::Selector - Abstract base class for population selectors
=head1 SYNOPSIS
package My::Selector;
use base 'Algorithm::Evolutionary::Op::Selector';
=head1 Base Class
L<Algorithm::Evolutionary::Op::Base|Algorithm::Evolutionary::Op::Base>
=head1 DESCRIPTION
Abstract base class for population selectors; defines a few instance
variables and interface elements
=head1 METHODS
=cut
package Algorithm::Evolutionary::Op::Selector;
use Carp;
our ($VERSION) = ( '$Revision: 3.0 $ ' =~ / (\d+\.\d+)/ ) ;
use base 'Algorithm::Evolutionary::Op::Base';
=head2 new( $output_population_size )
Creates a new selector which outputs a fixed number of
individuals. This is handled in the base class, since all selectors must
know in advance how many individuals they need to generate.
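A minimal, hypothetical subclass sketch showing both the constructor pass-through and an apply() implementation, following the same pattern as L<Algorithm::Evolutionary::Op::RouletteWheel>:

  package My::Random_Selector;   # hypothetical example class
  use base 'Algorithm::Evolutionary::Op::Selector';

  sub new {
      my $class = shift;
      return Algorithm::Evolutionary::Op::Selector::new( $class, shift );
  }

  sub apply {                    # picks individuals uniformly at random
      my ( $self, @pop ) = @_;
      return map { $pop[ rand @pop ] } 1 .. $self->{'_outputSize'};
  }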
=cut
sub new {
my $class = shift;
carp "Should be called from subclasses" if ( $class eq __PACKAGE__ );
my $self = {};
$self->{_outputSize} = shift || croak "I need an output population size";
bless $self, $class;
return $self;
}
=head2 apply
Applies the selection method to a population, returning another one of
the preset size. This is an abstract method that must be implemented by
descendants.
=cut
sub apply (@) {
croak "To be redefined by siblings";
}
=head1 Known descendants
lib/Algorithm/Evolutionary/Op/Tournament_Selection.pm view on Meta::CPAN
use strict; #-*-cperl-*-
use warnings;
use lib qw( ../../../../lib );
=head1 NAME
Algorithm::Evolutionary::Op::Tournament_Selection - Tournament selector, takes individuals from one population and puts them into another
=head1 SYNOPSIS
my $popSize = 100;
my $tournamentSize = 7;
my $selector = new Algorithm::Evolutionary::Op::Tournament_Selection $tournamentSize;
my @newPop = $selector->apply( @pop ); #Creates a new population from old
=head1 Base Class
L<Algorithm::Evolutionary::Op::Selector>
=head1 DESCRIPTION
One of the possible selectors used for selecting the pool of individuals
that are going to be the parents of the following generation. Takes a
set of individuals randomly out of the population, and selects the best.
=head1 METHODS
=cut
package Algorithm::Evolutionary::Op::Tournament_Selection;
use Carp;
our $VERSION = '1.5';
use base 'Algorithm::Evolutionary::Op::Base';
=head2 new( [$tournament_size = 2] )
Creates a new tournament selector with the given tournament size, which defaults to 2
=cut
sub new {
my $class = shift;
my $self = Algorithm::Evolutionary::Op::Base::new($class );
$self->{'_tournament_size'} = shift || 2;
bless $self, $class;
return $self;
}
=head2 apply( $ref_to_population[, $output_size || @$ref_to_population] )
Applies the tournament selection to a population, returning another of
the same size by default or whatever size is selected. Please bear in
mind that, unlike other selectors, this one uses a reference to
population instead of a population array.
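For instance, a sketch over an already evaluated population; note the reference:

  my $selector = Algorithm::Evolutionary::Op::Tournament_Selection->new( 3 );
  my @parents  = $selector->apply( \@pop );      # as many parents as individuals
  my @just_50  = $selector->apply( \@pop, 50 );  # or any other number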
=cut
sub apply ($$) {
my $self = shift;
my $pop = shift || croak "No pop";
my $output_size = shift || @$pop;
my @output;
for ( my $i = 0; $i < $output_size; $i++ ) {
#Randomly select a few guys
my $best = $pop->[ rand( @$pop ) ];
for ( my $j = 1; $j < $self->{'_tournament_size'}; $j++ ) {
my $this_one = $pop->[ rand( @$pop ) ];
if ( $this_one->{'_fitness'} > $best->{'_fitness'} ) {
$best = $this_one;
}
}
#Keep the winner of this tournament
push @output, $best;
}
return @output;
}
=head1 See Also
L<Algorithm::Evolutionary::Op::RouletteWheel> is another option for
selecting a pool of individuals
=head1 Copyright
This file is released under the GPL. See the LICENSE file included in this distribution,
or go to http://www.fsf.org/licenses/gpl.txt
=cut
"The truth is in here";
lib/Algorithm/Evolutionary/Run.pm view on Meta::CPAN
'points' => '2'
},
'max_generations' => '1000',
'mutation' => {
'priority' => '2',
'rate' => '0.1'
},
'length' => '120',
'max_fitness' => '20',
'pop_size' => '1024',
'selection_rate' => '0.1'
};
my $algorithm = new Algorithm::Evolutionary::Run $conf;
#Run it to the end
$algorithm->run();
#Print results
$algorithm->results();
lib/Algorithm/Evolutionary/Run.pm view on Meta::CPAN
$fitness_object = eval $fitness_class."->new( \@params )" || die "Can't instantiate $fitness_class: $@\n";
}
$self->{'_fitness'} = $fitness_object;
#----------------------------------------------------------#
#We use these operators to define a generation of the algorithm. This is
# not really necessary, since this algorithm defines both operators by
# default. The parameters are the fitness function, the selection rate and
# the variation operators.
my $algorithm_class = "Algorithm::Evolutionary::Op::".($self->{'algorithm'}?$self->{'algorithm'}:'Easy');
my $generation = eval $algorithm_class."->new( \$fitness_object , \$self->{'selection_rate'} , [\$m, \$c] )"
|| die "Can't instantiate $algorithm_class: $@\n";
#Time
my $inicioTiempo = [gettimeofday()];
#----------------------------------------------------------#
bless $self, $class;
$self->reset_population;
for ( @{$self->{'_population'}} ) {
if ( !defined $_->Fitness() ) {
lib/Algorithm/Evolutionary/Run.pm view on Meta::CPAN
=head2 results()
Returns results in a hash that contains the best, total time so far
and the number of evaluations.
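For instance, after calling run() as in the SYNOPSIS:

  my $results = $algorithm->results();
  print "Best: ",        $results->{'best'}->asString(), "\n",
        "Evaluations: ", $results->{'evaluations'}, "\n",
        "Time: ",        $results->{'time'}, " s\n";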
=cut
sub results {
my $self = shift;
my $population_size = scalar @{$self->{'_population'}};
my $last_good_pos = $population_size*(1-$self->{'selection_rate'});
my $results = { best => $self->{'_population'}->[0],
median => $self->{'_population'}->[ $population_size / 2],
last_good => $self->{'_population'}->[ $last_good_pos ],
time => tv_interval( $self->{'_start_time'} ),
evaluations => $self->{'_fitness'}->evaluations() };
return $results;
}
=head2 evaluated_population()
Returns the portion of population that has been evaluated (all but the new ones)
=cut
sub evaluated_population {
my $self = shift;
my $population_size = scalar @{$self->{'_population'}};
my $last_good_pos = $population_size*(1-$self->{'selection_rate'}) - 1;
return @{$self->{'_population'}}[0..$last_good_pos];
}
=head2 compute_average_distance( $individual )
Computes the average Hamming distance to the population
=cut
lib/Algorithm/Evolutionary/Wheel.pm view on Meta::CPAN
use strict;
use warnings;
=head1 NAME
Algorithm::Evolutionary::Wheel - Random selector of things depending on probabilities
=head1 SYNOPSIS
my $wheel = new Algorithm::Evolutionary::Wheel( @probs );
print $wheel->spin(); #Returns an element according to probabilities;
=head1 DESCRIPTION
Creates a "roulette wheel" for spinning and selecting stuff. It is
used in several places, mainly in
L<Algorithm::Evolutionary::Op::CanonicalGA>. Take care that the
fitness values used as weights must be strictly positive; if they
aren't, the roulette wheel won't work at all.
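A minimal sketch; note that, unlike L<Algorithm::Evolutionary::Hash_Wheel>, spin() returns an I<index> into the list of weights, which is how the operator rates are used throughout this distribution:

  my @rates    = ( 1, 4 );  # e.g. mutation priority 1, crossover priority 4
  my $op_wheel = Algorithm::Evolutionary::Wheel->new( @rates );
  my $index    = $op_wheel->spin();     # 0 or 1, with 1:4 odds
  my @two      = $op_wheel->spin( 2 );  # two indices at once, as in Canonical_GA_NN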
=head1 METHODS
=cut
package Algorithm::Evolutionary::Wheel;
scripts/canonical-genetic-algorithm.pl view on Meta::CPAN
#!/usr/bin/perl
=head1 NAME
canonical-genetic-algorithm.pl - Canonical Genetic Algorithm on a simple fitness function
=head1 SYNOPSIS
prompt% ./canonical-genetic-algorithm.pl <bits> <block size> <population> <number of generations> <selection rate>
=head1 DESCRIPTION
A canonical GA uses mutation, crossover, binary representation, and
roulette wheel selection. It is included here mainly for reference, and
so that you can peruse it to start your own programs.
In this case, we are optimizing the Royal Road function,
L<http://web.cecs.pdx.edu/~mm/handbook-of-ec-rr.pdf>. By default,
these values are used:
=over
=item *
scripts/canonical-genetic-algorithm.pl view on Meta::CPAN
I<population size>: 256
=item *
I<number of generations>: 200 (could end before, if solution is
found)
=item *
I<selection rate>: 20% (will be replaced each generation); this means it's a steady state algorithm, which only changes a part of the population each generation.
=back
This program also demonstrates the use of caches in the fitness
evaluation, so be careful if you use too many bits or too many
generations, and keep an eye on memory.
Output shows the number of generations, the winning chromosome, and
fitness. After finishing, it outputs time, cache ratio and some other
things.
scripts/canonical-genetic-algorithm.pl view on Meta::CPAN
use Algorithm::Evolutionary qw( Individual::BitString Op::Creator
Op::CanonicalGA Op::Bitflip
Op::Crossover Fitness::Royal_Road);
use Algorithm::Evolutionary::Utils qw(entropy consensus);
#----------------------------------------------------------#
my $bits = shift || 64;
my $block_size = shift || 4;
my $pop_size = shift || 256; #Population size
my $numGens = shift || 200; #Max number of generations
my $selection_rate = shift || 0.2;
#----------------------------------------------------------#
#Initial population
my @pop;
my $creator = new Algorithm::Evolutionary::Op::Creator( $pop_size, 'BitString', { length => $bits });
$creator->apply( \@pop ); #Generates population
#----------------------------------------------------------#
# Variation operators
scripts/canonical-genetic-algorithm.pl view on Meta::CPAN
# Fitness function: create it and evaluate
my $rr = new Algorithm::Evolutionary::Fitness::Royal_Road( $block_size );
map( $_->evaluate( $rr ), @pop );
#----------------------------------------------------------#
#We use these operators to define a generation of the algorithm. This is
# not really necessary, since this algorithm defines both operators by
# default. The parameters are the fitness function, the selection rate and
# the variation operators.
my $generation = Algorithm::Evolutionary::Op::CanonicalGA->new( $rr , $selection_rate , [$m, $c] ) ;
#Time, counter and do the do
my $inicioTiempo = [gettimeofday()];
my $contador=0;
do {
$generation->apply( \@pop );
print "$contador : ", $pop[0]->asString(), "\n" ;
$contador++;
} while( ($contador < $numGens)
&& ($pop[0]->Fitness() < $bits));
scripts/rectangle-coverage.pl view on Meta::CPAN
rectangle-coverage.pl - Find the dot maximally covered by (random) rectangles
=head1 SYNOPSIS
You might have to do
prompt% cpanm --installdeps .
first, since that module is not installed by default with L<Algorithm::Evolutionary>. Use C<sudo> if appropriate.
prompt% ./rectangle-coverage.pl <number-of-rectangles> <arena-side> <bits-per-coordinate> <population> <number of generations> <selection rate>
Or
prompt% ./rectangle-coverage.pl
And change variable values from the user interface
=head1 DESCRIPTION
A demo that combines the L<Algorithm::Evolutionary::Op::Easy> module
scripts/rectangle-coverage.pl view on Meta::CPAN
=item *
I<population size>: 64
=item *
I<number of generations>: 200
=item *
I<selection rate>: 20% (will be replaced each generation); this means it's a steady state algorithm, which only changes a part of the population each generation.
=back
This program also demonstrates the use of caches in the fitness
evaluation, so be careful if you use too many bits or too many
generations, and keep an eye on memory usage.
Console output shows the number of generations, the winning chromosome, and
fitness. After finishing, it outputs time, cache ratio and some other
things.
scripts/rectangle-coverage.pl view on Meta::CPAN
my $mw = MainWindow->new;
$mw->configure( -width=>$width, -height=>$width );
$mw->resizable( 0, 0 ); # not resizable in any direction
my $num_rects = shift || 300;
my $arena_side = shift || 10;
my $bits = shift || 32;
my $pop_size = shift || 64; #Population size
my $number_of_generations = shift || 200; #Max number of generations
my $selection_rate = shift || 0.2;
my $scale_x = $arena_side/$width;
my $scale_y = $arena_side/$height;
my $alg = Algorithm::RectanglesContainingDot->new;
my $fitness;
my $generation;
my @pop;
# Start Evolutionary Algorithm
my $contador=0;
my $dot_size = 6;
my $mini_dot_size = $dot_size/2;
my @dot_population;
# Create and configure the widgets
my $f = $mw->Frame(-relief => 'groove',
-bd => 2)->pack(-side => 'top',
-fill => 'x');
for my $v ( qw( num_rects arena_side bits pop_size number_of_generations selection_rate ) ){
create_and_pack( $f, $v );
}
my $canvas = $mw->Canvas( -cursor=>"crosshair", -background=>"white",
-width=>$width, -height=>$height )->pack;
$mw->Button( -text => 'Start',
-command => \&start,
)->pack( -side => 'left',
-expand => 1);
$mw->Button( -text => 'End',
scripts/rectangle-coverage.pl view on Meta::CPAN
#----------------------------------------------------------#
# Variation operators
my $m = Algorithm::Evolutionary::Op::Bitflip->new; # Rate = 1
my $c = Algorithm::Evolutionary::Op::Crossover->new(2, 9 ); # Rate = 9
#----------------------------------------------------------#
#We use these operators to define a generation of the algorithm. This is
# not really necessary, since this algorithm defines both operators by
# default. The parameters are the fitness function, the selection rate and
# the variation operators.
$generation = Algorithm::Evolutionary::Op::Easy->new( $fitness , $selection_rate , [$m, $c] ) ;
#----------------------------------------------------------#
for ( @pop ) {
if ( !defined $_->Fitness() ) {
my $this_fitness = $fitness->($_);
$_->Fitness( $this_fitness );
}
}
t/0200-run-ga.t view on Meta::CPAN
'points' => '2'
},
'max_generations' => '10',
'mutation' => {
'priority' => '2',
'rate' => '0.1'
},
'length' => '120',
'max_fitness' => '20',
'pop_size' => '128',
'selection_rate' => '0.1'
};
my $another_algorithm = new Algorithm::Evolutionary::Run $conf;
isa_ok( $another_algorithm, 'Algorithm::Evolutionary::Run' );
my $somebody = $algorithm->random_member();
isa_ok( $somebody, 'Algorithm::Evolutionary::Individual::BitString');
$another_algorithm->run();
ok( $another_algorithm->{'_counter'} == 10, "run OK" );
my $results = $another_algorithm->results();
cmp_ok( $results->{'evaluations'}, ">",100, "Evaluations OK" );
cmp_ok( $results->{'best'}->Fitness(), ">",
t/0407-tournament.t view on Meta::CPAN
my $onemax = new Algorithm::Evolutionary::Fitness::ONEMAX;
for ( 1..$population_size ) {
my $indi = new Algorithm::Evolutionary::Individual::BitString $number_of_bits ; #Creates random individual
$indi->evaluate( $onemax );
push( @pop, $indi );
}
my $m = new Algorithm::Evolutionary::Op::Mutation 0.5;
my $c = new Algorithm::Evolutionary::Op::Crossover; #Classical 2-point crossover
my $selector = new Algorithm::Evolutionary::Op::Tournament_Selection 2;
ok( ref $selector eq "Algorithm::Evolutionary::Op::Tournament_Selection", 1);
my @new_pop = $selector->apply( \@pop );
ok( scalar( @new_pop) == scalar( @pop ), 1 ); #At least size is the same
@new_pop = $selector->apply( \@pop, @pop/10 );
ok( scalar( @new_pop) == scalar( @pop )/10, 1 ); #Size is a tenth of the original
$selector = new Algorithm::Evolutionary::Op::Tournament_Selection 10;
@new_pop = $selector->apply( \@pop );
ok( scalar( @new_pop) == scalar( @pop ), 1 ); #At least size is the same
=head1 Copyright
This file is released under the GPL. See the LICENSE file included in this distribution,
or go to http://www.fsf.org/licenses/gpl.txt
CVS Info: $Date: 2010/12/20 16:01:39 $
$Header: /media/Backup/Repos/opeal/opeal/Algorithm-Evolutionary/t/0407-tournament.t,v 1.1 2010/12/20 16:01:39 jmerelo Exp $
$Author: jmerelo $
t/0499-breeder-diverser.t view on Meta::CPAN
my $onemax = new Algorithm::Evolutionary::Fitness::ONEMAX;
for ( 1..$population_size ) {
my $indi = new Algorithm::Evolutionary::Individual::BitString $number_of_bits ; #Creates random individual
$indi->evaluate( $onemax );
push( @pop, $indi );
}
my $m = new Algorithm::Evolutionary::Op::Mutation 0.5;
my $c = new Algorithm::Evolutionary::Op::Crossover; #Classical 2-point crossover
my $selector = new Algorithm::Evolutionary::Op::Tournament_Selection 2;
my $generation =
new Algorithm::Evolutionary::Op::Breeder_Diverser( [$m, $c] );
ok( ref $generation eq "Algorithm::Evolutionary::Op::Breeder_Diverser", 1);
my $new_pop = $generation->apply( \@pop );
ok( scalar( @$new_pop) == scalar( @pop ), 1 ); #At least size is the same
map( $_->evaluate( $onemax ), @$new_pop );
$new_pop = $generation->apply( $new_pop, @$new_pop/10 );
ok( scalar( @$new_pop) == scalar( @pop )/10, 1 ); #Size is a tenth of the original
$selector = new Algorithm::Evolutionary::Op::Tournament_Selection 5;
$generation =
new Algorithm::Evolutionary::Op::Breeder_Diverser( [$m, $c], $selector );
ok( ref $generation eq "Algorithm::Evolutionary::Op::Breeder_Diverser", 1);
$new_pop = $generation->apply( \@pop );
ok( scalar( @$new_pop) == scalar( @pop ), 1 ); #At least size is the same
=head1 Copyright
This file is released under the GPL. See the LICENSE file included in this distribution,
or go to http://www.fsf.org/licenses/gpl.txt
CVS Info: $Date: 2010/12/20 16:01:39 $
t/0499-breeder.t view on Meta::CPAN
my $onemax = new Algorithm::Evolutionary::Fitness::ONEMAX;
for ( 1..$population_size ) {
my $indi = new Algorithm::Evolutionary::Individual::BitString $number_of_bits ; #Creates random individual
$indi->evaluate( $onemax );
push( @pop, $indi );
}
my $m = new Algorithm::Evolutionary::Op::Mutation 0.5;
my $c = new Algorithm::Evolutionary::Op::Crossover; #Classical 2-point crossover
my $selector = new Algorithm::Evolutionary::Op::Tournament_Selection 2;
my $generation =
new Algorithm::Evolutionary::Op::Breeder( [$m, $c] );
ok( ref $generation eq "Algorithm::Evolutionary::Op::Breeder", 1);
my $new_pop = $generation->apply( \@pop );
ok( scalar( @$new_pop) == scalar( @pop ), 1 ); #At least size is the same
map( $_->evaluate( $onemax ), @$new_pop );
$new_pop = $generation->apply( $new_pop, @$new_pop/10 );
ok( scalar( @$new_pop) == scalar( @pop )/10, 1 ); #Size is a tenth of the original
$selector = new Algorithm::Evolutionary::Op::Tournament_Selection 5;
$generation =
new Algorithm::Evolutionary::Op::Breeder( [$m, $c], $selector );
ok( ref $generation eq "Algorithm::Evolutionary::Op::Breeder", 1);
$new_pop = $generation->apply( \@pop );
ok( scalar( @$new_pop) == scalar( @pop ), 1 ); #At least size is the same
=head1 Copyright
This file is released under the GPL. See the LICENSE file included in this distribution,
or go to http://www.fsf.org/licenses/gpl.txt
CVS Info: $Date: 2010/12/16 18:57:41 $
t/0500-generation-skel.t view on Meta::CPAN
my $replacement_rate = 0.5;
for ( 1..$population_size ) {
my $indi = new Algorithm::Evolutionary::Individual::BitString $number_of_bits ; #Creates random individual
$indi->evaluate( $onemax );
push( @pop, $indi );
}
my $m = new Algorithm::Evolutionary::Op::Mutation 0.5;
my $c = new Algorithm::Evolutionary::Op::Crossover; #Classical 2-point crossover
my $selector = new Algorithm::Evolutionary::Op::RouletteWheel $population_size; #One of the possible selectors
my $generation =
new Algorithm::Evolutionary::Op::Generation_Skeleton( $onemax, $selector, [$m, $c], $replacement_rate );
my @sorted_pop = sort { $b->Fitness() <=> $a->Fitness() } @pop;
my $bestIndi = $sorted_pop[0];
my $previous_average = average( \@sorted_pop );
$generation->apply( \@sorted_pop );
ok( $bestIndi->Fitness() <= $sorted_pop[0]->Fitness(), 1 ); #fitness
#improves,
#but not
#always
#This should have improved...
do {
$generation->apply( \@sorted_pop );
} until ( $previous_average < average( \@sorted_pop)); #It eventually improves
my $this_average = average( \@sorted_pop );
ok( $previous_average < $this_average , 1 );
my $replacer = new Algorithm::Evolutionary::Op::Replace_Worst;
my $new_generation =
new Algorithm::Evolutionary::Op::Generation_Skeleton( $onemax, $selector, [$m, $c], $replacement_rate, $replacer );
do {
$new_generation->apply( \@sorted_pop );
} until ( $this_average < average( \@sorted_pop)); #It eventually improves
ok( $this_average < average( \@sorted_pop), 1 );
$replacer = new Algorithm::Evolutionary::Op::Replace_Different;
$new_generation =
new Algorithm::Evolutionary::Op::Generation_Skeleton( $onemax, $selector, [$m, $c], $replacement_rate, $replacer );
do {
$new_generation->apply( \@sorted_pop );
} until ( $this_average < average( \@sorted_pop)); #It eventually improves
ok( $this_average < average( \@sorted_pop), 1 );
=head1 Copyright
This file is released under the GPL. See the LICENSE file included in this distribution,
t/general.t view on Meta::CPAN
return sin( $sqrt )/$sqrt;
};
my $sa = new Algorithm::Evolutionary::Op::SimulatedAnnealing( $eval, $m, $freezer, $initTemp, $minTemp, );
is( ref $sa, 'Algorithm::Evolutionary::Op::SimulatedAnnealing', "Good class" );
#test 34
my $c = new Algorithm::Evolutionary::Op::Crossover; #Classical 2-point crossover
my $replacementRate = 0.3; #Replacement rate
use Algorithm::Evolutionary::Op::RouletteWheel;
my $popSize = 20;
my $selector = new Algorithm::Evolutionary::Op::RouletteWheel $popSize; #One of the possible selectors
use Algorithm::Evolutionary::Op::GeneralGeneration;
my $onemax = sub {
my $indi = shift;
my $total = 0;
my $len = $indi->size();
my $i = 0;
while ($i < $len ) {
$total += substr($indi->{'_str'}, $i, 1);
$i++;
}
t/general.t view on Meta::CPAN
my $numBits = 20;
for ( 0..$popSize ) {
my $indi = new Algorithm::Evolutionary::Individual::BitString $numBits ; #Creates random individual
my $fitness = $onemax->( $indi );
$indi->Fitness( $fitness );
push( @pop, $indi );
}
#fitness
my $generation =
new Algorithm::Evolutionary::Op::GeneralGeneration( $onemax, $selector, [$m, $c], $replacementRate );
my @sortPop = sort { $b->Fitness() <=> $a->Fitness() } @pop;
my $bestIndi = $sortPop[0];
$generation->apply( \@sortPop );
is( $bestIndi->Fitness() <= $sortPop[0]->Fitness(), 1, "Fitness improvement" ); #fitness improves, but not always
# To be obsoleted
my $ggxml = $generation->asXML();
my $gprime = Algorithm::Evolutionary::Op::Base->fromXML( $ggxml );
is( $gprime->{_eval}( $pop[0] ) eq $generation->{_eval}( $pop[0] ) , 1, "XML" ); #Code snippets will never be exactly the same.
t/p_peaks.yaml view on Meta::CPAN
crossover:
points: 2
priority: 3
fitness:
class: P_Peaks
params:
- 100
- 64
max_generations: 500
max_fitness: 1
selection_rate: 0.2