view release on metacpan or search on metacpan
lib/AI/FANN/Evolving.pm view on Meta::CPAN
}
}
# mutate the list properties
$log->debug("mutating list properties");
my %list_properties = __PACKAGE__->_list_properties;
for my $prop ( keys %list_properties ) {
my $handler = $list_properties{$prop};
my @values = $self->$prop;
if ( ref $handler ) {
$self->$prop( map { $handler->($_,$mu) } @values );
}
else {
$self->$prop( map { _mutate_enum($handler,$_,$mu) } @values );
}
}
# mutate the layer properties
$log->debug("mutating layer properties");
my %layer_properties = __PACKAGE__->_layer_properties;
for my $prop ( keys %layer_properties ) {
my $handler = $layer_properties{$prop};
for my $i ( 1 .. $self->num_layers ) {
for my $j ( 1 .. $self->layer_num_neurons($i) ) {
lib/AI/FANN/Evolving.pm view on Meta::CPAN
return $self->{'train_type'} = $value;
}
else {
$log->debug("getting train type");
return $self->{'train_type'};
}
}
=item activation_function
Getter/setter for the function that maps inputs to outputs. Default is
FANN_SIGMOID_SYMMETRIC.
=back
=cut
sub activation_function {
my $self = shift;
if ( @_ ) {
my $value = shift;
lib/AI/FANN/Evolving/Chromosome.pm view on Meta::CPAN
=item recombine
Recombines properties of the AI during meiosis in proportion to the crossover_rate.
=cut
sub recombine {
	$log->debug("recombining chromosomes");
	# unpack the two chromosomes taking part in the crossover
	my ( $chr1, $chr2 ) = @_;
	# mutate each chromosome's genes, keeping the first resulting gene
	my ( $gen1 ) = map { $_->mutate } $chr1->genes;
	my ( $gen2 ) = map { $_->mutate } $chr2->genes;
	# let the underlying ANNs exchange properties at the experiment's
	# configured crossover rate
	$gen1->ann->recombine( $gen2->ann, $chr1->experiment->crossover_rate );
	# store the mutated genes back on their chromosomes: mutation
	# produced fresh clones, so the old object references are stale
	$chr1->genes($gen1);
	$chr2->genes($gen2);
}
=item clone
lib/AI/FANN/Evolving/Chromosome.pm view on Meta::CPAN
Clones the object
=back
=cut
sub clone {
	my $self = shift;
	# capture the genes before cloning the container object
	my @original_genes = $self->genes;
	my $copy = $self->SUPER::clone;
	# deep-copy each gene so the clone shares no gene references
	# with the original chromosome
	$copy->genes( map { $_->clone } @original_genes );
	return $copy;
}
1;
lib/AI/FANN/Evolving/Experiment.pm view on Meta::CPAN
$wd =~ s/\d+$/$i/;
$self->{'workdir'} = $wd;
mkdir $wd;
my $optimum = $self->optimum($i);
$log->debug("optimum at generation $i is $optimum");
my ( $fittest, $fitness ) = $self->population->turnover($i,$self->env,$optimum);
push @results, [ $fittest, $fitness ];
}
my ( $fittest, $fitness ) = map { @{ $_ } } sort { $a->[1] <=> $b->[1] } @results;
return $fittest, $fitness;
}
=item optimum
The optimal fitness is zero error in the ANN's classification. This method returns
that value: 0.
=cut
lib/AI/FANN/Evolving/Experiment.pm view on Meta::CPAN
}
elsif ( $arg eq 'mse' ) {
$self->{'error_func'} = \&_mse;
$log->info("using MSE error function");
}
else {
$log->warn("don't understand error func '$arg'");
}
}
# map the constructor-supplied argument
if ( $self->{'error_func'} and $self->{'error_func'} eq 'sign' ) {
$self->{'error_func'} = \&_sign;
$log->info("using error function 'sign'");
}
elsif ( $self->{'error_func'} and $self->{'error_func'} eq 'mse' ) {
$self->{'error_func'} = \&_mse;
$log->info("using error function 'mse'");
}
return $self->{'error_func'} || \&_mse;
lib/AI/FANN/Evolving/TrainData.pm view on Meta::CPAN
=item predictor_columns
Getter for column name(s) of input value(s)
=cut
sub predictor_columns {
	my $self = shift;
	# columns flagged as ignored or dependent are excluded; everything
	# else in the header counts as a predictor (input) column
	my %excluded;
	$excluded{$_}++ for $self->ignore_columns, $self->dependent_columns;
	return grep { not $excluded{$_} } keys %{ $self->{'header'} };
}
=item predictor_data
Getter for rows of input values
=cut
sub predictor_data {
my ( $self, %args ) = @_;
my $i = $args{'row'};
my @cols = $args{'cols'} ? @{ $args{'cols'} } : $self->predictor_columns;
# build hash of indices to keep
my %keep = map { $self->{'header'}->{$_} => 1 } @cols;
# only return a single row
if ( defined $i ) {
my @pred;
for my $j ( 0 .. $#{ $self->{'table'}->[$i] } ) {
push @pred, $self->{'table'}->[$i]->[$j] if $keep{$j};
}
return \@pred;
}
else {
lib/AI/FANN/Evolving/TrainData.pm view on Meta::CPAN
}
=item dependent_data
Getter for dependent (classifier) data
=cut
sub dependent_data {
	my ( $self, $i ) = @_;
	# translate dependent column names into their table indices
	my @indices = map { $self->{'header'}->{$_} } $self->dependent_columns;
	if ( defined $i ) {
		# single row requested: slice out its dependent values
		return [ @{ $self->{'table'}->[$i] }[@indices] ];
	}
	# no row given: collect the dependent values for every row
	return map { $self->dependent_data($_) } 0 .. $self->size - 1;
}
lib/AI/FANN/Evolving/TrainData.pm view on Meta::CPAN
my ( $self, $file ) = @_; # file is tab-delimited
$log->debug("reading data from file $file");
open my $fh, '<', $file or die "Can't open $file: $!";
my ( %header, @table );
while(<$fh>) {
chomp;
next if /^\s*$/;
my @fields = split /\t/, $_;
if ( not %header ) {
my $i = 0;
%header = map { $_ => $i++ } @fields;
}
else {
push @table, \@fields;
}
}
$self->{'header'} = \%header;
$self->{'table'} = \@table;
return $self;
}
lib/AI/FANN/Evolving/TrainData.pm view on Meta::CPAN
}
# adjust counts to sample size
for my $key ( keys %seen ) {
$log->debug("counts: $key => $seen{$key}");
$seen{$key} = int( $seen{$key} * $sample );
$log->debug("rescaled: $key => $seen{$key}");
}
# start the sampling
my @dc = map { $self->{'header'}->{$_} } $self->dependent_columns;
my @new_table; # we will populate this
my @table = @{ $clone1->{'table'} }; # work on cloned instance
# as long as there is still sampling to do
SAMPLE: while( grep { !!$_ } values %seen ) {
for my $i ( 0 .. $#table ) {
my @r = @{ $table[$i] };
my $key = join '/', @r[@dc];
if ( $seen{$key} ) {
my $rand = rand(1);
script/aivolver view on Meta::CPAN
'workdir' => $wd,
%experiment,
);
# initialize the experiment
$exp->initialize(%initialize);
# run!
my ( $fittest, $fitness ) = $exp->run();
$log->info("*** overall best fitness: $fitness");
my ($gene) = sort { $a->fitness <=> $b->fitness } map { $_->genes } $fittest->chromosomes;
$gene->ann->save($outfile);
__END__
=pod
=head1 NAME
aivolver - Evolves optimal artificial neural networks