lib/AI/FANN/Evolving.pm
=over
=item new
Constructor requires 'file', or 'data' and 'neurons' arguments. Optionally takes
'connection_rate' argument for sparse topologies. Returns a wrapper around L<AI::FANN>.
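For example (a sketch; the variable name and neuron count are illustrative):

  my $ann = AI::FANN::Evolving->new(
      'data'    => $traindata, # an AI::FANN::TrainData object
      'neurons' => 15,         # hidden layer size
  );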
=cut
sub new {
my $class = shift;
my %args = @_;
my $self = {};
bless $self, $class;
$self->_init(%args);
# de-serialize from a file
if ( my $file = $args{'file'} ) {
$self->{'ann'} = AI::FANN->new_from_file($file);
$log->debug("instantiating from file $file");
# ...
}
}
=item template
Uses the object as a template for the properties of the argument, e.g.
$ann1->template($ann2) applies the properties of $ann1 to $ann2.
=cut
sub template {
my ( $self, $other ) = @_;
# copy over the simple properties
$log->debug("copying over simple properties");
my %scalar_properties = __PACKAGE__->_scalar_properties;
for my $prop ( keys %scalar_properties ) {
my $val = $self->$prop;
$other->$prop($val);
}
# ...
}
=item recombine
Recombines (exchanges) properties between the two objects at the provided rate, e.g.
$ann1->recombine($ann2,0.5) means that on average half of the object properties are
exchanged between $ann1 and $ann2.
=cut
sub recombine {
my ( $self, $other, $rr ) = @_;
# recombine the simple properties
my %scalar_properties = __PACKAGE__->_scalar_properties;
for my $prop ( keys %scalar_properties ) {
if ( rand(1) < $rr ) {
my $vals = $self->$prop;
my $valo = $other->$prop;
$other->$prop($vals);
$self->$prop($valo);
# ...
}
}
return $self;
}
=item mutate
Mutates the object's properties at the provided mutation rate.
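For example, with an illustrative mutation rate of 0.05:

  $ann->mutate(0.05); # mutates in place and returns the object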
=cut
sub mutate {
my ( $self, $mu ) = @_;
$log->debug("going to mutate at rate $mu");
# mutate the simple properties
$log->debug("mutating scalar properties");
my %scalar_properties = __PACKAGE__->_scalar_properties;
for my $prop ( keys %scalar_properties ) {
my $handler = $scalar_properties{$prop};
my $val = $self->$prop;
if ( ref $handler ) {
# coderef handlers perturb the numeric value in proportion to $mu
$self->$prop( $handler->($val,$mu) );
}
else {
# string handlers name an enum from which to draw a different value
$self->$prop( _mutate_enum($handler,$val,$mu) );
}
}
# ...
return $self;
}
sub _mutate_double {
my ( $value, $mu ) = @_;
# scale by a factor drawn uniformly from the interval [ 1 - $mu, 1 + $mu )
my $scale = 1 + ( rand( 2 * $mu ) - $mu );
return $value * $scale;
}
sub _mutate_int {
my ( $value, $mu ) = @_;
if ( rand(1) < $mu ) {
# draw an increment of -1 or +1
my $inc = ( int(rand(2)) * 2 ) - 1;
# redraw while the increment would flip the sign of the value
while( ( $value < 0 ) xor ( ( $value + $inc ) < 0 ) ) {
$inc = ( int(rand(2)) * 2 ) - 1;
}
return $value + $inc;
}
return $value;
}
sub _mutate_enum {
my ( $enum_name, $value, $mu ) = @_;
if ( rand(1) < $mu ) {
# draw a random value from the named enum, excluding the current value
my ($newval) = shuffle grep { $_ != $value } values %{ $enum{$enum_name} };
$value = $newval if defined $newval;
}
return $value;
}
sub _list_properties {
(
# cascade_activation_functions => 'activationfunc',
cascade_activation_steepnesses => \&_mutate_double,
)
}
sub _layer_properties {
(
# neuron_activation_function => 'activationfunc',
# neuron_activation_steepness => \&_mutate_double,
)
}
sub _scalar_properties {
(
training_algorithm => 'train',
train_error_function => 'errorfunc',
train_stop_function => 'stopfunc',
learning_rate => \&_mutate_double,
learning_momentum => \&_mutate_double,
quickprop_decay => \&_mutate_double,
quickprop_mu => \&_mutate_double,
rprop_increase_factor => \&_mutate_double,
rprop_decrease_factor => \&_mutate_double,
# ...
cascade_candidate_limit => \&_mutate_double, # 'fann_type',
)
}
=item defaults
Getter/setter to influence default ANN configuration
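For example (a sketch; the values are illustrative, the keys correspond to the
getters/setters documented below):

  AI::FANN::Evolving->defaults(
      'epochs'     => 1000,
      'neurons'    => 20,
      'train_type' => 'ordinary',
  );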
=cut
sub defaults {
my $self = shift;
my %args = @_;
for my $key ( keys %args ) {
$log->info("setting $key to $args{$key}");
if ( $key eq 'activation_function' ) {
$args{$key} = $constant{$args{$key}};
}
$default{$key} = $args{$key};
}
return %default;
}
sub _init {
my $self = shift;
my %args = @_;
for ( qw(error epochs train_type epoch_printfreq neuron_printfreq neurons activation_function) ) {
$self->{$_} = $args{$_} // $default{$_};
}
return $self;
}
=item clone
Clones the object
=cut
sub clone {
my $self = shift;
$log->debug("cloning...");
# we delete the reference here so we can use
# Algorithm::Genetic::Diploid::Base's cloning method, which
# dumps and loads from YAML. This wouldn't work if the
# reference is still attached because it cannot be
# stringified, being an XS data structure
my $ann = delete $self->{'ann'};
my $clone = $self->SUPER::clone;
# ...
return $clone;
}
=item train
Trains the AI on the provided data object
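A minimal sketch, assuming $data is an L<AI::FANN::TrainData> object, such as
the one held by the experiment:

  $ann->train($data);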
=cut
sub train {
my ( $self, $data ) = @_;
if ( $self->train_type eq 'cascade' ) {
$log->debug("cascade training");
# set the cascade activation function(s)
$self->cascade_activation_functions( $self->activation_function );
# train
$self->{'ann'}->cascadetrain_on_data(
$data,
# ...
);
}
}
=item error
Getter/setter for the error threshold used during training. Default is 0.0001.
=cut
sub error {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->debug("setting error threshold to $value");
return $self->{'error'} = $value;
}
else {
$log->debug("getting error threshold");
return $self->{'error'};
}
}
=item epochs
Getter/setter for the number of training epochs. Default is 500000.
=cut
sub epochs {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->debug("setting training epochs to $value");
return $self->{'epochs'} = $value;
}
else {
$log->debug("getting training epochs");
return $self->{'epochs'};
}
}
=item epoch_printfreq
Getter/setter for the number of epochs after which progress is printed. Default is 1000.
=cut
sub epoch_printfreq {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->debug("setting epoch printfreq to $value");
return $self->{'epoch_printfreq'} = $value;
}
else {
$log->debug("getting epoch printfreq");
return $self->{'epoch_printfreq'};
}
}
=item neurons
Getter/setter for the number of neurons. Default is 15
=cut
sub neurons {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->debug("setting neurons to $value");
return $self->{'neurons'} = $value;
}
else {
$log->debug("getting neurons");
return $self->{'neurons'};
}
}
=item neuron_printfreq
Getter/setter for the number of cascading neurons after which progress is printed.
Default is 10.
=cut
sub neuron_printfreq {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->debug("setting neuron printfreq to $value");
return $self->{'neuron_printfreq'} = $value;
}
else {
$log->debug("getting neuron printfreq");
return $self->{'neuron_printfreq'};
}
}
=item train_type
Getter/setter for the training type: 'cascade' or 'ordinary'. Default is 'ordinary'.
=cut
sub train_type {
my $self = shift;
if ( @_ ) {
my $value = lc shift;
$log->debug("setting train type to $value");
return $self->{'train_type'} = $value;
}
else {
$log->debug("getting train type");
return $self->{'train_type'};
}
}
=item activation_function
Getter/setter for the function that maps inputs to outputs. Default is
FANN_SIGMOID_SYMMETRIC.
=back
=cut
sub activation_function {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->debug("setting activation function to $value");
return $self->{'activation_function'} = $value;
}
else {
$log->debug("getting activation function");
return $self->{'activation_function'};
}
}
# this is here so that we can trap method calls that need to be
# delegated to the FANN object. at this point we're not even
# going to care whether the FANN object implements these methods:
# if it doesn't we get the normal error for unknown methods, which
# the user then will have to resolve.
sub AUTOLOAD {
my $self = shift;
my $method = $AUTOLOAD;
$method =~ s/.+://;
# ignore all-caps methods such as DESTROY
if ( $method !~ /^[A-Z]+$/ ) {
# determine whether to invoke on an object or a package
my $invocant;
if ( ref $self ) {
# ...

lib/AI/FANN/Evolving/Chromosome.pm
=head1 METHODS
=over
=item recombine
Recombines properties of the AI during meiosis in proportion to the crossover_rate
=cut
sub recombine {
$log->debug("recombining chromosomes");
# get the genes and columns for the two chromosomes
my ( $chr1, $chr2 ) = @_;
my ( $gen1 ) = map { $_->mutate } $chr1->genes;
my ( $gen2 ) = map { $_->mutate } $chr2->genes;
my ( $ann1, $ann2 ) = ( $gen1->ann, $gen2->ann );
$ann1->recombine($ann2,$chr1->experiment->crossover_rate);
# assign the genes to the chromosomes (this because they are clones
# so we can't use the old object reference)
# ...
}
=item clone
Clones the object
=back
=cut
sub clone {
my $self = shift;
my @genes = $self->genes;
my $self_clone = $self->SUPER::clone;
$self_clone->genes( map { $_->clone } @genes );
return $self_clone;
}
1;
lib/AI/FANN/Evolving/Experiment.pm
=head1 METHODS
=over
=item new
Constructor takes named arguments, sets default factory to L<AI::FANN::Evolving::Factory>
=cut
sub new { shift->SUPER::new( 'factory' => AI::FANN::Evolving::Factory->new, @_ ) }
=item workdir
Getter/Setter for the workdir where L<AI::FANN> artificial neural networks will be
written during the experiment. The files will be named after the ANN's error, which
needs to be minimized.
=cut
sub workdir {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->info("assigning new workdir $value");
$self->{'workdir'} = $value;
}
else {
$log->debug("retrieving workdir");
}
return $self->{'workdir'};
}
=item traindata
Getter/setter for the L<AI::FANN::TrainData> object.
=cut
sub traindata {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->info("assigning new traindata $value");
$self->{'traindata'} = $value;
}
else {
$log->debug("retrieving traindata");
}
return $self->{'traindata'};
}
=item run
Runs the experiment!
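Returns the fittest solution and its fitness value:

  my ( $fittest, $fitness ) = $experiment->run;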
=cut
sub run {
my $self = shift;
my $log = $self->logger;
$log->info("going to run experiment");
my @results;
for my $i ( 1 .. $self->ngens ) {
# modify workdir
my $wd = $self->{'workdir'};
$wd =~ s/\d+$/$i/;
# ...
return $fittest, $fitness;
}
=item optimum
The optimal fitness is zero error in the ANN's classification. This method returns
that value: 0.
=cut
sub optimum { 0 }
sub _sign {
my ( $obs, $exp ) = @_;
# average number of times observed and expected differ in sign
my $fitness = 0;
for my $i ( 0 .. $#{ $obs } ) {
$fitness += ( ( $obs->[$i] > 0 ) xor ( $exp->[$i] > 0 ) );
}
return $fitness / scalar(@{$obs});
}
sub _mse {
my ( $obs, $exp ) = @_;
# mean squared difference between observed and expected values
my $fitness = 0;
for my $i ( 0 .. $#{ $obs } ) {
$fitness += ( $obs->[$i] - $exp->[$i] ) ** 2;
}
return $fitness / scalar(@{$obs});
}
=item error_func
Returns a function to compute the error. Given an argument, the following can happen:
'sign' => error is the average number of times observed and expected have different signs
'mse' => error is the mean squared difference between observed and expected
CODE => error function is the provided code reference
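For example (a sketch; a custom function receives array references of observed
and expected values, mirroring the built-in functions):

  $experiment->error_func('mse');
  $experiment->error_func( sub {
      my ( $obs, $exp ) = @_;
      # ... compute and return an error value from @$obs and @$exp
  } );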
=back
=cut
sub error_func {
my $self = shift;
# process the argument
if ( @_ ) {
my $arg = shift;
if ( ref $arg eq 'CODE' ) {
$self->{'error_func'} = $arg;
$log->info("using custom error function");
}
elsif ( $arg eq 'sign' ) {
# ...

lib/AI/FANN/Evolving/Factory.pm
=item new
Constructor takes named arguments. Key is a short name (e.g. 'traindata'), value is a
fully qualified package name (e.g. L<AI::FANN::TrainData>) from which to instantiate
objects identified by the short name.
=back
=cut
sub new { shift->SUPER::new(%defaults,@_) }
1;
lib/AI/FANN/Evolving/Gene.pm
=head1 METHODS
=over
=item new
Constructor is passed named arguments. Instantiates a trained L<AI::FANN::Evolving> ANN
=cut
sub new {
# initialize self up the inheritance tree
my $self = shift->SUPER::new(@_);
# instantiate and train the FANN object
my $traindata = $self->experiment->traindata;
$self->ann( AI::FANN::Evolving->new( 'data' => $traindata ) );
return $self;
}
=item ann
Getter/setter for an L<AI::FANN::Evolving> ANN
=cut
sub ann {
my $self = shift;
if ( @_ ) {
my $ann = shift;
$log->debug("setting ANN $ann");
return $self->{'ann'} = $ann;
}
else {
$log->debug("getting ANN");
return $self->{'ann'};
}
}
=item make_function
Returns a code reference to the fitness function, which when executed returns a fitness
value and writes the corresponding ANN to file
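A sketch of expressing the returned function, assuming $env holds the
out-of-sample data set:

  my $fitness = $gene->make_function->($env);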
=cut
sub make_function {
my $self = shift;
my $ann = $self->ann;
my $error_func = $self->experiment->error_func;
$log->debug("making fitness function");
# build the fitness function
return sub {
# train the AI
$ann->train( $self->experiment->traindata );
# $env isa TrainData object; this is what we use to make our
# prognostications. It is a different (out-of-sample) data set
# than the one the AI was trained on.
my $env = shift;
# ...
return $self->{'fitness'};
}
}
=item fitness
Getter for the fitness value, which is stored once the fitness function has been expressed.
=cut
sub fitness { shift->{'fitness'} }
=item clone
Clones the object
=cut
sub clone {
my $self = shift;
my $ann = delete $self->{'ann'};
my $ann_clone = $ann->clone;
my $self_clone = $self->SUPER::clone;
$self_clone->ann( $ann_clone );
$self->ann( $ann );
return $self_clone;
}
=item mutate
Mutates the ANN by stochastically altering its properties in proportion to
the mutation_rate
=back
=cut
sub mutate {
my $self = shift;
# the experiment's mutation rate, e.g. 0.05
my $mu = $self->experiment->mutation_rate;
# make a clone, whose untrained ANN properties are mutated
my $self_clone = $self->clone;
my $ann = AI::FANN::Evolving->new( 'ann' => $self->ann );
$ann->mutate($mu);
$self_clone->ann($ann);
# ...

lib/AI/FANN/Evolving/TrainData.pm
=over
=item new
Constructor takes named arguments. By default, ignores the column named ID and
treats the column named CLASS as the classifier.
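For example (the file name is a placeholder):

  my $data = AI::FANN::Evolving::TrainData->new(
      'file' => 'training.tsv', # tab-separated table with a header row
      'trim' => 1,              # drop rows with missing values
  );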
=cut
sub new {
my $self = shift->SUPER::new(
'ignore' => [ 'ID' ],
'dependent' => [ 'CLASS' ],
'header' => {},
'table' => [],
@_
);
my %args = @_;
$self->read_data($args{'file'}) if $args{'file'};
$self->trim_data if $args{'trim'};
return $self;
}
=item ignore_columns
Getter/setter for column names to ignore in the train data structure,
for example an identifier column named 'ID'.
=cut
sub ignore_columns {
my $self = shift;
$self->{'ignore'} = \@_ if @_;
return @{ $self->{'ignore'} };
}
=item dependent_columns
Getter/setter for column name(s) of the output value(s).
=cut
sub dependent_columns {
my $self = shift;
$self->{'dependent'} = \@_ if @_;
return @{ $self->{'dependent'} };
}
=item predictor_columns
Getter for column name(s) of input value(s)
=cut
sub predictor_columns {
my $self = shift;
my @others = ( $self->ignore_columns, $self->dependent_columns );
my %skip = map { $_ => 1 } @others;
return grep { ! $skip{$_} } keys %{ $self->{'header'} };
}
=item predictor_data
Getter for rows of input values
=cut
sub predictor_data {
my ( $self, %args ) = @_;
my $i = $args{'row'};
my @cols = $args{'cols'} ? @{ $args{'cols'} } : $self->predictor_columns;
# build hash of indices to keep
my %keep = map { $self->{'header'}->{$_} => 1 } @cols;
# only return a single row
if ( defined $i ) {
my @pred;
# ...
return @preds;
}
}
=item dependent_data
Getter for dependent (classifier) data
=cut
sub dependent_data {
my ( $self, $i ) = @_;
my @dc = map { $self->{'header'}->{$_} } $self->dependent_columns;
if ( defined $i ) {
return [ map { $self->{'table'}->[$i]->[$_] } @dc ];
}
else {
my @dep;
for my $j ( 0 .. $self->size - 1 ) {
push @dep, $self->dependent_data($j);
}
return @dep;
}
}
=item read_data
Reads provided input file
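An illustrative layout (tab-separated; the predictor column names are
hypothetical, ID and CLASS have their default meanings):

  ID      LENGTH  WIDTH   CLASS
  1       0.53    0.81    1
  2       0.12    0.25    -1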
=cut
sub read_data {
my ( $self, $file ) = @_; # file is tab-delimited
$log->debug("reading data from file $file");
open my $fh, '<', $file or die "Can't open $file: $!";
my ( %header, @table );
while(<$fh>) {
chomp;
next if /^\s*$/;
my @fields = split /\t/, $_;
if ( not %header ) {
my $i = 0;
# ...
$self->{'table'} = \@table;
return $self;
}
=item write_data
Writes to provided output file
=cut
sub write_data {
my ( $self, $file ) = @_;
# use file or STDOUT
my $fh;
if ( $file ) {
open $fh, '>', $file or die "Can't write to $file: $!";
$log->info("writing data to $file");
}
else {
$fh = \*STDOUT;
# ...
print $fh "\n";
}
}
=item trim_data
Removes rows that contain missing values.
=cut
sub trim_data {
my $self = shift;
my @trimmed;
ROW: for my $row ( @{ $self->{'table'} } ) {
next ROW if grep { not defined $_ } @{ $row };
push @trimmed, $row;
}
my $num = $self->size - scalar @trimmed;
$log->info("removed $num incomplete rows");
$self->{'table'} = \@trimmed;
}
=item sample_data
Samples a fraction (default 0.5) of the data. Returns two clones: the sample and the rest of the data.
=cut
sub sample_data {
my $self = shift;
my $sample = shift || 0.5;
my $clone1 = $self->clone;
my $clone2 = $self->clone;
my $size = $self->size;
my @sample;
$clone2->{'table'} = \@sample;
while( scalar(@sample) < int( $size * $sample ) ) {
my @shuffled = shuffle( @{ $clone1->{'table'} } );
push @sample, shift @shuffled;
# ...
}
return $clone2, $clone1;
}
=item partition_data
Creates two clones that partition the data according to the provided ratio.
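For example, with an illustrative ratio of 0.2:

  my ( $part1, $part2 ) = $data->partition_data(0.2); # 0.2 : 0.8 split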
=cut
sub partition_data {
my $self = shift;
my $sample = shift || 0.5;
my $clone1 = $self->clone;
my $clone2 = $self->clone;
my $remain = 1 - $sample;
$log->info("going to partition into $sample : $remain");
# compute number of different dependent patterns and ratios of each
my @dependents = $self->dependent_data;
my %seen;
# ...
$clone1->{'table'} = \@table;
return $clone2, $clone1;
}
=item size
Returns the number of data records
=cut
sub size { scalar @{ shift->{'table'} } }
=item to_fann
Packs data into an L<AI::FANN> TrainData structure
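A minimal sketch; passing explicit predictor column names is optional:

  my $struct = $data->to_fann( $data->predictor_columns );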
=cut
sub to_fann {
$log->debug("encoding data as FANN struct");
my $self = shift;
my @cols = @_ ? @_ : $self->predictor_columns;
my @deps = $self->dependent_data;
my @pred = $self->predictor_data( 'cols' => \@cols );
my @interdigitated;
for my $i ( 0 .. $#deps ) {
push @interdigitated, $pred[$i], $deps[$i];
}
return AI::FANN::TrainData->new(@interdigitated);
}

script/aivolver
# process command line arguments
GetOptions(
'verbose+' => \$verbosity,
'formatter=s' => \$formatter,
'outfile=s' => \$outfile,
'initialize=s' => \%initialize,
'data=s' => \%data,
'experiment=s' => \%experiment,
'ann=s' => \%ann,
'help|?' => sub { pod2usage( '-verbose' => 1 ) },
'manual' => sub { pod2usage( '-verbose' => 2 ) },
);
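# An illustrative invocation (file names and keys are placeholders); the
# hash-valued options take key=value pairs, per Getopt::Long:
#
#   aivolver --verbose --data file=training.tsv \
#       --experiment workdir=out --outfile fittest.yml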
# configure ANN
AI::FANN::Evolving->defaults(%ann);
# configure logger
my $log = Algorithm::Genetic::Diploid::Logger->new;
$log->level( 'level' => $verbosity );
$log->formatter( $formatter );