_Inline/build/AI/ANN/Neuron_6185/Makefile view on Meta::CPAN
ci :
$(PERLRUN) "-MExtUtils::Manifest=maniread" \
-e "@all = keys %{ maniread() };" \
-e "print(qq{Executing $(CI) @all\n}); system(qq{$(CI) @all});" \
-e "print(qq{Executing $(RCS_LABEL) ...\n}); system(qq{$(RCS_LABEL) @all});"
# --- MakeMaker distmeta section:
distmeta : create_distdir metafile
$(NOECHO) cd $(DISTVNAME) && $(ABSPERLRUN) -MExtUtils::Manifest=maniadd -e 'eval { maniadd({q{META.yml} => q{Module meta-data (added by MakeMaker)}}) } ' \
-e ' or print "Could not add META.yml to MANIFEST: $${'\''@'\''}\n"' --
# --- MakeMaker distsignature section:
distsignature : create_distdir
$(NOECHO) cd $(DISTVNAME) && $(ABSPERLRUN) -MExtUtils::Manifest=maniadd -e 'eval { maniadd({q{SIGNATURE} => q{Public-key signature (added by MakeMaker)}}) } ' \
-e ' or print "Could not add SIGNATURE to MANIFEST: $${'\''@'\''}\n"' --
$(NOECHO) cd $(DISTVNAME) && $(TOUCH) SIGNATURE
cd $(DISTVNAME) && cpansign -s
examples/benchmark.pl view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
use Benchmark qw(:all);
use AI::ANN::Neuron;
my %data = (id => 1, inputs => [ 4*rand()-2, 4*rand()-2, 4*rand()-2,
4*rand()-2, 4*rand()-2 ],
neurons => [ 4*rand()-2, 4*rand()-2, 4*rand()-2,
4*rand()-2, 4*rand()-2 ]);
my $object1 = new AI::ANN::Neuron ( %data, inline_c => 0 );
my $object2 = new AI::ANN::Neuron ( %data, inline_c => 1 );
my @data = ( [ 4*rand()-2, 4*rand()-2, 4*rand()-2, 4*rand()-2, 4*rand()-2 ],
[ 4*rand()-2, 4*rand()-2, 4*rand()-2, 4*rand()-2, 4*rand()-2 ]);
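# cmpthese(-1, ...) runs each sub for at least one CPU second and
# prints a table comparing their relative speeds.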
cmpthese( -1, { 'pure_perl' => sub{$object1->execute(@data)},
'inline_c' => sub{$object2->execute(@data)} });
use Math::Libm qw(erf M_PI);
use Inline C => <<'END_C';
#include <math.h>
double afunc[4001];
double dafunc[4001];
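/* Precompute lookup tables for the activation function and its
   derivative, sampled over [-2, 2] in steps of 0.001. */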
void generate_globals() {
int i;
for (i=0;i<=4000;i++) {
afunc[i] = 2 * (erf(i/1000.0-2));
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
lib/AI/ANN.pm view on Meta::CPAN
has 'rawpotentials' => (is => 'ro', isa => 'ArrayRef[Int]');
has 'minvalue' => (is => 'rw', isa => 'Int', default => 0);
has 'maxvalue' => (is => 'rw', isa => 'Int', default => 1);
has 'afunc' => (is => 'rw', isa => 'CodeRef', default => sub {sub {shift}});
has 'dafunc' => (is => 'rw', isa => 'CodeRef', default => sub {sub {1}});
has 'backprop_eta' => (is => 'rw', isa => 'Num', default => 0.1);
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
my %data;
if ( @_ == 1 && ref $_[0] eq 'HASH' ) {
%data = %{$_[0]};
} else {
%data = @_;
}
if (exists $data{'inputs'} && not exists $data{'input_count'}) {
$data{'input_count'} = $data{'inputs'};
delete $data{'inputs'}; # inputs is used later for the actual
# values of the inputs.
}
my $neuronlist = $data{'data'};
$data{'outputneurons'} = [];
$data{'network'} = [];
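	# Walk the neuron definitions: record which indices are outputs,
	# then wrap each definition in an AI::ANN::Neuron object.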
for (my $i = 0; $i <= $#{$neuronlist} ; $i++) {
push @{$data{'outputneurons'}}, $i
if $neuronlist->[$i]->{'iamanoutput'};
my @pass = (
$i,
$neuronlist->[$i]->{'inputs'},
$neuronlist->[$i]->{'neurons'} );
push @pass, $neuronlist->[$i]->{'eta_inputs'},
$neuronlist->[$i]->{'eta_neurons'}
if defined $neuronlist->[$i]->{'eta_neurons'};
$data{'network'}->[$i]->{'object'} =
new AI::ANN::Neuron( @pass );
}
delete $data{'data'};
return $class->$orig(%data);
};
sub execute {
my $self = shift;
my $inputs = $self->{'inputs'} = shift;
# Don't bother dereferencing $inputs only to rereference a lot
my $net = $self->{'network'}; # For less typing
my $lastneuron = $#{$net};
my @neurons = ();
lib/AI/ANN.pm view on Meta::CPAN
topology and weights, as well as many parameters), and to then retrieve those
details. The purpose of this is to allow an additional module to then tweak
these values by a means that models evolution by natural selection. The
canonical way to do this is the included AI::ANN::Evolver, which allows
the addition of random mutations to individual networks, and the crossing of
two networks. You will also, depending on your application, need a fitness
function of some sort to determine which networks are allowed to propagate.
Here is an example of that system.
use AI::ANN;
my $network = new AI::ANN ( input_count => $inputcount, data => \@neuron_definition );
my $outputs = $network->execute( \@inputs ); # Basic network use
use AI::ANN::Evolver;
my $handofgod = new AI::ANN::Evolver (); # See that module for calling details
my $network2 = $handofgod->mutate($network); # Random mutations
# Test an entire 'generation' of networks, and let $network and $network2 be
# among those with the highest fitness function in the generation.
my $network3 = $handofgod->crossover($network, $network2);
# Perhaps mutate() each network either before or after the crossover to
# introduce variety.
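For instance, a complete (if simplistic) generation loop might look like the
sketch below. fitness() is a stand-in for whatever scoring routine your
application provides; the population size and survivor count are arbitrary.
my @population = map { $handofgod->mutate($network) } 1 .. 20;
for my $generation (1 .. 100) {
    my @ranked = sort { fitness($b) <=> fitness($a) } @population;
    my @parents = @ranked[0 .. 4]; # keep the fittest few
    @population = @parents;
    while (@population < 20) {
        my $p1 = $parents[int rand @parents];
        my $p2 = $parents[int rand @parents];
        push @population, $handofgod->mutate($handofgod->crossover($p1, $p2));
    }
}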
We elected to do this with a new module rather than by extending an existing
module because of the extensive differences in the internal structure and the
interface that were necessary to accomplish these goals.
=head1 METHODS
=head2 new
ANN::new(input_count => $inputcount, data => [{ iamanoutput => 0, inputs => {$inputid => $weight, ...}, neurons => {$neuronid => $weight}}, ...])
input_count is number of inputs.
data is an arrayref of neuron definitions.
The first neuron with iamanoutput=1 is output 0. The second is output 1.
I hope you're seeing the pattern...
minvalue is the minimum value a neuron can pass. Default 0.
maxvalue is the maximum value a neuron can pass. Default 1.
afunc is a reference to the activation function. It should be simple and fast.
The activation function is processed /after/ minvalue and maxvalue.
dafunc is the derivative of the activation function.
We strongly advise that you memoize your afunc and dafunc if they are at all
complicated. We will do our best to behave.
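As an illustration (not part of the module), a memoized logistic activation
and its derivative could be supplied like this, with @neuron_definition as in
the earlier example:
use Memoize;
sub sigmoid { 1 / (1 + exp(-$_[0])) }
sub dsigmoid { my $s = sigmoid($_[0]); $s * (1 - $s) }
memoize($_) for qw(sigmoid dsigmoid);
my $network = new AI::ANN ( input_count => 2, data => \@neuron_definition,
                            afunc => \&sigmoid, dafunc => \&dsigmoid );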
lib/AI/ANN.pm view on Meta::CPAN
network, then returns the output.
We store the current state of the network in two places - once in the object,
for persistence, and once in $neurons, for simplicity. This might be wrong,
but I couldn't think of a better way.
=head2 get_state
$network->get_state()
Returns three arrayrefs, [$input0, ...], [$neuron0, ...], [$output0, ...],
corresponding to the data from the last call to execute().
Intended primarily to assist with debugging.
=head2 get_internals
$network->get_internals()
Returns the weights in a not-human-consumable format.
=head2 readable
$network->readable()
Returns a human-friendly and diffable description of the network.
=head2 backprop
$network->backprop(\@inputs, \@outputs)
Performs back-propagation learning on the neural network with the provided
training data. Uses backprop_eta as a training rate and dafunc as the
derivative of the activation function.
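A minimal training loop, assuming @training holds [\@inputs, \@targets] pairs
(both names are illustrative):
for my $epoch (1 .. 1000) {
    for my $pair (@training) {
        my ($inputs, $targets) = @$pair;
        $network->backprop($inputs, $targets);
    }
}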
=head1 AUTHOR
Dan Collins <DCOLLINS@cpan.org>
=head1 COPYRIGHT AND LICENSE
This software is Copyright (c) 2011 by Dan Collins.
lib/AI/ANN/Evolver.pm view on Meta::CPAN
has 'mutation_amount' => (is => 'rw', isa => 'CodeRef', default => sub{sub{2 * rand() - 1}});
has 'add_link_chance' => (is => 'rw', isa => 'Num', default => 0);
has 'kill_link_chance' => (is => 'rw', isa => 'Num', default => 0);
has 'sub_crossover_chance' => (is => 'rw', isa => 'Num', default => 0);
has 'gaussian_tau' => (is => 'rw', isa => 'CodeRef', default => sub{sub{1/sqrt(2*sqrt(shift))}});
has 'gaussian_tau_prime' => (is => 'rw', isa => 'CodeRef', default => sub{sub{1/sqrt(2*shift)}});
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
my %data;
if ( @_ == 1 && ref $_[0] eq 'HASH' ) {
%data = %{$_[0]};
} else {
%data = @_;
}
	if (exists $data{'mutation_amount'} && ref $data{'mutation_amount'} ne 'CODE') {
my $range = $data{'mutation_amount'};
$data{'mutation_amount'} = sub { $range * (rand() * 2 - 1) };
}
return $class->$orig(%data);
};
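# Note: mutation_amount may be passed either as a coderef or as a plain
# number $n; BUILDARGS above wraps a number into sub { $n * (rand() * 2 - 1) },
# i.e. a uniform perturbation in (-$n, $n).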
sub crossover {
my $self = shift;
my $network1 = shift;
my $network2 = shift;
my $class = ref($network1);
my $inputcount = $network1->input_count();
my $minvalue = $network1->minvalue();
my $maxvalue = $network1->maxvalue();
my $afunc = $network1->afunc();
my $dafunc = $network1->dafunc();
# They better have the same number of inputs
$inputcount == $network2->input_count() || return -1;
my $networkdata1 = $network1->get_internals();
my $networkdata2 = $network2->get_internals();
my $neuroncount = $#{$networkdata1};
# They better also have the same number of neurons
$neuroncount == $#{$networkdata2} || return -1;
my $networkdata3 = [];
for (my $i = 0; $i <= $neuroncount; $i++) {
if (rand() < $self->{'sub_crossover_chance'}) {
$networkdata3->[$i] = { 'inputs' => [], 'neurons' => [] };
$networkdata3->[$i]->{'iamanoutput'} =
$networkdata1->[$i]->{'iamanoutput'};
for (my $j = 0; $j < $inputcount; $j++) {
$networkdata3->[$i]->{'inputs'}->[$j] =
(rand() > 0.5) ?
$networkdata1->[$i]->{'inputs'}->[$j] :
$networkdata2->[$i]->{'inputs'}->[$j];
# Note to self: Don't get any silly ideas about dclone()ing
# these, that's a good way to waste half an hour debugging.
}
for (my $j = 0; $j <= $neuroncount; $j++) {
$networkdata3->[$i]->{'neurons'}->[$j] =
(rand() > 0.5) ?
$networkdata1->[$i]->{'neurons'}->[$j] :
$networkdata2->[$i]->{'neurons'}->[$j];
}
} else {
$networkdata3->[$i] = dclone(
(rand() > 0.5) ?
$networkdata1->[$i] :
$networkdata2->[$i] );
}
}
my $network3 = $class->new ( 'inputs' => $inputcount,
'data' => $networkdata3,
'minvalue' => $minvalue,
'maxvalue' => $maxvalue,
'afunc' => $afunc,
'dafunc' => $dafunc);
return $network3;
}
sub mutate {
my $self = shift;
my $network = shift;
my $class = ref($network);
my $networkdata = $network->get_internals();
my $inputcount = $network->input_count();
my $minvalue = $network->minvalue();
my $maxvalue = $network->maxvalue();
my $afunc = $network->afunc();
my $dafunc = $network->dafunc();
	my $neuroncount = $#{$networkdata}; # Note: despite the name, this is
	                                    # the last index, not the count.
$networkdata = dclone($networkdata); # For safety.
for (my $i = 0; $i <= $neuroncount; $i++) {
# First each input/neuron pair
for (my $j = 0; $j < $inputcount; $j++) {
my $weight = $networkdata->[$i]->{'inputs'}->[$j];
if (defined $weight && $weight != 0) {
if (rand() < $self->{'mutation_chance'}) {
				$weight += $self->{'mutation_amount'}->(); # coderef; see BUILDARGS
if ($weight > $self->{'max_value'}) {
$weight = $self->{'max_value'};
}
if ($weight < $self->{'min_value'}) {
$weight = $self->{'min_value'} + 0.000001;
}
}
lib/AI/ANN/Evolver.pm view on Meta::CPAN
$weight = $self->{'max_value'};
}
if ($weight < $self->{'min_value'}) {
$weight = $self->{'min_value'} + 0.000001;
}
# But we /don't/ need to do a kill_link_chance just yet.
}
}
# This would be a bloody nightmare if we hadn't done that dclone
# magic before. But look how easy it is!
$networkdata->[$i]->{'inputs'}->[$j] = $weight;
}
# Now each neuron/neuron pair
for (my $j = 0; $j <= $neuroncount; $j++) {
# As a reminder to those cursed with the duty of maintaining this code:
# This should be an exact copy of the code above, except that 'inputs'
# would be replaced with 'neurons'.
my $weight = $networkdata->[$i]->{'neurons'}->[$j];
if (defined $weight && $weight != 0) {
if (rand() < $self->{'mutation_chance'}) {
				$weight += $self->{'mutation_amount'}->(); # coderef; see BUILDARGS
if ($weight > $self->{'max_value'}) {
$weight = $self->{'max_value'};
}
if ($weight < $self->{'min_value'}) {
$weight = $self->{'min_value'} + 0.000001;
}
}
lib/AI/ANN/Evolver.pm view on Meta::CPAN
$weight = $self->{'max_value'};
}
if ($weight < $self->{'min_value'}) {
$weight = $self->{'min_value'} + 0.000001;
}
# But we /don't/ need to do a kill_link_chance just yet.
}
}
# This would be a bloody nightmare if we hadn't done that dclone
# magic before. But look how easy it is!
$networkdata->[$i]->{'neurons'}->[$j] = $weight;
}
# That was rather tiring, and that's only for the first neuron!!
}
# All done. Let's pack it back into an object and let someone else deal
# with it.
$network = $class->new ( 'inputs' => $inputcount,
'data' => $networkdata,
'minvalue' => $minvalue,
'maxvalue' => $maxvalue,
'afunc' => $afunc,
'dafunc' => $dafunc);
return $network;
}
sub mutate_gaussian {
my $self = shift;
my $network = shift;
my $class = ref($network);
my $networkdata = $network->get_internals();
my $inputcount = $network->input_count();
my $minvalue = $network->minvalue();
my $maxvalue = $network->maxvalue();
my $afunc = $network->afunc();
my $dafunc = $network->dafunc();
	my $neuroncount = $#{$networkdata}; # Note: despite the name, this is
	                                    # the last index, not the count.
$networkdata = dclone($networkdata); # For safety.
for (my $i = 0; $i <= $neuroncount; $i++) {
my $n = 0;
for (my $j = 0; $j < $inputcount; $j++) {
my $weight = $networkdata->[$i]->{'inputs'}->[$j];
$n++ if $weight;
}
for (my $j = 0; $j <= $neuroncount; $j++) {
my $weight = $networkdata->[$i]->{'neurons'}->[$j];
$n++ if $weight;
}
next if $n == 0;
my $tau = &{$self->{'gaussian_tau'}}($n);
my $tau_prime = &{$self->{'gaussian_tau_prime'}}($n);
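		# Self-adaptive update in the style of evolutionary programming:
		# eta' = eta * exp(tau' * r1 + tau * r2), then w' = w + eta' * r2.
		# Note that r1 and r2 are drawn uniformly from [-1, 1) below, where
		# the classic scheme uses standard normal deviates.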
my $random1 = 2 * rand() - 1;
for (my $j = 0; $j < $inputcount; $j++) {
my $weight = $networkdata->[$i]->{'inputs'}->[$j];
next unless $weight;
my $random2 = 2 * rand() - 1;
$networkdata->[$i]->{'eta_inputs'}->[$j] *= exp($tau_prime*$random1+$tau*$random2);
$networkdata->[$i]->{'inputs'}->[$j] += $networkdata->[$i]->{'eta_inputs'}->[$j]*$random2;
}
for (my $j = 0; $j <= $neuroncount; $j++) {
my $weight = $networkdata->[$i]->{'neurons'}->[$j];
next unless $weight;
my $random2 = 2 * rand() - 1;
$networkdata->[$i]->{'eta_neurons'}->[$j] *= exp($tau_prime*$random1+$tau*$random2);
$networkdata->[$i]->{'neurons'}->[$j] += $networkdata->[$i]->{'eta_neurons'}->[$j]*$random2;
}
}
# All done. Let's pack it back into an object and let someone else deal
# with it.
$network = $class->new ( 'inputs' => $inputcount,
'data' => $networkdata,
'minvalue' => $minvalue,
'maxvalue' => $maxvalue,
'afunc' => $afunc,
'dafunc' => $dafunc);
return $network;
}
__PACKAGE__->meta->make_immutable;
1;
lib/AI/ANN/Neuron.pm view on Meta::CPAN
has 'id' => (is => 'rw', isa => 'Int');
has 'inputs' => (is => 'rw', isa => 'ArrayRef', required => 1);
has 'neurons' => (is => 'rw', isa => 'ArrayRef', required => 1);
has 'eta_inputs' => (is => 'rw', isa => 'ArrayRef');
has 'eta_neurons' => (is => 'rw', isa => 'ArrayRef');
has 'inline_c' => (is => 'ro', isa => 'Int', required => 1, default => 1);
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
my %data;
if ( @_ >= 2 && ref $_[0] && ref $_[1]) {
%data = ('inputs' => $_[0], 'neurons' => $_[1]);
$data{'eta_inputs'} = $_[2] if defined $_[2];
$data{'eta_neurons'} = $_[3] if defined $_[3];
} elsif ( @_ >= 3 && ref $_[1] && ref $_[2]) {
%data = ('id' => $_[0], 'inputs' => $_[1], 'neurons' => $_[2]);
$data{'eta_inputs'} = $_[3] if defined $_[3];
$data{'eta_neurons'} = $_[4] if defined $_[4];
} elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) {
%data = %{$_[0]};
} else {
%data = @_;
}
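	# Weight lists may arrive either as arrayrefs or as sparse hashrefs
	# keyed by index ({3 => 0.5, ...}); the hash forms are expanded below
	# into dense arrays, and any holes are zero-filled afterwards.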
if (ref $data{'inputs'} eq 'HASH') {
my @temparray;
foreach my $i (keys %{$data{'inputs'}}) {
if (defined $data{'inputs'}->{$i} && $data{'inputs'}->{$i} != 0) {
$temparray[$i]=$data{'inputs'}->{$i};
}
}
$data{'inputs'}=\@temparray;
}
if (ref $data{'neurons'} eq 'HASH') {
my @temparray;
foreach my $i (keys %{$data{'neurons'}}) {
if (defined $data{'neurons'}->{$i} && $data{'neurons'}->{$i} != 0) {
$temparray[$i]=$data{'neurons'}->{$i};
}
}
$data{'neurons'}=\@temparray;
}
if (defined $data{'eta_inputs'} && ref $data{'eta_inputs'} eq 'HASH') {
my @temparray;
foreach my $i (keys %{$data{'eta_inputs'}}) {
if (defined $data{'eta_inputs'}->{$i} && $data{'eta_inputs'}->{$i} != 0) {
$temparray[$i]=$data{'eta_inputs'}->{$i};
}
}
$data{'eta_inputs'}=\@temparray;
}
if (defined $data{'eta_neurons'} && ref $data{'eta_neurons'} eq 'HASH') {
my @temparray;
foreach my $i (keys %{$data{'eta_neurons'}}) {
if (defined $data{'eta_neurons'}->{$i} && $data{'eta_neurons'}->{$i} != 0) {
$temparray[$i]=$data{'eta_neurons'}->{$i};
}
}
$data{'eta_neurons'}=\@temparray;
}
foreach my $i (0..$#{$data{'inputs'}}) {
$data{'inputs'}->[$i] ||= 0;
}
foreach my $i (0..$#{$data{'neurons'}}) {
$data{'neurons'}->[$i] ||= 0;
}
foreach my $i (0..$#{$data{'eta_inputs'}}) {
$data{'eta_inputs'}->[$i] ||= 0;
}
foreach my $i (0..$#{$data{'eta_neurons'}}) {
$data{'eta_neurons'}->[$i] ||= 0;
}
return $class->$orig(%data);
};
sub ready {
my $self = shift;
my $inputs = shift;
my $neurons = shift;
if (ref $neurons eq 'HASH') {
my @temparray;
foreach my $i (keys %$neurons) {
t/00_initialize.t view on Meta::CPAN
use Test::More tests => 9;
BEGIN { use_ok('AI::ANN');
use_ok('AI::ANN::Neuron');
use_ok('AI::ANN::Evolver'); };
#########################
# Insert your test code below, the Test::More module is use()ed here so read
# its man page ( perldoc Test::More ) for help writing this test script.
$network=new AI::ANN ('inputs' => 1, 'data' => [{ iamanoutput => 1, inputs => {0 => 1}, neurons => {}}]);
ok(defined $network, "new() works");
ok($network->isa("AI::ANN"), "Right class");
$neuron=new AI::ANN::Neuron (0, {0 => 1}, {});
ok(defined $neuron, "new() works");
ok($neuron->isa("AI::ANN::Neuron"), "Right class");
$evolver=new AI::ANN::Evolver ();
t/02_network_basic.t view on Meta::CPAN
use Test::More tests => 25;
BEGIN { use_ok('AI::ANN');
use_ok('AI::ANN::Neuron'); };
#########################
# Insert your test code below, the Test::More module is use()ed here so read
# its man page ( perldoc Test::More ) for help writing this test script.
$network=new AI::ANN ('inputs'=>1, 'data'=>[{ iamanoutput => 1, inputs => {0 => 1}, neurons => {}}], 'maxvalue' => 10);
ok(defined $network, "new() works");
ok($network->isa("AI::ANN"), "Right class");
ok($out=$network->execute([1]), "executed and still alive");
is($#{$out}, 0, "execute() output for a single neuron is the right length");
is($out->[0], 1, "execute() output for a single neuron has the correct value");
($inputs, $neurons, $outputs) = $network->get_state();
is($#{$inputs}, 0, "get_state inputs is correct length");
is($inputs->[0], 1, "get_state inputs returns correct element 0");
is($#{$neurons}, 0, "get_state neurons is correct length");
is($neurons->[0], 1, "get_state neurons returns correct element 0");
is($#{$outputs}, 0, "get_state outputs is correct length");
is($outputs->[0], 1, "get_state outputs returns correct element 0");
$network=new AI::ANN ('inputs'=>1, 'data'=>[{ iamanoutput => 0, inputs => {0 => 2}, neurons => {}},
{ iamanoutput => 1, inputs => {}, neurons => {0 => 2}}], 'maxvalue' => 10);
ok(defined $network, "new() works");
ok($network->isa("AI::ANN"), "Right class");
ok($out=$network->execute([1]), "executed and still alive");
is($#{$out}, 0, "execute() output for two neurons is the right length");
is($out->[0], 4, "execute() output for two neurons has the correct value");
t/03_evolver_basic.t view on Meta::CPAN
BEGIN { use_ok('AI::ANN');
use_ok('AI::ANN::Evolver'); };
#########################
# Insert your test code below, the Test::More module is use()ed here so read
# its man page ( perldoc Test::More ) for help writing this test script.
# This test basically works by crossing a network with itself and making sure nothing changes
$network1=new AI::ANN ('inputs'=>1, 'data'=>[{ iamanoutput => 1, inputs => {0 => 1}, neurons => {}}]);
$network2=new AI::ANN ('inputs'=>1, 'data'=>[{ iamanoutput => 1, inputs => {0 => 1}, neurons => {}}]);
ok(defined $network1, "new() works");
ok($network1->isa("AI::ANN"), "Right class");
ok(defined $network2, "new() works");
ok($network2->isa("AI::ANN"), "Right class");
ok($out=$network1->execute([1]), "executed and still alive");
is($#{$out}, 0, "execute() output for a single neuron is the right length before crossover");
is($out->[0], 1, "execute() output for a single neuron has the correct value before crossover");
t/03_evolver_basic.t view on Meta::CPAN
is($outputs->[0], 1, "get_state outputs returns correct element 0");
# Now we'll do it bigger!
$evolver=new AI::ANN::Evolver ({mutation_chance => 0.5,
mutation_amount => 0.2, add_link_chance => 0.2,
kill_link_chance => 0.2, sub_crossover_chance =>
0.2, min_value => 0, max_value => 4});
$network1=new AI::ANN ('inputs'=>1,
'data'=>[{ iamanoutput => 0, inputs => {0 => 2}, neurons => {}, eta_inputs => {0 => 0.5}, eta_neurons => {}},
{ iamanoutput => 0, inputs => {0 => 1}, neurons => {0 => 1}, eta_inputs => {0 => 0.3}, eta_neurons => {0 => 0.7}},
{ iamanoutput => 1, inputs => {}, neurons => {0 => 2, 1 => 1}, eta_inputs => {}, eta_neurons => {0 => 0.4, 1 => 0.6}},
{ iamanoutput => 1, inputs => {}, neurons => {0 => 3}, eta_inputs => {}, eta_neurons => {0 => 0.8}}]);
$network2=new AI::ANN ('inputs'=>1,
'data'=>[{ iamanoutput => 0, inputs => {0 => 1}, neurons => {}},
{ iamanoutput => 0, inputs => {0 => 2}, neurons => {0 => 2}},
{ iamanoutput => 1, inputs => {}, neurons => {0 => 3}},
{ iamanoutput => 1, inputs => {}, neurons => {0 => 2, 1 => 1}}]);
ok(defined $network1, "new() works");
ok($network1->isa("AI::ANN"), "Right class");
ok($out=$network1->execute([1]), "executed and still alive");
is($#{$out}, 1, "execute() output is the right length");
t/10_network_integrity.t view on Meta::CPAN
use Test::More tests => 10;
BEGIN { use_ok('AI::ANN');
use_ok('AI::ANN::Neuron'); };
#########################
# Insert your test code below, the Test::More module is use()ed here so read
# its man page ( perldoc Test::More ) for help writing this test script.
$network=new AI::ANN ('inputs'=>1, 'data'=>[{ iamanoutput => 0, inputs => {0 => 2}, neurons => {}},
{ iamanoutput => 1, inputs => {}, neurons => {0 => 2}}], 'maxvalue' => 10);
ok(defined $network, "new() works");
ok($network->isa("AI::ANN"), "Right class");
ok($out=$network->execute([1]), "executed and still alive");
is($#{$out}, 0, "execute() output for two neurons is the right length");
is($out->[0], 4, "execute() output for two neurons has the correct value");
$data = $network->get_internals();
$data->[1]->{'iamanoutput'} = 0;
$data->[1]->{'inputs'}->[0] = 8;
$data->[1]->{'neurons'}->[0] = 0;
ok($out=$network->execute([1]), "executed and still alive");
is($#{$out}, 0, "execute() output for two neurons is the right length");
is($out->[0], 4, "execute() output for two neurons has the correct value");