AI-NNFlex
view release on metacpan or search on metacpan
Removed the momentum module (or rather, removed the backprop module and
renamed the momentum module to take its place). There were only a few
code differences, and it's easier to maintain this way. The default is
vanilla backprop; momentum & Fahlman adjustments are only applied if
specified in the network config.
Bundled all the maths functions into AI::NNFlex::Mathlib
Implemented calls to the error transformation function, as per
Fahlman. In testing it doesn't seem to make much difference, but
at least the facility is now there.
#############################################################
0.20
20050308
v0.17 was never released, as I rejigged the whole lot for
object inheritance before I got around to uploading it to CPAN.
"Why," I hear you ask, "when it worked OK already?"
t/Backprop.t view on Meta::CPAN
# t/Backprop.t (excerpt): exercises AI::NNFlex::Backprop construction.
use strict;
use Test;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
# Old-style Test.pm: declare the expected test count at compile time.
BEGIN{
plan tests=>10}
# test create network
# NOTE(review): the constructor call continues past this excerpt; the
# remaining arguments and the closing paren are not visible here.
my $network = AI::NNFlex::Backprop->new(randomconnections=>0,
randomweights=>1,
learningrate=>.1,
t/Backprop.t view on Meta::CPAN
# add an extra layer to test out connect
# NOTE(review): $result is presumably declared with 'my' earlier in the
# full script; only an excerpt is visible here.
$result = $network->add_layer( nodes=>2,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"sigmoid",
randomweights=>1);
# Test initialise network (wires up the layers added above)
$result = $network->init();
ok($result); #test 3
##
# test connect layer -- connects layer 1 back to itself, i.e. what looks
# like a recurrent/self connection; presumably deliberate to exercise
# connect(), but worth confirming against the module docs.
$result = $network->connect(fromlayer=>1,tolayer=>1);
ok($result);
# test connect node
t/Backprop.t view on Meta::CPAN
# test create dataset -- pairs are (input, target): an identity-like
# mapping over 2-bit patterns.
my $dataset = AI::NNFlex::Dataset->new([
[0,0],[1,1],
[0,1],[1,0],
[1,0],[0,1],
[1,1],[0,0]]);
ok ($dataset); #test 4
##
# Test a learning pass (returns the network error, used as truthiness)
my $err = $dataset->learn($network);
ok($err); #test 5
##
# Test a run pass
$result = $dataset->run($network);
ok($result); #test 8 -- NOTE(review): numbering comment looks stale
##
# test saving weights (and activations) to a state file
$result = $network->dump_state(filename=>'state.wts',activations=>1);
ok($result);
# test loading weights back from the same state file
$result = $network->load_state(filename=>'state.wts');
t/Dataset.t view on Meta::CPAN
# t/Dataset.t (excerpt): exercises AI::NNFlex::Dataset against a
# Backprop network.
use strict;
use Test;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
# Old-style Test.pm: declare the expected test count at compile time.
BEGIN{
plan tests=>12}
# we need a basic network in place to test the dataset functionality against
t/Dataset.t view on Meta::CPAN
# Add a layer to the network.
# NOTE(review): $network is constructed earlier in the full script; its
# creation is not visible in this excerpt.
my $result = $network->add_layer( nodes=>2,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"tanh",
randomweights=>1);
ok($result); #test 2
##
# Test initialise network
$result = $network->init();
ok($result); #test 3
##
# test create dataset -- pairs are (input, target) 2-bit patterns
my $dataset = AI::NNFlex::Dataset->new([
[0,0],[1,1],
[0,1],[1,0],
[1,0],[0,1],
[1,1],[0,0]]);
t/Dataset.t view on Meta::CPAN
# compare original & loaded dataset
# NOTE(review): $dataset2 is loaded earlier in the full script (not
# visible here). This comparison only checks that the two datasets have
# the same NUMBER of elements, not that the contents match.
my $comparison;
if (scalar @{$dataset->{'data'}} == scalar @{$dataset2->{'data'}}){$comparison=1}
ok($comparison);
# delete a pair from the dataset (elements at indices 4 and 5)
$result = $dataset->delete([4,5]);
ok($result);
# Test a learning pass
my $err = $dataset->learn($network);
ok($err); #test 5 -- NOTE(review): numbering comment looks stale
##
# Test a run pass
$result = $dataset->run($network);
ok($result); #test 8 -- NOTE(review): numbering comment looks stale
##
t/Hopfield.t view on Meta::CPAN
# example script to build a hopfield net
use strict;
use AI::NNFlex::Hopfield;
use AI::NNFlex::Dataset;
use Test;
BEGIN{plan tests=>4}
# Probe for the optional Math::Matrix dependency; tests are skipped if
# it is absent. NOTE(review): a block eval -- eval { require Math::Matrix }
# -- would be more idiomatic than string eval here.
my $matrixpresent = eval("require(Math::Matrix)");
my $matrixabsent = !$matrixpresent;
my $network = AI::NNFlex::Hopfield->new();
# Test.pm skip(): skips when the first argument is true.
skip($matrixabsent,$network);
t/backprop.t view on Meta::CPAN
# t/backprop.t (excerpt): near-duplicate of t/Backprop.t using tanh
# activation and a smaller test plan.
use strict;
use Test;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;
# Old-style Test.pm: declare the expected test count at compile time.
BEGIN{
plan tests=>8}
# test create network
# NOTE(review): the constructor call continues past this excerpt; the
# remaining arguments and the closing paren are not visible here.
my $network = AI::NNFlex::Backprop->new(randomconnections=>0,
randomweights=>1,
learningrate=>.1,
t/backprop.t view on Meta::CPAN
# add an extra layer to test out connect
# NOTE(review): $result is presumably declared with 'my' earlier in the
# full script; only an excerpt is visible here.
$result = $network->add_layer( nodes=>2,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"tanh",
randomweights=>1);
# Test initialise network (wires up the layers added above)
$result = $network->init();
ok($result); #test 3
##
# test connect layer -- connects layer 1 back to itself, i.e. what looks
# like a recurrent/self connection; presumably deliberate to exercise
# connect(), but worth confirming against the module docs.
$result = $network->connect(fromlayer=>1,tolayer=>1);
ok($result);
# test connect node
t/backprop.t view on Meta::CPAN
# test create dataset -- pairs are (input, target) 2-bit patterns
my $dataset = AI::NNFlex::Dataset->new([
[0,0],[1,1],
[0,1],[1,0],
[1,0],[0,1],
[1,1],[0,0]]);
ok ($dataset); #test 4
##
# Test a learning pass (returns the network error, used as truthiness)
my $err = $dataset->learn($network);
ok($err); #test 5
##
# Test a run pass
$result = $dataset->run($network);
ok($result); #test 8 -- NOTE(review): numbering comment looks stale
##
t/reinforce.t view on Meta::CPAN
# t/reinforce.t (excerpt): exercises AI::NNFlex::Reinforce.
use strict;
use Test;
use AI::NNFlex::Reinforce;
use AI::NNFlex::Dataset;
# Old-style Test.pm: declare the expected test count at compile time.
BEGIN{
plan tests=>5}
# test create network
# NOTE(review): the constructor call continues past this excerpt; the
# remaining arguments and the closing paren are not visible here.
my $network = AI::NNFlex::Reinforce->new(randomconnections=>0,
randomweights=>1,
learningrate=>.1,
t/reinforce.t view on Meta::CPAN
# Add a layer to the network created above (constructor truncated in
# this excerpt).
my $result = $network->add_layer( nodes=>2,
persistentactivation=>0,
decay=>0.0,
randomactivation=>0,
threshold=>0.0,
activationfunction=>"tanh",
randomweights=>1);
ok($result); #test 2
##
# Test initialise network
$result = $network->init();
ok($result); #test 3
##
# test create dataset -- pairs are (input, target) 2-bit patterns
my $dataset = AI::NNFlex::Dataset->new([
[0,0],[1,1],
[0,1],[1,0],
[1,0],[0,1],
[1,1],[0,0]]);
ok ($dataset); #test 4
##
# Test a run pass (Reinforce has no supervised learn pass tested here)
$result = $dataset->run($network);
ok($result); #test 5
##
( run in 0.726 second using v1.01-cache-2.11-cpan-4d50c553e7e )