AI-NNFlex


CHANGES

Altered the Dataset constructor to allow an empty param set - you
can now construct a null Dataset & add items to it using the
$dataset->add([[0,1],[1]]) method (also implemented in this
version).
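
A minimal sketch of the new usage (the pattern values are
illustrative):

	use AI::NNFlex::Dataset;

	# construct a null dataset, then add an input/target pair
	my $dataset = AI::NNFlex::Dataset->new();
	$dataset->add([[0,1],[1]]);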

Altered the feedforward run method to return the output pattern -
more intuitive that way.
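
So a call like this now yields the output pattern directly
($network being any feedforward-class network; values illustrative):

	my $output = $network->run([0,1]);	# run now returns the output pattern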

Implemented a Hopfield module. This is very much a first cut,
since I've never really used Hopfield nets before. It contains
no debug calls yet; those can wait until I've rethought the whole
approach to debugging in this set of code.

Implemented dataset->delete method.

Put the pod documentation back in Dataset.pm :-)


###############################################################
0.21
20050313

CHANGES

20050218

Changed the way weights are identified from a hash (keyed on
stringified node object ref) to an array. Cleaner that way,
and makes it easier to turn the weights into a matrix for
compatibility with the upcoming PDL based modules.
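
Concretely, a weight lookup changes shape roughly like this (the
paths are taken from the connection structures used elsewhere in
this distribution):

	# before: weights keyed on the stringified node object ref
	# $node->{'connectedNodesWest'}->{'weights'}->{"$westNode"}

	# after: weights in an array parallel to the nodes array
	# $node->{'connectedNodesWest'}->{'weights'}->[$index]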

Altered the syntax for calls to load_state and dump_state,
to get rid of the nasty hashref thing.
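
For example, assuming the same param=>value convention adopted
elsewhere in this release (treat the filename parameter name as
illustrative):

	# old, deprecated hashref style:
	# $network->dump_state({filename=>'state.wts'});

	# new flat param=>value syntax:
	$network->dump_state(filename=>'state.wts');
	$network->load_state(filename=>'state.wts');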

Cleaned up a bit of debugging in ::init


###############################################################
0.15
20050206

I've rushed another release out because I found a big fat bug
in feedforward. Instead of applying the transfer function to
the summed node inputs, it was applying it to each input in
turn, and then again to the sum. Whoops! The network is now more
stable and a lot faster.
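
Schematically, the fix looks like this (mirroring the corrected
loop in Feedforward.pm; $node and $network as used there):

	# wrong (pre-0.15): transfer function applied per input, and again to the sum
	# right: sum the weighted inputs first, apply the transfer function once
	my $totalActivation = 0;
	my $counter = 0;
	foreach my $connectedNode (@{$node->{'connectedNodesWest'}->{'nodes'}})
	{
		my $weight = $node->{'connectedNodesWest'}->{'weights'}->[$counter];
		$totalActivation += $weight * $connectedNode->{'activation'};
		$counter++;
	}
	my $function = $node->{'activationfunction'};
	$node->{'activation'} = $network->$function($totalActivation);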

I also altered the debug calls, so in normal use without debug
the overhead is a lot smaller.

This version, particularly in use with momentum, is a LOT faster.

###############################################################

0.14
20050201

The backprop calculation was nailed to 1-y*y, which is the
derivative of the tanh activation function. It has been abstracted
into a separate slope function call, so that other activation
functions can be used.
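
A sketch of the dispatch (the _slope suffix convention is the one
visible in Mathlib.pm, e.g. tanh_slope, linear_slope; the exact
assembly of the name is assumed):

	my $function      = $node->{'activationfunction'};
	my $functionSlope = $function."_slope";
	$value = $network->$functionSlope($value);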

examples/lesion.pl


use strict;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;

# Create the network 

my $network = AI::NNFlex::Backprop->new(randomconnections=>0,
				randomweights=>1,
				learningrate=>.1,
				debug=>[],bias=>1,
				momentum=>0.6,
				round=>1);



$network->add_layer(	nodes=>2,
			persistentactivation=>0,
			decay=>0.0,
			randomactivation=>0,
			threshold=>0.0,

examples/reinforceTest.pl

# this is /really/ experimental - see perldoc AI::NNFlex::Reinforce
use AI::NNFlex;

my $object = AI::NNFlex->new(
	[{"nodes"=>2,"persistent activation"=>0,"decay"=>0.0,"random activation"=>0,"threshold"=>0.0,"activation function"=>"tanh","random weights"=>1},
	 {"nodes"=>2,"persistent activation"=>0,"decay"=>0.0,"random activation"=>0,"threshold"=>0.0,"activation function"=>"tanh","random weights"=>1},
	 {"nodes"=>1,"persistent activation"=>0,"decay"=>0.0,"random activation"=>0,"threshold"=>0.0,"activation function"=>"linear","random weights"=>1}],
	{'random connections'=>0,'networktype'=>'feedforward','random weights'=>1,'learn...


$object->run([1,0]);
my $output = $object->output();
foreach (@$output)
{
	print "1,0 - $_ ";
}
print "\n";

lib/AI/NNFlex.pm

#					syntax
#
# 0.13 20050121		CColbourn	Created momentum learning module
#
# 0.14 20050201		CColbourn	Abstracted derivative of activation
#					function into a separate function call
#					instead of hardcoded 1-y*y in backprop
#					tanh, linear & momentum
#
# 0.15 20050206		CColbourn	Fixed a bug in feedforward.pm. Stopped
#					calling dbug unless scalar debug > 0
#					in a lot of calls
#
# 0.16 20050218		CColbourn	Changed from a hash of weights to an
# 					array of weights, to make it easier
# 					to adapt the code to PDL
#
# 0.17 20050302		CColbourn	Changed input params to ::output to
#					be param=>parameter not anon hash
#					Included round parameter in output
#

lib/AI/NNFlex.pm

# single network with 2 layers unsupervised and 2 layers supervised
#
# Clean up the perldocs
#
###############################################################################
$VERSION = "0.24";


###############################################################################
my @DEBUG; 	# a single, solitary, shameful global variable. Couldn't
		# avoid it really. It allows correct control of debug
		# information before the $network object is created
		# (in ::layer->new & ::node->new, for example).


###############################################################################
###############################################################################
# package NNFlex
###############################################################################
###############################################################################
package AI::NNFlex;

lib/AI/NNFlex.pm



	# Network wide parameters (e.g. random weights)
	foreach (keys %$netParams)
	{
		my $key = lc($_);
		$key =~ s/\s//g; # up to 0.14 we had params with spaces in, now deprecated
		$network->{$key} = ${$netParams}{$_};
	}

	if( $network->{'debug'})
	{
		@DEBUG = @{$network->{'debug'}};
	}

	# build the network
	foreach (@$cleanParams)
	{
		if (!($$_{'nodes'})){next}
		my %layer = %{$_};	
		push @layers,AI::NNFlex::layer->new(\%layer);	
	}	
	$$network{'layers'} = \@layers;

lib/AI/NNFlex.pm

# sub init
################################################################################
sub init
{

	#Revised version of init for NNFlex

	my $network = shift;
	my @layers = @{$network->{'layers'}};

	# if network debug state not set, set it to null
	if (!$network->{'debug'})
	{
		$network->{'debug'} = [];
	}
	my @debug = @{$network->{'debug'}};
	

	# implement the bias node
	if ($network->{'bias'})
	{
		my $biasNode = AI::NNFlex::node->new({'activation function'=>'linear'});
		$$network{'biasnode'} = $biasNode;
		$$network{'biasnode'}->{'activation'} = 1;
		$$network{'biasnode'}->{'nodeid'} = "bias";
	}

lib/AI/NNFlex.pm

				foreach my $westNodes (@{$network->{'layers'}->[$currentLayer -1]->{'nodes'}})	
				{
					foreach my $connectionFromWest (@{$westNodes->{'connectedNodesEast'}->{'nodes'}})
					{
						if ($connectionFromWest eq $node)
						{
							my $weight = $network->calcweight;
				
							push @{$node->{'connectedNodesWest'}->{'nodes'}},$westNodes;
							push @{$node->{'connectedNodesWest'}->{'weights'}},$weight;
							if (scalar @debug > 0)	
							{$network->dbug ("West to east Connection - ".$westNodes->{'nodeid'}." to ".$node->{'nodeid'},2);}
						}
					}
				}
			}

			# Now initialise connections to the east (if not last layer)
			if ($currentLayer < (scalar @layers)-1)
			{
			foreach my $eastNodes (@{$network->{'layers'}->[$currentLayer+1]->{'nodes'}})
			{
				if (!$network->{'randomconnections'}  || $network->{'randomconnections'} > rand(1))
				{
					my $weight = $network->calcweight;
					push @{$node->{'connectedNodesEast'}->{'nodes'}},$eastNodes;
					push @{$node->{'connectedNodesEast'}->{'weights'}}, $weight;
					if (scalar @debug > 0)
					{$network->dbug ("East to west Connection ".$node->{'nodeid'}." to ".$eastNodes->{'nodeid'},2);}
				}
			}
			}
			$nodeid++;
		}
		$currentLayer++;
	}


lib/AI/NNFlex.pm

	if ($network->{'bias'})
	{
		foreach my $layer (@{$network->{'layers'}})
		{
			foreach my $node (@{$layer->{'nodes'}})
			{
				push @{$node->{'connectedNodesWest'}->{'nodes'}},$network->{'biasnode'};
				my $weight = $network->calcweight;

				push @{$node->{'connectedNodesWest'}->{'weights'}},$weight;
				if (scalar @debug > 0)
				{$network->dbug ("West to east Connection - bias to ".$node->{'nodeid'}." weight = $weight",2);}
			}
		}
	}



	return 1; # return success if we get to here


lib/AI/NNFlex.pm

# sub $network->dbug
###############################################################################
sub dbug
{
	my $network = shift;
	my $message = shift;
	my $level = shift;


	my @DEBUGLEVELS;
	# cover for debug calls before the network is created
	if (!$network->{'debug'})
	{
		@DEBUGLEVELS=@DEBUG;
	}
	else
	{
		@DEBUGLEVELS = @{$network->{'debug'}};
	}


	# 0 is error so ALWAYS display
	if (!(grep {$_ == 0} @DEBUGLEVELS)){push @DEBUGLEVELS,0}

	foreach (@DEBUGLEVELS)
	{
	
		if ($level == $_)

lib/AI/NNFlex.pm


=head1 CONSTRUCTOR 

=head2 AI::NNFlex->new ( parameter => value );
	

randomweights=>MAXIMUM VALUE FOR INITIAL WEIGHT

fixedweights=>WEIGHT TO USE FOR ALL CONNECTIONS

debug=>[LIST OF CODES FOR MODULES TO DEBUG]

round=>0 or 1; a true value sets the network to round output values to the nearest of 1, -1 or 0


The constructor implements a fairly generalised network object with a number of parameters.


The following parameters are optional:
 randomweights
 fixedweights
 debug
 round


(Note: if randomweights is not specified, the network will default to random initial weights between 0 and 1.)
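
For example, a minimal constructor call using only optional parameters (values illustrative):

	my $network = AI::NNFlex->new(randomweights=>1,
				debug=>[],
				round=>1);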

=head1 METHODS

This is a short list of the main methods implemented in AI::NNFlex. 

=head2 AI::NNFlex

lib/AI/NNFlex.pm

=head1 CHANGES

v0.11 introduces the lesion method, png support in the draw module and datasets.

v0.12 fixes a bug in reinforce.pm & adds a reflector in feedforward->run to make $network->run($dataset) work.

v0.13 introduces the momentum learning algorithm and fixes a bug that allowed training to proceed even if the node activation function module can't be loaded

v0.14 fixes momentum and backprop so they are no longer nailed to tanh hidden units only.

v0.15 fixes a bug in feedforward, and reduces the debug overhead

v0.16 changes some underlying addressing of weights, to simplify the code and speed it up.

v0.17 is a bugfix release, plus some cleaning of UI

v0.20 changes AI::NNFlex to be a base class, and ships three different network types (i.e. training algorithms). Backprop & momentum are both networks of the feedforward class, and inherit their 'run' method from feedforward.pm. 0.20 also fixes a who...

v0.21 cleans up the perldocs more, and makes nnflex more distinctly a base module. There are quite a number of changes in Backprop in the v0.21 distribution.

v0.22 introduces the ::connect method, to allow creation of recurrent connections, and manual control over connections between nodes/layers.

lib/AI/NNFlex/Backprop.pm

use strict;


sub calc_error
{
	my $network = shift;

	my $outputPatternRef = shift;
	my @outputPattern = @$outputPatternRef;

	my @debug = @{$network->{'debug'}};

	if (scalar @debug > 0)
	{$network->dbug ("Output pattern @outputPattern received by Backprop",4);}


	my $outputLayer = $network->{'layers'}->[-1]->{'nodes'};

	if (scalar @$outputLayer != scalar @outputPattern)
	{	
		$network->dbug ("Wrong number of output values, net has ".scalar @$outputLayer." nodes",0);
		return 0;
	}

lib/AI/NNFlex/Backprop.pm



		if ($_->{'errorfunction'})
		{
			my $errorfunction = $_->{'errorfunction'};
			$value = $network->$errorfunction($value);
		}
		
		$_->{'error'} = $value;
		$counter++;
		if (scalar @debug > 0)
		{$network->dbug ("Error on output node $_ = ".$_->{'error'},4);}
	}


}


########################################################
# AI::NNFlex::Backprop::learn
########################################################

lib/AI/NNFlex/Backprop.pm

}


#########################################################
# AI::NNFlex::Backprop::hiddenToOutput
#########################################################
sub hiddenToOutput
{
	my $network = shift;

	my @debug = @{$network->{'debug'}};

	my $outputLayer = $network->{'layers'}->[-1]->{'nodes'};

	foreach my $node (@$outputLayer)
	{
		my $connectedNodeCounter=0;
		foreach my $connectedNode (@{$node->{'connectedNodesWest'}->{'nodes'}})
		{
			my $momentum = 0;
			if ($network->{'momentum'})
			{

				if ($node->{'connectedNodesWest'}->{'lastdelta'}->[$connectedNodeCounter])
				{
					$momentum = ($network->{'momentum'})*($node->{'connectedNodesWest'}->{'lastdelta'}->[$connectedNodeCounter]);
				}
			}
			if (scalar @debug > 0)
			{$network->dbug("Learning rate is ".$network->{'learningrate'},4);}
			my $deltaW = (($network->{'learningrate'}) * ($node->{'error'}) * ($connectedNode->{'activation'}));
			$deltaW = $deltaW+$momentum;
			$node->{'connectedNodesWest'}->{'lastdelta'}->[$connectedNodeCounter] = $deltaW;
			
			if (scalar @debug > 0)
			{$network->dbug("Applying delta $deltaW on hiddenToOutput $connectedNode to $node",4);}
			# 
			$node->{'connectedNodesWest'}->{'weights'}->[$connectedNodeCounter] -= $deltaW;
			$connectedNodeCounter++;
		}
			
	}
}

######################################################
# AI::NNFlex::Backprop::hiddenOrInputToHidden
######################################################
sub hiddenOrInputToHidden
{

	my $network = shift;

	my @layers = @{$network->{'layers'}};

	my @debug = @{$network->{'debug'}};

	# remove the last element (The output layer) from the stack
	# because we've already calculated dW on that
	pop @layers;

	if (scalar @debug > 0)
	{$network->dbug("Starting Backprop of error on ".scalar @layers." hidden layers",4);}

	foreach my $layer (reverse @layers)
	{
		foreach my $node (@{$layer->{'nodes'}})
		{
			my $connectedNodeCounter=0;
			if (!$node->{'connectedNodesWest'}) {last}

			my $nodeError = 0;	# initialise to avoid an uninitialized-value warning in the += below
			foreach my $connectedNode (@{$node->{'connectedNodesEast'}->{'nodes'}})
			{
				$nodeError += ($connectedNode->{'error'}) * ($connectedNode->{'connectedNodesWest'}->{'weights'}->[$connectedNodeCounter]);
				$connectedNodeCounter++;
			}

			if (scalar @debug > 0)
			{$network->dbug("Hidden node $node error = $nodeError",4);}

			# Apply error function
			if ($node->{'errorfunction'})
			{
				my $functioncall = $node->{'errorfunction'};
				$nodeError = $network->$functioncall($nodeError);
			}

			$node->{'error'} = $nodeError;

lib/AI/NNFlex/Backprop.pm

				$value = $network->$functionSlope($value);

				# Add the Fahlman constant
				$value += $network->{'fahlmanconstant'};

				$value = $value * $node->{'error'} * $network->{'learningrate'} * $westNodes->{'activation'};

				
				my $dW = $value;
				$dW = $dW + $momentum;
				if (scalar @debug > 0)
				{$network->dbug("Applying deltaW $dW to inputToHidden connection from $westNodes to $node",4);}

				$node->{'connectedNodesWest'}->{'lastdelta'}->{$westNodes} = $dW;

				$node->{'connectedNodesWest'}->{'weights'}->[$connectedNodeCounter] -= $dW;
				if (scalar @debug > 0)
				{$network->dbug("Weight now ".$node->{'connectedNodesWest'}->{'weights'}->[$connectedNodeCounter],4);}
				$connectedNodeCounter++;

			}	


		}
	}
								
				

lib/AI/NNFlex/Backprop.pm

#########################################################
# AI::NNFlex::Backprop::RMSErr
#########################################################
sub RMSErr
{
	my $network = shift;

	my $outputPatternRef = shift;
	my @outputPattern = @$outputPatternRef;

	my @debug = @{$network->{'debug'}};

	my $sqrErr = 0;

	my $outputLayer = $network->{'layers'}->[-1]->{'nodes'};

	if (scalar @$outputLayer != scalar @outputPattern)
	{	
		$network->dbug("Wrong number of output values, net has ".scalar @$outputLayer." nodes",0);
		return 0;
	}

	# Now calculate the error
	my $counter=0;
	foreach (@$outputLayer)
	{	
		my $value = $_->{'activation'} - $outputPattern[$counter];

		$sqrErr += $value *$value;
		$counter++;
		if (scalar @debug > 0)
		{$network->dbug("Error on output node $_ = ".$_->{'error'},4);}
	}

	my $error = sqrt($sqrErr);

	return $error;
}

1;

lib/AI/NNFlex/Backprop.pm


=head2 AI::NNFlex::Backprop->new( parameter => value );

Parameters:

	
	randomweights=>MAXIMUM VALUE FOR INITIAL WEIGHT

	fixedweights=>WEIGHT TO USE FOR ALL CONNECTIONS

	debug=>[LIST OF CODES FOR MODULES TO DEBUG]

	learningrate=>the learning rate of the network

	momentum=>the momentum value (momentum learning only)

	round=>0 or 1 - 1 sets the network to round output values to
		nearest of 1, -1 or 0

	fahlmanconstant=>0.1
		


The following parameters are optional:

 randomweights

 fixedweights

 debug

 round

 momentum

 fahlmanconstant


If randomweights is not specified the network will default to a random value from 0 to 1.
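
For example, mirroring the constructor call used in t/Backprop.t:

	my $network = AI::NNFlex::Backprop->new(randomconnections=>0,
				randomweights=>1,
				learningrate=>.1,
				debug=>[],bias=>1,
				momentum=>0.6);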

lib/AI/NNFlex/Feedforward.pm

	
	# if run was called with a Dataset object instead of an input
	# pattern, hand off to the dataset's own run method
	if ($inputPatternRef =~/Dataset/)
	{
		return ($inputPatternRef->run($network))
	}


	my @inputPattern = @$inputPatternRef;

	my @debug = @{$network->{'debug'}};
	if (scalar @debug> 0)
	{$network->dbug ("Input pattern @inputPattern received by Feedforward",3);}


	# First of all apply the activation pattern to the input units (checking
	# that the pattern has the right number of values)

	my $inputLayer = $network->{'layers'}->[0]->{'nodes'};

	if (scalar @$inputLayer != scalar @inputPattern)
	{	

lib/AI/NNFlex/Feedforward.pm

	# Now apply the activation
	my $counter=0;
	foreach (@$inputLayer)
	{	
		if ($_->{'active'})
		{

			if ($_->{'persistentactivation'})
			{
				$_->{'activation'} +=$inputPattern[$counter];
				if (scalar @debug> 0)
				{$network->dbug("Applying ".$inputPattern[$counter]." to $_",3);}
			}
			else
			{
				$_->{'activation'} =$inputPattern[$counter];
				if (scalar @debug> 0)
				{$network->dbug("Applying ".$inputPattern[$counter]." to $_",3);}
			 
			}
		}
		$counter++;
	}
	

	# Now flow activation through the network starting with the second layer
	foreach my $layer (@{$network->{'layers'}})

lib/AI/NNFlex/Feedforward.pm

			{
				$node->{'activation'} =0;
			}

			# Decay the node (note that if decay is not set this
			# will have no effect, hence no if).
			$node->{'activation'} -= $node->{'decay'};
			my $nodeCounter=0;
			foreach my $connectedNode (@{$node->{'connectedNodesWest'}->{'nodes'}})
			{
				if (scalar @debug> 0)
				{$network->dbug("Flowing from ".$connectedNode->{'nodeid'}." to ".$node->{'nodeid'},3);}
	
				my $weight = ${$node->{'connectedNodesWest'}->{'weights'}}[$nodeCounter];
				my $activation = $connectedNode->{'activation'};		
				if (scalar @debug> 0)
				{$network->dbug("Weight & activation: $weight - $activation",3);}
				

				$totalActivation += $weight*$activation;
				$nodeCounter++;	
			}

			if ($node->{'active'})
			{
				my $value = $totalActivation;

				my $function = $node->{'activationfunction'};
				#my $functionCall ="\$value = \$network->$function(\$value);";

				#eval($functionCall);
				$value = $network->$function($value);
				$node->{'activation'} = $value;
			}
			if (scalar @debug> 0)
			{$network->dbug("Final activation of ".$node->{'nodeid'}." = ".$node->{'activation'},3);}
		}
	}



	return $network->output;

}

lib/AI/NNFlex/Mathlib.pm


#######################################################
# tanh activation function
#######################################################
sub tanh
{

	my $network = shift;
	my $value = shift;

	my @debug = @{$network->{'debug'}};

	if ($value > 20){ $value = 1; }
	elsif ($value < -20){ $value = -1; }
	else
	{
		my $a = exp($value);
		my $b = exp(-$value);
		$value = ($a-$b)/($a+$b);
	}
	if (scalar @debug > 0)
	{$network->dbug("Tanh activation returning $value",5)};	
	return $value;
}

sub tanh_slope
{
	my $network = shift;
	my $value = shift;
	my @debug = @{$network->{'debug'}};


	my $return = 1-($value*$value);
	if (scalar @debug > 0)
	{$network->dbug("Tanh_slope returning $return",5);}

	return $return;
}

#################################################################
# Linear activation function
#################################################################
sub linear
{

	my $network = shift;
	my $value = shift;	

	my @debug = @{$network->{'debug'}};
	if (scalar @debug >0)
	{$network->dbug("Linear activation returning $value",5)};	
	return $value;
}

sub linear_slope
{
	my $network = shift;
	my $value = shift;
	my @debug = @{$network->{'debug'}};
	if (scalar @debug >0)
	{$network->dbug("Linear slope returning $value",5)};
	return $value;
}


############################################################
# P&B sigmoid activation (needs slope)
############################################################

sub sigmoid2

lib/AI/NNFlex/Mathlib.pm

	my $value = shift;	
	$value = (1+exp(-$value))**-1;
	$network->dbug("Sigmoid activation returning $value",5);	
	return $value;
}

sub sigmoid2_slope
{
	my $network = shift;
	my $value = shift;
	my @debug = @{$network->{'debug'}};


	my $return = exp(-$value) * ((1 + exp(-$value)) ** -2);
	if (scalar @debug > 0)
	{$network->dbug("sigmoid2_slope returning $return",5);}

	return $return;
}

############################################################
# standard sigmoid activation 
############################################################

sub sigmoid

lib/AI/NNFlex/Mathlib.pm

	my $value = shift;	
	$value = 1/(1+exp(1)**-$value);
	$network->dbug("Sigmoid activation returning $value",5);	
	return $value;
}

sub sigmoid_slope
{
	my $network = shift;
	my $value = shift;
	my @debug = @{$network->{'debug'}};


	my $return = $value * (1-$value);
	if (scalar @debug > 0)
	{$network->dbug("sigmoid_slope returning $return",5);}

	return $return;
}

############################################################
# hopfield_threshold
# standard hopfield threshold activation - doesn't need a 
# slope (because hopfield networks don't use them!)
############################################################

lib/AI/NNFlex/Reinforce.pm

=head1 CONSTRUCTOR 

=head2 AI::NNFlex::Reinforce

 new ( parameter => value );
	
	randomweights=>MAXIMUM VALUE FOR INITIAL WEIGHT

	fixedweights=>WEIGHT TO USE FOR ALL CONNECTIONS

	debug=>[LIST OF CODES FOR MODULES TO DEBUG]

	learningrate=>the learning rate of the network

	round=>0 or 1 - 1 sets the network to round output values to
		nearest of 1, -1 or 0


The following parameters are optional:
 randomweights
 fixedweights
 debug
 round

(Note: if randomweights is not specified, the network will default to random initial weights between 0 and 1.)
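
For example, mirroring the constructor call used in t/reinforce.t:

	my $network = AI::NNFlex::Reinforce->new(randomconnections=>0,
				randomweights=>1,
				learningrate=>.1,
				debug=>[],bias=>1);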


=head1 METHODS

This is a short list of the main methods implemented in AI::NNFlex. Subclasses may implement other methods.

=head2 AI::NNFlex

t/Backprop.t

use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;

BEGIN{
	plan tests=>10}

# test create network
my $network = AI::NNFlex::Backprop->new(randomconnections=>0,
				randomweights=>1,
				learningrate=>.1,
				debug=>[],bias=>1,
				momentum=>0.6);

ok($network); #test 1
##

# test add layer
my $result = $network->add_layer(	nodes=>2,
			persistentactivation=>0,
			decay=>0.0,
			randomactivation=>0,

t/Dataset.t

	plan tests=>12}




# we need a basic network  in place to test the dataset functionality against
# test create network
my $network = AI::NNFlex::Backprop->new(randomconnections=>0,
				randomweights=>1,
				learningrate=>.1,
				debug=>[],bias=>1,
				momentum=>0.6);

ok($network); #test 1
##

# test add layer
my $result = $network->add_layer(	nodes=>2,
			persistentactivation=>0,
			decay=>0.0,
			randomactivation=>0,

t/backprop.t

use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;

BEGIN{
	plan tests=>8}

# test create network
my $network = AI::NNFlex::Backprop->new(randomconnections=>0,
				randomweights=>1,
				learningrate=>.1,
				debug=>[],bias=>1,
				momentum=>0.6);

ok($network); #test 1
##

# test add layer
my $result = $network->add_layer(	nodes=>2,
			persistentactivation=>0,
			decay=>0.0,
			randomactivation=>0,

t/reinforce.t

use AI::NNFlex::Reinforce;
use AI::NNFlex::Dataset;

BEGIN{
	plan tests=>5}

# test create network
my $network = AI::NNFlex::Reinforce->new(randomconnections=>0,
				randomweights=>1,
				learningrate=>.1,
				debug=>[],bias=>1);

ok($network); #test 1
##

# test add layer
my $result = $network->add_layer(	nodes=>2,
			persistentactivation=>0,
			decay=>0.0,
			randomactivation=>0,
			threshold=>0.0,


