AI-NeuralNet-Mesh


Changes  view on Meta::CPAN

0.31  Fri Aug 25 05:10:11 2000
        - Second release, by Josiah Bryan
        - 3 Major features:
                - separate layer sizes
                - custom node activations
                - increased learning speed

0.43  Wed Sep 14 03:13:01 2000
        - Third release, by Josiah Bryan
        - Several bug fixes
                - fixed 'flag' option on learn_set()
                - fixed multiple-output bug
                - fixed learning gradient error
        - Improved learning function to not degrade increment automatically
        - Added CSV-style dataset loader
        - Added Export tags
        - Added four custom node activations, including range and ramp
        - Added several misc. extra functions
        - Added ALN example demo


Mesh.pm  view on Meta::CPAN

    }    
    
    # See POD for usage
    sub run_uc {
    	$_[0]->uncrunch(run(@_));
    }

	# See POD for usage
	sub learn {
    	my $self	=	shift;					
    	my $inputs	=	shift;					# input set
    	my $outputs	=	shift;					# target outputs
    	my %args	=	@_;						# get args into hash
    	my $inc		=	$args{inc} || 0.002;	# learning gradient
    	my $max     =   $args{max} || 1024;     # max iterations
    	my $degrade =   $args{degrade} || 0;    # enable gradient degrading
		my $error   = 	($args{error}>-1 && defined $args{error}) ? $args{error} : -1;
  		my $dinc	=	0.0002;					# amount to adjust gradient by
		my $diff	=	100;					# error margin between results
		my $start	=	new Benchmark;			
		$inputs		=	$self->crunch($inputs)  if($inputs == 0);	# crunch string inputs (a plain string numifies to 0)

Mesh.pm  view on Meta::CPAN

   			d('.',12);
   			d('['.join(',',@{$got})."-".join(',',@{$outputs}).']',13);
   		}  
   		my $str = "Learning took $loop loops and ".timestr(timediff(new Benchmark,$start))."\n";
   		d($str,3); $self->{benchmark} = "$loop loops and ".timestr(timediff(new Benchmark,$start))."\n";
   		return $str;
   	}


	# See POD for usage
	sub learn_set {
		my $self	=	shift;
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2;
		my $inc		=	$args{inc};
		my $max		=	$args{max};
	    my $error	=	$args{error};
	    my $degrade	=	$args{degrade};
	    my $p		=	(defined $args{flag}) ?$args{flag} :1;
	    my $row		=	(defined $args{row})  ?$args{row}+1:1;
	    my $leave	=	(defined $args{leave})?$args{leave}:0;
		for my $x (0..$len-$leave) {
			d("Learning set $x...\n",4);
			my $str = $self->learn( $data->[$x*2],
					  		  		$data->[$x*2+1],
					    			inc=>$inc,
					    			max=>$max,
					    			error=>$error,
					    			degrade=>$degrade);
		}
			
		if ($p) {
			return pdiff($data->[$row],$self->run($data->[$row-1]));
		} else {
			return $data->[$row]->[0]-$self->run($data->[$row-1])->[0];
		}
	}
	
	# See POD for usage
	sub run_set {
		my $self	=	shift;
		my $data	=	shift;
		my $len		=	$#{$data}/2;
		my (@results,$res);
		for my $x (0..$len) {
			$res = $self->run($data->[$x*2]);
			for(0..$#{$res}){$results[$x]->[$_]=$res->[$_]}
			d("Running set $x [$res->[0]]...\r",4);
		}
		return \@results;
	}
	
	#
	# Loads a CSV-like dataset from disk
	#
	# Usage:
	#	my $set = $net->load_set($file, $column, $separator);
	#
	# Returns a data set of the same format as required by the
	# learn_set() method. $file is the disk file to load the set from.
	# $column is an optional variable specifying the column in the 
	# data set to use as the class attribute. $column defaults to 0.
	# $separator is an optional variable specifying the separator
	# character between values. $separator defaults to ',' (a single comma). 
	# NOTE: This does not handle quoted fields, or any record
	# separator other than "\n".
	#
	sub load_set {
		my $self	=	shift;
		my $file	=	shift;
		my $attr	=	shift || 0;
		my $sep		=	shift || ',';
		my $data	=	[];
		open(FILE,	$file);
		my @lines	=	<FILE>;
		close(FILE);
		for my $x (0..$#lines) {
			chomp($lines[$x]);

Mesh.pm  view on Meta::CPAN

		return $tmp;
	}  
	
	# Following is a collection of a few nifty custom activation functions.
	# range() is exported by default, the rest you can get with:
	#	use AI::NeuralNet::Mesh ':acts'
	# The ':all' tag also gets these into your namespace.
	 
	#
	# range() returns a closure limiting the output 
	# of that node to a specified set of values.
	# Good for output layers.
	#
	# usage example:
	#	$net->activation(4,range(0..5));
	# or:
	#	..
	#	{ 
	#		nodes		=>	1,
	#		activation	=>	range 5..2
	#	}
	#	..
	# You can also pass an array containing the range
	# values (not an array ref), or you can pass a comma-
	# separated list of values as parameters:
	#
	#	$net->activation(4,range(@numbers));
	#	$net->activation(4,range(6,15,26,106,28,3));
	#
	# Note: when using a range() activator, train the
	# net TWICE on the data set, because the first time
	# the range() function searches for the top value in
	# the inputs, and therefore results could fluctuate.
	# The second learning cycle guarantees more accuracy.
	#	
	sub range {
		my @r=@_;
		sub{$_[1]->{t}=$_[0]if($_[0]>$_[1]->{t});$r[intr($_[0]/$_[1]->{t}*$#r)]}
	}
	
	#
	# ramp() performs smooth ramp activation between 0 and 1 if $r is 1, 
	# or between -1 and 1 if $r is 2. $r defaults to 1, as you can see.	
	#
	# Note: when using a ramp() activator, train the
	# net at least TWICE on the data set, because the first 
	# time the ramp() function searches for the top value in
	# the inputs, and therefore results could fluctuate.
	# The second learning cycle guarantees more accuracy.
	#
	sub ramp {
		my $r=shift||1;my $t=($r<2)?0:-1;
		# offset by $t so the output spans [0,1] when $r is 1, or [-1,1] when $r is 2
		sub{$_[1]->{t}=$_[0]if($_[0]>$_[1]->{t});$_[0]/$_[1]->{t}*$r+$t}
	}

	# Self-explanatory, pretty much. $threshold is used to decide if an input 

Mesh.pm  view on Meta::CPAN


=head1 VERSION & UPDATES

This is version B<0.44>, an update release for version 0.43.

This release fixes the usage conflict with Perl 5.3.3.

With this version I have gone through and tuned up many areas
of this module, including the descent algorithm in learn(),
as well as adding four custom activation functions and several export 
tag sets. With this release, I have also included a few
new and more practical example scripts. (See ex_wine.pl) This release 
also includes a simple example of an ALN (Adaptive Logic Network) made
with this module. See ex_aln.pl. Also in this release is support for 
loading data sets from simple CSV-like files. See the load_set() method 
for details. This version also fixes a big bug that I never knew about 
until writing some demos for this version: when trying to use 
more than one output node, the mesh would freeze in learning. That 
is fixed now, and you can have as many outputs as you want (how does 3 
inputs and 50 outputs sound? :-)


=head1 DESCRIPTION

AI::NeuralNet::Mesh is an optimized, accurate neural network Mesh.
It was designed with accuracy and speed in mind. 

This network model is very flexible. It will allow for classic binary
operation or any range of integer or floating-point inputs you care
to provide. With this you can change activation types on a per-node or
per-layer basis (you can even include your own anonymous subs as 
activation types). You can add sigmoid transfer functions and control
the threshold. You can learn data sets in batch, and load CSV data
set files. You can do almost anything you need to with this module.
This code is designed to be flexible. Any new ideas for this module?
See AUTHOR, below, for contact info.

This module is designed to also be a customizable, extensible 
neural network simulation toolkit. Through a combination of setting
the $Connector variable and using custom activation functions, as
well as basic package inheritance, you can simulate many different
types of neural network structures with very little new code written
by you.

In this module I have included a more accurate form of "learning" for the
mesh. This form performs descent toward a local error minimum (0) on a 
directional delta, rather than on the desired value for that node. This allows
for better and more accurate results with larger datasets. This module also
uses a simpler recursion technique which, surprisingly, is more accurate than
the original technique that I've used in other ANNs.

=head1 EXPORTS

This module exports three functions by default:

	range
	intr
	pdiff
	
See range(), intr(), and pdiff() for descriptions of their respective functions.

Also provided are several export tag sets for usage in the form of:

	use AI::NeuralNet::Mesh ':tag';
	
Tag sets are:

	:default 
	    - These functions are always exported.
		- Exports:
		range()
		intr()
		pdiff()
	
	:all
		- Exports:

Mesh.pm  view on Meta::CPAN

		{ },
		...
	);

You are passing an array ref whose every element is a hash reference. Each
hash reference, or more precisely, each element in the array reference you are passing
to the constructor, represents a layer in the network. Like the constructor above,
the first element is the input layer, and the last is the output layer. The rest are
hidden layers.

Each hash reference is expected to have AT LEAST the "nodes" key set to the number
of nodes (neurons) in that layer. The other two keys are optional. If "activation" is left
out, it defaults to "linear". If "threshold" is left out, it defaults to 0.50.

The "activation" key can be one of four values:

	linear                    ( simply use sum of inputs as output )
	sigmoid    [ sigmoid_1 ]  ( only positive sigmoid )
	sigmoid_2                 ( positive / 0 /negative sigmoid )
	\&code_ref;

Mesh.pm  view on Meta::CPAN

The code ref is expected to return a value to be used as the output of the node.
The code ref also has access to all the data of that node through the second argument,
a blessed hash reference to that node.

See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions
other than the ones listed above.

Three of the activation syntaxes are shown in the first constructor above, the "linear",
"sigmoid" and code ref types.

You can also set the activation and threshold values after network creation with the
activation() and threshold() methods. 
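For concreteness, here is a minimal sketch of that hash-ref form. The layer sizes, the
threshold value, and the clipping sub are arbitrary values chosen for illustration only:

	my $net = AI::NeuralNet::Mesh->new([
		{ nodes => 5 },                          # input layer, "linear" by default
		{ nodes => 3,                            # hidden layer
		  activation => 'sigmoid',
		  threshold  => 0.60 },
		{ nodes => 1,                            # output layer with a custom code ref
		  activation => sub {
		  	my ($sum, $node) = @_;
		  	return ($sum > 1) ? 1 : $sum;        # clip the output at 1
		  } },
	]);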

	



=item $net->learn($input_map_ref, $desired_result_ref [, options ]);

NOTE: learn_set() now has increment-degrading turned OFF by default. See note
on the degrade flag, below.

This will 'teach' a network to associate a new input map with a desired 
result. It will return a string containing benchmarking information. 

You can also specify strings as inputs and outputs to learn, and they will be 
crunched automatically. Example:

	$net->learn('corn', 'cob');
	

Mesh.pm  view on Meta::CPAN

	 

$learning_gradient is an optional value used to adjust the weights of the internal
connections. If $learning_gradient is omitted, it defaults to 0.002.
 
$maximum_iterations is the maximum number of iterations the loop should do.
It defaults to 1024.  Set it to 0 if you never want the loop to quit before
the pattern is perfectly learned.

$maximum_allowable_percentage_of_error is the maximum allowable error to have. If 
this is set, then learn() will return when the percentage difference between the
actual results and desired results falls below $maximum_allowable_percentage_of_error.
If you do not include 'error', or $maximum_allowable_percentage_of_error is set to -1,
then learn() will not return until it gets an exact match for the desired result OR it
reaches $maximum_iterations.

$degrade_increment_flag is a simple flag used to allow/disallow increment degrading
during learning based on a product of the error difference with several other factors.
$degrade_increment_flag is off by default. Setting $degrade_increment_flag to a true
value turns increment degrading on. 

In previous module releases $degrade_increment_flag was not used, as increment degrading
was always on. In this release I have looked at several other network types as well
as several texts and decided that it would be better not to use increment degrading by default. The
option is still there for those who feel the inclination to use it. I have found some areas
that do need the degrade flag in order to work at a faster speed. See test.pl for an example. If
the degrade flag weren't in test.pl, it would take a very long time to learn.
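Putting the options together, a call might look like this (all values here are arbitrary
illustrations, not recommendations):

	$net->learn([0,1,1], [1],
				inc     => 0.05,   # learning gradient
				max     => 2000,   # give up after 2000 iterations
				error   => 5,      # ...or stop when error drops below 5%
				degrade => 1);     # enable increment degrading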



=item $net->learn_set(\@set, [ options ]);

This takes the same options as learn() (learn_set() uses learn() internally) 
and allows you to specify a set to learn, rather than individual patterns. 
A dataset is an array reference with at least two elements in the array, 
each element being another array reference (or now, a scalar string). For 
each pattern to learn, you must specify an input array ref, and an output 
array ref as the next element. Example:
	
	my @set = (
		# inputs        outputs
		[ 1,2,3,4 ],  [ 1,3,5,6 ],
		[ 0,2,5,6 ],  [ 0,2,1,2 ]
	);


Inputs and outputs in the dataset can also be strings.

See the paragraph on measuring forgetfulness, below. There are 
two learn_set()-specific option tags available:

	flag     =>  $flag
	pattern  =>  $row

If "flag" is set to some TRUE value, as in "flag => 1" in the hash of options, or if the option "flag"
is not set, then it will return a percentage representing the amount of forgetfulness. Otherwise,
learn_set() will return an integer specifying the amount of forgetfulness when all the patterns 
are learned. 
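For instance, a sketch that passes some learn() options through and asks for the raw
difference instead of a percentage (the values here are arbitrary):

	my $diff = $net->learn_set(\@set,
							   inc  => 0.1,    # learning gradient passed through to learn()
							   max  => 500,    # iteration cap per pattern
							   flag => 0);     # return the raw difference, not a percentage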

If "pattern" is set, then learn_set() will use that pattern in the data set to measure forgetfulness by.
If "pattern" is omitted, it defaults to the first pattern in the set. Example:

	my @set = (
		[ 0,1,0,1 ],  [ 0 ],
		[ 0,0,1,0 ],  [ 1 ],
		[ 1,1,0,1 ],  [ 2 ],  #  <---
		[ 0,1,1,0 ],  [ 3 ]
	);
	
If you wish to measure forgetfulness as indicated by the line with the arrow, then you would
pass 2 as the "pattern" option, as in "pattern => 2".

Now why the heck would anyone want to measure forgetfulness, you ask? Maybe you wonder how I 
even measure that. Well, it is not a vital value that you have to know. I just put in a 
"forgetfulness measure" one day because I thought it would be neat to know. 

How the module measures forgetfulness is this: First, it learns all the patterns 
in the set provided, then it will run the very first pattern (or whatever pattern
is specified by the "row" option) in the set after it has finished learning. It 
will compare the run() output with the desired output as specified in the dataset. 
In a perfect world, the two should match exactly. What we measure is how much that 
they don't match, thus the amount of forgetfulness the network has.

Example (from examples/ex_dow.pl):

	# Data from 1989 (as far as I know..this is taken from example data on BrainMaker)
	my @data = ( 
		#	Mo  CPI  CPI-1 CPI-3 	Oil  Oil-1 Oil-3    Dow   Dow-1 Dow-3   Dow Ave (output)
		[	1, 	229, 220,  146, 	20.0, 21.9, 19.5, 	2645, 2652, 2597], 	[	2647  ],
		[	2, 	235, 226,  155, 	19.8, 20.0, 18.3, 	2633, 2645, 2585], 	[	2637  ],
		[	3, 	244, 235,  164, 	19.6, 19.8, 18.1, 	2627, 2633, 2579], 	[	2630  ],
		[	4, 	261, 244,  181, 	19.6, 19.6, 18.1, 	2611, 2627, 2563], 	[	2620  ],
		[	5, 	276, 261,  196, 	19.5, 19.6, 18.0, 	2630, 2611, 2582], 	[	2638  ],
		[	6, 	287, 276,  207, 	19.5, 19.5, 18.0, 	2637, 2630, 2589], 	[	2635  ],
		[	7, 	296, 287,  212, 	19.3, 19.5, 17.8, 	2640, 2637, 2592], 	[	2641  ] 		
	);
	
	# Learn the set
	my $f = $net->learn_set(\@data, 
					  inc	=>	0.1,	
					  max	=>	500,
					 );
			
	# Print it 
	print "Forgetfulness: $f%";

    
This is a snippet from the example script examples/finance.pl, which demonstrates DOW average
prediction for the next month. A simpler set definition would be as such:

	my @data = (
		[ 0,1 ], [ 1 ],
		[ 1,0 ], [ 0 ]
	);
	
	$net->learn_set(\@data);
	
Same effect as above, but not the same data (obviously).


=item $net->run($input_map_ref);

This method will apply the given array ref at the input layer of the neural network, and
it will return an array ref to the output of the network. run() will now automatically crunch() 
a string given as an input (See the crunch() method for info on crunching).

Example Usage:
	
	my $inputs  = [ 1,1,0,1 ];
	my $outputs = $net->run($inputs);

You can also do this with a string:
                                                                                  
	my $outputs = $net->run('cloudy - wind is 5 MPH NW');
	

See also run_uc() and run_set() below.


=item $net->run_uc($input_map_ref);

This method does the same thing as this code:
	
	$net->uncrunch($net->run($input_map_ref));

All run_uc() does is automatically call uncrunch() on the output, regardless
of whether the input was crunch() -ed or not.
	

=item $net->run_set($set);
                                                                                    
This takes an array ref of the same structure as the learn_set() method, above. It returns
an array ref. Each element in the returned array ref represents the output for the corresponding
element in the dataset passed. Uses run() internally.


=item $net->get_outs($set);

Simple utility function which takes an array ref of the same structure as the learn_set() method,
above. It returns an array ref of the same type as run_set() wherein each element contains an
output value. The output values are the target values specified in the $set passed. Each element
in the returned array ref represents the output value for the corresponding row in the dataset
passed. (A row is two elements of the dataset together; see learn_set() for the dataset structure.)
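As a sketch, run_set() and get_outs() can be combined to compare a trained network's
outputs against the targets in a set (the variable names are illustrative only):

	my $got    = $net->run_set(\@set);
	my $wanted = $net->get_outs(\@set);
	for my $i (0..$#{$got}) {
		print "pattern $i: got $got->[$i]->[0], wanted $wanted->[$i]->[0]\n";
	}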

=item $net->load_set($file,$column,$separator);

Loads a CSV-like dataset from disk

Returns a data set of the same structure as required by the
learn_set() method. $file is the disk file to load the set from.
$column is an optional variable specifying the column in the 
data set to use as the class attribute. $column defaults to 0.
$separator is an optional variable specifying the separator
character between values. $separator defaults to ',' (a single comma). 
NOTE: This does not handle quoted fields, or any record
separator other than "\n".

The returned array ref is suitable for passing directly to
learn_set() or get_outs().
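A typical use, in the spirit of examples/ex_wine.pl (the file name and column are
illustrative), would be:

	# Column 0 of wine.dat holds the class attribute; fields are comma-separated
	my $data = $net->load_set('wine.dat', 0, ',');
	$net->learn_set($data);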
	

=item $net->range();

See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions.


=item $net->benchmark();

=item $net->benchmarked();

Mesh.pm  view on Meta::CPAN


Toggles debugging off when called with no arguments. 



=item $net->save($filename);

This will save the complete state of the network to disk, including all weights and any
words crunched with crunch() . Also saves the layer size and activations of the network.

NOTE: The only activation type NOT saved is the CODE ref type, which must be set again
after loading.

This uses a simple flat-file text storage format, and therefore the network files should
be fairly portable.

This method will return undef if there was a problem with writing the file. If there is an
error, it will set the internal error message, which you can retrieve with the error() method,
below.

If there were no errors, it will return a reference to $net.
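Because of that return value, a save can be error-checked in one line (the file name here
is just an illustration):

	$net->save('my.mesh') or die "Save failed: " . $net->error();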


=item $net->load($filename);

This will load from disk any network saved by save() and completely restore the internal
state to the point at which save() was called.

If the file is of an invalid file type, then load() will
return undef. Use the error() method, below, to print the error message.

If there were no errors, it will return a reference to $net.

UPDATE: $filename can now be a newline-separated set of mesh data. This enables you
to do $net->load(join("\n",<DATA>)) and other fun things. I added this mainly
for a demo I'm writing but am not quite done with yet. So, Cheers!
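A minimal error-checked load (the file name is hypothetical) looks like:

	my $net = AI::NeuralNet::Mesh->new(2,2,1);
	$net->load('my.mesh') or die "Load failed: " . $net->error();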



=item $net->activation($layer,$type);

This sets the activation type for layer C<$layer>.

C<$type> can be one of four values:

	linear                    ( simply use sum of inputs as output )
	sigmoid    [ sigmoid_1 ]  ( only positive sigmoid )
	sigmoid_2                 ( positive / 0 /negative sigmoid )
	\&code_ref;

"sigmoid_1" is an alias for "sigmoid". 

Mesh.pm  view on Meta::CPAN

other than the ones listed above.

The activation type for each layer is preserved across load/save calls. 

EXCEPTION: Due to the constraints of Perl, I cannot load/save the actual subs that the code
ref option points to. Therefore, you must re-apply any code ref activation types after a 
load() call.
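For example, a code-ref activation might be re-applied after a load() like this (the layer
number and the sub body are only illustrative):

	$net->load('my.mesh') or die $net->error();
	$net->activation(1, sub {
		my ($sum, $node) = @_;
		return ($sum < 0) ? 0 : $sum;   # simple rectifier
	});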

=item $net->node_activation($layer,$node,$type);

This sets the activation function for a specific node in a layer. The same notes apply
here as to the activation() method above.


=item $net->threshold($layer,$value);

This sets the activation threshold for a specific layer. The threshold is only used
when activation is set to "sigmoid", "sigmoid_1", or "sigmoid_2". 


=item $net->node_threshold($layer,$node,$value);

This sets the activation threshold for a specific node in a layer. The threshold is only used
when activation is set to "sigmoid", "sigmoid_1", or "sigmoid_2".  
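A short sketch of layer-wide versus per-node settings (the layer and node numbers are
arbitrary):

	$net->activation(2, 'sigmoid');       # sigmoid output layer
	$net->threshold(2, 0.50);             # layer-wide threshold
	$net->node_threshold(2, 0, 0.75);     # node 0 needs a stronger signal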

=item $net->join_cols($array_ref,$row_length_in_elements,$high_state_character,$low_state_character);

This is more of a utility function than any real necessary function of the package.
Instead of joining all the elements of the array together in one long string, like join() ,
it prints the elements of $array_ref to STDOUT, adding a newline (\n) after every $row_length_in_elements
elements. Additionally, if you include a $high_state_character and a $low_state_character,
it will print the $high_state_character (which can be more than one character) for every element that
has a true value, and the $low_state_character for every element that has a false value. 
If you do not supply a $high_state_character, or the $high_state_character is a null or empty or 

Mesh.pm  view on Meta::CPAN

=item $net->extend(\@array_of_hashes);

This allows you to re-apply any activations and thresholds with the same array ref with which
you created the network. This is useful for re-applying code ref activations after a load()
call without having to type the code ref twice.

You can also specify the extension in a simple array ref like this:

	$net->extend([2,3,1]);
	
Which will simply add more nodes if needed to set the number of nodes in each layer to their 
respective elements. This works just like the respective new() constructor, above.

NOTE: Your net will probably require re-training after adding nodes.
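As a sketch of the re-apply use mentioned above, keeping the original constructor array
around makes this a one-liner after load() (the layer layout and the my_act sub are
hypothetical):

	my $layers = [
		{ nodes => 2 },
		{ nodes => 3, activation => \&my_act },   # code refs are NOT saved to disk
		{ nodes => 1 },
	];
	my $net = AI::NeuralNet::Mesh->new($layers);
	# ... later, after $net->save() and a fresh $net->load() ...
	$net->extend($layers);                        # re-apply the code-ref activation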


=item $net->extend_layer($layer,\%hash);

With this you can modify only one layer, with its specifications in a hash reference. This hash
reference uses the same keys as the last new() constructor form, above. 
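For the hash form, a single layer could be reshaped like this (the values are illustrative):

	$net->extend_layer(1, { nodes      => 4,
							activation => 'sigmoid',
							threshold  => 0.60 });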

You can also specify just the number of nodes for the layer in this form:

	$net->extend_layer(0,5);

Which will set the number of nodes in layer 0 to 5 nodes. This is the same as calling:
	
	$net->add_nodes(0,5);

Which does the exact same thing. See add_nodes() below.

NOTE: Your net will probably require re-training after adding nodes.


=item $net->add_nodes($layer,$total_nodes);

Mesh.pm  view on Meta::CPAN

=item $net->uncrunch($array_ref);

Uncrunches a map (array ref) into a scalar string of words separated by ' ' and returns the 
string. This is meant to be used as a counterpart to the crunch() method, above, possibly to 
uncrunch() the output of a run() call. Consider the code below (also in ./examples/ex1.pl):
                           
	use AI::NeuralNet::Mesh;
	my $net = AI::NeuralNet::Mesh->new(2,3);
	
	for (0..3) {
		$net->learn_set([
			$net->crunch("I love chips."),  $net->crunch("That's Junk Food!"),
			$net->crunch("I love apples."), $net->crunch("Good, Healthy Food."),
			$net->crunch("I love pop."),    $net->crunch("That's Junk Food!"),
			$net->crunch("I love oranges."),$net->crunch("Good, Healthy Food.")
		]);
	}
	
	print $net->run_uc("I love corn."),"\n";


Mesh.pm  view on Meta::CPAN

that on my system.) As you can see, the associations are not yet perfect, but it can make
for some interesting demos!



=item $net->crunched($word);

This will return undef if the word is not in the internal crunch list, or it will return the
index of the word if it exists in the crunch list. 

If the word is not in the list, it will set the internal error value with a text message
that you can retrieve with the error() method, below.

=item $net->word($word);

A function alias for crunched().


=item $net->col_width($width);

This is useful for formatting the debugging output of Level 4 if you are learning simple 
bitmaps. This will set the debugger to automatically insert a line break after that many
elements in the map output when dumping the currently run map during a learn loop.

It will return the current width when called with a 0 or undef value.

The column width is preserved across load() and save() calls.
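For instance, when learning the 5x5 bitmaps from the examples, you might use (the width
value is arbitrary):

	$net->col_width(5);                          # break the debug map every 5 elements
	print "width: ", $net->col_width(0), "\n";   # a 0 or undef argument returns the current width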


=item $net->random($rand);

This will set the randomness factor for the network. Default is 0. When called 
with no arguments, or an undef value, it will return the current randomness value. When
called with a 0 value, it will disable randomness in the network. The randomness factor
is preserved across load() and save() calls. 


=item $net->const($const);

This sets the run const. for the network. The run const. is a value that is added
to every input line when a set of inputs are run() or learn() -ed, to prevent the
network from hanging on a 0 value. When called with no arguments, it returns the current
const. value. It defaults to 0.0001 on a newly-created network. The run const. value
is preserved across load() and save() calls.
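A short sketch of both knobs together (the values are just the documented defaults plus a
small noise factor):

	$net->random(0.001);     # add a little randomness
	$net->const(0.0001);     # explicit run constant (the default)
	print "rand=", $net->random(), " const=", $net->const(), "\n";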


=item $net->error();

Returns the last error message which occurred in the mesh, or undef if no errors have
occurred.

Mesh.pm  view on Meta::CPAN

the time comes.


=item range(0..X);

=item range(@range);

=item range(A,B,C);

range() returns a closure limiting the output 
of that node to a specified set of values.
Good for use in output layers.

Usage example:
	$net->activation(4,range(0..5));
or (in the new() hash constructor form):
	..
	{ 
		nodes		=>	1,
		activation	=>	range 5..2
	}
	..
You can also pass an array containing the range
values (not an array ref), or you can pass a comma-
separated list of values as parameters:

	$net->activation(4,range(@numbers));
	$net->activation(4,range(6,15,26,106,28,3));

Note: when using a range() activator, train the
net TWICE on the data set, because the first time
the range() function searches for the top value in
the inputs, and therefore results could fluctuate.
The second learning cycle guarantees more accuracy.

The actual code that implements the range closure is
a bit convoluted, so I will expand on it here as a simple
tutorial for custom activation functions.

	= line 1 = 	sub {
	= line 2 =		my @values = ( 6..10 );

Mesh.pm  view on Meta::CPAN

	= line 4 =		my $self  = shift;
	= line 5 =		$self->{top_value}=$sum if($sum>$self->{top_value});
	= line 6 =		my $index = intr($sum/$self->{top_value}*$#values);
	= line 7 =		return $values[$index];
	= line 8 =	}

Now, the actual function fits in one line of code, but I expanded it a bit
here. Line 2 creates our array of allowed output values. Lines 3 and 4 
grab our parameters off the stack, which give us access to the
internals of this node. Line 5 checks to see if the sum output of this
node is higher than any previously encountered, and, if so, it sets
the marker higher. This also shows that you can use the $self reference
to maintain information across activations. This technique is also used
in the ramp() activator. Line 6 computes the index into the allowed
values array by first scaling the $sum to be between 0 and 1 and then
expanding it to fit smoothly inside the number of elements in the array. Then
we simply round to an integer and pluck that index from the array and
use it as the output value for that node. 
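Here is the same idea as a self-contained sketch (it assumes the intr() helper this module
exports; the value list and layer number are arbitrary):

	sub my_range {
		my @values = @_;
		return sub {
			my ($sum, $node) = @_;
			# remember the largest sum seen so far on this node
			$node->{top_value} = $sum if $sum > ($node->{top_value} || 0);
			return $values[0] unless $node->{top_value};   # avoid dividing by zero
			# scale the sum into an index over the allowed values
			my $index = intr($sum / $node->{top_value} * $#values);
			return $values[$index];
		};
	}
	$net->activation(4, my_range(6..10));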

See? It's not that hard! Using custom activation functions, you could do
just about anything with the node that you want to, since you have

Mesh.pm  view on Meta::CPAN


ramp() performs smooth ramp activation between 0 and 1 if $r is 1, 
or between -1 and 1 if $r is 2. $r defaults to 1.	

You can get this into your namespace with the ':acts' export 
tag as so:
	
	use AI::NeuralNet::Mesh ':acts';

Note: when using a ramp() activator, train the
net at least TWICE on the data set, because the first 
time the ramp() function searches for the top value in
the inputs, and therefore results could fluctuate.
The second learning cycle guarantees more accuracy.

No code to show here, as it is almost exactly the same as range().


=item and_gate($threshold);

Self-explanatory, pretty much. This turns the node into a basic AND gate.

Mesh.pm  view on Meta::CPAN


	$AI::NeuralNet::Mesh::Connector = 'main::tree'
	
The tree() function is called as a blessed method when it is used internally, providing
access to the blessed reference in the first argument. See notes on CUSTOM NETWORK CONNECTORS,
below, for more information on creating your own custom connector.


=item $AI::NeuralNet::Mesh::DEBUG

This variable controls the verbosity level. It will not hurt anything to set this 
directly, yet most people find it easier to set it using the debug() method, or 
any of its aliases.


=head1 CUSTOM NETWORK CONNECTORS

Creating custom network connectors is a step up from average use of this module. 
However, it can be very useful in creating other styles of neural networks, other
than the default fully-connected feed-forward network. 

You create a custom connector by setting the variable $AI::NeuralNet::Mesh::Connector
to the fully qualified name of the function used to make the actual connections
between the nodes in the network. This variable contains '_c' by default; if you set
this variable, be sure to use the fully qualified name of your connector function. For example,
in the ALN example, I use a connector in the main package called tree() instead of
the default connector. Before I call the new() constructor, I use this line of code:

	$AI::NeuralNet::Mesh::Connector = 'main::tree'
	
The tree() function is called as a blessed method when it is used internally, providing
access to the blessed reference in the first argument. 

Mesh.pm  view on Meta::CPAN

Thanks to Rodin for continual suggestions and questions about the module and more.

=head1 DOWNLOAD

You can always download the latest copy of AI::NeuralNet::Mesh
from http://www.josiah.countystart.com/modules/get.pl?mesh:pod


=head1 MAILING LIST

A mailing list has been set up for AI::NeuralNet::Mesh and AI::NeuralNet::BackProp. 
The list is for discussion of AI and neural net related topics as they pertain to 
AI::NeuralNet::BackProp and AI::NeuralNet::Mesh. I will also announce to the group
each time a new release of AI::NeuralNet::Mesh is available.

The list address is:
	 ai-neuralnet-backprop@egroups.com 
	 
To subscribe, send a blank email to:
	ai-neuralnet-backprop-subscribe@egroups.com  

README  view on Meta::CPAN

** What is this?

AI::NeuralNet::Mesh is an optimized, accurate neural network Mesh.
It was designed with accuracy and speed in mind. 

This network model is very flexible. It will allow for classic binary
operation or any range of integer or floating-point inputs you care
to provide. With this you can change activation types on a per-node or
per-layer basis (you can even include your own anonymous subs as 
activation types). You can add sigmoid transfer functions and control
the threshold. You can learn data sets in batch, and load CSV data
set files. You can do almost anything you need to with this module.
This code is designed to be flexible. Any new ideas for this module?
Contact Josiah Bryan at <jdb@wcoil.com>

This module is designed to also be a customizable, extensible 
neural network simulation toolkit. Through a combination of setting
the $Connector variable and using custom activation functions, as
well as basic package inheritance, you can simulate many different
types of neural network structures with very little new code written
by you. (See ex_aln.pl)

As always, included is a cleaned, CSS-ed, HTML-format of the POD docs.

** What's new?

From the POD:
This is version 0.44, a bug fix for the third release of the module.

This fixes a compatibility issue that 0.43 had with Perl 5.3.3.

With this version I have gone through and tuned up many areas
of this module, including the descent algorithm in learn(),
as well as adding four custom activation functions and several export 
tag sets. With this release, I have also included a few
new and more practical example scripts. (See ex_wine.pl) This release 
also includes a simple example of an ALN (Adaptive Logic Network) made
with this module. See ex_aln.pl. Also in this release is support for 
loading data sets from simple CSV-like files. See the load_set() method 
for details. This version also fixes a big bug that I never knew about 
until writing some demos for this version: when trying to use 
more than one output node, the mesh would freeze in learning. That 
is fixed now, and you can have as many outputs as you want (how does 3 
inputs and 50 outputs sound? :-) Also in this release is output range
limiting via the range() activation function.

** What do you think?

Now I know you people are out there that are using the module...

examples/ex_add.pl  view on Meta::CPAN

=begin
    
    File:	examples/ex_add.pl
	Author: Josiah Bryan, <jdb@wcoil.com>
	Desc: 

		This demonstrates the ability of a neural net to generalize 
		and predict what the correct result is for inputs that it has 
		never seen before.
		
		This teaches a network to add 11 sets of numbers, then it asks 
		the user for two numbers to add and it displays the results of 
		the user's input.

=cut

	use AI::NeuralNet::Mesh;
	
	my $addition = new AI::NeuralNet::Mesh(2,2,1);
	
	if(!$addition->load('add.mesh')) {
		$addition->learn_set([	
			[ 1,   1   ], [ 2    ] ,
			[ 1,   2   ], [ 3    ],
			[ 2,   2   ], [ 4    ],
			[ 20,  20  ], [ 40   ],
			[ 50,  50  ], [ 100  ],
			[ 60,  40  ], [ 100  ],
			[ 100, 100 ], [ 200  ],
			[ 150, 150 ], [ 300  ],
			[ 500, 500 ], [ 1000 ],
			[ 10,  10  ], [ 20   ],

examples/ex_add2.pl  view on Meta::CPAN

=begin
    
    File:	examples/ex_add2.pl
	Author: Rodin Porrata, <rodin@ursa.llnl.gov>
	Desc: 

		This script runs a test of the network's ability to add 
		and remember data sets, as well as testing the optimum "inc" to 
		learn and the optimum number of layers for a network.

=cut

	use AI::NeuralNet::Mesh;
	use Benchmark;
	use English;
	
	my $ofile = "addnet_data.txt";
	

examples/ex_add2.pl  view on Meta::CPAN

	 my @data = (
	  [   2633, 2665, 2685],  [ 2633 + 2665 + 2685 ],
	  [   2623, 2645, 2585],  [ 2623 + 2645 + 2585 ],
	  [  2627, 2633, 2579],  [ 2627 + 2633 + 2579 ],
	  [   2611, 2627, 2563],  [ 2611 + 2627 + 2563 ],
	  [  2640, 2637, 2592],  [ 2640 + 2637 + 2592 ]
	 );
	
	 print "Learning started, will cycle $top times with inc = $inc\n";
	
	  # Make it learn the whole dataset $top times
	  my @list;
	
	 my $t1=new Benchmark;
	 for my $a (1..$top)
	 {
	  print "Outer Loop: $a : ";
	
	  $forgetfulness = $net->learn_set( \@data,
	           inc  => $inc,
	           max  => 500,
	           error => -1);
	
	  print "Forgetfulness: $forgetfulness %\n";
	
	 }
	 my $t2=new Benchmark;
	
	 $runtime = timediff($t2,$t1);

examples/ex_add2.pl  view on Meta::CPAN

	
	 my @input = ( [ 2222, 3333, 3200 ],
	      [ 1111, 1222, 3211 ],
	      [ 2345, 2543, 3000 ],
	      [ 2654, 2234, 2534 ] );
	
	    test_net( $net, @input );
	}
	#.....................................................................
	 sub test_net {
	  my @set;
	  my $fb;
	  my $net = shift;
	  my @data = @_;
	  undef @percent_diff; #@answers; undef @predictions;
	
	  for( $i=0; defined( $data[$i] ); $i++ ){
	   @set = @{ $data[$i] };
	   $fb = $net->run(\@set)->[0];
	   # Print output
	   print "Test Factors: (",join(',',@set),")\n";
	   $answer = eval( join( '+',@set ));
	   push @percent_diff, 100.0 * abs( $answer - $fb )/ $answer;
	   print "Prediction : $fb      answer: $answer\n";
	  }
	 }
	
	

examples/ex_aln.pl  view on Meta::CPAN

	$net->learn([1,1,0,1,0,1,1,1],[0]), and each line represents 
	a layer):
	
	L R L L L L L L
	OR OR OR OR
	OR OR
	AND
	
	All the standard methods that work on AI::NeuralNet::Mesh work
	on the object returned by Tree(). load() and save() will correctly
	preserve the gate structure and types of your network. learn_set()
	and everything else works pretty much as expected. The only thing
	that is useless is the crunch() method, as this network only takes binary
	inputs. But...for those of you who couldn't live without integers
	in your network...I'm going to create a small package in the next 
	week, AI::NeuralNet::ALNTree, from this code. It will include 
	an integer-vectorizer (to convert your integers into bit vectors), a bit 
	vector class to play with, as well as support for concatenating and 
	learning bit vectors. But, for now, enjoy this!
	
	This file contains just a simple, functional, ALN implementation. 

examples/ex_aln.pl  view on Meta::CPAN

			my $b1 = intr($self->{mesh}->[$_]->{_inputs}->[0]->{weight});
			my $b2 = intr($self->{mesh}->[$_]->{_inputs}->[1]->{weight});
			$f=1 if( $b1 &&  $b2);
			$f=2 if(!$b1 && !$b2);
			my $lo = $self->{_last_output};
			if($lo!=$target) {
				# Adjust right lead if $lo, else adjust left lead
				($target &&  $lo)?$self->{_inputs}->[0]->{weight}++:$self->{_inputs}->[0]->{weight}--;
				($target && !$lo)?$self->{_inputs}->[1]->{weight}++:$self->{_inputs}->[1]->{weight}--;
			}
			# Thanks to Rolf Mandersheidd for this set of nested conditions on one line
			# This determines heuristic error responsibility on the children
			# and recurses the error up the tree.
			if($lo!=$target || $f!=($lo?1:2)) {
				$self->{_inputs}->[1]->{node}->adjust_weight($inc,$target) if($self->{_inputs}->[1]->{node});
			} else {
				$self->{_inputs}->[0]->{node}->adjust_weight($inc,$target) if($self->{_inputs}->[0]->{node});
			}
		};

	    # Set our custom node connector

examples/ex_alpha.pl  view on Meta::CPAN

        1,0,0,0,1,
        1,1,0,1,1,
        0,1,0,1,0,
        0,1,1,1,0,
        0,0,1,0,2
        ]
     ];
	
	if(!$net->load("alpha.mesh")) {
		#$net->range(0..29);
		$net->learn_set($letters);
		$net->save("alpha.mesh");
	}
			
	# Build a test map 
	my $tmp	=	[0,1,1,1,0,
				 1,0,0,0,1,
				 1,0,0,0,1,
				 1,1,1,1,1,
				 1,0,0,0,1,
				 1,0,0,0,1,

examples/ex_and.pl  view on Meta::CPAN

	
	# Uses 1 layer and 2 nodes per layer, with one output node
	my $net = new AI::NeuralNet::Mesh([
		{
			nodes		=>	2,    		# input layer, 2 nodes
			activation  =>	linear		# linear transfer function
		},
		{
			nodes		=>	1,			# output layer, 1 node
			activation	=>	sigmoid,	# sigmoid transfer function, (0/1)
			threshold	=>	0.75		# set threshold for sigmoid fn to 0.75
		}
	]);
	
	if(!$net->load('and.mesh')) {
		$net->learn_set([	
			[1,1], [1],
			[1,0], [0],
			[0,1], [0],
			[0,0], [0],
		]);
		$net->save('and.mesh');
	}

	print "Learning complete.\n";
	print "Testing with a gate value of (0,0):",$net->run([0,0])->[0],"\n";

examples/ex_bmp.pl  view on Meta::CPAN

	# Set resolution
	my $xres=5;
	my $yres=5;
	
	# Create a new net with 3 layers, $xres*$yres inputs, and 1 output
	my $net = AI::NeuralNet::Mesh->new(1,$xres*$yres,1);
	
	# Enable debugging
	$net->debug(4);
	
	# Create datasets.
	my @data = ( 
		[	0,1,1,0,0,
			0,0,1,0,0,
			0,0,1,0,0,
			0,0,1,0,0,
			0,1,1,1,2	],		[	1	],
		
		[	1,1,1,0,0,
			0,0,0,1,0,
			0,1,1,1,0,

examples/ex_bmp.pl  view on Meta::CPAN

			0,0,0,1,0,
			1,1,1,1,2	],		[	5	],
		
	);
    
    
	# If we haven't saved the net already, do the learning
	if(!$net->load('images.mesh')) {
		print "\nLearning started...\n";
		
		# Make it learn the whole dataset $top times
		my @list;
		my $top=3;
		for my $a (0..$top) {
			my $t1=new Benchmark;
			print "\n\nOuter Loop: $a\n";
			
			# Test forgetfulness
			my $f = $net->learn_set(\@data,	inc => 0.1);
			
			# Print it 
			print "\n\nForgetfulness: $f%\n";

			# Save net to disk				
			$net->save('images.mesh');

			my $t2=new Benchmark;
			my $td=timediff($t2,$t1);
			print "\nLoop $a took ",timestr($td),"\n";
		}
	}
                                                                          
	my @set=(		0,1,1,1,0,
					1,0,0,0,0,
					1,1,1,0,0,
					1,0,0,0,0,
					0,1,1,1,2		);
		
	
	# Image number
	my $fb=$net->run(\@set)->[0];
	
	
	# Print output
	print "\nTest Map: \n";
	$net->join_cols(\@set,5);
	print "Image number matched: $fb\n";
	


examples/ex_crunch.pl  view on Meta::CPAN



	use AI::NeuralNet::Mesh;
	
	my $net = AI::NeuralNet::Mesh->new(2,3);
	
	# Here crunch is good for storing sentance crunches
	my $bad  = $net->crunch("That's Junk Food!");
	my $good = $net->crunch("Good, Healthy Food.");
	
	my $set  = [
		"I love chips.",	$bad,
		"I love apples.",	$good,
		"I love pop.",		$bad,
		"I love oranges.",	$good
	];
	
	#$net->debug(4);
	for (0..2) {
		my $f = $net->learn_set($set);
		print "Forgotten: $f%\n";
	}
	
	# run() automatically crunches the string (run_uc() uses run() internally) and
	# run_uc() automatically uncrunches the results.
	print $net->run_uc("I love pop-tarts.");

examples/ex_dow.pl  view on Meta::CPAN


    use AI::NeuralNet::Mesh;
	use Benchmark;

	# Create a new net with 5 layers, 9 inputs, and 1 output
        my $net = AI::NeuralNet::Mesh->new(2,9,1);
	
	# Disable debugging
        $net->debug(2);
	
	# Create datasets.
	#	Note that these are fictitious values shown for illustration purposes
	#	only.  In the example, CPI is a certain month's consumer price
	#	index, CPI-1 is the index one month before, CPI-3 is the index 3
	#	months before, etc.

	my @data = ( 
		#	Mo  CPI  CPI-1 CPI-3 	Oil  Oil-1 Oil-3    Dow   Dow-1 Dow-3   Dow Ave (output)
		[	1, 	229, 220,  146, 	20.0, 21.9, 19.5, 	2645, 2652, 2597], 	[	2647  ],
		[	2, 	235, 226,  155, 	19.8, 20.0, 18.3, 	2633, 2645, 2585], 	[	2637  ],
		[	3, 	244, 235,  164, 	19.6, 19.8, 18.1, 	2627, 2633, 2579], 	[	2630  ],

examples/ex_dow.pl  view on Meta::CPAN

		[	5, 	276, 261,  196, 	19.5, 19.6, 18.0, 	2630, 2611, 2582], 	[	2638  ],
		[	6, 	287, 276,  207, 	19.5, 19.5, 18.0, 	2637, 2630, 2589], 	[	2635  ],
		[	7, 	296, 287,  212, 	19.3, 19.5, 17.8, 	2640, 2637, 2592], 	[	2641  ] 		
	);
    
    
	# If we haven't saved the net already, do the learning
        if(!$net->load('DOW.mesh')) {
		print "\nLearning started...\n";
		
		# Make it learn the whole dataset $top times
		my @list;
		my $top=1;
		for my $a (0..$top) {
			my $t1=new Benchmark;
			print "\n\nOuter Loop: $a\n";
			
			# Test forgetfulness
			my $f = $net->learn_set(\@data,	inc		=>	0.2,	
											max		=>	2000,
											error	=>	-1);
			
			# Print it 
			print "\n\nForgetfulness: $f%\n";

			# Save net to disk				
            $net->save('DOW.mesh');
            
			my $t2=new Benchmark;
			my $td=timediff($t2,$t1);
			print "\nLoop $a took ",timestr($td),"\n";
		}
	}
                                                                          
	# Run a prediction using fake data
	#			Month	CPI  CPI-1 CPI-3 	Oil  Oil-1 Oil-3    Dow   Dow-1 Dow-3    
	my @set=(	10,		352, 309,  203, 	18.3, 18.7, 16.1, 	2592, 2641, 2651	  ); 
	
	# Dow Ave (output)	
	my $fb=$net->run(\@set)->[0];
	
	# Print output
	print "\nTest Factors: (",join(',',@set),")\n";
	print "DOW Prediction for Month #11: $fb\n";
	

examples/ex_mult.pl  view on Meta::CPAN

=begin
    
    File:	examples/ex_mult.pl
	Author: Josiah Bryan, <jdb@wcoil.com>
	Desc: 

		This demonstrates the ability of a neural net to generalize and predict what the correct
		result is for inputs that it has never seen before.
		
		This teaches a network to multiply 6 sets of numbers, then it asks the user for 
		two numbers to multiply and then it displays the results of the user's input.

=cut

	use AI::NeuralNet::Mesh;
	
	my $multiply = new AI::NeuralNet::Mesh(2,2,1);
	
	if(!$multiply->load('mult.mesh')) {
		$multiply->learn_set([	
			[ 1,   1   ], [ 1      ] ,
			[ 2,   1   ], [ 2      ],
			[ 2,   2   ], [ 4      ],
			[ 2,   4   ], [ 8      ],
			[ 2,   8   ], [ 16     ],
			[ 9,   9   ], [ 81     ],
			[ 10,  5   ], [ 50     ],
			[ 20,  10  ], [ 200    ],
			[ 100, 50  ], [ 5000   ],
		]);

examples/ex_or.pl  view on Meta::CPAN

		as an OR gate with no learning and only a sigmoid transfer function
		on the output node.

=cut

	use AI::NeuralNet::Mesh;
	
	# Uses 1 layer and 2 nodes per layer, with one output node
	my $net = new AI::NeuralNet::Mesh(1,2,1);
	
	# Example of alternate ways to set activation and thresholds
	$net->activation(1,sigmoid);
	$net->threshold( 1,0.5);
	
	if(!$net->load('or.mesh')) {
		$net->learn_set([	
			[1,1], [1],
			[1,0], [1],
			[0,1], [1],
			[0,0], [0],
		]);
		$net->save('or.mesh');
	}

	print "Learning complete.\n";
	print "Testing with a gate value of (0,0):",$net->run([0,0])->[0],"\n";

examples/ex_sub.pl  view on Meta::CPAN

=begin
    
    File:	examples/ex_sub.pl
	Author: Josiah Bryan, <jdb@wcoil.com>
	Desc: 

		This demonstrates the ability of a neural net to generalize and predict what the correct
		result is for inputs that it has never seen before.
		
		This teaches a network to subtract 6 sets of numbers, then it asks the user for 
		two numbers to subtract and then it displays the results of the user's input.

=cut

	use AI::NeuralNet::Mesh;
	
	my $subtract = new AI::NeuralNet::Mesh(2,2,1);
	
	if(!$subtract->load('sub.mesh')) {
		$subtract->learn_set([	
			[ 1,   1   ], [ 0      ] ,
			[ 2,   1   ], [ 1      ],
			[ 10,  5   ], [ 5      ],
			[ 20,  10  ], [ 10     ],
			[ 100, 50  ], [ 50     ],
			[ 500, 200 ], [ 300    ],
		]);
		$subtract->save('sub.mesh');
	}
		

examples/ex_synop.pl  view on Meta::CPAN

	
	# Add a small amount of randomness to the network
	$net->random(0.001);

	# Demonstrate a simple learn() call
	my @inputs = ( 0,0,1,1,1 );
	my @outputs = ( 1,0,1,0,1 );
	
	print $net->learn(\@inputs, \@outputs),"\n";

	# Create a data set to learn
	my @set = (
		[ 2,2,3,4,1 ], [ 1,1,1,1,1 ],
		[ 1,1,1,1,1 ], [ 0,0,0,0,0 ],
		[ 1,1,1,0,0 ], [ 0,0,0,1,1 ]	
	);
	
	# Demo learn_set()
	my $f = $net->learn_set(\@set);
	print "Forgetfulness: $f unit\n";
	
	# Crunch a bunch of strings and return array refs
	my $phrase1 = $net->crunch("I love neural networks!");
	my $phrase2 = $net->crunch("Jay Lenno is wierd.");
	my $phrase3 = $net->crunch("The rain in spain...");
	my $phrase4 = $net->crunch("Tired of word crunching yet?");

	# Make a data set from the array refs
	my @phrases = (
		$phrase1, $phrase2,
		$phrase3, $phrase4
	);

	# Learn the data set	
	$net->learn_set(\@phrases);
	
	# Run a test phrase through the network
	my $test_phrase = $net->crunch("I love neural networking!");
	my $result = $net->run($test_phrase);
	
	# Get this, it prints "Jay Leno is  networking!" ...  LOL!
	print $net->uncrunch($result),"\n";

examples/ex_wine.pl  view on Meta::CPAN

		 	12) OD280/OD315 of diluted wines
		 	13) Proline            
		
		There are 178 total examples, with the class distribution
		as follows:
		
			class 1: 59 instances
			class 2: 71 instances
			class 3: 48 instances
			
		The datasets are stored in wine.dat, and the first
		column on every row is the class attribute for that
		row.

=cut

    use AI::NeuralNet::Mesh;
	use Benchmark;

	# Create a new net
    my $net = AI::NeuralNet::Mesh->new([13,45,1]);

	# Set activation on output node to constrain values
	# to a specific range of values.    
    $net->activation(2,range(1..3));
	
	# Enable debugging
	$net->verbose(4);

	# Load the data set
	my $data = $net->load_set('wine.dat',0);
	
	# Separate data based on class
	my $sets=[];
	for my $i (0..$#{$data}/2) {
		my $c = $data->[$i*2+1]->[0];
		print "Class of set $i: $c                                  \r";
		# inputs
		$sets->[$c]->[++$#{$sets->[$c]}] = $data->[$i*2];
		# class
		$sets->[$c]->[++$#{$sets->[$c]}] = $data->[$i*2+1];
	}                                  
	
			
	for(0..$#{$sets}) {
		next if(!defined $sets->[$_]->[0]);
		print "Size of set $_: ",$#{$sets->[$_]}/2,"\n";
	}
	
	# If we haven't saved the net already, do the learning
    if(!$net->load('wine.mesh')) {
		print "\nLearning started...\n";
		
		# Make it learn the whole dataset $top times
		my @list;
		my $top=5;
		for my $a (0..$top) {
			print "\n\nOuter Loop: $a\n";
			
			for(0..$#{$sets}) {
				next if(!defined $sets->[$_]->[0]);
				my $t1=new Benchmark;
				
				# Test forgetfulness
				my $f = $net->learn_set($sets->[$_],	inc		=>	0.2,	
														max		=>	2000,
														error	=>	0.01,
														leave	=>	2);
				
				# Print it 
				print "\n\nForgetfulness: $f%\n";
	
				my $t2=new Benchmark;
				my $td=timediff($t2,$t1);
				print "\nLoop [$a,$_] took ",timestr($td),"\n";

examples/ex_wine.pl  view on Meta::CPAN

			# Save net to disk				
            $net->save('wine.mesh');
		}
	}

	# Set activation on output node to constrain values
	# to a specific range of values.    
    $net->activation(2,range(1..3));
	
	my @cnts;
	for(0..$#{$sets}) {
		next if(!defined $sets->[$_]->[0]);
		my $s=$#{$sets->[$_]}/2;
		my $cnt=0;
		print "Set: $_\n";
		my $results=$net->run_set($sets->[$_]);
		for my $x (0..$s-1) {
			$cnt++ if($results->[$x]->[0]==$_);
		}
		$cnts[$_]=$cnt;
	}
	
	for(0..$#{$sets}) {
		next if(!defined $sets->[$_]->[0]);
		my $s=$#{$sets->[$_]}/2;
		print "Class $_: $cnts[$_] correct out of $s (",$net->p($cnts[$_],$s),"%).\n";
	}
		

mesh.htm  view on Meta::CPAN

        print &quot;AND test with inputs (1,1): $result_bit_2\n&quot;;

</PRE>
<P>
<HR>
<H1><A NAME="version & updates">VERSION &amp; UPDATES</A></H1>
<P>This is version <STRONG>0.43</STRONG>, the third release of this module.</P>
<P>With this version I have gone through and tuned up many areas
of this module, including the descent algorithm in learn(),
as well as four custom activation functions, and several export 
tag sets. With this release, I have also included a few
new and more practical example scripts. (See ex_wine.pl) This release 
also includes a simple example of an ALN (Adaptive Logic Network) made
with this module. See ex_aln.pl. Also in this release is support for 
loading data sets from simple CSV-like files. See the <A HREF="#item_load_set"><CODE>load_set()</CODE></A> method 
for details. This version also fixes a big bug that I never knew about 
until writing some demos for this version - that is, when trying to use 
more than one output node, the mesh would freeze in learning. But, that 
is fixed now, and you can have as many outputs as you want (how does 3 
inputs and 50 outputs sound? :-)</P>
<P>
<HR>
<H1><A NAME="description">DESCRIPTION</A></H1>
<P>AI::NeuralNet::Mesh is an optimized, accurate neural network Mesh.
It was designed with accuracy and speed in mind.</P>
<P>This network model is very flexible. It will allow for classic binary
operation or any range of integer or floating-point inputs you care
to provide. With this you can change activation types on a per node or
per layer basis (you can even include your own anonymous subs as 
activation types). You can add sigmoid transfer functions and control
the threshold. You can learn data sets in batch, and load CSV data
set files. You can do almost anything you need to with this module.
This code is designed to be flexible. Any new ideas for this module?
See AUTHOR, below, for contact info.</P>
<P>This module is designed to also be a customizable, extensible 
neural network simulation toolkit. Through a combination of setting
the $Connector variable and using custom activation functions, as
well as basic package inheritance, you can simulate many different
types of neural network structures with very little new code written
by you.</P>
<P>In this module I have included a more accurate form of ``learning'' for the
mesh. This form performs descent toward a local error minimum (0) on a 
directional delta, rather than on the desired value for that node. This allows
for better and more accurate results with larger datasets. This module also
uses a simpler recursion technique which, surprisingly, is more accurate than
the original technique that I've used in other ANNs.</P>
<P>
<HR>
<H1><A NAME="exports">EXPORTS</A></H1>
<P>This module exports three functions by default:</P>
<PRE>
        range
        intr
        pdiff
</PRE>
<P>See range(), intr(), and pdiff() for descriptions of their respective functions.</P>
<P>Also provided are several export tag sets for usage in the form of:</P>
<PRE>
        use AI::NeuralNet::Mesh ':tag';
</PRE>
<P>Tag sets are:</P>
<PRE>
        :default 
            - These functions are always exported.
                - Exports:
                range()
                intr()
                pdiff()

        :all
                - Exports:

mesh.htm  view on Meta::CPAN

                { },
                { },
                { },
                ...
        );</PRE>
<P>You are passing an array ref whose every element is a hash reference. Each
hash reference, or more precisely, each element in the array reference you are passing
to the constructor, represents a layer in the network. Like the constructor above,
the first element is the input layer, and the last is the output layer. The rest are
hidden layers.</P>
<P>Each hash reference is expected to have AT LEAST the ``nodes'' key set to the number
of nodes (neurons) in that layer. The other two keys are optional. If ``activation'' is left
out, it defaults to ``linear''. If ``threshold'' is left out, it defaults to 0.50.</P>
<P>The ``activation'' key can be one of four values:</P>
<PRE>
        linear                    ( simply use sum of inputs as output )
        sigmoid    [ sigmoid_1 ]  ( only positive sigmoid )
        sigmoid_2                 ( positive / 0 /negative sigmoid )
        \&amp;code_ref;</PRE>
<P>``sigmoid_1'' is an alias for ``sigmoid''.</P>
<P>The code ref option allows you to have a custom activation function for that layer.

mesh.htm  view on Meta::CPAN

<PRE>
        $output = &amp;$code_ref($sum_of_inputs, $self);
</PRE>
<P>The code ref is expected to return a value to be used as the output of the node.
The code ref also has access to all the data of that node through the second argument,
a blessed hash reference to that node.</P>
<P>See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions
other than the ones listed above.</P>
<P>Three of the activation syntaxes are shown in the first constructor above, the ``linear'',
``sigmoid'' and code ref types.</P>
<P>You can also set the activation and threshold values after network creation with the
<A HREF="#item_activation"><CODE>activation()</CODE></A> and <A HREF="#item_threshold"><CODE>threshold()</CODE></A> methods.</P>
<P></P>
<P></P>
<DT><STRONG><A NAME="item_learn">$net-&gt;learn($input_map_ref, $desired_result_ref [, options ]);</A></STRONG><BR>
<DD>
NOTE: <A HREF="#item_learn_set"><CODE>learn_set()</CODE></A> now has increment-degrading turned OFF by default. See note
on the degrade flag, below.
<P>This will 'teach' a network to associate a new input map with a desired 
result. It will return a string containing benchmarking information.</P>
<P>You can also specify strings as inputs and outputs to learn, and they will be 
crunched automatically. Example:</P>
<PRE>
        $net-&gt;learn('corn', 'cob');
</PRE>
<P>Note, the old method of calling crunch on the values still works just as well.</P>
<P>The first two arguments may be array refs (or now, strings), and they may be 

mesh.htm  view on Meta::CPAN

         max      =&gt;    $maximum_iterations
         error    =&gt;    $maximum_allowable_percentage_of_error
         degrade  =&gt;    $degrade_increment_flag</PRE>
<P>$learning_gradient is an optional value used to adjust the weights of the internal
connections. If $learning_gradient is omitted, it defaults to 0.002.
</P>
<P>$maximum_iterations is the maximum number of iterations the loop should do.
It defaults to 1024.  Set it to 0 if you never want the loop to quit before
the pattern is perfectly learned.</P>
<P>$maximum_allowable_percentage_of_error is the maximum allowable error to have. If 
this is set, then <A HREF="#item_learn"><CODE>learn()</CODE></A> will return when the percentage difference between the
actual results and desired results falls below $maximum_allowable_percentage_of_error.
If you do not include 'error', or $maximum_allowable_percentage_of_error is set to -1,
then <A HREF="#item_learn"><CODE>learn()</CODE></A> will not return until it gets an exact match for the desired result OR it
reaches $maximum_iterations.</P>
<P>$degrade_increment_flag is a simple flag used to allow/disallow increment degrading
during learning based on a product of the error difference with several other factors.
$degrade_increment_flag is off by default. Setting $degrade_increment_flag to a true
value turns increment degrading on.</P>
<P>In previous module releases $degrade_increment_flag was not used, as increment degrading
was always on. In this release I have looked at several other network types as well
as several texts and decided that it would be better to not use increment degrading. The
option is still there for those that feel the inclination to use it. I have found some cases
that do need the degrade flag in order to learn at a reasonable speed. See test.pl for an
example; without the degrade flag, test.pl would take a very long time to learn.</P>
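<P>For example, a hedged sketch of a <A HREF="#item_learn"><CODE>learn()</CODE></A> call using all four options (the input map, desired
result, and option values shown are arbitrary):</P>
<PRE>
        $net-&gt;learn([ 0,1,1,0 ], [ 1 ],
                    inc     =&gt; 0.01,   # learning gradient
                    max     =&gt; 500,    # give up after 500 loops
                    error   =&gt; 5,      # or when error falls below 5%
                    degrade =&gt; 1);     # enable increment degrading</PRE>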
<P></P>
<DT><STRONG><A NAME="item_learn_set">$net-&gt;learn_set(\@set, [ options ]);</A></STRONG><BR>
<DD>
This takes the same options as <A HREF="#item_learn"><CODE>learn()</CODE></A> (learn_set() uses <A HREF="#item_learn"><CODE>learn()</CODE></A> internally) 
and allows you to specify a set to learn, rather than individual patterns. 
A dataset is an array reference with at least two elements in the array, 
each element being another array reference (or now, a scalar string). For 
each pattern to learn, you must specify an input array ref, and an output 
array ref as the next element. Example:

<PRE>

        my @set = (
                # inputs        outputs
                [ 1,2,3,4 ],  [ 1,3,5,6 ],
                [ 0,2,5,6 ],  [ 0,2,1,2 ]
        );</PRE>
<P>Inputs and outputs in the dataset can also be strings.</P>
<P>See the paragraph on measuring forgetfulness, below. There are 
two learn_set()-specific option tags available:</P>
<PRE>
        flag     =&gt;  $flag
        row      =&gt;  $row</PRE>
<P>If ``flag'' is set to some TRUE value, as in ``flag =&gt; 1'' in the hash of options, or if the option ``flag''
is not set, then it will return a percentage representing the amount of forgetfulness. Otherwise,
<A HREF="#item_learn_set"><CODE>learn_set()</CODE></A> will return an integer specifying the amount of forgetfulness when all the patterns 
are learned.</P>
<P>If ``row'' is set, then <A HREF="#item_learn_set"><CODE>learn_set()</CODE></A> will use that pattern in the data set to measure forgetfulness by.
If ``row'' is omitted, it defaults to the first pattern in the set. Example:</P>
<PRE>
        my @set = (
                [ 0,1,0,1 ],  [ 0 ],
                [ 0,0,1,0 ],  [ 1 ],
                [ 1,1,0,1 ],  [ 2 ],  #  &lt;---
                [ 0,1,1,0 ],  [ 3 ]
        );
</PRE>
<P>If you wish to measure forgetfulness as indicated by the line with the arrow, then you would
pass 2 as the &quot;row&quot; option, as in &quot;row =&gt; 2&quot;.</P>
<P>Now why the heck would anyone want to measure forgetfulness, you ask? Maybe you wonder how I 
even measure that. Well, it is not a vital value that you have to know. I just put in a 
``forgetfulness measure'' one day because I thought it would be neat to know.</P>
<P>How the module measures forgetfulness is this: First, it learns all the patterns 
in the set provided, then it will run the very first pattern (or whatever pattern
is specified by the ``row'' option) in the set after it has finished learning. It 
will compare the <A HREF="#item_run"><CODE>run()</CODE></A> output with the desired output as specified in the dataset. 
In a perfect world, the two should match exactly. What we measure is how much 
they don't match, and thus the amount of forgetfulness the network has.</P>
<P>Example (from examples/ex_dow.pl):</P>
<PRE>
        # Data from 1989 (as far as I know..this is taken from example data on BrainMaker)
        my @data = ( 
                #       Mo  CPI  CPI-1 CPI-3    Oil  Oil-1 Oil-3    Dow   Dow-1 Dow-3   Dow Ave (output)
                [       1,      229, 220,  146,         20.0, 21.9, 19.5,       2645, 2652, 2597],      [       2647  ],
                [       2,      235, 226,  155,         19.8, 20.0, 18.3,       2633, 2645, 2585],      [       2637  ],
                [       3,      244, 235,  164,         19.6, 19.8, 18.1,       2627, 2633, 2579],      [       2630  ],
                [       4,      261, 244,  181,         19.6, 19.6, 18.1,       2611, 2627, 2563],      [       2620  ],
                [       5,      276, 261,  196,         19.5, 19.6, 18.0,       2630, 2611, 2582],      [       2638  ],
                [       6,      287, 276,  207,         19.5, 19.5, 18.0,       2637, 2630, 2589],      [       2635  ],
                [       7,      296, 287,  212,         19.3, 19.5, 17.8,       2640, 2637, 2592],      [       2641  ]                 
        );

        # Learn the set
        my $f = $net-&gt;learn_set(\@data, 
                                          inc   =&gt;      0.1,    
                                          max   =&gt;      500,
                                         );

        # Print it 
        print &quot;Forgetfullness: $f%&quot;;</PRE>
<P></P>
<P>This is a snippet from the example script examples/ex_dow.pl, which demonstrates DOW average
prediction for the next month. A simpler set definition would be:</P>
<PRE>
        my @data = (
                [ 0,1 ], [ 1 ],
                [ 1,0 ], [ 0 ]
        );

        $net-&gt;learn_set(\@data);</PRE>
<P>Same effect as above, but not the same data (obviously).</P>
<P></P>
<DT><STRONG><A NAME="item_run">$net-&gt;run($input_map_ref);</A></STRONG><BR>
<DD>
This method will apply the given array ref at the input layer of the neural network, and
it will return an array ref to the output of the network. <A HREF="#item_run"><CODE>run()</CODE></A> will now automatically <A HREF="#item_crunch"><CODE>crunch()</CODE></A> 
a string given as an input (See the <A HREF="#item_crunch"><CODE>crunch()</CODE></A> method for info on crunching).
<P>Example Usage:
</P>
<PRE>
        my $inputs  = [ 1,1,0,1 ];
        my $outputs = $net-&gt;run($inputs);</PRE>
<P>You can also do this with a string:
</P>
<PRE>
        my $outputs = $net-&gt;run('cloudy - wind is 5 MPH NW');</PRE>
<P>See also <A HREF="#item_run_uc"><CODE>run_uc()</CODE></A> and <A HREF="#item_run_set"><CODE>run_set()</CODE></A> below.</P>
<P></P>
<DT><STRONG><A NAME="item_run_uc">$net-&gt;run_uc($input_map_ref);</A></STRONG><BR>
<DD>
This method does the same thing as this code:

<PRE>
        $net-&gt;uncrunch($net-&gt;run($input_map_ref));</PRE>
<P>All <A HREF="#item_run_uc"><CODE>run_uc()</CODE></A> does is automatically call <A HREF="#item_uncrunch"><CODE>uncrunch()</CODE></A> on the output, regardless
of whether the input was <A HREF="#item_crunch"><CODE>crunch()</CODE></A>-ed or not.</P>
<P></P>
<DT><STRONG><A NAME="item_run_set">$net-&gt;run_set($set);</A></STRONG><BR>
<DD>
<P>This takes an array ref of the same structure as the learn_set() method, above. It returns
an array ref. Each element in the returned array ref represents the output for the corresponding
element in the dataset passed. Uses run() internally.</P>
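<P>A short usage sketch (the dataset shown is arbitrary):</P>
<PRE>
        my @set = (
                [ 0,1 ], [ 1 ],
                [ 1,0 ], [ 0 ]
        );
        my $results = $net-&gt;run_set(\@set);
        print $results-&gt;[0]-&gt;[0], &quot;\n&quot;;   # network output for the first input map</PRE>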
<DT><STRONG><A NAME="item_get_outs">$net-&gt;get_outs($set);</A></STRONG><BR>
<DD>
Simple utility function which takes an array ref of the same structure as the <A HREF="#item_learn_set"><CODE>learn_set()</CODE></A> method,
above. It returns an array ref of the same type as <A HREF="#item_run_set"><CODE>run_set()</CODE></A> wherein each element contains an
output value. The output values are the target values specified in the $set passed. Each element
in the returned array ref represents the output value for the corresponding row in the dataset
passed. (A row is two elements of the dataset together, see <A HREF="#item_learn_set"><CODE>learn_set()</CODE></A> for dataset structure.)
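<P>A minimal sketch, using the same arbitrary dataset shown for <A HREF="#item_run_set"><CODE>run_set()</CODE></A> above:</P>
<PRE>
        my $targets = $net-&gt;get_outs(\@set);   # [ [ 1 ], [ 0 ] ] for that set</PRE>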
<P></P>
<DT><STRONG><A NAME="item_load_set">$net-&gt;load_set($file,$column,$seperator);</A></STRONG><BR>
<DD>
Loads a CSV-like dataset from disk.
<P>Returns a data set of the same structure as required by the
<A HREF="#item_learn_set"><CODE>learn_set()</CODE></A> method. $file is the disk file to load the set from.
$column is an optional variable specifying the column in the 
data set to use as the class attribute. $column defaults to 0.
$separator is an optional variable specifying the separator
character between values. $separator defaults to ',' (a single comma). 
NOTE: This does not handle quoted fields, or any record
separator other than ``\n''.</P>
<P>The returned array ref is suitable for passing directly to
<A HREF="#item_learn_set"><CODE>learn_set()</CODE></A> or get_outs().</P>
<P></P>
<DT><STRONG><A NAME="item_range">$net-&gt;range();</A></STRONG><BR>
<DD>
See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions.
<P></P>
<DT><STRONG><A NAME="item_benchmark">$net-&gt;benchmark();</A></STRONG><BR>
<DD>
<DT><STRONG><A NAME="item_benchmarked">$net-&gt;benchmarked();</A></STRONG><BR>
<DD>
This returns a benchmark info string for the last <A HREF="#item_learn"><CODE>learn()</CODE></A> call.
<P></P>
<DT><STRONG><A NAME="item_debug">$net-&gt;debug($level);</A></STRONG><BR>
<DD>
Sets the debugging (verbosity) level for the mesh. Each level prints different internal
information during learning and running.
<P>Level 12 ($level = 12) : Level 12 prints a dot (period) [.] after each learning loop is
complete. This is useful for letting the user know that stuff is happening, but without
having to display any of the internal variables. I use this in the ex_aln.pl demo,
as well as the ex_agents.pl demo.</P>
<P>Toggles debugging off when called with no arguments.</P>
<P></P>
<DT><STRONG><A NAME="item_save">$net-&gt;save($filename);</A></STRONG><BR>
<DD>
This will save the complete state of the network to disk, including all weights and any
words crunched with <A HREF="#item_crunch"><CODE>crunch()</CODE></A>. Also saves the layer sizes and activations of the network.
<P>NOTE: The only activation type NOT saved is the CODE ref type, which must be set again
after loading.</P>
<P>This uses a simple flat-file text storage format, and therefore the network files should
be fairly portable.</P>
<P>This method will return undef if there was a problem with writing the file. If there is an
error, it will set the internal error message, which you can retrieve with the <A HREF="#item_error"><CODE>error()</CODE></A> method,
below.</P>
<P>If there were no errors, it will return a reference to $net.</P>
<P></P>
<DT><STRONG><A NAME="item_load">$net-&gt;load($filename);</A></STRONG><BR>
<DD>
This will load from disk any network saved by <A HREF="#item_save"><CODE>save()</CODE></A> and completely restore the internal
state to the point at which <A HREF="#item_save"><CODE>save()</CODE></A> was called.
<P>If the file is of an invalid file type, then <A HREF="#item_load"><CODE>load()</CODE></A> will
return undef. Use the <A HREF="#item_error"><CODE>error()</CODE></A> method, below, to print the error message.</P>
<P>If there were no errors, it will return a reference to $net.</P>
<P>UPDATE: $filename can now be a newline-separated set of mesh data. This enables you
to do $net-&gt;load(join(``\n'',&lt;DATA&gt;)) and other fun things. I added this mainly
for a demo I'm writing but am not quite done with yet. So, Cheers!</P>
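<P>A hedged sketch of the save/load/error pattern (the file name is arbitrary):</P>
<PRE>
        $net-&gt;save(&quot;add.mesh&quot;) or die &quot;Save failed: &quot; . $net-&gt;error();
        $net-&gt;load(&quot;add.mesh&quot;) or die &quot;Load failed: &quot; . $net-&gt;error();</PRE>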
<P></P>
<DT><STRONG><A NAME="item_activation">$net-&gt;activation($layer,$type);</A></STRONG><BR>
<DD>
This sets the activation type for layer <CODE>$layer</CODE>.
<P><CODE>$type</CODE> can be one of four values:</P>
<PRE>
        linear                    ( simply use sum of inputs as output )
        sigmoid    [ sigmoid_1 ]  ( only positive sigmoid )
        sigmoid_2                 ( positive / 0 / negative sigmoid )
        \&amp;code_ref;</PRE>
<P>``sigmoid_1'' is an alias for ``sigmoid''.</P>
<P>The code ref option allows you to have a custom activation function for that layer.
The code ref is called with this syntax:</P>
<PRE>
        $output = &amp;$code_ref($sum_of_inputs, $self);
</PRE>
<P>The code ref is expected to return a value to be used as the output of the node.
The code ref also has access to all the data of that node through the second argument,
a blessed hash reference to that node.</P>
<P>See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions
other than the ones listed above.</P>
<P>The activation type for each layer is preserved across load/save calls.</P>
<P>EXCEPTION: Due to the constraints of Perl, I cannot load/save the actual subs that the code
ref option points to. Therefore, you must re-apply any code ref activation types after a 
<A HREF="#item_load"><CODE>load()</CODE></A> call.</P>
<P></P>
<DT><STRONG><A NAME="item_node_activation">$net-&gt;node_activation($layer,$node,$type);</A></STRONG><BR>
<DD>
This sets the activation function for a specific node in a layer. The same notes apply
here as to the <A HREF="#item_activation"><CODE>activation()</CODE></A> method above.
<P></P>
<DT><STRONG><A NAME="item_threshold">$net-&gt;threshold($layer,$value);</A></STRONG><BR>
<DD>
This sets the activation threshold for a specific layer. The threshold is only used
when activation is set to ``sigmoid'', ``sigmoid_1'', or ``sigmoid_2''.
<P></P>
<DT><STRONG><A NAME="item_node_threshold">$net-&gt;node_threshold($layer,$node,$value);</A></STRONG><BR>
<DD>
This sets the activation threshold for a specific node in a layer. The threshold is only used
when activation is set to ``sigmoid'', ``sigmoid_1'', or ``sigmoid_2''.
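<P>A minimal sketch covering both node-level methods (layer, node, and values are arbitrary):</P>
<PRE>
        $net-&gt;node_activation(1, 0, 'sigmoid_2');   # node 0 of layer 1
        $net-&gt;node_threshold(1, 0, 0.3);</PRE>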
<P></P>
<DT><STRONG><A NAME="item_join_cols">$net-&gt;join_cols($array_ref,$row_length_in_elements,$high_state_character,$low_state_character);</A></STRONG><BR>
<DD>
This is more of a utility function than any real necessary function of the package.
Instead of joining all the elements of the array together in one long string, like <CODE>join()</CODE>,
it prints the elements of $array_ref to STDOUT, adding a newline (\n) after every $row_length_in_elements
elements. Additionally, if you include a $high_state_character and a $low_state_character,
it will print the $high_state_character (which can be more than one character) for every element that
has a true value, and the $low_state_character for every element that has a false value. 
If you do not supply a $high_state_character, or the $high_state_character is a null, empty, or 
undef string, the element's actual value is printed instead.
<P></P>
<DT><STRONG><A NAME="item_extend">$net-&gt;extend(\@array_of_hashes);</A></STRONG><BR>
<DD>
This allows you to re-apply any activations and thresholds using the same array ref that
you created the network with. This is useful for re-applying code ref activations after a <A HREF="#item_load"><CODE>load()</CODE></A>
call without having to type the code ref twice.
<P>You can also specify the extension in a simple array ref like this:</P>
<PRE>
        $net-&gt;extend([2,3,1]);
</PRE>
<P>This will simply add nodes where needed to bring the number of nodes in each layer up to its 
respective element. This works just like the respective new() constructor, above.</P>
<P>NOTE: Your net will probably require re-training after adding nodes.</P>
<P></P>
<DT><STRONG><A NAME="item_extend_layer">$net-&gt;extend_layer($layer,\%hash);</A></STRONG><BR>
<DD>
With this you can modify only one layer with its specifications in a hash reference. This hash
reference uses the same keys as for the last <A HREF="#item_new"><CODE>new()</CODE></A> constructor form, above.
<P>You can also specify just the number of nodes for the layer in this form:</P>
<PRE>
        $net-&gt;extend_layer(0,5);</PRE>
<P>This will set the number of nodes in layer 0 to 5. It has exactly the same effect as calling:
</P>
<PRE>
        $net-&gt;add_nodes(0,5);</PRE>
<P>See <A HREF="#item_add_nodes"><CODE>add_nodes()</CODE></A> below.</P>
<P>NOTE: Your net will probably require re-training after adding nodes.</P>
<P></P>
<DT><STRONG><A NAME="item_add_nodes">$net-&gt;add_nodes($layer,$total_nodes);</A></STRONG><BR>
<DD>
This method was created mainly to service the extend*() group of functions, but it 
can also be called independently. This will add nodes as needed to layer <CODE>$layer</CODE> to 
bring the total number of nodes in that layer up to <CODE>$total_nodes</CODE>.
<P></P>
<DT><STRONG><A NAME="item_uncrunch">$net-&gt;uncrunch($array_ref);</A></STRONG><BR>
<DD>
Uncrunches a map (array ref) into a scalar string of words separated by ' ' and returns the 
string. This is meant to be used as a counterpart to the <A HREF="#item_crunch"><CODE>crunch()</CODE></A> method, above, possibly to 
<A HREF="#item_uncrunch"><CODE>uncrunch()</CODE></A> the output of a <A HREF="#item_run"><CODE>run()</CODE></A> call. Consider the code below (also in ./examples/ex1.pl):

<PRE>
        use AI::NeuralNet::Mesh;
        my $net = AI::NeuralNet::Mesh-&gt;new(2,3);

        for (0..3) {
                $net-&gt;learn_set([
                        $net-&gt;crunch(&quot;I love chips.&quot;),  $net-&gt;crunch(&quot;That's Junk Food!&quot;),
                        $net-&gt;crunch(&quot;I love apples.&quot;), $net-&gt;crunch(&quot;Good, Healthy Food.&quot;),
                        $net-&gt;crunch(&quot;I love pop.&quot;),    $net-&gt;crunch(&quot;That's Junk Food!&quot;),
                        $net-&gt;crunch(&quot;I love oranges.&quot;),$net-&gt;crunch(&quot;Good, Healthy Food.&quot;)
                ]);
        }

        print $net-&gt;run_uc(&quot;I love corn.&quot;),&quot;\n&quot;;</PRE>
<P>On my system, this responds with, ``Good, Healthy Food.'' If you try <A HREF="#item_run_uc"><CODE>run_uc()</CODE></A> with
``I love pop.'', though, you will probably get ``Food! apples. apples.'' (At least it returns
that on my system.) As you can see, the associations are not yet perfect, but it can make
for some interesting demos!</P>
<P></P>
<DT><STRONG><A NAME="item_crunched">$net-&gt;crunched($word);</A></STRONG><BR>
<DD>
This will return undef if the word is not in the internal crunch list, or it will return the
index of the word if it exists in the crunch list.
<P>If the word is not in the list, it will set the internal error value with a text message
that you can retrieve with the <A HREF="#item_error"><CODE>error()</CODE></A> method, below.</P>
<P></P>
<DT><STRONG><A NAME="item_word">$net-&gt;word($word);</A></STRONG><BR>
<DD>
A function alias for crunched().
<P></P>
<DT><STRONG><A NAME="item_col_width">$net-&gt;col_width($width);</A></STRONG><BR>
<DD>
This is useful for formatting the debugging output of Level 4 if you are learning simple 
bitmaps. This will set the debugger to automatically insert a line break after that many
elements in the map output when dumping the currently run map during a learn loop.
<P>It will return the current width when called with a 0 or undef value.</P>
<P>The column width is preserved across <A HREF="#item_load"><CODE>load()</CODE></A> and <A HREF="#item_save"><CODE>save()</CODE></A> calls.</P>
<P></P>
<DT><STRONG><A NAME="item_random">$net-&gt;random($rand);</A></STRONG><BR>
<DD>
This will set the randomness factor for the network. Default is 0. When called 
with no arguments, or an undef value, it will return the current randomness value. When
called with a 0 value, it will disable randomness in the network. The randomness factor
is preserved across <A HREF="#item_load"><CODE>load()</CODE></A> and <A HREF="#item_save"><CODE>save()</CODE></A> calls.
<P></P>
<DT><STRONG><A NAME="item_const">$net-&gt;const($const);</A></STRONG><BR>
<DD>
This sets the run const. for the network. The run const. is a value that is added
to every input line when a set of inputs is <A HREF="#item_run"><CODE>run()</CODE></A> or <A HREF="#item_learn"><CODE>learn()</CODE></A>-ed, to prevent the
network from hanging on a 0 value. When called with no arguments, it returns the current
const. value. It defaults to 0.0001 on a newly-created network. The run const. value
is preserved across <A HREF="#item_load"><CODE>load()</CODE></A> and <A HREF="#item_save"><CODE>save()</CODE></A> calls.
<P></P>
<DT><STRONG><A NAME="item_error">$net-&gt;error();</A></STRONG><BR>
<DD>
Returns the last error message which occurred in the mesh, or undef if no errors have
occurred.
<P></P>
<DT><STRONG><A NAME="item_load_pcx">$net-&gt;load_pcx($filename);</A></STRONG><BR>

mesh.htm  view on Meta::CPAN

These functions return code refs to a Perl closure which does the actual work when
the time comes.</P>
<DL>
<DT><STRONG>range(0..X);</STRONG><BR>
<DD>
<DT><STRONG>range(@range);</STRONG><BR>
<DD>
<DT><STRONG>range(A,B,C);</STRONG><BR>
<DD>
<A HREF="#item_range"><CODE>range()</CODE></A> returns a closure limiting the output 
of that node to a specified set of values.
Good for use in output layers.
<P>Usage example:</P>
<PRE>
        $net-&gt;activation(4,range(0..5));</PRE>
<P>or (in the <A HREF="#item_new"><CODE>new()</CODE></A> hash constructor form):</P>
<PRE>
        ..
        { 
                nodes           =&gt;      1,
                activation      =&gt;      range(0..5)
        }
        ..</PRE>
<P>You can also pass an array containing the range
values (not an array ref), or you can pass a comma-
separated list of values as parameters:</P>
<PRE>
        $net-&gt;activation(4,range(@numbers));
        $net-&gt;activation(4,range(6,15,26,106,28,3));</PRE>
<P>Note: when using a <A HREF="#item_range"><CODE>range()</CODE></A> activator, train the
net TWICE on the data set, because the first time
the <A HREF="#item_range"><CODE>range()</CODE></A> function searches for the top value in
the inputs, and therefore results could fluctuate.
The second learning cycle guarantees more accuracy.</P>
<P>The actual code that implements the range closure is
a bit convoluted, so I will expand on it here as a simple
tutorial for custom activation functions.</P>
<PRE>
        = line 1 =      sub {
        = line 2 =              my @values = ( 6..10 );
        = line 3 =              my $sum   = shift;
        = line 4 =              my $self  = shift;
        = line 5 =              $self-&gt;{top_value}=$sum if($sum&gt;$self-&gt;{top_value});
        = line 6 =              my $index = intr($sum/$self-&gt;{top_value}*$#values);
        = line 7 =              return $values[$index];
        = line 8 =      }</PRE>
<P>Now, the actual function fits in one line of code, but I expanded it a bit
here. Line 2 creates our array of allowed output values. Lines 3 and
4 grab our parameters off the stack, which give us access to the
internals of this node. Line 5 checks to see if the sum output of this
node is higher than any previously encountered, and, if so, it sets
the marker higher. This also shows that you can use the $self reference
to maintain information across activations. This technique is also used
in the <A HREF="#item_ramp"><CODE>ramp()</CODE></A> activator. Line 6 computes the index into the allowed
values array by first scaling the $sum to be between 0 and 1 and then
expanding it to fit smoothly inside the number of elements in the array. Then
we simply round to an integer and pluck that index from the array and
use it as the output value for that node.</P>
<P>See? It's not that hard! Using custom activation functions, you could do
just about anything with the node that you want to, since you have
access to the node just as if you were a blessed member of that node's object.</P>
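<P>As a further illustration (purely hypothetical, not an included function), here is a custom
activation that clips the node's output to the range 0..1 and uses the node's hash to count
how often it clipped:</P>
<PRE>
        $net-&gt;activation(1, sub {
                my ($sum, $node) = @_;
                if ($sum &gt; 1) { $node-&gt;{clip_count}++; return 1; }   # clip high values
                return ($sum &lt; 0) ? 0 : $sum;                        # clip low values
        });</PRE>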


<DT><STRONG><A NAME="item_ramp">ramp($r);</A></STRONG><BR>
<DD>
<A HREF="#item_ramp"><CODE>ramp()</CODE></A> preforms smooth ramp activation between 0 and 1 if $r is 1, 
or between -1 and 1 if $r is 2. $r defaults to 1.
<P>You can get this into your namespace with the ':acts' export 
tag as so:
</P>
<PRE>
        use AI::NeuralNet::Mesh ':acts';</PRE>
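<P>Usage sketch (the layer index is arbitrary):</P>
<PRE>
        $net-&gt;activation(4, ramp(2));   # smooth ramp between -1 and 1</PRE>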
<P>Note: when using a <A HREF="#item_ramp"><CODE>ramp()</CODE></A> activator, train the
net at least TWICE on the data set, because the first 
time the <A HREF="#item_ramp"><CODE>ramp()</CODE></A> function searches for the top value in
the inputs, and therefore results could fluctuate.
The second learning cycle guarantees more accuracy.</P>
<P>No code to show here, as it is almost exactly the same as range().</P>
<P></P>
<DT><STRONG><A NAME="item_and_gate">and_gate($threshold);</A></STRONG><BR>
<DD>
Self explanatory, pretty much. This turns the node into a basic AND gate.
$threshold is used to decide if an input is true or false (1 or 0). If 
an input is below $threshold, it is false. $threshold defaults to 0.5.
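<P>Usage sketch (the layer index and threshold are arbitrary):</P>
<PRE>
        use AI::NeuralNet::Mesh ':acts';
        $net-&gt;activation(2, and_gate(0.75));   # inputs below 0.75 count as false</PRE>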

<P></P></DL>
<P>
<HR>
<H1><A NAME="variables">VARIABLES</A></H1>
<DL>
<DT><STRONG><A NAME="item_%24AI%3A%3ANeuralNet%3A%3AMesh%3A%3AConnector">$AI::NeuralNet::Mesh::Connector</A></STRONG><BR>
<DD>
This variable holds the fully qualified name of the function used to make the actual connections
between the nodes in the network. This variable contains '_c' by default, but if you use
it, be sure to give the fully qualified name of the method. For example, in
the ALN example, I use a connector in the main package called <CODE>tree()</CODE> instead of
the default connector. Before I call the <A HREF="#item_new"><CODE>new()</CODE></A> constructor, I use this line of code:
<PRE>
        $AI::NeuralNet::Mesh::Connector = 'main::tree'
</PRE>
<P>The tree() function is called as a blessed method when it is used internally, providing
access to the blessed reference in the first argument. See notes on CUSTOM NETWORK CONNECTORS,
below, for more information on creating your own custom connector.</P>
<P></P>
<DT><STRONG><A NAME="item_%24AI%3A%3ANeuralNet%3A%3AMesh%3A%3ADEBUG">$AI::NeuralNet::Mesh::DEBUG</A></STRONG><BR>
<DD>
This variable controls the verbosity level. It will not hurt anything to set this 
directly, but most people find it easier to set it using the <A HREF="#item_debug"><CODE>debug()</CODE></A> method, or 
any of its aliases.
<P></P></DL>
<P>
<HR>
<H1><A NAME="custom network connectors">CUSTOM NETWORK CONNECTORS</A></H1>
<P>Creating custom network connectors is a step up from average use of this module. 
However, it can be very useful in creating other styles of neural networks, other
than the default fully-connected feed-forward network.</P>
<P>You create a custom connector by setting the variable $AI::NeuralNet::Mesh::Connector
to the fully qualified name of the function used to make the actual connections
between the nodes in the network. This variable contains '_c' by default, but if you use
this variable, be sure to add the fully qualified name of the method. For example,
in the ALN example, I use a connector in the main package called <CODE>tree()</CODE> instead of
the default connector. Before I call the <A HREF="#item_new"><CODE>new()</CODE></A> constructor, I use this line of code:</P>
<PRE>
        $AI::NeuralNet::Mesh::Connector = 'main::tree'
</PRE>
<P>The tree() function is called as a blessed method when it is used internally, providing
access to the blessed reference in the first argument.</P>


Thanks to Randal and Michiel for spotting some documentation and makefile bugs in the last release.
Thanks to Rodin for continual suggestions and questions about the module and more.</PRE>
<P>
<HR>
<H1><A NAME="download">DOWNLOAD</A></H1>
<P>You can always download the latest copy of AI::NeuralNet::Mesh
from <A HREF="http://www.josiah.countystart.com/modules/get.pl?mesh:pod">http://www.josiah.countystart.com/modules/get.pl?mesh:pod</A></P>
<P>
<HR>
<H1><A NAME="mailing list">MAILING LIST</A></H1>
<P>A mailing list has been set up for AI::NeuralNet::Mesh and AI::NeuralNet::BackProp. 
The list is for discussion of AI and neural net related topics as they pertain to 
AI::NeuralNet::BackProp and AI::NeuralNet::Mesh. I will also announce in the group
each time a new release of AI::NeuralNet::Mesh is available.</P>
The list address is: <A HREF="mailto:ai-neuralnet-backprop@egroups.com">ai-neuralnet-backprop@egroups.com</A> <BR>
To subscribe, send a blank email to: <A HREF="mailto:ai-neuralnet-backprop-subscribe@egroups.com">ai-neuralnet-backprop-subscribe@egroups.com</A> 

<BR><BR><BR>
<HR>
<A HREF="http://www.josiah.countystart.com/modules/get.pl?mesh:(c)"><B>AI::NeuralNet::Mesh</B></A> - An optimized, accurate neural network Mesh. By <A HREF="mailto:jdb@wcoil.com"><B>Josiah Bryan</B></A>.

test.pl  view on Meta::CPAN


use AI::NeuralNet::Mesh;
$loaded = 1;
t 1;

my $net = new AI::NeuralNet::Mesh(2,2,1);
t $net;
t ($net->intr(0.51) eq 1);
t ($net->intr(0.00001) eq 0);
t ($net->intr(0.50001) eq 1);
t $net->learn_set([	
	[ 1,   1   ], [ 2    ] ,
	[ 1,   2   ], [ 3    ],
	[ 2,   2   ], [ 4    ],
	[ 20,  20  ], [ 40   ],
	[ 100, 100 ], [ 200  ],
	[ 150, 150 ], [ 300  ],
	[ 500, 500 ], [ 1000 ],
],degrade=>1);
t ($net->run([60,40])->[0] eq 100);
t $net->save("add.mesh");


