AI-NeuralNet-Mesh


Changes

        - 3 Major features:
                - separate layer sizes
                - custom node activations
                - increased learning speed

0.43  Wed Sep 14 03:13:01 2000
        - Third release, by Josiah Bryan
        - Several bug fixes
                - fixed 'flag' option on learn_set()
                - fixed multiple-output bug
                - fixed learning gradient error
        - Improved learning function to not degrade increment automatically
        - Added CSV-style dataset loader
        - Added Export tags
        - Added four custom node activations, including range and ramp
        - Added several misc. extra functions
        - Added ALN example demo


Mesh.pm

	}
    
    # See POD for usage
    sub extend {
    	my $self	=	shift;
    	my $layers	=	shift;
    
    	# Looks like we got ourselves a layer specs array
		if(ref($layers) eq "ARRAY") { 
			if($self->{total_layers}!=$#{$layers}) {
				$self->{error} = "extend(): Cannot add new layers. Create a new network to add layers.\n";
				return undef;
			}
			if(ref($layers->[0]) eq "HASH") {
				$self->{total_nodes}	=	0;
				$self->{inputs}			=	$layers->[0]->{nodes};
				$self->{nodes}			=	$layers->[0]->{nodes};
				$self->{outputs}		=	$layers->[$#{$layers}]->{nodes};
				for (0..$#{$layers}){
					$self->extend_layer($_,$layers->[$_]);
					$self->{layers}->[$_] =$layers->[$_]->{nodes};

Mesh.pm

			} else {
				$self->{inputs}			= $layers->[0];
			    $self->{nodes}			= $layers->[0];
				$self->{outputs}		= $layers->[$#{$layers}];
				$self->{total_nodes}	= 0;
				for (0..$self->{total_layers}){$self->extend_layer($_,$layers->[$_])}
				$self->{layers} 		= $layers;
				for (0..$self->{total_layers}){$self->{total_nodes}+= $self->{layers}->[$_]}
			}
		} else {
			$self->{error} = "extend(): Invalid argument type.\n";
			return undef;
		}
		return 1;
	}
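    
    	# Illustrative usage (not in the original source; 'linear' is one
    	# documented activation type). Per the layer-count check above, the
    	# spec array must have one entry per existing layer:
    	#
    	#   $net->extend([ { nodes => 2 },
    	#                  { nodes => 5, activation => 'linear' },
    	#                  { nodes => 1 } ]);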
    
    # See POD for usage
    sub extend_layer {
    	my $self	=	shift;
    	my $layer	=	shift || 0;
    	my $specs	=	shift;
    	if(!$specs) {
    		$self->{error} = "extend_layer(): You must provide specs to extend layer $layer with.\n";
    		return undef;
    	}
    	if(ref($specs) eq "HASH") {
    		$self->activation($layer,$specs->{activation}) if($specs->{activation});
    		$self->threshold($layer,$specs->{threshold})   if($specs->{threshold});
    		$self->mean($layer,$specs->{mean})             if($specs->{mean});
    		return $self->add_nodes($layer,$specs->{nodes});
    	} else { 
    		return $self->add_nodes($layer,$specs);
    	}
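    
    	# Illustrative usage (not in the original source): grow layer 1 to
    	# five nodes and set its threshold via a hash spec, as handled above.
    	#
    	#   $net->extend_layer(1, { nodes => 5, threshold => 1 });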

Mesh.pm

    sub add_nodes {
    	no strict 'refs';
		my $self	=	shift;
    	my $layer	=	shift;
    	my $nodes	=	shift;
    	my $n		=	0;
		my $more	=	$nodes - $self->{layers}->[$layer] - 1;	# one less than the count to add; the 0..$more loops below add $more+1 nodes
        d("Checking on extending layer $layer to $nodes nodes (check:$self->{layers}->[$layer]).\n",9);
        return 1 if ($nodes == $self->{layers}->[$layer]);
        if ($self->{layers}->[$layer]>$nodes) {
        	$self->{error} = "add_nodes(): I cannot remove nodes from the network with this version of my module. You must create a new network to remove nodes.\n";
        	return undef;
        }
        d("Extending layer $layer by $more.\n",9);
        for (0..$more){$self->{mesh}->[$#{$self->{mesh}}+1]=AI::NeuralNet::Mesh::node->new($self)}
        for(0..$layer-2){$n+=$self->{layers}->[$_]}
		$self->_c($n,$n+$self->{layers}->[$layer-1],$#{$self->{mesh}}-$more+1,$#{$self->{mesh}});
		$self->_c($#{$self->{mesh}}-$more+1,$#{$self->{mesh}},$n+$self->{layers}->[$layer],$n+$self->{layers}->[$layer]+$self->{layers}->[$layer+1]);
    }
        
        

Mesh.pm


	# See POD for usage
	sub learn {
    	my $self	=	shift;					
    	my $inputs	=	shift;					# input set
    	my $outputs	=	shift;					# target outputs
    	my %args	=	@_;						# get args into hash
    	my $inc		=	$args{inc} || 0.002;	# learning gradient
    	my $max     =   $args{max} || 1024;     # max iterations
    	my $degrade =   $args{degrade} || 0;    # enable gradient degrading
		my $error   = 	(defined $args{error} && $args{error}>-1) ? $args{error} : -1;
  		my $dinc	=	0.0002;					# amount to adjust gradient by
		my $diff	=	100;					# error margin between results
		my $start	=	new Benchmark;			
		$inputs		=	$self->crunch($inputs)  if(!ref($inputs));	# crunch string inputs into a map
		$outputs	=	$self->crunch($outputs) if(!ref($outputs));
		my ($flag,$ldiff,$cdiff,$_mi,$y); 
		my $loop	=	0;
		while(!$flag && ($max ? $loop<$max : 1)) {
    		my $b	=	new Benchmark;
    		my $got	=	$self->run($inputs);
    		$diff 	=	pdiff($got,$outputs);
		    $flag	=	1;
    		    		
		    if(($error>-1 ? $diff<$error : 0) || !$diff) {
				$flag=1;
				last;
			}
			
			if($degrade) {
				$inc   -= ($dinc*$diff);
				
				if($diff eq $ldiff) {
					$cdiff++;
					$inc += ($dinc*$diff)+($dinc*$cdiff*10);

Mesh.pm



	# See POD for usage
	sub learn_set {
		my $self	=	shift;
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2;
		my $inc		=	$args{inc};
		my $max		=	$args{max};
	    my $error	=	$args{error};
	    my $degrade	=	$args{degrade};
	    my $p		=	(defined $args{flag})  ? $args{flag}   : 1;
	    my $row		=	(defined $args{row})   ? $args{row}+1  : 1;
	    my $leave	=	(defined $args{leave}) ? $args{leave}  : 0;
		for my $x (0..$len-$leave) {
			d("Learning set $x...\n",4);
			my $str = $self->learn( $data->[$x*2],
					  		  		$data->[$x*2+1],
					    			inc=>$inc,
					    			max=>$max,
					    			error=>$error,
					    			degrade=>$degrade);
		}
			
		if ($p) {
			# Return the percentage difference between the target row and
			# what the network now produces for that row's inputs.
			return pdiff($data->[$row],$self->run($data->[$row-1]));
		} else {
			# flag => 0: return the raw difference on the first output node.
			return $data->[$row]->[0]-$self->run($data->[$row-1])->[0];
		}
	}
	

Mesh.pm

					$w.="$self->{mesh}->[$n]->{_inputs}->[$z]->{weight},";
				}
				print FILE "n$n=$w$self->{mesh}->[$n]->{activation},$self->{mesh}->[$n]->{threshold},$self->{mesh}->[$n]->{mean}\n";
				$n++;
			}
		}
		
	    close(FILE);
	    
	    if(!(-f $file)) {
	    	$self->{error} = "Error writing to \"$file\".";
	    	return undef;
	    }
	    
	    return $self;
	}
        
	# Load entire network state from disk.
	sub load {
		my $self		=	shift;
		my $file		=	shift;  

Mesh.pm

	    }
	    
	    my %db;
	    for my $line (@lines) {
	    	chomp($line);
	    	my ($a,$b) = split /=/, $line;
	    	$db{$a}=$b;
	    }
	    
	    if(!$db{"header"}) {
	    	$self->{error} = "Invalid format.";
	    	return undef;
	    }
	    
	    return $self->load_old($file) if($self->version($db{"header"})<0.21);
	    
	    if($load_flag) {
		    undef $self;
	        $self = AI::NeuralNet::Mesh->new([split(',',$db{layers})]);
		} else {
			$self->{inputs}			= $db{inputs};

Mesh.pm

		return $self;
	}
	
	# Load entire network state from disk.
	sub load_old {
		my $self		=	shift;
		my $file		=	shift;  
		my $load_flag   =	shift;
		
	    if(!(-f $file)) {
	    	$self->{error} = "File \"$file\" does not exist.";
	    	return undef;
	    }
	    
	    open(FILE, $file) or do { $self->{error}="Cannot open \"$file\": $!"; return undef; };
	    my @lines=<FILE>;
	    close(FILE);
	    
	    my %db;
	    for my $line (@lines) {
	    	chomp($line);
	    	my ($a,$b) = split /=/, $line;
	    	$db{$a}=$b;
	    }
	    
	    if(!$db{"header"}) {
	    	$self->{error} = "Invalid format.";
	    	return undef;
	    }
	    
	    if($load_flag) {
		    undef $self;
	
			# Create new network
			$self = AI::NeuralNet::Mesh->new($db{"layers"},
		    			 				 	 $db{"nodes"},
		    						      	 $db{"outputs"});

Mesh.pm

		}
	}
	
	  
	# Returns a PCX::Loader object
	sub load_pcx {
		my $self	=	shift;
		my $file	=	shift;
		eval('use PCX::Loader');
		if($@) {
			$self->{error}="Cannot load PCX::Loader module: $@";
			return undef;
		}
		return PCX::Loader->new($self,$file);
	}	
	
	# Crunch a string of words into a map
	sub crunch {
		my $self	=	shift;
		my @ws 		=	split(/[\s\t]/,shift);
		my (@map,$ic);

Mesh.pm

		return \@map;
	}
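	
	# Illustrative usage (not in the original source): crunch() maps words
	# to 1-based indices; uncrunch(), below, turns a map back into words.
	#
	#   my $map = $net->crunch("I love corn");
	#   print join(' ', $net->uncrunch($map)), "\n";	# "I love corn"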
	
	# Finds if a word has been crunched.
	# Returns undef on failure, or the word's 1-based index on success.
	sub crunched {
		my $self	=	shift;
		for my $a (0..$self->{_crunched}->{_length}-1) {
			return $a+1 if($self->{_crunched}->{list}->[$a] eq $_[0]);
		}
		$self->{error} = "Word \"$_[0]\" not found.";
		return undef;
	}
	
	# Alias for crunched(), above
	sub word { crunched(@_) }
	
	# Uncrunches a map (array ref) into an array of words (not an array ref) 
	# and returns array
	sub uncrunch {
		my $self	=	shift;

Mesh.pm

	# Return benchmark time from last learn() operation.
	sub benchmark {
		shift->{benchmarked};
	}
	
	# Same as benchmark()
	sub benchmarked {
		benchmark(shift);
	}
	
	# Return the last error in the mesh, or undef if no error.
	sub error {
		my $self = shift;
		return undef if !$self->{error};
		chomp($self->{error});
		return $self->{error}."\n";
	}
	
	# Used to format array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 

Mesh.pm

See AUTHOR, below, for contact info.

This module is designed to also be a customizable, extensible 
neural network simulation toolkit. Through a combination of setting
the $Connector variable and using custom activation functions, as
well as basic package inheritance, you can simulate many different
types of neural network structures with very little new code written
by you.

In this module I have included a more accurate form of "learning" for the
mesh. This form performs descent toward a local error minimum (0) on a 
directional delta, rather than the desired value for that node. This allows
for better and more accurate results with larger datasets. This module also
uses a simpler recursion technique which, surprisingly, is more accurate than
the original technique that I've used in other ANNs.
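
As a quick, hedged sketch of that flexibility (illustrative only; it
assumes the arrayref form of new() shown in load(), and that a CODE ref
activation receives the node's summed input):

	use AI::NeuralNet::Mesh;
	
	# A small mesh: 2 inputs, 3 hidden nodes, 1 output.
	my $net = AI::NeuralNet::Mesh->new([2, 3, 1]);
	
	# Custom activation on layer 1: a hard threshold. (CODE ref
	# activations work, but save() does not preserve them.)
	$net->activation(1, sub {
		my $sum = shift;	# assumed: the node's summed input
		return ($sum > 0.5) ? 1 : 0;
	});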

=head1 EXPORTS

This module exports three functions by default:

	range

Mesh.pm

	
Note: the old method of calling crunch() on the values still works just as well.

The first two arguments may be array refs (or now, strings), and they may be 
of different lengths.

Options should be written in hash form. There are four options:
	 
	 inc      =>    $learning_gradient
	 max      =>    $maximum_iterations
	 error    =>    $maximum_allowable_percentage_of_error
	 degrade  =>    $degrade_increment_flag
	 

$learning_gradient is an optional value used to adjust the weights of the internal
connections. If $learning_gradient is omitted, it defaults to 0.002.
 
$maximum_iterations is the maximum number of iterations the loop should do.
It defaults to 1024.  Set it to 0 if you never want the loop to quit before
the pattern is perfectly learned.

$maximum_allowable_percentage_of_error is the maximum allowable error to have. If 
this is set, then learn() will return when the percentage difference between the
actual results and desired results falls below $maximum_allowable_percentage_of_error.
If you do not include 'error', or $maximum_allowable_percentage_of_error is set to -1,
then learn() will not return until it gets an exact match for the desired result OR it
reaches $maximum_iterations.

$degrade_increment_flag is a simple flag used to allow/disallow increment degrading
during learning based on a product of the error difference with several other factors.
$degrade_increment_flag is off by default. Setting $degrade_increment_flag to a true
value turns increment degrading on. 

In previous module releases $degrade_increment_flag was not used, as increment degrading
was always on. In this release I have looked at several other network types as well
as several texts and decided that it would be better not to use increment degrading by
default. The option is still there for those that feel the inclination to use it. I have
found some cases that do need the degrade flag to learn at a reasonable speed; see
test.pl for an example. Without the degrade flag, test.pl would take a very long time
to learn.
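
A hedged usage sketch (the input/output values are illustrative; the
option defaults are those listed above):

	# Learn one input/output pair with explicit options.
	$net->learn([0, 1], [1],
	            inc     => 0.002,   # learning gradient
	            max     => 1024,    # iteration cap
	            error   => 5,       # return once below 5% error
	            degrade => 1);      # turn increment degrading on
	
	# Strings work too; they are crunch()ed automatically.
	$net->learn('corn', 'cob');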

Mesh.pm

Level 1 ($level = 1) : Displays the activity between nodes, prints what values were
received and what they were weighted to.

Level 2 ($level = 2) : Just prints info from the learn() loop, in the form of "got: X, wanted Y"
type of information. This is about the third most useful debugging level, after level 12 and
level 4.

Level 3 ($level = 3) : I don't think I included any level 3 debugs in this version.

Level 4 ($level = 4) : This level is the one I use most. It is only used during learning. It
displays the current error (difference between actual outputs and the target outputs you
asked for), as well as the current loop number and the benchmark time for the last learn cycle.
Also printed are the actual outputs and the target outputs below the benchmark times.

Level 12 ($level = 12) : Level 12 prints a dot (period) [.] after each learning loop is
complete. This is useful for letting the user know that stuff is happening, but without
having to display any of the internal variables. I use this in the ex_aln.pl demo,
as well as the ex_agents.pl demo.

Toggles debugging off when called with no arguments. 
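
For instance (assuming the accessor is named debug(), which this excerpt
truncates before confirming):

	$net->debug(4);		# level 4: error, loop count, benchmark times
	$net->debug();		# no arguments: toggles debugging off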

Mesh.pm

This will save the complete state of the network to disk, including all weights and any
words crunched with crunch(). It also saves the layer sizes and activations of the network.

NOTE: The only activation type NOT saved is the CODE ref type, which must be set again
after loading.

This uses a simple flat-file text storage format, and therefore the network files should
be fairly portable.

This method will return undef if there was a problem with writing the file. If there is an
error, it will set the internal error message, which you can retrieve with the error() method,
below.

If there were no errors, it will return a reference to $net.
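
A minimal save-and-check sketch (the filename is illustrative):

	$net->save('mesh.net')
		or print "Save failed: ", $net->error();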


=item $net->load($filename);

This will load from disk any network saved by save() and completely restore the internal
state to the point at which save() was called.

If the file is of an invalid file type, then load() will
return undef. Use the error() method, below, to print the error message.

If there were no errors, it will return a reference to $net.

UPDATE: $filename can now be a newline-separated set of mesh data. This enables you
to do $net->load(join("\n",<DATA>)) and other fun things. I added this mainly
for a demo I'm writing but am not quite done with yet. So, Cheers!
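
A short sketch (the filename is illustrative; the second form uses the
DATA trick from the UPDATE note above):

	$net->load('mesh.net') or die $net->error();
	
	# Or restore mesh data embedded after __DATA__:
	$net->load(join("\n", <DATA>)) or die $net->error();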



=item $net->activation($layer,$type);

This sets the activation type for layer C<$layer>.

Mesh.pm

that on my system.) As you can see, the associations are not yet perfect, but it can make
for some interesting demos!



=item $net->crunched($word);

This will return undef if the word is not in the internal crunch list, or it will return the
index of the word if it exists in the crunch list. 

If the word is not in the list, it will set the internal error value with a text message
that you can retrieve with the error() method, below.
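
A small sketch (the word is illustrative; indices are 1-based, matching
the internal crunch list):

	my $index = $net->crunched('corn');
	print $net->error() unless defined $index;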

=item $net->word($word);

A function alias for crunched().


=item $net->col_width($width);

This is useful for formatting the debugging output of Level 4 if you are learning simple 
bitmaps. This will set the debugger to automatically insert a line break after that many

Mesh.pm


=item $net->const($const);

This sets the run const. for the network. The run const. is a value that is added
to every input line when a set of inputs are run() or learn() -ed, to prevent the
network from hanging on a 0 value. When called with no arguments, it returns the current
const. value. It defaults to 0.0001 on a newly-created network. The run const. value
is preserved across load() and save() calls.
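
For example:

	$net->const(0.0001);	# set the run constant (the default value)
	my $c = $net->const();	# no arguments: returns the current value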


=item $net->error();

Returns the last error message which occurred in the mesh, or undef if no errors have
occurred.


=item $net->load_pcx($filename);

NOTE: To use this function, you must have PCX::Loader installed. If you do not have
PCX::Loader installed, it will return undef and store an error for you to retrieve with 
the error() method, below.

This is a treat... this routine will load a PCX-format file (yes, I know, an ancient 
format, but it is the only one I could find specs for so that I could write the loader 
in pure Perl. If anyone can get specs for any other formats, or could write a loader 
for them, I would be very grateful!) The file must be exactly 320x200 with 8 bits 
per pixel. It returns a blessed reference to a PCX::Loader object, which supports 
the following routines/members. See example files ex_pcx.pl and ex_pcxl.pl in 
the ./examples/ directory.

See C<perldoc PCX::Loader> for information on the methods of the object returned.

Mesh.pm


Neural nets are formed by simulated neurons connected together much the same
way the brain's neurons are. Because of this, neural networks are able to
associate and generalize without rules.  They have solved problems in pattern
recognition, robotics, speech processing, financial predicting and signal
processing, to name a few.

One of the first impressive neural networks was NetTalk, which read in ASCII
text and correctly pronounced the words (producing phonemes which drove a
speech chip), even those it had never seen before.  Designed by Johns Hopkins
biophysicist Terry Sejnowski and Charles Rosenberg of Princeton in 1986,
this application made the backpropagation training algorithm famous.  Using
the same paradigm, a neural network has been trained to classify sonar
returns from undersea mines and rocks.  This classifier, designed by
Sejnowski and R. Paul Gorman, performed better than a nearest-neighbor
classifier.

The kinds of problems best solved by neural networks are those that people
are good at, such as association, evaluation and pattern recognition.
Problems that are difficult to compute and do not require perfect answers,
just very good answers, are also best done with neural networks.  A quick,

Mesh.pm

designers are working on weather forecasts by neural networks (myself
included).  Currently, doctors are developing medical neural networks as an
aid in diagnosis.  Attorneys and insurance companies are also working on
neural networks to help estimate the value of claims.

Neural networks are poor at precise calculations and serial processing. They
are also unable to predict or recognize anything that does not inherently
contain some sort of pattern.  For example, they cannot predict the lottery,
since this is a random process.  It is unlikely that a neural network could
be built which has the capacity to think as well as a person does for two
reasons.  Neural networks are terrible at deduction, or logical thinking, and
the human brain is just too complex to completely simulate.  Also, some
problems are too difficult for present technology.  Real vision, for
example, is a long way off.

In short, neural networks are poor at precise calculations, but good at
association, evaluation, and pattern recognition.


=head1 EXAMPLES

examples/ex_add2.pl

	 my @list;
	
	 my $t1=new Benchmark;
	 for my $a (1..$top)
	 {
	  print "Outer Loop: $a : ";
	
	  $forgetfulness = $net->learn_set( \@data,
	           inc  => $inc,
	           max  => 500,
	           error => -1);
	
	  print "Forgetfulness: $forgetfulness %\n";
	
	 }
	 my $t2=new Benchmark;
	
	 $runtime = timediff($t2,$t1);
	 print "run took ",timestr($runtime),"\n";
	
	

examples/ex_aln.pl

	    # load activations. We have to add this up here because next
	    # thing we do is check if they passed a file name as $leaves,
	    # and we need to have our new load sub already in place before
	    # we try to load anything in $leaves.
	    *{'AI::NeuralNet::Mesh::load'} = sub {
	        my $self		=	shift;
			my $file		=	shift;  
			my $load_flag   =	shift;
			
		    if(!(-f $file)) {
		    	$self->{error} = "File \"$file\" does not exist.";
		    	return undef;
		    }
		    
		    open(FILE, $file) or do { $self->{error}="Cannot open \"$file\": $!"; return undef; };
		    my @lines=<FILE>;
		    close(FILE);
		    
		    my %db;
		    for my $line (@lines) {
		    	chomp($line);
		    	my ($a,$b) = split /=/, $line;
		    	$db{$a}=$b;
		    }
		    
		    if(!$db{"header"}) {
		    	$self->{error} = "Invalid format.";
		    	return undef;
		    }
		    
		    return $self->load_old($file) if($self->version($db{"header"})<0.21);
		    
		    if($load_flag) {
			    undef $self;
		        $self = Tree($db{inputs},$db{outputs});
			} else {
				$self->{inputs}			= $db{inputs};

examples/ex_aln.pl

			my $b2 = intr($self->{mesh}->[$_]->{_inputs}->[1]->{weight});
			$f=1 if( $b1 &&  $b2);
			$f=2 if(!$b1 && !$b2);
			my $lo = $self->{_last_output};
			if($lo!=$target) {
				# Adjust right lead if $lo, else adjust left lead
				($target &&  $lo)?$self->{_inputs}->[0]->{weight}++:$self->{_inputs}->[0]->{weight}--;
				($target && !$lo)?$self->{_inputs}->[1]->{weight}++:$self->{_inputs}->[1]->{weight}--;
			}
			# Thanks to Rolf Mandersheidd for this set of nested conditions on one line.
			# This determines heuristic error responsibility on the children
			# and recurses the error up the tree.
			if($lo!=$target || $f!=($lo?1:2)) {
				$self->{_inputs}->[1]->{node}->adjust_weight($inc,$target) if($self->{_inputs}->[1]->{node});
			} else {
				$self->{_inputs}->[0]->{node}->adjust_weight($inc,$target) if($self->{_inputs}->[0]->{node});
			}
		};

	    # Set our custom node connector
		$AI::NeuralNet::Mesh::Connector = 'main::_c_tree'; 
		

examples/ex_dow.pl

		# Make it learn the whole dataset $top times
		my @list;
		my $top=1;
		for my $a (0..$top) {
			my $t1=new Benchmark;
			print "\n\nOuter Loop: $a\n";
			
			# Test forgetfulness
			my $f = $net->learn_set(\@data,	inc		=>	0.2,	
											max		=>	2000,
											error	=>	-1);
			
			# Print it 
			print "\n\nForgetfullness: $f%\n";

			# Save net to disk				
            $net->save('DOW.mesh');
            
			my $t2=new Benchmark;
			my $td=timediff($t2,$t1);
			print "\nLoop $a took ",timestr($td),"\n";

examples/ex_pcxl.pl

		
		This just demonstrates simple usage of the pcx loader.
	
=cut

	use AI::NeuralNet::Mesh;
	
	my $net=AI::NeuralNet::Mesh->new(2,2);
	
	my $img = $net->load_pcx("josiah.pcx");
	print "ERROR: ",$net->error(),"\n" if($net->error());
	
	$net->join_cols($img->get_block([0,0,50,50]),50,0);
	

examples/ex_wine.pl

		for my $a (0..$top) {
			print "\n\nOuter Loop: $a\n";
			
			for(0..$#{$sets}) {
				next if(!defined $sets->[$_]->[0]);
				my $t1=new Benchmark;
				
				# Test forgetfulness
				my $f = $net->learn_set($sets->[$_],	inc		=>	0.2,	
														max		=>	2000,
														error	=>	0.01,
														leave	=>	2);
				
				# Print it 
				print "\n\nForgetfullness: $f%\n";
	
				my $t2=new Benchmark;
				my $td=timediff($t2,$t1);
				print "\nLoop [$a,$_] took ",timestr($td),"\n";
			}
			
