AI-NeuralNet-BackProp


BackProp.pm

	# Dummy constructor
    sub new {
    	bless {}, shift
	}	
	
	# Rounds floats to ints
	sub intr  {
		shift if(substr($_[0],0,4) eq 'AI::');
		# eval{} guards against errors in the conversion; return 0 on failure
		my $int = eval { int(sprintf("%.0f",shift)) };
		return defined($int) ? $int : 0;
	}
    
	
	# Receives input from another neuron. The sending neuron must
	# be registered as a synapse of this neuron for the input to
	# take effect.
	sub input {
		my $self 	 =	shift;
		my $sid		 =	shift;
		my $value	 =	shift;
		
		# We simply weight the value sent by the neuron. The neuron identifies itself to us
		# using the code we gave it when it registered itself with us. The code is in $sid
		# (the synapse ID), and we use that to track the weight of the connection.
		# This line simply multiplies the value by its weight and takes the integer of the result.
		$self->{SYNAPSES}->{LIST}->[$sid]->{VALUE}	=	intr($value	*	$self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT});
		$self->{SYNAPSES}->{LIST}->[$sid]->{FIRED}	=	1;                                 
		$self->{SYNAPSES}->{LIST}->[$sid]->{INPUT}	=	$value;
		
		# Debugger
		AI::NeuralNet::BackProp::out1("\nRecieved input of $value, weighted to $self->{SYNAPSES}->{LIST}->[$sid]->{VALUE}, synapse weight is $self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT} (sid is $sid for $self).\n");
		AI::NeuralNet::BackProp::out1((($self->input_complete())?"All synapses have fired":"Not all synapses have fired"));
		AI::NeuralNet::BackProp::out1(" for $self.\n");
		
		# Check and see if all synapses connected to this neuron have fired.
		# If they have, then generate this neuron's output value.
		$self->output() if($self->input_complete());
	}
	
	# Loops through and outputs to every neuron that this
	# neuron is registered as a synapse of.
	sub output {
		my $self	=	shift;
		my $size	=	$self->{OUTPUTS}->{SIZE} || 0;
		my $value	=	$self->get_output();
		for (0..$size-1) {
			AI::NeuralNet::BackProp::out1("Outputing to $self->{OUTPUTS}->{LIST}->[$_]->{PKG}, index $_, a value of $value with ID $self->{OUTPUTS}->{LIST}->[$_]->{ID}.\n");
			$self->{OUTPUTS}->{LIST}->[$_]->{PKG}->input($self->{OUTPUTS}->{LIST}->[$_]->{ID},$value);
		}
	}
	
	# Used internally by output().
	sub get_output {
		my $self		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $value		=	0;
		my $state		= 	0;
		my (@map,@weight);
	
	    # We loop through all the synapses connected to this neuron and add the weighted
	    # values together, saving them in a debugging list.
		for (0..$size-1) {
			$value	+=	$self->{SYNAPSES}->{LIST}->[$_]->{VALUE};
			$self->{SYNAPSES}->{LIST}->[$_]->{FIRED} = 0;
			
			$map[$_]=$self->{SYNAPSES}->{LIST}->[$_]->{VALUE};
			$weight[$_]=$self->{SYNAPSES}->{LIST}->[$_]->{WEIGHT};
		}
		                                              
		# Debugger
		AI::NeuralNet::BackProp::join_cols(\@map,5) if(($AI::NeuralNet::BackProp::DEBUG eq 3) || ($AI::NeuralNet::BackProp::DEBUG eq 2));
		AI::NeuralNet::BackProp::out2("Weights: ".join(" ",@weight)."\n");
		
		# Simply average the values and get the integer of the average.
		$state	=	intr($value/$size);
		
		# Debugger
		AI::NeuralNet::BackProp::out1("From get_output, value is $value, so state is $state.\n");
		
		# Possible future expansion for self excitation. Not currently used.
		$self->{LAST_VALUE}	=	$value;
		
		# Just return the $state
		return $state;
	}
	
	# Used by input() to check if all registered synapses have fired.
	sub input_complete {
		my $self		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $retvalue	=	1;
		
		# Very simple loop. Doesn't need explaining.
		for (0..$size-1) {
			$retvalue = 0 if(!$self->{SYNAPSES}->{LIST}->[$_]->{FIRED});
		}
		return $retvalue;
	}
	
	# Used to recursively adjust the weights of synapse input channels
	# to give a desired value. Designed to be called via 
	# AI::NeuralNet::BackProp::NeuralNetwork::learn().
	sub weight	{                
		my $self		=	shift;
		my $ammount		=	shift;
		my $what		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $value;
		AI::NeuralNet::BackProp::out1("Weight: ammount is $ammount, what is $what with size at $size.\n");
		      
		# Now this sub is the main cog in the learning wheel. It is called recursively on 
		# each neuron that has been bad (given incorrect output).
		for my $i (0..$size-1) {
			$value		=	$self->{SYNAPSES}->{LIST}->[$i]->{VALUE};

if(0) {
       
       		# Formula by Steve Purkis
       		# Converges very fast for low-value inputs. Has trouble converging on high-value
       		# inputs. Feel free to play with it and try to get it to work for high values.
			my $delta	=	$ammount * ($what - $value) * $self->{SYNAPSES}->{LIST}->[$i]->{INPUT};


			# ... snippet truncated; the line below is the tail of a multi-line $delta expression ...
							$self->{SYNAPSES}->{LIST}->[$i]->{WEIGHT};
			
			#print "($value,$what) delta:$delta\n";
			
			# Recursively apply
			$self->{SYNAPSES}->{LIST}->[$i]->{WEIGHT}  +=  $delta;
			$self->{SYNAPSES}->{LIST}->[$i]->{PKG}->weight($ammount,$what);
			    
		}
	}
	
	# Registers some neuron as a synapse of this neuron.           
	# This is called exclusively by connect(), except in
	# initialize_group() to connect the _map() package.
	sub register_synapse {
		my $self	=	shift;
		my $synapse	=	shift;
		my $sid		=	$self->{SYNAPSES}->{SIZE} || 0;
		$self->{SYNAPSES}->{LIST}->[$sid]->{PKG}		=	$synapse;
		$self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT}		=	1.00		if(!$self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT});
		$self->{SYNAPSES}->{LIST}->[$sid]->{FIRED}		=	0;       
		AI::NeuralNet::BackProp::out1("$self: Registering sid $sid with weight $self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT}, package $self->{SYNAPSES}->{LIST}->[$sid]->{PKG}.\n");
		$self->{SYNAPSES}->{SIZE} = ++$sid;
		return ($sid-1);
	}
	
	# Called via AI::NeuralNet::BackProp::NeuralNetwork::initialize_group() to 
	# form the neuron grids.
	# This just registers another neuron as a synapse to output to from this one;
	# we then ask that neuron to register us as an input connection, and we
	# save the sid that the output neuron returns.
	sub connect {
		my $self	=	shift;
		my $to		=	shift;
		my $oid		=	$self->{OUTPUTS}->{SIZE} || 0;
		AI::NeuralNet::BackProp::out1("Connecting $self to $to at $oid...\n");
		$self->{OUTPUTS}->{LIST}->[$oid]->{PKG}	=	$to;
 		$self->{OUTPUTS}->{LIST}->[$oid]->{ID}	=	$to->register_synapse($self);
		$self->{OUTPUTS}->{SIZE} = ++$oid;
		return $self->{OUTPUTS}->{LIST}->[$oid-1]->{ID};
	}
1;
			 
package AI::NeuralNet::BackProp;
	
	use Benchmark;          
	use strict;
	
	# Returns the number of elements in an array ref, undef on error
	sub _FETCHSIZE {
		my $a=$_[0];
		return undef if(substr($a,0,5) ne "ARRAY");
		my $x=0;
		$x++ foreach (@{$a});
		return $x;
	}

	# Debugging subs
	$AI::NeuralNet::BackProp::DEBUG  = 0;
	sub whowasi { (caller(1))[3] . '()' }
	sub debug { shift; $AI::NeuralNet::BackProp::DEBUG = shift || 0; } 
	sub out1  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG eq 1) }
	sub out2  { print  shift() if (($AI::NeuralNet::BackProp::DEBUG eq 1) || ($AI::NeuralNet::BackProp::DEBUG eq 2)) }
	sub out3  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG) }
	sub out4  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG eq 4) }
	
	# Rounds a floating-point to an integer with int() and sprintf()
	sub intr  {
		shift if(substr($_[0],0,4) eq 'AI::');
		# eval{} guards against errors in the conversion; return 0 on failure
		my $int = eval { int(sprintf("%.0f",shift)) };
		return defined($int) ? $int : 0;
	}
    
	# Used to format an array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $a		=	shift;
		my $b		=	shift;
		my $x;
		foreach my $el (@{$map}) { 
			my $str = ((int($el))?$a:$b);
			$str=$el."\0" if(!$a);
			print $str;
			$x++;
			if($x>$break-1) {
				print "\n";
				$x=0;
			}
		}
		print "\n";
	}
	
	# Returns the percentage difference between the elements of two
	# array refs of the exact same length (in elements).
	# Now calculates the actual difference in numerical value.
	sub pdiff {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $a1	=	shift;
		my $a2	=	shift;
		my $a1s	=	$#{$a1}; #AI::NeuralNet::BackProp::_FETCHSIZE($a1);
		my $a2s	=	$#{$a2}; #AI::NeuralNet::BackProp::_FETCHSIZE($a2);
		my ($a,$b,$diff,$t);
		$diff=0;
		#return undef if($a1s ne $a2s);	# must be same length
		for my $x (0..$a1s) {
			$a = $a1->[$x];
			$b = $a2->[$x];
			if($a!=$b) {
				if($a<$b){$t=$a;$a=$b;$b=$t;}
				$a=1 if(!$a);
				$diff+=(($a-$b)/$a)*100;
			}
		}


		# ... snippet truncated; the lines below are the tail of a different sub ...
			$tmp = $x if((@{$ref1})[$x] < (@{$ref1})[$tmp]);
		}
		return $tmp;
	}  
	
	# Returns a pcx object
	sub load_pcx {
		my $self	=	shift;
		return AI::NeuralNet::BackProp::PCX->new($self,shift);
	}	
	
	# Crunch a string of words into a map
	sub crunch {
		my $self	=	shift;
		my (@map,$ic);
		my @ws 		=	split(/[\s\t]/,shift);
		for my $a (0..$#ws) {
			$ic=$self->crunched($ws[$a]);
			if(!defined $ic) {
				$self->{_CRUNCHED}->{LIST}->[$self->{_CRUNCHED}->{_LENGTH}++]=$ws[$a];
				$map[$a]=$self->{_CRUNCHED}->{_LENGTH};
			} else {
				$map[$a]=$ic;
			}
		}
		return \@map;
	}
	
	# Finds if a word has been crunched.
	# Returns undef on failure, word index for success.
	sub crunched {
		my $self	=	shift;
		for my $a (0..$self->{_CRUNCHED}->{_LENGTH}-1) {
			return $a+1 if($self->{_CRUNCHED}->{LIST}->[$a] eq $_[0]);
		}
		return undef;
	}
	
	# Alias for crunched(), above
	sub word { crunched(@_) }
	
	# Uncrunches a map (array ref) into a string of words separated by spaces and returns the string
	sub uncrunch {
		my $self	=	shift;
		my $map = shift;
		my ($c,$el,$x);
		foreach $el (@{$map}) {
			$c .= $self->{_CRUNCHED}->{LIST}->[$el-1].' ';
		}
		return $c;
	}
	
	# Sets/gets the randomness factor in the network. Setting a value of 0 disables random factors.
	sub random {
		my $self	=	shift;
		my $rand	=	shift;
		return $self->{random}	if(!(defined $rand));
		$self->{random}	=	$rand;
	}
	
	# Sets/gets column width for printing lists in debug modes 1,3, and 4.
	sub col_width {
		my $self	=	shift;
		my $width	=	shift;
		return $self->{col_width}	if(!$width);
		$self->{col_width}	=	$width;
	}
	
	# Sets/Removes value ranging
	sub range {
		my $self	=	shift;
		my $ref		=	shift;
		my $b		=	shift;
		if(substr($ref,0,5) ne "ARRAY") {
			if(($ref == 0) && (!defined $b)) {
				$ref	= $self->crunch($ref);
				#print "\$ref is a string, crunching to ",join(',',@{$ref}),"\n";
			} else {
    			my $a	= $ref;
    			$a		= $self->crunch($a)->[0] if($a == 0);
				$b		= $self->crunch($b)->[0] if($b == 0);
				$_[++$#_] = $a;
    			$_[++$#_] = $b;
    			$ref	= \@_;
				#print "Found ranged definition, joined to ",join(',',@{$ref}),"\n";
			}
		}
		my $rA		=	0;
		my $rB		=	$#{$ref};
		my $rS		=	0; #shift;
		if(!$rA && !$rB) {
			$self->{rA}=$self->{rB}=-1;
			return undef;
		}
		if($rB<$rA){my $t=$rA;$rA=$rB;$rB=$t};
		$self->{rA}=$rA;
		$self->{rB}=$rB;
		$self->{rS}=$rS if($rS);
		$self->{rRef} = $ref;
		return $ref;
	}
	
	# Used internally to scale outputs to fit range
	sub _range {
		my $self	=	shift;  
		my $in		=	shift;
		my $rA		=	$self->{rA};
		my $rB		=	$self->{rB};
		my $rS		=	$self->{rS};
		my $r		=	$rB;#-$rA+1;
		return $in if(!$rA && !$rB);
		my $l		=	$self->{OUT}-1;
		my $out 	=	[];
		# Adjust for a maximum outside what we have seen so far
		for my $i (0..$l) {
			$rS=$in->[$i] if($in->[$i]>$rS);
		}
		#print "\$l:$l,\$rA:$rA,\$rB:$rB,\$rS:$rS,\$r:$r\n";
		# Loop through, convert values to a percentage of the maximum, then multiply
		# the percentage by the range and add the base of the range to get the final value
		for my $i (0..$l) {


[ ... snippet truncated; the POD documentation below picks up at the bottom of a network-topology diagram ... ]
     \  /
    mapper

In this diagram, each neuron is connected to one input of every
neuron in the layer below it, but there are no connections
between neurons in the same layer. The weights of a connection
are controlled by the receiving neuron, not the sending
neuron. (E.g. the sending neuron has no idea how much weight
its output carries when it sends it; it just sends its output and the
weighting is taken care of by the receiving neuron.) This is the 
method used to connect cells in every network built by this package.

Input is fed into the network via a call like this:

	use AI::NeuralNet::BackProp;
	my $net = new AI::NeuralNet::BackProp(2,2);
	
	my @map = (0,1);
	
	my $result = $net->run(\@map);
	

Now, this call would probably not give what you want, because
the network hasn't "learned" any patterns yet. But this
illustrates the call. run() also now allows strings to be used as
input. See run() for more information.


run() returns an array reference with $size elements (remember $size? $size
is what you passed as the second argument to the network
constructor). This array contains the results of the mapping. If
you ran the example exactly as shown above, $result would probably 
contain (1,1) as its elements. 
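
$result is an array reference, so a quick way to see the individual elements is
to dereference it; a minimal sketch:

	print join(",",@{$result}),"\n";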

To make the network learn a new pattern, you simply call the learn()
method with a sample input and the desired result, both array
references of $size length. Example:

	use AI::NeuralNet::BackProp;
	my $net = new AI::NeuralNet::BackProp(2,2);
	
	my @map = (0,1);
	my @res = (1,0);
	
	$net->learn(\@map,\@res);
	
	my $result = $net->run(\@map);

Now $result will contain (1,0), effectively flipping the input pattern
around. Obviously, the larger $size is, the longer it will take
to learn a pattern. learn() returns a string in the form of

	Learning took X loops and X wallclock seconds (X.XXX usr + X.XXX sys = X.XXX CPU).

With the X's replaced by the time or loop values for that learn() call. So,
to view the learning stats for every learn() call, you can just:
	
	print $net->learn(\@map,\@res);
	

If you call "$net->debug(4)" with $net being the 
refrence returned by the new() constructor, you will get benchmarking 
information for the learn function, as well as plenty of other information output. 
See notes on debug() in the METHODS section, below. 

If you do call $net->debug(1), it is a good 
idea to redirect your script's STDOUT to a file, as a lot of information is output. I often
use this command line:

	$ perl some_script.pl > .out

Then I can simply open the output in emacs or any other text editor and read it at my leisure,
rather than having to wait or page through it with 'more' as it scrolls by on the screen.

=head2 METHODS

=over 4

=item new AI::NeuralNet::BackProp($layers, $size [, $outputs, $topology_flag])

Returns a newly created neural network from an C<AI::NeuralNet::BackProp>
object. The network will have C<$layers> layers in it,
and each layer will have C<$size> neurons.

There is an optional parameter, $outputs, which specifies the number
of output neurons to provide. If $outputs is not specified, it
defaults to $size. $outputs may not exceed $size; if it does,
the new() constructor will return undef.
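
For example, this sketch (the layer counts here are arbitrary) creates a network
with 3 layers of 5 neurons each and 2 output neurons:

	use AI::NeuralNet::BackProp;
	my $net = new AI::NeuralNet::BackProp(3,5,2);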

The optional parameter, $topology_flag, defaults to 0 when not used. There are
three valid topology flag values:

B<0> I<default>
My feed-forward style: each neuron in layer X is connected to one input of every
neuron in layer Y. The best and most proven flag style.

	^   ^   ^               
	O\  O\ /O       Layer Y
	^\\/^/\/^
	| //|\/\|
	|/ \|/ \|		
	O   O   O       Layer X
	^   ^   ^


(Sorry about the bad art...I am no ASCII artist! :-)


B<1>
In addition to flag 0, each neuron in layer X is connected to every input of 
the neurons ahead of itself in layer X.

B<2> I<("L-U Style")>
No, its not "Learning-Unit" style. It gets its name from this: In a 2 layer, 3
neuron network, the connections form a L-U pair, or a W, however you want to look
at it.   


	^   ^   ^
	|   |   |
	O-->O-->O
	^   ^   ^
	|   |   |
	|   |   |
	O-->O-->O
	^   ^   ^

[ ... snippet truncated ... ]


=item $net->range(\@values);

This allows you to specify a range of values as an array reference. As the ranges are stored internally
as a reference, this is probably the most natural way. Any value specified by an element in @values
will be allowed as an output; no other values will be allowed.
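
For example, this sketch (with arbitrary values) allows only 0, 5, or 10 as outputs:

	$net->range([0,5,10]);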


=item $net->range("string of values");

With this construct you can specify a string of values to be allowed as the outputs. This string
is simply run through crunch() internally and saved as an array ref. This has the same effect
as calling:

	$net->range($net->crunch("string of values"));


=item $net->range("first string","second string");

This is the same as calling:

	$net->range($net->crunch("first string"),$net->crunch("second string"));

Or:	

	@range = ($net->crunch("first string"),
			  $net->crunch("second string"));
	$net->range(\@range);


=item $net->range($value1,$value2);

This is the same as calling:

	$net->range([$value1,$value2]);

Or:
	
	@range = ($value1,$value2);
	$net->range(\@range);

Both forms are equivalent.



=item $net->benchmarked();

UPDATE: benchmarked() now returns just the string from timestr() for the last run() or
learn() call. Exception: if the last call was a learn() loop, the string will be prefixed with "%d loops and ".

This returns a benchmark info string for the last learn() or the last run() call, 
whichever occurred later. It is easily printed as a string,
as follows:

	print $net->benchmarked() . "\n";





=item $net->debug($level)

Toggles debugging off if called with $level = 0 or no arguments. There are four levels
of debugging.

Level 0 ($level = 0) : Default, no debugging information printed. All printing is 
left to the calling script.

Level 1 ($level = 1) : This causes ALL debugging information for the network to be dumped
as the network runs. In this mode, it is a good idea to redirect your STDOUT to a file, especially
for large programs.

Level 2 ($level = 2) : A slightly-less verbose form of debugging, not as many internal 
data dumps.

Level 3 ($level = 3) : JUST prints weight mapping as weights change.

Level 4 ($level = 4) : JUST prints the benchmark info for EACH learn loop iteration, not just
learning as a whole. Also prints the percentage difference for each loop between current network
results and desired results, as well as the learning gradient ('increment').   

Level 4 is useful for seeing if you need to give a smaller learning increment to learn() .
I used level 4 debugging quite often in creating the letters.pl example script and the small_1.pl
example script.

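As a minimal sketch of the level 4 workflow described above:

	$net->debug(4);                  # dump per-loop benchmarks and differences
	print $net->learn(\@map,\@res);  # debugging information prints as it learns
	$net->debug(0);                  # toggle debugging back off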



=item $net->save($filename);

This will save the complete state of the network to disk, including all weights and any
words crunched with crunch() . Also saves any output ranges set with range() .

This has now been modified to use a simple flat-file text storage format, and it
no longer depends on any external modules.



=item $net->load($filename);

This will load from disk any network saved by save() and completely restore the internal
state to the point at which save() was called.
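
As a sketch of the save/load round trip (the filename here is arbitrary):

	$net->save("mynet.net");
	
	# ... later, perhaps in another script ...
	my $net2 = new AI::NeuralNet::BackProp(2,2);
	$net2->load("mynet.net");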




=item $net->join_cols($array_ref,$row_length_in_elements,$high_state_character,$low_state_character);

This is more of a utility function than any real necessary function of the package.
Instead of joining all the elements of the array together in one long string, like join() ,
it prints the elements of $array_ref to STDOUT, adding a newline (\n) after every $row_length_in_elements
elements have passed. Additionally, if you include a $high_state_character and a $low_state_character,
it will print the $high_state_character (which can be more than one character) for every element that
has a true value, and the $low_state_character for every element that has a false value. 
If you do not supply a $high_state_character, or the $high_state_character is a null or empty or 
undefined string, join_cols() will just print the numerical value of each element separated
by a null character (\0). join_cols() defaults to the latter behaviour.
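
For example, this sketch prints a six-element map as two rows of three, using "#"
for true elements and "." for false elements:

	my @map = (1,0,0,1,1,0);
	$net->join_cols(\@map,3,"#",".");
	
	# prints:
	# #..
	# ##.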



=item $net->pdiff($array_ref_A, $array_ref_B);

This function is used VERY heavily internally to calculate the difference in percent
between elements of the two array refs passed. It returns a %.10f (sprintf-format) 
percent string.
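
A quick sketch (the two maps here are arbitrary):

	print $net->pdiff([1,0,1],[1,1,1]),"\n";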


=item $net->p($a,$b);

Returns a floating point number which represents $a as a percentage of $b.
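
Assuming the natural reading of that description, ($a/$b)*100, a sketch:

	print $net->p(5,10),"\n";	# 5 is 50% of 10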



=item $net->intr($float);

Rounds a floating-point number to an integer using sprintf() and int() . Provides
better rounding than just calling int() on the float. Also used very heavily internally.
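
For example:

	print $net->intr(0.9),"\n";	# prints 1
	print int(0.9),"\n";		# prints 0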



=item $net->high($array_ref);

Returns the index of the element with the highest comparative value in the array ref passed.
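
For example:

	my @map = (2,7,4);
	print $net->high(\@map),"\n";	# prints 1, the index of the 7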


=item $net->crunch($string);

UPDATE: Now you can use a variable instead of using qw(). Strings will be split internally.
Do not use qw() to pass strings to crunch().

This splits a string passed with /[\s\t]/ into an array ref containing unique indexes
to the words. The words are stored in an internal array and preserved across load() and save()
calls. This is designed to be used to generate unique maps suitable for passing to learn() and 
run() directly. It returns an array ref.

The words are not duplicated internally. For example:

	$net->crunch("How are you?");

Will probably return an array ref containing 1,2,3. A subsequent call of:

    $net->crunch("How is Jane?");

Will probably return an array ref containing 1,4,5. Notice, the first element stayed
the same. That is because it already stored the word "How". So, each word is stored
only once internally and the returned array ref reflects that.


=item $net->uncrunch($array_ref);

Uncrunches a map (array ref) into a scalar string of words separated by ' ' and returns the 
string. This is meant to be used as a counterpart to the crunch() method, above, possibly to 
uncrunch() the output of a run() call. Consider the code below (also in ./examples/ex_crunch.pl):
                           
	use AI::NeuralNet::BackProp;
	my $net = AI::NeuralNet::BackProp->new(2,3);
	
	for (0..3) {		# Note: The four learn() statements below could 
						# be replaced with learn_set() to do the same thing,
						# but use this form here for clarity.

		$net->learn($net->crunch("I love chips."),  $net->crunch("That's Junk Food!"));
		$net->learn($net->crunch("I love apples."), $net->crunch("Good, Healthy Food."));
		$net->learn($net->crunch("I love pop."),    $net->crunch("That's Junk Food!"));
		$net->learn($net->crunch("I love oranges."),$net->crunch("Good, Healthy Food."));
	}
	
	my $response = $net->run($net->crunch("I love corn."));
	
	print $net->uncrunch($response),"\n";


On my system, this responds with "Good, Healthy Food." If you run the network with the
crunched string "I love pop.", though, you will probably get "Food! apples. apples." (At least
it returns that on my system.) As you can see, the associations are not yet perfect, but it can
make for some interesting demos!



=item $net->crunched($word);

This will return undef if the word is not in the internal crunch list, or it will return the
index of the word if it exists in the crunch list. 
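
For example, on a freshly created network (the listing above shows that crunch()
hands out 1-based indexes in the order words are first seen):

	$net->crunch("How are you?");
	print $net->crunched("are"),"\n";	# prints 2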


=item $net->col_width($width);

This is useful for formatting the debugging output of Level 4 if you are learning simple 
bitmaps. This will set the debugger to automatically insert a line break after that many
elements in the map output when dumping the currently run map during a learn loop.

It will return the current width when called with a 0 or undef value.
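
For example:

	$net->col_width(5);		# break lines after every 5 elements
	print $net->col_width(),"\n";	# prints 5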


=item $net->random($rand);

This will set the randomness factor of the network. Default is 0.001. When called 
with no arguments, or an undef value, it will return the current randomness value. When
called with a 0 value, it will disable random factors in the network. See NOTES on learning 
a 0 value in the input map with randomness disabled.
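
For example:

	$net->random(0.001);		# the default randomness factor
	print $net->random(),"\n";	# prints 0.001
	$net->random(0);		# disable random factors entirely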



=item $net->load_pcx($filename);

Oh, here's a treat... this routine will load a PCX-format file. (Yeah, I know ... ancient format ... but
it is the only one I could find specs for so I could write a loader in Perl. If anyone can get specs for
any other formats, or could write a loader for them, I would be very grateful!) Anyway, it loads a PCX-format
file that is exactly 320x200 with 8 bits per pixel, in pure Perl. It returns a blessed reference to 
an AI::NeuralNet::BackProp::PCX object, which supports the following routines/members. See the example 
files ex_pcxl.pl and ex_pcx.pl in the ./examples/ directory.



=item $pcx->{image}

This is an array reference to the entire image. The array contains exactly 64000 elements; each
element contains a number corresponding to an index of the palette array, detailed below.
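
Since the image is 320x200 and stored in horizontal-line order (the same
$y*$width+$x layout that get_block(), below, uses), a pixel at hypothetical
coordinates ($x,$y) can presumably be fetched like this:

	my $palette_index = $pcx->{image}->[$y*320 + $x];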



=item $pcx->{palette}

This is an array ref to an AoH (array of hashes). Each element has the following three keys:
	
	$pcx->{palette}->[0]->{red};
	$pcx->{palette}->[0]->{green};
	$pcx->{palette}->[0]->{blue};

Each is in the range of 0..63, corresponding to its named color component.



=item $pcx->get_block($array_ref);

Returns a rectangular block defined by an array ref in the form of:
	
	[$left,$top,$right,$bottom]

These must be in the range of 0..319 for $left and $right, and the range of 0..199 for
$top and $bottom. The block is returned as an array ref with horizontal lines in sequential order.
I.e. to get a pixel from [2,5] in the block, where the block width ($right-$left) was 20, the element in 
the array ref containing the contents of coordinates [2,5] would be found at [5*20+2] ($y*$width+$x).
    
	print $pcx->get_block([0,0,20,50])->[5*20+2];

This would print the contents of the element at block coords [2,5].



