AI-NeuralNet-BackProp


BackProp.pm  view on Meta::CPAN

	use strict;
	
	# Dummy constructor
    sub new {
    	bless {}, shift
	}	
	
	# Rounds floats to ints
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
    	# eval guards against non-numeric input; returns 0 on failure
    	my $int = eval { int(sprintf("%.0f",shift)) };
    	return $@ ? 0 : $int;
	}
    
	
	# Receives input from other neurons. They must
	# be registered as a synapse of this neuron for their
	# input to be accepted.
	sub input {
		my $self 	 =	shift;
		my $sid		 =	shift;

BackProp.pm  view on Meta::CPAN

			#
			#next if($value eq $what);
			
			# Adjust increment by the weight of the synapse of 
			# this neuron & apply direction delta
			my $delta = 
					$ammount * 
						($value<$what?1:-1) * 
							$self->{SYNAPSES}->{LIST}->[$i]->{WEIGHT};
			
			#print "($value,$what) delta:$delta\n";
			
			# Recursively apply
			$self->{SYNAPSES}->{LIST}->[$i]->{WEIGHT}  +=  $delta;
			$self->{SYNAPSES}->{LIST}->[$i]->{PKG}->weight($ammount,$what);
			    
		}
	}
	
	# Registers some neuron as a synapse of this neuron.           
	# This is called exclusively by connect(), except for

BackProp.pm  view on Meta::CPAN

		my ($b,$x);
		return undef if(substr($a,0,5) ne "ARRAY");
		foreach $b (@{$a}) { $x++ };
		return $x;
	}

	# Debugging subs
	$AI::NeuralNet::BackProp::DEBUG  = 0;
	sub whowasi { (caller(1))[3] . '()' }
	sub debug { shift; $AI::NeuralNet::BackProp::DEBUG = shift || 0; } 
	sub out1  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG eq 1) }
	sub out2  { print  shift() if (($AI::NeuralNet::BackProp::DEBUG eq 1) || ($AI::NeuralNet::BackProp::DEBUG eq 2)) }
	sub out3  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG) }
	sub out4  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG eq 4) }
	
	# Rounds a floating-point to an integer with int() and sprintf()
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
    	# eval guards against non-numeric input; returns 0 on failure
    	my $int = eval { int(sprintf("%.0f",shift)) };
    	return $@ ? 0 : $int;
	}
    
	# Used to format array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $a		=	shift;
		my $b		=	shift;
		my $x;
		foreach my $el (@{$map}) { 
			my $str = ((int($el))?$a:$b);
			$str=$el."\0" if(!$a);
			print $str;
			$x++;
			if($x>$break-1) {
				print "\n";
				$x=0;
			}
		}
		print "\n";
	}
	
	# Returns the percentage difference between all elements of two
	# array refs of exactly the same length (in elements).
	# Now calculates the actual difference in numerical value.
	sub pdiff {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $a1	=	shift;
		my $a2	=	shift;

BackProp.pm  view on Meta::CPAN

		for my $x (0..$a1s) {
			$a = $a1->[$x];
			$b = $a2->[$x];
			if($a!=$b) {
				if($a<$b){$t=$a;$a=$b;$b=$t;}
				$a=1 if(!$a);
				$diff+=(($a-$b)/$a)*100;
			}
		}
		$a1s = 1 if(!$a1s);
		return sprintf("%.10f",($diff/$a1s));
	}
	
	# Returns the absolute difference between $fa and $fb as a percentage of $fa
	sub p {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my ($fa,$fb)=(shift,shift);
		sprintf("%.3f",((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100);
	}
	
	# This sub will take an array ref of a data set, which it expects in this format:
	#   my @data_set = (	[ ...inputs... ], [ ...outputs ... ],
	#				   				   ... rows ...
	#				   );
	#
	# This sub returns the percentage of 'forgetfulness' when the net learns all the
	# data in the set in order. Usage:
	#
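	#	(illustrative only; $net is a constructed network and the values are made up)
	#	my @data_set = ( [ 0,1 ], [ 1 ],
	#	                 [ 1,0 ], [ 0 ] );
	#	my $forgetfulness = $net->learn_set(\@data_set, inc=>0.2, max=>2000, error=>-1);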

BackProp.pm  view on Meta::CPAN

		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2-1;
		my $inc		=	$args{inc};
		my $max		=	$args{max};
	    my $error	=	$args{error};
	    my $p		=	(defined $args{flag})	?$args{flag}	   :1;
	    my $row		=	(defined $args{pattern})?$args{pattern}*2+1:1;
	    my ($fa,$fb);
		for my $x (0..$len) {
			print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG);
			my $str = $self->learn( $data->[$x*2],			# The list of data to input to the net
					  		  		$data->[$x*2+1], 		# The output desired
					    			inc=>$inc,				# The starting learning gradient
					    			max=>$max,				# The maximum num of loops allowed
					    			error=>$error);			# The maximum (%) error allowed
			print $str if($AI::NeuralNet::BackProp::DEBUG); 
		}
			
		
		my $res;
		$data->[$row] = $self->crunch($data->[$row]) if($data->[$row] == 0);
		
		if ($p) {
			$res=pdiff($data->[$row],$self->run($data->[$row-1]));
		} else {
			$res=$data->[$row]->[0]-$self->run($data->[$row-1])->[0];

BackProp.pm  view on Meta::CPAN

		my $len		=	$#{$data}/2-1;
		my $inc		=	$args{inc};
		my $max		=	$args{max};
	    my $error	=	$args{error};
	    my @learned;
		while(1) {
			_GET_X:
			my $x=$self->intr(rand()*$len);
			goto _GET_X if($learned[$x]);
			$learned[$x]=1;
			print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG); 
			my $str =  $self->learn($data->[$x*2],			# The list of data to input to the net
					  		  		$data->[$x*2+1], 		# The output desired
					    			inc=>$inc,				# The starting learning gradient
			 		    			max=>$max,				# The maximum num of loops allowed
					    			error=>$error);			# The maximum (%) error allowed
			print $str if($AI::NeuralNet::BackProp::DEBUG); 
		}
			
		
		return 1; 
	}

	# Returns the index of the element in array REF passed with the highest comparative value
	sub high {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1	=	shift;

BackProp.pm  view on Meta::CPAN

	}
	
	# Sets/gets the randomness factor in the network. Setting a value of 0 disables random factors.
	sub random {
		my $self	=	shift;
		my $rand	=	shift;
		return $self->{random}	if(!(defined $rand));
		$self->{random}	=	$rand;
	}
	
	# Sets/gets column width for printing lists in debug modes 1,3, and 4.
	sub col_width {
		my $self	=	shift;
		my $width	=	shift;
		return $self->{col_width}	if(!$width);
		$self->{col_width}	=	$width;
	}
	
	# Sets/Removes value ranging
	sub range {
		my $self	=	shift;
		my $ref		=	shift;
		my $b		=	shift;
		if(substr($ref,0,5) ne "ARRAY") {
			if(($ref == 0) && (!defined $b)) {
				$ref	= $self->crunch($ref);
				#print "\$ref is a string, crunching to ",join(',',@{$ref}),"\n";
			} else {
    			my $a	= $ref;
    			$a		= $self->crunch($a)->[0] if($a == 0);
				$b		= $self->crunch($b)->[0] if($b == 0);
				$_[++$#_] = $a;
    			$_[++$#_] = $b;
    			$ref	= \@_;
				#print "Found ranged definition, joined to ",join(',',@{$ref}),"\n";
			}
		}
		my $rA		=	0;
		my $rB		=	$#{$ref};
		my $rS		=	0; #shift;
		if(!$rA && !$rB) {
			$self->{rA}=$self->{rB}=-1;
			return undef;
		}
		if($rB<$rA){my $t=$rA;$rA=$rB;$rB=$t};

BackProp.pm  view on Meta::CPAN

		my $rB		=	$self->{rB};
		my $rS		=	$self->{rS};
		my $r		=	$rB;#-$rA+1;
		return $in if(!$rA && !$rB);
		my $l		=	$self->{OUT}-1;
		my $out 	=	[];
		# Adjust for a maximum outside what we have seen so far
		for my $i (0..$l) {
			$rS=$in->[$i] if($in->[$i]>$rS);
		}
		#print "\$l:$l,\$rA:$rA,\$rB:$rB,\$rS:$rS,\$r:$r\n";
		# Loop through, convert values to percentage of maximum, then multiply
		# percentage by range and add to base of range to get the final value
		for my $i (0..$l) {
			#print "\$i:$i,\$in:$in->[$i]\n";
			$rS=1 if(!$rS);
			my $t=intr((($rS-$in->[$i])/$rS)*$r+$rA);
			#print "t:$t,$self->{rRef}->[$t],i:$i\n";
			$out->[$i] = $self->{rRef}->[$t];
		}
		$self->{rS}=$rS;
		return $out;
	}
			
		
	# Initializes the base for a new neural network.
	# It is recommended that you call learn() before run()ing a pattern.
	# See documentation above for usage.

BackProp.pm  view on Meta::CPAN

		# as a network file.
		if($layers == 0) {  
		    # We use a "1" flag as the second argument to indicate that we want load()
		    # to call the new constructor to make a network the same size as in the file
		    # and return a refrence to the network, instead of just creating the network from
		    # pre-exisiting refrence
			return $self->load($layers,1);
		}
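		# For example (illustrative; assumes "mynet.dat" was previously written by save()):
		#	my $net = new AI::NeuralNet::BackProp("mynet.dat");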
				
		
		#print "Creating $size neurons in each layer for $layers layer(s)...\n";
		
		AI::NeuralNet::BackProp::out2 "Creating $size neurons in each layer for $layers layer(s)...\n";
		
		# Error checking
		return undef if($out>$size);
		
		# When this is called, they tell us how many layers and neurons in each layer.
		# But really what we store is a long line of neurons that are only divided in theory
		# when connecting the outputs and inputs.
		my $div = $size;
		my $size = $layers * $size;
		
		AI::NeuralNet::BackProp::out2 "Creating RUN and MAP systems for network...\n";
		#print "Creating RUN and MAP systems for network...\n";
		
		# Create a new runner and mapper for the network.
		$self->{RUN} = new AI::NeuralNet::BackProp::_run($self);
		$self->{MAP} = new AI::NeuralNet::BackProp::_map($self);
		
		$self->{SIZE}	=	$size;
		$self->{DIV}	=	$div;
		$self->{OUT}	=	$out;
		$self->{FLAG}	=	$flag;
		$self->{col_width}= 5;

BackProp.pm  view on Meta::CPAN

	sub save {
		my $self	=	shift;
		my $file	=	shift;
		my $size	=	$self->{SIZE};
		my $div		=	$self->{DIV};
		my $out		=	$self->{OUT};
		my $flag	=	$self->{FLAG};

	    open(FILE,">$file");
	    
	    print FILE "size=$size\n";
	    print FILE "div=$div\n";
	    print FILE "out=$out\n";
	    print FILE "flag=$flag\n";
	    print FILE "rand=$self->{random}\n";
	    print FILE "cw=$self->{col_width}\n";
		print FILE "crunch=$self->{_CRUNCHED}->{_LENGTH}\n";
		print FILE "rA=$self->{rA}\n";
		print FILE "rB=$self->{rB}\n";
		print FILE "rS=$self->{rS}\n";
		print FILE "rRef=",(($self->{rRef})?join(',',@{$self->{rRef}}):''),"\n";
			
		for my $a (0..$self->{_CRUNCHED}->{_LENGTH}-1) {
			print FILE "c$a=$self->{_CRUNCHED}->{LIST}->[$a]\n";
		}
		
		my $w;
		for my $a (0..$self->{SIZE}-1) {
			$w="";
			for my $b (0..$self->{DIV}-1) {
				$w .= "$self->{NET}->[$a]->{SYNAPSES}->{LIST}->[$b]->{WEIGHT},";
			}
			chop($w);
			print FILE "n$a=$w\n";
		}
	
	    close(FILE);
	    
	    return $self;
	}
        
	# Load entire network state from disk.
	sub load {
		my $self	=	shift;

BackProp.pm  view on Meta::CPAN

			}
		}
	
	    return $self;
	}

	# Dumps the complete weight matrix of the network to STDIO
	sub show {
		my $self	=	shift;
		for my $a (0..$self->{SIZE}-1) {
			print "Neuron $a: ";
			for my $b (0..$self->{DIV}-1) {
				print $self->{NET}->[$a]->{SYNAPSES}->{LIST}->[$b]->{WEIGHT},"\t";
			}
			print "\n";
		}
	}
	
	# Used internally by new() and learn().
	# This is the sub block that actually creates
	# the connections between the synapse chains and
	# also connects the run packages and the map packages
	# to the appropriate ends of the neuron grids.
	sub initialize_group() {
		my $self	=	shift;

BackProp.pm  view on Meta::CPAN

		my $div		=	$self->{DIV};
		my $out		=	$self->{OUT};
		my $flag	=	$self->{FLAG};
		my $x		=	0; 
		my $y		=	0;
		
		# Reset map and run synapse counters.
		$self->{RUN}->{REGISTRATION} = $self->{MAP}->{REGISTRATION} = 0;
		
		AI::NeuralNet::BackProp::out2 "There will be $size neurons in this network group, with a divison value of $div.\n";
		#print "There will be $size neurons in this network group, with a divison value of $div.\n";
		
		# Create initial neuron packages in one long array for the entire group
		for($y=0; $y<$size; $y++) {
			#print "Initalizing neuron $y...     \r";
			$self->{NET}->[$y]=new AI::NeuralNet::BackProp::neuron();
		}
		
		AI::NeuralNet::BackProp::out2 "Creating synapse grid...\n";
		
		my $z  = 0;    
		my $aa = 0;
		my ($n0,$n1,$n2);
		
		# Outer loop loops over every neuron in group, incrementing by the number

BackProp.pm  view on Meta::CPAN

	# Create a new network with 1 layer, 5 inputs, and 5 outputs.
	my $net = new AI::NeuralNet::BackProp(1,5,5);
	
	# Add a small amount of randomness to the network
	$net->random(0.001);

	# Demonstrate a simple learn() call
	my @inputs = ( 0,0,1,1,1 );
	my @outputs = ( 1,0,1,0,1 );
	
	print $net->learn(\@inputs, \@outputs),"\n";

	# Create a data set to learn
	my @set = (
		[ 2,2,3,4,1 ], [ 1,1,1,1,1 ],
		[ 1,1,1,1,1 ], [ 0,0,0,0,0 ],
		[ 1,1,1,0,0 ], [ 0,0,0,1,1 ]	
	);
	
	# Demo learn_set()
	my $f = $net->learn_set(\@set);
	print "Forgetfulness: $f unit\n";
	
	# Crunch a bunch of strings and return array refs
	my $phrase1 = $net->crunch("I love neural networks!");
	my $phrase2 = $net->crunch("Jay Lenno is wierd.");
	my $phrase3 = $net->crunch("The rain in spain...");
	my $phrase4 = $net->crunch("Tired of word crunching yet?");

	# Make a data set from the array refs
	my @phrases = (
		$phrase1, $phrase2,
		$phrase3, $phrase4
	);

	# Learn the data set	
	$net->learn_set(\@phrases);
	
	# Run a test phrase through the network
	my $test_phrase = $net->crunch("I love neural networking!");
	my $result = $net->run($test_phrase);
	
	# Get this, it prints "Jay Leno is  networking!" ...  LOL!
	print $net->uncrunch($result),"\n";



=head1 UPDATES

This is version 0.89. In this version I have included a new feature, output range limits, as
well as automatic crunching of run() and learn*() inputs. Included in the examples directory
are seven new practical-use example scripts. Also implemented in this version is a much cleaner 
learning function for individual neurons which is more accurate than previous versions and is 
based on the LMS rule. See range() for information on output range limits. I have also updated 

BackProp.pm  view on Meta::CPAN


Now $result will contain (1,0), effectively flipping the input pattern
around. Obviously, the larger $size is, the longer it will take
to learn a pattern. Learn() returns a string in the form of

	Learning took X loops and X wallclock seconds (X.XXX usr + X.XXX sys = X.XXX CPU).

With the X's replaced by time or loop values for that loop call. So,
to view the learning stats for every learn call, you can just:
	
	print $net->learn(\@map,\@res);
	

If you call "$net->debug(4)" with $net being the 
refrence returned by the new() constructor, you will get benchmarking 
information for the learn function, as well as plenty of other information output. 
See notes on debug() in the METHODS section, below. 

If you do call $net->debug(1), it is a good 
idea to point STDIO of your script to a file, as a lot of information is output. I often
use this command line:

BackProp.pm  view on Meta::CPAN

	);
	
	# Learn the set
	my $f = learn_set(\@data, 
					  inc	=>	0.1,	
					  max	=>	500,
					  p		=>	1
					 );
			
	# Print it 
	print "Forgetfullness: $f%";

    
This is a snippet from the example script examples/ex_dow.pl, which demonstrates DOW average
prediction for the next month. A simpler set definition would be as such:

	my @data = (
		[ 0,1 ], [ 1 ],
		[ 1,0 ], [ 0 ]
	);
	

BackProp.pm  view on Meta::CPAN

The second example is the same as the first example.



=item $net->benchmarked();

UPDATE: benchmarked() now returns just the string from timestr() for the last run() or
learn() call. Exception: If the last call was a loop the string will be prefixed with "%d loops and ".

This returns a benchmark info string for the last learn() or the last run() call, 
whichever occurred later. It is easily printed as a string,
as follows:

	print $net->benchmarked() . "\n";





=item $net->debug($level)

Toggles debugging off if called with $level = 0 or no arguments. There are four levels
of debugging.

Level 0 ($level = 0) : Default, no debugging information printed. All printing is 
left to calling script.

Level 1 ($level = 1) : This causes ALL debugging information for the network to be dumped
as the network runs. In this mode, it is a good idea to pipe your STDIO to a file, especially
for large programs.

Level 2 ($level = 2) : A slightly-less verbose form of debugging, not as many internal 
data dumps.

Level 3 ($level = 3) : JUST prints weight mapping as weights change.

Level 4 ($level = 4) : JUST prints the benchmark info for EACH learn loop iteration, not just
learning as a whole. Also prints the percentage difference for each loop between current network
results and desired results, as well as the learning gradient ('increment').

Level 4 is useful for seeing if you need to give a smaller learning increment to learn() .
I used level 4 debugging quite often in creating the letters.pl example script and the small_1.pl
example script.

Toggles debugging off when called with no arguments. 
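
For example, a minimal sketch of level 4 usage (reusing the \@map and \@res arrays from the earlier learn() example):

	$net->debug(4);
	print $net->learn(\@map,\@res);
	$net->debug(0);		# turn debugging back off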



BackProp.pm  view on Meta::CPAN

This will load from disk any network saved by save() and completely restore the internal
state at the point at which save() was called.




=item $net->join_cols($array_ref,$row_length_in_elements,$high_state_character,$low_state_character);

This is more of a utility function than any real necessary function of the package.
Instead of joining all the elements of the array together in one long string, like join() ,
it prints the elements of $array_ref to STDIO, adding a newline (\n) after every $row_length_in_elements
number of elements has passed. Additionally, if you include a $high_state_character and a $low_state_character,
it will print the $high_state_character (can be more than one character) for every element that
has a true value, and the $low_state_character for every element that has a false value. 
If you do not supply a $high_state_character, or the $high_state_character is a null, empty, or 
undefined string, join_cols() will just print the numerical value of each element separated
by a null character (\0). join_cols() defaults to the latter behaviour.
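
For example (illustrative values), this prints a 10-element map as two rows of five columns, using '#' for true elements and '.' for false elements:

	my @map = (1,1,0,0,1,
	           0,1,1,1,0);
	$net->join_cols(\@map,5,'#','.');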



=item $net->pdiff($array_ref_A, $array_ref_B);

This function is used VERY heavily internally to calculate the difference in percent
between elements of the two array refs passed. It returns a %.10f (sprintf-format) 
percent string.
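
For example (illustrative values):

	# Prints the average element-wise difference, in percent, between the two refs
	print $net->pdiff([1,1,1,1,1],[1,1,1,0,1]),"\n";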


=item $net->p($a,$b);

Returns a floating point number representing the absolute difference between $a and $b as a percentage of $a.



=item $net->intr($float);

Rounds a floating-point number to an integer using sprintf() and int() . This provides
better rounding than just calling int() on the float. Also used very heavily internally.
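
For example (these cases are also exercised in the distribution's test.pl):

	print $net->intr(0.51);		# prints 1
	print $net->intr(0.00001);	# prints 0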



=item $net->high($array_ref);

Returns the index of the element in array REF passed with the highest comparative value.
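
For example (illustrative):

	print $net->high([2,9,4]);	# prints 1, the index of the largest element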



BackProp.pm  view on Meta::CPAN

						# but use this form here for clarity.

		$net->learn($net->crunch("I love chips."),  $net->crunch("That's Junk Food!"));
		$net->learn($net->crunch("I love apples."), $net->crunch("Good, Healthy Food."));
		$net->learn($net->crunch("I love pop."),    $net->crunch("That's Junk Food!"));
		$net->learn($net->crunch("I love oranges."),$net->crunch("Good, Healthy Food."));
	}
	
	my $response = $net->run($net->crunch("I love corn."));
	
	print $net->uncrunch($response),"\n";


On my system, this responds with "Good, Healthy Food." If you run the network with the
crunch()ed phrase "I love pop.", though, you will probably get "Food! apples. apples." (At least it returns
that on my system.) As you can see, the associations are not yet perfect, but it can make
for some interesting demos!



=item $net->crunched($word);

BackProp.pm  view on Meta::CPAN


Returns a rectangular block defined by an array ref in the form of:
	
	[$left,$top,$right,$bottom]

These must be in the range of 0..319 for $left and $right, and the range of 0..199 for
$top and $bottom. The block is returned as an array ref with horizontal lines in sequential order.
I.e. to get a pixel from [2,5] in the block, if $right-$left was 20, then the element in 
the array ref containing the contents of coordinates [2,5] would be found at [5*20+2] ($y*$width+$x).
    
	print $pcx->get_block([0,0,20,50])->[5*20+2];

This would print the contents of the element at block coords [2,5].



=item $pcx->get($x,$y);

Returns the value of pixel at image coordinates $x,$y.
$x must be in the range of 0..319 and $y must be in the range of 0..199.



docs.htm  view on Meta::CPAN


        # Create a new network with 1 layer, 5 inputs, and 5 outputs.
        my $net = new AI::NeuralNet::BackProp(1,5,5);

        # Add a small amount of randomness to the network
        $net-&gt;random(0.001);
        # Demonstrate a simple learn() call
        my @inputs = ( 0,0,1,1,1 );
        my @outputs = ( 1,0,1,0,1 );

        print $net-&gt;learn(\@inputs, \@outputs),&quot;\n&quot;;

        # Create a data set to learn
        my @set = (
                [ 2,2,3,4,1 ], [ 1,1,1,1,1 ],
                [ 1,1,1,1,1 ], [ 0,0,0,0,0 ],
                [ 1,1,1,0,0 ], [ 0,0,0,1,1 ]    
        );

        # Demo learn_set()
        my $f = $net-&gt;learn_set(\@set);
        print &quot;Forgetfulness: $f unit\n&quot;;

        # Crunch a bunch of strings and return array refs
        my $phrase1 = $net-&gt;crunch(&quot;I love neural networks!&quot;);
        my $phrase2 = $net-&gt;crunch(&quot;Jay Leno is weird.&quot;);
        my $phrase3 = $net-&gt;crunch(&quot;The rain in spain...&quot;);
        my $phrase4 = $net-&gt;crunch(&quot;Tired of word crunching yet?&quot;);


        # Make a data set from the array refs
        my @phrases = (

docs.htm  view on Meta::CPAN

        );

        # Learn the data set    
        $net-&gt;learn_set(\@phrases);


        # Run a test phrase through the network
        my $test_phrase = $net-&gt;crunch(&quot;I love neural networking!&quot;);
        my $result = $net-&gt;run($test_phrase);

        # Get this, it prints &quot;Jay Leno is  networking!&quot; ...  LOL!
        print $net-&gt;uncrunch($result),&quot;\n&quot;;
        
</PRE>        
<P>
<HR SIZE=1 COLOR=BLACK>
<H1><A NAME="updates">UPDATES</A></H1>
<P>This is version 0.89. In this version I have included a new feature, output range limits, as
well as automatic crunching of <A HREF="#item_run"><CODE>run()</CODE></A> and learn*() inputs. Included in the examples directory
are seven new practical-use example scripts. Also implemented in this version is a much cleaner 
learning function for individual neurons which is more accurate than previous versions and is 
based on the LMS rule. See <A HREF="#item_range"><CODE>range()</CODE></A> for information on output range limits. I have also updated 

docs.htm  view on Meta::CPAN

        my $result = $net-&gt;run(\@map);</PRE>
<P>Now $result will contain (1,0), effectively flipping the input pattern
around. Obviously, the larger $size is, the longer it will take
to learn a pattern. <CODE>Learn()</CODE> returns a string in the form of</P>
<PRE>
        Learning took X loops and X wallclock seconds (X.XXX usr + X.XXX sys = X.XXX CPU).</PRE>
<P>With the X's replaced by time or loop values for that loop call. So,
to view the learning stats for every learn call, you can just:
</P>
<PRE>
        print $net-&gt;learn(\@map,\@res);</PRE>
<P>If you call ``$net-&gt;debug(4)'' with $net being the 
reference returned by the <CODE>new()</CODE> constructor, you will get benchmarking 
information for the learn function, as well as plenty of other information output. 
See notes on <A HREF="#item_debug"><CODE>debug()</CODE></A> in the METHODS section, below.</P>
<P>If you do call $net-&gt;debug(1), it is a good 
idea to point STDIO of your script to a file, as a lot of information is output. I often
use this command line:</P>
<PRE>
        $ perl some_script.pl &gt; .out</PRE>
<P>Then I can simply go and use emacs or any other text editor and read the output at my leisure,

docs.htm  view on Meta::CPAN

        );

        # Learn the set
        my $f = learn_set(\@data, 
                                          inc   =&gt;      0.1,    
                                          max   =&gt;      500,
                                          p             =&gt;      1
                                         );

        # Print it 
        print &quot;Forgetfulness: $f%&quot;;</PRE>
<P></P>
<P>
This is a snippet from the example script examples/ex_dow.pl, which demonstrates DOW average
prediction for the next month. A simpler set definition would be as such:</PRE>
<PRE>
        my @data = (
                [ 0,1 ], [ 1 ],
                [ 1,0 ], [ 0 ]
        );

docs.htm  view on Meta::CPAN


        @range = ($value1,$value2);
        $net-&gt;range(\@range);</PRE>
<P>The second example is the same as the first example.</P>
<P></P>
<DT><STRONG><A NAME="item_benchmarked">$net-&gt;benchmarked();</A></STRONG><BR>
<DD>
<B>UPDATED:</B> <CODE>benchmarked()</CODE> now returns just the string from <CODE>timestr()</CODE> for the last <A HREF="#item_run"><CODE>run()</CODE></A> or
<A HREF="#item_learn"><CODE>learn()</CODE></A> call. Exception: If the last call was a loop the string will be prefixed with ``%d loops and ''.
<P>This returns a benchmark info string for the last <A HREF="#item_learn"><CODE>learn()</CODE></A> or the last <A HREF="#item_run"><CODE>run()</CODE></A> call, 
whichever occurred later. It is easily printed as a string,
as follows:</P>
<PRE>
        print $net-&gt;benchmarked() . &quot;\n&quot;;</PRE>
<P></P>
<DT><STRONG><A NAME="item_debug">$net-&gt;debug($level)</A></STRONG><BR>
<DD>
Toggles debugging off if called with $level = 0 or no arguments. There are four levels
of debugging.
<P>Level 0 ($level = 0) : Default, no debugging information printed. All printing is 
left to calling script.</P>
<P>Level 1 ($level = 1) : This causes ALL debugging information for the network to be dumped
as the network runs. In this mode, it is a good idea to pipe your STDIO to a file, especially
for large programs.</P>
<P>Level 2 ($level = 2) : A slightly-less verbose form of debugging, not as many internal 
data dumps.</P>
<P>Level 3 ($level = 3) : JUST prints weight mapping as weights change.</P>
<P>Level 4 ($level = 4) : JUST prints the benchmark info for EACH learn loop iteration, not just
learning as a whole. Also prints the percentage difference for each loop between current network
results and desired results, as well as the learning gradient ('increment').</P>
<P>Level 4 is useful for seeing if you need to give a smaller learning increment to <A HREF="#item_learn"><CODE>learn()</CODE></A> .
I used level 4 debugging quite often in creating the letters.pl example script and the small_1.pl
example script.</P>
<P>Toggles debugging off when called with no arguments.</P>
<P></P>
<DT><STRONG><A NAME="item_save">$net-&gt;save($filename);</A></STRONG><BR>
<DD>
This will save the complete state of the network to disk, including all weights and any
words crunched with <A HREF="#item_crunch"><CODE>crunch()</CODE></A> . Also saves any output ranges set with <A HREF="#item_range"><CODE>range()</CODE></A> .

docs.htm  view on Meta::CPAN

<P></P>
<DT><STRONG><A NAME="item_load">$net-&gt;load($filename);</A></STRONG><BR>
<DD>
This will load from disk any network saved by <A HREF="#item_save"><CODE>save()</CODE></A> and completely restore the internal
state at the point at which <A HREF="#item_save"><CODE>save()</CODE></A> was called.
<P></P>
<DT><STRONG><A NAME="item_join_cols">$net-&gt;join_cols($array_ref,$row_length_in_elements,$high_state_character,$low_state_character);</A></STRONG><BR>
<DD>
This is more of a utility function than any real necessary function of the package.
Instead of joining all the elements of the array together in one long string, like <CODE>join()</CODE> ,
it prints the elements of $array_ref to STDIO, adding a newline (\n) after every $row_length_in_elements
number of elements has passed. Additionally, if you include a $high_state_character and a $low_state_character,
it will print the $high_state_character (can be more than one character) for every element that
has a true value, and the $low_state_character for every element that has a false value. 
If you do not supply a $high_state_character, or the $high_state_character is a null, empty, or 
undefined string, <A HREF="#item_join_cols"><CODE>join_cols()</CODE></A> will just print the numerical value of each element separated
by a null character (\0). <A HREF="#item_join_cols"><CODE>join_cols()</CODE></A> defaults to the latter behaviour.
<P></P>
<DT><STRONG><A NAME="item_pdiff">$net-&gt;pdiff($array_ref_A, $array_ref_B);</A></STRONG><BR>
<DD>
This function is used VERY heavily internally to calculate the difference in percent
between elements of the two array refs passed. It returns a %.10f (sprintf-format) 
percent string.
<P></P>
<DT><STRONG><A NAME="item_p">$net-&gt;p($a,$b);</A></STRONG><BR>
<DD>
Returns a floating point number representing the absolute difference between $a and $b as a percentage of $a.
<P></P>
<DT><STRONG><A NAME="item_intr">$net-&gt;intr($float);</A></STRONG><BR>
<DD>
Rounds a floating-point number to an integer using <CODE>sprintf()</CODE> and <CODE>int()</CODE> . This provides
better rounding than just calling <CODE>int()</CODE> on the float. Also used very heavily internally.
<P></P>
<DT><STRONG><A NAME="item_high">$net-&gt;high($array_ref);</A></STRONG><BR>
<DD>
Returns the index of the element in array REF passed with the highest comparative value.
<P></P>
<DT><STRONG><A NAME="item_low">$net-&gt;low($array_ref);</A></STRONG><BR>
<DD>
Returns the index of the element in array REF passed with the lowest comparative value.
<P></P>

docs.htm  view on Meta::CPAN

                                                # be replaced with learn_set() to do the same thing,
                                                # but use this form here for clarity.
                $net-&gt;learn($net-&gt;crunch(&quot;I love chips.&quot;),  $net-&gt;crunch(&quot;That's Junk Food!&quot;));
                $net-&gt;learn($net-&gt;crunch(&quot;I love apples.&quot;), $net-&gt;crunch(&quot;Good, Healthy Food.&quot;));
                $net-&gt;learn($net-&gt;crunch(&quot;I love pop.&quot;),    $net-&gt;crunch(&quot;That's Junk Food!&quot;));
                $net-&gt;learn($net-&gt;crunch(&quot;I love oranges.&quot;),$net-&gt;crunch(&quot;Good, Healthy Food.&quot;));
        }

        my $response = $net-&gt;run($net-&gt;crunch(&quot;I love corn.&quot;));

        print $net-&gt;uncrunch($response),&quot;\n&quot;;</PRE>
<P>On my system, this responds with ``Good, Healthy Food.'' If you run the network with the <A HREF="#item_crunch"><CODE>crunch()</CODE></A>ed phrase
``I love pop.'', though, you will probably get ``Food! apples. apples.'' (At least it returns
that on my system.) As you can see, the associations are not yet perfect, but it can make
for some interesting demos!</P>
<P></P>
<DT><STRONG><A NAME="item_crunched">$net-&gt;crunched($word);</A></STRONG><BR>
<DD>
This will return undef if the word is not in the internal crunch list, or it will return the
index of the word if it exists in the crunch list.
<P></P>

docs.htm  view on Meta::CPAN

Returns a rectangular block defined by an array ref in the form of:

<PRE>
        [$left,$top,$right,$bottom]</PRE>
<P>These must be in the range of 0..319 for $left and $right, and the range of 0..199 for
$top and $bottom. The block is returned as an array ref with horizontal lines in sequential order.
I.e. to get a pixel from [2,5] in the block, if $right-$left was 20, then the element in 
the array ref containing the contents of coordinates [2,5] would be found at [5*20+2] ($y*$width+$x).
</P>
<PRE>
        print $pcx-&gt;get_block([0,0,20,50])-&gt;[5*20+2];</PRE>
<P>This would print the contents of the element at block coords [2,5].</P>
<P></P>
<DT><STRONG><A NAME="item_get">$pcx-&gt;get($x,$y);</A></STRONG><BR>
<DD>
Returns the value of pixel at image coordinates $x,$y.
$x must be in the range of 0..319 and $y must be in the range of 0..199.
<P></P>
<DT><STRONG><A NAME="item_rgb">$pcx-&gt;rgb($index);</A></STRONG><BR>
<DD>
Returns a 3-element array (not array ref) with each element corresponding to the red, green, or
blue color components, respectively.

examples/ex_add.pl  view on Meta::CPAN

			[ 1,   2   ], [ 3    ],
			[ 2,   2   ], [ 4    ],
			[ 20,  20  ], [ 40   ],
			[ 100, 100 ], [ 200  ],
			[ 150, 150 ], [ 300  ],
			[ 500, 500 ], [ 1000 ],
		]);
		$addition->save('add.dat');
	}

	print "Enter first number to add  : "; chomp(my $a = <>);
	print "Enter second number to add : "; chomp(my $b = <>);
	print "Result: ",$addition->run([$a,$b])->[0],"\n";
	
	

examples/ex_add2.pl  view on Meta::CPAN

	my ( $layers, $inputs, $outputs, $top, $inc, $runtime,
	$forgetfulness );
	my @answers;
	my @predictions;
	my @percent_diff;
	
	$inputs = 3;
	$outputs = 1;
	my ($maxinc,$maxtop,$incstep);
	select OUTFP; $OUTPUT_AUTOFLUSH = 1; select STDOUT;
	print OUTFP "layers inc top forgetfulness time \%diff1 \%diff2 \%diff3
	\%diff4\n\n";
	
	for( $layers = 1; $layers <= 3; $layers++ ){
	 if( $layers <= 2 ){
	  $incstep = 0.025;
	 }
	 else{
	  $incstep = 0.05;
	 }
	 for( $inc=0.025; $inc <= 0.4; $inc += $incstep ){
	  if( $inc > .3 ){
	   $maxtop = 3;
	  }
	  else{
	   $maxtop = 4;
	  }
	  for( $top=1; $top <=$maxtop; $top++ ){
	   addnet();
	   printf OUTFP "%d %.3f %d %g %s %f %f %f %f\n",
	   $layers, $inc, $top, $forgetfulness, timestr($runtime),
	$percent_diff[0],
	   $percent_diff[1], $percent_diff[2], $percent_diff[3];
	   print "layers inc top forgetfulness time \%diff1 \%diff2 \%diff3
	\%diff4\n";
	   printf "%d %.3f %d %g %s %f %f %f %f\n",
	   $layers, $inc, $top, $forgetfulness, timestr($runtime),
	$percent_diff[0],
	   $percent_diff[1], $percent_diff[2], $percent_diff[3];
	  }
	 }
	}
	
	#....................................................
	sub addnet
	{
	 print "\nCreate a new net with $layers layers, 3 inputs, and 1 output\n";
	 my $net = AI::NeuralNet::BackProp->new($layers,3,1);
	
	 # Disable debugging
	 $net->debug(0);
	
	
	 my @data = (
	  [   2633, 2665, 2685],  [ 2633 + 2665 + 2685 ],
	  [   2623, 2645, 2585],  [ 2623 + 2645 + 2585 ],
	  [  2627, 2633, 2579],  [ 2627 + 2633 + 2579 ],
	  [   2611, 2627, 2563],  [ 2611 + 2627 + 2563 ],
	  [  2640, 2637, 2592],  [ 2640 + 2637 + 2592 ]
	 );
	
	 print "Learning started, will cycle $top times with inc = $inc\n";
	
	  # Make it learn the whole dataset $top times
	  my @list;
	
	 my $t1=new Benchmark;
	 for my $a (1..$top)
	 {
	  print "Outer Loop: $a : ";
	
	  $forgetfulness = $net->learn_set( \@data,
	           inc  => $inc,
	           max  => 500,
	           error => -1);
	
	  print "Forgetfulness: $forgetfulness %\n";
	
	 }
	 my $t2=new Benchmark;
	
	 $runtime = timediff($t2,$t1);
	 print "run took ",timestr($runtime),"\n";
	
	
	 my @input = ( [ 2222, 3333, 3200 ],
	      [ 1111, 1222, 3211 ],
	      [ 2345, 2543, 3000 ],
	      [ 2654, 2234, 2534 ] );
	
	    test_net( $net, @input );
	}
	#.....................................................................

examples/ex_add2.pl  view on Meta::CPAN

	  my @set;
	  my $fb;
	  my $net = shift;
	  my @data = @_;
	  undef @percent_diff; #@answers; undef @predictions;
	
	  for( $i=0; defined( $data[$i] ); $i++ ){
	   @set = @{ $data[$i] };
	   $fb = $net->run(\@set)->[0];
	   # Print output
	   print "Test Factors: (",join(',',@set),")\n";
	   $answer = eval( join( '+',@set ));
	   push @percent_diff, 100.0 * abs( $answer - $fb )/ $answer;
	   print "Prediction : $fb      answer: $answer\n";
	  }
	 }
	
	

examples/ex_alpha.pl  view on Meta::CPAN

	# Build a test map 
	my $tmp	=	[2,1,1,1,2,
				 1,2,2,2,1,
				 1,2,2,2,1,
				 1,1,1,1,1,
				 1,2,2,2,1,
				 1,2,2,2,1,
				 1,2,2,2,1];
	
	# Display test map
	print "\nTest map:\n";
	$net->join_cols($tmp,5);
	
	# Display network results
	print "Letter index matched: ",$net->run($tmp)->[0],"\n";
	

examples/ex_bmp.pl  view on Meta::CPAN

			2,2,2,1,2,
			1,1,1,1,2	],		[	5	],
		
	);
    
    
	$net->range(1,5);
	
	# If we haven't saved the net already, do the learning
	if(!$net->load('images.net')) {
		print "\nLearning started...\n";
		
		# Make it learn the whole dataset $top times
		my @list;
		my $top=3;
		for my $a (0..$top) {
			my $t1=new Benchmark;
			print "\n\nOuter Loop: $a\n";
			
			# Test forgetfulness
			my $f = $net->learn_set(\@data,	inc		=>	0.1,	
											max		=>	500,
											error	=>	-1);
			
			# Print it 
			print "\n\nForgetfullness: $f%\n";

			# Save net to disk				
			$net->save('images.net');

			my $t2=new Benchmark;
			my $td=timediff($t2,$t1);
			print "\nLoop $a took ",timestr($td),"\n";
		}
	}
                                                                          
	my @set=(		2,1,1,1,2,
					1,2,2,2,2,
					1,1,1,2,2,
					1,2,2,2,2,
					2,1,1,1,2		);
		
	
	# Image number
	my $fb=$net->run(\@set)->[0];
	
	
	# Print output
	print "\nTest Map: \n";
	$net->join_cols(\@set,5);
	print "Image number matched: $fb\n";
	


examples/ex_bmp2.pl  view on Meta::CPAN

	# Create our model input
	my @map	=	(1,1,1,1,1,
				 0,0,1,0,0,
				 0,0,1,0,0,
				 0,0,1,0,0,
				 1,0,1,0,0,
				 1,0,1,0,0,
				 1,1,1,0,0);
				 
	
	print "\nLearning started...\n";
	
	print $net->learn(\@map,'J');
	
	print "Learning done.\n";
		
	# Build a test map 
	my @tmp	=	(0,0,1,1,1,
				 1,1,1,0,0,
				 0,0,0,1,0,
				 0,0,0,1,0,
				 0,0,0,1,0,                                          
				 0,0,0,0,0,
				 0,1,1,0,0);
	
	# Display test map
	print "\nTest map:\n";
	$net->join_cols(\@tmp,5,'');
	
	print "Running test...\n";
		                    
	# Run the actual test and get network output
	print "Result: ",$net->run_uc(\@tmp),"\n";
	
	print "Test run complete.\n";
	
	

examples/ex_crunch.pl  view on Meta::CPAN

		# learn() can use strings in two ways: As an array ref from crunch(), or
		# directly as a string, which it then will crunch internally.
		$net->learn($net->crunch("I love chips."),  $bad);
		$net->learn($net->crunch("I love apples."), $good);
		$net->learn("I love pop.",    				$bad);
		$net->learn("I love oranges.",				$good);
	}
	
	# run() automatically crunches the string (run_uc() uses run() internally) and
	# run_uc() automatically uncrunches the results.
	print $net->run_uc("I love corn.");

examples/ex_dow.pl  view on Meta::CPAN

		[	3, 	244, 235,  164, 	19.6, 19.8, 18.1, 	2627, 2633, 2579], 	[	2630  ],
		[	4, 	261, 244,  181, 	19.6, 19.6, 18.1, 	2611, 2627, 2563], 	[	2620  ],
		[	5, 	276, 261,  196, 	19.5, 19.6, 18.0, 	2630, 2611, 2582], 	[	2638  ],
		[	6, 	287, 276,  207, 	19.5, 19.5, 18.0, 	2637, 2630, 2589], 	[	2635  ],
		[	7, 	296, 287,  212, 	19.3, 19.5, 17.8, 	2640, 2637, 2592], 	[	2641  ] 		
	);
    
    
	# If we haven't saved the net already, do the learning
	if(!$net->load('dow.dat')) {
		print "\nLearning started...\n";
		
		# Make it learn the whole dataset $top times
		my @list;
		my $top=1;
		for my $a (0..$top) {
			my $t1=new Benchmark;
			print "\n\nOuter Loop: $a\n";
			
			# Test forgetfulness
			my $f = $net->learn_set(\@data,	inc		=>	0.2,	
											max		=>	2000,
											error	=>	-1);
			
			# Print it 
			print "\n\nForgetfullness: $f%\n";

			# Save net to disk				
			$net->save('dow.dat');
            
			my $t2=new Benchmark;
			my $td=timediff($t2,$t1);
			print "\nLoop $a took ",timestr($td),"\n";
		}
	}
                                                                          
	# Run a prediction using fake data
	#			Month	CPI  CPI-1 CPI-3 	Oil  Oil-1 Oil-3    Dow   Dow-1 Dow-3    
	my @set=(	10,		352, 309,  203, 	18.3, 18.7, 16.1, 	2592, 2641, 2651	  ); 
	
	# Dow Ave (output)	
	my $fb=$net->run(\@set)->[0];
	
	
	# Print output
	print "\nTest Factors: (",join(',',@set),")\n";
	print "DOW Prediction for Month #11: $fb\n";
	

examples/ex_pat.pl  view on Meta::CPAN

	File:   examples/ex_pat.pl
	Author: Tobias Bronx, <tobiasb@odin.funcom.com>
	Desc:
	
		This demonstrates simple pattern learning.

=cut

	use AI::NeuralNet::BackProp;
	$net=AI::NeuralNet::BackProp->new(2,2,2);
	print $net->learn([2,2],[2,2],max=>3),"\n"; 
	for (0..1) {
		for my $a (1..2) { 
			for my $b (1..2) { 
				@a=($a,$b); 
				print join(",",@a),":",join(",",@{$net->run(\@a)}), "\n"; 
				$net->learn(\@a,\@a, max=>100,inc=>0.17);
				print join(",",@{$net->run(@a)}),"\n";
			}
		}
	}
	print "1,2:",join(",",@{$net->run([1,2])}),"\n";

examples/ex_pcx.pl  view on Meta::CPAN

	Desc:
		This teaches the network to classify 10x10 bitmaps from a PCX file based on their 
		whiteness. (This was taught on a b&w 320x200 PCX of the author at an early age :-)
=cut
	
	use AI::NeuralNet::BackProp;
	
	# Set block sizes
	my ($bx,$by)=(10,10);
	
	print "Creating Neural Net...";
	my $net=AI::NeuralNet::BackProp->new(1,$bx*$by,1);
	$net->{col_width} = $bx;
	print "Done!\n";
	
	print "Loading bitmap...";
	my $img = $net->load_pcx("josiah.pcx");             
	print "Done!\n";
	
	print "Comparing blocks...\n";
	my $white = $img->get_block([0,0,$bx,$by]);
	
	my ($x,$y,$tmp,@score,$s,@blocks,$b);
	for ($x=0;$x<320;$x+=$bx) {
		for ($y=0;$y<200;$y+=$by) {
			$blocks[$b++]=$img->get_block([$x,$y,$x+$bx,$y+$by]);
			$score[$s++]=$net->pdiff($white,$blocks[$b-1]);
			print "Block at [$x,$y], index [$s] scored ".$score[$s-1]."%\n";
		}
	}
	print "Done!";
	
	print "High score:\n";
	print_ref($blocks[$net->high(\@score)],$bx); 
	print "Low score:\n";
	print_ref($blocks[$net->low(\@score)],$bx); 
	
	$net->debug(4);
	
	if(!$net->load("pcx.dat")) {
		print "Learning high block...\n";
		print $net->learn($blocks[$net->high(\@score)],"highest");
		
		$net->save("pcx.dat");
		
		print "Learning low block...\n";
		$net->learn($blocks[$net->low(\@score)],"lowest");
	}
	
	print "Testing random block...\n";
	
	print "Result: ",$net->run($blocks[rand()*$b])->[0],"\n";
	
	print "Bencmark for run: ", $net->benchmarked(), "\n";
	
	$net->save("pcx2.net");
	
	sub print_ref {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $x;
		my @els = (' ','.',',',':',';','%','#');
		foreach my $el (@{$map}) { 
			$str=$el/255*6;
			print $els[$str];
			$x++;
			if($x>$break-1) {
				print "\n";
				$x=0;
			}
		}
		print "\n";
	}
		                                         

examples/ex_sub.pl  view on Meta::CPAN

			[ 1,   1   ], [ 0      ] ,
			[ 2,   1   ], [ 1      ],
			[ 10,  5   ], [ 5      ],
			[ 20,  10  ], [ 10     ],
			[ 100, 50  ], [ 50     ],
			[ 500, 200 ], [ 300    ],
		]);
		$subtract->save('sub.dat');
	}
		
	print "Enter first number to subtract  : "; chomp(my $a = <>);
	print "Enter second number to subtract : "; chomp(my $b = <>);
	
	print "Result: ",$subtract->run([$a,$b])->[0],"\n";
	
	

examples/ex_synop.pl  view on Meta::CPAN

	# Create a new network with 1 layer, 5 inputs, and 5 outputs.
	my $net = new AI::NeuralNet::BackProp(1,5,5);
	
	# Add a small amount of randomness to the network
	$net->random(0.001);

	# Demonstrate a simple learn() call
	my @inputs = ( 0,0,1,1,1 );
	my @outputs = ( 1,0,1,0,1 );
	
	print $net->learn(\@inputs, \@outputs),"\n";

	# Create a data set to learn
	my @set = (
		[ 2,2,3,4,1 ], [ 1,1,1,1,1 ],
		[ 1,1,1,1,1 ], [ 0,0,0,0,0 ],
		[ 1,1,1,0,0 ], [ 0,0,0,1,1 ]	
	);
	
	# Demo learn_set()
	my $f = $net->learn_set(\@set);
	print "Forgetfulness: $f unit\n";
	
	# Crunch a bunch of strings and return array refs
	my $phrase1 = $net->crunch("I love neural networks!");
	my $phrase2 = $net->crunch("Jay Lenno is wierd.");
	my $phrase3 = $net->crunch("The rain in spain...");
	my $phrase4 = $net->crunch("Tired of word crunching yet?");

	# Make a data set from the array refs
	my @phrases = (
		$phrase1, $phrase2,
		$phrase3, $phrase4
	);

	# Learn the data set	
	$net->learn_set(\@phrases);
	
	# Run a test phrase through the network
	my $test_phrase = $net->crunch("I love neural networking!");
	my $result = $net->run($test_phrase);
	
	# Get this, it prints "Jay Leno is  networking!" ...  LOL!
	print $net->uncrunch($result),"\n";

test.pl  view on Meta::CPAN

# Before `make install' is performed this script should be runnable with
# `make test'. After `make install' it should work as `perl test.pl'

BEGIN { $| = 1; print "1..13\n"; }
END {print "not ok 1\n" unless $loaded;}
sub t { my $f=shift;$t++;my $str=($f)?"ok $t":"not ok $t";print $str,"\n";}
use AI::NeuralNet::BackProp;
$loaded = 1;
t 1;
my $net = new AI::NeuralNet::BackProp(2,2,1);
t $net;
t ($net->intr(0.51) eq 1);
t ($net->intr(0.00001) eq 0);
t ($net->intr(0.50001) eq 1);
t $net->learn_set([	
	[ 1,   1   ], [ 2    ] ,


