AI-NeuralNet-Mesh


Mesh.pm

    use strict;
    use Benchmark; 

   	# See POD for usage of this variable.
	$AI::NeuralNet::Mesh::Connector = '_c';
	
	# Debugging subs
	$AI::NeuralNet::Mesh::DEBUG  = 0;
	sub whowasi { (caller(1))[3] . '()' }
	sub debug { shift; $AI::NeuralNet::Mesh::DEBUG = shift || 0; } 
	# Print $msg only if the current debug level equals $level; returns the current level.
	sub d { shift if(substr($_[0],0,4) eq 'AI::'); my ($msg,$level,$cur)=(shift,shift,$AI::NeuralNet::Mesh::DEBUG); print $msg if($cur == $level); return $cur }
	sub verbose {debug @_};
	sub verbosity {debug @_};
	sub v {debug @_};
	
	
	# Return version of ::ID string passed or current version of this
	# module if no string is passed. Used in load() to detect file versions.
	sub version {
		shift if(substr($_[0],0,4) eq 'AI::');
		substr((split(/\s/,(shift || $AI::NeuralNet::Mesh::ID)))[2],1);
	}                                  
	
	# Rounds a floating-point number to an integer with sprintf() and int().
	# (eval{} is used as the failure guard here, since try/catch is not core Perl.)
	sub intr  {
		shift if(substr($_[0],0,4) eq 'AI::');
		my $int = eval { int(sprintf("%.0f",shift)) };
		return defined $int ? $int : 0;
	}
    
	# Package constructor
	sub new {
		no strict 'refs';
		my $type	=	shift;
		my $self	=	{};
		my $layers	=	shift;
		my $nodes	=	shift;

Mesh.pm

	}
	
	# Save entire network state to disk.
	sub save {
		my $self	=	shift;
		my $file	=	shift;
		no strict 'refs';
		
		open(FILE,">$file") or do { $self->{error} = "Error writing to \"$file\": $!"; return undef; };
	    
	    print FILE "header=$AI::NeuralNet::Mesh::ID\n";
	   	
		print FILE "total_layers=$self->{total_layers}\n";
		print FILE "total_nodes=$self->{total_nodes}\n";
	    print FILE "nodes=$self->{nodes}\n";
	    print FILE "inputs=$self->{inputs}\n";
	    print FILE "outputs=$self->{outputs}\n";
	    print FILE "layers=",(($self->{layers})?join(',',@{$self->{layers}}):''),"\n";
	    
	    print FILE "rand=$self->{random}\n";
	    print FILE "const=$self->{const}\n";
	    print FILE "cw=$self->{col_width}\n";
		print FILE "crunch=$self->{_crunched}->{_length}\n";
		print FILE "rA=$self->{rA}\n";
		print FILE "rB=$self->{rB}\n";
		print FILE "rS=$self->{rS}\n";
		print FILE "rRef=",(($self->{rRef})?join(',',@{$self->{rRef}}):''),"\n";
			
		for my $a (0..$self->{_crunched}->{_length}-1) {
			print FILE "c$a=$self->{_crunched}->{list}->[$a]\n";
		}
	
		my $n = 0;
		for my $x (0..$self->{total_layers}) {
			for my $y (0..$self->{layers}->[$x]-1) {
			    my $w='';
				for my $z (0..$self->{layers}->[$x-1]-1) {
					$w.="$self->{mesh}->[$n]->{_inputs}->[$z]->{weight},";
				}
				print FILE "n$n=$w$self->{mesh}->[$n]->{activation},$self->{mesh}->[$n]->{threshold},$self->{mesh}->[$n]->{mean}\n";
				$n++;
			}
		}
		
	    close(FILE);
	    
	    if(!(-f $file)) {
	    	$self->{error} = "Error writing to \"$file\".";
	    	return undef;
	    }

Mesh.pm

	}

	# Dumps the complete weight matrix of the network to STDOUT
	sub show {
		my $self	=	shift;
		my $n 		=	0;    
		no strict 'refs';
		for my $x (0..$self->{total_layers}) {
			for my $y (0..$self->{layers}->[$x]-1) {
				for my $z (0..$self->{layers}->[$x-1]-1) {
					print "$self->{mesh}->[$n]->{_inputs}->[$z]->{weight},";
				}
				$n++;
			}
			print "\n";
		}
	}
	  
	# Set the activation type of a specific layer.
	# usage: $net->activation($layer,$type);
	# $type can be: "linear", "sigmoid", "sigmoid_2".
	# You can use "sigmoid_1" as a synonym to "sigmoid". 
	# Type can also be a CODE ref, ( ref($type) eq "CODE" ).
	# If $type is a CODE ref, then the function is called in this form:
	# 	$output	= &$type($sum_of_inputs,$self);
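	#
	# For example, an illustrative sketch (not part of the module itself)
	# of a clipped-linear activation passed as a CODE ref:
	#
	#	$net->activation(1, sub {
	#		my ($sum, $self) = @_;
	#		return 0 if($sum < 0);
	#		return 1 if($sum > 1);
	#		return $sum;
	#	});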

Mesh.pm

	
	# Sets/gets the randomness factor in the network. Setting a value of 0 
	# disables random factors.
	sub random {
		my $self	=	shift;
		my $rand	=	shift;
		return $self->{random}	if(!(defined $rand));
		$self->{random}	=	$rand;
	}
	
	# Sets/gets column width for printing lists in debug modes 1,3, and 4.
	sub col_width {
		my $self	=	shift;
		my $width	=	shift;
		return $self->{col_width}	if(!$width);
		$self->{col_width}	=	$width;
	} 

	# Sets/gets the run constant factor in the network. Setting a value of 0 
	# disables the run constant factor. 
	sub const {

Mesh.pm

		my $self = shift;
		return undef if !$self->{error};
		chomp($self->{error});
		return $self->{error}."\n";
	}
	
	# Used to format array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $a		=	shift;
		my $b		=	shift;
		my $x;
		foreach my $el (@{$map}) { 
			my $str = ((int($el))?$a:$b);
			$str=$el."\0" if(!$a);
			print $str;	$x++;
			if($x>$break-1) { print "\n"; $x=0;	}
		}
		print "\n";
	}
	
	# Returns percentage difference between all elements of two
	# array refs of exact same length (in elements).
	# Now calculates actual difference in numerical value.
	sub pdiff {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $a1	=	shift;
		my $a2	=	shift;

Mesh.pm

		my ($a,$b,$diff,$t);
		$diff=0;
		for my $x (0..$a1s) {
			$a = $a1->[$x]; $b = $a2->[$x];
			if($a!=$b) {
				if($a<$b){$t=$a;$a=$b;$b=$t;}
				$a=1 if(!$a); $diff+=(($a-$b)/$a)*100;
			}
		}
		$a1s = 1 if(!$a1s);
		return sprintf("%.10f",($diff/$a1s));
	}
	
	# Returns $fa as a percentage of $fb
	sub p {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my ($fa,$fb)=(shift,shift); 
		sprintf("%.3f",$fa/$fb*100); #((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100
	}
	
	# Returns the index of the element in array REF passed with the highest 
	# comparative value
	sub high {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1 = shift; my $tmp = 0;
		for my $x (0..$#{$ref1}) { $tmp = $x if($ref1->[$x] > $ref1->[$tmp]) }
		return $tmp;

Mesh.pm

	$net->learn([0,0],[0]);
	$net->learn([0,1],[0]);
	$net->learn([1,0],[0]);
	$net->learn([1,1],[1]);
	
	# Present it with two test cases
	my $result_bit_1 = $net->run([0,1])->[0];
	my $result_bit_2 = $net->run([1,1])->[0];
	
	# Display the results
	print "AND test with inputs (0,1): $result_bit_1\n";
	print "AND test with inputs (1,1): $result_bit_2\n";
	

=head1 VERSION & UPDATES

This is version B<0.44>, an update release for version 0.43.

This fixed the usage conflict with perl 5.3.3.

With this version I have gone through and tuned up many areas
of this module, including the descent algorithm in learn(),

Mesh.pm

		[	7, 	296, 287,  212, 	19.3, 19.5, 17.8, 	2640, 2637, 2592], 	[	2641  ] 		
	);
	
	# Learn the set
	my $f = $net->learn_set(\@data, 
					  inc	=>	0.1,	
					  max	=>	500,
					 );
			
	# Print it 
	print "Forgetfullness: $f%";

    
This is a snippet from the example script examples/finance.pl, which demonstrates DOW average
prediction for the next month. A simpler set definition would look like this:

	my @data = (
		[ 0,1 ], [ 1 ],
		[ 1,0 ], [ 0 ]
	);
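
	$net->learn_set(\@data);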
	

Mesh.pm

=item $net->range();

See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions.
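
For example, range() is used in examples/ex_wine.pl to constrain the output
node to the three wine classes:

	$net->activation(2,range(1..3));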


=item $net->benchmark();

=item $net->benchmarked();

This returns a benchmark info string for the last learn() call.
It is easily printed as a string, as follows:

	print "Last learn() took ",$net->benchmark(),"\n";



=item $net->verbose($level);

=item $net->verbosity($level);

=item $net->v($level);

=item $net->debug($level)

Note: verbose(), verbosity(), and v() are all functional aliases for debug().

Toggles debugging off if called with $level = 0 or no arguments. There are several levels
of debugging. 

NOTE: Debugging verbosity has been toned down somewhat from AI::NeuralNet::BackProp,
but level 4 still prints the same amount of information as you were used to. The other
levels, however, are mostly for advanced use. There is not much explanation for the other
levels, but they are included for those of you that feel daring (or just plain bored).

Level 0 ($level = 0) : Default, no debugging information printed. All printing is 
left to calling script.

Level 1 ($level = 1) : Displays the activity between nodes, prints what values were
received and what they were weighted to.

Level 2 ($level = 2) : Just prints info from the learn() loop, in the form of "got: X, wanted Y"
type of information. This is about the third most useful debugging level, after level 12 and
level 4.

Level 3 ($level = 3) : I don't think I included any level 3 debugs in this version.

Level 4 ($level = 4) : This level is the one I use most. It is only used during learning. It
displays the current error (difference between actual outputs and the target outputs you
asked for), as well as the current loop number and the benchmark time for the last learn cycle.
Also printed are the actual outputs and the target outputs below the benchmark times.

Level 12 ($level = 12) : Level 12 prints a dot (period) [.] after each learning loop is
complete. This is useful for letting the user know that stuff is happening, but without
having to display any of the internal variables. I use this in the ex_aln.pl demo,
as well as the ex_agents.pl demo.




=item $net->save($filename);

Mesh.pm


If there were no errors, it will return a reference to $net.


=item $net->load($filename);

This will load from disk any network saved by save() and completely restore the internal
state to the point at which save() was called.

If the file is of an invalid file type, then load() will
return undef. Use the error() method, below, to print the error message.

If there were no errors, it will return a reference to $net.

UPDATE: $filename can now be a newline-separated set of mesh data. This enables you
to do $net->load(join("\n",<DATA>)) and other fun things. I added this mainly
for a demo I'm writing but am not quite done with yet. So, Cheers!
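
For example, a minimal sketch of that idiom (this assumes the contents of a
.mesh file written by save() have been pasted under __DATA__ in your script):

	my $net = new AI::NeuralNet::Mesh(2,2,1);
	$net->load(join("\n",<DATA>)) or print $net->error();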



=item $net->activation($layer,$type);

Mesh.pm


=item $net->node_threshold($layer,$node,$value);

This sets the activation threshold for a specific node in a layer. The threshold is only used
when activation is set to "sigmoid", "sigmoid_1", or "sigmoid_2".
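
For example (the values here are illustrative only):

	# Set the threshold of node 0 in layer 1 to 0.5
	$net->node_threshold(1,0,0.5);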

=item $net->join_cols($array_ref,$row_length_in_elements,$high_state_character,$low_state_character);

This is more of a utility function than any real necessary function of the package.
Instead of joining all the elements of the array together in one long string, like join() ,
it prints the elements of $array_ref to STDOUT, adding a newline (\n) after every $row_length_in_elements
elements have passed. Additionally, if you include a $high_state_character and a $low_state_character,
it will print the $high_state_character (which can be more than one character) for every element that
has a true value, and the $low_state_character for every element that has a false value. 
If you do not supply a $high_state_character, or the $high_state_character is a null, empty, or 
undefined string, join_cols() will just print the numerical value of each element separated
by a null character (\0). join_cols() defaults to the latter behaviour.
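
For example, to print a 5-column map with "#" for true elements and "." for
false ones (the map data here is illustrative):

	$net->join_cols([1,0,0,0,1,
	                 0,1,0,1,0,
	                 0,0,1,0,0],5,'#','.');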



=item $net->extend(\@array_of_hashes);

This allows you to re-apply any activations and thresholds with the same array ref with
which you created the network. This is useful for re-applying code ref activations after a load()
call without having to type the code ref twice.
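
You can also specify the extension in a simple array ref like this:

	$net->extend([2,3,1]);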

Mesh.pm



=item $net->p($a,$b);

Returns a floating point number which represents $a as a percentage of $b.
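
For example:

	print $net->p(5,20);	# prints 25.000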



=item $net->intr($float);

Rounds a floating-point number to an integer using sprintf() and int(). This provides
better rounding than just calling int() on the float. It is also used very heavily internally.
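
For example (these cases come straight from test.pl):

	print $net->intr(0.51);	# prints 1
	print $net->intr(0.00001);	# prints 0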



=item $net->high($array_ref);

Returns the index of the element in array REF passed with the highest comparative value.



=item $net->low($array_ref);

Returns the index of the element in array REF passed with the lowest comparative value.
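
For example (illustrative values):

	print $net->high([2,9,4]);	# prints 1 (the index of 9)
	print $net->low([2,9,4]);	# prints 0 (the index of 2)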



=item $net->pdiff($array_ref_A, $array_ref_B);

This function is used VERY heavily internally to calculate the difference in percent
between elements of the two array refs passed. It returns a %.10f (sprintf-format) 
percent string.
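
For example (illustrative values):

	# Prints the average percent difference between the two lists
	print $net->pdiff([1,2,3],[1,2,4]),"\n";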




=item $net->show();

This will dump a simple listing of all the weights of all the connections of every neuron
in the network to STDOUT.

Mesh.pm

	
	for (0..3) {
		$net->learn_set([
			$net->crunch("I love chips."),  $net->crunch("That's Junk Food!")),
			$net->crunch("I love apples."), $net->crunch("Good, Healthy Food.")),
			$net->crunch("I love pop."),    $net->crunch("That's Junk Food!")),
			$net->crunch("I love oranges."),$net->crunch("Good, Healthy Food."))
		]);
	}
	
	print $net->run_uc("I love corn."),"\n";


On my system, this responds with, "Good, Healthy Food." If you try to run run_uc() with
"I love pop.", though, you will probably get "Food! apples. apples." (At least it returns
that on my system.) As you can see, the associations are not yet perfect, but it can make
for some interesting demos!



=item $net->crunched($word);

examples/ex_add.pl

			[ 100, 100 ], [ 200  ],
			[ 150, 150 ], [ 300  ],
			[ 500, 500 ], [ 1000 ],
			[ 10,  10  ], [ 20   ],
			[ 15,  15  ], [ 30   ],
			[ 12,  8   ], [ 20   ],
		]);
		$addition->save('add.mesh');
	}

	print "Enter first number to add  : "; chomp(my $a = <>);
	print "Enter second number to add : "; chomp(my $b = <>);
	print "Result: ",$addition->run([$a,$b])->[0],"\n";
	
	

examples/ex_add2.pl

	my ( $layers, $inputs, $outputs, $top, $inc, $runtime,
	$forgetfulness );
	my @answers;
	my @predictions;
	my @percent_diff;
	
	$inputs = 3;
	$outputs = 1;
	my ($maxinc,$maxtop,$incstep);
	select OUTFP; $OUTPUT_AUTOFLUSH = 1; select STDOUT;
	print OUTFP "layers inc top forgetfulness time \%diff1 \%diff2 \%diff3
	\%diff4\n\n";
	
	for( $layers = 1; $layers <= 3; $layers++ ){
	 if( $layers <= 2 ){
	  $incstep = 0.025;
	 }
	 else{
	  $incstep = 0.05;
	 }
	 for( $inc=0.025; $inc <= 0.4; $inc += $incstep ){
	  if( $inc > .3 ){
	   $maxtop = 3;
	  }
	  else{
	   $maxtop = 4;
	  }
	  for( $top=1; $top <=$maxtop; $top++ ){
	   addnet();
	   printf OUTFP "%d %.3f %d %g %s %f %f %f %f\n",
	   $layers, $inc, $top, $forgetfulness, timestr($runtime),
	$percent_diff[0],
	   $percent_diff[1], $percent_diff[2], $percent_diff[3];
	   print "layers inc top forgetfulness time \%diff1 \%diff2 \%diff3
	\%diff4\n";
	   printf "%d %.3f %d %g %s %f %f %f %f\n",
	   $layers, $inc, $top, $forgetfulness, timestr($runtime),
	$percent_diff[0],
	   $percent_diff[1], $percent_diff[2], $percent_diff[3];
	  }
	 }
	}
	
	#....................................................
	sub addnet
	{
	 print "\nCreate a new net with $layers layers, 3 inputs, and 1 output\n";
	 my $net = AI::NeuralNet::Mesh->new($layers,3,1);
	
	 # Disable debugging
	 $net->debug(0);
	
	
	 my @data = (
	  [   2633, 2665, 2685],  [ 2633 + 2665 + 2685 ],
	  [   2623, 2645, 2585],  [ 2623 + 2645 + 2585 ],
	  [  2627, 2633, 2579],  [ 2627 + 2633 + 2579 ],
	  [   2611, 2627, 2563],  [ 2611 + 2627 + 2563 ],
	  [  2640, 2637, 2592],  [ 2640 + 2637 + 2592 ]
	 );
	
	 print "Learning started, will cycle $top times with inc = $inc\n";
	
	  # Make it learn the whole dataset $top times
	  my @list;
	
	 my $t1=new Benchmark;
	 for my $a (1..$top)
	 {
	  print "Outer Loop: $a : ";
	
	  $forgetfulness = $net->learn_set( \@data,
	           inc  => $inc,
	           max  => 500,
	           error => -1);
	
	  print "Forgetfulness: $forgetfulness %\n";
	
	 }
	 my $t2=new Benchmark;
	
	 $runtime = timediff($t2,$t1);
	 print "run took ",timestr($runtime),"\n";
	
	
	 my @input = ( [ 2222, 3333, 3200 ],
	      [ 1111, 1222, 3211 ],
	      [ 2345, 2543, 3000 ],
	      [ 2654, 2234, 2534 ] );
	
	    test_net( $net, @input );
	}
	#.....................................................................

examples/ex_add2.pl

	  my @set;
	  my $fb;
	  my $net = shift;
	  my @data = @_;
	  undef @percent_diff; #@answers; undef @predictions;
	
	  for( my $i=0; defined( $data[$i] ); $i++ ){
	   @set = @{ $data[$i] };
	   $fb = $net->run(\@set)->[0];
	   # Print output
	   print "Test Factors: (",join(',',@set),")\n";
	   my $answer = eval( join( '+',@set ));
	   push @percent_diff, 100.0 * abs( $answer - $fb )/ $answer;
	   print "Prediction : $fb      answer: $answer\n";
	  }
	 }
	
	

examples/ex_aln.pl


	File:	examples/ex_aln.pl
	Author:	Josiah Bryan, jdb@wcoil.com
	Desc:
	
	This is a simple example of a _basic_ ALN implementation in
	under 210 lines of code. In this demo we make use of the 
	custom node connector as described in the POD. We also 
	insert our own method over the node's internal adjust_weight()
	method to make ALN learning a bit easier. This demo also adds
	a temporary method to the network to print the logical type of 
	each node, called print_aln();

	print_aln() prints a simple diagram of the
	network similar to this (this is for a $net=Tree(8,1) with 
	$net->learn([1,1,0,1,0,1,1,1],[0]), and each line represents 
	a layer):
	
	L R L L L L L L
	OR OR OR OR
	OR OR
	AND
	
	All the standard methods that work on AI::NeuralNet::Mesh work

examples/ex_aln.pl

	# Create a new ALN tree with 8 leaves and 1 root node.
	# Note: Our ALN trees can have more than one root node! Yippee! :-)
	# Just a little benefit of deriving our ALNs from 
	# AI::NeuralNet::Mesh.
	#
	my $net = Tree(8,1);
	
	# Use our nifty dot verbosity.
	$net->v(12);
	
	# Learn a pattern and print stats.
	if(!$net->load('aln.mesh')) {
		print "Learning";
		print "Done!\nLearning took ",$net->learn([1,1,0,1,0,1,1,1],[0]),"\n";
		$net->save('aln.mesh');
	}
		
	# Print logic gate types
	$net->print_aln();
	
	# Test it out
	print "\nPattern: [1,1,0,1,0,1,1,1]".
		  "\nResult: ",$net->run([1,1,1,1,1,1,1,1])->[0],"\n";




######################################################################
#-################ ALN Implementation Code  ########################-#
######################################################################
	
	# Build a basic ALN tree network (_very_ basic, only implements
	# the node types, and only two learning benefits from ALN theory are
	# realized.) Also adds a method to the neural network gates, print_aln().
	sub Tree {
		# Grab our leaves and roots
		my $leaves = shift;
		my $roots  = shift || $leaves;
	    
	    # Replace the load function with a new one to preserve the
	    # load activations. We have to add this up here because next
	    # thing we do is check if they passed a file name as $leaves,
	    # and we need to have our new load sub already in place before
	    # we try to load anything in $leaves.

examples/ex_aln.pl

        $leaves++,goto __LEAF_IT if(($leaves/2)%2);
        # Create a layer spec array with every layer having half
        # the number of nodes of the layer before it
        while($leaves!=$roots) { 
			$specs->[$level++]={ nodes=>$leaves, activation=>$act };
	        $leaves/=2;
	        $leaves++ if($leaves%2 && $leaves!=$roots);
		}
		$specs->[$level++]={ nodes=>$roots, activation=>$act };
		
		# Add a method to the net to print out the node types
		*{'AI::NeuralNet::Mesh::print_aln'} = sub {
			my $self=shift;
			my ($c,$l)=(0,0);
			for(0..$self->{total_nodes}-1) {
				my $b1 = intr($self->{mesh}->[$_]->{_inputs}->[0]->{weight});
				my $b2 = intr($self->{mesh}->[$_]->{_inputs}->[1]->{weight});
			    print "OR "  if( $b1 &&  $b2);
				print "AND " if(!$b1 && !$b2);
				print "L "   if( $b1 && !$b2);
				print "R "   if(!$b1 &&  $b2);
				$c=0,$l++,print "\n" if++$c>=$self->{layers}->[$l];
			}
		};
		
		# Add a custom node weight adjuster to learn faster
		*{'AI::NeuralNet::Mesh::node::adjust_weight'} = sub {
			my ($self,$inc,$target) = @_; 
			my $f;
			my $b1 = intr($self->{mesh}->[$_]->{_inputs}->[0]->{weight});
			my $b2 = intr($self->{mesh}->[$_]->{_inputs}->[1]->{weight});
			$f=1 if( $b1 &&  $b2);

examples/ex_alpha.pl

	# Build a test map 
	my $tmp	=	[0,1,1,1,0,
				 1,0,0,0,1,
				 1,0,0,0,1,
				 1,1,1,1,1,
				 1,0,0,0,1,
				 1,0,0,0,1,
				 1,0,0,0,1];
	
	# Display test map
	print "\nTest map:\n";
	$net->join_cols($tmp,5);
	
	# Display network results
	print "Letter index matched: ",$net->run($tmp)->[0],"\n";
	

examples/ex_and.pl

	if(!$net->load('and.mesh')) {
		$net->learn_set([	
			[1,1], [1],
			[1,0], [0],
			[0,1], [0],
			[0,0], [0],
		]);
		$net->save('and.mesh');
	}

	print "Learning complete.\n";
	print "Testing with a gate value of (0,0):",$net->run([0,0])->[0],"\n";
	print "Testing with a gate value of (0,1):",$net->run([0,1])->[0],"\n";
	print "Testing with a gate value of (1,0):",$net->run([1,0])->[0],"\n";
	print "Testing with a gate value of (1,1):",$net->run([1,1])->[0],"\n";
	
	

examples/ex_bmp.pl

			1,0,0,0,0,
			1,1,1,1,0,
			0,0,0,1,0,
			1,1,1,1,2	],		[	5	],
		
	);
    
    
	# If we haven't saved the net already, do the learning
	if(!$net->load('images.mesh')) {
		print "\nLearning started...\n";
		
		# Make it learn the whole dataset $top times
		my @list;
		my $top=3;
		for my $a (0..$top) {
			my $t1=new Benchmark;
			print "\n\nOuter Loop: $a\n";
			
			# Test forgetfulness
			my $f = $net->learn_set(\@data,	inc => 0.1);
			
			# Print it 
			print "\n\nForgetfullness: $f%\n";

			# Save net to disk				
			$net->save('images.mesh');

			my $t2=new Benchmark;
			my $td=timediff($t2,$t1);
			print "\nLoop $a took ",timestr($td),"\n";
		}
	}
                                                                          
	my @set=(		0,1,1,1,0,
					1,0,0,0,0,
					1,1,1,0,0,
					1,0,0,0,0,
					0,1,1,1,2		);
		
	
	# Image number
	my $fb=$net->run(\@set)->[0];
	
	
	# Print output
	print "\nTest Map: \n";
	$net->join_cols(\@set,5);
	print "Image number matched: $fb\n";
	


examples/ex_bmp2.pl

	# Create our model input
	my @map	=	(1,1,1,1,1,
				 0,0,1,0,0,
				 0,0,1,0,0,
				 0,0,1,0,0,
				 1,0,1,0,0,
				 1,0,1,0,0,
				 1,1,1,0,0);
				 
	
	print "\nLearning started...\n";
	
	print $net->learn(\@map,'J');
	
	print "Learning done.\n";
		
	# Build a test map 
	my @tmp	=	(0,0,1,1,1,
				 1,1,1,0,0,
				 0,0,0,1,0,
				 0,0,0,1,0,
				 0,0,0,1,0,                                          
				 0,0,0,0,0,
				 0,1,1,0,0);
	
	# Display test map
	print "\nTest map:\n";
	$net->join_cols(\@tmp,5,'');
	
	print "Running test...\n";
		                    
	# Run the actual test and get network output
	print "Result: ",$net->run_uc(\@tmp),"\n";
	
	print "Test run complete.\n";
	
	

examples/ex_crunch.pl

	my $set  = [
		"I love chips.",	$bad,
		"I love apples.",	$good,
		"I love pop.",		$bad,
		"I love oranges.",	$good
	];
	
	#$net->debug(4);
	for (0..2) {
		my $f = $net->learn_set($set);
		print "Forgotten: $f%\n";
	}
	
	# run() automatically crunches the string (run_uc() uses run() internally) and
	# run_uc() automatically uncrunches the results.
	print $net->run_uc("I love pop-tarts.");

examples/ex_dow.pl

		[	3, 	244, 235,  164, 	19.6, 19.8, 18.1, 	2627, 2633, 2579], 	[	2630  ],
		[	4, 	261, 244,  181, 	19.6, 19.6, 18.1, 	2611, 2627, 2563], 	[	2620  ],
		[	5, 	276, 261,  196, 	19.5, 19.6, 18.0, 	2630, 2611, 2582], 	[	2638  ],
		[	6, 	287, 276,  207, 	19.5, 19.5, 18.0, 	2637, 2630, 2589], 	[	2635  ],
		[	7, 	296, 287,  212, 	19.3, 19.5, 17.8, 	2640, 2637, 2592], 	[	2641  ] 		
	);
    
    
	# If we haven't saved the net already, do the learning
	if(!$net->load('DOW.mesh')) {
		print "\nLearning started...\n";
		
		# Make it learn the whole dataset $top times
		my @list;
		my $top=1;
		for my $a (0..$top) {
			my $t1=new Benchmark;
			print "\n\nOuter Loop: $a\n";
			
			# Test forgetfulness
			my $f = $net->learn_set(\@data,	inc		=>	0.2,	
											max		=>	2000,
											error	=>	-1);
			
			# Print it 
			print "\n\nForgetfullness: $f%\n";

			# Save net to disk				
            $net->save('DOW.mesh');
            
			my $t2=new Benchmark;
			my $td=timediff($t2,$t1);
			print "\nLoop $a took ",timestr($td),"\n";
		}
	}
                                                                          
	# Run a prediction using fake data
	#			Month	CPI  CPI-1 CPI-3 	Oil  Oil-1 Oil-3    Dow   Dow-1 Dow-3    
	my @set=(	10,		352, 309,  203, 	18.3, 18.7, 16.1, 	2592, 2641, 2651	  ); 
	
	# Dow Ave (output)	
	my $fb=$net->run(\@set)->[0];
	
	# Print output
	print "\nTest Factors: (",join(',',@set),")\n";
	print "DOW Prediction for Month #11: $fb\n";
	

examples/ex_mult.pl

			[ 2,   4   ], [ 8      ],
			[ 2,   8   ], [ 16     ],
			[ 9,   9   ], [ 81     ],
			[ 10,  5   ], [ 50     ],
			[ 20,  10  ], [ 200    ],
			[ 100, 50  ], [ 5000   ],
		]);
		$multiply->save('mult.mesh');
	}
		
	print "Enter first number to multiply  : "; chomp(my $a = <>);
	print "Enter second number to multiply : "; chomp(my $b = <>);
	
	print "Result: ",$multiply->run([$a,$b])->[0],"\n";
	
	

examples/ex_or.pl

	if(!$net->load('or.mesh')) {
		$net->learn_set([	
			[1,1], [1],
			[1,0], [1],
			[0,1], [1],
			[0,0], [0],
		]);
		$net->save('or.mesh');
	}

	print "Learning complete.\n";
	print "Testing with a gate value of (0,0):",$net->run([0,0])->[0],"\n";
	print "Testing with a gate value of (0,1):",$net->run([0,1])->[0],"\n";
	print "Testing with a gate value of (1,0):",$net->run([1,0])->[0],"\n";
	print "Testing with a gate value of (1,1):",$net->run([1,1])->[0],"\n";
	
	

examples/ex_pat.pl

	File:   examples/ex_pat.pl
	Author: Tobias Bronx, <tobiasb@odin.funcom.com>
	Desc:
	
		This demonstrates simple pattern learning.

=cut

	use AI::NeuralNet::Mesh;
	$net=AI::NeuralNet::Mesh->new(2,2,2);
	print $net->learn([2,2],[2,2],max=>3),"\n"; 
	for (0..1) {
		for my $a (1..2) { 
			for my $b (1..2) { 
				@a=($a,$b); 
				print join(",",@a),":",join(",",@{$net->run(\@a)}), "\n"; 
				$net->learn(\@a,\@a, max=>100,inc=>0.17);
				print join(",",@{$net->run(@a)}),"\n";
			}
		}
	}
	print "1,2:",join(",",@{$net->run([1,2])}),"\n";

examples/ex_pcx.pl

		10x10 bitmaps from a PCX file based on their 
		whiteness. (This was taught on a b&w 320x200 
		PCX of the author at an early age :-)
=cut
	
	use AI::NeuralNet::Mesh;
	
	# Set block sizes
	my ($bx,$by)=(10,10);
	
	print "Creating Neural Net...";
	my $net=AI::NeuralNet::Mesh->new(1,$bx*$by,1);
	$net->{col_width} = $bx;
	print "Done!\n";
	
	print "Loading bitmap...";
	my $img = $net->load_pcx("josiah.pcx");             
	print "Done!\n";
	
	print "Comparing blocks...\n";
	my $white = $img->get_block([0,0,$bx,$by]);
	
	my ($x,$y,$tmp,@score,$s,@blocks,$b);
	for ($x=0;$x<320;$x+=$bx) {
		for ($y=0;$y<200;$y+=$by) {
			$blocks[$b++]=$img->get_block([$x,$y,$x+$bx,$y+$by]);
			$score[$s++]=$net->pdiff($white,$blocks[$b-1]);
			print "Block at [$x,$y], index [$s] scored ".$score[$s-1]."%\n";
		}
	}
	print "Done!";
	
	print "High score:\n";
	print_ref($blocks[$net->high(\@score)],$bx); 
	print "Low score:\n";
	print_ref($blocks[$net->low(\@score)],$bx); 
	
	$net->debug(4);
	
	if(!$net->load("pcx.mesh")) {
		print "Learning high block...\n";
		print $net->learn($blocks[$net->high(\@score)],"highest");
		
		$net->save("pcx.mesh");
		
		print "Learning low block...\n";
		$net->learn($blocks[$net->low(\@score)],"lowest");
	}
	
	print "Testing random block...\n";
	
	print "Result: ",$net->run($blocks[rand()*$b])->[0],"\n";
	
	print "Bencmark for run: ", $net->benchmarked(), "\n";
	
	$net->save("pcx2.net");
	
	sub print_ref {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $x;
		my @els = (' ','.',',',':',';','%','#');
		foreach my $el (@{$map}) { 
			my $str = int($el/255*6);
			print $els[$str];
			$x++;
			if($x>$break-1) {
				print "\n";
				$x=0;
			}
		}
		print "\n";
	}
		                                         

examples/ex_pcxl.pl

		
		This just demonstrates simple usage of the pcx loader.
	
=cut

	use AI::NeuralNet::Mesh;
	
	my $net=AI::NeuralNet::Mesh->new(2,2);
	
	my $img = $net->load_pcx("josiah.pcx");
        print "ERROR: ",$net->error(),"\n" if($net->error());
	
	$net->join_cols($img->get_block([0,0,50,50]),50,0);
	

examples/ex_sub.pl

			[ 1,   1   ], [ 0      ] ,
			[ 2,   1   ], [ 1      ],
			[ 10,  5   ], [ 5      ],
			[ 20,  10  ], [ 10     ],
			[ 100, 50  ], [ 50     ],
			[ 500, 200 ], [ 300    ],
		]);
		$subtract->save('sub.mesh');
	}
		
	print "Enter first number to subtract  : "; chomp(my $a = <>);
	print "Enter second number to subtract : "; chomp(my $b = <>);
	
	print "Result: ",$subtract->run([$a,$b])->[0],"\n";
	
	

examples/ex_synop.pl

	# Create a new network with 1 layer, 5 inputs, and 5 outputs.
	my $net = new AI::NeuralNet::Mesh(1,5,5);
	
	# Add a small amount of randomness to the network
	$net->random(0.001);

	# Demonstrate a simple learn() call
	my @inputs = ( 0,0,1,1,1 );
	my @outputs = ( 1,0,1,0,1 );
	
	print $net->learn(\@inputs, \@outputs),"\n";

	# Create a data set to learn
	my @set = (
		[ 2,2,3,4,1 ], [ 1,1,1,1,1 ],
		[ 1,1,1,1,1 ], [ 0,0,0,0,0 ],
		[ 1,1,1,0,0 ], [ 0,0,0,1,1 ]	
	);
	
	# Demo learn_set()
	my $f = $net->learn_set(\@set);
	print "Forgetfulness: $f unit\n";
	
	# Crunch a bunch of strings and return array refs
	my $phrase1 = $net->crunch("I love neural networks!");
	my $phrase2 = $net->crunch("Jay Leno is weird.");
	my $phrase3 = $net->crunch("The rain in spain...");
	my $phrase4 = $net->crunch("Tired of word crunching yet?");

	# Make a data set from the array refs
	my @phrases = (
		$phrase1, $phrase2,
		$phrase3, $phrase4
	);

	# Learn the data set	
	$net->learn_set(\@phrases);
	
	# Run a test phrase through the network
	my $test_phrase = $net->crunch("I love neural networking!");
	my $result = $net->run($test_phrase);
	
	# Get this, it prints "Jay Leno is  networking!" ...  LOL!
	print $net->uncrunch($result),"\n";

examples/ex_wine.pl

	# Enable debugging
	$net->verbose(4);

	# Load the data set
	my $data = $net->load_set('wine.dat',0);
	
	# Separate data based on class
	my $sets=[];
	for my $i (0..$#{$data}/2) {
		my $c = $data->[$i*2+1]->[0];
		print "Class of set $i: $c                                  \r";
		# inputs
		push @{$sets->[$c]}, $data->[$i*2];
		# class
		push @{$sets->[$c]}, $data->[$i*2+1];
	}                                  
	
			
	for(0..$#{$sets}) {
		next if(!defined $sets->[$_]->[0]);
		print "Size of set $_: ",$#{$sets->[$_]}/2,"\n";
	}
	
	# If we haven't saved the net already, do the learning
	if(!$net->load('wine.mesh')) {
		print "\nLearning started...\n";
		
		# Make it learn the whole dataset $top times
		my @list;
		my $top=5;
		for my $a (0..$top) {
			print "\n\nOuter Loop: $a\n";
			
			for(0..$#{$sets}) {
				next if(!defined $sets->[$_]->[0]);
				my $t1=new Benchmark;
				
				# Test forgetfulness
				my $f = $net->learn_set($sets->[$_],	inc		=>	0.2,	
														max		=>	2000,
														error	=>	0.01,
														leave	=>	2);
				
				# Print it 
				print "\n\nForgetfullness: $f%\n";
	
				my $t2=new Benchmark;
				my $td=timediff($t2,$t1);
				print "\nLoop [$a,$_] took ",timestr($td),"\n";
			}
			
			# Save net to disk				
            $net->save('wine.mesh');
		}
	}

	# Set activation on the output node to constrain values
	# to a specific range.
	$net->activation(2,range(1..3));
	
	my @cnts;
	for(0..$#{$sets}) {
		next if(!defined $sets->[$_]->[0]);
		my $s=$#{$sets->[$_]}/2;
		my $cnt=0;
		print "Set: $_\n";
		my $results=$net->run_set($sets->[$_]);
		for my $x (0..$s-1) {
			$cnt++ if($results->[$x]->[0]==$_);
		}
		$cnts[$_]=$cnt;
	}
	
	for(0..$#{$sets}) {
		next if(!defined $sets->[$_]->[0]);
		my $s=$#{$sets->[$_]}/2;
		print "Class $_: $cnts[$_] correct out of $s (",$net->p($cnts[$_],$s),"%).\n";
	}
		

mesh.htm

        $net-&gt;learn([0,0],[0]);
        $net-&gt;learn([0,1],[0]);
        $net-&gt;learn([1,0],[0]);
        $net-&gt;learn([1,1],[1]);

        # Present it with two test cases
        my $result_bit_1 = $net-&gt;run([0,1])-&gt;[0];
        my $result_bit_2 = $net-&gt;run([1,1])-&gt;[0];

        # Display the results
        print &quot;AND test with inputs (0,1): $result_bit_1\n&quot;;
        print &quot;AND test with inputs (1,1): $result_bit_2\n&quot;;

</PRE>
<P>
<HR>
<H1><A NAME="version & updates">VERSION &amp; UPDATES</A></H1>
<P>This is version <STRONG>0.43</STRONG>, the second release of this module.</P>
<P>With this version I have gone through and tuned up many areas
of this module, including the descent algorithm in learn(),
as well as four custom activation functions, and several export 
tag sets. With this release, I have also included a few

mesh.htm

                [       7,      296, 287,  212,         19.3, 19.5, 17.8,       2640, 2637, 2592],      [       2641  ]                 
        );

        # Learn the set
        my $f = $net-&gt;learn_set(\@data, 
                                          inc   =&gt;      0.1,    
                                          max   =&gt;      500,
                                         );

        # Print it 
        print &quot;Forgetfullness: $f%&quot;;</PRE>
<P></P>
<P>This is a snippet from the example script examples/finance.pl, which demonstrates DOW average
prediction for the next month. A simpler set definition would look like this:</P>
<PRE>
        my @data = (
                [ 0,1 ], [ 1 ],
                [ 1,0 ], [ 0 ]
        );

        $net-&gt;learn_set(\@data);</PRE>

mesh.htm

<P></P>
<DT><STRONG><A NAME="item_range">$net-&gt;range();</A></STRONG><BR>
<DD>
See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions.
<P></P>
<DT><STRONG><A NAME="item_benchmark">$net-&gt;benchmark();</A></STRONG><BR>
<DD>
<DT><STRONG><A NAME="item_benchmarked">$net-&gt;benchmarked();</A></STRONG><BR>
<DD>
This returns a benchmark info string for the last <A HREF="#item_learn"><CODE>learn()</CODE></A> call.
It is easily printed as a string, as follows:
<PRE>
        print &quot;Last learn() took &quot;,$net-&gt;benchmark(),&quot;\n&quot;;</PRE>
<P></P>
<DT><STRONG><A NAME="item_verbose">$net-&gt;verbose($level);</A></STRONG><BR>
<DD>
<DT><STRONG><A NAME="item_verbosity">$net-&gt;verbosity($level);</A></STRONG><BR>
<DD>
<DT><STRONG><A NAME="item_v">$net-&gt;v($level);</A></STRONG><BR>
<DD>
<DT><STRONG><A NAME="item_debug">$net-&gt;debug($level)</A></STRONG><BR>
<DD>
Note: verbose(), verbosity(), and <A HREF="#item_v"><CODE>v()</CODE></A> are all functional aliases for debug().
<P>Toggles debugging off if called with $level = 0 or no arguments. There are several levels
of debugging.</P>
<P>NOTE: Debugging verbosity has been toned down somewhat from AI::NeuralNet::BackProp,
but level 4 still prints the same amount of information as you were used to. The other
levels, however, are mostly for advanced use. There is not much explanation for the other
levels, but they are included for those of you that feel daring (or just plain bored).</P>
<P>Level 0 ($level = 0) : Default, no debugging information printed. All printing is 
left to calling script.</P>
<P>Level 1 ($level = 1) : Displays the activity between nodes, prints what values were
received and what they were weighted to.</P>
<P>Level 2 ($level = 2) : Just prints info from the <A HREF="#item_learn"><CODE>learn()</CODE></A> loop, in the form of ``got: X, wanted Y''
type of information. This is about the third most useful debugging level, after level 12 and
level 4.</P>
<P>Level 3 ($level = 3) : I don't think I included any level 3 debugs in this version.</P>
<P>Level 4 ($level = 4) : This level is the one I use most. It is only used during learning. It
displays the current error (difference between actual outputs and the target outputs you
asked for), as well as the current loop number and the benchmark time for the last learn cycle.
Also printed are the actual outputs and the target outputs below the benchmark times.</P>
<P>Level 12 ($level = 12) : Level 12 prints a dot (period) [.] after each learning loop is
complete. This is useful for letting the user know that stuff is happening, but without
having to display any of the internal variables. I use this in the ex_aln.pl demo,
as well as the ex_agents.pl demo.</P>
<P></P>
<DT><STRONG><A NAME="item_save">$net-&gt;save($filename);</A></STRONG><BR>
<DD>
This will save the complete state of the network to disk, including all weights and any
words crunched with <A HREF="#item_crunch"><CODE>crunch()</CODE></A> . Also saves the layer size and activations of the network.
<P>NOTE: The only activation type NOT saved is the CODE ref type, which must be set again

mesh.htm

<P>This method will return undef if there was a problem with writing the file. If there is an
error, it will set the internal error message, which you can retrieve with the <A HREF="#item_error"><CODE>error()</CODE></A> method,
below.</P>
<P>If there were no errors, it will return a reference to $net.</P>
<P></P>
<DT><STRONG><A NAME="item_load">$net-&gt;load($filename);</A></STRONG><BR>
<DD>
This will load from disk any network saved by <A HREF="#item_save"><CODE>save()</CODE></A> and completely restore the internal
state to the point at which <A HREF="#item_save"><CODE>save()</CODE></A> was called.
<P>If the file is of an invalid file type, then <A HREF="#item_load"><CODE>load()</CODE></A> will
return undef. Use the <A HREF="#item_error"><CODE>error()</CODE></A> method, below, to print the error message.</P>
<P>If there were no errors, it will return a reference to $net.</P>
<P>UPDATE: $filename can now be a newline-separated set of mesh data. This enables you
to do $net-&gt;load(join(``\n'',&lt;DATA&gt;)) and other fun things. I added this mainly
for a demo I'm writing but am not quite done with yet. So, Cheers!</P>
<P></P>
<DT><STRONG><A NAME="item_activation">$net-&gt;activation($layer,$type);</A></STRONG><BR>
<DD>
This sets the activation type for layer <CODE>$layer</CODE>.
<P><CODE>$type</CODE> can be one of four values:</P>
<PRE>

mesh.htm

<P></P>
<DT><STRONG><A NAME="item_node_threshold">$net-&gt;node_threshold($layer,$node,$value);</A></STRONG><BR>
<DD>
This sets the activation threshold for a specific node in a layer. The threshold is only used
when activation is set to ``sigmoid'', ``sigmoid_1'', or ``sigmoid_2''.
<P></P>
<DT><STRONG><A NAME="item_join_cols">$net-&gt;join_cols($array_ref,$row_length_in_elements,$high_state_character,$low_state_character);</A></STRONG><BR>
<DD>
This is more of a utility function than any real necessary function of the package.
Instead of joining all the elements of the array together in one long string, like <CODE>join()</CODE> ,
it prints the elements of $array_ref to STDOUT, adding a newline (\n) after every $row_length_in_elements
elements have passed. Additionally, if you include a $high_state_character and a $low_state_character,
it will print the $high_state_character (which can be more than one character) for every element that
has a true value, and the $low_state_character for every element that has a false value. 
If you do not supply a $high_state_character, or the $high_state_character is a null, empty, or 
undefined string, <A HREF="#item_join_cols"><CODE>join_cols()</CODE></A> will just print the numerical value of each element separated
by a null character (\0). <A HREF="#item_join_cols"><CODE>join_cols()</CODE></A> defaults to the latter behaviour.
<P></P>
<DT><STRONG><A NAME="item_extend">$net-&gt;extend(\@array_of_hashes);</A></STRONG><BR>
<DD>
This allows you to re-apply any activations and thresholds with the same array ref with
which you created the network. This is useful for re-applying code ref activations after a <A HREF="#item_load"><CODE>load()</CODE></A>
call without having to type the code ref twice.
<P>You can also specify the extension in a simple array ref like this:</P>
<PRE>
        $net-&gt;extend([2,3,1]);

mesh.htm

can also be called independently. This will add nodes as needed to layer <CODE>$layer</CODE> to 
make the nodes in layer equal to $total_nodes.
<P>NOTE: Your net will probably require re-training after adding nodes.</P>
<P></P>
<DT><STRONG><A NAME="item_p">$net-&gt;p($a,$b);</A></STRONG><BR>
<DD>
Returns a floating point number which represents $a as a percentage of $b.
<P></P>
<DT><STRONG><A NAME="item_intr">$net-&gt;intr($float);</A></STRONG><BR>
<DD>
Rounds a floating-point number to an integer using <CODE>sprintf()</CODE> and <CODE>int()</CODE>. This provides
better rounding than just calling <CODE>int()</CODE> on the float. It is also used very heavily internally.
<P></P>
<DT><STRONG><A NAME="item_high">$net-&gt;high($array_ref);</A></STRONG><BR>
<DD>
Returns the index of the element in array REF passed with the highest comparative value.
<P></P>
<DT><STRONG><A NAME="item_low">$net-&gt;low($array_ref);</A></STRONG><BR>
<DD>
Returns the index of the element in array REF passed with the lowest comparative value.
<P></P>
<DT><STRONG><A NAME="item_pdiff">$net-&gt;pdiff($array_ref_A, $array_ref_B);</A></STRONG><BR>
<DD>
This function is used VERY heavily internally to calculate the difference in percent
between elements of the two array refs passed. It returns a %.10f (sprintf-format) 
percent string.
<P></P>
<DT><STRONG><A NAME="item_show">$net-&gt;show();</A></STRONG><BR>
<DD>
This will dump a simple listing of all the weights of all the connections of every neuron
in the network to STDOUT.
<P></P>
<DT><STRONG><A NAME="item_crunch">$net-&gt;crunch($string);</A></STRONG><BR>
<DD>
This splits a string passed with /[\s\t]/ into an array ref containing unique indexes

mesh.htm


        for (0..3) {
                $net-&gt;learn_set([
                        $net-&gt;crunch(&quot;I love chips.&quot;),  $net-&gt;crunch(&quot;That's Junk Food!&quot;)),
                        $net-&gt;crunch(&quot;I love apples.&quot;), $net-&gt;crunch(&quot;Good, Healthy Food.&quot;)),
                        $net-&gt;crunch(&quot;I love pop.&quot;),    $net-&gt;crunch(&quot;That's Junk Food!&quot;)),
                        $net-&gt;crunch(&quot;I love oranges.&quot;),$net-&gt;crunch(&quot;Good, Healthy Food.&quot;))
                ]);
        }

        print $net-&gt;run_uc(&quot;I love corn.&quot;)),&quot;\n&quot;;</PRE>
<P>On my system, this responds with, ``Good, Healthy Food.'' If you try to run <CODE>run_uc()</CODE> with
``I love pop.'', though, you will probably get ``Food! apples. apples.'' (At least it returns
that on my system.) As you can see, the associations are not yet perfect, but it can make
for some interesting demos!</P>
<P></P>
<DT><STRONG><A NAME="item_crunched">$net-&gt;crunched($word);</A></STRONG><BR>
<DD>
This will return undef if the word is not in the internal crunch list, or it will return the
index of the word if it exists in the crunch list.
<P>If the word is not in the list, it will set the internal error value with a text message

test.pl

# Before `make install' is performed this script should be runnable with
# `make test'. After `make install' it should work as `perl test.pl'

BEGIN { $| = 1; print "1..13\n"; }
END {print "not ok 1\n" unless $loaded;}

sub t { my $f=shift;$t++;my $str=($f)?"ok $t":"not ok $t";print $str,"\n";}

use AI::NeuralNet::Mesh;
$loaded = 1;
t 1;

my $net = new AI::NeuralNet::Mesh(2,2,1);
t $net;
t ($net->intr(0.51) eq 1);
t ($net->intr(0.00001) eq 0);
t ($net->intr(0.50001) eq 1);


