AI-NeuralNet-Mesh


Changes

                - custom node activations
                - increased learning speed

0.43  Wed Sep 14 03:13:01 2000
        - Third release, by Josiah Bryan
        - Several bug fixes
                - fixed 'flag' option on learn_set()
                - fixed multiple-output bug
                - fixed learning gradient error
        - Improved learning function to not degrade increment automatically
        - Added CSV-style dataset loader
        - Added Export tags
        - Added four custom node activations, including range and ramp
        - Added several misc. extra functions
        - Added ALN example demo


Mesh.pm

		$layer_specs = [split(',',"$nodes," x $layers)];
		$layer_specs->[$#{$layer_specs}+1] = $outputs;
		$self->{layers} = $layer_specs;
	}

	# First create the individual nodes
	for my $x (0..$tmp-1) {
		$self->{mesh}->[$x] = AI::NeuralNet::Mesh::node->new($self);
	}

	# Get an instance of an output (data collector) node
	$self->{output} = AI::NeuralNet::Mesh::output->new($self);

	# Connect the output layer to the data collector
	for $x (0..$outputs-1) {
		$self->{mesh}->[$tmp-$outputs+$x]->add_output_node($self->{output});
	}

	# Now we use the _c() method to connect the layers together.
	$y = 0;
	my $c = $connector.'($self,$y,$y+$z,$y+$z,$y+$z+$layer_specs->[$x+1])';
	for $x (0..$layers-1) {
		$z = $layer_specs->[$x];
		d("layer $x size: $z (y:$y)\n,",1);

README

** What is this?

AI::NeuralNet::Mesh is an optimized, accurate neural network Mesh.
It was designed with accuracy and speed in mind.

This network model is very flexible. It will allow for classic binary
operation or any range of integer or floating-point inputs you care
to provide. With this you can change activation types on a per-node or
per-layer basis (you can even include your own anonymous subs as
activation types). You can add sigmoid transfer functions and control
the threshold. You can learn data sets in batch, and load CSV data
set files. You can do almost anything you need to with this module.
This code is designed to be flexible. Any new ideas for this module?
Contact Josiah Bryan at <jdb@wcoil.com>
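A minimal sketch of that per-layer and per-node activation control follows. The activation() and node_activation() accessors and the ($sum, $net) argument list of a custom sub are assumptions about the module's interface, not taken verbatim from the excerpts on this page:

	use AI::NeuralNet::Mesh;

	my $net = AI::NeuralNet::Mesh->new(2, 5, 1);

	# Assumed accessor: set every node in layer 0 to the built-in
	# sigmoid activation type.
	$net->activation(0, 'sigmoid');

	# Assumed accessor: give node 2 of layer 1 its own anonymous sub
	# as an activation type, here a hard threshold at 0.5.
	$net->node_activation(1, 2, sub {
		my ($sum, $net) = @_;
		return $sum >= 0.5 ? 1 : 0;
	});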

This module is also designed to be a customizable, extensible
neural network simulation toolkit. Through a combination of setting
the $Connection variable and using custom activation functions, as
well as basic package inheritance, you can simulate many different
types of neural network structures with very little new code written
by you. (See ex_aln.pl)

examples/ex_alpha.pl

		{
			nodes		=>	35,
			activation	=>	'linear'
		},
		{
			nodes		=>	1,
			activation	=>	'linear'
		}
	]);
	
	# Debug level of 4 gives JUST the learn-loop iteration benchmark and comparison data
	# as learning progresses.
	$net->debug(4);

	my $letters = [            # All prototype inputs        
        [
        0,1,1,1,0,             # Inputs are
        1,0,0,0,1,             #  5*7 digitized characters
        1,0,0,0,1,              
        1,1,1,1,1,
        1,0,0,0,1,             # This is the alphabet of the

examples/ex_bmp.pl

	# Set resolution
	my $xres=5;
	my $yres=5;
	
	# Create a new net with 1 layer, $xres*$yres inputs, and 1 output
	my $net = AI::NeuralNet::Mesh->new(1,$xres*$yres,1);
	
	# Enable debugging
	$net->debug(4);
	
	# Create datasets.
	my @data = ( 
		[	0,1,1,0,0,
			0,0,1,0,0,
			0,0,1,0,0,
			0,0,1,0,0,
			0,1,1,1,2	],		[	1	],
		
		[	1,1,1,0,0,
			0,0,0,1,0,
			0,1,1,1,0,
			1,0,0,0,0,

examples/ex_dow.pl


    use AI::NeuralNet::Mesh;
	use Benchmark;

	# Create a new net with 2 layers, 9 inputs, and 1 output
        my $net = AI::NeuralNet::Mesh->new(2,9,1);
	
	# Set debugging to level 2
        $net->debug(2);
	
	# Create datasets.
	#	Note that these are fictitious values shown for illustration purposes
	#	only.  In the example, CPI is a certain month's consumer price
	#	index, CPI-1 is the index one month before, CPI-3 is the index 3
	#	months before, etc.

	my @data = ( 
		#	Mo  CPI  CPI-1 CPI-3 	Oil  Oil-1 Oil-3    Dow   Dow-1 Dow-3   Dow Ave (output)
		[	1, 	229, 220,  146, 	20.0, 21.9, 19.5, 	2645, 2652, 2597], 	[	2647  ],
		[	2, 	235, 226,  155, 	19.8, 20.0, 18.3, 	2633, 2645, 2585], 	[	2637  ],
		[	3, 	244, 235,  164, 	19.6, 19.8, 18.1, 	2627, 2633, 2579], 	[	2630  ],
		[	4, 	261, 244,  181, 	19.6, 19.6, 18.1, 	2611, 2627, 2563], 	[	2620  ],
		[	5, 	276, 261,  196, 	19.5, 19.6, 18.0, 	2630, 2611, 2582], 	[	2638  ],
		[	6, 	287, 276,  207, 	19.5, 19.5, 18.0, 	2637, 2630, 2589], 	[	2635  ],
		[	7, 	296, 287,  212, 	19.3, 19.5, 17.8, 	2640, 2637, 2592], 	[	2641  ] 		
	);
    
    
	# If we haven't saved the net already, do the learning
        if(!$net->load('DOW.mesh')) {
		print "\nLearning started...\n";
		
		# Make it learn the whole dataset $top times
		my @list;
		my $top=1;
		for my $a (0..$top) {
			my $t1=new Benchmark;
			print "\n\nOuter Loop: $a\n";
			
			# Test forgetfulness
			my $f = $net->learn_set(\@data,	inc		=>	0.2,	
											max		=>	2000,
											error	=>	-1);
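			# (learn_set option semantics, assumed from the module's
			#  documentation rather than this excerpt: inc is the
			#  learning increment, max caps the learning iterations,
			#  and error => -1 disables the error-based early stop.)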
			
			# Print it 
			print "\n\nForgetfullness: $f%\n";

			# Save net to disk				
            $net->save('DOW.mesh');
            
			my $t2=new Benchmark;
			my $td=timediff($t2,$t1);
			print "\nLoop $a took ",timestr($td),"\n";
		}
	}
                                                                          
	# Run a prediction using fake data
	#			Month	CPI  CPI-1 CPI-3 	Oil  Oil-1 Oil-3    Dow   Dow-1 Dow-3    
	my @set=(	10,		352, 309,  203, 	18.3, 18.7, 16.1, 	2592, 2641, 2651	  ); 
	
	# Dow Ave (output)	
	my $fb=$net->run(\@set)->[0];
	
	# Print output
	print "\nTest Factors: (",join(',',@set),")\n";
	print "DOW Prediction for Month #11: $fb\n";
	

examples/ex_synop.pl

	
	# Add a small amount of randomness to the network
	$net->random(0.001);

	# Demonstrate a simple learn() call
	my @inputs = ( 0,0,1,1,1 );
	my @outputs = ( 1,0,1,0,1 );
	
	print $net->learn(\@inputs, \@outputs),"\n";

	# Create a data set to learn
	my @set = (
		[ 2,2,3,4,1 ], [ 1,1,1,1,1 ],
		[ 1,1,1,1,1 ], [ 0,0,0,0,0 ],
		[ 1,1,1,0,0 ], [ 0,0,0,1,1 ]	
	);
	
	# Demo learn_set()
	my $f = $net->learn_set(\@set);
	print "Forgetfulness: $f unit\n";
	
	# Crunch a bunch of strings and return array refs
	my $phrase1 = $net->crunch("I love neural networks!");
	my $phrase2 = $net->crunch("Jay Leno is weird.");
	my $phrase3 = $net->crunch("The rain in spain...");
	my $phrase4 = $net->crunch("Tired of word crunching yet?");

	# Make a data set from the array refs
	my @phrases = (
		$phrase1, $phrase2,
		$phrase3, $phrase4
	);

	# Learn the data set	
	$net->learn_set(\@phrases);
	
	# Run a test phrase through the network
	my $test_phrase = $net->crunch("I love neural networking!");
	my $result = $net->run($test_phrase);
	
	# Get this, it prints "Jay Leno is  networking!" ...  LOL!
	print $net->uncrunch($result),"\n";

examples/ex_wine.pl

=begin

	File:   examples/ex_wine.pl
	Author: Josiah Bryan, <jdb@wcoil.com>
    Desc:
		
		This demonstrates wine cultivar prediction using the
		AI::NeuralNet::Mesh module.
		
		This script uses data that are the results of a chemical
		analysis of wines grown in the same region in Italy but
		derived from three different cultivars. The analysis
		determined the quantities of 13 constituents found in
		each of the three types of wines.

		The inputs of the net represent 13 separate attributes
		of the wine's chemical analysis, as follows:
		
		 	1)  Alcohol
		 	2)  Malic acid
		 	3)  Ash
