AI-NeuralNet-Mesh


Mesh.pm

		'acts'       => [ qw ( ramp and_gate or_gate range ) ],
	);
    @EXPORT_OK   = ( @{ $EXPORT_TAGS{'all'} }, qw( p low high ramp and_gate or_gate ) );
    
    use strict;
    use Benchmark; 

   	# See POD for usage of this variable.
	$AI::NeuralNet::Mesh::Connector = '_c';
	
	# Debugging subs
	$AI::NeuralNet::Mesh::DEBUG  = 0;
	sub whowasi { (caller(1))[3] . '()' }
	sub debug { shift; $AI::NeuralNet::Mesh::DEBUG = shift || 0; } 
	sub d { shift if(substr($_[0],0,4) eq 'AI::'); my ($a,$b,$c)=(shift,shift,$AI::NeuralNet::Mesh::DEBUG); print $a if($c == $b); return $c }
	sub verbose {debug @_};
	sub verbosity {debug @_};
	sub v {debug @_};
	
	
	# Return version of ::ID string passed or current version of this
	# module if no string is passed. Used in load() to detect file versions.
	sub version {
		shift if(substr($_[0],0,4) eq 'AI::');
		substr((split(/\s/,(shift || $AI::NeuralNet::Mesh::ID)))[2],1);
	}                                  
	
	# Rounds a floating-point value to an integer with int() and sprintf()
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
      	my $int = eval { int(sprintf("%.0f",shift)) };
      	return $@ ? 0 : $int;
	}
    
	# Package constructor
	sub new {
		no strict 'refs';
		my $type	=	shift;
		my $self	=	{};
		my $layers	=	shift;
		my $nodes	=	shift;
		my $outputs	=	shift || $nodes;
		my $inputs	=	shift || $nodes;
        
		bless $self, $type;
		                       

Mesh.pm

			}
		}
				
		# Done!
		return $self;
	}	
    

    # Internal usage
    # Connects one range of nodes to another range
    sub _c {
    	my $self	=	shift;
    	my $r1a		=	shift;
    	my $r1b		=	shift;
    	my $r2a		=	shift;
    	my $r2b		=	shift;
    	my $m1		=	shift || $self->{mesh};
    	my $m2		=	shift || $m1;
		for my $y ($r1a..$r1b-1) {
			for my $z ($r2a..$r2b-1) {
				$m1->[$y]->add_output_node($m2->[$z]);
			}
		}
	}
    
    # Internal usage
    # Creates the mesh of neurons
    sub _init {
    	my $self		=	shift;
    	my $nodes		=	$self->{nodes};
    	my $outputs		=	$self->{outputs} || $nodes;
    	my $inputs		=	$self->{inputs}  || $nodes;
    	my $layers		=	$self->{total_layers};
        my $tmp 		=	$self->{total_nodes} || ($layers * $nodes + $outputs);
    	my $layer_specs	=	$self->{layers};
    	my $connector	=	$self->{connector};
        my ($x,$y,$z);
        no strict 'refs';

Mesh.pm


		# Add a cap to the bottom of the mesh to stop it from trying
		# to recursively adjust_weight() where there are no more nodes.		
		for my $x (0..$inputs-1) {
			$self->{input}->{IDs}->[$x] = 
				$self->{mesh}->[$x]->add_input_node($self->{input}->{cap});
		}
	}
    
    # See POD for usage
    sub extend {
    	my $self	=	shift;
    	my $layers	=	shift;
    
    	# Looks like we got ourselves a layer specs array
		if(ref($layers) eq "ARRAY") { 
			if($self->{total_layers}!=$#{$layers}) {
				$self->{error} = "extend(): Cannot add new layers. Create a new network to add layers.\n";
				return undef;
			}
			if(ref($layers->[0]) eq "HASH") {

Mesh.pm

				for (0..$self->{total_layers}){$self->{total_nodes}+= $self->{layers}->[$_]}
			}
		} else {
			$self->{error} = "extend(): Invalid argument type.\n";
			return undef;
		}
		return 1;
	}
    
    # See POD for usage
    sub extend_layer {
    	my $self	=	shift;
    	my $layer	=	shift || 0;
    	my $specs	=	shift;
    	if(!$specs) {
    		$self->{error} = "extend_layer(): You must provide specs to extend layer $layer with.\n";
    		return undef;
    	}
    	if(ref($specs) eq "HASH") {
    		$self->activation($layer,$specs->{activation}) if($specs->{activation});
    		$self->threshold($layer,$specs->{threshold})   if($specs->{threshold});
    		$self->mean($layer,$specs->{mean})             if($specs->{mean});
    		return $self->add_nodes($layer,$specs->{nodes});
    	} else { 
    		return $self->add_nodes($layer,$specs);
    	}
    	return 1;
    }
    
    # Pseudo-internal usage
    sub add_nodes {
    	no strict 'refs';
		my $self	=	shift;
    	my $layer	=	shift;
    	my $nodes	=	shift;
    	my $n		=	0;
		my $more	=	$nodes - $self->{layers}->[$layer] - 1;
        d("Checking on extending layer $layer to $nodes nodes (check:$self->{layers}->[$layer]).\n",9);
        return 1 if ($nodes == $self->{layers}->[$layer]);
        if ($self->{layers}->[$layer]>$nodes) {
        	$self->{error} = "add_nodes(): I cannot remove nodes from the network with this version of my module. You must create a new network to remove nodes.\n";

Mesh.pm

        }
        d("Extending layer $layer by $more.\n",9);
        for (0..$more){$self->{mesh}->[$#{$self->{mesh}}+1]=AI::NeuralNet::Mesh::node->new($self)}
        for(0..$layer-2){$n+=$self->{layers}->[$_]}
		$self->_c($n,$n+$self->{layers}->[$layer-1],$#{$self->{mesh}}-$more+1,$#{$self->{mesh}});
		$self->_c($#{$self->{mesh}}-$more+1,$#{$self->{mesh}},$n+$self->{layers}->[$layer],$n+$self->{layers}->[$layer]+$self->{layers}->[$layer+1]);
    }
        
        
    # See POD for usage
    sub run {
    	my $self	=	shift;
    	my $inputs	=	shift;
    	my $const	=	$self->{const};
    	#my $start	=	new Benchmark;
    	$inputs		=	$self->crunch($inputs) if(!ref($inputs));	# crunch a raw string into a map
    	no strict 'refs';
    	for my $x (0..$#{$inputs}) {
    		last if($x >= $self->{inputs});
    		d("inputing $inputs->[$x] at index $x with ID $self->{input}->{IDs}->[$x].\n",1);
    		$self->{mesh}->[$x]->input($inputs->[$x]+$const,$self->{input}->{IDs}->[$x]);

Mesh.pm

	    	for my $x ($#{$inputs}+1..$self->{inputs}-1) {
	 	    	d("inputing 1 at index $x with ID $self->{input}->{IDs}->[$x].\n",1);
	    		$self->{mesh}->[$x]->input(1,$self->{input}->{IDs}->[$x]);
	    	}
	    }
    	#$self->{benchmark} = timestr(timediff(new Benchmark, $start));
    	return $self->{output}->get_outputs();
    }    
    
    # See POD for usage
    sub run_uc {
    	$_[0]->uncrunch(run(@_));
    }

	# See POD for usage
	sub learn {
    	my $self	=	shift;					
    	my $inputs	=	shift;					# input set
    	my $outputs	=	shift;					# target outputs
    	my %args	=	@_;						# get args into hash
    	my $inc		=	$args{inc} || 0.002;	# learning gradient
    	my $max     =   $args{max} || 1024;     # max iterations
    	my $degrade =   $args{degrade} || 0;    # enable gradient degrading
		my $error   = 	(defined $args{error} && $args{error}>-1) ? $args{error} : -1;
  		my $dinc	=	0.0002;					# amount to adjust gradient by
		my $diff	=	100;					# error margin between results

Mesh.pm

   			d('.',12);
   			d('['.join(',',@{$got})."-".join(',',@{$outputs}).']',13);
   		}  
   		my $str = "Learning took $loop loops and ".timestr(timediff(new Benchmark,$start))."\n";
   		d($str,3); $self->{benchmark} = "$loop loops and ".timestr(timediff(new Benchmark,$start))."\n";
   		return $str;
   	}


	# See POD for usage
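	# Data format (a hedged sketch): a flat array ref of alternating
	# input/target pairs, plus optional learn() args, e.g.
	#   $net->learn_set([ [1,2],[3],   [3,5],[8] ], inc=>0.2, max=>500);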
	sub learn_set {
		my $self	=	shift;
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2;
		my $inc		=	$args{inc};
		my $max		=	$args{max};
	    my $error	=	$args{error};
	    my $degrade	=	$args{degrade};
	    my $p		=	(defined $args{flag}) ?$args{flag} :1;
	    my $row		=	(defined $args{row})  ?$args{row}+1:1;

Mesh.pm

		}
			
		if ($p) {
			return pdiff($data->[$row],$self->run($data->[$row-1]));
		} else {
			return $data->[$row]->[0]-$self->run($data->[$row-1])->[0];
		}
	}
	
	# See POD for usage
	sub run_set {
		my $self	=	shift;
		my $data	=	shift;
		my $len		=	$#{$data}/2;
		my (@results,$res);
		for my $x (0..$len) {
			$res = $self->run($data->[$x*2]);
			for(0..$#{$res}){$results[$x]->[$_]=$res->[$_]}
			d("Running set $x [$res->[0]]...\r",4);
		}
		return \@results;

Mesh.pm

	#
	# Returns a data set of the same format as required by the
	# learn_set() method. $file is the disk file to load the set from.
	# $column is an optional argument specifying the column in the 
	# data set to use as the class attribute; it defaults to 0.
	# $separator is an optional argument specifying the separator
	# character between values; it defaults to ',' (a single comma). 
	# NOTE: This does not handle quoted fields, or any record
	# separator other than "\n".
	#
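	# Example (a hedged sketch; the file name is hypothetical):
	#   my $data = $net->load_set("table.csv", 0, ',');
	#   $net->learn_set($data);
	#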
	sub load_set {
		my $self	=	shift;
		my $file	=	shift;
		my $attr	=	shift || 0;
		my $sep		=	shift || ',';
		my $data	=	[];
		open(FILE,	$file) or do {
			$self->{error} = "load_set(): Cannot open file \"$file\": $!";
			return undef;
		};
		my @lines	=	<FILE>;
		close(FILE);
		for my $x (0..$#lines) {
			chomp($lines[$x]);

Mesh.pm

				$tmp[$_]=$self->crunch($tmp[$_])->[0] if($tmp[$_]=~/[A-Za-z]/);
				if($_!=$attr){$data->[$x*2]->[$c]=$tmp[$_];$c++}
			};             
			d("Loaded line $x, [@tmp]                            \r",4);
			$data->[$x*2+1]=[$tmp[$attr]];
		}
		return $data;
	}
	
	# See POD for usage
	sub get_outs {
		my $self	=	shift;
		my $data	=	shift;
		my $len		=	$#{$data}/2;
		my $outs	=	[];
		for my $x (0..$len) {
			$outs->[$x] = $data->[$x*2+1];
		}
		return $outs;
	}
	
	# Save entire network state to disk.
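	# Usage (a hedged sketch): $net->save("mesh.net") or print $net->error();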
	sub save {
		my $self	=	shift;
		my $file	=	shift;
		no strict 'refs';
		
		open(FILE,">$file");
	    
	    print FILE "header=$AI::NeuralNet::Mesh::ID\n";
	   	
		print FILE "total_layers=$self->{total_layers}\n";
		print FILE "total_nodes=$self->{total_nodes}\n";

Mesh.pm

	    
	    if(!(-f $file)) {
	    	$self->{error} = "Error writing to \"$file\".";
	    	return undef;
	    }
	    
	    return $self;
	}
        
	# Load entire network state from disk.
	sub load {
		my $self		=	shift;
		my $file		=	shift;  
		my $load_flag   =	shift;
		
	    my @lines;
	    
	    if(-f $file) {
		    open(FILE,"$file");
		    @lines=<FILE>;
	    	close(FILE);

Mesh.pm

				$self->{mesh}->[$n]->{threshold}  = $l[$z+1];
				$self->{mesh}->[$n]->{mean}       = $l[$z+2];
				$n++;
			}
		}
		
		return $self;
	}
	
	# Load entire network state from disk.
	sub load_old {
		my $self		=	shift;
		my $file		=	shift;  
		my $load_flag   =	shift;
		
	    if(!(-f $file)) {
	    	$self->{error} = "File \"$file\" does not exist.";
	    	return undef;
	    }
	    
	    open(FILE,"$file");

Mesh.pm

			my @l = split /\,/, $db{"n".($tmp+$x)};
			for my $y (0..$div-1) {
				$self->{mesh}->[$tmp+$x]->{_inputs}->[$y]->{weight} = $l[$y];
		 	}
		} 
		
		return $self;
	}

	# Dumps the complete weight matrix of the network to STDOUT
	sub show {
		my $self	=	shift;
		my $n 		=	0;    
		no strict 'refs';
		for my $x (0..$self->{total_layers}) {
			for my $y (0..$self->{layers}->[$x]-1) {
				for my $z (0..$self->{layers}->[$x-1]-1) {
					print "$self->{mesh}->[$n]->{_inputs}->[$z]->{weight},";
				}
				$n++;
			}

Mesh.pm

	# usage: $net->activation($layer,$type);
	# $type can be: "linear", "sigmoid", "sigmoid_2".
	# You can use "sigmoid_1" as a synonym to "sigmoid". 
	# Type can also be a CODE ref, ( ref($type) eq "CODE" ).
	# If $type is a CODE ref, then the function is called in this form:
	# 	$output	= &$type($sum_of_inputs,$self);
	# The code ref then has access to all the data in that node (through the
	# blessed reference $self) and is expected to return the value to be used
	# as the output for that node. The sum of all the inputs to that node
	# is already summed and passed as the first argument.
	sub activation {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $value	=	shift || 'linear';
		my $n 		=	0;    
		no strict 'refs';
		for(0..$layer-1){$n+=$self->{layers}->[$_]}
		for($n..$n+$self->{layers}->[$layer]-1) {
			$self->{mesh}->[$_]->{activation} = $value; 
		}
	}
	
	# Applies an activation type to a specific node
	sub node_activation {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $node	=	shift || 0;
		my $value	=	shift || 'linear';
		my $n 		=	0;    
		no strict 'refs';
		for(0..$layer-1){$n+=$self->{layers}->[$_]}
		$self->{mesh}->[$n+$node]->{activation} = $value; 
	}
	
	# Set the activation threshold for a specific layer.
	# Only applicable if that layer uses "sigmoid" or "sigmoid_2"
	# usage: $net->threshold($layer,$threshold);
	sub threshold {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $value	=	shift || 0.5; 
		my $n		=	0;
		no strict 'refs';
		for(0..$layer-1){$n+=$self->{layers}->[$_]}
		for($n..$n+$self->{layers}->[$layer]-1) {
			$self->{mesh}->[$_]->{threshold} = $value;
		}
	}
	
	# Applies a threshold to a specific node     
	sub node_threshold {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $node	=	shift || 0;
		my $value	=	shift || 0.5; 
		my $n		=	0;
		no strict 'refs';
		for(0..$layer-1){$n+=$self->{layers}->[$_]}
		$self->{mesh}->[$n+$node]->{threshold} = $value;
	}
	
	# Set mean (avg.) flag for a layer.
	# usage: $net->mean($layer,$flag);
	# If $flag is true, it enables finding the mean for that layer,
	# If $flag is false, disables mean.
	sub mean {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $value	=	shift || 0;
		my $n		=	0;
		no strict 'refs';
		for(0..$layer-1){$n+=$self->{layers}->[$_]}
		for($n..$n+$self->{layers}->[$layer]-1) {
			$self->{mesh}->[$_]->{mean} = $value;
		}
	}
	
	  
	# Returns a pcx object
	sub load_pcx {
		my $self	=	shift;
		my $file	=	shift;
		eval('use PCX::Loader');
		if($@) {
			$self->{error}="Cannot load PCX::Loader module: $@";
			return undef;
		}
		return PCX::Loader->new($self,$file);
	}	
	
	# Crunch a string of words into a map
	sub crunch {
		my $self	=	shift;
		my @ws 		=	split(/[\s\t]/,shift);
		my (@map,$ic);
		for my $a (0..$#ws) {
			$ic=$self->crunched($ws[$a]);
			if(!defined $ic) {
				$self->{_crunched}->{list}->[$self->{_crunched}->{_length}++]=$ws[$a];
				$map[$a]=$self->{_crunched}->{_length};
			} else {
				$map[$a]=$ic;
            }
		}
		return \@map;
	}
	
	# Finds if a word has been crunched.
	# Returns undef on failure, word index for success.
	sub crunched {
		my $self	=	shift;
		for my $a (0..$self->{_crunched}->{_length}-1) {
			return $a+1 if($self->{_crunched}->{list}->[$a] eq $_[0]);
		}
		$self->{error} = "Word \"$_[0]\" not found.";
		return undef;
	}
	
	# Alias for crunched(), above
	sub word { crunched(@_) }
	
	# Uncrunches a map (array ref) into a string of words separated by
	# spaces and returns the string (not an array ref).
	sub uncrunch {
		my $self	=	shift;
		my $map = shift;
		my ($c,$el,$x);
		foreach $el (@{$map}) {
			$c .= $self->{_crunched}->{list}->[$el-1].' ';
		}
		return $c;
	}
	
	# Sets/gets the randomness factor in the network. Setting a value of 0 
	# disables random factors.
	sub random {
		my $self	=	shift;
		my $rand	=	shift;
		return $self->{random}	if(!(defined $rand));
		$self->{random}	=	$rand;
	}
	
	# Sets/gets column width for printing lists in debug modes 1,3, and 4.
	sub col_width {
		my $self	=	shift;
		my $width	=	shift;
		return $self->{col_width}	if(!$width);
		$self->{col_width}	=	$width;
	} 

	# Sets/gets the run const. factor in the network. Setting a value of 0 
	# disables run const. factor. 
	sub const {
		my $self	=	shift;
		my $const	=	shift;
		return $self->{const}	if(!(defined $const));
		$self->{const}	=	$const;
	}
	
	# Return benchmark time from last learn() operation.
	sub benchmark {
		shift->{benchmark};
	}
	
	# Same as benchmark()
	sub benchmarked {
		benchmark(shift);
	}
	
	# Return the last error in the mesh, or undef if no error.
	sub error {
		my $self = shift;
		return undef if !$self->{error};
		chomp($self->{error});
		return $self->{error}."\n";
	}
	
	# Used to format array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $a		=	shift;
		my $b		=	shift;
		my $x;
		foreach my $el (@{$map}) { 
			my $str = ((int($el))?$a:$b);
			$str=$el."\0" if(!$a);
			print $str;	$x++;
			if($x>$break-1) { print "\n"; $x=0;	}
		}
		print "\n";
	}
	
	# Returns percentage difference between all elements of two
	# array refs of exact same length (in elements).
	# Now calculates actual difference in numerical value.
	sub pdiff {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $a1	=	shift;
		my $a2	=	shift;
		my $a1s	=	$#{$a1};
		my $a2s	=	$#{$a2};
		my ($a,$b,$diff,$t);
		$diff=0;
		for my $x (0..$a1s) {
			$a = $a1->[$x]; $b = $a2->[$x];
			if($a!=$b) {
				if($a<$b){$t=$a;$a=$b;$b=$t;}
				$a=1 if(!$a); $diff+=(($a-$b)/$a)*100;
			}
		}
		$a1s = 1 if(!$a1s);
		return sprintf("%.10f",($diff/$a1s));
	}
	
	# Returns $fa as a percentage of $fb
	sub p {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my ($fa,$fb)=(shift,shift); 
		sprintf("%.3f",$fa/$fb*100); #((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100
	}
	
	# Returns the index of the element in array REF passed with the highest 
	# comparative value
	sub high {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1 = shift; my ($el,$len,$tmp); $tmp=0;
		foreach $el (@{$ref1}) { $len++ }
		for my $x (0..$len-1) { $tmp = $x if($ref1->[$x] > $ref1->[$tmp]) }
		return $tmp;
	}
	
	# Returns the index of the element in array REF passed with the lowest 
	# comparative value
	sub low {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1 = shift; my ($el,$len,$tmp); $tmp=0;
		foreach $el (@{$ref1}) { $len++ }
		for my $x (0..$len-1) { $tmp = $x if($ref1->[$x] < $ref1->[$tmp]) }
		return $tmp;
	}  
	
	# Following is a collection of a few nifty custom activation functions.
	# range() is exported by default, the rest you can get with:
	#	use AI::NeuralNet::Mesh ':acts'
	# The ':all' tag also gets these into your namespace.

Mesh.pm

	#
	#	$net->activation(4,range(@numbers));
	#	$net->activation(4,range(6,15,26,106,28,3));
	#
	# Note: when using a range() activator, train the
	# net TWICE on the data set, because the first time
	# the range() function searches for the top value in
	# the inputs, and therefore, results could fluctuate.
	# The second learning cycle guarantees more accuracy.
	#	
	sub range {
		my @r=@_;
		sub { $_[1]->{t}=$_[0] if($_[0]>$_[1]->{t}); $r[intr($_[0]/$_[1]->{t}*$#r)] }
	}
	
	#
	# ramp() performs smooth ramp activation between 0 and 1 if $r is 1, 
	# or between -1 and 1 if $r is 2. $r defaults to 1, as you can see.	
	#
	# Note: when using a ramp() activator, train the
	# net at least TWICE on the data set, because the first 
	# time the ramp() function searches for the top value in
	# the inputs, and therefore, results could fluctuate.
	# The second learning cycle guarantees more accuracy.
	#
	sub ramp {
		my $r=shift||1; my $t=($r<2)?0:-1;
		sub { $_[1]->{t}=$_[0] if($_[0]>$_[1]->{t}); $_[0]/$_[1]->{t}*$r+$t }
	}

	# Self-explanatory, pretty much. $threshold is used to decide if an input 
	# is true or false (1 or 0). If an input is below $threshold, it is false.
	sub and_gate {
		my $threshold = shift || 0.5;
		sub {
			my $sum  = shift;
			my $self = shift;
			for my $x (0..$self->{_inputs_size}-1) { return $self->{_parent}->{const} if($self->{_inputs}->[$x]->{value} < $threshold) }
			return $sum/$self->{_inputs_size};
		}
	}
	
	# Self-explanatory; $threshold is used the same as above.
	sub or_gate {
		my $threshold = shift || 0.5;
		sub {
			my $sum  = shift;
			my $self = shift;
			for my $x (0..$self->{_inputs_size}-1) { return $sum/$self->{_inputs_size} if($self->{_inputs}->[$x]->{value} >= $threshold) }
			return $self->{_parent}->{const};
		}
	}
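	
	# Example (a hedged sketch): apply the gate activations to whole layers:
	#   $net->activation(1, and_gate(0.5));
	#   $net->activation(2, or_gate(0.5));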
	
1;

package AI::NeuralNet::Mesh::node;
	
	use strict;

	# Node constructor
	sub new {
		my $type		=	shift;
		my $self		={ 
			_parent		=>	shift,
			_inputs		=>	[],
			_outputs	=>	[]
		};
		bless $self, $type;
	}

	# Receive inputs from other nodes, and also send
	# outputs on.	
	sub input {
		my $self	=	shift;
		my $input	=	shift;
		my $from_id	=	shift;
		
		$self->{_inputs}->[$from_id]->{value} = $input * $self->{_inputs}->[$from_id]->{weight};
		$self->{_inputs}->[$from_id]->{input} = $input;
		$self->{_inputs}->[$from_id]->{fired} = 1;
		
		$self->{_parent}->d("got input $input from id $from_id, weighted to $self->{_inputs}->[$from_id]->{value}.\n",1);
		

Mesh.pm

			# Handle CODE refs
			$output = &{$self->{activation}}($output,$self) if(ref($self->{activation}) eq "CODE");
			
			# Send output
			for my $o (@{$self->{_outputs}}) { $o->{node}->input($output,$o->{from_id}) }
		} else {
			$self->{_parent}->d("all inputs have NOT fired for $self.\n",1);
		}
	}

	sub add_input_node {
		my $self	=	shift;
		my $node	=	shift;
		my $i		=	$self->{_inputs_size} || 0;
		$self->{_inputs}->[$i]->{node}	 = $node;
		$self->{_inputs}->[$i]->{value}	 = 0;
		$self->{_inputs}->[$i]->{weight} = 1; #rand()*1;
		$self->{_inputs}->[$i]->{fired}	 = 0;
		$self->{_inputs_size} = ++$i;
		return $i-1;
	}
	
	sub add_output_node {
		my $self	=	shift;
		my $node	=	shift;
		my $i		=	$self->{_outputs_size} || 0;
		$self->{_outputs}->[$i]->{node}		= $node;
		$self->{_outputs}->[$i]->{from_id}	= $node->add_input_node($self);
		$self->{_outputs_size} = ++$i;
		return $i-1;
	}     
	
	sub adjust_weight {
		my $self	=	shift;
		my $inc		=	shift;
		for my $i (@{$self->{_inputs}}) {
			$i->{weight} += $inc * $i->{weight};
			$i->{node}->adjust_weight($inc) if($i->{node});
		}
	}

1;	
	
# Internal usage, prevents recursion on empty nodes.
package AI::NeuralNet::Mesh::cap;
	sub new     { bless {}, shift }
	sub input           {}
	sub adjust_weight   {}
	sub add_output_node {}
	sub add_input_node  {}
1;

# Internal usage, collects data from output layer.
package AI::NeuralNet::Mesh::output;
	
	use strict;
	
	sub new {
		my $type		=	shift;
		my $self		={ 
			_parent		=>	shift,
			_inputs		=>	[],
		};
		bless $self, $type;
	}
	
	sub add_input_node {
		my $self	=	shift;
		return (++$self->{_inputs_size})-1;
	}
	
	sub input {
		my $self	=	shift;
		my $input	=	shift;
		my $from_id	=	shift;
		$self->{_parent}->d("GOT INPUT [$input] FROM [$from_id]\n",1);
		$self->{_inputs}->[$from_id] = $self->{_parent}->intr($input);
	}
	
	sub get_outputs {
		my $self	=	shift;
		return $self->{_inputs};
	}

1;
                                       
__END__

=head1 NAME

Mesh.pm



=head1 DESCRIPTION

AI::NeuralNet::Mesh is an optimized, accurate neural network Mesh.
It was designed with accuracy and speed in mind. 

This network model is very flexible. It will allow for classic binary
operation or any range of integer or floating-point inputs you care
to provide. With this you can change activation types on a per node or
per layer basis (you can even include your own anonymous subs as 
activation types). You can add sigmoid transfer functions and control
the threshold. You can learn data sets in batch, and load CSV data
set files. You can do almost anything you need to with this module.
This code is designed to be flexible. Any new ideas for this module?
See AUTHOR, below, for contact info.

This module is designed to also be a customizable, extensible 
neural network simulation toolkit. Through a combination of setting
the $Connector variable and using custom activation functions, as
well as basic package inheritance, you can simulate many different

Mesh.pm


	my $net = AI::NeuralNet::Mesh->new([2,3,1]);
	

Creates a network with 2 input nodes, 3 hidden nodes, and 1 output node.

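A minimal usage sketch (the single training pair below is made up purely for
illustration; learn() and run() are documented later in this POD):

	my $net = AI::NeuralNet::Mesh->new([2,3,1]);
	$net->learn([0,1], [1]);              # one input pattern and its target
	print $net->run([0,1])->[0], "\n";    # run the net and print its single output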

=item AI::NeuralNet::Mesh->new(\@array_of_hashes);

Another dandy constructor...this is my favorite. It allows you to tailor the number of layers,
the size of the layers, the activation type (you can even add anonymous inline subs with this one),
and even the threshold, all with one array ref-ed constructor.

Example:

	my $net = AI::NeuralNet::Mesh->new([
	    {
		    nodes        => 2,
		    activation   => 'linear'
		},
		{
		    nodes        => 3,
		    activation   => sub {
		        my $sum  =  shift;
		        return $sum + rand()*1;
		    }
		},
		{
		    nodes        => 1,
		    activation   => 'sigmoid',
		    threshold    => 0.75
		}
	]);

Mesh.pm

	
The code ref is expected to return a value to be used as the output of the node.
The code ref also has access to all the data of that node through the second argument,
a blessed hash reference to that node.

See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions
other than the ones listed above.

The activation type for each layer is preserved across load/save calls. 

EXCEPTION: Due to the constraints of Perl, I cannot load/save the actual subs that the code
ref option points to. Therefore, you must re-apply any code ref activation types after a 
load() call.
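
Here is a hedged sketch of a code-ref activation type (the clipping behavior is
just an illustration, not one of the built-in types):

	# Clip the summed input of every node on layer 1 to the range [0,1].
	$net->activation(1, sub {
	    my ($sum, $node) = @_;   # summed inputs, blessed node hash ref
	    return $sum < 0 ? 0 : $sum > 1 ? 1 : $sum;
	});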

=item $net->node_activation($layer,$node,$type);

This sets the activation function for a specific node in a layer. The same notes apply
here as to the activation() method above.
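
A short sketch:

	# Give node 0 of layer 1 the sigmoid activation, leaving its neighbors alone.
	$net->node_activation(1, 0, 'sigmoid');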


=item $net->threshold($layer,$value);

Mesh.pm


This splits a string passed with /[\s\t]/ into an array ref containing unique indexes
to the words. The words are stored in an internal array and preserved across load() and save()
calls. This is designed to be used to generate unique maps suitable for passing to learn() and 
run() directly. It returns an array ref.

The words are not duplicated internally. For example:

	$net->crunch("How are you?");

Will probably return an array ref containing 1,2,3. A subsequent call of:

    $net->crunch("How is Jane?");

Will probably return an array ref containing 1,4,5. Notice, the first element stayed
the same. That is because it already stored the word "How". So, each word is stored
only once internally and the returned array ref reflects that.
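
A hedged sketch of crunch() feeding learn() and run_uc() (the phrases are made
up, and the net is assumed to have input and output sizes matching the crunched
phrase lengths):

	$net->learn($net->crunch("I love chips."),
	            $net->crunch("That's junk food!"));
	print $net->run_uc("I love chips."), "\n";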


=item $net->uncrunch($array_ref);

Mesh.pm

Note: when using a range() activator, train the
net TWICE on the data set, because the first time
the range() function searches for the top value in
the inputs, and therefore, results could fluctuate.
The second learning cycle guarantees more accuracy.

The actual code that implements the range closure is
a bit convoluted, so I will expand on it here as a simple
tutorial for custom activation functions.

	= line 1 = 	sub {
	= line 2 =		my @values = ( 6..10 );
	= line 3 =		my $sum   = shift;
	= line 4 =		my $self  = shift;
	= line 5 =		$self->{top_value}=$sum if($sum>$self->{top_value});
	= line 6 =		my $index = intr($sum/$self->{top_value}*$#values);
	= line 7 =		return $values[$index];
	= line 8 =	}

Now, the actual function fits in one line of code, but I expanded it a bit
here. Line 1 creates our array of allowed output values. Lines two and

Mesh.pm

an input is below $threshold, it is false. $threshold defaults to 0.5.

You can get this into your namespace with the ':acts' export 
tag like so:
	
	use AI::NeuralNet::Mesh ':acts';

Let's look at the code real quick, as it shows how to get at the individual
input connections:

	= line 1 =	sub {
	= line 2 =		my $sum  = shift;
	= line 3 =		my $self = shift;
	= line 4 =		my $threshold = 0.50;
	= line 5 =		for my $x (0..$self->{_inputs_size}-1) { 
	= line 6 =			return 0.000001 if($self->{_inputs}->[$x]->{value}<$threshold)
	= line 7 =		}
	= line 8 =		return $sum/$self->{_inputs_size};
	= line 9 =	}

Lines 2 and 3 pull in our sum and self reference. Line 5 opens a loop to go over

Mesh.pm

in the ALN example, I use a connector in the main package called tree() instead of
the default connector. Before I call the new() constructor, I use this line of code:

	$AI::NeuralNet::Mesh::Connector = 'main::tree'
	
The tree() function is called as a blessed method when it is used internally, providing
access to the blessed reference in the first argument. 

Example connector:

	sub connect_three {
    	my $self	=	shift;
    	my $r1a		=	shift;
    	my $r1b		=	shift;
    	my $r2a		=	shift;
    	my $r2b		=	shift;
    	my $mesh	=	$self->{mesh};
    	     
	    for my $y (0..($r1b-$r1a)-1) {
			$mesh->[$y+$r1a]->add_output_node($mesh->[$y+$r2a-1]) if($y>0);
			$mesh->[$y+$r1a]->add_output_node($mesh->[$y+$r2a]) if($y<($r2b-$r2a));

Mesh.pm

$r1a..$r1b because we use the loop index with the next layer up as well.

$y + $r1a gives the index into the mesh array of the current node to connect the output FROM.
We need to connect this node's output lines to the next layer's input nodes. We do this
with a simple method of the outputting node (the node at $y+$r1a), called add_output_node().

add_output_node() takes one simple argument: a blessed reference to a node that it is supposed
to output its final value TO. We get this blessed reference with more simple addition.

$y + $r2a gives us the node directly above the first node (supposedly...I'll get to the "supposedly"
part in a minute.) By adding or subtracting from this number we get the neighbor nodes.
In the above example you can see we check the $y index to see that we haven't come close to
any of the edges of the range.

Using $y+$r2a we get the index of the node to pass to add_output_node() on the first node at
$y+B<$r1a>. 

And that's all there is to it!
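
To actually use a custom connector such as connect_three() above, point the
package variable at its fully qualified name before calling new() (a hedged
sketch):

	$AI::NeuralNet::Mesh::Connector = 'main::connect_three';
	my $net = AI::NeuralNet::Mesh->new([4,4,1]);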

For the fun of it, we'll take a quick look at the default connector.
Below is the actual default connector code, albeit a bit cleaned up, as well as
line numbers added.

	= line 1  =	sub _c {
	= line 2  =    	my $self	=	shift;
	= line 3  =    	my $r1a		=	shift;
	= line 4  =    	my $r1b		=	shift;
	= line 5  =    	my $r2a		=	shift;
	= line 6  =    	my $r2b		=	shift;
	= line 7  =    	my $mesh	=	$self->{mesh};
	= line 8  =		for my $y ($r1a..$r1b-1) {
	= line 9  =			for my $z ($r2a..$r2b-1) {
	= line 10 =				$mesh->[$y]->add_output_node($mesh->[$z]);
	= line 11 =			}

Mesh.pm

=head1 MAILING LIST

A mailing list has been set up for AI::NeuralNet::Mesh and AI::NeuralNet::BackProp. 
The list is for discussion of AI and neural net related topics as they pertain to 
AI::NeuralNet::BackProp and AI::NeuralNet::Mesh. I will also announce in the group
each time a new release of AI::NeuralNet::Mesh is available.

The list address is:
	 ai-neuralnet-backprop@egroups.com 
	 
To subscribe, send a blank email to:
	ai-neuralnet-backprop-subscribe@egroups.com  


=cut







README

Greetings Perlfolk,

** What is this?

AI::NeuralNet::Mesh is an optimized, accurate neural network Mesh.
It was designed with accuracy and speed in mind. 

This network model is very flexible. It will allow for classic binary
operation or any range of integer or floating-point inputs you care
to provide. With this you can change activation types on a per node or
per layer basis (you can even include your own anonymous subs as 
activation types). You can add sigmoid transfer functions and control
the threshold. You can learn data sets in batch, and load CSV data
set files. You can do almost anything you need to with this module.
This code is designed to be flexible. Any new ideas for this module?
Contact Josiah Bryan at <jdb@wcoil.com>

This module is designed to also be a customizable, extensible 
neural network simulation toolkit. Through a combination of setting
the $Connector variable and using custom activation functions, as
well as basic package inheritance, you can simulate many different

examples/ex_add2.pl

	\%diff4\n";
	   printf "%d %.3f %d %g %s %f %f %f %f\n",
	   $layers, $inc, $top, $forgetfulness, timestr($runtime),
	$percent_diff[0],
	   $percent_diff[1], $percent_diff[2], $percent_diff[3];
	  }
	 }
	}
	
	#....................................................
	sub addnet
	{
	 print "\nCreate a new net with $layers layers, 3 inputs, and 1 output\n";
	 my $net = AI::NeuralNet::Mesh->new($layers,3,1);
	
	 # Disable debugging
	 $net->debug(0);
	
	
	 my @data = (
	  [   2633, 2665, 2685],  [ 2633 + 2665 + 2685 ],

examples/ex_add2.pl

	
	
	 my @input = ( [ 2222, 3333, 3200 ],
	      [ 1111, 1222, 3211 ],
	      [ 2345, 2543, 3000 ],
	      [ 2654, 2234, 2534 ] );
	
	    test_net( $net, @input );
	}
	#.....................................................................
	 sub test_net {
	  my @set;
	  my $fb;
	  my $net = shift;
	  my @data = @_;
	  undef @percent_diff; #@answers; undef @predictions;
	
	  for( $i=0; defined( $data[$i] ); $i++ ){
	   @set = @{ $data[$i] };
	   $fb = $net->run(\@set)->[0];
	   # Print output

examples/ex_aln.pl




######################################################################
#-################ ALN Implementation Code  ########################-#
######################################################################
	
	# Build a basic ALN tree network (_very_ basic, only implements
	# the node types, and only two learning benefits from ALN theory are
	# realized.) Also adds a method to the neural network gates, print_aln().
	sub Tree {
		# Grab our leaves and roots
		my $leaves = shift;
		my $roots  = shift || $leaves;
	    
	    # Replace the load function with a new one to preserve the
	    # load activations. We have to add this up here because next
	    # thing we do is check if they passed a file name as $leaves,
	    # and we need to have our new load sub already in place before
	    # we try to load anything in $leaves.
	    *{'AI::NeuralNet::Mesh::load'} = sub {
	        my $self		=	shift;
			my $file		=	shift;  
			my $load_flag   =	shift;
			
		    if(!(-f $file)) {
		    	$self->{error} = "File \"$file\" does not exist.";
		    	return undef;
		    }
		    
		    open(FILE,"$file");

examples/ex_aln.pl

		    # instead of just creating the network from a pre-existing reference
			my $self = AI::NeuralNet::Mesh->new(1,1);
			return $self->load($leaves,1);
		}
		
		# Initialize our counter and our specs ref
		my $specs  = [];
		my $level  = 0;
		
		# Create our custom node activation
		my $act    = sub {
			shift; my $self = shift;
			my $b1 = intr($self->{_inputs}->[0]->{weight});
			my $b2 = intr($self->{_inputs}->[1]->{weight});
			my $x1 = intr($self->{_inputs}->[0]->{input});
			my $x2 = intr($self->{_inputs}->[1]->{input});
			# node type: $b1 $b2
			# OR       : 1   1
			# AND	   : 0   0
			# L        : 1   0
			# R        : 0   1

examples/ex_aln.pl

        # Create a layer spec array with every layer having half
        # the number of nodes of the layer before it
        while($leaves!=$roots) { 
			$specs->[$level++]={ nodes=>$leaves, activation=>$act };
	        $leaves/=2;
	        $leaves++ if($leaves%2 && $leaves!=$roots);
		}
		$specs->[$level++]={ nodes=>$roots, activation=>$act };
		
		# Add a method to the net to print out the node types
		*{'AI::NeuralNet::Mesh::print_aln'} = sub {
			my $self=shift;
			my ($c,$l)=(0,0);
			for(0..$self->{total_nodes}-1) {
				my $b1 = intr($self->{mesh}->[$_]->{_inputs}->[0]->{weight});
				my $b2 = intr($self->{mesh}->[$_]->{_inputs}->[1]->{weight});
			    print "OR "  if( $b1 &&  $b2);
				print "AND " if(!$b1 && !$b2);
				print "L "   if( $b1 && !$b2);
				print "R "   if(!$b1 &&  $b2);
				$c=0,$l++,print "\n" if++$c>=$self->{layers}->[$l];
			}
		};
		
		# Add a custom node weight adjuster to learn faster
		*{'AI::NeuralNet::Mesh::node::adjust_weight'} = sub {
			my ($self,$inc,$target) = @_; 
			my $f;
			my $b1 = intr($self->{_inputs}->[0]->{weight});
			my $b2 = intr($self->{_inputs}->[1]->{weight});
			$f=1 if( $b1 &&  $b2);
			$f=2 if(!$b1 && !$b2);
			my $lo = $self->{_last_output};
			if($lo!=$target) {
				# Adjust right lead if $lo, else adjust left lead
				($target &&  $lo)?$self->{_inputs}->[0]->{weight}++:$self->{_inputs}->[0]->{weight}--;

examples/ex_aln.pl

		return $net;
	}
	
	# Our custom node connector for the tree function, above.
	# This connects every two nodes from the first range
	# to one node of the second range. This is only meant
	# to be used in a factored layer mesh, such as one with a
	# [8,4,2,1] node specification array. You should never
	# worry about what the node spec array is, as that is
	# built by tree().
	sub _c_tree {
    	my ($self,$r1a,$r1b,$r2a,$r2b)=@_;
    	my $mesh = $self->{mesh};
    	my $z=$r2a;
    	for(my $y=0;$y<($r1b-$r1a);$y+=2) {
			$mesh->[$y]->add_output_node($mesh->[$z]);
			$mesh->[$y+1]->add_output_node($mesh->[$z]);
			$z++;
		}
	}
    

examples/ex_pcx.pl

	}
	
	print "Testing random block...\n";
	
	print "Result: ",$net->run($blocks[rand()*$b])->[0],"\n";
	
	print "Benchmark for run: ", $net->benchmarked(), "\n";
	
	$net->save("pcx2.net");
	
	sub print_ref {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $x;
		my @els = (' ','.',',',':',';','%','#');
		foreach my $el (@{$map}) { 
			$str=$el/255*6;
			print $els[$str];
			$x++;
			if($x>$break-1) {
				print "\n";

examples/ex_sub.pl

=begin
    
    File:	examples/ex_sub.pl
	Author: Josiah Bryan, <jdb@wcoil.com>
	Desc: 

		This demonstrates the ability of a neural net to generalize and predict what the correct
		result is for inputs that it has never seen before.
		
		This teaches a network to subtract 6 sets of numbers, then it asks the user for 
		two numbers to subtract and then it displays the results of the user's input.

=cut

	use AI::NeuralNet::Mesh;
	
	my $subtract = new AI::NeuralNet::Mesh(2,2,1);
	
	if(!$subtract->load('sub.mesh')) {
		$subtract->learn_set([	
			[ 1,   1   ], [ 0      ] ,
			[ 2,   1   ], [ 1      ],
			[ 10,  5   ], [ 5      ],
			[ 20,  10  ], [ 10     ],
			[ 100, 50  ], [ 50     ],
			[ 500, 200 ], [ 300    ],
		]);
		$subtract->save('sub.mesh');
	}
		
	print "Enter first number to subtract  : "; chomp(my $a = <>);
	print "Enter second number to subtract : "; chomp(my $b = <>);
	
	print "Result: ",$subtract->run([$a,$b])->[0],"\n";
	
	

mesh.htm

is fixed now, and you can have as many outputs as you want (how does 3 
inputs and 50 outputs sound? :-)</P>
<P>
<HR>
<H1><A NAME="description">DESCRIPTION</A></H1>
<P>AI::NeuralNet::Mesh is an optimized, accurate neural network Mesh.
It was designed with accuracy and speed in mind.</P>
<P>This network model is very flexible. It will allow for classic binary
operation or any range of integer or floating-point inputs you care
to provide. With this you can change activation types on a per node or
per layer basis (you can even include your own anonymous subs as 
activation types). You can add sigmoid transfer functions and control
the threshold. You can learn data sets in batch, and load CSV data
set files. You can do almost anything you need to with this module.
This code is designed to be flexible. Any new ideas for this module?
See AUTHOR, below, for contact info.</P>
<P>This module is designed to also be a customizable, extensible 
neural network simulation toolkit. Through a combination of setting
the $Connector variable and using custom activation functions, as
well as basic package inheritance, you can simulate many different
types of neural network structures with very little new code written

mesh.htm

layer ($layer_sizes[0]) is to be the input layer, and the last layer in @layer_sizes is to be
the output layer.
<P>Example:</P>
<PRE>
        my $net = AI::NeuralNet::Mesh-&gt;new([2,3,1]);</PRE>
<P>Creates a network with 2 input nodes, 3 hidden nodes, and 1 output node.</P>
<P></P>
<DT><STRONG>AI::NeuralNet::Mesh-&gt;new(\@array_of_hashes);</STRONG><BR>
<DD>
Another dandy constructor...this is my favorite. It allows you to tailor the number of layers,
the size of the layers, the activation type (you can even add anonymous inline subs with this one),
and even the threshold, all with one array ref-ed constructor.
<P>Example:</P>
<PRE>
        my $net = AI::NeuralNet::Mesh-&gt;new([
            {
                    nodes        =&gt; 2,
                    activation   =&gt; 'linear'
                },
                {
                    nodes        =&gt; 3,
                    activation   =&gt; sub {
                        my $sum  =  shift;
                        return $sum + rand()*1;
                    }
                },
                {
                    nodes        =&gt; 1,
                    activation   =&gt; 'sigmoid',
                    threshold    =&gt; 0.75
                }
        ]);

mesh.htm

The code ref is called with this syntax:</P>
<PRE>
        $output = &amp;$code_ref($sum_of_inputs, $self);
</PRE>
<P>The code ref is expected to return a value to be used as the output of the node.
The code ref also has access to all the data of that node through the second argument,
a blessed hash reference to that node.</P>
<P>See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions
other than the ones listed above.</P>
<P>The activation type for each layer is preserved across load/save calls.</P>
<P>EXCEPTION: Due to the constraints of Perl, I cannot load/save the actual subs that the code
ref option points to. Therefore, you must re-apply any code ref activation types after a 
<A HREF="#item_load"><CODE>load()</CODE></A> call.</P>
<P></P>
<DT><STRONG><A NAME="item_node_activation">$net-&gt;node_activation($layer,$node,$type);</A></STRONG><BR>
<DD>
This sets the activation function for a specific node in a layer. The same notes apply
here as to the <A HREF="#item_activation"><CODE>activation()</CODE></A> method above.
<P></P>
<DT><STRONG><A NAME="item_threshold">$net-&gt;threshold($layer,$value);</A></STRONG><BR>
<DD>

mesh.htm

<P></P>
<DT><STRONG><A NAME="item_crunch">$net-&gt;crunch($string);</A></STRONG><BR>
<DD>
This splits a string passed with /[\s\t]/ into an array ref containing unique indexes
to the words. The words are stored in an internal array and preserved across <A HREF="#item_load"><CODE>load()</CODE></A> and <A HREF="#item_save"><CODE>save()</CODE></A>
calls. This is designed to be used to generate unique maps suitable for passing to <A HREF="#item_learn"><CODE>learn()</CODE></A> and 
<A HREF="#item_run"><CODE>run()</CODE></A> directly. It returns an array ref.
<P>The words are not duplicated internally. For example:</P>
<PRE>
        $net-&gt;crunch(&quot;How are you?&quot;);</PRE>
<P>Will probably return an array ref containing 1,2,3. A subsequent call of:</P>
<PRE>
    $net-&gt;crunch(&quot;How is Jane?&quot;);</PRE>
<P>Will probably return an array ref containing 1,4,5. Notice, the first element stayed
the same. That is because it already stored the word ``How''. So, each word is stored
only once internally and the returned array ref reflects that.</P>
<P></P>
<DT><STRONG><A NAME="item_uncrunch">$net-&gt;uncrunch($array_ref);</A></STRONG><BR>
<DD>
Uncrunches a map (array ref) into a scalar string of words separated by ' ' and returns the 
string. This is meant to be used as a counterpart to the <A HREF="#item_crunch"><CODE>crunch()</CODE></A> method, above, possibly to 

mesh.htm

        $net-&gt;activation(4,range(6,15,26,106,28,3));</PRE>
<P>Note: when using a <A HREF="#item_range"><CODE>range()</CODE></A> activator, train the
net TWICE on the data set, because the first time
the <A HREF="#item_range"><CODE>range()</CODE></A> function searches for the top value in
the inputs, and therefore, results could fluctuate.
The second learning cycle guarantees more accuracy.</P>
<P>The actual code that implements the range closure is
a bit convoluted, so I will expand on it here as a simple
tutorial for custom activation functions.</P>
<PRE>
        = line 1 =      sub {
        = line 2 =              my @values = ( 6..10 );
        = line 3 =              my $sum   = shift;
        = line 4 =              my $self  = shift;
        = line 5 =              $self-&gt;{top_value}=$sum if($sum&gt;$self-&gt;{top_value});
        = line 6 =              my $index = intr($sum/$self-&gt;{top_value}*$#values);
        = line 7 =              return $values[$index];
        = line 8 =      }</PRE>
<P>Now, the actual function fits in one line of code, but I expanded it a bit
here. Line 2 creates our array of allowed output values. Lines 3 and 4
grab our parameters off the stack which allow us access to the

mesh.htm

$threshold is used to decide if an input is true or false (1 or 0). If 
an input is below $threshold, it is false. $threshold defaults to 0.5.
<P>You can get this into your namespace with the ':acts' export 
tag like so:
</P>
<PRE>
        use AI::NeuralNet::Mesh ':acts';</PRE>
<P>Let's look at the code real quick, as it shows how to get at the individual
input connections:</P>
<PRE>
        = line 1 =      sub {
        = line 2 =              my $sum  = shift;
        = line 3 =              my $self = shift;
        = line 4 =              my $threshold = 0.50;
        = line 5 =              for my $x (0..$self-&gt;{_inputs_size}-1) { 
        = line 6 =                      return 0.000001 if($self-&gt;{_inputs}-&gt;[$x]-&gt;{value}&lt;$threshold)
        = line 7 =              }
        = line 8 =              return $sum/$self-&gt;{_inputs_size};
        = line 9 =      }</PRE>
<P>Lines 2 and 3 pull in our sum and self reference. Line 5 opens a loop to go over
all the input lines into this node. Line 6 looks at each input line's value 

mesh.htm

this variable, be sure to add the fully qualified name of the method. For example,
in the ALN example, I use a connector in the main package called <CODE>tree()</CODE> instead of
the default connector. Before I call the <A HREF="#item_new"><CODE>new()</CODE></A> constructor, I use this line of code:</P>
<PRE>
        $AI::NeuralNet::Mesh::Connector = 'main::tree'
</PRE>
<P>The tree() function is called as a blessed method when it is used internally, providing
access to the blessed reference in the first argument.</P>
<P>Example connector:</P>
<PRE>
        sub connect_three {
        my $self        =       shift;
        my $r1a         =       shift;
        my $r1b         =       shift;
        my $r2a         =       shift;
        my $r2b         =       shift;
        my $mesh        =       $self-&gt;{mesh};

            for my $y (0..($r1b-$r1a)-1) {
                        $mesh-&gt;[$y+$r1a]-&gt;add_output_node($mesh-&gt;[$y+$r2a-1]) if($y&gt;0);
                        $mesh-&gt;[$y+$r1a]-&gt;add_output_node($mesh-&gt;[$y+$r2a]) if($y&lt;($r2b-$r2a));

mesh.htm

reference over and over.</P>
<P>The loop that follows the arguments in the above example is very simple. It opens
a <CODE>for()</CODE> loop over the range of numbers, calculating the size instead of just going
$r1a..$r1b because we use the loop index with the next layer up as well.</P>
<P>$y + $r1a gives the index into the mesh array of the current node to connect the output FROM.
We need to connect this node's output lines to the next layer's input nodes. We do this
with a simple method of the outputting node (the node at $y+$r1a), called add_output_node().</P>
<P><CODE>add_output_node()</CODE> takes one simple argument: a blessed reference to a node that it is supposed
to output its final value TO. We get this blessed reference with more simple addition.</P>
<P>$y + $r2a gives us the node directly above the first node (supposedly...I'll get to the ``supposedly''
part in a minute.) By adding or subtracting from this number we get the neighbor nodes.
In the above example you can see we check the $y index to see that we haven't come close to
any of the edges of the range.</P>
<P>Using $y+$r2a we get the index of the node to pass to <CODE>add_output_node()</CODE> on the first node at
$y+<STRONG>$r1a</STRONG>.</P>
<P>And that's all there is to it!</P>
<P>For the fun of it, we'll take a quick look at the default connector.
Below is the actual default connector code, albeit a bit cleaned up, as well as
line numbers added.</P>
<PRE>
        = line 1  =     sub _c {
        = line 2  =     my $self        =       shift;
        = line 3  =     my $r1a         =       shift;
        = line 4  =     my $r1b         =       shift;
        = line 5  =     my $r2a         =       shift;
        = line 6  =     my $r2b         =       shift;
        = line 7  =     my $mesh        =       $self-&gt;{mesh};
        = line 8  =             for my $y ($r1a..$r1b-1) {
        = line 9  =                     for my $z ($r2a..$r2b-1) {
        = line 10 =                             $mesh-&gt;[$y]-&gt;add_output_node($mesh-&gt;[$z]);
        = line 11 =                     }

mesh.htm

<P>You can always download the latest copy of AI::NeuralNet::Mesh
from <A HREF="http://www.josiah.countystart.com/modules/get.pl?mesh:pod">http://www.josiah.countystart.com/modules/get.pl?mesh:pod</A></P>
<P>
<HR>
<H1><A NAME="mailing list">MAILING LIST</A></H1>
<P>A mailing list has been set up for AI::NeuralNet::Mesh and AI::NeuralNet::BackProp. 
The list is for discussion of AI and neural net related topics as they pertain to 
AI::NeuralNet::BackProp and AI::NeuralNet::Mesh. I will also announce in the group
each time a new release of AI::NeuralNet::Mesh is available.</P>
The list address is: <A HREF="mailto:ai-neuralnet-backprop@egroups.com">ai-neuralnet-backprop@egroups.com</A> <BR>
To subscribe, send a blank email to: <A HREF="mailto:ai-neuralnet-backprop-subscribe@egroups.com">ai-neuralnet-backprop-subscribe@egroups.com</A> 

<BR><BR><BR>
<HR>
<A HREF="http://www.josiah.countystart.com/modules/get.pl?mesh:(c)"><B>AI::NeuralNet::Mesh</B></A> - An optimized, accurate neural network Mesh. By <A HREF="mailto:jdb@wcoil.com"><B>Josiah Bryan</B></A>.


</BODY>

</HTML>

test.pl

# Before `make install' is performed this script should be runnable with
# `make test'. After `make install' it should work as `perl test.pl'

BEGIN { $| = 1; print "1..13\n"; }
END {print "not ok 1\n" unless $loaded;}

sub t { my $f=shift;$t++;my $str=($f)?"ok $t":"not ok $t";print $str,"\n";}

use AI::NeuralNet::Mesh;
$loaded = 1;
t 1;

my $net = new AI::NeuralNet::Mesh(2,2,1);
t $net;
t ($net->intr(0.51) eq 1);
t ($net->intr(0.00001) eq 0);
t ($net->intr(0.50001) eq 1);


