AI-NeuralNet-Mesh


examples/ex_aln.pl

		    
		    if(!$db{"header"}) {
		    	$self->{error} = "Invalid format.";
		    	return undef;
		    }
		    
		    return $self->load_old($file) if($self->version($db{"header"})<0.21);
		    
		    if($load_flag) {
			    undef $self;
		        $self = Tree($db{inputs},$db{outputs});
			} else {
				$self->{inputs}			= $db{inputs};
			    $self->{nodes}			= $db{nodes};
				$self->{outputs}		= $db{outputs};
				$self->{layers} 		= [split(',',$db{layers})];
				$self->{total_layers}	= $db{total_layers};
				$self->{total_nodes}	= $db{total_nodes};
			}
			
		    # Load variables
		    $self->{random}		= $db{"rand"};
		    $self->{const}		= $db{"const"};
	        $self->{col_width}	= $db{"cw"};
		    $self->{rA}			= $db{"rA"};
			$self->{rB}			= $db{"rB"};
			$self->{rS}			= $db{"rS"};
			$self->{rRef}		= [split /\,/, $db{"rRef"}];
			
		   	$self->{_crunched}->{_length}	=	$db{"crunch"};
			
			for my $a (0..$self->{_crunched}->{_length}-1) {
				$self->{_crunched}->{list}->[$a] = $db{"c$a"}; 
			}
			
			$self->_init();
		    
			my $n = 0;
			for my $x (0..$self->{total_layers}) {
				for my $y (0..$self->{layers}->[$x]-1) {
				    my @l = split /\,/, $db{"n$n"};
					for my $z (0..$self->{layers}->[$x-1]-1) {
						$self->{mesh}->[$n]->{_inputs}->[$z]->{weight} = $l[$z];
					}
					my $z = $self->{layers}->[$x-1];
					$self->{mesh}->[$n]->{activation} = $l[$z];
					$self->{mesh}->[$n]->{threshold}  = $l[$z+1];
					$self->{mesh}->[$n]->{mean}       = $l[$z+2];
					$n++;
				}
			}
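			# Illustrative note (values assumed, not taken from the original
			# file): each "n$n" record parsed above is one comma-separated
			# entry per node -- weights first (one per input node in the
			# previous layer), then activation, threshold, and mean, e.g.
			#   $db{"n3"} = "0.12,0.87,linear,0.5,0";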
			
	    	$self->extend($self->{_original_specs});
	
			return $self;
	    };
	    
		# If $leaves is a string, then it will be numerically equal to 0, so 
		# try to load it as a network file.
		if($leaves == 0) {  
		    # We use a "1" flag as the second argument to indicate that we 
		    # want load() to call the new constructor to make a network the
		    # same size as in the file and return a reference to the network,
		    # instead of just creating the network from a pre-existing reference.
			my $self = AI::NeuralNet::Mesh->new(1,1);
			return $self->load($leaves,1);
		}
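		# Usage sketch (hypothetical file name): passing a saved network's
		# file name instead of a leaf count takes the branch above, e.g.
		#   my $net = Tree("my_tree.net");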
		
		# Initialize our counter and our specs ref
		my $specs  = [];
		my $level  = 0;
		
		# Create our custom node activation
		my $act    = sub {
			shift; my $self = shift;
			my $b1 = intr($self->{_inputs}->[0]->{weight});
			my $b2 = intr($self->{_inputs}->[1]->{weight});
			my $x1 = intr($self->{_inputs}->[0]->{input});
			my $x2 = intr($self->{_inputs}->[1]->{input});
			# node type: $b1 $b2
			# OR       : 1   1
			# AND	   : 0   0
			# L        : 1   0
			# R        : 0   1
			# This is made possible by a little four-way 
			# formula from the ATREE 2.7 demo by 
			# M. Thomas, <monroe@cs.UAlberta.CA>
			$self->{_last_output} = ($b1+1)*$x1 + ($b2+1)*$x2 >= 2 ? 1 : 0;
			# We store the last output to use in our custom
			# weight adjustment function, below.
			return $self->{_last_output};
		};	
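		# Sanity check of the formula above (illustrative, not part of the
		# original example): with integer leads $b1/$b2 in {0,1}, the
		# expression ($b1+1)*$x1 + ($b2+1)*$x2 >= 2 reduces to the four
		# gate types in the table:
		#   ($b1,$b2)=(1,1): 2*$x1+2*$x2>=2  ==  ($x1 || $x2)   # OR
		#   ($b1,$b2)=(0,0):   $x1+$x2>=2    ==  ($x1 && $x2)   # AND
		#   ($b1,$b2)=(1,0): 2*$x1+$x2>=2    ==   $x1           # L
		#   ($b1,$b2)=(0,1):   $x1+2*$x2>=2  ==   $x2           # R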
		
		# Round the leaf count up until both it and half of it divide
		# evenly by two (a leaf count of 1 is left alone).
		__LEAF_IT:
        $leaves++ if($leaves%2 && $leaves!=1);
        $leaves++,goto __LEAF_IT if(($leaves/2)%2);
        # Create a layer spec array with every layer having half
        # the number of nodes of the layer before it
        while($leaves!=$roots) { 
			$specs->[$level++]={ nodes=>$leaves, activation=>$act };
	        $leaves/=2;
	        $leaves++ if($leaves%2 && $leaves!=$roots);
		}
		$specs->[$level++]={ nodes=>$roots, activation=>$act };
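		# For example, an 8-leaf, 1-root tree walks $leaves down
		# 8 -> 4 -> 2 and then appends the root layer, giving the
		# node spec [8,4,2,1] mentioned in the _c_tree comments below.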
		
		# Add a method to the net to print out the node types
		*{'AI::NeuralNet::Mesh::print_aln'} = sub {
			my $self=shift;
			my ($c,$l)=(0,0);
			for(0..$self->{total_nodes}-1) {
				my $b1 = intr($self->{mesh}->[$_]->{_inputs}->[0]->{weight});
				my $b2 = intr($self->{mesh}->[$_]->{_inputs}->[1]->{weight});
			    print "OR "  if( $b1 &&  $b2);
				print "AND " if(!$b1 && !$b2);
				print "L "   if( $b1 && !$b2);
				print "R "   if(!$b1 &&  $b2);
				$c=0,$l++,print "\n" if ++$c >= $self->{layers}->[$l];
			}
		};
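		# Usage sketch (illustrative): after training, $net->print_aln
		# prints one gate name (OR/AND/L/R) per node, starting a new
		# line at the end of each layer.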
		
		# Add a custom node weight adjuster to learn faster
		*{'AI::NeuralNet::Mesh::node::adjust_weight'} = sub {
			my ($self,$inc,$target) = @_; 
			my $f;
			my $b1 = intr($self->{_inputs}->[0]->{weight});
			my $b2 = intr($self->{_inputs}->[1]->{weight});
			$f=1 if( $b1 &&  $b2);
			$f=2 if(!$b1 && !$b2);
			my $lo = $self->{_last_output};
			if($lo!=$target) {
				# Adjust right lead if $lo, else adjust left lead
				($target &&  $lo)?$self->{_inputs}->[0]->{weight}++:$self->{_inputs}->[0]->{weight}--;
				($target && !$lo)?$self->{_inputs}->[1]->{weight}++:$self->{_inputs}->[1]->{weight}--;
			}
			# Thanks to Rolf Mandersheidd for this set of nested conditions on one line.
			# This determines heuristic error responsibility on the children
			# and recurses the error up the tree.
			if($lo!=$target || $f!=($lo?1:2)) {
				$self->{_inputs}->[1]->{node}->adjust_weight($inc,$target) if($self->{_inputs}->[1]->{node});
			} else {
				$self->{_inputs}->[0]->{node}->adjust_weight($inc,$target) if($self->{_inputs}->[0]->{node});
			}
		};

	    # Set our custom node connector
		$AI::NeuralNet::Mesh::Connector = 'main::_c_tree'; 
		
		# Create a new network from our specs
		my $net = AI::NeuralNet::Mesh->new($specs);
		$net->{_original_specs} = $specs;
		
		# Return our new network
		return $net;
	}
	
	# Our custom node connector for the tree function, above.
	# This connects every two nodes from the first range
	# to one node of the second range. This is only meant
	# to be used in a factored layer mesh, such as one with a
	# [8,4,2,1] node specification array. You should never
	# worry about what the node spec array is, as that is
	# built by tree().
	sub _c_tree {
    	my ($self,$r1a,$r1b,$r2a,$r2b)=@_;
    	my $mesh = $self->{mesh};
    	my $z=$r2a;
    	for(my $y=$r1a;$y<$r1b;$y+=2) {
			$mesh->[$y]->add_output_node($mesh->[$z]);
			$mesh->[$y+1]->add_output_node($mesh->[$z]);
			$z++;
		}
	}
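	# End-to-end sketch (hypothetical data; Tree() is the constructor
	# defined above, and learn()/run() are assumed to be the standard
	# AI::NeuralNet::Mesh methods, unchanged by this example):
	#
	#   my $net = Tree(8,1);                  # 8 leaves down to 1 root
	#   $net->learn([1,0,1,0,1,0,1,0], [1]);  # teach a single pattern
	#   print $net->run([1,0,1,0,1,0,1,0])->[0], "\n";
	#   $net->print_aln;                      # show the learned gate types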
    
	


