AI-NeuralNet-Mesh

 View release on MetaCPAN or search on MetaCPAN

examples/ex_aln.pl  view on Meta::CPAN

	# realized.) Also adds a method to the neural network gates, print_aln().
	sub Tree {
		# Grab our leaves and roots
		my $leaves = shift;
		my $roots  = shift || $leaves;
	    
	    # Replace the load function with a new one to preserve the
	    # load activations. We have to add this up here because next
	    # thing we do is check if they passed a file name as $leaves,
	    # and we need to have our new load sub already in place before
	    # we try to load anything in $leaves.
	    # Install a replacement load() into AI::NeuralNet::Mesh so the ALN
	    # activation data saved alongside the weights is restored on load.
	    # Returns $self (or a freshly built network when $load_flag is true);
	    # returns undef and sets $self->{error} on failure.
	    *{'AI::NeuralNet::Mesh::load'} = sub {
	        my $self      = shift;
	        my $file      = shift;
	        my $load_flag = shift;

	        # Callers test for a false return and then read $self->{error}.
	        if(!(-f $file)) {
	        	$self->{error} = "File \"$file\" does not exist.";
	        	return undef;
	        }

	        # Three-arg open with a lexical handle, checked for failure.
	        # (The original used an unchecked two-arg open on a bareword
	        # handle, which silently produced an empty network file.)
	        open(my $fh, '<', $file) or do {
	        	$self->{error} = "Could not open file \"$file\": $!";
	        	return undef;
	        };
	        my @lines = <$fh>;
	        close($fh);

	        # The network file is a flat key=value store, one pair per line.
	        my %db;
	        for my $line (@lines) {
	        	chomp($line);
	        	my ($a,$b) = split /=/, $line;
	        	$db{$a} = $b;
	        }

	        if(!$db{"header"}) {
	        	$self->{error} = "Invalid format.";
	        	return undef;
	        }

	        # Files written before version 0.21 use the legacy layout.
	        return $self->load_old($file) if($self->version($db{"header"}) < 0.21);

	        if($load_flag) {
	        	# Build a brand-new network sized from the file and discard
	        	# the placeholder object the caller created.
	        	undef $self;
	        	$self = Tree($db{inputs},$db{outputs});
	        } else {
	        	# Re-use the existing network object, refreshing its geometry.
	        	$self->{inputs}			= $db{inputs};
	        	$self->{nodes}			= $db{nodes};
	        	$self->{outputs}		= $db{outputs};
	        	$self->{layers} 		= [split(',',$db{layers})];
	        	$self->{total_layers}	= $db{total_layers};
	        	$self->{total_nodes}	= $db{total_nodes};
	        }

	        # Restore tunables and the crunched-word cache.
	        $self->{random}		= $db{"rand"};
	        $self->{const}		= $db{"const"};
	        $self->{col_width}	= $db{"cw"};
	        $self->{rA}			= $db{"rA"};
	        $self->{rB}			= $db{"rB"};
	        $self->{rS}			= $db{"rS"};
	        $self->{rRef}		= [split /\,/, $db{"rRef"}];

	        $self->{_crunched}->{_length}	=	$db{"crunch"};
	        for my $a (0..$self->{_crunched}->{_length}-1) {
	        	$self->{_crunched}->{list}->[$a] = $db{"c$a"};
	        }

	        $self->_init();

	        # Each "n$n" entry holds one node's input weights followed by
	        # its activation, threshold and mean, comma-separated.
	        my $n = 0;
	        for my $x (0..$self->{total_layers}) {
	        	for my $y (0..$self->{layers}->[$x]-1) {
	        	    my @l = split /\,/, $db{"n$n"};
	        		for my $z (0..$self->{layers}->[$x-1]-1) {
	        			$self->{mesh}->[$n]->{_inputs}->[$z]->{weight} = $l[$z];
	        		}
	        		my $z = $self->{layers}->[$x-1];
	        		$self->{mesh}->[$n]->{activation} = $l[$z];
	        		$self->{mesh}->[$n]->{threshold}  = $l[$z+1];
	        		$self->{mesh}->[$n]->{mean}       = $l[$z+2];
	        		$n++;
	        	}
	        }

	        # Re-apply the ALN-specific node specs saved by the constructor.
	        $self->extend($self->{_original_specs});

	        return $self;
	    };
	    
	# If $leaves is a string, then it will be numerically equal to 0, so 
	# try to load it as a network file.
		if($leaves == 0) {  
	    # We use a "1" flag as the second argument to indicate that we 
	    # want load() to call the new constructor to make a network the
	    # same size as in the file and return a reference to the network,
	    # instead of just creating the network from a pre-existing reference.
			my $self = AI::NeuralNet::Mesh->new(1,1);
			return $self->load($leaves,1);
		}
		
	# Initialize our counter and our specs ref
		my $specs  = [];
		my $level  = 0;
		
		# Create our custom node activation
		my $act    = sub {
			shift; my $self = shift;
			my $b1 = intr($self->{_inputs}->[0]->{weight});
			my $b2 = intr($self->{_inputs}->[1]->{weight});
			my $x1 = intr($self->{_inputs}->[0]->{input});
			my $x2 = intr($self->{_inputs}->[1]->{input});
			# node type: $b1 $b2
			# OR       : 1   1
			# AND	   : 0   0
			# L        : 1   0
			# R        : 0   1
			# This is made possible by this little four-way 
			# forumla is from the ATREE 2.7 demo by 
			# M. Thomas, <monroe@cs.UAlberta.CA>
			$self->{_last_output} = ($b1+1)*$x1 + ($b2+1)*$x2 >= 2 ? 1 : 0;
			# We store the last output to use in our custom
			# weight adjustment function, below.
			return $self->{_last_output};
		};	



( run in 0.796 second using v1.01-cache-2.11-cpan-39bf76dae61 )