AI-NNFlex


README.txt

			[1,0],[1],
			[1,1],[0]]);



my $counter=0;
my $err = 10;
while ($err >.001)
{
	$err = $dataset->learn($network);
	print "Epoch = $counter error = $err\n";
	$counter++;
}


foreach (@{$dataset->run($network)})
{
	foreach (@$_){print $_}
	print "\n";	
}
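
The excerpt above picks up after the network and dataset have been constructed. A minimal setup sketch, assuming the add_layer/init interface used by the distribution's bundled examples (parameter names and values here are illustrative, not the README's exact code):

use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;

# hedged sketch -- constructor parameters are illustrative
my $network = AI::NNFlex::Backprop->new(learningrate => .2, bias => 1);

$network->add_layer(nodes => 2, activationfunction => "tanh");
$network->add_layer(nodes => 2, activationfunction => "tanh");
$network->add_layer(nodes => 1, activationfunction => "linear");

$network->init();

my $dataset = AI::NNFlex::Dataset->new([
		[0,0],[0],
		[0,1],[1],
		[1,0],[1],
		[1,1],[0]]);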



examples/add.pl

[ 10,  10  ], [ 20   ],
[ 15,  15  ], [ 30   ],
[ 12,  8   ], [ 20   ],

]);

my $err = 10;
# Stop after 4096 epochs -- don't want to wait more than that
for ( my $i = 0; ($err > 0.0001) && ($i < 4096); $i++ ) {
    $err = $dataset->learn($network);
    print "Epoch = $i error = $err\n";
}

foreach (@{$dataset->run($network)})
{
    foreach (@$_){print $_}
    print "\n";    
}

print "this should be 4000 - ";
$network->run([2000,2000]);
foreach ( @{$network->output}){print $_."\n";}

foreach my $a ( 1..10 ) {
    foreach my $b ( 1..10 ) {
        my ($ans)   = $a + $b;
        my ($nnans) = @{$network->run([$a,$b])};
        print "[$a] [$b] ans=$ans but nnans=$nnans\n" unless $ans == $nnans;
    }
}
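
# note: $nnans is a floating-point approximation, so the exact == test above
# will report a mismatch for nearly every pair; a hedged alternative is a
# tolerance check, e.g.
#
#     print "[$a] [$b] ans=$ans but nnans=$nnans\n"
#         unless abs($ans - $nnans) < 0.5;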


examples/bp.pl


            #change network weights
            WeightChangesHO();
            WeightChangesIH();
        }

        #display the overall network error
        #after each epoch
        calcOverallError();

        print "epoch = ".$j."  RMS Error = ".$RMSerror."\n";

    }

    #training has finished
    #display the results
    displayResults();

 }
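
# calcOverallError (shown truncated further down) produces the RMS error
# printed after each epoch above; a hedged sketch of the conventional
# calculation, reusing this script's variable names:
#
#     $RMSerror = 0.0;
#     for (my $i = 0; $i < $numPatterns; $i++) {
#         $patNum = $i;
#         calcNet();
#         $RMSerror += ($trainOutput[$patNum] - $outPred) ** 2;
#     }
#     $RMSerror = sqrt($RMSerror / $numPatterns);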

#============================================================

examples/bp.pl

    }
  }

 }


#************************************
 sub initData()
 {

    print "initialising data\n";

    # the data here is the XOR data
    # it has been rescaled to the range
    # [-1,1]
    # an extra input valued 1 is also added
    # to act as the bias

    $trainInputs[0][0]  = 1;
    $trainInputs[0][1]  = -1;
    $trainInputs[0][2]  = 1;    #bias

examples/bp.pl

 }


#************************************
 sub displayResults()
    {
     for(my $i = 0;$i<$numPatterns;$i++)
        {
        $patNum = $i;
        calcNet();
        print "pat = ".($patNum+1)." actual = ".$trainOutput[$patNum]." neural model = ".$outPred."\n";
        }
    }


#************************************
sub calcOverallError()
    {
     $RMSerror = 0.0;
     for(my $i = 0;$i<$numPatterns;$i++)
        {

examples/cars/cars.pl

	.$translate{'lug_boot'}->{$lug_boot}. " "
	.$translate{'safety'}->{$safety};

	my $outputString = $translate{'accept'}->{$accept};


	my @inputArray = split / /,$inputString;
	my @outputArray = split / /,$outputString;
	if (scalar @inputArray != 12 || scalar @outputArray != 2)
	{
		print "--$inputString $outputString\n";
	}

	push @dataArray,\@inputArray,\@outputArray;
	
}

close CARS;


######################################################################

examples/cars/cars.pl

$network->init();

$network->connect(fromlayer=>2,tolayer=>2);
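# (the from and to layers are the same here, so this presumably adds
#  recurrent, intra-layer connections within layer 2)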

my $counter=0;
my $err = 10;
while ($err >.001)
{
	$err = $dataset->learn($network);

	print "Epoch $counter: Error = $err\n";
	$counter++;
}


foreach (@{$dataset->run($network)})
{
	foreach (@$_){print $_}
	print "\n";	
}



examples/hopfield.pl


$network->learn($dataset);

#my $outputref = $network->run([-1,1,-1,1]);
#my $outputref = $network->run([-1,1,-1,1]);
#my $outputref = $network->run([-1,1,-1,1]);
my $outputref = $network->run([1,-1,1,1]);
$outputref = $network->run([1,-1,1,1]);
$outputref = $network->run([1,-1,1,1]);

print @$outputref;
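
# the three identical run() calls above presumably give the network's
# activations a chance to settle on a stored pattern; a hedged sketch of
# the same thing written as a loop:
my $settled;
$settled = $network->run([1,-1,1,1]) for 1..3;
print join(" ",@$settled),"\n";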

examples/lesion.pl

			[1,1],[0]]);



my $counter=0;
my $err = 10;
while ($err >.001)
{
	$err = $dataset->learn($network);

	print "Epoch $counter: Error = $err\n";
	$counter++;
}

$network->lesion(nodes=>0.5,connections=>0.5);

$network->dump_state(filename=>"weights-learned.wts",activations=>1);

foreach (@{$dataset->run($network)})
{
	foreach (@$_){print $_}
	print "\n";	
}
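
# a hedged sketch of quantifying the lesion damage: sum the squared
# difference between the lesioned network's outputs and the dataset
# targets (the targets are the odd-indexed entries of the dataset's flat
# data array, as used by the Dataset save() routine further down)
my $results = $dataset->run($network);
my $sumsq = 0;
for (my $i = 0; $i < @$results; $i++)
{
	my $target = $dataset->{'data'}->[$i*2+1];
	for (my $j = 0; $j < @{$results->[$i]}; $j++)
	{
		$sumsq += ($results->[$i][$j] - $target->[$j]) ** 2;
	}
}
print "post-lesion sum squared error = $sumsq\n";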


examples/reinforceTest.pl


my $object = AI::NNFlex->new([{"nodes"=>2,"persistent activation"=>0,"decay"=>0.0,"random activation"=>0,"threshold"=>0.0,"activation function"=>"tanh","random weights"=>1},
                        {"nodes"=>2,"persistent activation"=>0,"decay"=>0.0,"random activation"=>0,"threshold"=>0.0,"activation function"=>"tanh","random weights"=>1},
                       {"nodes"=>1,"persistent activation"=>0,"decay"=>0.0,"random activation"=>0,"threshold"=>0.0,"activation function"=>"linear","random weights"=>1}],{'random connections'=>0,'networktype'=>'feedforward', 'random weights'=>1,'learn...


$object->run([1,0]);
$output = $object->output();
foreach (@$output)
{
	print "1,0 - $_ ";
}
print "\n";

$object->run([0,1]);
$err = $object->learn([1]);
$output = $object->output();
foreach (@$output)
{
	print "0,1 - $_ ";
}
print "\n";


$object->run([0,1]);
$err = $object->learn([1]);
$output = $object->output();
foreach (@$output)
{
	print "0,1 - $_ ";
}
print "\n";

$object->run([0,1]);
$output = $object->output();
foreach (@$output)
{
	print "0,1 - $_ ";
}
print "\n";



$object->run([1,0]);
$output = $object->output();
foreach (@$output)
{
	print "1,0 - $_ ";
}
print "\n";

examples/test.pl


use strict;
use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;


# create the numbers
my %numbers;
for (0..255)
{	
	my @array = split //,sprintf("%08b",$_);
	$numbers{$_} = \@array;
}

my @data;
for (my $counter=0;$counter < 14;$counter+=2)
{
	push @data,$numbers{$counter};

	push @data,$numbers{$counter*$counter};

examples/test.pl


my $dataset = AI::NNFlex::Dataset->new(\@data);



my $counter=0;
my $err = 10;
while ($err >.01)
{
	$err = $dataset->learn($network);
	print "Epoch = $counter error = $err\n";
	$counter++;
}

$network->run([0,0,0,0,0,1,0,1]);
my $output = $network->output();
print $output."\n";	# note: this prints the array reference; the values follow below

foreach (@$output){print $_}
print "\n";

examples/xor.pl

$dataset->save(filename=>'xor.pat');
$dataset->load(filename=>'xor.pat');
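
# based on the save() routine in lib/AI/NNFlex/Dataset.pm (shown later),
# the xor.pat file written above should look roughly like this, assuming
# the usual four 0/1 XOR patterns:
#
#	No. of patterns : 4
#	No. of input units : 2
#	No. of output units : 1
#
#	# Input pattern 1:
#	0 0
#	# Output pattern 1:
#	0
#	# Input pattern 2:
#	0 1
#	# Output pattern 2:
#	1
#	(and so on for the remaining two patterns)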


my $counter=0;
my $err = 10;
while ($err >.001)
#for (1..1500)
{
	$err = $dataset->learn($network);
	print "Epoch = $counter error = $err\n";
	$counter++;
}


foreach (@{$dataset->run($network)})
{
	foreach (@$_){print $_}
	print "\n";	
}

print "this should be 1 - ".@{$network->run([0,1])}."\n";

examples/xor_minimal.pl

			[1,1],[0]]);



my $counter=0;
my $err = 10;
while ($err >.001)
{
	$err = $dataset->learn($network);

	print "Epoch $counter: Error = $err\n";
	$counter++;
}


foreach (@{$dataset->run($network)})
{
	foreach (@$_){print $_}
	print "\n";	
}



examples/xorminus.pl

$dataset->save(filename=>'xor.pat');
$dataset->load(filename=>'xor.pat');


my $counter=0;
my $err = 10;
while ($err >.001)
#for (1..1500)
{
	$err = $dataset->learn($network);
	print "Epoch = $counter error = $err\n";
	$counter++;
}


foreach (@{$dataset->run($network)})
{
	foreach (@$_){print $_}
	print "\n";	
}

print "this should be 1 - ".@{$network->run([-1,1])}."\n";

lib/AI/NNFlex.pm



	# 0 is error so ALWAYS display
	if (!(grep {$_ == 0} @DEBUGLEVELS)){push @DEBUGLEVELS,0}

	foreach (@DEBUGLEVELS)
	{
	
		if ($level == $_)
		{
			print "$message\n";
		}
	}
}


###############################################################################
# AI::NNFlex::dump_state
###############################################################################
sub dump_state
{

lib/AI/NNFlex.pm

	
	open (OFILE,">$filename") or return "Can't create weights file $filename";


	foreach my $layer (@{$network->{'layers'}})
	{
		foreach my $node (@{$layer->{'nodes'}})
		{
			if ($activations)
			{
				print OFILE $node->{'nodeid'}." activation = ".$node->{'activation'}."\n";
			}
			my $connectedNodeCounter=0;
			foreach my $connectedNode (@{$node->{'connectedNodesEast'}->{'nodes'}})
			{
				my $weight = ${$node->{'connectedNodesEast'}->{'weights'}}[$connectedNodeCounter];
				print OFILE $node->{'nodeid'}." <- ".$connectedNode->{'nodeid'}." = ".$weight."\n";
				$connectedNodeCounter++;
			}

			if ($node->{'connectedNodesWest'})
			{
				my $connectedNodeCounter=0;
				foreach my $connectedNode (@{$node->{'connectedNodesWest'}->{'nodes'}})
				{
					#FIXME - a more easily read format would be connectedNode first in the file
					my $weight = ${$node->{'connectedNodesWest'}->{'weights'}}[$connectedNodeCounter];
					print OFILE $node->{'nodeid'}." -> ".$connectedNode->{'nodeid'}." = ".$weight."\n";
					$connectedNodeCounter++;
				}
			}
		}
	}




	close OFILE;
}
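
# a hedged sketch of the resulting weights file: one "activation" line per
# node when activations=>1 is passed, then one line per eastward/westward
# connection as printed above; node ids and values here are purely
# illustrative
#
#	0 activation = 0.76
#	0 <- 2 = 0.31
#	0 <- 3 = -0.45
#	2 activation = 0.12
#	2 -> 0 = 0.31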

lib/AI/NNFlex/Dataset.pm

#################################################################
# save a dataset in an snns .pat file
#################################################################
sub save
{
	my $dataset = shift;
	my %config = @_;

	open (OFILE,">".$config{'filename'}) or return "File error $!";

	print OFILE "No. of patterns : ".((scalar @{$dataset->{'data'}})/2)."\n";
	print OFILE "No. of input units : ".(scalar @{$dataset->{'data'}->[0]})."\n";
	print OFILE "No. of output units : ".(scalar @{$dataset->{'data'}->[1]})."\n\n";

	my $counter = 1;
	my @values = @{$dataset->{'data'}};
	while (@values)
	{
		print OFILE "# Input pattern $counter:\n";
		my $input = shift (@values); 
		my @array = join " ",@$input;
		print OFILE @array;
		print OFILE "\n";

		print OFILE "# Output pattern $counter:\n";
		my $output = shift(@values); 
		@array = join " ",@$output;
		print OFILE @array;
		print OFILE "\n";

		$counter++;
	}

	close OFILE;
	return 1;
}


#############################################################


