AI-NeuralNet-Mesh
View this release on MetaCPAN, or search MetaCPAN.
examples/ex_add2.pl view on Meta::CPAN
# Fragment of examples/ex_add2.pl (excerpt — the enclosing sub begins
# before this view; the final brace below closes it).
# Runs the mesh on each test vector and records the percentage error of
# the network's prediction against the true sum of the inputs.
my @set;
my $fb;
my $net = shift;
my @data = @_;
# NOTE(review): $i, $answer and @percent_diff are package globals — the
# script apparently runs without 'use strict'; confirm before refactoring.
undef @percent_diff; #@answers; undef @predictions;
for( $i=0; defined( $data[$i] ); $i++ ){
@set = @{ $data[$i] };
# First output node of the mesh is the predicted sum.
$fb = $net->run(\@set)->[0];
# Print output
print "Test Factors: (",join(',',@set),")\n";
# eval of "a+b+..." computes the expected sum of the test factors.
$answer = eval( join( '+',@set ));
# Percent difference of prediction vs. truth.
# NOTE(review): divides by $answer — dies on a zero-sum test set; confirm
# inputs never sum to 0.
push @percent_diff, 100.0 * abs( $answer - $fb )/ $answer;
print "Prediction : $fb answer: $answer\n";
}
}
examples/ex_aln.pl view on Meta::CPAN
# Fragment of examples/ex_aln.pl (excerpt): load a previously saved ALN
# mesh, or train it on one 8-bit pattern and save it to disk.
# Learn a pattern and print stats.
if(!$net->load('aln.mesh')) {
print "Learning";
# learn() returns a summary value that is printed inline with the message.
print "Done!\nLearning took ",$net->learn([1,1,0,1,0,1,1,1],[0]),"\n";
$net->save('aln.mesh');
}
# Print logic gate types
$net->print_aln();
# Test it out
# NOTE(review): the printed pattern [1,1,0,1,0,1,1,1] does not match the
# vector actually passed to run(), [1,1,1,1,1,1,1,1] — confirm whether this
# is a deliberate generalization test or a copy/paste slip.
print "\nPattern: [1,1,0,1,0,1,1,1]".
"\nResult: ",$net->run([1,1,1,1,1,1,1,1])->[0],"\n";
######################################################################
#-################ ALN Implementation Code ########################-#
######################################################################
examples/ex_alpha.pl view on Meta::CPAN
# Fragment of examples/ex_alpha.pl (excerpt): build a 5x7 bitmap of a
# letter and ask the trained mesh which letter index it matches.
# Build a test map
my $tmp = [0,1,1,1,0,
1,0,0,0,1,
1,0,0,0,1,
1,1,1,1,1,
1,0,0,0,1,
1,0,0,0,1,
1,0,0,0,1];
# Display test map
print "\nTest map:\n";
# join_cols() pretty-prints the flat bitmap in rows of 5 columns.
$net->join_cols($tmp,5);
# Display network results
print "Letter index matched: ",$net->run($tmp)->[0],"\n";
examples/ex_and.pl view on Meta::CPAN
# Fragment of examples/ex_and.pl (excerpt — the enclosing
# if(!$net->load(...)) block begins before this view; the lone brace
# below closes it). Trains the mesh on the AND truth table, saves it,
# then tests all four input combinations.
# learn_set() takes a flat list of input-vector / expected-output pairs.
$net->learn_set([
[1,1], [1],
[1,0], [0],
[0,1], [0],
[0,0], [0],
]);
$net->save('and.mesh');
}
print "Learning complete.\n";
print "Testing with a gate value of (0,0):",$net->run([0,0])->[0],"\n";
print "Testing with a gate value of (0,1):",$net->run([0,1])->[0],"\n";
print "Testing with a gate value of (1,0):",$net->run([1,0])->[0],"\n";
print "Testing with a gate value of (1,1):",$net->run([1,1])->[0],"\n";
examples/ex_bmp.pl view on Meta::CPAN
# Fragment of examples/ex_bmp.pl (excerpt — the outer loop continues past
# this view): train the mesh on the image dataset $top+1 times, reporting
# forgetfulness and timing each pass.
if(!$net->load('images.mesh')) {
print "\nLearning started...\n";
# Make it learn the whole dataset $top times
my @list;
my $top=3;
for my $a (0..$top) {
my $t1=new Benchmark;
print "\n\nOuter Loop: $a\n";
# Test forgetfulness of earlier patterns after relearning the set.
my $f = $net->learn_set(\@data, inc => 0.1);
# Print it
print "\n\nForgetfullness: $f%\n";
# Save net to disk
$net->save('images.mesh');
my $t2=new Benchmark;
# BUG FIX: was timediff($t0,$t1) — $t0 is never defined anywhere in this
# script. The duration of this pass is $t2 - $t1, matching the identical
# timing code in examples/ex_wine.pl.
my $td=timediff($t2,$t1);
examples/ex_dow.pl view on Meta::CPAN
# Fragment of examples/ex_dow.pl (excerpt — the outer loop continues past
# this view): train the mesh on the DOW dataset, reporting forgetfulness
# and saving the mesh after each pass.
if(!$net->load('DOW.mesh')) {
print "\nLearning started...\n";
# Make it learn the whole dataset $top times
my @list;
my $top=1;
for my $a (0..$top) {
my $t1=new Benchmark;
print "\n\nOuter Loop: $a\n";
# Test forgetfulness of earlier patterns after relearning the set.
# NOTE(review): error => -1 presumably disables the error-threshold stop
# condition — confirm against the AI::NeuralNet::Mesh documentation.
my $f = $net->learn_set(\@data, inc => 0.2,
max => 2000,
error => -1);
# Print it
print "\n\nForgetfullness: $f%\n";
# Save net to disk
$net->save('DOW.mesh');
examples/ex_pcx.pl view on Meta::CPAN
# Fragment of examples/ex_pcx.pl (excerpt): teach the mesh the highest- and
# lowest-scoring PCX image blocks, then classify a randomly chosen block.
if(!$net->load("pcx.mesh")) {
print "Learning high block...\n";
print $net->learn($blocks[$net->high(\@score)],"highest");
$net->save("pcx.mesh");
print "Learning low block...\n";
# NOTE(review): unlike the "high" case above, this return value is not
# printed — presumably intentional, but confirm.
$net->learn($blocks[$net->low(\@score)],"lowest");
}
print "Testing random block...\n";
# rand()*$b picks a random block index in [0, $b).
print "Result: ",$net->run($blocks[rand()*$b])->[0],"\n";
# TYPO FIX: the output string read "Bencmark"; corrected to "Benchmark".
print "Benchmark for run: ", $net->benchmarked(), "\n";
$net->save("pcx2.net");
# print_ref: debugging helper (definition continues past this excerpt, so
# its full behavior is not visible here).
sub print_ref {
# Relax the refs stricture so symbolic dereferences below (off-view) work.
no strict 'refs';
# Discard the leading invocant when called as a method on an AI::* object.
shift if(substr($_[0],0,4) eq 'AI::');
examples/ex_wine.pl view on Meta::CPAN
# Fragment of examples/ex_wine.pl (excerpt — the loops continue past this
# view): train on each defined wine data subset, $top+1 passes over the
# whole collection, timing and reporting forgetfulness for each subset.
# Make it learn the whole dataset $top times
my @list;
my $top=5;
for my $a (0..$top) {
print "\n\nOuter Loop: $a\n";
# Iterate over every subset index; skip subsets with no first element.
for(0..$#{$sets}) {
next if(!defined $sets->[$_]->[0]);
my $t1=new Benchmark;
# Test forgetfulness of earlier patterns after relearning this subset.
my $f = $net->learn_set($sets->[$_], inc => 0.2,
max => 2000,
error => 0.01,
leave => 2);
# Print it
print "\n\nForgetfullness: $f%\n";
my $t2=new Benchmark;
# Wall-clock duration of this training pass ($t2 - $t1).
my $td=timediff($t2,$t1);
View all matches for this distribution. View release on MetaCPAN — search on MetaCPAN.
( ran in 0.611 seconds using v1.00-cache-2.02-grep-82fe00e-cpan-585fae043c8 )