AI-Nerl
view release on metacpan or search on metacpan
b) cause the whole of any work that you distribute or publish, that
in whole or in part contains the Program or any part thereof, either
with or without modifications, to be licensed at no charge to all
third parties under the terms of this General Public License (except
that you may choose to grant warranty protection to some or all
third parties, at your option).
c) If the modified program normally reads commands interactively when
run, you must cause it, when started running for such interactive use
in the simplest and most usual way, to print or display an
announcement including an appropriate copyright notice and a notice
that there is no warranty (or else, saying that you provide a
warranty) and that users may redistribute the program under these
conditions, and telling the user how to view a copy of this General
Public License.
d) You may charge a fee for the physical act of transferring a
copy, and you may at your option offer warranty protection in
exchange for a fee.
examples/digits/deep_digits.pl view on Meta::CPAN
# Greedy layer-wise deep training: keep training the current network on
# random 500-example mini-batches; once the held-out cost stops improving
# (after a 10-pass warm-up), freeze the trained net and stack a fresh
# hidden layer on top of it via the basis=> constructor argument.
my $prev_cost = 10000; # sentinel: any real cost will beat it
my $passes=0;          # training rounds since the last layer was added
for(1..3000){
# Held-out evaluation set: examples 9000..9999 (PDL::NiceSlice syntax);
# sever() detaches each slice from its parent piddle.
my @test = ($images(9000:9999)->sever,$y(9000:9999)->sever);
# Random 500-example training window drawn from the first 8000 examples.
my $n = int rand(8000);
my $m = $n+499;
my @train = ($images->slice("$n:$m")->copy, $y->slice("$n:$m")->copy);
$nerl->train(@train,passes=>10);
my ($cost, $nc) = $nerl->cost( @test );
# Fixed: comma belongs before the newline, not at the start of line 2.
print "cost:$cost,\nnum correct: $nc / 1000\n";
# $nerl->network->show_neuron(1);
$passes++;
if ($cost < $prev_cost or $passes<10){
# Still improving (or inside the 10-pass warm-up): keep this network.
$prev_cost = $cost;
$prev_nerl = $nerl;
} else { # plateaued: use the trained $nerl as the basis of a new, deeper net
$passes=0;
print "New layer!";
# NOTE(review): reset sentinel is 1000 here but 10000 initially --
# presumably intentional (costs shrink as layers stack); verify.
$prev_cost = 1000;
$nerl = AI::Nerl->new(
basis => $prev_nerl,
l2 => int(rand(12))+5, # new hidden layer: 5..16 neurons
);
$nerl->init_network();
$prev_nerl = $nerl;
#die $nerl->network->theta1->slice("1:2") . $nerl->network->theta2->slice("1:2");
}
#print "example output, images 0 to 4\n";
#print "Labels: " . $y(0:4) . "\n";
#print $nerl->run($images(0:4));
# $nerl->network->show_neuron($_) for (0..4);
}
examples/digits/digits.pl view on Meta::CPAN
# Shallow training demo: 784-input (28x28 pixels), 80-hidden, 10-output net.
$nerl->init_network(l1 => 784, l3=>10, l2=>80,alpha=>.45);#method=batch,hidden=>12345,etc
for(1..300){
# Random 1000-example mini-batch drawn from the first 8000 examples.
my $n = int rand(8000);
my $m = $n+999;
my $ix = $images->slice("$n:$m");
my $iy = $y->slice("$n:$m");
$nerl->network->train($ix,$iy,passes=>5);
# Evaluate on the held-out examples 9000..9999 (PDL::NiceSlice syntax).
my ($cost,$nc) = $nerl->network->cost($images(9000:9999),$y(9000:9999));
# Fixed: comma belongs before the newline, not at the start of line 2.
print "cost:$cost,\nnum correct: $nc / 1000\n";
print "example output, images 0 to 4\n";
print "Labels: " . $y(0:4) . "\n";
print $nerl->network->run($images(0:4));
$nerl->network->show_neuron($_) for (0..4);
}
__END__
#my $label_targets = identity(10)->($labels);
my $id = identity(10);
$images = $images(10:11);
show784($images(0));
show784($images(1));
$labels = $labels(10:11);
lib/AI/Nerl/Network.pm view on Meta::CPAN
# Minimal AI::Nerl::Network usage: build a 3-18-1 network, train it,
# score it, then run it on an input it has never seen.
my $nn = AI::Nerl::Network->new(
l1 => 3, # 3 inputs
l2 => 18, # 18 hidden neurons
l3 => 1, # 1 output
alpha => .3, # learning rate
lambda => .01, # 'squashing' parameter
);
# $x,$y: training inputs and targets (pdls; see train() docs below).
$nn->train($x,$y, passes=>45);
my ($cost,$num_correct) = $nn->cost($x,$y);
#$nn wasn't programmed with this input. could be anything:
print $nn->run(pdl([0,0,0]));
=head1 DESCRIPTION
=head1 METHODS
=head2 train($x,$y, %params)
Train with backpropagation using $x as input & $y as target.
$x and $y are both pdls. If there are multiple cases, each one will
occupy a column (dimension 2) of the pdl. If your dimensions are off,
train_y => $AND,
);
$AND_nerl->build_network;
my $AND_output = $AND_nerl->run($x);
}
#task: mod 3
#in: 8 bits from (n=0..255);
#out: 1 output: (n%3 != 0)
# Build one row of eight 0/1 digits per integer. Fixed two bugs in the
# original: (1) "%b" produced variable-width strings where 8 fixed bits
# were promised -- use "%08b"; (2) assigning map's result to a scalar
# ($x = map ...) evaluated map in scalar context, yielding an element
# count instead of the bit list -- build an array-of-arrays pdl instead.
my $x = pdl([ map { [ split //, sprintf '%08b', $_ ] } 0..255 ]);
$x = $x->transpose; # one training case per column, as train() expects
my $y = pdl map{$_%3 ? 1 : 0} 0..255;
$y = identity(3)->range($y->transpose); # one-hot encode the labels
my $nerl = AI::Nerl->new(
train_x => $x,
train_y => $y,
l2 => 4, # 4 hidden neurons
);
( run in 0.424 second using v1.01-cache-2.11-cpan-de7293f3b23 )