AI-Nerl


examples/digits/digits.pl

   if ($pass%200==0){
      # periodically dump a few entries of the error and output piddles
      warn $delta(100:104);
      warn $out_neurons(100:104);
   }
   # visualize the per-pixel error for a few training examples
   show784($delta(:,0));
   show784($delta(:,6));
   show784($delta(:,4));
}
#die join (',',$nncost->dims);
use PDL::Graphics2D;
# Normalize a 784-element piddle to [0,1] and display it as a 28x28 image.
sub show784{
   my $w = shift;
   $w = $w->copy;   # work on a copy so the caller's slice isn't modified
   $w = $w->squeeze;
   my $min = $w->minimum;
   $w -= $min;
   my $max = $w->maximum;
   $w /= $max;
   $w = $w->reshape(28,28);
   imag2d $w;
}
sub sigmoid{
   my $foo = shift;
   return 1/(1+E**-$foo);   # E = exp(1), imported elsewhere (e.g. PDL::Constants)
}

sub logistic{
   # derivative of the sigmoid, in terms of the sigmoid's output:
   # grad = logistic(sigmoid($z)) = sigmoid($z) * (1 - sigmoid($z))
   my $foo = shift;
   return $foo * (1-$foo);
}
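# Usage sketch for the pair above (hypothetical $z, any piddle of
# pre-activations):
#   my $a    = sigmoid($z);    # forward-pass activation
#   my $grad = logistic($a);   # == $a * (1 - $a), the sigmoid's derivative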

lib/AI/Nerl.pm

);

# Optional pre-trained AI::Nerl whose hidden layer is appended to this
# network's input features (see append_l2 below).
has basis => (
   is => 'ro',
   isa => 'AI::Nerl',
   required => 0,
);

# Initialize $self->network without training it.
# Any parameters accepted by AI::Nerl::Network may be supplied here.
sub init_network{
   my $self = shift;
   my %nn_params = @_;
   # input layer size: infer from the basis network (its input plus its
   # hidden layer, since append_l2 glues them together) or from the
   # training data's feature dimension.
   unless ($nn_params{l1}){
      if ($self->basis){
         $nn_params{l1} = $self->basis->network->l1 + $self->basis->network->l2;
      } elsif($self->train_x) {
         $nn_params{l1} = $self->train_x->dim(1);
      }
   }

lib/AI/Nerl.pm

   }
   $nn_params{l2} ||= $self->l2;
   $nn_params{scale_input} ||= $self->scale_input;

   my $nn = AI::Nerl::Network->new(
      %nn_params
   );
   $self->network($nn);
}

# Build a network sized from the training data, then train it.
sub build_network{
   my $self = shift;
   my $nn = AI::Nerl::Network->new(
      l1 => $self->train_x->dim(1),
      l2 => $self->l2,
      l3 => $self->train_y->dim(1),
      scale_input => $self->scale_input,
   );
   $nn->train($self->train_x, $self->train_y, passes=>$self->passes);
   $self->network($nn);
}
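# A minimal end-to-end sketch (hypothetical piddles; assumes train_x,
# train_y, l2 and passes are constructor attributes, as the accessors
# above suggest):
#   my $nerl = AI::Nerl->new(
#      train_x => $x,    # (n_examples, n_features)
#      train_y => $y,    # (n_examples, n_labels), e.g. one-hot digits
#      l2      => 30,    # hidden layer size
#      passes  => 10,
#   );
#   $nerl->build_network;   # sizes l1/l3 from the data, then trains
#   my ($cost, $ncorrect) = $nerl->cost($x, $y);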

# Recurse through the basis chain so each underlying network appends its
# hidden activations before this one does.
sub append_l2{
   my ($self,$x) = @_;
   if($self->basis){
      $x = $self->basis->append_l2($x);
   }
   return $self->network->append_l2($x);
}
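# Stacking sketch: because append_l2 recurses through the basis first,
# each layer of Nerls widens the feature vector by its l2. Hypothetical:
#   my $stacked = AI::Nerl->new(basis => $first_nerl);
#   $stacked->init_network(l2 => 20);  # l1 inferred: basis l1 + basis l2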


sub run{
   my ($self,$x) = @_;
   $x->sever;   # detach from any parent slice so in-place ops stay local
   if($self->basis){
      $x = $self->basis->append_l2($x);
   }
   return $self->network->run($x);
}
sub train{
   my ($self,$x,$y) = @_;
   $x->sever;
   if($self->basis){
      $x = $self->basis->append_l2($x);
   }
   return $self->network->train($x,$y);
}

sub cost{
   my ($self,$x,$y) = @_;
   $x->sever();
   if($self->basis){
      $x = $self->basis->append_l2($x);
   }
   return $self->network->cost($x,$y);
}

'a neural network has your dog.'; # a module must end with a true value

lib/AI/Nerl/Network.pm

   isa => 'Num',
   is => 'rw',
   default => .6,
);
# L2 regularization (weight decay) strength.
has lambda => (
   isa => 'Num',
   is => 'rw',
   default => .01,
);

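# Parameter initializers: small gaussian values (PDL's grandom, scaled by
# .01) so units start near zero but not identical (symmetry breaking).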
sub _mk_theta1{
   my $self = shift;
   return grandom($self->l1, $self->l2) * .01;
}
sub _mk_theta2{
   my $self = shift;
   return grandom($self->l2, $self->l3) * .01;
}
sub _mk_b1{
   my $self = shift;
   return grandom($self->l2) * .01;
}
sub _mk_b2{
   my $self = shift;
   return grandom($self->l3) * .01;
}


sub train{
   my ($self,$x,$y, %params) = @_;
   $x->sever();
   my $passes = $params{passes} // 10;

   if ($self->scale_input){
      $x *= $self->scale_input;
   }
   my $num_examples = $x->dim(0);

   for my $pass (1..$passes){

lib/AI/Nerl/Network.pm

         warn "delta1: $delta1\n";
         warn "delta2: $delta2\n";
      }
      # gradient descent with L2 weight decay on the weight matrices
      $self->{theta2} -= $self->alpha * ($delta2 / $num_examples + $self->theta2 * $self->lambda);
      $self->{theta1} -= $self->alpha * ($delta1 / $num_examples + $self->theta1 * $self->lambda);
      # biases get plain gradient steps (no regularization)
      $self->{b1} -= $self->alpha * $deltab1 / $num_examples;
      $self->{b2} -= $self->alpha * $deltab2 / $num_examples;
   }
}
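# The update above is gradient descent with L2 weight decay:
# theta := theta - alpha * (grad/m + lambda*theta). A one-step toy
# example with made-up numbers:
#   use PDL;
#   my ($alpha, $lambda, $m) = (.6, .01, 4);   # attribute defaults above
#   my $theta = pdl([[0.1,-0.2],[0.3,0.0]]);
#   my $grad  = pdl([[0.4, 0.1],[-0.2,0.5]]);  # summed over $m examples
#   $theta -= $alpha * ($grad / $m + $theta * $lambda);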

sub run{
   my ($self,$x) = @_;
   $x->sever();
   if ($self->scale_input){
      $x *= $self->scale_input;
   }

   # orient $x as (examples, l1): features along dim 1, one example per column
   $x = $x->transpose if $self->l1 != $x->dim(1);
   my $y = $self->theta1 x $x;   # hidden pre-activations
   $y += $self->b1->transpose;
   $y->inplace->tanh;

   $y = $self->theta2 x $y;      # output layer
   $y += $self->b2->transpose;
   $y->inplace->tanh;
   return $y;
}
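# Usage sketch (hypothetical $x, shaped (n_examples, l1) so each column
# is one example):
#   my $out  = $nn->run($x);                 # (n_examples, l3), in (-1,1)
#   my $pred = $out->transpose->maximum_ind; # argmax per example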

# Compute the tanh hidden layer and glue it onto the input, widening each
# example's feature vector from l1 to l1+l2.
sub append_l2{
   my ($self,$x) = @_;
   $x->sever();
   if ($self->scale_input){
      $x *= $self->scale_input;
   }
   $x = $x->transpose if $self->l1 != $x->dim(1);
   my $l2 = $self->theta1 x $x;
   $l2 += $self->b1->transpose;
   $l2->inplace()->tanh;
   # stack the hidden activations onto the input along dim 1:
   # result is (examples, l1+l2), ready for a wider network.
   return $x->glue(1,$l2);
}

sub cost{
   my ($self,$x,$y) = @_;
   $x->sever();
   my $n = $x->dim(0);
   if ($self->scale_input){
      $x *= $self->scale_input;
   }
   my $num_correct = 0;
   my $total_cost = 0;
   for my $i (0..$n-1){

lib/AI/Nerl/Network.pm

      # half squared-error for example $i
      $total_cost += ($y(($i))-$a3)->abs()->power(2,0)->sum()/2;
      # count a hit when the strongest output matches the label's argmax
      $num_correct++ if $a3->maximum_ind == $y(($i))->maximum_ind;
   }
   $total_cost /= $n;
   # L2 penalty on both weight matrices
   $total_cost += $self->theta1->flat->power(2,0)->sum * $self->lambda;
   $total_cost += $self->theta2->flat->power(2,0)->sum * $self->lambda;
   return ($total_cost, $num_correct);
}
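# Usage sketch (hypothetical held-out piddles shaped like the training
# data):
#   my ($cost, $n_correct) = $nn->cost($test_x, $test_y);
#   printf "cost %.4f, accuracy %.1f%%\n",
#      $cost, 100 * $n_correct / $test_x->dim(0);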

sub tanhx{ # unused here: PDL's built-in $pdl->tanh does this and can run in place
   my $foo = shift;
   my $p = E**$foo;
   my $n = E**-$foo;
   return (($p-$n)/($p+$n));
}
sub tanhxderivative{ # call as tanhxderivative($pdl->tanh()); reuses the tanh you already computed
   my $tanhx = shift;
   return (1 - $tanhx**2);
}
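# Sketch: backprop can reuse the forward activations, since the
# derivative is a function of tanh's output:
#   my $t    = $z->tanh;             # forward activations (hypothetical $z)
#   my $grad = tanhxderivative($t);  # == 1 - $t**2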

sub sigmoid{
   my $foo = shift;
   return 1/(1+E**-$foo);   # E = exp(1), imported elsewhere (e.g. PDL::Constants)
}

sub logistic{
   # derivative of the sigmoid in terms of the sigmoid's output:
   # grad = logistic(sigmoid($z)) = sigmoid($z) * (1 - sigmoid($z))
   my $foo = shift;
   return $foo * (1-$foo);
}

my $g2d_tried = 0;
my $g2d_failed = '';
sub USE_G2D{
   return 0 if $g2d_tried and $g2d_failed;
   return 1 if $g2d_tried;
   eval{
      require PDL::Graphics2D;
      PDL::Graphics2D->import('imag2d');
      1;
   } or do {
      $g2d_failed = $@;
      warn "PDL::Graphics2d failed to load. perhaps $g2d_failed";
   };
   $g2d_tried = 1;
   return USE_G2D();
}
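# USE_G2D caches a one-time optional require: after the first call the
# answer is memoized, and the load warning is emitted only once.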
#display a 784-element piddle as a 28x28 grayscale image.
sub show784{
   return unless USE_G2D();
   my $w = shift;
   $w = $w->copy;
   #warn join',', $w->dims;
   $w = $w->squeeze;
   my $min = $w->minimum;
   $w -= $min;
   my $max = $w->maximum;
   $w /= $max;
   $w = $w->reshape(28,28);
   imag2d $w;
}
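# Usage sketch (hypothetical $train_x with 28*28 features along dim 1):
#   AI::Nerl::Network::show784( $train_x->slice("(0),:") );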

sub show_neuron{
   unless (USE_G2D()){
      warn "Can't display neuron: PDL::Graphics2D (OpenGL) unavailable.";
      return;
   }
   my $self = shift;
   my $n = shift // 0;
   my $x = shift || 28;
   my $y = shift || 28;
   my $w = $self->theta1->slice(":,$n")->copy;
   $w = $w->squeeze;


