AI-ML
view release on metacpan or search on metacpan
lib/AI/ML/NeuralNetwork.pm view on Meta::CPAN
$var->{"dz$i"} = $var->{"da$i"} * $functions->{ $self->{"l$i"}{dfunc} }->($var->{"Z$i"}) ;
$var->{"dw$i"} = (1 / $m) * ( $var->{"dz$i"} x T($var->{"A$j"}) );
$var->{"db$i"} = (1 / $m) * sum( $var->{"dz$i"} , 0 );
$var->{"da$j"} = T($self->{"l$i"}{w}) x $var->{"dz$i"} if $j >= 1;
$self->{"l$i"}{w} = $self->{"l$i"}{w} - ( $alpha * $var->{"dw$i"} );
$self->{"l$i"}{b} = $self->{"l$i"}{b} - ( $alpha * $var->{"db$i"} );
if($iter == 100){
$aux = $var->{"dz$i"};
#$aux->save_csv("/tmp/DZ$i.csv");
$aux = $var->{"dw$i"};
#$aux->save_csv("/tmp/DW$i.csv");
$aux = $var->{"db$i"};
#$aux->save_csv("/tmp/DB$i.csv");
#if ($j>=1){$aux = $var->{"da$j"};
#$aux->save_csv("/tmp/da$j.m");}
$aux = $self->{"l$i"}{w};
#$aux->save_csv("/tmp/W$i.csv");
$aux = $self->{"l$i"}{b};
#$aux->save_csv("/tmp/B$i.csv");
}
}
}
$self->{grads} = %$var if exists $opts{grads};
}
=head2 gradient_checking

Numerically approximates the gradient of the cost function with central
differences in order to validate the analytic gradients computed by
back-propagation. The implementation is currently commented out (stub).

=cut
sub gradient_checking {
# Gradient checking (work in progress): the draft code below would perturb
# each flattened parameter by +/- epsilon, recompute the cost via forward
# propagation, and build the central-difference approximation
# (J_plus - J_minus) / (2 * epsilon) for comparison against the analytic
# gradients from back-propagation.
my ($self, $x, $y) = @_;
# NOTE(review): only the next line is live code; $x and $y are unused until
# the commented-out loop below is completed.
my ($params, $grads, %dims) = $self->_get_params_grads();
#print STDERR Dumper($params);
#print STDERR Dumper($grads);
#print STDERR Dumper(%dims);
#my $n = $params->rows;
#my $m = $params->columns;
#print STDERR "elements:$n,$m\nParams vector\n";
#for my $i (0..$n-1){
# print STDERR "$i:" .$params->get_element($i,0)."\n";
#}
#print STDERR "Grads vector\n";
# NOTE(review): the "Grads vector" loop below prints $params again —
# presumably it was meant to print $grads.
#for my $j (0..$n-1){
# print STDERR $params->get_element($j,0)."\n";
#}
#my $epsilon = 1e-7;
#my $J_plus = Math::Lapack::Matrix->zeros($n,1);
#my $J_minus = Math::Lapack::Matrix->zeros($n,1);
#my $grad_aprox = Math::Lapack::Matrix->zeros($n,1);
# NOTE(review) — defects to fix when reviving the draft loop below:
#   * `$theta_plus = $params;` copies the reference, not the matrix, so
#     both names alias the same object; a deep copy is needed.
#   * `$theta_plus->set_element($i,0) = ...` assigns to a method call,
#     which is not valid Perl; set_element presumably takes the new value
#     as an argument — confirm against Math::Lapack::Matrix.
#   * `$J_plus($i,0)` is code-dereference syntax, not element assignment.
#   * the second `_vector_to_hash` call is missing the `%dims` argument
#     (compare the first call three lines above).
#   * `$j_minus` is a typo for `$J_minus`.
#for my $i (0..$n-1){
# $theta_plus = $params;
# $theta_plus->set_element($i,0) = $theta_plus->get_element($i,0) + $epsilon;
# $J_plus($i,0) = _forward_prop_n($x, $y, _vector_to_hash($theta_plus, $n, %dims));
#
# $theta_minus = $params;
# $theta_minus->set_element($i,0) = $theta_minus->get_element($i,0) - $epsilon;
# $J_minus($i,0) = _forward_prop_n($x, $y, _vector_to_hash($theta_minus, $n));
# $grad_aprox($i,0) = ($J_plus($i,0) - $j_minus($i,0)) / (2*$epsilon);
#}
}
=head2 _vector_to_hash

Unrolls a flattened column vector of parameters back into the per-layer
weight and bias matrices, using the dimensions recorded in C<%dims>.
Returns a hash mapping C<"l$i"> to C<< { w => $matrix, b => $matrix } >>.

=cut
sub _vector_to_hash {
    # Rebuild the per-layer weight/bias matrices from a flattened column
    # vector of parameters.
    #
    # Arguments:
    #   $vector - column vector (matrix object with rows/slice/reshape)
    #             holding all parameters laid out as w1, b1, w2, b2, ...
    #   $n      - layer count; layers 1 .. $n-1 are reconstructed
    #   %dims   - per-layer dimensions: $dims{"w$i"}{rows}/{cols} and
    #             $dims{"b$i"}{rows}/{cols}
    #
    # Returns: a hash mapping "l$i" => { w => matrix, b => matrix }.
    my ($vector, $n, %dims) = @_;

    # BUGFIX: was `my %hash = {};`, which initializes the hash from a
    # one-element list containing a hashref (odd-element-list bug, with the
    # stringified ref as a key). A plain declaration is correct.
    my %hash;
    my $pos = 0;

    for my $i (1 .. $n - 1) {
        # Weights for layer $i: take the next rows*cols entries and reshape.
        my $n_values = $dims{"w$i"}{rows} * $dims{"w$i"}{cols};
        my $weight   = $vector->slice( row_range => [ $pos, $pos + $n_values - 1 ] );
        $hash{"l$i"}{w} = $weight->reshape( $dims{"w$i"}{rows}, $dims{"w$i"}{cols} );
        $pos += $n_values;

        # Biases for layer $i.
        # BUGFIX: was `$vector->reshape(row_range => ...)`; reshape takes
        # target dimensions, not a range — slice extracts the sub-vector,
        # mirroring the weight path above.
        $n_values = $dims{"b$i"}{rows} * $dims{"b$i"}{cols};
        my $bias   = $vector->slice( row_range => [ $pos, $pos + $n_values - 1 ] );
        $hash{"l$i"}{b} = $bias->reshape( $dims{"b$i"}{rows}, $dims{"b$i"}{cols} );
        $pos += $n_values;
    }

    return %hash;
}
=head2 _get_params_grads

Flattens each layer's weight and bias matrices (and the corresponding
gradients) into column vectors, recording the original dimensions so that
C<_vector_to_hash> can reverse the transformation.

=cut
sub _get_params_grads {
my ($self) = @_;
my ($matrix, $params, $grads, $n, %dims);
my ($r, $c);
$n = $self->{layers};
$matrix = $self->{"l1"}{w};
$dims{"w1"}{rows} = $matrix->rows;
$dims{"w1"}{cols} = $matrix->columns;
($r, $c) = $matrix->shape;
print STDERR "New dimension shape: $r,$c\n";
$params = $matrix->reshape($matrix->rows * $matrix->columns, 1);
($r, $c) = $params->shape;
print STDERR "$r,$c\n";
$matrix = $self->{grads}{"dw1"};
$grads = $matrix->reshape($matrix->rows * $matrix->columns, 1);
for my $i (1..$n-1){
print STDERR "layer: $i\n";
if( $i > 1 ){
( run in 0.993 second using v1.01-cache-2.11-cpan-39bf76dae61 )