}
#endif
return cost/m;
}
Matrix *mini_batch(Matrix *m, int start, int size, int axis){
Matrix *r;
int end;
if(start < 0){
fprintf(stderr, "start index need to be bigger or equal index 0\n");
exit(1);
}
end = start + size - 1;
if(axis == 0) // every training example is a column
{
if(end >= m->columns){
fprintf(stderr, "Out of index of columns\n");
exit(1);
}
r = slice(m, -1, -1, start, end);
}
else if(axis == 1){
if(end >= m->rows){
fprintf(stderr, "Out of index of rows\n");
exit(1);
}
r = slice(m, start, end, -1, -1);
}
else{
fprintf(stderr, "Invalid axis\n");
exit(1);
}
return r;
}
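/* Usage sketch (assumptions flagged inline): one epoch over a matrix whose
 * columns are training examples (axis 0), taken in fixed-size mini-batches.
 * The call pattern mirrors t/03-mini-batch.t; the cleanup mirrors destroy()
 * in scripts/load_data.c and assumes the slice owns its storage. */
void for_each_batch(Matrix *X, int size, void (*step)(Matrix *)) {
    int start;
    for (start = 0; start + size <= X->columns; start += size) {
        /* columns start .. start+size-1 */
        Matrix *batch = mini_batch(X, start, size, 0);
        step(batch);         /* one optimisation step on this batch */
        free(batch->values); /* assumed ownership; see note above */
        free(batch);
    }
}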
Matrix *predict_binary_classification(Matrix *m, REAL threshold){
Matrix *yatt;
int i, rows, cols, size;
rows = m->rows;
cols = m->columns;
size = rows * cols;
NEW_MATRIX(REAL, yatt, rows, cols);
for( i = 0; i < size; i++ ){
//fprintf(stderr, "%f\n", m->values[i]);
if(m->values[i] >= threshold){
yatt->values[i] = 1;
}
else{
yatt->values[i] = 0;
}
//fprintf(stderr, "%f -> %f\n", m->values[i], yatt->values[i]);
}
return yatt;
}
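/* Example: with threshold 0.5 the activations (0.3, 0.5, 0.7) map to
 * predictions (0, 1, 1); the comparison is >=, so ties go to class 1. */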
double accuracy(Matrix *y, Matrix *yatt){
double total = 0;
int size = y->columns * y->rows;
for ( int i = 0; i < size; i++ ){
if(y->values[i] == yatt->values[i]) total++;
}
return total / size;
}
inc/MyBuilder.pm
sub update_XS {
my ($self, $file) = @_;
my $output = $file;
$output =~ s/\.inc$//;
open my $i_fh, "<", $file or die "$!";
open my $o_fh, ">", $output or die "$!";
while (<$i_fh>) {
s/REAL/float/g;
print {$o_fh} $_;
}
close $o_fh;
close $i_fh;
}
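# Example (hypothetical path): $self->update_XS("C/matrix.c.inc") would write
# C/matrix.c with every occurrence of REAL replaced by float.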
sub ACTION_create_objects {
my $self = shift;
my $cbuilder = $self->cbuilder;
my $c_progs = $self->rscan_dir("C", qr/\.c$/);
lib/AI/ML/Expr.pm
}
sub eval_softmax {
my $tree = shift;
if (ref($tree) eq "Math::Lapack::Matrix") {
my $s = $tree->max();
my $e_x = exp( $tree - $s );
my $div = sum( $e_x, 1 );
return $e_x / $div;
#use Data::Dumper;
#print STDERR Dumper $matrix;
# return _bless _softmax($tree->matrix_id);
}
die "softmax for non matrix";
}
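# Subtracting the maximum before exponentiating is the usual numerical-
# stability trick: softmax(z) == softmax(z - max(z)), but the shifted form
# cannot overflow exp(). A plain-Perl sketch of the same computation on one
# column (illustration only, not the matrix code path above):
#
#   my @z   = (1000, 1001, 1002);          # exp(1000) would overflow
#   my $max = (sort { $b <=> $a } @z)[0];
#   my @e   = map { exp($_ - $max) } @z;
#   my $sum = 0; $sum += $_ for @e;
#   my @p   = map { $_ / $sum } @e;        # ~ (0.09, 0.24, 0.67)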
=head2 d_softmax
Applies the d_softmax function to every element of the matrix.
$m = d_softmax($m);
$m = $m->d_softmax();
lib/AI/ML/Expr.pm
=cut
sub plot {
my ($x, $y, $theta, $file) = @_;
my @xdata = $x->vector_to_list();
my @ydata = $y->vector_to_list();
my @thetas = $theta->vector_to_list();
my $f = $thetas[0] . "+" . $thetas[1] . "*x";
#print STDERR "$_\n" for(@xdata);
#print STDERR "$_\n" for(@ydata);
#print STDERR "$f\n";
#print STDERR "\n\nFILE == $file\n\n";
my $chart = Chart::Gnuplot->new(
output => $file,
title => "Nice one",
xlabel => "x",
ylabel => "y"
);
my $points = Chart::Gnuplot::DataSet->new(
xdata => \@xdata,
ydata => \@ydata,
lib/AI/ML/LogisticRegression.pm
}
=head2 accuracy
Returns the fraction of predicted labels that match C<$y>. Requires a previous call to C<prediction>.
=cut
sub accuracy {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::accuracy($y, $self->{yatt});
}
=head2 precision
Returns the precision against C<$y>: the proportion of predicted positives that are true positives.
=cut
sub precision {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::precision($y, $self->{yatt});
}
=head2 recall
Returns the recall against C<$y>: the proportion of actual positives that were predicted positive.
=cut
sub recall {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::recall($y, $self->{yatt});
}
=head2 f1
Returns the F1 score against C<$y>: the harmonic mean of precision and recall.
=cut
sub f1 {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::f1($y, $self->{yatt});
}
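# Typical call sequence (mirrors t/05-logistic-regression.t; train() is
# assumed here and not shown in that test excerpt):
#
#   my $model = AI::ML::LogisticRegression->new(n => 10000, alpha => 0.5);
#   $model->train($x, $y);
#   $model->prediction($x);              # fills the stored predictions
#   my $acc  = $model->accuracy($y);
#   my $prec = $model->precision($y);
#   my $rec  = $model->recall($y);
#   my $f1   = $model->f1($y);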
1;
lib/AI/ML/NeuralNetwork.pm
#$aux->save_csv("/tmp/DB$i.csv");
$aux = $var->{"da$j"};
#$aux->save_csv("/tmp/da$j.m");
$aux = $self->{"l$i"}{w};
#$aux->save_csv("/tmp/W$i.csv");
$aux = $self->{"l$i"}{b};
#$aux->save_csv("/tmp/B$i.csv");
}
##print STDERR Dumper($self,$var);
##
$i--;$j--;
for(; $j >= 0; $i--, $j--) {
#print STDERR "Iter: $i\n";
$var->{"dz$i"} = $var->{"da$i"} * $functions->{ $self->{"l$i"}{dfunc} }->($var->{"Z$i"}) ;
$var->{"dw$i"} = (1 / $m) * ( $var->{"dz$i"} x T($var->{"A$j"}) );
$var->{"db$i"} = (1 / $m) * sum( $var->{"dz$i"} , 0 );
$var->{"da$j"} = T($self->{"l$i"}{w}) x $var->{"dz$i"} if $j >= 1;
$self->{"l$i"}{w} = $self->{"l$i"}{w} - ( $alpha * $var->{"dw$i"} );
$self->{"l$i"}{b} = $self->{"l$i"}{b} - ( $alpha * $var->{"db$i"} );
lib/AI/ML/NeuralNetwork.pm
$self->{grads} = { %$var } if exists $opts{grads}; # store a hashref, not a hash in scalar context
}
=head2 gradient_checking
Compares the analytic gradients from backpropagation against a numerical approximation of the cost gradient (see the commented sketch and the note below).
=cut
sub gradient_checking {
my ($self, $x, $y) = @_;
my ($params, $grads, %dims) = $self->_get_params_grads();
#print STDERR Dumper($params);
#print STDERR Dumper($grads);
#print STDERR Dumper(%dims);
#my $n = $params->rows;
#my $m = $params->columns;
#print STDERR "elements:$n,$m\nParams vector\n";
#for my $i (0..$n-1){
# print STDERR "$i:" .$params->get_element($i,0)."\n";
#}
#print STDERR "Grads vector\n";
#for my $j (0..$n-1){
# print STDERR $params->get_element($j,0)."\n";
#}
#my $epsilon = 1e-7;
#my $J_plus = Math::Lapack::Matrix->zeros($n,1);
#my $J_minus = Math::Lapack::Matrix->zeros($n,1);
#my $grad_aprox = Math::Lapack::Matrix->zeros($n,1);
#for my $i (0..$n-1){
# $theta_plus = $params;
# $theta_plus->set_element($i,0) = $theta_plus->get_element($i,0) + $epsilon;
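# The commented sketch above follows the standard centered-difference check:
# for each parameter component i,
#
#   grad_approx[i] = ( J(theta + eps*e_i) - J(theta - eps*e_i) ) / (2*eps)
#
# with eps ~ 1e-7 as set above, followed by the relative error
#
#   ||grad - grad_approx|| / ( ||grad|| + ||grad_approx|| )
#
# which should come out near 1e-7 or smaller when backprop is correct.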
lib/AI/ML/NeuralNetwork.pm
my ($matrix, $params, $grads, $n, %dims);
my ($r, $c);
$n = $self->{layers};
$matrix = $self->{"l1"}{w};
$dims{"w1"}{rows} = $matrix->rows;
$dims{"w1"}{cols} = $matrix->columns;
($r, $c) = $matrix->shape;
print STDERR "New dimension shape: $r,$c\n";
$params = $matrix->reshape($matrix->rows * $matrix->columns, 1);
($r, $c) = $params->shape;
print STDERR "$r,$c\n";
$matrix = $self->{grads}{"dw1"};
$grads = $matrix->reshape($matrix->rows * $matrix->columns, 1);
for my $i (1..$n-1){
print STDERR "layer: $i\n";
if( $i > 1 ){
$matrix = $self->{"l$i"}{w};
$dims{"w$i"}{rows} = $matrix->rows;
$dims{"w$i"}{cols} = $matrix->columns;
$matrix = $matrix->reshape($matrix->rows* $matrix->columns, 1);
($r, $c) = $matrix->shape;
print STDERR "New dimension shape: $r,$c\n";
$params->append($matrix,1);
$matrix = $self->{grads}{"dw$i"};
$grads->append($matrix->reshape($matrix->rows*$matrix->columns, 1),0);
}
($r, $c) = $params->shape;
print STDERR "$r,$c\n";
$matrix = $self->{"l$i"}{b};
$dims{"b$i"}{rows} = $matrix->rows;
$dims{"b$i"}{cols} = $matrix->columns;
($r, $c) = $matrix->shape;
print STDERR "New dimension shape: $r,$c\n";
$params->append($matrix->reshape($matrix->rows *$matrix->columns,1), 0);
($r, $c) = $params->shape;
print STDERR "$r,$c\n";
$matrix = $self->{grads}{"db$i"};
$grads->append($matrix->reshape($matrix->rows *$matrix->columns,1), 0);
}
#print STDERR "cols: $c, rows: $r\n";
#print STDERR Dumper(%dims);
return ($params, $grads, %dims);
}
=head2 prediction
Runs forward propagation on C<$x> and stores the thresholded predictions in the model, for use by the metric methods below.
=cut
sub prediction {
my ($self, $x, %opts) = @_;
lib/AI/ML/NeuralNetwork.pm
}
=head2 accuracy
Returns the fraction of predicted labels that match C<$y>. Requires a previous call to C<prediction>.
=cut
sub accuracy {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::accuracy($y, $self->{yatt});
}
=head2 precision
Returns the precision against C<$y>: the proportion of predicted positives that are true positives.
=cut
sub precision {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::precision($y, $self->{yatt});
}
=head2 recall
Returns the recall against C<$y>: the proportion of actual positives that were predicted positive.
=cut
sub recall {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::recall($y, $self->{yatt});
}
=head2 f1
Returns the F1 score against C<$y>: the harmonic mean of precision and recall.
=cut
sub f1 {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::f1($y, $self->{yatt});
}
1;
scripts/load_data.c
endianSwap(&col);
Matrix *matrices;
int size = row * col;
NEW_MATRIX(matrices, num, size);
for(i = 0; i < num * size; i++){
if(fread(&pixel, 1, 1, fimage) <= 0){
exit(1);
}
//printf("value: %f\n", (double)pixel);
matrices->values[i] = (double)pixel;
}
fclose(fimage);
return matrices;
}
/*Matrix *read_label_csv(char *path){
FILE *flabel = fopen(path, "rb");
unsigned int magic, num;
scripts/load_data.c
if( fread(&num, 4, 1, flabel) <= 0) exit(1);
endianSwap(&num);
Matrix *m;
NEW_MATRIX(m, num, 1);
int i;
for( i = 0; i < num; i++){
if( fread(&value, 1 , 1, flabel) <= 0 ) exit(1);
m->values[i] = (float)value;
//printf("%d\n",value);
}
fclose(flabel);
return m;
}*/
Matrix *read_label_csv(char *path){
FILE *flabel = fopen(path, "rb");
unsigned int magic, num;
unsigned char value;
if( fread(&magic, 4, 1, flabel) <= 0) exit(1);
assert(magic == 0x01080000); /* 0x00000801 (labels magic) read without byte-swapping */
if( fread(&num, 4, 1, flabel) <= 0) exit(1);
endianSwap(&num);
Matrix *m;
NEW_MATRIX(m, num, 10);
int i;
for( i = 0; i < num; i++){
if( fread(&value, 1 , 1, flabel) <= 0 ) {exit(1);}
if(i < 10) fprintf(stderr, "%d\n",value);
m->values[i * m->columns + value] = 1;
//printf("%d\n",pos);
}
fclose(flabel);
return m;
}
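/* Note: the loop above sets only the "hot" entry of each row -- label 3 in
 * row i yields (0,0,0,1,0,0,0,0,0,0). This relies on NEW_MATRIX
 * zero-initializing the values (assumed here, not verified in this file). */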
void save(Matrix *m, char *path){
FILE *f;
f = fopen(path, "w");
if(f == NULL) {
fprintf(stderr, "save: can't create file\n");
exit(1);
}
int row, col;
for( row = 0; row < m->rows; row++ ) {
for( col = 0; col < m->columns; col++ ) {
if( col == 0 ) {
fprintf(f, "%f", (double)m->values[row * m->columns + col]);
} else {
fprintf(f, ",%f", (double)m->values[row * m->columns + col]);
}
}
fprintf(f, "\n");
}
fclose(f);
}
void destroy(Matrix *m){
free(m->values);
free(m);
}
void print_values(Matrix *m){
int rows = m->rows < 10 ? m->rows : 10; /* print at most the first 10 rows */
int cols = m->columns;
for(int i = 0; i < rows; i++){
for(int j = 0; j < cols; j++){
fprintf(stderr, "%f\t", m->values[i * cols + j]);
}
fprintf(stderr, "\n");
}
}
int main(int argc, char *argv[]){
if (argc != 4) {
fprintf(stderr, "Usage: load <type> input-ubyte output-txt\n");
fprintf(stderr, "\ttype can be:\n");
fprintf(stderr, "\t\timages\n");
fprintf(stderr, "\t\tlabels\n");
exit(1);
}
char *path = argv[2];
Matrix *m;
if (strcmp(argv[1], "images") == 0) {
m = read_csv(path);
} else if (strcmp(argv[1], "labels") == 0) {
m = read_label_csv(path);
} else {
fprintf(stderr, "Unknown file type: %s\n", argv[1]);
exit(1);
}
save(m, argv[3]);
destroy(m);
return 0;
}
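/* Usage example (MNIST idx file names are illustrative):
 *   ./load images train-images-idx3-ubyte train-images.txt
 *   ./load labels train-labels-idx1-ubyte train-labels.txt
 */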
scripts/mnist.pl
sub _download_data{
my $http = HTTP::Tiny->new();
my $url = "http://yann.lecun.com/exdb/mnist";
my $res;
for my $key ( keys %opt ) {
my $file = "$url/$opt{$key}.gz";
my $ff = File::Fetch->new(uri => $file);
my $aux = $ff->fetch() or die $ff->error;
#print "$file\n";
#$res = $http->get("$file");
#my $content = $res->{content};
# # $res = $http->get("$route/".$opt{$key});
#print STDERR Dumper $content;
}
}
t/00-report-prereqs.t
if ( @reports ) {
push @full_reports, "=== $title ===\n\n";
my $ml = _max( map { length $_->[0] } @reports );
my $wl = _max( map { length $_->[1] } @reports );
my $hl = _max( map { length $_->[2] } @reports );
if ($type eq 'modules') {
splice @reports, 1, 0, ["-" x $ml, "", "-" x $hl];
push @full_reports, map { sprintf(" %*s %*s\n", -$ml, $_->[0], $hl, $_->[2]) } @reports;
}
else {
splice @reports, 1, 0, ["-" x $ml, "-" x $wl, "-" x $hl];
push @full_reports, map { sprintf(" %*s %*s %*s\n", -$ml, $_->[0], $wl, $_->[1], $hl, $_->[2]) } @reports;
}
push @full_reports, "\n";
}
}
}
if ( @full_reports ) {
diag "\nVersions for all modules listed in $source (including optional ones):\n\n", @full_reports;
}
t/02-cost_functions.t
float($a->get_element(0,0), 0, "Element correct at 0,0");
float($a->get_element(0,1), 1, "Element correct at 0,1");
float($a->get_element(0,2), 1, "Element correct at 0,2");
float($a->get_element(0,3), 1, "Element correct at 0,3");
my $b = $a x $m_1->transpose;
float($b->get_element(0,0), 6, "Element correct at 0,0");
print "1..$nr_tests\n";
sub float {
$nr_tests++;
my ($a, $b, $explanation) = @_;
if (abs($a-$b) > 0.000001){
print "not ";
$explanation .= " ($a vs $b)";
}
print "ok $nr_tests - $explanation\n";
}
sub is {
$nr_tests++;
my ($a, $b, $explanation) = @_;
if ($a != $b){
print "not ";
$explanation .= " ($a vs $b)";
}
print "ok $nr_tests - $explanation\n";
}
t/03-mini-batch.t
my $m_1 = Math::Lapack::Matrix->random(1000,20);
$start = 0;
$axis = 1;
for my $i (0..4){
my $b = mini_batch($m_1, $start, $size, $axis);
is($b->rows, 200, "Right number of rows\n");
is($b->columns, 20, "Right number of columns\n");
$start += $size;
}
print "1..$nr_tests\n";
sub float {
$nr_tests++;
my ($a, $b, $explanation) = @_;
if (abs($a-$b) > 0.000001){
print "not ";
$explanation .= " ($a vs $b)";
}
print "ok $nr_tests - $explanation\n";
}
sub is {
$nr_tests++;
my ($a, $b, $explanation) = @_;
if ($a != $b){
print "not ";
$explanation .= " ($a vs $b)";
}
print "ok $nr_tests - $explanation\n";
}
t/05-logistic-regression.t
$m->prediction($x);
_float($m->accuracy($y), 0.7483660130718954, "Right value of accuracy");
_float($m->precision($y), 0.5833333333333334, "Right value of precision");
_float($m->recall($y), 0.1728395061728395, "Right value of recall");
_float($m->f1($y), 0.26666666666666666, "Right value of f1");
#print STDERR "Accuracy: $acc\n";
#print STDERR "Precison: $prec\n";
#print STDERR "Recall: $rec\n";
#print STDERR "F1: $f1\n";
my $n = AI::ML::LogisticRegression->new(
n => 10000,
alpha => 0.5,
t/06-accuracy-precision-recall-f1.t
my $prec = AI::ML::Expr::precision($y, $yatt);
float($prec, 0.571428571, "Right Precision");
my $rec = AI::ML::Expr::recall($y, $yatt);
float($rec, 0.5, "Right recall");
my $f_1 = AI::ML::Expr::f1($y, $yatt);
float($f_1, 0.533333334, "Right f1");
print "1..$nr_tests\n";
sub float {
$nr_tests++;
my ($a, $b, $explanation) = @_;
if (abs($a-$b) > 0.000001){
print "not ";
$explanation .= " ($a vs $b)";
}
print "ok $nr_tests - $explanation\n";
}
sub is {
$nr_tests++;
my ($a, $b, $explanation) = @_;
if ($a != $b){
print "not ";
$explanation .= " ($a vs $b)";
}
print "ok $nr_tests - $explanation\n";
}
t/08-gradient-checking.t_
$params{"w2"} = Math::Lapack::Matrix::read_csv("t/W2_grad_check.csv");
$params{"w3"} = Math::Lapack::Matrix::read_csv("t/W3_grad_check.csv");
$params{"b1"} = Math::Lapack::Matrix::read_csv("t/b1_grad_check.csv");
$params{"b2"} = Math::Lapack::Matrix::read_csv("t/b2_grad_check.csv");
$params{"b3"} = Math::Lapack::Matrix::read_csv("t/b3_grad_check.csv");
gradient_checking($x, $y, $params, 1);
#$x->norm_std_deviation();
#print STDERR $x->columns."\n";
#my $NN = AI::ML::NeuralNetwork->new(
# [
# 2,
# {func => "tanh", units => 3},
# 1
# ],
# n => 2,
# alpha => 1.2
#
t/08-gradient-checking.t_
#
#$NN->train($x, $y);
#
#$NN->prediction($x);
#
#my $acc = $NN->accuracy($y);
#my $prec = $NN->precision($y);
#my $rec = $NN->recall($y);
#my $f1 = $NN->f1($y);
#
#print STDERR "Accuracy: $acc\n";
#print STDERR "Precison: $prec\n";
#print STDERR "Recall: $rec\n";
#print STDERR "F1: $f1\n";
#my $t1 = $NN->{"l1"}{w};
#$t1->save_matlab("/tmp/t0.m");
#print STDERR Dumper($NN->{"l1"}{w});
done_testing;
sub _float {
my ($a, $b, $c) = @_;
is($a, float($b, tolerance => 0.01), $c);
}