/* fragment: presumably the tail of the softmax routine, dividing the
 * exponentials (em) by their sums (vm), freeing the intermediates and
 * returning the result */
Matrix *sf = div_matrices(em, vm);
destroy(sm);
destroy(vm);
destroy(em);
return sf;
}
/* derivative of the softmax activation: with e = exp(m) and s the
 * column-wise sums of e, returns e .* (s - e) ./ s
 * (note: the intermediate matrices are not freed here) */
Matrix *matrix_d_softmax(Matrix *m){
Matrix *exp = matrix_exp(m);
Matrix *s = sum(exp, VERTICAL);
return div_matrices( mul_matrices(exp, sub_matrices(s, exp)) ,s);
}
/* applies the scalar sigmoid derivative element-wise */
Matrix *matrix_d_sigmoid(Matrix *m){
return element_wise(m, d_sigmoid, NULL);
}
/* applies the scalar hyperbolic tangent element-wise */
Matrix *matrix_tanh(Matrix *m){
return element_wise(m, hyperbolic_tan, NULL);
}
int columns;
int rows;
REAL *values;
}Matrix;
typedef enum _axis{
HORIZONTAL = 0,
VERTICAL = 1
} Axis;
Matrix *sub_matrices(Matrix *A, Matrix *B);
Matrix *mul_matrices(Matrix *A, Matrix *B);
Matrix *div_matrices(Matrix *A, Matrix *B);
Matrix *dot(Matrix *A, Matrix *B, int A_t, int B_t);
Matrix *slice(Matrix *m, int x0, int x1, int y0, int y1);
Matrix *mini_batch(Matrix *m, int start, int size, int axis);
Matrix *sum(Matrix *m, Axis axis);
Matrix *broadcasting(Matrix *A, Matrix *B, Axis axis, REAL f(REAL, REAL));
REAL real_mul(REAL a, REAL b);
REAL get_max(Matrix*);
Matrix* matrix_sum(Matrix*, REAL);
REAL real_sub(REAL a, REAL b);
REAL real_sum(REAL a, REAL b);
REAL real_div(REAL a, REAL b);
REAL sigmoid(REAL a, void* v);
REAL ReLU(REAL a, void* v);
REAL d_ReLU(REAL a, void* v);
received the program in object code or executable form alone.)
Source code for a work means the preferred form of the work for making
modifications to it. For an executable file, complete source code means
all the source code for all modules it contains; but, as a special
exception, it need not include source code for modules which are standard
libraries that accompany the operating system on which the executable
file runs, or for standard header files or definitions files that
accompany that operating system.
4. You may not copy, modify, sublicense, distribute or transfer the
Program except as expressly provided under this General Public License.
Any attempt otherwise to copy, modify, sublicense, distribute or transfer
the Program is void, and will automatically terminate your rights to use
the Program under this License. However, parties who have received
copies, or rights to use copies, from you under this General Public
License will not have their licenses terminated so long as such parties
remain in full compliance.
5. By copying, distributing or modifying the Program (or any work based
on the Program) you indicate your acceptance of this license to do so,
and all its terms and conditions.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the original
licensor to copy, distribute or modify the Program subject to these
terms and conditions. You may not impose any further restrictions on the
recipients' exercise of the rights granted herein.
7. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies a version number of the license which applies to it and "any
may not charge a fee for this Package itself. However, you may distribute this
Package in aggregate with other (possibly commercial) programs as part of a
larger (possibly commercial) software distribution provided that you do not
advertise this Package as a product of your own.
6. The scripts and library files supplied as input to or produced as output
from the programs of this Package do not automatically fall under the copyright
of this Package, but belong to whomever generated them, and may be sold
commercially, and may be aggregated with this Package.
7. C or perl subroutines supplied by you and linked into this Package shall not
be considered part of this Package.
8. The name of the Copyright Holder may not be used to endorse or promote
products derived from this software without specific prior written permission.
9. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF
MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
The End
inc/MyBuilder.pm
use Config;
use ExtUtils::ParseXS;
use ExtUtils::Mkbootstrap;
use Path::Tiny;
my $EXTRA_O_FLAGS = "";
my $EXTRA_FLAGS = "-lblas -llapack";
sub ACTION_code {
my $self = shift;
$EXTRA_O_FLAGS .= " -DUSE_REAL" unless exists $self->args->{'with-float'};
$self->update_XS("XS/ML.xs.inc");
$self->dispatch("create_objects");
$self->dispatch("compile_xs");
$self->SUPER::ACTION_code;
}
sub update_XS {
my ($self, $file) = @_;
my $output = $file;
$output =~ s/\.inc$//;
open my $i_fh, "<", $file or die "$!";
open my $o_fh, ">", $output or die "$!";
while (<$i_fh>) {
s/REAL/float/g;
print {$o_fh} $_;
}
close $o_fh;
close $i_fh;
}
sub ACTION_create_objects {
my $self = shift;
my $cbuilder = $self->cbuilder;
my $c_progs = $self->rscan_dir("C", qr/\.c$/);
for my $file (@$c_progs) {
my $object = $file;
$object =~ s/\.c$/.o/;
next if $self->up_to_date($file, $object);
$cbuilder->compile(
object_file => $object,
extra_compiler_flags => $EXTRA_O_FLAGS,
source => $file,
include_dirs => ["."]
);
}
}
sub ACTION_compile_xs {
my $self = shift;
my $cbuilder = $self->cbuilder;
my $archdir = path($self->blib, "arch", "auto", "AI", "ML");
$archdir->mkpath unless -d $archdir;
my $xs = path("XS", "ML.xs");
my $xs_c = path("XS", "ML.c");
if (!$self->up_to_date($xs, $xs_c)) {
lib/AI/ML.pm
# ABSTRACT: Perl interface to ML
use strict;
use warnings;
package AI::ML;
use parent 'DynaLoader';
use Math::Lapack;
bootstrap AI::ML;
#sub dl_load_flags { 1 }
1;
lib/AI/ML/Expr.pm
use AI::ML;
use Math::Lapack;
use aliased 'Math::Lapack::Matrix' => 'M';
use parent 'Exporter';
use parent 'Math::Lapack::Expr';
our @EXPORT = qw(mini_batch tanh sigmoid relu lrelu d_sigmoid d_relu d_lrelu d_tanh softmax sigmoid_cost plot plot_cost);
use Math::Lapack::Expr;
sub _bless {
my $matrix = shift;
return bless { _matrix => $matrix, type => 'matrix' } => "Math::Lapack::Matrix";
}
=head2 sigmoid
Applies the sigmoid function to every element of the matrix.
$m = $m->sigmoid();
$m = sigmoid($m);
=cut
sub sigmoid {
my ($self) = @_;
return bless { package => __PACKAGE__, type => 'sigmoid', args => [$self] } => __PACKAGE__
}
sub eval_sigmoid {
my $tree = shift;
if (blessed($tree) && $tree->isa("Math::Lapack::Matrix")) {
return _bless _sigmoid($tree->matrix_id);
}
die "Sigmoid for non matrix: " . ref($tree);
}
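For orientation, a minimal usage sketch follows; it assumes, as the activation tests do, that Math::Lapack::Expr evaluates the lazy expression before individual elements are read:

  use AI::ML::Expr;
  use aliased 'Math::Lapack::Matrix' => 'M';

  my $m = M->random(3, 3);   # hypothetical 3x3 input
  my $s = sigmoid($m);       # builds a lazy 'sigmoid' node; eval_sigmoid()
                             # applies the element-wise C sigmoid when the
                             # expression is evaluated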
=head2 relu
Applies the ReLU function to every element of the matrix.
$m = $m->relu();
$m = relu($m);
=cut
sub relu {
my ($self) = @_;
return bless { package => __PACKAGE__, type => 'relu', args => [$self] } => __PACKAGE__;
}
sub eval_relu {
my $tree = shift;
if (ref($tree) eq "Math::Lapack::Matrix") {
return _bless _relu($tree->matrix_id);
}
die "ReLU for non matrix";
}
=head2 d_relu
Applies the derivative of the ReLU function to every element of the matrix.
$m = $m->d_relu();
$m = d_relu($m);
=cut
sub d_relu {
my ($self) = @_;
return bless { package => __PACKAGE__, type => 'd_relu', args => [$self] } => __PACKAGE__;
}
sub eval_d_relu {
my $tree = shift;
if (ref($tree) eq "Math::Lapack::Matrix") {
return _bless _d_relu($tree->matrix_id);
}
die "ReLU for non matrix";
}
=head2 lrelu
Applies the leaky ReLU function to every element of the matrix, with the leak coefficient given as the second argument.
$m = lrelu($m, 0.0001);
$m = $m->lrelu(0.1);
=cut
sub lrelu {
my ($self, $v) = @_;
return bless { package => __PACKAGE__, type => 'lrelu', args => [$self, $v] } => __PACKAGE__;
}
sub eval_lrelu {
my ($tree, $v) = @_;
if (ref($tree) eq "Math::Lapack::Matrix") {
return _bless _lrelu($tree->matrix_id, $v);
}
die "lReLU for non matrix";
}
=head2 d_lrelu
Applies the derivative of the leaky ReLU function to every element of the matrix, with the leak coefficient given as the second argument.
$m = d_lrelu($m, 0.0001);
$m = $m->d_lrelu(0.1);
=cut
sub d_lrelu {
my ($self, $v) = @_;
return bless { package => __PACKAGE__, type => 'd_lrelu', args => [$self, $v] } => __PACKAGE__;
}
sub eval_d_lrelu {
my ($tree, $v) = @_;
if (ref($tree) eq "Math::Lapack::Matrix") {
return _bless _d_lrelu($tree->matrix_id, $v);
}
die "lReLU for non matrix";
}
=head2 softmax
Applies the softmax function to the matrix, normalising each column into a probability distribution.
$m = softmax($m);
$m = $m->softmax();
=cut
sub softmax {
my ($self) = @_;
return bless { package => __PACKAGE__, type => 'softmax', args => [$self] } => __PACKAGE__;
}
sub eval_softmax {
my $tree = shift;
if (ref($tree) eq "Math::Lapack::Matrix") {
my $s = $tree->max();
my $e_x = exp( $tree - $s );
my $div = sum( $e_x, 1 );
return $e_x / $div;
#use Data::Dumper;
#print STDERR Dumper $matrix;
# return _bless _softmax($tree->matrix_id);
}
die "softmax for non matrix";
}
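eval_softmax uses the usual overflow guard: the matrix maximum is subtracted before exponentiating, which leaves the result unchanged because the exp(-max) factor cancels between numerator and denominator, and each column is then normalised by its sum. A minimal sanity-check sketch, assuming sum() with axis 1 sums the columns as the C header's VERTICAL axis suggests:

  my $soft = softmax($m);        # $m is any Math::Lapack::Matrix
  my $col_sums = sum($soft, 1);  # every entry should be ~1 after normalisation
                                 # (sum() as exported by Math::Lapack::Expr; assumption)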
=head2 d_softmax
Applies the derivative of the softmax function to the matrix.
$m = d_softmax($m);
$m = $m->d_softmax();
=cut
sub d_softmax {
my ($self) = @_;
return bless { package => __PACKAGE__, type => 'd_softmax', args => [$self] } => __PACKAGE__;
}
sub eval_d_softmax {
my $tree = shift;
if (ref($tree) eq "Math::Lapack::Matrix") {
return _bless _d_softmax($tree->matrix_id);
}
die "d_softmax for non matrix";
}
=head2 tanh
Applies the hyperbolic tangent function to every element of the matrix.
$m = tanh($m);
$m = $m->tanh();
=cut
sub tanh {
my ($self) = @_;
return bless { package => __PACKAGE__, type => 'tanh', args => [$self] } => __PACKAGE__;
}
sub eval_tanh {
my $tree = shift;
if( ref($tree) eq "Math::Lapack::Matrix"){
return _bless _tanh($tree->matrix_id);
}
die "tanh for non matrix";
}
=head2 d_tanh
Applies the derivative of the hyperbolic tangent function to every element of the matrix.
$m = d_tanh($m);
$m = $m->d_tanh();
=cut
sub d_tanh {
my ($self) = @_;
return bless { package => __PACKAGE__, type => 'd_tanh', args => [$self] } => __PACKAGE__;
}
sub eval_d_tanh {
my $tree = shift;
if( ref($tree) eq "Math::Lapack::Matrix"){
return _bless _d_tanh($tree->matrix_id);
}
die "d_tanh for non matrix";
}
=head2 d_sigmoid
Applies the derivative of the sigmoid function to every element of the matrix.
$m = $m->d_sigmoid();
$m = d_sigmoid($m);
=cut
sub d_sigmoid {
my ($self) = @_;
return bless { package => __PACKAGE__, type => 'd_sigmoid', args => [$self] } => __PACKAGE__;
}
sub eval_d_sigmoid {
my $tree = shift;
if( ref($tree) eq "Math::Lapack::Matrix"){
return _bless _d_sigmoid($tree->matrix_id);
}
return "d_sigmoid for non matrix";
}
=head2 sigmoid_cost
Computes the cost of the sigmoid (logistic) hypothesis for the given data and weights.
$cost = sigmoid_cost($x, $y, $weights);
=cut
sub sigmoid_cost {
my ($x, $y, $weights) = @_;
return _sigmoid_cost($x->matrix_id, $y->matrix_id, $weights->matrix_id);
}
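A minimal sketch of a call, under the shape conventions used elsewhere in this distribution (x is [m,n], y is [m,1], weights is [n,1]; the cost itself is computed in the C layer):

  my $x = M->random(100, 3);            # hypothetical design matrix
  my $y = M->random(100, 1);            # hypothetical labels (0/1 expected)
  my $w = M->zeros(3, 1);               # initial weights
  my $cost = sigmoid_cost($x, $y, $w);  # scalar cost for these weights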
=head2 mini_batch
Extracts a mini-batch of $size consecutive rows or columns, starting at $start, along the given axis (default 0).
=cut
sub mini_batch {
my ($self, $start, $size, $axis) = @_;
$axis = 0 unless defined $axis; #default
return _bless _mini_batch($self->matrix_id, $start, $size, $axis);
}
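A minimal sketch mirroring t/03-mini-batch.t: taking successive batches of 20 columns (axis 1) from a 200x100 matrix:

  my $m = M->random(200, 100);
  my $start = 0;
  for my $i (0 .. 4) {
      my $batch = mini_batch($m, $start, 20, 1);  # 200 rows x 20 columns
      # ... train on $batch ...
      $start += 20;
  }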
=head2 prediction
Converts a matrix of probabilities into binary (0/1) predictions, using an optional threshold (default 0.50).
$yatt = prediction($h, threshold => 0.7);
=cut
sub prediction {
my ($self, %opts) = @_;
my $t = exists $opts{threshold} ? $opts{threshold} : 0.50;
return _bless _predict_binary_classification($self->matrix_id, $t);
}
=head2 precision
Computes the precision of the predicted labels against the true labels.
=cut
sub precision {
my ($y, $yatt) = @_;
return _precision($y->matrix_id, $yatt->matrix_id);
}
=head2 accuracy
Computes the accuracy of the predicted labels against the true labels.
=cut
sub accuracy {
my ($y, $yatt) = @_;
return _accuracy($y->matrix_id, $yatt->matrix_id);
}
=head2 recall
Computes the recall of the predicted labels against the true labels.
=cut
sub recall {
my ($y, $yatt) = @_;
return _recall($y->matrix_id, $yatt->matrix_id);
}
=head2 f1
Computes the F1 score of the predicted labels against the true labels.
=cut
sub f1 {
my ($y, $yatt) = @_;
return _f1($y->matrix_id, $yatt->matrix_id);
}
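All four metric helpers share the same calling convention, true labels first and predictions second, as exercised in t/06-accuracy-precision-recall-f1.t. A minimal sketch, assuming $y and $yatt are 0/1 matrices of identical shape:

  my $acc  = AI::ML::Expr::accuracy($y, $yatt);
  my $prec = AI::ML::Expr::precision($y, $yatt);
  my $rec  = AI::ML::Expr::recall($y, $yatt);
  my $f1   = AI::ML::Expr::f1($y, $yatt);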
=head2 plot
Plots the data points in $x and $y together with the line defined by the two thetas, writing a Chart::Gnuplot chart to $file: plot($x, $y, $thetas, $file).
=cut
sub plot {
my ($x, $y, $theta, $file) = @_;
my @xdata = $x->vector_to_list();
my @ydata = $y->vector_to_list();
my @thetas = $theta->vector_to_list();
my $f = $thetas[0] . "+" . $thetas[1] . "*x";
#print STDERR "$_\n" for(@xdata);
#rint STDERR "$_\n" for(@ydata);
#print STDERR "$f\n";
#print STDERR "\n\nFILE == $file\n\n";
lib/AI/ML/Expr.pm
my $func = Chart::Gnuplot::DataSet->new(
func => $f
);
$chart->plot2d($points, $func);
}
=head2 plot_cost
Plots the cost value per iteration, writing a Chart::Gnuplot chart to the given file: plot_cost($file, @costs).
=cut
sub plot_cost{
my ($file, @costs) = @_;
my @iters = (1 .. scalar(@costs));
my $chart = Chart::Gnuplot->new(
output => $file,
title => "Cost",
xlabel => "Iter",
ylabel => "Cost"
);
$chart->png;
lib/AI/ML/LinearRegression.pm
use Scalar::Util 'blessed';
use aliased 'Math::Lapack::Matrix' => 'M';
use Math::Lapack::Expr;
use parent 'AI::ML::Expr';
use Data::Dumper;
=head2 new
=cut
sub new {
my ($self, %opts) = @_;
$self = bless {} => 'AI::ML::LinearRegression';
$self->{grad} = $opts{gradient} if exists $opts{gradient};
$self->{reg} = $opts{lambda} if exists $opts{lambda};
$self->{cost} = $opts{cost} if exists $opts{cost};
$self->{plot} = $opts{plot} if exists $opts{plot};
$self->{n} = exists $opts{n} ? $opts{n} : 100;
$self->{alpha} = exists $opts{alpha} ? $opts{alpha} : 0.1;
lib/AI/ML/LinearRegression.pm
#Default is the normal equation.
#Options:
#  gradient => use gradient descent instead of the normal equation
#  plot     => plot the data and the fitted line
#  cost     => plot the cost per iteration
#  alpha    => learning rate for gradient descent
#  n        => number of iterations
#A usage sketch follows below.
=cut
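A minimal end-to-end sketch of the options listed above (by default train() solves the normal equation; passing gradient switches to gradient descent driven by alpha and n):

  use AI::ML::LinearRegression;

  my $lr = AI::ML::LinearRegression->new();   # normal equation (default)
  # my $lr = AI::ML::LinearRegression->new(gradient => 1, alpha => 0.01, n => 500);
  $lr->train($x, $y);                         # $x is [m,n], $y is [m,1] (assumed shapes)
  my $thetas = $lr->{thetas};                 # fitted parameters, intercept first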
sub train {
my ($self, $x, $y) = @_;
my ($thetas, $iters, $alpha, $lambda);
if( exists $self->{grad} ) {
$iters = $self->{n};
$alpha = $self->{alpha};
my ($cost, $grads, $reg_thetas);
my $x = Math::Lapack::Matrix::concatenate(
M->ones($x->rows, 1),
$x
lib/AI/ML/LinearRegression.pm
$thetas = normal_eq($x, $y);
AI::ML::Expr::plot($x, $y, $thetas, $self->{plot}) if defined $self->{plot};
}
$self->{thetas} = $thetas;
}
=head2 normal_eq
Computes the parameters analytically with the normal equation, thetas = (X' * X)^-1 * X' * y, after prepending a column of ones to X.
=cut
sub normal_eq {
my ($x, $y) = @_;
#add a column of ones to the matrix X
$x = Math::Lapack::Matrix::concatenate(
M->ones($x->rows, 1),
$x
);
return ((($x->T x $x)->inverse) x $x->T) x $y;
}
=head2 linear_regression_pred
Returns the predicted values, considering X with dimensions (m,n) and theta with dimensions (n,1).
=cut
sub linear_regression_pred {
my ($x, $thetas) = @_;
return $x x $thetas;
}
1;
lib/AI/ML/LogisticRegression.pm
use aliased 'Math::Lapack::Matrix' => 'M';
use Math::Lapack::Expr;
use AI::ML::Expr;
use parent 'AI::ML::Expr';
use Data::Dumper;
=head2 new
=cut
sub new {
my ($self, %opts) = @_;
$self = bless {} => 'AI::ML::LogisticRegression';
$self->{reg} = $opts{reg} if exists $opts{reg};
$self->{cost} = $opts{cost} if exists $opts{cost};
$self->{plot} = $opts{plot} if exists $opts{plot};
$self->{n} = exists $opts{n} ? $opts{n} : 100;
$self->{alpha} = exists $opts{alpha} ? $opts{alpha} : 0.1;
return $self;
}
=head2 train
Trains the logistic regression model with gradient descent, considering x with dimensions [m,n] and y with dimensions [m,1].
=cut
sub train {
my ($self, $x, $y) = @_;
my ($lambda, $thetas, $h, $cost, $reg, $reg_thetas, $grad);
my $iters = $self->{n};
my $alpha = $self->{alpha};
#my $cost_file = exists $opts{cost} ? $opts{cost} : undef;
$x = Math::Lapack::Matrix::concatenate(
M->ones($x->rows,1),
$x
lib/AI/ML/LogisticRegression.pm
}
}
AI::ML::Expr::plot_cost($self->{cost}, @cost_values) if exists $self->{cost};
$self->{thetas} = $thetas;
}
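A minimal usage sketch, following t/05-logistic-regression.t (train() iterates for n steps at learning rate alpha; prediction() thresholds the sigmoid output, and the metric methods then compare it against the true labels):

  use AI::ML::LogisticRegression;

  my $model = AI::ML::LogisticRegression->new(n => 100, alpha => 0.1);
  $model->train($x, $y);            # $x is [m,n], $y is [m,1] of 0/1 labels
  $model->prediction($x);           # stores the 0/1 predictions in the object
  print "accuracy: ",  $model->accuracy($y),  "\n";
  print "precision: ", $model->precision($y), "\n";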
=head2 classification
Computes sigmoid(X x thetas) with the trained parameters and stores the resulting probabilities in the object.
=cut
sub classification {
my ($self, $x) = @_;
$x = (M->ones($x->rows,1))->append($x);
$self->{classification} = sigmoid($x x $self->{thetas});
}
=head2 prediction
=cut
sub prediction {
my ($self, $x, %opts) = @_;
$x = Math::Lapack::Matrix::concatenate(
M->ones($x->rows,1),
$x
);
my $h = sigmoid($x x $self->{thetas});
$self->{yatt} = AI::ML::Expr::prediction($h, %opts);
}
=head2 accuracy
=cut
sub accuracy {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::accuracy($y, $self->{yatt});
}
=head2 precision
=cut
sub precision {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::precision($y, $self->{yatt});
}
=head2 recall
=cut
sub recall {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::recall($y, $self->{yatt});
}
=head2 f1
=cut
sub f1 {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::f1($y, $self->{yatt});
}
1;
lib/AI/ML/NeuralNetwork.pm
tanh => \&AI::ML::Expr::tanh,
dsigmoid => \&AI::ML::Expr::d_sigmoid,
drelu => \&AI::ML::Expr::d_relu,
dlrelu => \&AI::ML::Expr::d_lrelu,
dtanh => \&AI::ML::Expr::d_tanh
};
=head2 new
=cut
sub new {
my ($self, $layers, %opts) = @_;
$self = bless {} => 'AI::ML::NeuralNetwork';
my $i = 0;
for my $href ( @$layers ) {
if( $i == 0 ){
$self->{"l$i"} = { units => $href };
}
else {
if( $href =~ /^\d+$/ ){
lib/AI/ML/NeuralNetwork.pm
$self->{reg} = exists $opts{reg} ? $opts{reg} : undef;
$self->{cost} = exists $opts{cost} ? $opts{cost} : undef;
$self->{plot} = exists $opts{plot} ? $opts{plot} : undef;
return $self;
}
=head2 load_weights_bias
Initialises every layer with random weights and zero biases, sized from the unit counts of consecutive layers.
=cut
sub load_weights_bias {
my ($self) = @_;
my $size = keys %$self;
$self->{layers} = $size;
for my $i ( 1 .. $size-1 ) {
my $j = $i - 1;
$self->{"l$i"}{w} = Math::Lapack::Matrix->random($self->{"l$i"}{units}, $self->{"l$j"}{units});
$self->{"l$i"}{b} = Math::Lapack::Matrix->zeros($self->{"l$i"}{units}, 1);
}
}
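A minimal construction sketch. The layer specification is an array reference: plain integers are assumed to set the unit count per layer, and activation names are resolved through the $functions table above; weights are initialised explicitly here before training:

  use AI::ML::NeuralNetwork;

  # hypothetical 2-4-1 network: layer 0 is the input layer
  my $nn = AI::ML::NeuralNetwork->new([2, 4, 1]);
  $nn->load_weights_bias();     # random weights, zero biases for every layer
  $nn->train($x, $y);           # $x: input units x samples, $y: output units x samples
  $nn->prediction($x);
  print $nn->accuracy($y), "\n";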
=head2 train
Trains the network on $x and $y, where each column is one sample: $x has one row per input unit and $y one row per output unit.
=cut
sub train {
my ($self, $x, $y, %opts) = @_;
my $m = $x->columns;
my $layers = $self->{layers};
die "Wrong number of units in input layer" if ( $x->rows != $self->{"l0"}{units} );
die "Wrong number of units in output layer" if ( $y->rows != $self->{"l".($layers-1)}{units} );
my $var = { A0 => $x };
my $iters = $self->{n};
lib/AI/ML/NeuralNetwork.pm
}
}
}
$self->{grads} = { %$var } if exists $opts{grads};   # store a copy of the forward/backward variables
}
=head2 gradient_checking
=cut
sub gradient_checking {
my ($self, $x, $y) = @_;
my ($params, $grads, %dims) = $self->_get_params_grads();
#print STDERR Dumper($params);
#print STDERR Dumper($grads);
#print STDERR Dumper(%dims);
#my $n = $params->rows;
#my $m = $params->columns;
#print STDERR "elements:$n,$m\nParams vector\n";
#for my $i (0..$n-1){
lib/AI/ML/NeuralNetwork.pm
# $grad_aprox($i,0) = ($J_plus($i,0) - $j_minus($i,0)) / (2*$epsilon);
#}
}
=head2 _vector_to_hash
Rebuilds the per-layer weight matrices from a flattened parameter vector, using the dimensions recorded in %dims.
=cut
sub _vector_to_hash {
my ($vector, $n, %dims) = @_;
my $size = $vector->rows;
my $pos = 0;
my ($n_values, $weight, $bias);
my %hash;
for my $i (1..$n-1){
$n_values = $dims{"w$i"}{rows} * $dims{"w$i"}{cols};
$weight = $vector->slice( row_range => [$pos, $pos+$n_values-1] );
$hash{"l$i"}{w} = $weight->reshape($dims{"w$i"}{rows}, $dims{"w$i"}{cols});
lib/AI/ML/NeuralNetwork.pm
$pos += $n_values;
}
return %hash;
}
=head2 _get_params_grads
=cut
sub _get_params_grads {
my ($self) = @_;
my ($matrix, $params, $grads, $n, %dims);
my ($r, $c);
$n = $self->{layers};
$matrix = $self->{"l1"}{w};
$dims{"w1"}{rows} = $matrix->rows;
$dims{"w1"}{cols} = $matrix->columns;
lib/AI/ML/NeuralNetwork.pm
#print STDERR "cols: $c, rows: $r\n";
#print STDERR Dumper(%dims);
return ($params, $grads, %dims);
}
=head2 prediction
Runs a forward pass through all layers and converts the final activation into binary predictions.
=cut
sub prediction {
my ($self, $x, %opts) = @_;
my $layers = $self->{layers};
my $var = { A0 => $x };
my ($i, $j);
for ( 1 .. $layers-1){
$i = $_;
$j = $i - 1;
$var->{"Z$i"} = $self->{"l$i"}{w} x $var->{"A$j"} + $self->{"l$i"}{b};
$var->{"A$i"} = $functions->{ $self->{"l$i"}{func} }->($var->{"Z$i"});
$i++;
lib/AI/ML/NeuralNetwork.pm
$i--;
$self->{yatt} = AI::ML::Expr::prediction($var->{"A$i"}, %opts);
}
=head2 accuracy
=cut
sub accuracy {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::accuracy($y, $self->{yatt});
}
=head2 precision
=cut
sub precision {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::precision($y, $self->{yatt});
}
=head2 recall
=cut
sub recall {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::recall($y, $self->{yatt});
}
=head2 f1
=cut
sub f1 {
my ($self, $y) = @_;
unless( exists $self->{yatt} ) {
print STDERR "You should first predict the values!\n";
exit;
}
return AI::ML::Expr::f1($y, $self->{yatt});
}
1;
scripts/mnist.pl
"train-images" => "train-images-idx3-ubyte",
"train-labels" => "train-labels-idx1-ubyte",
"test-images" => "t10k-images-idx3-ubyte",
"test-labels" => "t10k-labels-idx1-ubyte"
);
_load_data();
sub _load_data {
_download_data();
# compile c file
system("gcc load_data.c -o load");
my @matrices;
for my $key ( keys %opt ) {
my (undef, $type) = split /-/, $key;
system("gunzip $opt{$key}.gz");
system("./load $type $opt{$key} $key.csv");
}
}
sub _download_data{
my $http = HTTP::Tiny->new();
my $url = "http://yann.lecun.com/exdb/mnist";
my $res;
for my $key ( keys %opt ) {
my $file = "$url/$opt{$key}.gz";
my $ff = File::Fetch->new(uri => $file);
my $aux = $ff->fetch() or die $ff->error;
#print "$file\n";
t/00-report-prereqs.t
# hide optional CPAN::Meta modules from prereq scanner
# and check if they are available
my $cpan_meta = "CPAN::Meta";
my $cpan_meta_pre = "CPAN::Meta::Prereqs";
my $HAS_CPAN_META = eval "require $cpan_meta; $cpan_meta->VERSION('2.120900')" && eval "require $cpan_meta_pre"; ## no critic
# Verify requirements?
my $DO_VERIFY_PREREQS = 1;
sub _max {
my $max = shift;
$max = ( $_ > $max ) ? $_ : $max for @_;
return $max;
}
sub _merge_prereqs {
my ($collector, $prereqs) = @_;
# CPAN::Meta::Prereqs object
if (ref $collector eq $cpan_meta_pre) {
return $collector->with_merged_prereqs(
CPAN::Meta::Prereqs->new( $prereqs )
);
}
# Raw hashrefs
t/01-activation-funcs.t
#prob of third col
_float($soft->get_element(0,2), 0.00626879, "Element correct at 0,2");
_float($soft->get_element(1,2), 0.01704033, "Element correct at 1,2");
_float($soft->get_element(2,2), 0.04632042, "Element correct at 2,2");
_float($soft->get_element(3,2), 0.93037047, "Element correct at 3,2");
done_testing;
sub _float {
my ($a, $b, $c) = @_;
is($a, float($b, tolerance => 0.00001), $c);
}
t/02-cost_functions.t
float($a->get_element(0,1), 1, "Element correct at 0,1");
float($a->get_element(0,2), 1, "Element correct at 0,2");
float($a->get_element(0,3), 1, "Element correct at 0,3");
my $b = $a x $m_1->transpose;
float($b->get_element(0,0), 6, "Element correct at 0,0");
print "1..$nr_tests\n";
sub float {
$nr_tests++;
my ($a, $b, $explanation) = @_;
if (abs($a-$b) > 0.000001){
print "not ";
$explanation .= " ($a vs $b)";
}
print "ok $nr_tests - $explanation\n";
}
sub is {
$nr_tests++;
my ($a, $b, $explanation) = @_;
if ($a != $b){
print "not ";
$explanation .= " ($a vs $b)";
}
print "ok $nr_tests - $explanation\n";
}
t/03-mini-batch.t
$axis = 1;
for my $i (0..4){
my $b = mini_batch($m_1, $start, $size, $axis);
is($b->rows, 200, "Right number of rows\n");
is($b->columns, 20, "Right number of columns\n");
$start += $size;
}
print "1..$nr_tests\n";
sub float {
$nr_tests++;
my ($a, $b, $explanation) = @_;
if (abs($a-$b) > 0.000001){
print "not ";
$explanation .= " ($a vs $b)";
}
print "ok $nr_tests - $explanation\n";
}
sub is {
$nr_tests++;
my ($a, $b, $explanation) = @_;
if ($a != $b){
print "not ";
$explanation .= " ($a vs $b)";
}
print "ok $nr_tests - $explanation\n";
}
t/04-linear-regression.t
is($n->{thetas}->rows, 2, "Right number of rows");
is($n->{thetas}->columns, 1, "Right number of columns");
_float($n->{thetas}->get_element(0,0), 0.78473628, "Normal Equation - Right value of theta 0,0");
_float($n->{thetas}->get_element(1,0), 0.83133813, "Normal Equation - Right value of theta 1,0");
### FIXME: if the tests generate files, you should test them.
## and delete them afterwards
done_testing();
sub _float {
my ($a, $b, $c) = @_;
is($a, float($b, tolerance => 0.000001), $c);
}
t/05-logistic-regression.t
$n->prediction($x);
_float($n->accuracy($y), 0.7450980392156863, "Right value of accuracy");
_float($n->precision($y), 0.5652173913043478, "Right value of precision");
_float($n->recall($y), 0.16049382716049382, "Right value of recall");
_float($n->f1($y), .25, "Right value of f1");
done_testing;
sub _float {
my ($a, $b, $c) = @_;
is($a, float($b, tolerance => 0.01), $c);
}
t/06-accuracy-precision-recall-f1.t
float($prec, 0.571428571, "Right Precision");
my $rec = AI::ML::Expr::recall($y, $yatt);
float($rec, 0.5, "Right recall");
my $f_1 = AI::ML::Expr::f1($y, $yatt);
float($f_1, 0.533333334, "Right f1");
print "1..$nr_tests\n";
sub float {
$nr_tests++;
my ($a, $b, $explanation) = @_;
if (abs($a-$b) > 0.000001){
print "not ";
$explanation .= " ($a vs $b)";
}
print "ok $nr_tests - $explanation\n";
}
sub is {
$nr_tests++;
my ($a, $b, $explanation) = @_;
if ($a != $b){
print "not ";
$explanation .= " ($a vs $b)";
}
print "ok $nr_tests - $explanation\n";
}
t/07-neural-network.t
);
}
_float($NN->accuracy($y), 0.98, "Right accuracy");
_float($NN->precision($y), 0.970588235294118, "Right precision");
_float($NN->recall($y), .99, "Right recall");
_float($NN->f1($y), 0.98019801980198, "Right F1");
done_testing();
sub _float {
my ($a, $b, $c) = @_;
is($a, float($b, tolerance => 0.1), $c);
}
t/08-gradient-checking.t_
#print STDERR "Precison: $prec\n";
#print STDERR "Recall: $rec\n";
#print STDERR "F1: $f1\n";
#my $t1 = $NN->{"l1"}{w};
#$t1->save_matlab("/tmp/t0.m");
#print STDERR Dumper($NN->{"l1"}{w});
done_testing;
sub _float {
my ($a, $b, $c) = @_;
is($a, float($b, tolerance => 0.01), $c);
}