AI-ML

 view release on metacpan or  search on metacpan

Build.PL  view on Meta::CPAN


# This file was automatically generated by Dist::Zilla::Plugin::ModuleBuild v6.010.
use strict;
use warnings;

use Module::Build 0.28;
# MyBuilder (shipped in inc/) subclasses Module::Build to drive the XS/C
# compilation steps for this distribution.
use lib qw{inc}; use MyBuilder;

# Prerequisites and distribution metadata handed to the builder below.
my %module_build_args = (
  "build_requires" => {
    "Module::Build" => "0.28"
  },
  "configure_requires" => {
    "Module::Build" => "0.28"
  },
  "dist_abstract" => "Perl interface to ML",
  "dist_author" => [
    "Rui Meira <ruimiguelcm96\@gmail.com>"
  ],
  "dist_name" => "AI-ML",
  "dist_version" => "0.001",
  "license" => "perl",
  "module_name" => "AI::ML",
  "recursive_test_files" => 1,
  "test_requires" => {
    "ExtUtils::MakeMaker" => 0,
    "File::Spec" => 0,
    "Test2::V0" => "0.000060",
    "Test::More" => 0
  }
);


# Flattened prerequisite list, used when the installed Module::Build is too
# old to understand a separate "test_requires" key.
my %fallback_build_requires = (
  "ExtUtils::MakeMaker" => 0,
  "File::Spec" => 0,
  "Module::Build" => "0.28",
  "Test2::V0" => "0.000060",
  "Test::More" => 0
);


# Module::Build gained test_requires support in 0.4004; on older versions,
# fold the test prerequisites into build_requires instead.
unless ( eval { Module::Build->VERSION(0.4004) } ) {
  delete $module_build_args{test_requires};
  $module_build_args{build_requires} = \%fallback_build_requires;
}

my $build = MyBuilder->new(%module_build_args);


$build->create_build_script;

C/config.h  view on Meta::CPAN

#ifndef __ML_CONFIG_H__
#define __ML_CONFIG_H__

/* REAL selects the floating-point precision used throughout the C layer:
 * define USE_REAL at compile time for double precision, otherwise float. */
#ifdef USE_REAL
#	define REAL double
#else
#	define REAL float
#endif

#endif

C/nn.c  view on Meta::CPAN

#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include "C/nn.h"

/* NEW_MATRIX(t, m, r, c): allocate a Matrix of r x c elements of type t and
 * assign it to m. NOTE(review): neither malloc result is checked for NULL. */
#define NEW_MATRIX(t,m,r,c) 	m = (Matrix*)malloc(sizeof(Matrix));\
								m->rows = r; m->columns = c;\
								m->values = (t*)malloc(r*c*sizeof(t));

/* Logistic sigmoid: 1 / (1 + e^-a). The void* parameter exists only to
 * match the element_wise callback signature and is unused. */
REAL sigmoid(REAL a, void* v){
#ifdef USE_REAL
	REAL e = exp(-a);
#else
	REAL e = expf(-a);
#endif
	return 1 / (1 + e);
}

/* Derivative of the sigmoid: s(a) * (1 - s(a)). v is unused (callback shape). */
REAL d_sigmoid(REAL a, void* v){
	REAL value = sigmoid(a, NULL);
	return value * (1 - value);
}

/* Hyperbolic tangent in the configured precision. v is unused. */
REAL hyperbolic_tan(REAL a, void* v){
#ifdef USE_REAL
	REAL result = tanh(a);
#else
	REAL result = tanhf(a);
#endif
	return result;
}

/* Derivative of tanh: 1 - tanh(a)^2. Squares with a plain multiplication
 * instead of pow()/powf(): same result, no library-call overhead, and tanh
 * is evaluated only once. v is unused (callback shape). */
REAL d_hyperbolic_tan(REAL a, void* v){
#ifdef USE_REAL
	REAL t = tanh(a);
#else
	REAL t = tanhf(a);
#endif
	return 1 - t * t;
}


/* Apply the sigmoid to every element, returning a newly allocated matrix. */
Matrix *matrix_sigmoid(Matrix *m){
	Matrix *out = element_wise(m, sigmoid, NULL);
	return out;
}

/* Element-wise ReLU over m; returns a new matrix. */
Matrix *matrix_ReLU(Matrix *m){
	Matrix *out = element_wise(m, ReLU, NULL);
	return out;
}

/* Element-wise ReLU derivative over m; returns a new matrix. */
Matrix *matrix_d_ReLU(Matrix *m){
	Matrix *out = element_wise(m, d_ReLU, NULL);
	return out;
}

/* Element-wise leaky ReLU over m with negative slope v; returns a new
 * matrix. The slope is passed to the callback through its void* channel. */
Matrix *matrix_LReLU(Matrix *m, REAL v){
	REAL slope = v;
	return element_wise(m, LReLU, &slope);
}

/* Element-wise leaky-ReLU derivative over m with negative slope v. */
Matrix *matrix_d_LReLU(Matrix *m, REAL v){
	REAL slope = v;
	return element_wise(m, d_LReLU, &slope);
}

/* Rectified linear unit: max(a, 0). v is unused (callback shape). */
REAL ReLU(REAL a, void* v){
	return (a >= 0) ? a : 0;
}

/* ReLU derivative: 0 for negative inputs, 1 otherwise (including a == 0). */
REAL d_ReLU(REAL a, void *v){
	return (a < 0) ? 0 : 1;
}

/* Leaky ReLU: identity for non-negative a; otherwise a scaled by the REAL
 * slope that v points to (v is only dereferenced on the negative branch,
 * matching the original). */
REAL LReLU(REAL a, void *v){
	if(a < 0){
		REAL slope = *((REAL*)v);
		return a * slope;
	}
	return a;
}

/* Leaky-ReLU derivative: 1 for non-negative a, otherwise the slope at v. */
REAL d_LReLU(REAL a, void *v){
	if(a < 0) return *((REAL*)v);
	return 1;
}

/* Softmax of a vector stored as a 1 x n or n x 1 Matrix. The input is
 * shifted by its maximum before exponentiating for numerical stability.
 * Returns a newly allocated matrix; intermediates are freed.
 *
 * Fix: the original left vm UNINITIALIZED (undefined behavior) for a 1x1
 * matrix and for any non-vector input. A 1x1 input is now handled by the
 * row-vector branch (softmax of a single element is 1); genuine
 * non-vector input is rejected explicitly, matching the error style of
 * mini_batch(). */
Matrix *matrix_softmax(Matrix *m){
	Matrix *sm = matrix_sum(m, -get_max(m));
	Matrix *em = matrix_exp(sm);
	Matrix *vm;
	if(m->rows == 1){
		vm = sum(em, HORIZONTAL);
	}
	else if(m->columns == 1){
		vm = sum(em, VERTICAL);
	}
	else{
		fprintf(stderr, "matrix_softmax expects a row or column vector\n");
		exit(1);
	}
	Matrix *sf = div_matrices(em, vm);
	destroy(sm);
	destroy(vm);
	destroy(em);
	return sf;
}

/* Element-wise softmax derivative: softmax(m) * (1 - softmax(m)), computed
 * as e*(S - e)/S with e = exp(m) and S the column sums.
 *
 * Fixes: the original leaked every intermediate matrix (exp, sum, and the
 * anonymous sub/mul results), and named a local `exp`, shadowing the
 * math.h function. All intermediates are now destroyed before returning. */
Matrix *matrix_d_softmax(Matrix *m){
    Matrix *em = matrix_exp(m);
    Matrix *s = sum(em, VERTICAL);
    Matrix *diff = sub_matrices(s, em);
    Matrix *num = mul_matrices(em, diff);
    Matrix *result = div_matrices(num, s);
    destroy(em);
    destroy(s);
    destroy(diff);
    destroy(num);
    return result;
}
    

/* Element-wise sigmoid derivative over m; returns a new matrix. */
Matrix *matrix_d_sigmoid(Matrix *m){
	Matrix *out = element_wise(m, d_sigmoid, NULL);
	return out;
}

/* Element-wise tanh over m; returns a new matrix. */
Matrix *matrix_tanh(Matrix *m){
	Matrix *out = element_wise(m, hyperbolic_tan, NULL);
	return out;
}

/* Element-wise tanh derivative over m; returns a new matrix. */
Matrix *matrix_d_tanh(Matrix *m){
	Matrix *out = element_wise(m, d_hyperbolic_tan, NULL);
	return out;
}

/* Element-wise exponential over m; returns a new matrix. */
Matrix *matrix_exp(Matrix *m){
	Matrix *out = element_wise(m, exponential, NULL);
	return out;
}

/* Binary cross-entropy cost of the logistic model sigmoid(X * weights)
 * against labels Y, averaged over Y->rows examples.
 *
 * Fix: the original leaked both the dot() product and the sigmoid matrix h;
 * both are now captured and destroyed before returning. The accumulator is
 * kept in the wider type matching the configured precision, as before. */
REAL sigmoid_cost(Matrix *X, Matrix *Y, Matrix *weights){
	Matrix *z, *h;
	int m, i, size;
	m = Y->rows;
	z = dot(X, weights, 0, 0);
	h = matrix_sigmoid(z);

	size = Y->rows * Y->columns;

#ifdef USE_REAL
	double cost = 0;
	for(i = 0; i < size; i++){
		/* -y*log(h) - (1-y)*log(1-h), summed over every entry. */
		cost += -Y->values[i]*log(h->values[i]) - ( 1 - Y->values[i]) * log(1 - h->values[i]);
	}
#else
	float cost = 0;
	for(i = 0; i < size; i++){
		cost += -Y->values[i]*logf(h->values[i]) - ( 1 - Y->values[i]) * logf(1 - h->values[i]);
	}
#endif
	destroy(z);
	destroy(h);
	return cost/m;
}

/* Extract a contiguous batch of `size` training examples from m starting at
 * index `start`. axis == 0 slices columns (each example is a column);
 * axis == 1 slices rows. Exits with a diagnostic on any invalid request. */
Matrix *mini_batch(Matrix *m, int start, int size, int axis){
    if(start < 0){
        fprintf(stderr, "start index need to be bigger or equal index 0\n");
        exit(1);
    }
    int last = start + size - 1;
    if(axis == 0){
        if(last >= m->columns){
            fprintf(stderr, "Out of index of columns\n");
            exit(1);
        }
        return slice(m, -1, -1, start, last);
    }
    if(axis == 1){
        if(last >= m->rows){
            fprintf(stderr, "Out of index of rows\n");
            exit(1);
        }
        return slice(m, start, last, -1, -1);
    }
    fprintf(stderr, "Invalid axis\n");
    exit(1);
    return NULL; /* unreachable; silences missing-return warnings */
}

/* Threshold m element-wise into a freshly allocated 0/1 matrix of the same
 * shape: entries >= threshold become 1, all others 0. */
Matrix *predict_binary_classification(Matrix *m, REAL threshold){
	Matrix *pred;
	int n = m->rows * m->columns;
	NEW_MATRIX(REAL, pred, m->rows, m->columns);
	for(int i = 0; i < n; i++){
		pred->values[i] = (m->values[i] >= threshold) ? 1 : 0;
	}
	return pred;
}


/* Fraction of entries where the prediction yatt equals the label y. */
double accuracy(Matrix *y, Matrix *yatt){
	int n = y->columns * y->rows;
	double correct = 0;
	for(int i = 0; i < n; i++){
		if(y->values[i] == yatt->values[i]) correct++;
	}
	return correct / n;
}


/* Precision: true positives / all predicted positives.
 *
 * Fix: when nothing is predicted positive the original computed 0/0 and
 * returned NaN; by the usual convention we return 0 instead. */
double precision(Matrix *y, Matrix *yatt){
	double tp = 0, fp = 0;
	int size = y->columns * y->rows;
	for ( int i = 0; i < size; i++ ){
		// Total predicted positive
		if(yatt->values[i] == 1){
			// True positive
			if(y->values[i] == 1) tp++;
			else fp++; // False positive
		}
	}
	if(tp + fp == 0) return 0;
	return (tp / (tp + fp));
}


/* Recall: true positives / all actual positives.
 *
 * Fix: when there are no actual positives the original computed 0/0 and
 * returned NaN; by the usual convention we return 0 instead. */
double recall(Matrix *y, Matrix *yatt){
	double tp = 0, fn = 0;
	int size = y->columns * y->rows;
	for ( int i = 0; i < size; i++ ){
		// Total actual positive
		if(y->values[i] == 1){
			// True positive
			if(yatt->values[i] == 1) tp++;
			else fn++; // False negative
		}
	}
	if(tp + fn == 0) return 0;
	return (tp / (tp + fn));
}


/* F1 score: harmonic mean of precision and recall,
 * 2 * (prec * rec) / (prec + rec).
 *
 * Fix: when prec + rec == 0 the original divided 0/0 and returned NaN;
 * return 0 instead. Also normalizes the original's mixed indentation. */
double f1(Matrix *y, Matrix *yatt){
	double prec = precision(y, yatt);
	double rec  = recall(y, yatt);
	double denom = prec + rec;
	if(denom == 0) return 0;
	return 2 * ( ( prec * rec ) / denom );
}

C/nn.h  view on Meta::CPAN

/* Public interface of the AI-ML C layer.
 *
 * Fixes: added the include guard this header was missing (C/config.h has
 * one; this file did not, so double inclusion redefined Matrix/Axis), and
 * removed a duplicate declaration of div_matrices. */
#ifndef __ML_NN_H__
#define __ML_NN_H__

#include "C/config.h"

/* Dense matrix of REAL values (float or double, chosen in C/config.h). */
typedef struct s_matrix{
	int columns;
	int rows;
	REAL *values;
}Matrix;

/* Axis selector for reductions and broadcasts. */
typedef enum _axis{
	HORIZONTAL = 0,
	VERTICAL = 1
} Axis;

/* Element-wise matrix arithmetic; each returns a new matrix. */
Matrix *sub_matrices(Matrix *A, Matrix *B);

Matrix *mul_matrices(Matrix *A, Matrix *B);

Matrix *div_matrices(Matrix *A, Matrix *B);

/* Matrix product; A_t / B_t request transposition of the operands. */
Matrix *dot(Matrix *A, Matrix *B, int A_t, int B_t);

Matrix *slice(Matrix *m, int x0, int x1, int y0, int y1);

Matrix *mini_batch(Matrix *m, int start, int size, int axis);
Matrix *sum(Matrix *m, Axis axis);

Matrix *broadcasting(Matrix *A, Matrix *B, Axis axis, REAL f(REAL, REAL));

/* Scalar helpers (used as broadcasting callbacks) and reductions. */
REAL real_mul(REAL a, REAL b);
REAL get_max(Matrix*);
Matrix* matrix_sum(Matrix*, REAL);

REAL real_sub(REAL a, REAL b);

REAL real_sum(REAL a, REAL b);

REAL real_div(REAL a, REAL b);

/* Scalar activations and derivatives; the void* carries optional
 * parameters (e.g. the leaky-ReLU slope) for element_wise(). */
REAL sigmoid(REAL a, void* v);

REAL ReLU(REAL a, void* v);

REAL d_ReLU(REAL a, void* v);

REAL LReLU(REAL a, void *v);

REAL d_LReLU(REAL a, void *v);

REAL d_sigmoid(REAL a, void* v);

REAL exponential(REAL a, void* v);

Matrix *element_wise(Matrix *m, REAL f(REAL, void*), void* data);

/* Matrix-level activation wrappers; each returns a new matrix. */
Matrix *matrix_sigmoid(Matrix *m);

Matrix *matrix_ReLU(Matrix *m);

Matrix *matrix_d_ReLU(Matrix *m);

Matrix *matrix_LReLU(Matrix *m, REAL v);

Matrix *matrix_d_LReLU(Matrix *m, REAL v);

Matrix *matrix_softmax(Matrix *m);

Matrix *matrix_d_softmax(Matrix *m);

Matrix *matrix_d_sigmoid(Matrix *m);

Matrix *matrix_tanh(Matrix *m);

Matrix *matrix_d_tanh(Matrix *m);

Matrix *matrix_exp(Matrix *m);

void destroy(Matrix *m);

REAL sigmoid_cost(Matrix *X, Matrix *Y, Matrix *weights);

Matrix *predict_binary_classification(Matrix *m, REAL threshold);

/* Classification metrics. */
double accuracy(Matrix *y, Matrix *yatt);
double precision(Matrix *y, Matrix *yatt);
double recall(Matrix *y, Matrix *yatt);
double f1(Matrix *y, Matrix *yatt);

/* BLAS GEMM entry point (Fortran calling convention), matching REAL. */
#ifdef USE_REAL
void dgemm_ (char*, char*, int*, int*, int*, double*, double*, int*, double*, int*, double*, double*, int*);
#else
void sgemm_ (char*, char*, int*, int*, int*, float*, float*, int*, float*, int*, float*, float*, int*);
#endif

#endif /* __ML_NN_H__ */

Changes  view on Meta::CPAN

0.001 2019-08-31
    - First version. Completely useless.

LICENSE  view on Meta::CPAN

This software is copyright (c) 2018 by Rui Meira.

This is free software; you can redistribute it and/or modify it under
the same terms as the Perl 5 programming language system itself.

Terms of the Perl programming language system itself

a) the GNU General Public License as published by the Free
   Software Foundation; either version 1, or (at your option) any
   later version, or
b) the "Artistic License"

--- The GNU General Public License, Version 1, February 1989 ---

This software is Copyright (c) 2018 by Rui Meira.

This is free software, licensed under:

  The GNU General Public License, Version 1, February 1989

                    GNU GENERAL PUBLIC LICENSE
                     Version 1, February 1989

 Copyright (C) 1989 Free Software Foundation, Inc.
 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA

 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.

                            Preamble

  The license agreements of most software companies try to keep users
at the mercy of those companies.  By contrast, our General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users.  The
General Public License applies to the Free Software Foundation's
software and to any other program whose authors commit to using it.
You can use it for your programs, too.

  When we speak of free software, we are referring to freedom, not
price.  Specifically, the General Public License is designed to make
sure that you have the freedom to give away or sell copies of free
software, that you receive source code or can get it if you want it,
that you can change the software or use pieces of it in new free
programs; and that you know you can do these things.

  To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.

  For example, if you distribute copies of a such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have.  You must make sure that they, too, receive or can get the
source code.  And you must tell them their rights.

  We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.

  Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software.  If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.

  The precise terms and conditions for copying, distribution and
modification follow.

                    GNU GENERAL PUBLIC LICENSE
   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

  0. This License Agreement applies to any program or other work which
contains a notice placed by the copyright holder saying it may be
distributed under the terms of this General Public License.  The
"Program", below, refers to any such program or work, and a "work based
on the Program" means either the Program or any work containing the
Program or a portion of it, either verbatim or with modifications.  Each
licensee is addressed as "you".

  1. You may copy and distribute verbatim copies of the Program's source
code as you receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice and
disclaimer of warranty; keep intact all the notices that refer to this
General Public License and to the absence of any warranty; and give any
other recipients of the Program a copy of this General Public License
along with the Program.  You may charge a fee for the physical act of
transferring a copy.

  2. You may modify your copy or copies of the Program or any portion of
it, and copy and distribute such modifications under the terms of Paragraph
1 above, provided that you also do the following:

    a) cause the modified files to carry prominent notices stating that
    you changed the files and the date of any change; and

    b) cause the whole of any work that you distribute or publish, that
    in whole or in part contains the Program or any part thereof, either
    with or without modifications, to be licensed at no charge to all
    third parties under the terms of this General Public License (except
    that you may choose to grant warranty protection to some or all
    third parties, at your option).

    c) If the modified program normally reads commands interactively when
    run, you must cause it, when started running for such interactive use
    in the simplest and most usual way, to print or display an
    announcement including an appropriate copyright notice and a notice
    that there is no warranty (or else, saying that you provide a
    warranty) and that users may redistribute the program under these
    conditions, and telling the user how to view a copy of this General
    Public License.

    d) You may charge a fee for the physical act of transferring a
    copy, and you may at your option offer warranty protection in
    exchange for a fee.

Mere aggregation of another independent work with the Program (or its
derivative) on a volume of a storage or distribution medium does not bring
the other work under the scope of these terms.

  3. You may copy and distribute the Program (or a portion or derivative of
it, under Paragraph 2) in object code or executable form under the terms of
Paragraphs 1 and 2 above provided that you also do one of the following:

    a) accompany it with the complete corresponding machine-readable
    source code, which must be distributed under the terms of
    Paragraphs 1 and 2 above; or,

    b) accompany it with a written offer, valid for at least three
    years, to give any third party free (except for a nominal charge
    for the cost of distribution) a complete machine-readable copy of the
    corresponding source code, to be distributed under the terms of
    Paragraphs 1 and 2 above; or,

    c) accompany it with the information you received as to where the
    corresponding source code may be obtained.  (This alternative is
    allowed only for noncommercial distribution and only if you
    received the program in object code or executable form alone.)

Source code for a work means the preferred form of the work for making
modifications to it.  For an executable file, complete source code means
all the source code for all modules it contains; but, as a special
exception, it need not include source code for modules which are standard
libraries that accompany the operating system on which the executable
file runs, or for standard header files or definitions files that
accompany that operating system.

  4. You may not copy, modify, sublicense, distribute or transfer the
Program except as expressly provided under this General Public License.
Any attempt otherwise to copy, modify, sublicense, distribute or transfer
the Program is void, and will automatically terminate your rights to use
the Program under this License.  However, parties who have received
copies, or rights to use copies, from you under this General Public
License will not have their licenses terminated so long as such parties
remain in full compliance.

  5. By copying, distributing or modifying the Program (or any work based
on the Program) you indicate your acceptance of this license to do so,
and all its terms and conditions.

  6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the original
licensor to copy, distribute or modify the Program subject to these
terms and conditions.  You may not impose any further restrictions on the
recipients' exercise of the rights granted herein.

  7. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time.  Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

Each version is given a distinguishing version number.  If the Program
specifies a version number of the license which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation.  If the Program does not specify a version number of
the license, you may choose any version ever published by the Free Software
Foundation.

  8. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission.  For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this.  Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.

                            NO WARRANTY

  9. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.

  10. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.

                     END OF TERMS AND CONDITIONS

        Appendix: How to Apply These Terms to Your New Programs

  If you develop a new program, and you want it to be of the greatest
possible use to humanity, the best way to achieve this is to make it
free software which everyone can redistribute and change under these
terms.

  To do so, attach the following notices to the program.  It is safest to
attach them to the start of each source file to most effectively convey
the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) 19yy  <name of author>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 1, or (at your option)
    any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA  02110-1301 USA


Also add information on how to contact you by electronic and paper mail.

If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:

    Gnomovision version 69, Copyright (C) 19xx name of author
    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
    This is free software, and you are welcome to redistribute it
    under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the
appropriate parts of the General Public License.  Of course, the
commands you use may be called something other than `show w' and `show
c'; they could even be mouse-clicks or menu items--whatever suits your
program.

You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary.  Here a sample; alter the names:

  Yoyodyne, Inc., hereby disclaims all copyright interest in the
  program `Gnomovision' (a program to direct compilers to make passes
  at assemblers) written by James Hacker.

  <signature of Ty Coon>, 1 April 1989
  Ty Coon, President of Vice

That's all there is to it!


--- The Artistic License 1.0 ---

This software is Copyright (c) 2018 by Rui Meira.

This is free software, licensed under:

  The Artistic License 1.0

The Artistic License

Preamble

The intent of this document is to state the conditions under which a Package
may be copied, such that the Copyright Holder maintains some semblance of
artistic control over the development of the package, while giving the users of
the package the right to use and distribute the Package in a more-or-less
customary fashion, plus the right to make reasonable modifications.

Definitions:

  - "Package" refers to the collection of files distributed by the Copyright
    Holder, and derivatives of that collection of files created through
    textual modification. 
  - "Standard Version" refers to such a Package if it has not been modified,
    or has been modified in accordance with the wishes of the Copyright
    Holder. 
  - "Copyright Holder" is whoever is named in the copyright or copyrights for
    the package. 
  - "You" is you, if you're thinking about copying or distributing this Package.
  - "Reasonable copying fee" is whatever you can justify on the basis of media
    cost, duplication charges, time of people involved, and so on. (You will
    not be required to justify it to the Copyright Holder, but only to the
    computing community at large as a market that must bear the fee.) 
  - "Freely Available" means that no fee is charged for the item itself, though
    there may be fees involved in handling the item. It also means that
    recipients of the item may redistribute it under the same conditions they
    received it. 

1. You may make and give away verbatim copies of the source form of the
Standard Version of this Package without restriction, provided that you
duplicate all of the original copyright notices and associated disclaimers.

2. You may apply bug fixes, portability fixes and other modifications derived
from the Public Domain or from the Copyright Holder. A Package modified in such
a way shall still be considered the Standard Version.

3. You may otherwise modify your copy of this Package in any way, provided that
you insert a prominent notice in each changed file stating how and when you
changed that file, and provided that you do at least ONE of the following:

  a) place your modifications in the Public Domain or otherwise make them
     Freely Available, such as by posting said modifications to Usenet or an
     equivalent medium, or placing the modifications on a major archive site
     such as ftp.uu.net, or by allowing the Copyright Holder to include your
     modifications in the Standard Version of the Package.

  b) use the modified Package only within your corporation or organization.

  c) rename any non-standard executables so the names do not conflict with
     standard executables, which must also be provided, and provide a separate
     manual page for each non-standard executable that clearly documents how it
     differs from the Standard Version.

  d) make other distribution arrangements with the Copyright Holder.

4. You may distribute the programs of this Package in object code or executable
form, provided that you do at least ONE of the following:

  a) distribute a Standard Version of the executables and library files,
     together with instructions (in the manual page or equivalent) on where to
     get the Standard Version.

  b) accompany the distribution with the machine-readable source of the Package
     with your modifications.

  c) accompany any non-standard executables with their corresponding Standard
     Version executables, giving the non-standard executables non-standard
     names, and clearly documenting the differences in manual pages (or
     equivalent), together with instructions on where to get the Standard
     Version.

  d) make other distribution arrangements with the Copyright Holder.

5. You may charge a reasonable copying fee for any distribution of this
Package.  You may charge any fee you choose for support of this Package. You
may not charge a fee for this Package itself. However, you may distribute this
Package in aggregate with other (possibly commercial) programs as part of a
larger (possibly commercial) software distribution provided that you do not
advertise this Package as a product of your own.

6. The scripts and library files supplied as input to or produced as output
from the programs of this Package do not automatically fall under the copyright
of this Package, but belong to whomever generated them, and may be sold
commercially, and may be aggregated with this Package.

7. C or perl subroutines supplied by you and linked into this Package shall not
be considered part of this Package.

8. The name of the Copyright Holder may not be used to endorse or promote
products derived from this software without specific prior written permission.

9. THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF
MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.

The End

MANIFEST  view on Meta::CPAN

# This file was automatically generated by Dist::Zilla::Plugin::Manifest v6.010.
Build.PL
C/config.h
C/nn.c
C/nn.h
Changes
LICENSE
MANIFEST
META.json
META.yml
README
README.md
XS/ML.xs.inc
dist.ini
inc/MyBuilder.pm
lib/AI/ML.pm
lib/AI/ML/Expr.pm
lib/AI/ML/LinearRegression.pm
lib/AI/ML/LogisticRegression.pm
lib/AI/ML/NeuralNetwork.pm
run.sh
scripts/load_data.c
scripts/mnist.pl
t/00-report-prereqs.dd
t/00-report-prereqs.t
t/01-activation-funcs.t
t/02-cost_functions.t
t/03-mini-batch.t
t/04-linear-regression.t
t/05-logistic-regression.t
t/06-accuracy-precision-recall-f1.t
t/07-neural-network.t
t/08-gradient-checking.t_
t/W1_grad_check.csv
t/W2_grad_check.csv
t/W3_grad_check.csv
t/b1_grad_check.csv
t/b2_grad_check.csv
t/b3_grad_check.csv
t/dW1_grad_check.csv
t/dW2_grad_check.csv
t/dW3_grad_check.csv
t/dataset_adjetivos.csv
t/db1_grad_check.csv
t/db2_grad_check.csv
t/db3_grad_check.csv
t/logistic.csv
t/pred-nn.csv
t/w1.csv
t/w2.csv
t/x.csv
t/x_grad_check.csv
t/y.csv
t/y_grad_check.csv
xt/author/00-compile.t
xt/author/pod-syntax.t

META.json  view on Meta::CPAN

{
   "abstract" : "Perl interface to ML",
   "author" : [
      "Rui Meira <ruimiguelcm96@gmail.com>"
   ],
   "dynamic_config" : 0,
   "generated_by" : "Dist::Zilla version 6.010, CPAN::Meta::Converter version 2.150010",
   "license" : [
      "perl_5"
   ],
   "meta-spec" : {
      "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec",
      "version" : 2
   },
   "name" : "AI-ML",
   "no_index" : {
      "directory" : [
         "eg",
         "examples",
         "inc",
         "share",
         "t",
         "xt"
      ]
   },
   "prereqs" : {
      "build" : {
         "requires" : {
            "Module::Build" : "0.28"
         }
      },
      "configure" : {
         "requires" : {
            "Module::Build" : "0.28"
         }
      },
      "develop" : {
         "requires" : {
            "File::Spec" : "0",
            "IO::Handle" : "0",
            "IPC::Open3" : "0",
            "Test::More" : "0",
            "Test::Pod" : "1.41"
         }
      },
      "test" : {
         "recommends" : {
            "CPAN::Meta" : "2.120900"
         },
         "requires" : {
            "ExtUtils::MakeMaker" : "0",
            "File::Spec" : "0",
            "Test2::V0" : "0.000060",
            "Test::More" : "0"
         }
      }
   },
   "provides" : {
      "AI::ML" : {
         "file" : "lib/AI/ML.pm",
         "version" : "0.001"
      },
      "AI::ML::Expr" : {
         "file" : "lib/AI/ML/Expr.pm",
         "version" : "0.001"
      },
      "AI::ML::LinearRegression" : {
         "file" : "lib/AI/ML/LinearRegression.pm",
         "version" : "0.001"
      },
      "AI::ML::LogisticRegression" : {
         "file" : "lib/AI/ML/LogisticRegression.pm",
         "version" : "0.001"
      },
      "AI::ML::NeuralNetwork" : {
         "file" : "lib/AI/ML/NeuralNetwork.pm",
         "version" : "0.001"
      }
   },
   "release_status" : "stable",
   "version" : "0.001",
   "x_Dist_Zilla" : {
      "perl" : {
         "version" : "5.026001"
      },
      "plugins" : [
         {
            "class" : "Dist::Zilla::Plugin::MetaYAML",
            "name" : "@Starter/MetaYAML",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::MetaJSON",
            "name" : "@Starter/MetaJSON",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::License",
            "name" : "@Starter/License",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::ReadmeAnyFromPod",
            "config" : {
               "Dist::Zilla::Role::FileWatcher" : {
                  "version" : "0.006"
               }
            },
            "name" : "@Starter/ReadmeAnyFromPod",
            "version" : "0.163250"
         },
         {
            "class" : "Dist::Zilla::Plugin::PodSyntaxTests",
            "name" : "@Starter/PodSyntaxTests",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::Test::ReportPrereqs",
            "name" : "@Starter/Test::ReportPrereqs",
            "version" : "0.027"
         },
         {
            "class" : "Dist::Zilla::Plugin::Test::Compile",
            "config" : {
               "Dist::Zilla::Plugin::Test::Compile" : {
                  "bail_out_on_fail" : 0,
                  "fail_on_warning" : "author",
                  "fake_home" : 0,
                  "filename" : "xt/author/00-compile.t",
                  "module_finder" : [
                     ":InstallModules"
                  ],
                  "needs_display" : 0,
                  "phase" : "develop",
                  "script_finder" : [
                     ":PerlExecFiles"
                  ],
                  "skips" : [],
                  "switch" : []
               }
            },
            "name" : "@Starter/Test::Compile",
            "version" : "2.058"
         },
         {
            "class" : "Dist::Zilla::Plugin::Manifest",
            "name" : "@Starter/Manifest",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::PruneCruft",
            "name" : "@Starter/PruneCruft",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::ManifestSkip",
            "name" : "@Starter/ManifestSkip",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::RunExtraTests",
            "config" : {
               "Dist::Zilla::Role::TestRunner" : {
                  "default_jobs" : 1
               }
            },
            "name" : "@Starter/RunExtraTests",
            "version" : "0.029"
         },
         {
            "class" : "Dist::Zilla::Plugin::TestRelease",
            "name" : "@Starter/TestRelease",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::ConfirmRelease",
            "name" : "@Starter/ConfirmRelease",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::UploadToCPAN",
            "name" : "@Starter/UploadToCPAN",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::MetaConfig",
            "name" : "@Starter/MetaConfig",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::MetaNoIndex",
            "name" : "@Starter/MetaNoIndex",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::MetaProvides::Package",
            "config" : {
               "Dist::Zilla::Plugin::MetaProvides::Package" : {
                  "finder_objects" : [
                     {
                        "class" : "Dist::Zilla::Plugin::FinderCode",
                        "name" : "@Starter/MetaProvides::Package/AUTOVIV/:InstallModulesPM",
                        "version" : "6.010"
                     }
                  ],
                  "include_underscores" : 0
               },
               "Dist::Zilla::Role::MetaProvider::Provider" : {
                  "$Dist::Zilla::Role::MetaProvider::Provider::VERSION" : "2.002004",
                  "inherit_missing" : 1,
                  "inherit_version" : 1,
                  "meta_noindex" : 1
               },
               "Dist::Zilla::Role::ModuleMetadata" : {
                  "Module::Metadata" : "1.000033",
                  "version" : "0.006"
               }
            },
            "name" : "@Starter/MetaProvides::Package",
            "version" : "2.004003"
         },
         {
            "class" : "Dist::Zilla::Plugin::ShareDir",
            "name" : "@Starter/ShareDir",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::ExecDir",
            "name" : "@Starter/ExecDir",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::ModuleBuild",
            "config" : {
               "Dist::Zilla::Role::TestRunner" : {
                  "default_jobs" : 1
               }
            },
            "name" : "ModuleBuild",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::Prereqs",
            "config" : {
               "Dist::Zilla::Plugin::Prereqs" : {
                  "phase" : "test",
                  "type" : "requires"
               }
            },
            "name" : "TestRequires",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::Git::GatherDir",
            "config" : {
               "Dist::Zilla::Plugin::GatherDir" : {
                  "exclude_filename" : [],
                  "exclude_match" : [],
                  "follow_symlinks" : 0,
                  "include_dotfiles" : 0,
                  "prefix" : "",
                  "prune_directory" : [],
                  "root" : "."
               },
               "Dist::Zilla::Plugin::Git::GatherDir" : {
                  "include_untracked" : 0
               }
            },
            "name" : "Git::GatherDir",
            "version" : "2.045"
         },
         {
            "class" : "Dist::Zilla::Plugin::FinderCode",
            "name" : ":InstallModules",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::FinderCode",
            "name" : ":IncModules",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::FinderCode",
            "name" : ":TestFiles",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::FinderCode",
            "name" : ":ExtraTestFiles",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::FinderCode",
            "name" : ":ExecFiles",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::FinderCode",
            "name" : ":PerlExecFiles",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::FinderCode",
            "name" : ":ShareFiles",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::FinderCode",
            "name" : ":MainModule",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::FinderCode",
            "name" : ":AllFiles",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::FinderCode",
            "name" : ":NoFiles",
            "version" : "6.010"
         },
         {
            "class" : "Dist::Zilla::Plugin::FinderCode",
            "name" : "@Starter/MetaProvides::Package/AUTOVIV/:InstallModulesPM",
            "version" : "6.010"
         }
      ],
      "zilla" : {
         "class" : "Dist::Zilla::Dist::Builder",
         "config" : {
            "is_trial" : 0
         },
         "version" : "6.010"
      }
   },
   "x_serialization_backend" : "Cpanel::JSON::XS version 3.0239"
}

META.yml  view on Meta::CPAN

---
abstract: 'Perl interface to ML'
author:
  - 'Rui Meira <ruimiguelcm96@gmail.com>'
build_requires:
  ExtUtils::MakeMaker: '0'
  File::Spec: '0'
  Module::Build: '0.28'
  Test2::V0: '0.000060'
  Test::More: '0'
configure_requires:
  Module::Build: '0.28'
dynamic_config: 0
generated_by: 'Dist::Zilla version 6.010, CPAN::Meta::Converter version 2.150010'
license: perl
meta-spec:
  url: http://module-build.sourceforge.net/META-spec-v1.4.html
  version: '1.4'
name: AI-ML
no_index:
  directory:
    - eg
    - examples
    - inc
    - share
    - t
    - xt
provides:
  AI::ML:
    file: lib/AI/ML.pm
    version: '0.001'
  AI::ML::Expr:
    file: lib/AI/ML/Expr.pm
    version: '0.001'
  AI::ML::LinearRegression:
    file: lib/AI/ML/LinearRegression.pm
    version: '0.001'
  AI::ML::LogisticRegression:
    file: lib/AI/ML/LogisticRegression.pm
    version: '0.001'
  AI::ML::NeuralNetwork:
    file: lib/AI/ML/NeuralNetwork.pm
    version: '0.001'
version: '0.001'
x_Dist_Zilla:
  perl:
    version: '5.026001'
  plugins:
    -
      class: Dist::Zilla::Plugin::MetaYAML
      name: '@Starter/MetaYAML'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::MetaJSON
      name: '@Starter/MetaJSON'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::License
      name: '@Starter/License'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::ReadmeAnyFromPod
      config:
        Dist::Zilla::Role::FileWatcher:
          version: '0.006'
      name: '@Starter/ReadmeAnyFromPod'
      version: '0.163250'
    -
      class: Dist::Zilla::Plugin::PodSyntaxTests
      name: '@Starter/PodSyntaxTests'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::Test::ReportPrereqs
      name: '@Starter/Test::ReportPrereqs'
      version: '0.027'
    -
      class: Dist::Zilla::Plugin::Test::Compile
      config:
        Dist::Zilla::Plugin::Test::Compile:
          bail_out_on_fail: '0'
          fail_on_warning: author
          fake_home: 0
          filename: xt/author/00-compile.t
          module_finder:
            - ':InstallModules'
          needs_display: 0
          phase: develop
          script_finder:
            - ':PerlExecFiles'
          skips: []
          switch: []
      name: '@Starter/Test::Compile'
      version: '2.058'
    -
      class: Dist::Zilla::Plugin::Manifest
      name: '@Starter/Manifest'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::PruneCruft
      name: '@Starter/PruneCruft'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::ManifestSkip
      name: '@Starter/ManifestSkip'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::RunExtraTests
      config:
        Dist::Zilla::Role::TestRunner:
          default_jobs: 1
      name: '@Starter/RunExtraTests'
      version: '0.029'
    -
      class: Dist::Zilla::Plugin::TestRelease
      name: '@Starter/TestRelease'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::ConfirmRelease
      name: '@Starter/ConfirmRelease'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::UploadToCPAN
      name: '@Starter/UploadToCPAN'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::MetaConfig
      name: '@Starter/MetaConfig'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::MetaNoIndex
      name: '@Starter/MetaNoIndex'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::MetaProvides::Package
      config:
        Dist::Zilla::Plugin::MetaProvides::Package:
          finder_objects:
            -
              class: Dist::Zilla::Plugin::FinderCode
              name: '@Starter/MetaProvides::Package/AUTOVIV/:InstallModulesPM'
              version: '6.010'
          include_underscores: 0
        Dist::Zilla::Role::MetaProvider::Provider:
          $Dist::Zilla::Role::MetaProvider::Provider::VERSION: '2.002004'
          inherit_missing: '1'
          inherit_version: '1'
          meta_noindex: '1'
        Dist::Zilla::Role::ModuleMetadata:
          Module::Metadata: '1.000033'
          version: '0.006'
      name: '@Starter/MetaProvides::Package'
      version: '2.004003'
    -
      class: Dist::Zilla::Plugin::ShareDir
      name: '@Starter/ShareDir'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::ExecDir
      name: '@Starter/ExecDir'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::ModuleBuild
      config:
        Dist::Zilla::Role::TestRunner:
          default_jobs: 1
      name: ModuleBuild
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::Prereqs
      config:
        Dist::Zilla::Plugin::Prereqs:
          phase: test
          type: requires
      name: TestRequires
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::Git::GatherDir
      config:
        Dist::Zilla::Plugin::GatherDir:
          exclude_filename: []
          exclude_match: []
          follow_symlinks: 0
          include_dotfiles: 0
          prefix: ''
          prune_directory: []
          root: .
        Dist::Zilla::Plugin::Git::GatherDir:
          include_untracked: 0
      name: Git::GatherDir
      version: '2.045'
    -
      class: Dist::Zilla::Plugin::FinderCode
      name: ':InstallModules'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::FinderCode
      name: ':IncModules'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::FinderCode
      name: ':TestFiles'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::FinderCode
      name: ':ExtraTestFiles'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::FinderCode
      name: ':ExecFiles'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::FinderCode
      name: ':PerlExecFiles'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::FinderCode
      name: ':ShareFiles'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::FinderCode
      name: ':MainModule'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::FinderCode
      name: ':AllFiles'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::FinderCode
      name: ':NoFiles'
      version: '6.010'
    -
      class: Dist::Zilla::Plugin::FinderCode
      name: '@Starter/MetaProvides::Package/AUTOVIV/:InstallModulesPM'
      version: '6.010'
  zilla:
    class: Dist::Zilla::Dist::Builder
    config:
      is_trial: '0'
    version: '6.010'
x_serialization_backend: 'YAML::Tiny version 1.70'

README.md  view on Meta::CPAN

This Perl module, AI::ML, implements some machine learning algorithms.

XS/ML.xs.inc  view on Meta::CPAN

#include "EXTERN.h"
#include "perl.h"
#include "XSUB.h"
#include "C/nn.h"

MODULE = AI::ML 	PACKAGE = AI::ML::Expr

SV *
_sigmoid(a)
	unsigned long a;

	PREINIT:
		Matrix *m;

	CODE:
		/* 'a' is a Matrix* smuggled from Perl as an unsigned integer handle;
		 * apply element-wise sigmoid and return the result pointer the same way */
		m = matrix_sigmoid((Matrix*)a);
		RETVAL = newSVuv((unsigned long)m);
	
	OUTPUT:
		RETVAL

SV *
_tanh(a)
	unsigned long a;

	PREINIT:
		Matrix *m;

	CODE:
		/* element-wise hyperbolic tangent over the matrix handle */
		m = matrix_tanh((Matrix*)a);
		RETVAL = newSVuv((unsigned long)m);

	OUTPUT:
		RETVAL

SV *
_d_tanh(a)
	unsigned long a;

	PREINIT:
		Matrix *m;

	CODE:
		/* element-wise derivative of tanh over the matrix handle */
		m = matrix_d_tanh((Matrix*)a);
		RETVAL = newSVuv((unsigned long)m);

	OUTPUT:
		RETVAL

SV *
_relu(a)
    	unsigned long a;

	PREINIT:	
		Matrix* m;

	CODE:
		/* element-wise ReLU; matrix passed and returned as raw pointer handles */
		m = matrix_ReLU((Matrix*)a);
		RETVAL = newSVuv((unsigned long)m);
	
	OUTPUT:
		RETVAL

SV *
_d_relu(a)
    	unsigned long a;

	PREINIT:	
		Matrix* m;

	CODE:
		/* element-wise derivative of ReLU */
		m = matrix_d_ReLU((Matrix*)a);
		RETVAL = newSVuv((unsigned long)m);
	
	OUTPUT:
		RETVAL

SV *
_lrelu(a, v)
    	unsigned long a;
    	REAL v;

	PREINIT:	
		Matrix* m;

	CODE:
		/* leaky ReLU with slope 'v' for negative inputs.
		 * Note: REAL is textually rewritten to float by inc/MyBuilder.pm
		 * (update_XS) before xsubpp processes this file. */
		m = matrix_LReLU((Matrix*)a, v);
		RETVAL = newSVuv((unsigned long)m);
	
	OUTPUT:
		RETVAL

SV *
_d_lrelu(a, v)
    	unsigned long a;
    	REAL v;

	PREINIT:	
		Matrix* m;

	CODE:
		/* derivative of leaky ReLU with slope 'v' */
		m = matrix_d_LReLU((Matrix*)a, v);
		RETVAL = newSVuv((unsigned long)m);
	
	OUTPUT:
		RETVAL

SV *
_predict_binary_classification(a, v)
    	unsigned long a;
    	REAL v;

	PREINIT:	
		Matrix* m;

	CODE:
		/* threshold probabilities in 'a' at 'v' to produce class predictions */
		m = predict_binary_classification((Matrix*)a, v);
		RETVAL = newSVuv((unsigned long)m);
	
	OUTPUT:
		RETVAL

		
SV *
_softmax(a)
    	unsigned long a;

	PREINIT:	
		Matrix* m;

	CODE:
		/* softmax over the matrix handle (C implementation) */
		m = matrix_softmax((Matrix*)a);
		RETVAL = newSVuv((unsigned long)m);
	
	OUTPUT:
		RETVAL

SV *
_d_softmax(a)
    	unsigned long a;

	PREINIT:	
		Matrix* m;

	CODE:
		/* derivative of softmax over the matrix handle */
		m = matrix_d_softmax((Matrix*)a);
		RETVAL = newSVuv((unsigned long)m);
	
	OUTPUT:
        RETVAL

SV *
_d_sigmoid(a)
    	unsigned long a;
    	
	PREINIT:	
		Matrix* m;

	CODE:
		/* element-wise derivative of sigmoid */
		m = matrix_d_sigmoid((Matrix*)a);
		RETVAL = newSVuv((unsigned long)m);
	
	OUTPUT:	
		RETVAL

SV *
_sigmoid_cost(x, y, w)
	unsigned long x;
	unsigned long y;
	unsigned long w;

	CODE:
		/* scalar cost value, so an NV (double) SV is the correct return type */
        	RETVAL = newSVnv(sigmoid_cost((Matrix*)x, (Matrix*)y, (Matrix*)w));

	OUTPUT:
		RETVAL

SV *
_mini_batch(m, start, s, axis)
    unsigned long m;
    int start;
    int s;
    int axis;

    PREINIT:
        Matrix* r;

    CODE:
        /* slice 's' rows/columns starting at 'start' along 'axis' */
        r = mini_batch((Matrix*)m, start, s, axis);
        /* BUG FIX: was newSVnv((unsigned long)r) — storing a pointer in an
         * NV (double) can silently lose precision for addresses > 2^53.
         * Use newSVuv, as every other matrix-returning XSUB here does. */
        RETVAL = newSVuv((unsigned long)r);

    OUTPUT:
        RETVAL

SV *
_accuracy(y, yatt)
	unsigned long y;
	unsigned long yatt;

	CODE:
		/* classification metrics return a scalar double, hence newSVnv */
		RETVAL = newSVnv(accuracy((Matrix*)y, (Matrix*)yatt));
	OUTPUT:
		RETVAL


SV *
_precision(y, yatt)
	unsigned long y;
	unsigned long yatt;

	CODE:
		RETVAL = newSVnv(precision((Matrix*)y, (Matrix*)yatt));
	OUTPUT:
		RETVAL


SV *
_recall(y, yatt)
	unsigned long y;
	unsigned long yatt;

	CODE:
		RETVAL = newSVnv(recall((Matrix*)y, (Matrix*)yatt));
	OUTPUT:
		RETVAL


SV *
_f1(y, yatt)
	unsigned long y;
	unsigned long yatt;

	CODE:
		RETVAL = newSVnv(f1((Matrix*)y, (Matrix*)yatt));
	OUTPUT:
		RETVAL


dist.ini  view on Meta::CPAN

name    = AI-ML
author  = Rui Meira <ruimiguelcm96@gmail.com>
license = Perl_5
copyright_holder = Rui Meira
copyright_year   = 2018

version = 0.001

[@Starter]
-remove = MakeMaker
-remove = GatherDir

;[PodCoverageTests]

[ModuleBuild]
mb_class = MyBuilder

[Prereqs / TestRequires]
Test2::V0 = 0.000060

[Git::GatherDir]
exclude_match = ^_

inc/MyBuilder.pm  view on Meta::CPAN

package MyBuilder;
use base 'Module::Build';

use warnings;
use strict;

use Config;
use ExtUtils::ParseXS;
use ExtUtils::Mkbootstrap;

use Path::Tiny;

my $EXTRA_O_FLAGS = "";
my $EXTRA_FLAGS = "-lblas -llapack";

# Build action: rewrite the XS template, compile the C sources and the XS
# glue, then fall through to Module::Build's stock code action.
sub ACTION_code {
    my $self = shift;

    # Default is double precision (-DUSE_REAL); the --with-float build
    # argument leaves REAL defined as float in C/config.h.
    if (!exists $self->args->{'with-float'}) {
        $EXTRA_O_FLAGS .= " -DUSE_REAL";
    }

    $self->update_XS("XS/ML.xs.inc");

    $self->dispatch($_) for qw(create_objects compile_xs);

    $self->SUPER::ACTION_code;
}

# Generate XS/ML.xs from XS/ML.xs.inc by expanding the REAL macro.
# $file is the ".inc" template; the output path is $file minus that suffix.
# NOTE(review): REAL is always rewritten to "float" here, even though
# ACTION_code passes -DUSE_REAL (=> double) to the C compiler by default —
# TODO confirm this mismatch is intended.
sub update_XS {
    my ($self, $file) = @_;
    my $output = $file;
    $output =~ s/\.inc$//;

    # Include the file names in the error messages so a failed build
    # points at the actual path instead of a bare errno string.
    open my $i_fh, "<", $file   or die "Cannot read $file: $!";
    open my $o_fh, ">", $output or die "Cannot write $output: $!";
    while (my $line = <$i_fh>) {
        $line =~ s/REAL/float/g;
        print {$o_fh} $line;
    }
    # Check close on the write handle: buffered write errors surface here.
    close $o_fh or die "Cannot close $output: $!";
    close $i_fh;
}

# Compile every C source under C/ into an object file, skipping files whose
# object is already newer than the source.
sub ACTION_create_objects {
    my $self = shift;
    my $builder = $self->cbuilder;

    for my $source (@{ $self->rscan_dir("C", qr/\.c$/) }) {
        (my $object = $source) =~ s/\.c$/.o/;
        next if $self->up_to_date($source, $object);
        $builder->compile(
            source               => $source,
            object_file          => $object,
            include_dirs         => ["."],
            extra_compiler_flags => $EXTRA_O_FLAGS,
        );
    }
}

# Build the XS glue: XS -> C -> object, create the bootstrap file, then link
# every object (C/ plus the XS object) into blib's shared library.
sub ACTION_compile_xs {
    my $self = shift;
    my $cbuilder = $self->cbuilder;

    # Target directory for the compiled artifacts inside blib.
    my $archdir = path($self->blib, "arch", "auto", "AI", "ML");
    $archdir->mkpath unless -d $archdir;

    my $xs = path("XS", "ML.xs");
    my $xs_c = path("XS", "ML.c");

    # 1) Translate XS to C only when ML.xs changed since the last build.
    if (!$self->up_to_date($xs, $xs_c)) {
        ExtUtils::ParseXS::process_file(
            filename => $xs->stringify, 
            prototypes => 0,
            output => $xs_c->stringify
        );
    }

    # 2) Compile the generated C into an object file.
    my $xs_o = path("XS", "ML.o");
    if (!$self->up_to_date($xs_c, $xs_o)) {
        $cbuilder->compile(
            source => $xs_c,
            extra_compiler_flags => $EXTRA_O_FLAGS,
            include_dirs => ["."], 
            object_file => $xs_o
        );
    }
    # 3) Create the bootstrap file; Mkbootstrap may produce nothing, so touch
    #    an empty ML.bs to guarantee it exists.
    my $bs_file = path( $archdir, "ML.bs");
    if (!$self->up_to_date($xs_o, $bs_file) ) {
        ExtUtils::Mkbootstrap::Mkbootstrap($bs_file);
        if (!-f $bs_file) {
            $bs_file->touch;
        }
    }

    # 4) Link all objects into the loadable library, pulling in BLAS/LAPACK
    #    via $EXTRA_FLAGS (-lblas -llapack).
    my $objects = $self->rscan_dir("C", qr/\.o$/);
    push @$objects, $xs_o;
    my $lib_file = path($archdir, "ML.$Config{dlext}");
    if (!$self->up_to_date( $objects, $lib_file )) {
        $cbuilder->link(
            module_name => 'AI::ML',
            extra_linker_flags => $EXTRA_FLAGS,
            objects => $objects,
            lib_file => $lib_file,
        );
    }
}

1;

lib/AI/ML.pm  view on Meta::CPAN

# ABSTRACT: Perl interface to ML
use strict;
use warnings;
package AI::ML;

use parent 'DynaLoader';
use Math::Lapack;
# Load the compiled XS/C library (activation functions, metrics, ...).
bootstrap AI::ML;
#sub dl_load_flags { 1 }
1;

lib/AI/ML/Expr.pm  view on Meta::CPAN

# ABSTRACT: turns baubles into trinkets

package AI::ML::Expr;
use strict;
use warnings;

use Chart::Gnuplot;
use Scalar::Util 'blessed';
use AI::ML;
use Math::Lapack;
use aliased 'Math::Lapack::Matrix' => 'M';


use parent 'Exporter';
use parent 'Math::Lapack::Expr';
our @EXPORT = qw(mini_batch tanh sigmoid relu lrelu d_sigmoid d_relu d_lrelu d_tanh softmax sigmoid_cost plot plot_cost);
use Math::Lapack::Expr;

# Wrap a raw XS matrix handle in a Math::Lapack::Matrix object.
sub _bless {
    my ($handle) = @_;
    my %obj = ( _matrix => $handle, type => 'matrix' );
    return bless \%obj => "Math::Lapack::Matrix";
}

=head2 sigmoid

Applies the sigmoid function to every element of the matrix.

    $m = $m->sigmoid();
    $m = sigmoid($m);

=cut

# Build a lazy expression node of type 'sigmoid'; the element-wise work
# happens later in eval_sigmoid().
sub sigmoid {
    my ($expr) = @_;
    my %node = (
        package => __PACKAGE__,
        type    => 'sigmoid',
        args    => [$expr],
    );
    return bless \%node => __PACKAGE__;
}

# Evaluate a deferred 'sigmoid' node via the XS helper; dies on non-matrix.
sub eval_sigmoid {
    my ($node) = @_;
    return _bless _sigmoid($node->matrix_id)
        if blessed($node) && $node->isa("Math::Lapack::Matrix");

    die "Sigmoid for non matrix: " . ref($node);
}

=head2 relu

Allows apply the function relu to every element of the matrix.

    $m = $m->relu();
    $m = relu($m);

=cut

# Build a lazy expression node of type 'relu'; evaluated by eval_relu().
sub relu {
    my ($expr) = @_;
    my %node = ( package => __PACKAGE__, type => 'relu', args => [$expr] );
    return bless \%node => __PACKAGE__;
}

# Evaluate a deferred 'relu' node via the XS helper; dies on non-matrix.
sub eval_relu {
    my ($node) = @_;
    die "ReLU for non matrix" if ref($node) ne "Math::Lapack::Matrix";
    return _bless _relu($node->matrix_id);
}

=head2 d_relu

Allows apply the function d_relu to every element of the matrix.

    $m = $m->d_relu();
    $m = d_relu($m);

=cut

# Build a lazy expression node of type 'd_relu' (derivative of ReLU).
sub d_relu {
    my ($expr) = @_;
    my %node = ( package => __PACKAGE__, type => 'd_relu', args => [$expr] );
    return bless \%node => __PACKAGE__;
}

# Evaluate a deferred 'd_relu' node via the XS helper; dies on non-matrix.
sub eval_d_relu {
    my ($node) = @_;
    die "ReLU for non matrix" if ref($node) ne "Math::Lapack::Matrix";
    return _bless _d_relu($node->matrix_id);
}

=head2 lrelu

Allows apply the function lrelu to every element of the matrix.

    $m = lrelu($m, 0.0001);
    $m = $m->lrelu(0.1);

=cut

# Build a lazy 'lrelu' node carrying the negative-side slope $alpha.
sub lrelu {
    my ($expr, $alpha) = @_;
    my %node = ( package => __PACKAGE__, type => 'lrelu', args => [$expr, $alpha] );
    return bless \%node => __PACKAGE__;
}

# Evaluate a deferred 'lrelu' node with slope $alpha; dies on non-matrix.
sub eval_lrelu {
    my ($node, $alpha) = @_;
    die "lReLU for non matrix" if ref($node) ne "Math::Lapack::Matrix";
    return _bless _lrelu($node->matrix_id, $alpha);
}

=head2 d_lrelu

Allows apply the function d_lrelu to every element of the matrix.

    $m = d_lrelu($m, 0.0001);
    $m = $m->d_lrelu(0.1);

=cut

# Build a lazy 'd_lrelu' node (derivative of leaky ReLU, slope $alpha).
sub d_lrelu {
    my ($expr, $alpha) = @_;
    my %node = ( package => __PACKAGE__, type => 'd_lrelu', args => [$expr, $alpha] );
    return bless \%node => __PACKAGE__;
}

# Evaluate a deferred 'd_lrelu' node with slope $alpha; dies on non-matrix.
sub eval_d_lrelu {
    my ($node, $alpha) = @_;
    die "lReLU for non matrix" if ref($node) ne "Math::Lapack::Matrix";
    return _bless _d_lrelu($node->matrix_id, $alpha);
}


=head2 softmax
Allows apply the function softmax to every element of the matrix.

    $m = softmax($m);
    $m = $m->softmax();
=cut

# Build a lazy expression node of type 'softmax'; evaluated by eval_softmax().
sub softmax {
    my ($expr) = @_;
    my %node = ( package => __PACKAGE__, type => 'softmax', args => [$expr] );
    return bless \%node => __PACKAGE__;
}

# Evaluate a deferred 'softmax' node in pure matrix ops: subtract the max
# before exponentiating (numerical stability), then normalise by the sum
# along axis 1. Dies on non-matrix input.
sub eval_softmax {
    my ($node) = @_;
    die "softmax for non matrix" if ref($node) ne "Math::Lapack::Matrix";
    my $shifted = exp( $node - $node->max() );
    return $shifted / sum( $shifted, 1 );
}

=head2 d_softmax
Allows apply the function d_softmax to every element of the matrix.

    $m = d_softmax($m);
    $m = $m->d_softmax();
=cut

# Build a lazy expression node of type 'd_softmax' (derivative of softmax).
sub d_softmax {
    my ($expr) = @_;
    my %node = ( package => __PACKAGE__, type => 'd_softmax', args => [$expr] );
    return bless \%node => __PACKAGE__;
}

# Evaluate a deferred 'd_softmax' node via the XS helper; dies on non-matrix.
sub eval_d_softmax {
    my ($node) = @_;
    die "d_softmax for non matrix" if ref($node) ne "Math::Lapack::Matrix";
    return _bless _d_softmax($node->matrix_id);
}

=head2 tanh
Allows apply the function tanh to every element of the matrix.

    $m = tanh($m);
    $m = $m->tanh();

=cut
# Build a lazy expression node of type 'tanh'; evaluated by eval_tanh().
sub tanh {
    my ($expr) = @_;
    my %node = ( package => __PACKAGE__, type => 'tanh', args => [$expr] );
    return bless \%node => __PACKAGE__;
}

# Evaluate a deferred 'tanh' node via the XS helper; dies on non-matrix.
sub eval_tanh {
    my ($node) = @_;
    die "tanh for non matrix" if ref($node) ne "Math::Lapack::Matrix";
    return _bless _tanh($node->matrix_id);
}

=head2 d_tanh
Allows apply the function d_tanh to every element of the matrix.

    $m = d_tanh($m);
    $m = $m->d_tanh();

=cut
# Build a lazy expression node of type 'd_tanh' (derivative of tanh).
sub d_tanh {
    my ($expr) = @_;
    my %node = ( package => __PACKAGE__, type => 'd_tanh', args => [$expr] );
    return bless \%node => __PACKAGE__;
}

# Evaluate a deferred 'd_tanh' node via the XS helper; dies on non-matrix.
sub eval_d_tanh {
    my ($node) = @_;
    die "d_tanh for non matrix" if ref($node) ne "Math::Lapack::Matrix";
    return _bless _d_tanh($node->matrix_id);
}

 
=head2 d_sigmoid

Applies the derivative of the sigmoid function to every element of the matrix.

    $m = $m->d_sigmoid();
    $m = d_sigmoid($m);

=cut

# Build a lazy expression node of type 'd_sigmoid' (derivative of sigmoid).
sub d_sigmoid {
    my ($expr) = @_;
    my %node = ( package => __PACKAGE__, type => 'd_sigmoid', args => [$expr] );
    return bless \%node => __PACKAGE__;
}

# Evaluate a deferred 'd_sigmoid' node via the XS helper.
sub eval_d_sigmoid {
    my $tree = shift;
    if( ref($tree) eq "Math::Lapack::Matrix"){
        return _bless _d_sigmoid($tree->matrix_id);
    }
    # BUG FIX: this previously *returned* the error string instead of dying,
    # silently handing a plain string back to callers; die like every other
    # eval_* function in this module does.
    die "d_sigmoid for non matrix";
}

=head2 sigmoid_cost
Allows get the value of the cost of sigmoid function.

    put examples

=cut
# Delegate the sigmoid (logistic) cost computation to the XS helper,
# passing the raw matrix handles of inputs, labels and weights.
sub sigmoid_cost {
    my ($x, $y, $weights) = @_;
    my @handles = map { $_->matrix_id } ($x, $y, $weights);
    return _sigmoid_cost(@handles);
}


=head2 mini_batch

=cut
# Slice a mini-batch of $size elements starting at $start along $axis
# (axis defaults to 0 when omitted).
sub mini_batch {
    my ($self, $start, $size, $axis) = @_;
    $axis = 0 if !defined $axis;
    return _bless _mini_batch($self->matrix_id, $start, $size, $axis);
}


=head2 prediction

=cut
# Threshold the matrix into binary class predictions; the cut-off defaults
# to 0.50 and can be overridden with the 'threshold' option.
sub prediction {
    my ($self, %opts) = @_;
    my $threshold = exists $opts{threshold} ? $opts{threshold} : 0.50;
    return _bless _predict_binary_classification($self->matrix_id, $threshold);
}

=head2 precision

=cut
# Delegate precision($y, $yatt) to the XS implementation on the raw handles.
sub precision {
    my ($labels, $predicted) = @_;
    return _precision($labels->matrix_id, $predicted->matrix_id);
}


=head2 accuracy

=cut
# Delegate accuracy($y, $yatt) to the XS implementation on the raw handles.
sub accuracy {
    my ($labels, $predicted) = @_;
    return _accuracy($labels->matrix_id, $predicted->matrix_id);
}


=head2 recall 

=cut
# Delegate recall($y, $yatt) to the XS implementation on the raw handles.
sub recall {
    my ($labels, $predicted) = @_;
    return _recall($labels->matrix_id, $predicted->matrix_id);
}


=head2 f1

=cut
# Delegate the F1 score computation to the XS implementation.
sub f1 {
    my ($labels, $predicted) = @_;
    return _f1($labels->matrix_id, $predicted->matrix_id);
}



=head2 plot

=cut

# Render a scatter plot of ($x, $y) together with the fitted line
# theta0 + theta1*x into $file using Chart::Gnuplot.
sub plot {
    my ($x, $y, $theta, $file) = @_;
    my @xdata  = $x->vector_to_list();
    my @ydata  = $y->vector_to_list();
    my @thetas = $theta->vector_to_list();
    # Build the line equation as a gnuplot function string.
    my $f = $thetas[0] . "+" . $thetas[1] . "*x";

    #print STDERR "$_\n" for(@xdata);
    #rint STDERR "$_\n" for(@ydata);
    #print STDERR "$f\n";
    #print STDERR "\n\nFILE == $file\n\n";
    my $chart = Chart::Gnuplot->new(
            output     => $file,
            title     => "Nice one",
            xlabel     => "x",
            ylabel     => "y"
    );

    # The raw data as points, the fitted model as a function curve.
    my $points = Chart::Gnuplot::DataSet->new(
            xdata     => \@xdata,
            ydata     => \@ydata,
            style     => "points"
    );

    my $func = Chart::Gnuplot::DataSet->new(
            func     => $f
    );

    $chart->plot2d($points, $func);
}

=head2 plot_cost

=cut
# Plot the cost values collected per iteration (@costs) into PNG file $file.
sub plot_cost{
    my ($file, @costs) = @_;
    # X axis is simply the 1-based iteration number.
    my @iters = (1 .. scalar(@costs));

    my $chart = Chart::Gnuplot->new(
            output     => $file,
            title     => "Cost",
            xlabel  => "Iter",
            ylabel     => "Cost"
    );
    # Select the PNG terminal for the output file.
    $chart->png;
    my $data = Chart::Gnuplot::DataSet->new(
            xdata     => \@iters,
            ydata     => \@costs,
            style     => "linespoints"
    );
    $chart->plot2d($data);

}

1;

lib/AI/ML/LinearRegression.pm  view on Meta::CPAN

# ABSTRACT: turns baubles into trinkets

package AI::ML::LinearRegression;
use strict;
use warnings;

use Scalar::Util 'blessed';
use aliased 'Math::Lapack::Matrix' => 'M';
use Math::Lapack::Expr;
use parent 'AI::ML::Expr';

use Data::Dumper;

=head2 new

=cut
sub new {
    # Constructor. Options: gradient (use gradient descent instead of the
    # normal equation), lambda (L2 regularization), cost (file to plot the
    # cost curve to), plot (file to plot the fit to), n (iterations,
    # default 100), alpha (learning rate, default 0.1).
    my ($class, %opts) = @_;

    # FIX: bless into the invocant's package (was hard-coded to
    # 'AI::ML::LinearRegression', which broke subclassing).
    $class = ref($class) || $class || 'AI::ML::LinearRegression';
    my $self = bless {} => $class;

    $self->{grad} = $opts{gradient} if exists $opts{gradient};
    $self->{reg}  = $opts{lambda}   if exists $opts{lambda};
    $self->{cost} = $opts{cost}     if exists $opts{cost};
    $self->{plot} = $opts{plot}     if exists $opts{plot};

    $self->{n}     = exists $opts{n}     ? $opts{n}     : 100;
    $self->{alpha} = exists $opts{alpha} ? $opts{alpha} : 0.1;

    return $self;
}

=head2 linear_regression

    considerando X com as dimensoes(m,n) e theta com as dimensoes (n,1)
    #Default is normal equation
    #Option
    #gradient => not use normal equation
    #plot => plot data and linear
    #cost => plot cost
    #alpha
    #n => number of iterations

=cut

sub train {
    # Fits thetas from features $x (m x n) and targets $y (m x 1).
    # Uses gradient descent when the 'gradient' option was given to new(),
    # otherwise the closed-form normal equation.
    my ($self, $x, $y) = @_;
    my ($thetas, $iters, $alpha, $lambda);

    if ( exists $self->{grad} ) {
        $iters = $self->{n};
        $alpha = $self->{alpha};
        my ($cost, $grads, $reg_thetas);

        # Prepend a bias column of ones (theta 0 becomes the intercept).
        my $x = Math::Lapack::Matrix::concatenate(
            M->ones($x->rows, 1),
            $x
        );
        my ($m, $n) = $x->shape();
        $thetas = M->random($n, 1);
        my @cost_values = ();

        if (defined $self->{reg}) {
            # Ridge (L2) regularized gradient descent.
            $lambda = $self->{reg};
            for my $i (1..$iters) {
                # FIX: the regularization term of the cost is
                # (lambda / 2m) * sum(theta^2), bias excluded -- it was
                # previously "lambda * sum(theta)", unsquared and unscaled
                # (cf. the correct form in AI::ML::LogisticRegression::train).
                $cost = sum( ( ($x x $thetas) - $y) ** 2) / (2 * $m)
                      + ($lambda / (2 * $m)) * sum( $thetas->slice(x0 => 1) ** 2 );
                push @cost_values, $cost->get_element(0,0) if defined $self->{cost};

                $grads = ($x->T x (($x x $thetas)-$y)) / $m;

                $reg_thetas = ($lambda / $m) * $thetas;
                # do not regularize theta 0 (the bias term)
                $reg_thetas->set_element(0,0,0);
                $thetas = $thetas - $alpha * ( $grads + $reg_thetas );
            }
        }
        else {
            # Plain (unregularized) gradient descent.
            for my $i (1..$iters) {
                if ( exists $self->{cost} ) {
                    $cost = sum( ( ($x x $thetas) - $y) ** 2) / (2 * $m);
                    push @cost_values, $cost->get_element(0,0);
                }

                $grads  = ($x->T x (($x x $thetas)-$y)) / $m;
                $thetas = $thetas - $alpha * $grads;
            }
        }

        # Optional plots: fit line (needs the un-biased column) and cost curve.
        AI::ML::Expr::plot($x->slice(col => 1), $y, $thetas, $self->{plot}) if defined $self->{plot};
        AI::ML::Expr::plot_cost($self->{cost}, @cost_values) if exists $self->{cost};
    }
    else {
        # Default path: closed-form normal equation.
        $thetas = normal_eq($x, $y);
        AI::ML::Expr::plot($x, $y, $thetas, $self->{plot}) if defined $self->{plot};
    }
    $self->{thetas} = $thetas;
}

=head2 normal_eq

=cut

sub normal_eq {
    my ($x, $y) = @_;

    # Prepend a column of ones so the first theta acts as the intercept.
    my $design = Math::Lapack::Matrix::concatenate(
        M->ones($x->rows, 1),
        $x
    );

    # Closed-form least squares: theta = (X'X)^-1 X' y
    my $xt = $design->T;
    return (($xt x $design)->inverse x $xt) x $y;
}

=head2 linear_regression_pred

    devolve o valor previsto
    considerando X com as dimensoes(m,n) e theta com as dimensoes (n,1)

=cut

sub linear_regression_pred {
    my ($features, $thetas) = @_;
    # The hypothesis is just the matrix product X * theta.
    return $features x $thetas;
}

1;

lib/AI/ML/LogisticRegression.pm  view on Meta::CPAN

# ABSTRACT: turns baubles into trinketsA

package AI::ML::LogisticRegression;
use strict;
use warnings;

use Scalar::Util 'blessed';
use aliased 'Math::Lapack::Matrix' => 'M';
use Math::Lapack::Expr;
use AI::ML::Expr;
use parent 'AI::ML::Expr';
use Data::Dumper;


=head2 new

=cut
sub new {
    # Constructor. Options: reg (L2 regularization lambda), cost (file to
    # plot the cost curve to), plot (plot output file), n (iterations,
    # default 100), alpha (learning rate, default 0.1).
    my ($class, %opts) = @_;

    # FIX: bless into the invocant's package (was hard-coded to
    # 'AI::ML::LogisticRegression', which broke subclassing).
    $class = ref($class) || $class || 'AI::ML::LogisticRegression';
    my $self = bless {} => $class;

    $self->{reg}  = $opts{reg}  if exists $opts{reg};
    $self->{cost} = $opts{cost} if exists $opts{cost};
    $self->{plot} = $opts{plot} if exists $opts{plot};

    $self->{n}     = exists $opts{n}     ? $opts{n}     : 100;
    $self->{alpha} = exists $opts{alpha} ? $opts{alpha} : 0.1;

    return $self;
}

=head2 logistic_regression

    considerando x [m,n]
    considerando y [m,1]

=cut

sub train {
    # Fits logistic-regression thetas by batch gradient descent.
    # $x is (m x n) features, $y is (m x 1) binary labels; the learned
    # thetas are stored in $self->{thetas}.
    my ($self, $x, $y) = @_;
    my ($lambda, $thetas, $h, $cost, $reg, $reg_thetas, $grad);

    my $iters     = $self->{n};
    my $alpha     = $self->{alpha};
		#my $cost_file = exists $opts{cost}  ? $opts{cost}  : undef;

    # Prepend a bias column of ones so theta 0 is the intercept.
    $x = Math::Lapack::Matrix::concatenate(
        M->ones($x->rows,1),
        $x
    );

    my($m, $n) = $x->shape;

    $thetas = M->random($n,1);
    my @cost_values=();
    if ( exists $self->{reg} ) {
        # L2-regularized descent; the bias theta is excluded from both the
        # cost penalty (slice(x0 => 1)) and the gradient penalty below.
				$lambda = $self->{reg};
    		for my $i (1 .. $iters) {
        		$h = sigmoid($x x $thetas);
        		$reg = ($lambda / (2 * $m)) * sum( $thetas->slice(x0 => 1) ** 2 );
        		$cost = (-1 / $m) * sum($y * log($h) + (1 - $y) * log(1-$h)) + $reg;

        		push @cost_values, $cost->get_element(0,0) if exists $self->{cost};

        		$reg_thetas = ($lambda / $m) * $thetas;
        		# do not regularize theta 0 (the bias)
        		$reg_thetas->set_element(0,0,0);

        		$grad = ($x->T x ($h - $y)) / $m;

        		$thetas = $thetas - $alpha * ( $grad + $reg_thetas );
      	}
    }
    else {
        # Plain cross-entropy gradient descent.
      	for my $i (1 .. $iters) {
        	$h = sigmoid($x x $thetas);
        	$cost = (-1 / $m)*sum(($y * log($h)) + ((1-$y) * log(1-$h)));

        	push @cost_values, $cost->get_element(0,0) if exists $self->{cost};

        	$grad = ($x->T x ($h - $y)) / $m;
        	$thetas = $thetas - $alpha * $grad;
      	}
    }
    # Cost curve is only recorded (and plotted) when a 'cost' file was given.
		AI::ML::Expr::plot_cost($self->{cost}, @cost_values) if exists $self->{cost};
		$self->{thetas} = $thetas;
}


=head2 classification

=cut
sub classification {
    # Stores (and returns) the raw sigmoid activations for $x under the
    # trained thetas, without thresholding.
    my ($self, $x) = @_;

    # Consistency: add the bias column the same way train() and
    # prediction() do, instead of the ad-hoc ones->append() call.
    $x = Math::Lapack::Matrix::concatenate(
        M->ones($x->rows, 1),
        $x
    );
    $self->{classification} = sigmoid($x x $self->{thetas});
}


=head2 prediction

=cut
sub prediction {
    my ($self, $x, %opts) = @_;

    # Prepend the bias column before applying the learned thetas.
    my $design = Math::Lapack::Matrix::concatenate(
        M->ones($x->rows, 1),
        $x
    );

    # Threshold the sigmoid activations into class labels (options such as
    # the threshold are forwarded to AI::ML::Expr::prediction).
    my $h = sigmoid($design x $self->{thetas});
    $self->{yatt} = AI::ML::Expr::prediction($h, %opts);
}



=head2 accuracy

=cut
sub accuracy {
    # Accuracy of the stored predictions against the true labels $y.
    my ($self, $y) = @_;
    # FIX: die instead of print+exit -- exit() killed the whole process
    # with status 0 (success) even though this is an error condition.
    die "You should first predict the values!\n"
        unless exists $self->{yatt};
    return AI::ML::Expr::accuracy($y, $self->{yatt});
}


=head2 precision 

=cut
sub precision {
    # Precision of the stored predictions against the true labels $y.
    my ($self, $y) = @_;
    # FIX: die instead of print+exit -- exit() killed the whole process
    # with status 0 (success) even though this is an error condition.
    die "You should first predict the values!\n"
        unless exists $self->{yatt};
    return AI::ML::Expr::precision($y, $self->{yatt});
}


=head2 recall 

=cut
sub recall {
    # Recall of the stored predictions against the true labels $y.
    my ($self, $y) = @_;
    # FIX: die instead of print+exit -- exit() killed the whole process
    # with status 0 (success) even though this is an error condition.
    die "You should first predict the values!\n"
        unless exists $self->{yatt};
    return AI::ML::Expr::recall($y, $self->{yatt});
}



=head2 f1

=cut
sub f1 {
    # F1 score of the stored predictions against the true labels $y.
    my ($self, $y) = @_;
    # FIX: die instead of print+exit -- exit() killed the whole process
    # with status 0 (success) even though this is an error condition.
    die "You should first predict the values!\n"
        unless exists $self->{yatt};
    return AI::ML::Expr::f1($y, $self->{yatt});
}

1;

lib/AI/ML/NeuralNetwork.pm  view on Meta::CPAN

# ABSTRACT: turns baubles into trinkets

package AI::ML::NeuralNetwork;
use strict;
use warnings;

use Scalar::Util 'blessed';
use Math::Lapack::Matrix;
use Math::Lapack::Expr;
use parent 'AI::ML::Expr';

my $functions = {
		sigmoid 	=> \&AI::ML::Expr::sigmoid,
		relu   		=> \&AI::ML::Expr::relu,
		lrelu 		=> \&AI::ML::Expr::lrelu,
		softmax 	=> \&AI::ML::Expr::softmax,
		tanh 		=> \&AI::ML::Expr::tanh,
		dsigmoid    => \&AI::ML::Expr::d_sigmoid,
		drelu   	=> \&AI::ML::Expr::d_relu,
		dlrelu 		=> \&AI::ML::Expr::d_lrelu,
		dtanh  		=> \&AI::ML::Expr::d_tanh
};

=head2 new

=cut
sub new {
    # Constructor. $layers is an arrayref describing each layer: element 0
    # is the input width; later elements are either a plain unit count
    # (sigmoid activation assumed) or a hashref { units => N, func => NAME }.
    # Options: n (iterations, default 100), alpha (learning rate, default
    # 0.1), reg, cost, plot.
    my ($class, $layers, %opts) = @_;

    # FIX: bless into the invocant's package (was hard-coded).
    $class = ref($class) || $class || 'AI::ML::NeuralNetwork';
    my $self = bless {} => $class;

    my $i = 0;
    for my $href ( @$layers ) {
        if ( $i == 0 ) {
            # Input layer: only the number of units matters.
            $self->{"l$i"} = { units => $href };
        }
        else {
            if ( $href =~ /^\d+$/ ) {
                # Plain unit count: default to sigmoid activation.
                $self->{"l$i"} = { units => $href, func => "sigmoid", dfunc => "dsigmoid" };
            }
            elsif ( ref($href) eq "HASH" ) {
                if ( exists $href->{func} ) {
                    if ( exists $functions->{ $href->{func} } ) {
                        $self->{"l$i"}{func}  = $href->{func};
                        $self->{"l$i"}{dfunc} = 'd' . $href->{func};
                    }
                    else {
                        die "Invalid activation function for layer $i: $href->{func}\n";
                    }
                }
                else {
                    # FIX: the derivative was not being set in this branch,
                    # which made back-propagation look up $functions->{undef}.
                    $self->{"l$i"}{func}  = "sigmoid";
                    $self->{"l$i"}{dfunc} = "dsigmoid";
                }
                if ( exists($href->{units}) && $href->{units} =~ /^\d+$/ ) {
                    $self->{"l$i"}{units} = $href->{units};
                }
                else {
                    die "undefined number of units in layer $i\n";
                }
            }
        }
        $i++;
    }

    # Must run before the option keys below are added: it derives the layer
    # count from the l0..lN keys present in $self.
    $self->load_weights_bias();

    $self->{n}     = exists $opts{n}     ? $opts{n}     : 100;
    $self->{alpha} = exists $opts{alpha} ? $opts{alpha} : 0.1;
    $self->{reg}   = exists $opts{reg}   ? $opts{reg}   : undef;
    $self->{cost}  = exists $opts{cost}  ? $opts{cost}  : undef;
    $self->{plot}  = exists $opts{plot}  ? $opts{plot}  : undef;
    return $self;
}


=head2 load_weights_bias

=cut
sub load_weights_bias {
    # Initializes random weights and zero biases for layers 1..N-1, and
    # records the layer count in $self->{layers}.
    my ($self) = @_;

    # FIX: count only the layer entries (l0, l1, ...). The old
    # "keys %$self" counted every key, so the layer count would be wrong
    # if any bookkeeping keys were already present in $self.
    my $size = grep { /^l\d+$/ } keys %$self;
    $self->{layers} = $size;

    for my $i ( 1 .. $size - 1 ) {
        my $j = $i - 1;
        # Weights: (units_i x units_{i-1}); biases: column vector (units_i x 1).
        $self->{"l$i"}{w} = Math::Lapack::Matrix->random($self->{"l$i"}{units}, $self->{"l$j"}{units});
        $self->{"l$i"}{b} = Math::Lapack::Matrix->zeros($self->{"l$i"}{units}, 1);
    }
}


=head2 train

=cut
sub train {
    # Trains the network by full-batch gradient descent. $x is
    # (input_units x m), one example per column; $y is (output_units x m).
    # With the 'grads' option, the last iteration's tensors are kept in
    # $self->{grads} (used by gradient_checking).
    my ($self, $x, $y, %opts) = @_;
    my $m      = $x->columns;        # number of training examples
    my $layers = $self->{layers};

    die "Wrong number of units in input layer"  if ( $x->rows != $self->{"l0"}{units} );
    die "Wrong number of units in output layer" if ( $y->rows != $self->{"l".($layers-1)}{units} );

    # Per-iteration tensors: "A$i" activations, "Z$i" pre-activations,
    # "dz/dw/db/da$i" the corresponding gradients.
    my $var = { A0 => $x };

    my $iters = $self->{n};
    my $alpha = $self->{alpha};

    my $cost;

    for my $iter (1 .. $iters) {
        # ---- forward propagation ------------------------------------
        my ($i, $j);
        for ( 1 .. $layers-1 ) {
            $i = $_;
            $j = $i - 1;
            $var->{"Z$i"} = $self->{"l$i"}{w} x $var->{"A$j"} + $self->{"l$i"}{b};
            $var->{"A$i"} = $functions->{ $self->{"l$i"}{func} }->($var->{"Z$i"});
        }
        # After the loop $i is the output layer, $j the one before it.

        # Cross-entropy cost, sampled every 1000 iterations.
        if ($iter % 1000 == 0) {
            $cost = (-1 / $m)*sum(($y * log($var->{"A$i"})) + ((1-$y) * log(1-$var->{"A$i"})));
            $cost = $cost->get_element(0,0);
        }

        # ---- back propagation: output layer -------------------------
        $var->{"dz$i"} = $var->{"A$i"} - $y;
        $var->{"dw$i"} = (1 / $m) * ( $var->{"dz$i"} x T($var->{"A$j"}) );
        $var->{"db$i"} = (1 / $m) * sum( $var->{"dz$i"} , 0 );
        $var->{"da$j"} = T($self->{"l$i"}{w}) x $var->{"dz$i"};

        $self->{"l$i"}{w} = $self->{"l$i"}{w} - ( $alpha * $var->{"dw$i"} );
        $self->{"l$i"}{b} = $self->{"l$i"}{b} - ( $alpha * $var->{"db$i"} );

        # Force evaluation of the lazy expression trees.
        $self->{"l$i"}{b}->get_element(0,0);
        $self->{"l$i"}{w}->get_element(0,0);

        # ---- back propagation: hidden layers ------------------------
        $i--; $j--;
        for (; $j >= 0; $i--, $j--) {
            $var->{"dz$i"} = $var->{"da$i"} * $functions->{ $self->{"l$i"}{dfunc} }->($var->{"Z$i"});
            $var->{"dw$i"} = (1 / $m) * ( $var->{"dz$i"} x T($var->{"A$j"}) );
            $var->{"db$i"} = (1 / $m) * sum( $var->{"dz$i"} , 0 );
            # No "da" is needed below the first hidden layer.
            $var->{"da$j"} = T($self->{"l$i"}{w}) x $var->{"dz$i"} if $j >= 1;

            $self->{"l$i"}{w} = $self->{"l$i"}{w} - ( $alpha * $var->{"dw$i"} );
            $self->{"l$i"}{b} = $self->{"l$i"}{b} - ( $alpha * $var->{"db$i"} );
        }
    }

    # FIX: was "$self->{grads} = %$var", which evaluates the hash in scalar
    # context and stores a useless count instead of the gradient tensors
    # that _get_params_grads() later dereferences as a hashref.
    $self->{grads} = { %$var } if exists $opts{grads};
}


=head2 gradient_checking

=cut
sub gradient_checking {
    # NOTE(review): this routine is unfinished -- only the parameter/
    # gradient flattening is live. The numerical-gradient comparison
    # below (central differences with epsilon = 1e-7) is still commented
    # out and its helpers (_forward_prop_n) do not exist yet.
    my ($self, $x, $y) = @_;
    my ($params, $grads, %dims) = $self->_get_params_grads();
    #print STDERR Dumper($params);
    #print STDERR Dumper($grads);
    #print STDERR Dumper(%dims);

    #my $n = $params->rows;
    #my $m = $params->columns;
    #print STDERR "elements:$n,$m\nParams vector\n";
    #for my $i (0..$n-1){
    #    print STDERR "$i:" .$params->get_element($i,0)."\n";
    #}
    #print STDERR "Grads vector\n";

    #for my $j (0..$n-1){
    #    print STDERR $params->get_element($j,0)."\n";
    #}
  
    #my $epsilon = 1e-7;
    #my $J_plus = Math::Lapack::Matrix->zeros($n,1);
    #my $J_minus = Math::Lapack::Matrix->zeros($n,1);
    #my $grad_aprox = Math::Lapack::Matrix->zeros($n,1);
    
    #for my $i (0..$n-1){
    #    $theta_plus = $params;
    #    $theta_plus->set_element($i,0) = $theta_plus->get_element($i,0) + $epsilon;       
    #    $J_plus($i,0) = _forward_prop_n($x, $y, _vector_to_hash($theta_plus, $n, %dims));
    #    
    #    $theta_minus = $params;
    #    $theta_minus->set_element($i,0) = $theta_minus->get_element($i,0) - $epsilon;       
    #    $J_minus($i,0) = _forward_prop_n($x, $y, _vector_to_hash($theta_minus, $n));

    #    $grad_aprox($i,0) = ($J_plus($i,0) - $j_minus($i,0)) / (2*$epsilon);
    #}

} 
    

=head2 _vector_to_hash

=cut
sub _vector_to_hash {
    # Inverse of the flattening done by _get_params_grads: unpacks a column
    # vector back into per-layer weight/bias matrices using %dims, which
    # records each matrix's original shape. Returns the layer hash.
    my ($vector, $n, %dims) = @_;
    my $size = $vector->rows;
    my $pos  = 0;
    my ($n_values, $weight, $bias);

    # FIX: was "my %hash = {};", which stored a stray hashref under a
    # stringified key (and warned under "use warnings").
    my %hash;

    for my $i (1..$n-1){
        # Weight matrix for layer $i.
        $n_values = $dims{"w$i"}{rows} * $dims{"w$i"}{cols};
        $weight = $vector->slice( row_range => [$pos, $pos+$n_values-1] );
        $hash{"l$i"}{w} = $weight->reshape($dims{"w$i"}{rows}, $dims{"w$i"}{cols});
        $pos += $n_values;

        # Bias vector for layer $i.
        $n_values = $dims{"b$i"}{rows} * $dims{"b$i"}{cols};
        # FIX: was calling reshape() with slice() arguments (row_range),
        # mirroring the weight extraction above.
        $bias = $vector->slice( row_range => [$pos, $pos+$n_values-1] );
        $hash{"l$i"}{b} = $bias->reshape($dims{"b$i"}{rows},$dims{"b$i"}{cols});

        $pos += $n_values;
    }
    return %hash;
}


=head2 _get_params_grads


=cut
sub _get_params_grads {
    # Flattens every layer's weights and biases (and the matching entries
    # of $self->{grads}) into single column vectors, recording each
    # matrix's original shape in %dims so _vector_to_hash can undo it.
    # Returns ($params, $grads, %dims).
    # NOTE(review): the append() return values are discarded here --
    # presumably append() mutates in place; confirm against Math::Lapack.
    my ($self) = @_;
    
    my ($matrix, $params, $grads, $n, %dims);

    my ($r, $c);
    $n = $self->{layers};

    # Layer 1 seeds the vectors; later layers are appended in the loop.
    $matrix = $self->{"l1"}{w};
    $dims{"w1"}{rows} = $matrix->rows; 
    $dims{"w1"}{cols} = $matrix->columns;
 ($r, $c) = $matrix->shape;
print STDERR "New dimension shape: $r,$c\n";
    $params = $matrix->reshape($matrix->rows * $matrix->columns, 1);
    ($r, $c) = $params->shape;
    print STDERR "$r,$c\n";
    
    $matrix = $self->{grads}{"dw1"};
    $grads = $matrix->reshape($matrix->rows * $matrix->columns, 1);
    for my $i (1..$n-1){
        print STDERR "layer: $i\n";
        if( $i > 1 ){
            # Weights (and weight gradients) for layers beyond the first.
            $matrix = $self->{"l$i"}{w};
            $dims{"w$i"}{rows} = $matrix->rows; 
            $dims{"w$i"}{cols} = $matrix->columns;
            
            $matrix = $matrix->reshape($matrix->rows* $matrix->columns, 1);
 ($r, $c) = $matrix->shape;
print STDERR "New dimension shape: $r,$c\n";
            $params->append($matrix,1);
        
            $matrix = $self->{grads}{"dw$i"};
            $grads->append($matrix->reshape($matrix->rows*$matrix->columns, 1),0);
        }

    ($r, $c) = $params->shape;
    print STDERR "$r,$c\n";
        # Biases (and bias gradients) for every layer, including layer 1.
        $matrix = $self->{"l$i"}{b};
        $dims{"b$i"}{rows} = $matrix->rows; 
        $dims{"b$i"}{cols} = $matrix->columns;
 ($r, $c) = $matrix->shape;
print STDERR "New dimension shape: $r,$c\n";
        $params->append($matrix->reshape($matrix->rows *$matrix->columns,1), 0);
        

    ($r, $c) = $params->shape;
    print STDERR "$r,$c\n";
        $matrix = $self->{grads}{"db$i"};
        $grads->append($matrix->reshape($matrix->rows *$matrix->columns,1), 0);
    }

    #print STDERR "cols: $c, rows: $r\n";
    #print STDERR Dumper(%dims);

    return ($params, $grads, %dims);
}


=head2 prediction

=cut
sub prediction {
    my ($self, $x, %opts) = @_;
    my $layers = $self->{layers};

    # Forward pass: A0 is the input; each layer applies W*A + b followed
    # by its configured activation function.
    my $act  = { A0 => $x };
    my $last = 0;
    for my $layer (1 .. $layers - 1) {
        my $prev = $layer - 1;
        my $z = $self->{"l$layer"}{w} x $act->{"A$prev"} + $self->{"l$layer"}{b};
        $act->{"A$layer"} = $functions->{ $self->{"l$layer"}{func} }->($z);
        $last = $layer;
    }

    # Threshold the final activations into predicted labels (options are
    # forwarded to AI::ML::Expr::prediction).
    $self->{yatt} = AI::ML::Expr::prediction($act->{"A$last"}, %opts);
}



=head2 accuracy

=cut
sub accuracy {
    # Accuracy of the stored predictions against the true labels $y.
    my ($self, $y) = @_;
    # FIX: die instead of print+exit -- exit() killed the whole process
    # with status 0 (success) even though this is an error condition.
    die "You should first predict the values!\n"
        unless exists $self->{yatt};
    return AI::ML::Expr::accuracy($y, $self->{yatt});
}


=head2 precision 

=cut
sub precision {
    # Precision of the stored predictions against the true labels $y.
    my ($self, $y) = @_;
    # FIX: die instead of print+exit -- exit() killed the whole process
    # with status 0 (success) even though this is an error condition.
    die "You should first predict the values!\n"
        unless exists $self->{yatt};
    return AI::ML::Expr::precision($y, $self->{yatt});
}


=head2 recall 

=cut
sub recall {
    # Recall of the stored predictions against the true labels $y.
    my ($self, $y) = @_;
    # FIX: die instead of print+exit -- exit() killed the whole process
    # with status 0 (success) even though this is an error condition.
    die "You should first predict the values!\n"
        unless exists $self->{yatt};
    return AI::ML::Expr::recall($y, $self->{yatt});
}



=head2 f1

=cut
sub f1 {
    # F1 score of the stored predictions against the true labels $y.
    my ($self, $y) = @_;
    # FIX: die instead of print+exit -- exit() killed the whole process
    # with status 0 (success) even though this is an error condition.
    die "You should first predict the values!\n"
        unless exists $self->{yatt};
    return AI::ML::Expr::f1($y, $self->{yatt});
}

1;

run.sh  view on Meta::CPAN

#!/bin/bash
# Build the dist with Dist::Zilla, then configure, build and test it.
# FIX: abort on the first failing command -- previously a failed
# `dzil build` or `cd` ran the remaining steps in the wrong directory.
set -euo pipefail

dzil build
cd AI-ML-0.001
perl Build.PL --with-double
./Build
./Build test

scripts/load_data.c  view on Meta::CPAN

#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
#include <string.h>

//https://gist.github.com/spaghetti-source/5620288

/* Row-major dense matrix: element (r, c) lives at values[r * columns + c]. */
typedef struct s_matrix {
        int columns;
        int rows;
        double *values;
} Matrix;

/* Allocates Matrix `m` with r rows and c columns. NOTE: the value buffer
 * comes from malloc, so it is NOT zero-initialized. */
#define NEW_MATRIX(m,r,c)   m=(Matrix*)malloc(sizeof(Matrix));\
                            m->rows = r; m->columns = c;\
                            m->values = (double*) malloc (r * c * sizeof(double));


/* Reverses the byte order of the 32-bit value at *x (big <-> little endian). */
void endianSwap(unsigned int *x) {
  unsigned int v = *x;
  *x = ((v & 0x000000FFu) << 24)
     | ((v & 0x0000FF00u) <<  8)
     | ((v & 0x00FF0000u) >>  8)
     | ((v & 0xFF000000u) >> 24);
}

/* Reads an MNIST idx3-ubyte image file and returns a (num x rows*cols)
 * matrix, one image per row, pixels as doubles 0..255. Exits on any
 * I/O error. Caller owns the returned matrix (see destroy()). */
Matrix *read_csv(char *path){
  FILE *fimage = fopen(path, "rb");
  unsigned int magic, num, row, col;
  int i;
  unsigned char pixel;

  /* FIX: the stream was used unchecked; a bad path crashed in fread. */
  if (fimage == NULL) {
    fprintf(stderr, "read_csv: cannot open %s\n", path);
    exit(1);
  }

  /* The header is big-endian; 0x03080000 is the raw little-endian read
   * of the idx3 magic 0x00000803. */
  if(fread(&magic, 4, 1, fimage) <= 0) exit(1);
  assert(magic == 0x03080000);

  if(fread(&num, 4, 1, fimage) <= 0) exit(1);
  endianSwap(&num);

  if(fread(&row, 4, 1, fimage) <= 0) exit(1);
  endianSwap(&row);

  if(fread(&col, 4, 1, fimage) <= 0) exit(1);
  endianSwap(&col);

  Matrix *matrices;
  int size = row * col;

  NEW_MATRIX(matrices, num, size);
  for(i = 0; i < num * size; i++){
    if(fread(&pixel, 1, 1, fimage) <= 0){
      exit(1);
    }
    matrices->values[i] = (double)pixel;
  }
  fclose(fimage);

  return matrices;
}

/*Matrix *read_label_csv(char *path){
	FILE *flabel = fopen(path, "rb");
	unsigned int magic, num;
	unsigned char value;	
	if( fread(&magic, 4, 1, flabel) <= 0) exit(1);
	assert(magic = 0x01080000);
	if( fread(&num, 4, 1, flabel) <= 0) exit(1);
	endianSwap(&num);

	Matrix *m;
	NEW_MATRIX(m, num, 1);
	
	int i;
	for( i = 0; i < num; i++){
		if( fread(&value, 1 , 1, flabel) <= 0 ) exit(1);
		m->values[i] = (float)value;
		//printf("%d\n",value);
	}
	fclose(flabel);
	return m;
}*/

/* Reads an MNIST idx1-ubyte label file and returns a (num x 10) one-hot
 * encoded matrix. Exits on any I/O error. Caller owns the result. */
Matrix *read_label_csv(char *path){
	FILE *flabel = fopen(path, "rb");
	unsigned int magic, num;
	unsigned char value;

	/* FIX: the stream was used unchecked; a bad path crashed in fread. */
	if (flabel == NULL) {
		fprintf(stderr, "read_label_csv: cannot open %s\n", path);
		exit(1);
	}

	if( fread(&magic, 4, 1, flabel) <= 0) exit(1);
	/* FIX: was "assert(magic = 0x01080000)" -- an assignment, so the
	 * check always passed. 0x01080000 is the raw little-endian read of
	 * the idx1 magic 0x00000801. */
	assert(magic == 0x01080000);

	if( fread(&num, 4, 1, flabel) <= 0) exit(1);
	endianSwap(&num);

	Matrix *m;
	NEW_MATRIX(m, num, 10);
	/* FIX: NEW_MATRIX mallocs the buffer, so zero it before the one-hot
	 * writes below -- otherwise 9 of every 10 cells held garbage. */
	memset(m->values, 0, (size_t)num * 10 * sizeof(double));

	int i;
	for( i = 0; i < num; i++){
		if( fread(&value, 1 , 1, flabel) <= 0 ) {exit(1);}
        if(i < 10) fprintf(stderr, "%d\n",value);
        m->values[i * m->columns + value] = 1;
	}
	fclose(flabel);
	return m;
}

/* Writes the matrix to `path` as comma-separated values, one line per
 * row (every row, including the last, ends with '\n'). */
void save(Matrix *m, char *path){
  FILE *f = fopen(path, "w");
  if (f == NULL) {
    fprintf(stderr, "save: can't create file\n");
    exit(1);
  }

  int r, c;
  for (r = 0; r < m->rows; r++) {
    for (c = 0; c < m->columns; c++) {
      fprintf(f, (c == 0) ? "%f" : ",%f", (double)m->values[r * m->columns + c]);
    }
    /* The original guard `row < m->rows` was always true inside the
     * loop, so this newline was printed after every row anyway. */
    fprintf(f, "\n");
  }

  fclose(f);
}

/* Frees a matrix and its value buffer. Safe to call with NULL. */
void destroy(Matrix *m){
  if (m == NULL) return;   /* robustness: tolerate NULL like free() does */
  free(m->values);
  free(m);
}

/* Debug helper: prints at most the first 10 rows of `m` to stderr,
 * tab-separated. */
void print_values(Matrix *m){
    /* FIX: was a hard-coded 10, which read out of bounds whenever the
     * matrix had fewer than 10 rows. */
    int rows = m->rows < 10 ? m->rows : 10;
    int cols = m->columns;
    for(int i = 0; i < rows; i++){
        for(int j = 0; j < cols; j++){
            fprintf(stderr, "%f\t", m->values[i * cols + j]);
        }
        fprintf(stderr, "\n");
    }
}

/* CLI entry point: load <type> input-ubyte output-txt, where <type> is
 * "images" or "labels". Converts the idx file to a CSV text file. */
int main(int argc, char *argv[]){
    if (argc != 4) {
        fprintf(stderr, "Usage: load <type> input-ubyte output-txt\n");
        fprintf(stderr, "\ttype can be:\n");
        fprintf(stderr, "\t\timages\n");
        fprintf(stderr, "\t\tlabels\n");
        exit(1);
    }

    char *path = argv[2];
    Matrix *m;
    if (strcmp(argv[1], "images") == 0) {
        m = read_csv(path);
    } else if (strcmp(argv[1], "labels") == 0) {
        m = read_label_csv(path);
    } else {
        fprintf(stderr, "Unknown file type: %s\n", argv[1]);
        /* FIX: must not fall through -- `m` would be used uninitialized
         * by save() below. */
        exit(1);
    }
    save(m, argv[3]);
    destroy(m);

    return 0;
}

scripts/mnist.pl  view on Meta::CPAN

#!/usr/bin/env perl

use warnings;
use strict;

use HTTP::Tiny;
use Data::Dumper;
use File::Fetch;

use Math::Lapack;
use Math::Lapack::Matrix;

use AI::ML;
use AI::ML::NeuralNetwork;
    
# Map from local output basename to the official MNIST archive basename.
my %opt = (
        "train-images" => "train-images-idx3-ubyte",
        "train-labels" => "train-labels-idx1-ubyte",
        "test-images"  => "t10k-images-idx3-ubyte",
        "test-labels"  => "t10k-labels-idx1-ubyte"
    );


# Download the archives, build the C loader and convert everything to CSV.
_load_data();


sub _load_data {
    # Downloads the MNIST archives, compiles the C loader, and converts
    # each idx-ubyte file into a CSV next to it.
    _download_data();

    # FIX: check every system() call -- failures were silently ignored
    # and later steps then operated on missing files. List form avoids
    # the shell entirely.
    system("gcc", "load_data.c", "-o", "load") == 0
        or die "failed to compile load_data.c: $?";

    for my $key ( keys %opt ) {
        # Key is e.g. "train-images"; the second token selects the loader
        # mode ("images" or "labels").
        my (undef, $type) = split /-/, $key;

        # Best-effort: gunzip fails harmlessly when already unpacked.
        system("gunzip", "$opt{$key}.gz") == 0
            or warn "gunzip failed for $opt{$key}.gz: $?";

        system("./load", $type, $opt{$key}, "$key.csv") == 0
            or die "failed to convert $opt{$key}: $?";
    }
}

sub _download_data {
    # Fetches the four gzipped MNIST archives into the current directory
    # with File::Fetch; dies on the first failed download.
    my $url = "http://yann.lecun.com/exdb/mnist";

    for my $key ( keys %opt ) {
        my $file = "$url/$opt{$key}.gz";
        my $ff   = File::Fetch->new(uri => $file);
        # FIX: dropped the unused HTTP::Tiny object and dead debug code
        # that the old version constructed and never used.
        $ff->fetch() or die $ff->error;
    }
}
    








t/00-report-prereqs.dd  view on Meta::CPAN

# Static prerequisite data generated by Dist::Zilla; loaded with do() by
# t/00-report-prereqs.t. The trailing $x makes the hashref the return
# value of the do() call.
do { my $x = {
       'build' => {
                    'requires' => {
                                    'Module::Build' => '0.28'
                                  }
                  },
       'configure' => {
                        'requires' => {
                                        'Module::Build' => '0.28'
                                      }
                      },
       'develop' => {
                      'requires' => {
                                      'File::Spec' => '0',
                                      'IO::Handle' => '0',
                                      'IPC::Open3' => '0',
                                      'Test::More' => '0',
                                      'Test::Pod' => '1.41'
                                    }
                    },
       'test' => {
                   'recommends' => {
                                     'CPAN::Meta' => '2.120900'
                                   },
                   'requires' => {
                                   'ExtUtils::MakeMaker' => '0',
                                   'File::Spec' => '0',
                                   'Test2::V0' => '0.000060',
                                   'Test::More' => '0'
                                 }
                 }
     };
  $x;
 }

t/00-report-prereqs.t  view on Meta::CPAN

#!perl

use strict;
use warnings;

# This test was generated by Dist::Zilla::Plugin::Test::ReportPrereqs 0.027

use Test::More tests => 1;

use ExtUtils::MakeMaker;
use File::Spec;

# from $version::LAX
# Regex accepting any "lax" version string (decimal, alpha, or v-string);
# from $version::LAX
my $lax_version_re =
    qr/(?: undef | (?: (?:[0-9]+) (?: \. | (?:\.[0-9]+) (?:_[0-9]+)? )?
            |
            (?:\.[0-9]+) (?:_[0-9]+)?
        ) | (?:
            v (?:[0-9]+) (?: (?:\.[0-9]+)+ (?:_[0-9]+)? )?
            |
            (?:[0-9]+)? (?:\.[0-9]+){2,} (?:_[0-9]+)?
        )
    )/x;

# hide optional CPAN::Meta modules from prereq scanner
# and check if they are available
my $cpan_meta = "CPAN::Meta";
my $cpan_meta_pre = "CPAN::Meta::Prereqs";
my $HAS_CPAN_META = eval "require $cpan_meta; $cpan_meta->VERSION('2.120900')" && eval "require $cpan_meta_pre"; ## no critic

# Verify requirements?
my $DO_VERIFY_PREREQS = 1;

sub _max {
    # Returns the numerically largest of its arguments.
    my ($best, @rest) = @_;
    for my $candidate (@rest) {
        $best = $candidate if $candidate > $best;
    }
    return $best;
}

sub _merge_prereqs {
    # Merges $prereqs into $collector, which is either a
    # CPAN::Meta::Prereqs object or a raw phase/type/module hashref.
    my ($collector, $prereqs) = @_;

    # A CPAN::Meta::Prereqs collector knows how to merge itself.
    return $collector->with_merged_prereqs( CPAN::Meta::Prereqs->new($prereqs) )
        if ref $collector eq $cpan_meta_pre;

    # Raw hashrefs: copy entries in, letting the incoming data overwrite
    # any existing module versions.
    while ( my ($phase, $types) = each %$prereqs ) {
        while ( my ($type, $modules) = each %$types ) {
            while ( my ($module, $version) = each %$modules ) {
                $collector->{$phase}{$type}{$module} = $version;
            }
        }
    }

    return $collector;
}

# Extra modules to force into / out of the report (filled in by the
# Dist::Zilla plugin configuration; empty for this dist).
my @include = qw(

);

my @exclude = qw(

);

# Add static prereqs to the included modules list
my $static_prereqs = do './t/00-report-prereqs.dd';

# Merge all prereqs (either with ::Prereqs or a hashref)
my $full_prereqs = _merge_prereqs(
    ( $HAS_CPAN_META ? $cpan_meta_pre->new : {} ),
    $static_prereqs
);

# Add dynamic prereqs to the included modules list (if we can)
my ($source) = grep { -f } 'MYMETA.json', 'MYMETA.yml';
my $cpan_meta_error;
if ( $source && $HAS_CPAN_META
    && (my $meta = eval { CPAN::Meta->load_file($source) } )
) {
    $full_prereqs = _merge_prereqs($full_prereqs, $meta->prereqs);
}
else {
    $cpan_meta_error = $@;    # capture error from CPAN::Meta->load_file($source)
    $source = 'static metadata';
}

my @full_reports;
my @dep_errors;
my $req_hash = $HAS_CPAN_META ? $full_prereqs->as_string_hash : $full_prereqs;

# Add static includes into a fake section
for my $mod (@include) {
    $req_hash->{other}{modules}{$mod} = 0;
}

# Walk every phase/type pair, compare installed versions with the
# required ones, and accumulate a formatted table plus any hard errors.
for my $phase ( qw(configure build test runtime develop other) ) {
    next unless $req_hash->{$phase};
    next if ($phase eq 'develop' and not $ENV{AUTHOR_TESTING});

    for my $type ( qw(requires recommends suggests conflicts modules) ) {
        next unless $req_hash->{$phase}{$type};

        my $title = ucfirst($phase).' '.ucfirst($type);
        my @reports = [qw/Module Want Have/];

        for my $mod ( sort keys %{ $req_hash->{$phase}{$type} } ) {
            next if $mod eq 'perl';
            next if grep { $_ eq $mod } @exclude;

            # Resolve the module to a file on disk via @INC.
            my $file = $mod;
            $file =~ s{::}{/}g;
            $file .= ".pm";
            my ($prefix) = grep { -e File::Spec->catfile($_, $file) } @INC;

            my $want = $req_hash->{$phase}{$type}{$mod};
            $want = "undef" unless defined $want;
            $want = "any" if !$want && $want == 0;

            my $req_string = $want eq 'any' ? 'any version required' : "version '$want' required";

            if ($prefix) {
                my $have = MM->parse_version( File::Spec->catfile($prefix, $file) );
                $have = "undef" unless defined $have;
                push @reports, [$mod, $want, $have];

                # Only hard "requires" entries can produce dep errors.
                if ( $DO_VERIFY_PREREQS && $HAS_CPAN_META && $type eq 'requires' ) {
                    if ( $have !~ /\A$lax_version_re\z/ ) {
                        push @dep_errors, "$mod version '$have' cannot be parsed ($req_string)";
                    }
                    elsif ( ! $full_prereqs->requirements_for( $phase, $type )->accepts_module( $mod => $have ) ) {
                        push @dep_errors, "$mod version '$have' is not in required range '$want'";
                    }
                }
            }
            else {
                push @reports, [$mod, $want, "missing"];

                if ( $DO_VERIFY_PREREQS && $type eq 'requires' ) {
                    push @dep_errors, "$mod is not installed ($req_string)";
                }
            }
        }

        if ( @reports ) {
            push @full_reports, "=== $title ===\n\n";

            # Column widths sized to the longest entry in each column.
            my $ml = _max( map { length $_->[0] } @reports );
            my $wl = _max( map { length $_->[1] } @reports );
            my $hl = _max( map { length $_->[2] } @reports );

            if ($type eq 'modules') {
                splice @reports, 1, 0, ["-" x $ml, "", "-" x $hl];
                push @full_reports, map { sprintf("    %*s %*s\n", -$ml, $_->[0], $hl, $_->[2]) } @reports;
            }
            else {
                splice @reports, 1, 0, ["-" x $ml, "-" x $wl, "-" x $hl];
                push @full_reports, map { sprintf("    %*s %*s %*s\n", -$ml, $_->[0], $wl, $_->[1], $hl, $_->[2]) } @reports;
            }

            push @full_reports, "\n";
        }
    }
}

# Emit the collected report and any dependency errors as TAP diagnostics;
# the single test always passes -- this file only reports.
if ( @full_reports ) {
    diag "\nVersions for all modules listed in $source (including optional ones):\n\n", @full_reports;
}

if ( $cpan_meta_error || @dep_errors ) {
    diag "\n*** WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING ***\n";
}

if ( $cpan_meta_error ) {
    my ($orig_source) = grep { -f } 'MYMETA.json', 'MYMETA.yml';
    diag "\nCPAN::Meta->load_file('$orig_source') failed with: $cpan_meta_error\n";
}

if ( @dep_errors ) {
    diag join("\n",
        "\nThe following REQUIRED prerequisites were not satisfied:\n",
        @dep_errors,
        "\n"
    );
}

pass;

# vim: ts=4 sts=4 sw=4 et:

t/01-activation-funcs.t  view on Meta::CPAN

#!perl

use Test2::V0 qw'is float done_testing';
use Math::Lapack::Matrix;
use Math::Lapack::Expr;
use AI::ML::Expr;

use Data::Dumper;


# One row of assorted values (zero, tiny, large, negative) exercising each
# activation function across its interesting regions.
my $m = Math::Lapack::Matrix->new(
    [
        [0, 0.002, 3, 4, 7.333, -.00008, -2.03456, 9, 100.3456, -300]
    ]
);

is($m->rows, 1, "Right number of rows");
is($m->columns, 10, "Right number of columns");

# Test sigmoid: 1 / (1 + exp(-x))
my $sig = sigmoid($m);
_float($sig->get_element(0,0), 0.5, "Element correct at 0,0");
_float($sig->get_element(0,1), 5.005e-1, "Element correct at 0,1");
_float($sig->get_element(0,2), 9.52574127e-001, "Element correct at 0,2");
_float($sig->get_element(0,3), 9.82013790e-001, "Element correct at 0,3");
_float($sig->get_element(0,4), 9.99346817e-001, "Element correct at 0,4");
_float($sig->get_element(0,5), 4.99980000e-001, "Element correct at 0,5");
_float($sig->get_element(0,6), 1.15621829e-001, "Element correct at 0,6");
_float($sig->get_element(0,7), 9.99876605e-001, "Element correct at 0,7");
_float($sig->get_element(0,8), 1, "Element correct at 0,8");
_float($sig->get_element(0,9), 5.14820022e-131, "Element correct at 0,9");

# Test derivative of sigmoid: s(x) * (1 - s(x))
my $dsig = d_sigmoid($m);
_float($dsig->get_element(0,0), 2.50000000e-001, "Element correct at 0,0");
_float($dsig->get_element(0,1), 2.49999750e-001, "Element correct at 0,1");
_float($dsig->get_element(0,2), 4.51766597e-002, "Element correct at 0,2");
_float($dsig->get_element(0,3), 1.76627062e-002, "Element correct at 0,3");
_float($dsig->get_element(0,4), 6.52756239e-004, "Element correct at 0,4");
_float($dsig->get_element(0,5), 2.50000000e-001, "Element correct at 0,5");
_float($dsig->get_element(0,6), 1.02253421e-001, "Element correct at 0,6");
_float($dsig->get_element(0,7), 1.23379350e-004, "Element correct at 0,7");
_float($dsig->get_element(0,8), 0, "Element correct at 0,8");
_float($dsig->get_element(0,9), 5.14820022e-131, "Element correct at 0,9");

# Test ReLU: max(0, x)
my $relu_m = relu($m);
_float($relu_m->get_element(0,0), 0, "Element correct at 0,0");
_float($relu_m->get_element(0,1), 2.000000e-03, "Element correct at 0,1");
_float($relu_m->get_element(0,2), 3, "Element correct at 0,2");
_float($relu_m->get_element(0,3), 4, "Element correct at 0,3");
_float($relu_m->get_element(0,4), 7.333000e+00, "Element correct at 0,4");
_float($relu_m->get_element(0,5), 0, "Element correct at 0,5");
_float($relu_m->get_element(0,6), 0, "Element correct at 0,6");
_float($relu_m->get_element(0,7), 9, "Element correct at 0,7");
_float($relu_m->get_element(0,8), 1.003456e+02, "Element correct at 0,8");
_float($relu_m->get_element(0,9), 0, "Element correct at 0,9");

# Test derivative of ReLU: 1 for x >= 0, 0 for x < 0 (per the expected values)
my $drelu = d_relu($m);
_float($drelu->get_element(0,0), 1, "Element correct at 0,0");
_float($drelu->get_element(0,1), 1, "Element correct at 0,1");
_float($drelu->get_element(0,2), 1, "Element correct at 0,2");
_float($drelu->get_element(0,3), 1, "Element correct at 0,3");
_float($drelu->get_element(0,4), 1, "Element correct at 0,4");
_float($drelu->get_element(0,5), 0, "Element correct at 0,5");
_float($drelu->get_element(0,6), 0, "Element correct at 0,6");
_float($drelu->get_element(0,7), 1, "Element correct at 0,7");
_float($drelu->get_element(0,8), 1, "Element correct at 0,8");
_float($drelu->get_element(0,9), 0, "Element correct at 0,9");

# Test leaky ReLU with slope 0.001: x for x >= 0, 0.001*x otherwise
my $lrelu_m = lrelu($m, .001);
_float($lrelu_m->get_element(0,0), 0, "Element correct at 0,0");
_float($lrelu_m->get_element(0,1), 2.000000e-03, "Element correct at 0,1");
_float($lrelu_m->get_element(0,2), 3.000000e+00, "Element correct at 0,2");
_float($lrelu_m->get_element(0,3), 4.000000e+00, "Element correct at 0,3");
_float($lrelu_m->get_element(0,4), 7.333000e+00, "Element correct at 0,4");
_float($lrelu_m->get_element(0,5), -8.000000e-08, "Element correct at 0,5");
_float($lrelu_m->get_element(0,6), -2.034560e-03, "Element correct at 0,6");
_float($lrelu_m->get_element(0,7), 9.000000e+00, "Element correct at 0,7");
_float($lrelu_m->get_element(0,8), 1.003456e+02, "Element correct at 0,8");
_float($lrelu_m->get_element(0,9), -3.000000e-01, "Element correct at 0,9");

# Test derivative of leaky ReLU: 1 for x >= 0, the slope (0.001) otherwise
my $dlrelu = d_lrelu($m, .001);
_float($dlrelu->get_element(0,0), 1, "Element correct at 0,0");
_float($dlrelu->get_element(0,1), 1, "Element correct at 0,1");
_float($dlrelu->get_element(0,2), 1, "Element correct at 0,2");
_float($dlrelu->get_element(0,3), 1, "Element correct at 0,3");
_float($dlrelu->get_element(0,4), 1, "Element correct at 0,4");
_float($dlrelu->get_element(0,5), .001, "Element correct at 0,5");
_float($dlrelu->get_element(0,6), .001, "Element correct at 0,6");
_float($dlrelu->get_element(0,7), 1, "Element correct at 0,7");
_float($dlrelu->get_element(0,8), 1, "Element correct at 0,8");
_float($dlrelu->get_element(0,9), .001, "Element correct at 0,9");

# Test tanh (saturates to +/-1 for large |x|)
my $tanh_m = tanh($m);
_float($tanh_m->get_element(0,0), 0, "Element correct at 0,0");
_float($tanh_m->get_element(0,1), 1.99999733e-03, "Element correct at 0,1");
_float($tanh_m->get_element(0,2), 9.95054754e-01, "Element correct at 0,2");
_float($tanh_m->get_element(0,3), 9.99329300e-01, "Element correct at 0,3");
_float($tanh_m->get_element(0,4), 9.99999146e-01, "Element correct at 0,4");
_float($tanh_m->get_element(0,5), -7.99999998e-05, "Element correct at 0,5");
_float($tanh_m->get_element(0,6), -9.66389636e-01, "Element correct at 0,6");
_float($tanh_m->get_element(0,7), 9.99999970e-01, "Element correct at 0,7");
_float($tanh_m->get_element(0,8), 1, "Element correct at 0,8");
_float($tanh_m->get_element(0,9), -1, "Element correct at 0,9");

# Test derivative of tanh: 1 - tanh(x)^2
my $dtanh = d_tanh($m);
_float($dtanh->get_element(0,0), 1.00000000e+00, "Element correct at 0,0");
_float($dtanh->get_element(0,1), 9.99996000e-01, "Element correct at 0,1");
_float($dtanh->get_element(0,2), 9.86603717e-03, "Element correct at 0,2");
_float($dtanh->get_element(0,3), 1.34095068e-03, "Element correct at 0,3");
_float($dtanh->get_element(0,4), 1.70882169e-06, "Element correct at 0,4");
_float($dtanh->get_element(0,5), 9.99999994e-01, "Element correct at 0,5");
_float($dtanh->get_element(0,6), 6.60910712e-02, "Element correct at 0,6");
_float($dtanh->get_element(0,7), 6.09199171e-08, "Element correct at 0,7");
_float($dtanh->get_element(0,8), 0, "Element correct at 0,8");
_float($dtanh->get_element(0,9), 0, "Element correct at 0,9");


# Test softmax: applied per column (each column's probabilities sum to 1,
# as the expected values below confirm).
my $logits = Math::Lapack::Matrix->new(
               [
                [1, 2, 1],
                [2, 4, 2],
                [3, 5, 3],
                [6, 6, 6]
            ]);


my $soft = softmax($logits);

# FIX: the original asserted the dimensions of the INPUT ($e); the intent
# is to check that softmax preserves the shape of its OUTPUT.
is($soft->rows, 4, "Right number of rows - softmax");
is($soft->columns, 3, "Right number of cols - softmax");

# prob of first col
_float($soft->get_element(0,0), 0.00626879, "Element correct at 0,0");
_float($soft->get_element(1,0), 0.01704033, "Element correct at 1,0");
_float($soft->get_element(2,0), 0.04632042, "Element correct at 2,0");
_float($soft->get_element(3,0), 0.93037047, "Element correct at 3,0");

# prob of second col
_float($soft->get_element(0,1), 0.01203764, "Element correct at 0,1");
_float($soft->get_element(1,1), 0.08894682, "Element correct at 1,1");
_float($soft->get_element(2,1), 0.24178252, "Element correct at 2,1");
_float($soft->get_element(3,1), 0.65723302, "Element correct at 3,1");

# prob of third col (identical to the first: the logits columns are equal)
_float($soft->get_element(0,2), 0.00626879, "Element correct at 0,2");
_float($soft->get_element(1,2), 0.01704033, "Element correct at 1,2");
_float($soft->get_element(2,2), 0.04632042, "Element correct at 2,2");
_float($soft->get_element(3,2), 0.93037047, "Element correct at 3,2");


done_testing;

# Compare a computed value to an expected one within an absolute
# tolerance of 1e-5, reporting via Test2's is()/float().
sub _float {
    my ($got, $expected, $description) = @_;
    is($got, float($expected, tolerance => 0.00001), $description);
}

t/02-cost_functions.t  view on Meta::CPAN

#!perl

# FIX: the original script ran without strictures.
use strict;
use warnings;

use Math::Lapack::Matrix;
use Math::Lapack::Expr;
use AI::ML::Expr;

# Row vector with one negative entry so d_relu yields a mixed 0/1 mask.
my $m_1 = Math::Lapack::Matrix->new([ [-2, 1, 2, 3] ]);
my $nr_tests = 0;

# d_relu: 0 for the negative element, 1 for the positive ones.
my $mask = d_relu($m_1);

float($mask->get_element(0,0), 0, "Element correct at 0,0");
float($mask->get_element(0,1), 1, "Element correct at 0,1");
float($mask->get_element(0,2), 1, "Element correct at 0,2");
float($mask->get_element(0,3), 1, "Element correct at 0,3");

# Product of the mask with the original vector: 0*(-2) + 1 + 2 + 3 = 6.
# NOTE(review): `x` here is presumably overloaded by Math::Lapack::Expr as
# matrix multiplication — confirm against that module.
my $prod = $mask x $m_1->transpose;

float($prod->get_element(0,0), 6, "Element correct at 0,0");

# Emit the TAP plan after the fact, once the test count is known.
print "1..$nr_tests\n";

# TAP-style numeric comparison with an absolute tolerance of 1e-6.
# Increments the file-scoped test counter and prints one "ok"/"not ok" line.
sub float {
    $nr_tests++;
    my ($got, $expected, $explanation) = @_;
    my $close_enough = abs($got - $expected) <= 0.000001;
    unless ($close_enough) {
        print "not ";
        $explanation .= " ($got vs $expected)";
    }
    print "ok $nr_tests - $explanation\n";
}

# TAP-style exact numeric equality check.
# Increments the file-scoped test counter and prints one "ok"/"not ok" line.
sub is {
    $nr_tests++;
    my ($got, $expected, $explanation) = @_;
    my $equal = $got == $expected;
    unless ($equal) {
        print "not ";
        $explanation .= " ($got vs $expected)";
    }
    print "ok $nr_tests - $explanation\n";
}

t/03-mini-batch.t  view on Meta::CPAN

#!perl

# FIX: the original script ran without strictures, and its test
# descriptions embedded a trailing "\n", producing malformed TAP lines
# ("ok N - ...\n\n").
use strict;
use warnings;

use Math::Lapack::Matrix;
use Math::Lapack::Expr;
use AI::ML::Expr;

# 50x1000 matrix sliced with axis=0 into five consecutive 50x200 batches.
my $m = Math::Lapack::Matrix->random(50,1000);
my $nr_tests = 0;
my $axis = 0;
my $size = 200;
my $start = 0;

for my $v (0..4){
    my $batch = mini_batch($m, $start, $size, $axis);
    is($batch->rows, 50, "Right number of rows");
    is($batch->columns, 200, "Right number of columns");
    $start += $size;
}

# 1000x20 matrix sliced with axis=1 into five consecutive 200x20 batches.
my $m_1 = Math::Lapack::Matrix->random(1000,20);
$start = 0;
$axis = 1;
for my $i (0..4){
    my $batch = mini_batch($m_1, $start, $size, $axis);
    is($batch->rows, 200, "Right number of rows");
    is($batch->columns, 20, "Right number of columns");
    $start += $size;
}

# Emit the TAP plan after the fact, once the test count is known.
print "1..$nr_tests\n";

# TAP-style numeric comparison with an absolute tolerance of 1e-6.
# NOTE: unused in this script (only is() is called); kept for parity with
# the sibling test files.
sub float {
    $nr_tests++;
    my ($got, $want, $label) = @_;
    if (abs($got - $want) > 0.000001) {
        print "not ";
        $label .= " ($got vs $want)";
    }
    print "ok $nr_tests - $label\n";
}

# TAP-style exact numeric equality check against the file-scoped counter.
sub is {
    $nr_tests++;
    my ($got, $want, $label) = @_;
    if ($got != $want) {
        print "not ";
        $label .= " ($got vs $want)";
    }
    print "ok $nr_tests - $label\n";
}

t/04-linear-regression.t  view on Meta::CPAN

#!perl

use Test2::V0;

use Math::Lapack::Matrix;
use AI::ML::LinearRegression;


# Single-feature regression data (column vectors x and y).
# NOTE(review): the two fixture lines below end in "..." — they appear
# truncated by the archive viewer; confirm against the original t/ file.
my $x = Math::Lapack::Matrix->new([[12.39999962],[14.30000019],[14.5],[14.89999962],[16.10000038],[16.89999962],[16.5],[15.39999962],[17],[17.89999962],[18.79999924],[20.29999924],[22.39999962],[19.39999962],[15.5],[16.70000076],[17.29999924],[18.399...

my $y = Math::Lapack::Matrix->new([[11.19999981],[12.5],[12.69999981],[13.10000038],[14.10000038],[14.80000019],[14.39999962],[13.39999962],[14.89999962],[15.60000038],[16.39999962],[17.70000076],[19.60000038],[16.89999962],[14],[14.60000038],[15.100...

# Closed-form fit: with no iteration count given the model presumably
# solves the normal equations directly — confirm against LinearRegression.
my $m = AI::ML::LinearRegression->new(plot => "../../plot.png");

$m->train($x, $y);

my $t = $m->{thetas};

# Two thetas: intercept (0,0) and slope (1,0).
is($t->rows, 2, "Right number of rows");
is($t->columns, 1, "Right number of columns");
_float($t->get_element(0,0), 0.43458449, "Normal Equation - Right value of theta 0,0");
_float($t->get_element(1,0), 0.85114404, "Normal Equation - Right value of theta 1,0");

# Gradient-descent fit: 50 iterations with learning rate 0.001.
# NOTE(review): the descriptions below still say "Normal Equation" —
# likely copy-paste from the block above.
my $m1 = AI::ML::LinearRegression->new(
    cost     => "../../cost1.png",
    gradient => "foo",
    plot     => "../../plot1.png",
    n        => 50,
    alpha    => 0.001
);


$m1->train($x, $y);
is($m1->{thetas}->rows, 2, "Right number of rows");
is($m1->{thetas}->columns, 1, "Right number of columns");
_float($m1->{thetas}->get_element(0,0), 0.86412871, "Normal Equation - Right value of theta 0,0");
_float($m1->{thetas}->get_element(1,0), 0.8269897, "Normal Equation - Right value of theta 1,0");


# Regularised gradient-descent fit (lambda => 1), same 50 iterations.
my $n = AI::ML::LinearRegression->new(
                                             lambda   => 1,
                                             cost     => "../../cost2.png",
                                             gradient => "foo",
                                             plot     => "../../plot2.png",
                                             n        => 50,
                                             alpha    => 0.001);
$n->train($x, $y);
is($n->{thetas}->rows, 2, "Right number of rows");
is($n->{thetas}->columns, 1, "Right number of columns");

_float($n->{thetas}->get_element(0,0), 0.78473628, "Normal Equation - Right value of theta 0,0");
_float($n->{thetas}->get_element(1,0), 0.83133813, "Normal Equation - Right value of theta 1,0");
### FIXME: if the tests generate files, you should test them.
##         and delete them afterwards

done_testing();

# Floating-point comparison helper with an absolute tolerance of 1e-6.
sub _float {
    my ($got, $want, $label) = @_;
    is($got, float($want, tolerance => 0.000001), $label);
}

t/05-logistic-regression.t  view on Meta::CPAN

#!perl

# FIX: the eight theta descriptions below read "Right vale ..." in the
# original — corrected to "Right value ..." (user-visible TAP output).
use Test2::V0 qw'is float done_testing';
use Math::Lapack::Matrix;
use Math::Lapack;
use AI::ML::LogisticRegression;

# Fixed seed so any random initialisation is reproducible.
Math::Lapack->seed_rng(0);

# Three feature columns and one label column read from the same CSV fixture.
my $x = Math::Lapack::Matrix::read_csv("t/logistic.csv", col_range =>[0,2]);
my $y = Math::Lapack::Matrix::read_csv("t/logistic.csv", col => 3);

is($x->rows, 306, "Right number of rows");
is($y->rows, 306, "Right number of rows");
is($x->columns, 3, "Right number of cols");
is($y->columns, 1, "Right number of cols");

# Spot-check raw CSV cells before any preprocessing.
is($x->get_element(0,0),30, "Right element 0,0 of x");
is($x->get_element(1,2),3, "Right element 1,2 of x");
is($x->get_element(3,1),59, "Right element 3,1 of x");
is($x->get_element(2,2),0, "Right element 2,2 of x");
is($y->get_element(3,0),1, "Right element 3,0 of y");
is($y->get_element(7,0),2, "Right element 7,0 of y");

# Standardise the features in place; remap the labels {1,2} -> {0,1}.
$x->norm_std_deviation();
$y = $y - 1;

# Unregularised model: 10000 iterations, learning rate 0.5.
my $m = AI::ML::LogisticRegression->new(
    n     => 10000,
    alpha => 0.5,
    cost  => "../../logistcost.png"
);

$m->train($x, $y);
my $thetas = $m->{thetas};
_float($thetas->get_element(0,0), -1.07661735,"Right value of theta 0,0");
_float($thetas->get_element(1,0), 0.21463009,"Right value of theta 1,0");
_float($thetas->get_element(2,0), -0.03173973,"Right value of theta 2,0");
_float($thetas->get_element(3,0), 0.63483062,"Right value of theta 3,0");

# Classification metrics on the training set.
$m->prediction($x);
_float($m->accuracy($y), 0.7483660130718954, "Right value of accuracy");
_float($m->precision($y), 0.5833333333333334, "Right value of precision");
_float($m->recall($y), 0.1728395061728395, "Right value of recall");
_float($m->f1($y), 0.26666666666666666, "Right value of f1");

# Regularised variant (reg => 2): thetas shrink slightly toward zero,
# as the expected values below show.
my $n = AI::ML::LogisticRegression->new(
    n     => 10000,
    alpha => 0.5,
    cost  => "../../logistcost_reg.png",
    reg   => 2
);

$n->train($x, $y);
$thetas = $n->{thetas};
_float($thetas->get_element(0,0), -1.07368839, "Right value of theta 0,0");
_float($thetas->get_element(1,0), 0.204271, "Right value of theta 1,0");
_float($thetas->get_element(2,0), -0.02933972, "Right value of theta 2,0");
_float($thetas->get_element(3,0), 0.60950995, "Right value of theta 3,0");

$n->prediction($x);
_float($n->accuracy($y), 0.7450980392156863, "Right value of accuracy");
_float($n->precision($y), 0.5652173913043478, "Right value of precision");
_float($n->recall($y), 0.16049382716049382, "Right value of recall");
_float($n->f1($y), .25, "Right value of f1");


done_testing;

# Loose floating-point comparison (tolerance 0.01 — training results vary
# slightly across platforms).
sub _float {
    my ($got, $expected, $description) = @_;
    is($got, float($expected, tolerance => 0.01), $description);
}

t/06-accuracy-precision-recall-f1.t  view on Meta::CPAN

#!perl

# FIX: the original script ran without strictures.
use strict;
use warnings;

use Math::Lapack::Matrix;
use AI::ML::Expr;

my $nr_tests = 0;

# Ground-truth labels ($y) and predictions ($yatt) chosen so that the
# confusion matrix is:
#   true positives  = 4
#   false positives = 3
#   false negatives = 4
my $y = Math::Lapack::Matrix->new([
    [1],[0],[1],[1],[0],[0],[1],[1],[1],[1],[0],[0],[1],[0],[0]
]);
my $yatt = Math::Lapack::Matrix->new([
    [1],[0],[0],[0],[1],[1],[0],[1],[1],[1],[1],[0],[0],[0],[0]
]);

# accuracy = correct / total = 8/15
my $acc = AI::ML::Expr::accuracy($y, $yatt);
float($acc, 0.5333333, "Right accuracy");

# precision = TP / (TP + FP) = 4/7
my $prec = AI::ML::Expr::precision($y, $yatt);
float($prec, 0.571428571, "Right Precision");

# recall = TP / (TP + FN) = 4/8
my $rec = AI::ML::Expr::recall($y, $yatt);
float($rec, 0.5, "Right recall");

# f1 = harmonic mean of precision and recall = 8/15
my $f_1 = AI::ML::Expr::f1($y, $yatt);
float($f_1, 0.533333334, "Right f1");

# Emit the TAP plan after the fact, once the test count is known.
print "1..$nr_tests\n";

# TAP-style numeric comparison with an absolute tolerance of 1e-6.
# Bumps the file-scoped counter and prints one "ok"/"not ok" line.
sub float {
    $nr_tests++;
    my ($got, $expected, $explanation) = @_;
    my $within = abs($got - $expected) <= 0.000001;
    unless ($within) {
        print "not ";
        $explanation .= " ($got vs $expected)";
    }
    print "ok $nr_tests - $explanation\n";
}

# TAP-style exact numeric equality check.
# NOTE: unused in this script (only float() is called); kept for parity
# with the sibling test files.
sub is {
    $nr_tests++;
    my ($got, $expected, $explanation) = @_;
    my $equal = $got == $expected;
    unless ($equal) {
        print "not ";
        $explanation .= " ($got vs $expected)";
    }
    print "ok $nr_tests - $explanation\n";
}

t/07-neural-network.t  view on Meta::CPAN

#!perl

use Test2::V0 qw'is float done_testing';
use Math::Lapack::Matrix;
use AI::ML::NeuralNetwork;
use Math::Lapack;
use Data::Dumper;


# Fixed seed so any random initialisation is reproducible.
Math::Lapack->seed_rng(0);

# Training inputs and labels from CSV fixtures under t/.
my $x = Math::Lapack::Matrix::read_csv("t/x.csv");
my $y = Math::Lapack::Matrix::read_csv("t/y.csv");

# Network layout: 2 inputs -> 3 tanh hidden units -> 1 output unit,
# trained for 5000 iterations with learning rate 1.2.
my $NN = AI::ML::NeuralNetwork->new(
				[
								2,
								{func => "tanh", units => 3}, 
								1
				],
                n => 5000,
                alpha => 1.2

);

# Inject fixture weights BEFORE train() so the run is deterministic.
# NOTE(review): this pokes the object's internals ({l1}{w}, {l2}{w})
# directly — assumes the layer hash layout stays stable; confirm against
# AI::ML::NeuralNetwork.
$NN->{"l1"}{w} = Math::Lapack::Matrix::read_csv("t/w1.csv");
$NN->{"l2"}{w} = Math::Lapack::Matrix::read_csv("t/w2.csv");
my $pred = Math::Lapack::Matrix::read_csv("t/pred-nn.csv");

$NN->train($x, $y);

# prediction() stores its result on the object as {yatt}.
$NN->prediction($x);

is($NN->{yatt}->rows, $pred->rows, "Right number of rows");
is($NN->{yatt}->columns, $pred->columns, "Right number of columns");

# Element-wise comparison of row 0 against the stored predictions.
# NOTE(review): exact is() on matrix elements — presumably the predictions
# are hard 0/1 labels; confirm against the pred-nn.csv fixture.
for (0..$NN->{yatt}->columns-1) {
    is( $NN->{yatt}->get_element(0, $_),
        $pred->get_element(0, $_),
        "Right element on 0,$_"
    );
}

# Aggregate classification metrics on the training set.
_float($NN->accuracy($y), 0.98, "Right accuracy");
_float($NN->precision($y), 0.970588235294118, "Right precision");
_float($NN->recall($y), .99, "Right recall");
_float($NN->f1($y), 0.98019801980198, "Right F1");

done_testing();

# Very loose floating-point comparison (tolerance 0.1 — training outcomes
# vary across platforms).
sub _float {
    my ($got, $want, $label) = @_;
    is($got, float($want, tolerance => 0.1), $label);
}

 view all matches for this distribution
 view release on metacpan -  search on metacpan

( run in 2.237 seconds using v1.00-cache-2.02-grep-82fe00e-cpan-cec75d87357c )