AI-LibNeural
view release on metacpan or search on metacpan
LibNeural.pm view on Meta::CPAN
# Module bootstrap: Exporter provides symbol export, DynaLoader loads the
# compiled XS portion of AI::LibNeural, and AutoLoader defers loading of
# any subs split out below __END__.
require Exporter;
require DynaLoader;
use AutoLoader;
our @ISA = qw(Exporter DynaLoader);
# This allows declaration use AI::LibNeural ':all';
# The :all tag exports the four layer-selector constants used by
# get_layersize(): ALL, HIDDEN, INPUT, OUTPUT (resolved via the XS
# constant() lookup through AUTOLOAD).
our %EXPORT_TAGS = ( 'all' => [ qw(
ALL
HIDDEN
INPUT
OUTPUT
) ] );
# Everything in :all may be imported on request ...
our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );
# ... but nothing is exported by default.
our @EXPORT = qw(
);
our $VERSION = '0.02';
sub AUTOLOAD {
LibNeural.pm view on Meta::CPAN
=item $nn = AI::LibNeural->new()
Creates an empty AI::LibNeural object, should only be used when the load method
will be called soon after.
=item $nn = AI::LibNeural->new(FILENAME)
Creates a new AI::LibNeural object from the supplied memory file.
=item $nn = AI::LibNeural->new(INPUTS,HIDDENS,OUTPUTS)
Creates a new AI::LibNeural object with INPUTS input nodes, HIDDENS hidden
nodes, and OUTPUTS output nodes.
=item $nn->train([I1,I2,...],[O1,O2,...],MINERR,TRAINRATE)
Completes a training cycle for the given inputs I1-IN, with the expected
results of O1-OM, where N is the number of inputs and M is the number of
outputs. MINERR is the mean squared error at the output that you wish to be achieved. TRAINRATE is the learning rate to be used.
=item (O1,O2) = $nn->run([I1,I2,...])
Calculates the corresponding outputs (O1-OM) for the given inputs (I1-IN) based
on the previous training. Should only be called after the network has been
suitably trained.
=item NUM = $nn->get_layersize(WHICH)
Retrieves the number of nodes at the specified layer, WHICH. WHICH should be
one of ALL, INPUT, HIDDEN, OUTPUT. Useful mainly when a network is loaded
from a file.
=item status = $nn->load(FILENAME)
=item status = $nn->save(FILENAME)
Loads and saves, respectively, the 'memory' (node configuration and weights)
of the network. FILENAME should be the location of the file in which the
memory is stored/retrieved.
LibNeural.pm view on Meta::CPAN
=over
=item all
=over
=item ALL
The total number of nodes on all three layers
=item INPUT
The number of nodes on the input layer
=item HIDDEN
The number of nodes on the hidden layer
=item OUTPUT
The number of nodes on the output layer
=back
=back
=head1 AUTHOR
Ross McFarland E<lt>rmcfarla at neces dot comE<gt>
LibNeural.xs view on Meta::CPAN
}
case 'H':
if (strEQ(name + 0, "HIDDEN")) { /* removed */
#ifdef HIDDEN
return HIDDEN;
#else
goto not_there;
#endif
}
case 'I':
if (strEQ(name + 0, "INPUT")) { /* removed */
#ifdef INPUT
return INPUT;
#else
goto not_there;
#endif
}
case 'O':
if (strEQ(name + 0, "OUTPUT")) { /* removed */
#ifdef OUTPUT
return OUTPUT;
#else
goto not_there;
#endif
}
}
errno = EINVAL;
return 0;
not_there:
errno = ENOENT;
LibNeural.xs view on Meta::CPAN
return array;
}
MODULE = AI::LibNeural PACKAGE = AI::LibNeural
# XS glue for the symbolic constants (ALL/INPUT/HIDDEN/OUTPUT): extracts the
# constant's name string and length from the Perl SV and delegates to the C
# constant() lookup above.  Called from the module's AUTOLOAD.
double
constant(sv,arg)
PREINIT:
STRLEN len;
INPUT:
SV * sv
char * s = SvPV(sv, len);
int arg
CODE:
RETVAL = constant(s,len,arg);
OUTPUT:
RETVAL
nnwork *
nnwork::new (...)
PREINIT:
char * filename;
int inputs;
int hiddens;
int outputs;
CODE:
LibNeural.xs view on Meta::CPAN
else if( items == 4 )
{
/* given node counts */
int inputs = (int)SvIV(ST(1));
int hiddens = (int)SvIV(ST(2));
int outputs = (int)SvIV(ST(3));
RETVAL = new nnwork(inputs, hiddens, outputs);
}
else
Perl_croak(aTHX_ "Usage: Neural::new([ins, hids, outs])");
OUTPUT:
RETVAL
# $nn->get_layersize(WHICH): returns the node count of the selected layer.
# WHICH is one of the exported constants ALL, INPUT, HIDDEN, OUTPUT; the
# call maps straight onto the C++ nnwork::get_layersize method.
int
nnwork::get_layersize (which)
int which
# $nn->train(\@ins, \@outs, MINERR, TRAINRATE): one training cycle.
# The Perl array refs are unpacked into C float arrays sized from the
# network's own input/output layer sizes, then handed to the C++
# nnwork::train.  Returns nothing.
void
nnwork::train (ins, outs, minerr, trainrate)
SV * ins
SV * outs
float minerr
float trainrate
PREINIT:
int nin;
int nout;
float * ains;
float * aouts;
CODE:
/* Expected element counts come from the network itself, not from the
 * caller-supplied arrays, so short arrays are padded/truncated by the
 * conversion helper. */
nin = THIS->get_layersize(INPUT);
nout = THIS->get_layersize(OUTPUT);
ains = svpvav_to_float_array(ins, nin);
aouts = svpvav_to_float_array(outs, nout);
THIS->train(ains, aouts, minerr, trainrate);
/* svpvav_to_float_array may return NULL on failure; guard the frees. */
if( ains ) free(ains);
if( aouts ) free(aouts);
void
nnwork::run (ins)
SV * ins
PREINIT:
int i;
int nin;
int nout;
float * ains;
float * aouts;
PPCODE:
nin = THIS->get_layersize(INPUT);
nout = THIS->get_layersize(OUTPUT);
ains = svpvav_to_float_array(ins, nin);
aouts = (float*)malloc(nout * sizeof(float));
if( aouts == NULL )
XSRETURN_UNDEF;
THIS->run(ains, aouts);
EXTEND(SP, nout);
t/00.AILibNeural.t view on Meta::CPAN
use strict;
use Test::More tests => 31;
BEGIN { use_ok('AI::LibNeural', ':all') };
#########################
my $nn;
ok( $nn = AI::LibNeural->new( 2, 4, 1 ) );
ok( $nn =~ m/AI::LibNeural=SCALAR(.*)/ );
ok( ALL == 0 );
ok( INPUT == 1 );
ok( HIDDEN == 2 );
ok( OUTPUT == 3 );
ok( $nn->get_layersize(ALL) == 7 );
ok( $nn->get_layersize(INPUT) == 2 );
ok( $nn->get_layersize(HIDDEN) == 4 );
ok( $nn->get_layersize(OUTPUT) == 1 );
for( my $i = 0; $i < 20; $i++ )
{
$nn->train( [ 0, 0 ], [ 0.05 ], 0.0000000005, 0.2 );
$nn->train( [ 0, 1 ], [ 0.05 ], 0.0000000005, 0.2 );
$nn->train( [ 1, 0 ], [ 0.05 ], 0.0000000005, 0.2 );
$nn->train( [ 1, 1 ], [ 0.95 ], 0.0000000005, 0.2 );
}
ok(1);
ok( $nn->run( [ 0, 0 ] ) < 0.5 );
ok( $nn->run( [ 0, 1 ] ) < 0.5 );
t/01.AILibNeuralChild.t view on Meta::CPAN
package AI::LibNeural::Child;
our @ISA = qw(AI::LibNeural);
package main;
my $nn;
ok( $nn = AI::LibNeural::Child->new( 2, 4, 1 ) );
ok( $nn =~ m/AI::LibNeural::Child=SCALAR(.*)/ );
ok( ALL == 0 );
ok( INPUT == 1 );
ok( HIDDEN == 2 );
ok( OUTPUT == 3 );
ok( $nn->get_layersize(ALL) == 7 );
ok( $nn->get_layersize(INPUT) == 2 );
ok( $nn->get_layersize(HIDDEN) == 4 );
ok( $nn->get_layersize(OUTPUT) == 1 );
for( my $i = 0; $i < 20; $i++ )
{
$nn->train( [ 0, 0 ], [ 0.05 ], 0.0000000005, 0.2 );
$nn->train( [ 0, 1 ], [ 0.05 ], 0.0000000005, 0.2 );
$nn->train( [ 1, 0 ], [ 0.05 ], 0.0000000005, 0.2 );
$nn->train( [ 1, 1 ], [ 0.95 ], 0.0000000005, 0.2 );
}
ok(1);
ok( $nn->run( [ 0, 0 ] ) < 0.5 );
ok( $nn->run( [ 0, 1 ] ) < 0.5 );
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307 USA.
#
TYPEMAP
float T_NV
float * T_PV
nnwork * O_OBJECT
OUTPUT
O_OBJECT
sv_setref_pv( $arg, CLASS, (void*)$var );
INPUT
O_OBJECT
if( sv_isobject($arg) && (SvTYPE(SvRV($arg)) == SVt_PVMG) )
$var = ($type)SvIV((SV*)SvRV( $arg ));
else{
warn( \"${Package}::$func_name() -- $var is not a blessed SV reference\" );
XSRETURN_UNDEF;
}
( run in 0.388 second using v1.01-cache-2.11-cpan-4e96b696675 )