AI-FANN
#define _sv2fann_errorfunc_enum(sv) _sv2enum(aTHX_ sv, FANN_ERRORFUNC_TANH, "fann_errorfunc_enum")
#define _sv2fann_stopfunc_enum(sv) _sv2enum(aTHX_ sv, FANN_STOPFUNC_BIT, "fann_stopfunc_enum")
#define _fann_train_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_TRAIN_NAMES, FANN_TRAIN_QUICKPROP, "fann_train_enum")
#define _fann_activationfunc_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_ACTIVATIONFUNC_NAMES, FANN_LINEAR_PIECE_SYMMETRIC, "fann_activationfunc_enum")
#define _fann_errorfunc_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_ERRORFUNC_NAMES, FANN_ERRORFUNC_TANH, "fann_errorfunc_enum")
#define _fann_stopfunc_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_STOPFUNC_NAMES, FANN_STOPFUNC_BIT, "fann_stopfunc_enum")
/* normalized names for train_data methods */
#define fann_train_data_create_from_file fann_read_train_from_file
#define fann_train_data_shuffle fann_shuffle_train_data
#define fann_train_data_scale_input fann_scale_input_train_data
#define fann_train_data_scale_output fann_scale_output_train_data
#define fann_train_data_scale fann_scale_train_data
#define fann_train_data_merge fann_merge_train_data
#define fann_train_data_subset fann_subset_train_data
#define fann_train_data_length fann_length_train_data
#define fann_train_data_num_input fann_num_input_train_data
#define fann_train_data_num_output fann_num_output_train_data
#define fann_train_data_save fann_save_train
MODULE = AI::FANN PACKAGE = AI::FANN PREFIX = fann_
PROTOTYPES: DISABLE
BOOT:
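/* disable FANN's default error logging to stderr; errors are
 * checked explicitly via _check_error and turned into Perl croaks */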
fann_set_error_log(0, 0);
void
_constants()
void
fann_train_on_file(self, filename, max_epochs, epochs_between_reports, desired_error)
struct fann *self;
const char *filename;
unsigned int max_epochs;
unsigned int epochs_between_reports;
double desired_error;
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
void
fann_train_on_data(self, data, max_epochs, epochs_between_reports, desired_error)
struct fann *self;
struct fann_train_data *data;
unsigned int max_epochs;
unsigned int epochs_between_reports;
double desired_error;
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
_check_error(aTHX_ (struct fann_error *)data);
void
fann_cascadetrain_on_file(self, filename, max_neurons, neurons_between_reports, desired_error)
struct fann *self;
const char *filename;
unsigned int max_neurons;
unsigned int neurons_between_reports;
double desired_error;
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
void
fann_cascadetrain_on_data(self, data, max_neurons, neurons_between_reports, desired_error)
struct fann *self;
struct fann_train_data *data;
unsigned int max_neurons;
unsigned int neurons_between_reports;
double desired_error;
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
_check_error(aTHX_ (struct fann_error *)data);
double
fann_train_epoch(self, data)
struct fann *self;
struct fann_train_data *data;
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
_check_error(aTHX_ (struct fann_error *)data);
void
fann_print_connections(self)
struct fann * self;
void
fann_print_parameters(self)
struct fann * self;
void
ST(i) = sv_2mortal(newSVuv(steepnesses[i]));
}
XSRETURN(count);
}
else {
ST(0) = sv_2mortal(newSVuv(count));
XSRETURN(1);
}
MODULE = AI::FANN PACKAGE = AI::FANN::TrainData PREFIX = fann_train_data_
struct fann_train_data *
fann_train_data_new_from_file(klass, filename)
SV *klass;
const char *filename;
CODE:
RETVAL = fann_train_data_create_from_file(filename);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)RETVAL);
struct fann_train_data *
fann_train_data_new_empty(klass, num_data, num_input, num_output)
SV *klass;
unsigned int num_data;
unsigned int num_input;
unsigned int num_output;
CODE:
RETVAL = fann_train_data_create(num_data, num_input, num_output);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)RETVAL);
void
fann_train_data_data(self, index, ...)
struct fann_train_data *self;
unsigned int index;
PREINIT:
AV *input;
AV *output;
unsigned int i;
PPCODE:
if (index >= self->num_data)
Perl_croak(aTHX_ "index %u is out of range", index);
switch (items) {
case 4:
input = _srv2av(aTHX_ ST(2), self->num_input, "input");
for (i = 0; i < self->num_input; i++) {
SV **svp = av_fetch(input, i, 0);
self->input[index][i] = SvNV(svp ? *svp : &PL_sv_undef);
}
output = _srv2av(aTHX_ ST(3), self->num_output, "output");
for (i = 0; i < self->num_output; i++) {
SV **svp = av_fetch(output, i, 0);
self->output[index][i] = SvNV(svp ? *svp : &PL_sv_undef);
}
ST(0) = &PL_sv_yes;
XSRETURN(1);
case 2:
if (GIMME_V == G_ARRAY) {
/* list context: return the input and output rows as array refs */
input = newAV();
for (i = 0; i < self->num_input; i++)
av_push(input, newSVnv(self->input[index][i]));
output = newAV();
for (i = 0; i < self->num_output; i++)
av_push(output, newSVnv(self->output[index][i]));
ST(0) = sv_2mortal(newRV_noinc((SV*)input));
ST(1) = sv_2mortal(newRV_noinc((SV*)output));
XSRETURN(2);
}
else {
ST(0) = &PL_sv_yes;
XSRETURN(1);
}
break;
default:
Perl_croak(aTHX_ "Usage: AI::FANN::TrainData::data(self, index [, input, output])");
}
struct fann_train_data *
fann_train_data_new(klass, input, output, ...)
SV *klass;
AV *input;
AV *output;
PREINIT:
unsigned int num_data;
unsigned int num_input;
unsigned int num_output;
unsigned int i;
CODE:
if (!(items & 1)) {
Perl_croak(aTHX_ "wrong number of arguments in constructor");
}
num_data = items >> 1;
num_input = av_len(input) + 1;
if (!num_input)
Perl_croak(aTHX_ "input array is empty");
num_output = av_len(output) + 1;
if (!num_output)
Perl_croak(aTHX_ "output array is empty");
RETVAL = fann_train_data_create(num_data, num_input, num_output);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)RETVAL);
/* we fill the object at CLEANUP time so that it is properly
 * freed if any of the conversions below croak */
if (RETVAL) {
for (i = 0; i < num_data; i++) {
unsigned int j;
input = _srv2av(aTHX_ ST(1 + i * 2), num_input, "input");
for (j = 0; j < num_input; j++) {
SV **svp = av_fetch(input, j, 0);
RETVAL->input[i][j] = SvNV(svp ? *svp : &PL_sv_undef);
}
output = _srv2av(aTHX_ ST(2 + i * 2), num_output, "output");
for (j = 0; j < num_output; j++) {
SV **svp = av_fetch(output, j, 0);
RETVAL->output[i][j] = SvNV(svp ? *svp : &PL_sv_undef);
}
}
}
void
fann_train_data_DESTROY(self)
struct fann_train_data * self;
CODE:
fann_destroy_train(self);
sv_unmagic(SvRV(ST(0)), '~');
void
fann_train_data_shuffle(self)
struct fann_train_data *self;
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
void
fann_train_data_scale_input(self, new_min, new_max)
struct fann_train_data *self;
fann_type new_min;
fann_type new_max;
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
void
fann_train_data_scale_output(self, new_min, new_max)
struct fann_train_data *self;
fann_type new_min;
fann_type new_max;
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
void
fann_train_data_scale(self, new_min, new_max)
struct fann_train_data *self;
fann_type new_min;
fann_type new_max;
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
struct fann_train_data*
fann_train_data_subset(self, pos, length)
struct fann_train_data *self;
unsigned int pos;
unsigned int length;
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
_check_error(aTHX_ (struct fann_error *)RETVAL);
INCLUDE: accessors.xsh
Makefile.PL
MANIFEST
ppport.h
README
t/AI-FANN.t
t/pods.t
lib/AI/FANN.pm
samples/ox.pl
samples/add.pl
samples/xor.pl
META.yml Module meta-data (added by MakeMaker)
accessors.xsh view on Meta::CPAN
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
MODULE = AI::FANN PACKAGE = AI::FANN::TrainData PREFIX = accessor_
unsigned int
accessor_num_inputs(self)
struct fann_train_data * self;
CODE:
RETVAL = fann_train_data_num_input(self);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
MODULE = AI::FANN PACKAGE = AI::FANN::TrainData PREFIX = accessor_
unsigned int
accessor_num_outputs(self)
struct fann_train_data * self;
CODE:
RETVAL = fann_train_data_num_output(self);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
MODULE = AI::FANN PACKAGE = AI::FANN::TrainData PREFIX = accessor_
unsigned int
accessor_length(self)
struct fann_train_data * self;
CODE:
RETVAL = fann_train_data_length(self);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
genaccessors view on Meta::CPAN
#!/usr/bin/perl
use strict;
use warnings;
$| = 1;
my %struct = ( 'AI::FANN' => 'struct fann *',
'AI::FANN::TrainData' => 'struct fann_train_data *' );
sub accessor {
my ($name, $type, $getter, $setter, @ixs) = @_;
my ($package, $method) = $name =~ /^(?:(.*)::)?(.*)$/
or die "wrong accessor name $name";
$package = $package ? "AI::FANN::$package" : 'AI::FANN';
my $struct = $struct{$package}
or die "wrong package name $package";
genaccessors view on Meta::CPAN
output_activation_function, enum fann_activationfunc_enum, , fann_set_activation_function_output
neuron_activation_steepness, fann_type, fann_get_activation_steepness, fann_set_activation_steepness, value, layer, neuron
layer_activation_steepness, fann_type, , fann_set_activation_steepness_layer, value, layer
hidden_activation_steepness, fann_type, , fann_set_activation_steepness_hidden
output_activation_steepness, fann_type, , fann_set_activation_steepness_output
layer_num_neurons, unsigned int, fann_get_num_neurons, , layer
num_layers, unsigned int, fann_get_num_layers
# neuron, struct fann_neuron *, fann_get_neuron, , layer, neuron_index
TrainData::num_inputs, unsigned int, fann_train_data_num_input
TrainData::num_outputs, unsigned int, fann_train_data_num_output
TrainData::length, unsigned int, fann_train_data_length
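# The specs above expand into XS accessors that surface as plain Perl
# methods; a minimal usage sketch (assuming $train is an
# AI::FANN::TrainData instance):
#
#   my $n_in  = $train->num_inputs;
#   my $n_out = $train->num_outputs;
#   my $len   = $train->length;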
lib/AI/FANN.pm view on Meta::CPAN
use AI::FANN qw(:all);
# create an ANN with 2 inputs, a hidden layer with 3 neurons and an
# output layer with 1 neuron:
my $ann = AI::FANN->new_standard(2, 3, 1);
$ann->hidden_activation_function(FANN_SIGMOID_SYMMETRIC);
$ann->output_activation_function(FANN_SIGMOID_SYMMETRIC);
# create the training data for a XOR operator:
my $xor_train = AI::FANN::TrainData->new( [-1, -1], [-1],
[-1, 1], [1],
[1, -1], [1],
[1, 1], [-1] );
$ann->train_on_data($xor_train, 500000, 1000, 0.001);
$ann->save("xor.ann");
Run...
use AI::FANN;
my $ann = AI::FANN->new_from_file("xor.ann");
for my $a (-1, 1) {
lib/AI/FANN.pm view on Meta::CPAN
MAY CONTAIN CRITICAL BUGS!!!
AI::FANN is a Perl wrapper for the Fast Artificial Neural Network
(FANN) Library available from L<http://fann.sourceforge.net>:
Fast Artificial Neural Network Library is a free open source neural
network library, which implements multilayer artificial neural
networks in C with support for both fully connected and sparsely
connected networks. Cross-platform execution in both fixed and
floating point are supported. It includes a framework for easy
handling of training data sets. It is easy to use, versatile, well
documented, and fast. PHP, C++, .NET, Python, Delphi, Octave, Ruby,
Pure Data and Mathematica bindings are available. A reference manual
accompanies the library with examples and recommendations on how to
use the library. A graphical user interface is also available for
the library.
The AI::FANN object-oriented interface provides an almost direct
mapping to the C library API. Some differences have been introduced
to make it more perlish:
=over 4
=item *
Two classes are used: C<AI::FANN> that wraps the C C<struct fann> type
and C<AI::FANN::TrainData> that wraps C<struct fann_train_data>.
=item *
Prefixes and common parts of the C function names referring to those
structures have been removed. For instance, the C function
C<fann_train_data_shuffle> becomes C<AI::FANN::TrainData::shuffle>,
which will usually be called as:
$train_data->shuffle;
=item *
Pairs of C get/set functions are wrapped in Perl with dual accessor
methods named after the attribute (without any C<set_>/C<get_>
prefix). For instance:
$ann->bit_fail_limit($limit); # sets the bit_fail_limit
$bfl = $ann->bit_fail_limit; # gets the bit_fail_limit
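The same convention applies to every wrapped get/set pair; for
instance (a minimal sketch, assuming C<learning_rate> follows the
same dual-accessor pattern):

    $ann->learning_rate(0.7);          # set
    my $rate = $ann->learning_rate;    # get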
lib/AI/FANN.pm view on Meta::CPAN
It returns an array with the values of the output layer.
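For example (a sketch following the bundled samples, where the input
is passed as an array reference):

    my @out = $ann->run([1, -1]);
    printf "output: %f\n", $out[0];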
=item $ann->reset_MSE
-
=item $ann->train_on_file($filename, $max_epochs, $epochs_between_reports, $desired_error)
-
=item $ann->train_on_data($train_data, $max_epochs, $epochs_between_reports, $desired_error)
C<$train_data> is an AI::FANN::TrainData object.
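For example, with the XOR training set from the SYNOPSIS:

    $ann->train_on_data($xor_train, 500000, 1000, 0.001);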
=item $ann->cascadetrain_on_file($filename, $max_neurons, $neurons_between_reports, $desired_error)
-
=item $ann->cascadetrain_on_data($train_data, $max_neurons, $neurons_between_reports, $desired_error)
C<$train_data> is an AI::FANN::TrainData object.
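For example (a sketch; up to 15 candidate neurons, reporting after
each one is added):

    $ann->cascadetrain_on_data($xor_train, 15, 1, 0.001);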
=item $ann->train_epoch($train_data)
C<$train_data> is an AI::FANN::TrainData object.
=item $ann->print_connections
-
=item $ann->print_parameters
-
=item $ann->cascade_activation_functions()
lib/AI/FANN.pm view on Meta::CPAN
returns the number of neurons in layer C<$layer_index>.
=item $ann->num_neurons
returns a list with the number of neurons in every layer.
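For instance (a minimal sketch; C<layer_num_neurons> is the per-layer
accessor from the generated accessor specs):

    my @per_layer = $ann->num_neurons;          # one entry per layer
    my $hidden    = $ann->layer_num_neurons(1);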
=back
=head2 AI::FANN::TrainData
Wraps the C C<struct fann_train_data> type and provides the following methods:
=over 4
=item AI::FANN::TrainData->new_from_file($filename)
-
=item AI::FANN::TrainData->new($input1, $output1 [, $input2, $output2, ...])
C<$inputx> and C<$outputx> are array references containing the values
of the input and output layers respectively.
=item AI::FANN::TrainData->new_empty($num_data, $num_inputs, $num_outputs)
returns a new AI::FANN::TrainData object of the sizes indicated by
the arguments. The initial values of the data contained inside the
object are random and should be set before using the object to train
an ANN.
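For instance, building the XOR training set row by row with the
C<data> mutator documented below (a minimal sketch):

    my $train = AI::FANN::TrainData->new_empty(4, 2, 1);
    $train->data(0, [-1, -1], [-1]);
    $train->data(1, [-1,  1], [ 1]);
    $train->data(2, [ 1, -1], [ 1]);
    $train->data(3, [ 1,  1], [-1]);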
=item $train->data($index)
returns two array references with the values of the input and output
layers respectively for that index.
=item $train->data($index, $input, $output)
C<$input> and C<$output> are two array references.
The input and output layers at index C<$index> are set to the values
in these arrays.
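For example (a sketch; in list context the two rows come back as
array references):

    my ($in, $out) = $train->data(0);
    print "@$in -> @$out\n";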
=item $train->shuffle
-
static fann_type **
allocvv(unsigned int n1, unsigned int n2) {
/* one block of row pointers over one contiguous value buffer */
fann_type **ptr = (fann_type **)malloc(n1 * sizeof(*ptr));
fann_type *v = (fann_type *)malloc(n1 * n2 * sizeof(*v));
if (ptr && v) {
unsigned int i;
for (i = 0; i < n1; i++) {
ptr[i] = v + i * n2;
}
return ptr;
}
free(ptr);
free(v);
return 0;
}
struct fann_train_data *
fann_train_data_create(unsigned int num_data, unsigned int num_input, unsigned int num_output) {
struct fann_train_data *data = (struct fann_train_data *)calloc(1, sizeof(*data));
if (data) {
fann_init_error_data((struct fann_error *)data);
data->input = allocvv(num_data, num_input);
data->output = allocvv(num_data, num_output);
if (data->input && data->output) {
data->num_data = num_data;
data->num_input = num_input;
data->num_output = num_output;
return data;
}
}
return 0;
}
void
fann_train_data_set(struct fann_train_data *data, unsigned int ix,
fann_type *input, fann_type *output ) {
if (ix < data->num_data) {
memcpy(data->input[ix], input, data->num_input * sizeof(fann_type));
memcpy(data->output[ix], output, data->num_output * sizeof(fann_type));
}
else {
fann_error((struct fann_error *)data, FANN_E_INDEX_OUT_OF_BOUND, ix);
}
}
/*
enum fann_activationfunc_enum
fann_get_activation_function(struct fann *ann, unsigned int layer, int neuron_index) {
struct fann_neuron *neuron = fann_get_neuron(ann, layer, neuron_index);
if (neuron) {
return neuron->activation_function;
}
#include <doublefann.h>
void
fann_train_data_set(struct fann_train_data *data, unsigned int ix,
fann_type *input, fann_type *output );
struct fann_train_data *
fann_train_data_create(unsigned int num_data,
unsigned int num_input, unsigned int num_output);
struct fann_layer*
fann_get_layer(struct fann *ann, int layer);
struct fann_neuron*
fann_get_neuron_layer(struct fann *ann, struct fann_layer* layer, int neuron);
struct fann_neuron*
PERL_MAGIC_glob|5.009005||p
PERL_MAGIC_isaelem|5.007002||p
PERL_MAGIC_isa|5.007002||p
PERL_MAGIC_mutex|5.009005||p
PERL_MAGIC_nkeys|5.007002||p
PERL_MAGIC_overload_elem|5.007002||p
PERL_MAGIC_overload_table|5.007002||p
PERL_MAGIC_overload|5.007002||p
PERL_MAGIC_pos|5.007002||p
PERL_MAGIC_qr|5.007002||p
PERL_MAGIC_regdata|5.007002||p
PERL_MAGIC_regdatum|5.007002||p
PERL_MAGIC_regex_global|5.007002||p
PERL_MAGIC_shared_scalar|5.007003||p
PERL_MAGIC_shared|5.007003||p
PERL_MAGIC_sigelem|5.007002||p
PERL_MAGIC_sig|5.007002||p
PERL_MAGIC_substr|5.007002||p
PERL_MAGIC_sv|5.007002||p
PERL_MAGIC_taint|5.007002||p
PERL_MAGIC_tiedelem|5.007002||p
ZeroD|5.009002||p
Zero|||
_aMY_CXT|5.007003||p
_pMY_CXT|5.007003||p
aMY_CXT_|5.007003||p
aMY_CXT|5.007003||p
aTHXR_|5.009005||p
aTHXR|5.009005||p
aTHX_|5.006000||p
aTHX|5.006000||p
add_data|||n
addmad|||
allocmy|||
amagic_call|||
amagic_cmp_locale|||
amagic_cmp|||
amagic_i_ncmp|||
amagic_ncmp|||
any_dup|||
ao|||
append_elem|||
magic_getsubstr|||
magic_gettaint|||
magic_getuvar|||
magic_getvec|||
magic_get|||
magic_killbackrefs|||
magic_len|||
magic_methcall|||
magic_methpack|||
magic_nextpack|||
magic_regdata_cnt|||
magic_regdatum_get|||
magic_regdatum_set|||
magic_scalarpack|||
magic_set_all_env|||
magic_setamagic|||
magic_setarylen|||
magic_setbm|||
magic_setcollxfrm|||
magic_setdbline|||
magic_setdefelem|||
#endif
/* Older perls (<=5.003) lack AvFILLp */
#ifndef AvFILLp
# define AvFILLp AvFILL
#endif
#ifndef ERRSV
# define ERRSV get_sv("@",FALSE)
#endif
#ifndef newSVpvn
# define newSVpvn(data,len) ((data) \
? ((len) ? newSVpv((data), (len)) : newSVpv("", 0)) \
: newSV(0))
#endif
/* Hint: gv_stashpvn
* This function's backport doesn't support the length parameter, but
* rather ignores it. Portability can only be ensured if the length
* parameter is used for speed reasons, but the length can always be
* correctly computed from the string argument.
*/
#ifndef gv_stashpvn
PL_hints = oldhints;
PL_curcop->cop_stash = old_cop_stash;
PL_curstash = old_curstash;
PL_curcop->cop_line = oldline;
}
#endif
#endif
/*
* Boilerplate macros for initializing and accessing interpreter-local
* data from C. All statics in extensions should be reworked to use
* this, if you want to make the extension thread-safe. See ext/re/re.xs
* for an example of the use of these macros.
*
* Code that uses these macros is responsible for the following:
* 1. #define MY_CXT_KEY to a unique string, e.g. "DynaLoader_guts"
* 2. Declare a typedef named my_cxt_t that is a structure that contains
* all the data that needs to be interpreter-local.
* 3. Use the START_MY_CXT macro after the declaration of my_cxt_t.
* 4. Use the MY_CXT_INIT macro such that it is called exactly once
* (typically put in the BOOT: section).
* 5. Use the members of the my_cxt_t structure everywhere as
* MY_CXT.member.
* 6. Use the dMY_CXT macro (a declaration) in all the functions that
* access MY_CXT.
*/
#if defined(MULTIPLICITY) || defined(PERL_OBJECT) || \
defined(PERL_CAPI) || defined(PERL_IMPLICIT_CONTEXT)
#ifndef START_MY_CXT
/* This must appear in all extensions that define a my_cxt_t structure,
* right after the definition (i.e. at file scope). The non-threads
* case below uses it to declare the data as static. */
#define START_MY_CXT
#if (PERL_BCDVERSION < 0x5004068)
/* Fetches the SV that keeps the per-interpreter data. */
#define dMY_CXT_SV \
SV *my_cxt_sv = get_sv(MY_CXT_KEY, FALSE)
#else /* >= perl5.004_68 */
#define dMY_CXT_SV \
SV *my_cxt_sv = *hv_fetch(PL_modglobal, MY_CXT_KEY, \
sizeof(MY_CXT_KEY)-1, TRUE)
#endif /* < perl5.004_68 */
/* This declaration should be used within all functions that use the
* interpreter-local data. */
#define dMY_CXT \
dMY_CXT_SV; \
my_cxt_t *my_cxtp = INT2PTR(my_cxt_t*,SvUV(my_cxt_sv))
/* Creates and zeroes the per-interpreter data.
* (We allocate my_cxtp in a Perl SV so that it will be released when
* the interpreter goes away.) */
#define MY_CXT_INIT \
dMY_CXT_SV; \
/* newSV() allocates one more than needed */ \
my_cxt_t *my_cxtp = (my_cxt_t*)SvPVX(newSV(sizeof(my_cxt_t)-1));\
Zero(my_cxtp, 1, my_cxt_t); \
sv_setuv(my_cxt_sv, PTR2UV(my_cxtp))
/* This macro must be used to access members of the my_cxt_t structure.
* e.g. MYCXT.some_data */
#define MY_CXT (*my_cxtp)
/* Judicious use of these macros can reduce the number of times dMY_CXT
* is used. Use is similar to pTHX, aTHX etc. */
#define pMY_CXT my_cxt_t *my_cxtp
#define pMY_CXT_ pMY_CXT,
#define _pMY_CXT ,pMY_CXT
#define aMY_CXT my_cxtp
#define aMY_CXT_ aMY_CXT,
#define _aMY_CXT ,aMY_CXT
#endif /* START_MY_CXT */
#ifndef MY_CXT_CLONE
/* Clones the per-interpreter data. */
#define MY_CXT_CLONE \
dMY_CXT_SV; \
my_cxt_t *my_cxtp = (my_cxt_t*)SvPVX(newSV(sizeof(my_cxt_t)-1));\
Copy(INT2PTR(my_cxt_t*, SvUV(my_cxt_sv)), my_cxtp, 1, my_cxt_t);\
sv_setuv(my_cxt_sv, PTR2UV(my_cxtp))
#endif
#else /* single interpreter */
#ifndef START_MY_CXT
#endif
#ifndef PERL_MAGIC_overload_table
# define PERL_MAGIC_overload_table 'c'
#endif
#ifndef PERL_MAGIC_bm
# define PERL_MAGIC_bm 'B'
#endif
#ifndef PERL_MAGIC_regdata
# define PERL_MAGIC_regdata 'D'
#endif
#ifndef PERL_MAGIC_regdatum
# define PERL_MAGIC_regdatum 'd'
#endif
#ifndef PERL_MAGIC_env
# define PERL_MAGIC_env 'E'
#endif
if (radix && IN_LOCALE) {
STRLEN len = strlen(radix);
if (*sp + len <= send && memEQ(*sp, radix, len)) {
*sp += len;
return TRUE;
}
}
#endif
#endif /* USE_LOCALE_NUMERIC */
/* always try "." if numeric radix didn't match because
* we may have data from different locales mixed */
if (*sp < send && **sp == '.') {
++*sp;
return TRUE;
}
return FALSE;
}
#endif
#endif
#ifndef grok_number
samples/add.pl view on Meta::CPAN
$ann->hidden_activation_function(FANN_SIGMOID_SYMMETRIC);
$ann->output_activation_function(FANN_SIGMOID_SYMMETRIC);
my $train = AI::FANN::TrainData->new_empty($n, 2, 1);
for (0..$n-1) {
my $a = rand(2) - 1;
my $b = rand(2) - 1;
my $c = 0.5 * ($a + $b);
$train->data($_, [$a, $b], [$c]);
}
$ann->train_on_data($train, 25000, 250, 0.00001);
$ann->save("add.ann");
}
elsif ($ARGV[0] eq 'test') {
my $ann = AI::FANN->new_from_file("add.ann");
for (1..10) {
my $a = rand(2) - 1;
my $b = rand(2) - 1;
samples/ox.pl view on Meta::CPAN
my $r = int (0.2 + rand(0.25 * $size));
my $x0 = $r + int rand($size - 2 * $r);
my $y0 = $r + int rand($size - 2 * $r);
$im->line($x0-$r, $y0-$r, $x0+$r, $y0+$r, $color);
$im->line($x0-$r, $y0+$r, $x0+$r, $y0-$r, $color);
return 2*$r/$size
}
sub image_to_input {
my ($im, $type) = @_;
my @data;
for my $x (0..$size-1) {
for my $y (0..$size-1) {
push @data, $im->getPixel($x, $y);
}
}
return \@data;
}
sub make_train {
my $train = AI::FANN::TrainData->new_empty($num, $size * $size, 2);
for (0..$num - 1) {
print ".";
my $im = GD::Image->new($size, $size);
my $white = $im->colorAllocate(255,255,255);
my $black = $im->colorAllocate(0,0,0);
my $type = (rand > .5);
my $r = $type ? draw_x($im, $black) : draw_o($im, $black);
$train->data($_, image_to_input($im), [$type, $r]);
}
print "\n";
my $ann = AI::FANN->new_standard(@_);
for (1..40) {
$ann->train_on_data($train, 100, 1, 0.0001);
# $ann->print_connections;
$ann->print_parameters;
$ann->save("ox.ann");
}
}
sub make_test {
my $rep = shift;
my $ann = AI::FANN->new_from_file("ox.ann");
print "ann read\n";
samples/xor.pl view on Meta::CPAN
if ($ARGV[0] eq 'train') {
# create an ANN with 2 inputs, a hidden layer with 3 neurons and an
# output layer with 1 neuron:
my $ann = AI::FANN->new_standard(2, 3, 1);
$ann->hidden_activation_function(FANN_SIGMOID_SYMMETRIC);
$ann->output_activation_function(FANN_SIGMOID_SYMMETRIC);
# create the training data for a XOR operator:
my $xor_train = AI::FANN::TrainData->new( [-1, -1], [-1],
[-1, 1], [1],
[1, -1], [1],
[1, 1], [-1] );
$ann->train_on_data($xor_train, 500000, 100, 0.0001);
$ann->save("xor.ann");
}
elsif ($ARGV[0] eq 'test') {
my $ann = AI::FANN->new_from_file("xor.ann");
for my $a (-1, 1) {
for my $b (-1, 1) {
TYPEMAP
struct fann * T_PTROBJ_MAGIC
struct fann_train_data * T_PTROBJ_MAGIC
fann_type T_DOUBLE
fta_input T_FTA_INPUT
fta_output T_FTA_OUTPUT
enum fann_train_enum T_UV
enum fann_stopfunc_enum T_UV
enum fann_errorfunc_enum T_UV
enum fann_activationfunc_enum T_UV