/* AI::FANN -- Perl XS bindings for the Fast Artificial Neural Network
 * (FANN) library. (Stray metacpan page text removed from this header.) */
return fta;
}
/* Raise a Perl exception if `self` carries a pending FANN error.
 *
 * self  - any FANN object cast to its embedded error header, or NULL
 *         when a constructor returned nothing.
 *
 * On error, the message is copied into $@ (ERRSV) and croak() is
 * invoked with Nullch, which re-throws the $@ content unchanged.
 * Does nothing when self is non-NULL and error-free.
 */
static void
_check_error(pTHX_ struct fann_error *self) {
    if (self) {
        if (fann_get_errno(self) != FANN_E_NO_ERROR) {
            STRLEN len = strlen(self->errstr);
            /* Set $@ in place instead of assigning a fresh SV through the
             * ERRSV macro (the old code leaked the previous $@ SV).
             * Trim the trailing ".\n" FANN appends, guarding against
             * messages too short to trim (size_t underflow otherwise). */
            sv_setpvn(ERRSV, self->errstr, len > 2 ? len - 2 : len);
            /* NOTE(review): fann_get_errstr() is called for its side effect
             * of resetting the object's errno/errstr -- confirm against the
             * FANN reference manual for the linked version. */
            fann_get_errstr(self);
            Perl_croak(aTHX_ Nullch);
        }
    }
    else {
        Perl_croak(aTHX_ "Constructor failed");
    }
}
/* Convert a Perl SV holding an enum ordinal to its C value.
 *
 * sv   - scalar expected to contain a non-negative integer
 * top  - highest valid ordinal for this enum
 * name - enum name, used only in the error message
 *
 * Croaks when the value exceeds `top`; returns the value otherwise.
 */
static unsigned int
_sv2enum(pTHX_ SV *sv, unsigned int top, char * const name) {
    unsigned int value = SvUV(sv);
    if (value > top) {
        /* %u matches the unsigned argument; the old %d was a
         * format/argument type mismatch (undefined behavior). */
        Perl_croak(aTHX_ "value %u is out of range for %s", value, name);
    }
    return value;
}
/* Convert a C enum value to a dual-valued Perl SV.
 *
 * value - enum ordinal to convert
 * names - table of string names indexed by ordinal
 * top   - highest valid ordinal (bounds-checks `names` access)
 * name  - enum name, used only in the internal-error message
 *
 * The returned SV is a "dualvar": it stringifies to names[value] and
 * numifies to the unsigned ordinal, so Perl code can compare it either
 * way. Croaks on out-of-range input (caller bug, not user error).
 */
static SV *
_enum2sv(pTHX_ unsigned int value, char const * const * const names, unsigned int top, char const * const name) {
    SV *sv;
    if (value > top) {
        /* %u matches the unsigned argument; the old %d was a
         * format/argument type mismatch (undefined behavior). */
        Perl_croak(aTHX_ "internal error: value %u out of range for %s", value, name);
    }
    sv = newSVpv(names[value], 0);
    SvUPGRADE(sv, SVt_PVIV);   /* add an integer slot alongside the string */
    SvUV_set(sv, value);
    SvIOK_on(sv);
    SvIsUV_on(sv);             /* mark the numeric slot as unsigned */
    return sv;
}
/* Range-checked SV<->enum conversions. Each upper bound is the last
 * member of the corresponding FANN enum -- these must be updated if a
 * newer libfann adds enum members. The *2sv direction produces a
 * dual-valued SV (string name + unsigned ordinal); see _enum2sv. */
#define _sv2fann_train_enum(sv) _sv2enum(aTHX_ sv, FANN_TRAIN_QUICKPROP, "fann_train_enum")
#define _sv2fann_activationfunc_enum(sv) _sv2enum(aTHX_ sv, FANN_LINEAR_PIECE_SYMMETRIC, "fann_activationfunc_enum")
#define _sv2fann_errorfunc_enum(sv) _sv2enum(aTHX_ sv, FANN_ERRORFUNC_TANH, "fann_errorfunc_enum")
#define _sv2fann_stopfunc_enum(sv) _sv2enum(aTHX_ sv, FANN_STOPFUNC_BIT, "fann_stopfunc_enum")
#define _fann_train_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_TRAIN_NAMES, FANN_TRAIN_QUICKPROP, "fann_train_enum")
#define _fann_activationfunc_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_ACTIVATIONFUNC_NAMES, FANN_LINEAR_PIECE_SYMMETRIC, "fann_activationfunc_enum")
#define _fann_errorfunc_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_ERRORFUNC_NAMES, FANN_ERRORFUNC_TANH, "fann_errorfunc_enum")
#define _fann_stopfunc_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_STOPFUNC_NAMES, FANN_STOPFUNC_BIT, "fann_stopfunc_enum")
/* normalized names for train_data methods */
/* Maps uniform fann_train_data_* names (matching the PREFIX-stripped
 * Perl method names used by the XSUBs below) onto libfann's historical,
 * inconsistently named train-data functions. */
#define fann_train_data_create_from_file fann_read_train_from_file
#define fann_train_data_shuffle fann_shuffle_train_data
#define fann_train_data_scale_input fann_scale_input_train_data
#define fann_train_data_scale_output fann_scale_output_train_data
#define fann_train_data_scale fann_scale_train_data
#define fann_train_data_merge fann_merge_train_data
#define fann_train_data_subset fann_subset_train_data
#define fann_train_data_length fann_length_train_data
#define fann_train_data_num_input fann_num_input_train_data
#define fann_train_data_num_output fann_num_output_train_data
#define fann_train_data_save fann_save_train
/* XSUBs below are exposed as AI::FANN methods; PREFIX strips the
 * leading "fann_" from the Perl-visible names. */
MODULE = AI::FANN PACKAGE = AI::FANN PREFIX = fann_
PROTOTYPES: DISABLE
BOOT:
/* Disable libfann's default error log at load time so errors are
 * reported only through _check_error() as Perl exceptions. */
fann_set_error_log(0, 0);
# AI::FANN::_constants -- internal helper: returns the full list of
# exported constants as dual-valued SVs (string name + unsigned value).
void
_constants()
PREINIT:
unsigned int i;
PPCODE:
/* my_constant_names is a NULL-terminated table parallel to
 * my_constant_values (both defined elsewhere in this file). */
for (i = 0; my_constant_names[i]; i++) {
SV *sv = sv_2mortal(newSVpv(my_constant_names[i], 0));
SvUPGRADE(sv, SVt_PVIV);   /* add an integer slot alongside the string */
SvUV_set(sv, my_constant_values[i]);
SvIOK_on(sv);
SvIsUV_on(sv);             /* numeric slot is unsigned */
XPUSHs(sv);
}
XSRETURN(i);               /* i == number of SVs pushed */
# AI::FANN->new_standard(@layer_sizes)
# Creates a fully connected standard network; each trailing argument is
# the neuron count of one layer.
struct fann *
fann_new_standard(klass, ...)
SV *klass;
PREINIT:
unsigned int *layers;
unsigned int i;
unsigned int num_layers;
CODE:
/* one layer size per trailing argument (klass occupies ST(0)) */
num_layers = items - 1;
Newx(layers, num_layers, unsigned int);
SAVEFREEPV(layers);   /* freed automatically at scope exit, even on croak */
for (i = 0; i < num_layers; i++) {
layers[i] = SvIV(ST(i+1));
}
RETVAL = fann_create_standard_array(num_layers, layers);
OUTPUT:
RETVAL
CLEANUP:
/* Done at CLEANUP (after RETVAL is wrapped) so a croak still lets the
 * just-created object be released via its wrapper; croaks on NULL. */
_check_error(aTHX_ (struct fann_error *)RETVAL);
# NOTE(review): this region is corrupted -- the for-loop body, the
# fann_create_sparse_array() call, and the beginning of a separate
# train-data constructor (which declares num_data, num_input,
# num_output, input, output and calls _srv2av) are missing between
# the lines below. Recover the original from the AI::FANN
# distribution before building; kept verbatim here.
struct fann *
fann_new_sparse(klass, connection_rate, ...)
SV *klass;
double connection_rate;
PREINIT:
unsigned int *layers;
unsigned int i;
unsigned int num_layers;
CODE:
/* layer sizes start at ST(2): ST(0) is klass, ST(1) connection_rate */
num_layers = items - 2;
Newx(layers, num_layers, unsigned int);
SAVEFREEPV(layers);   /* freed automatically at scope exit, even on croak */
for (i = 0; i < num_layers; i++) {
/* NOTE(review): loop body, closing brace and RETVAL assignment are
 * missing here (source gap). */
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)RETVAL);
/* we do that at cleanup to ensure that the just created object is
* freed if we croak */
/* NOTE(review): from here on the code belongs to the missing
 * train-data constructor, not to fann_new_sparse. */
if (RETVAL) {
for (i = 0; i < num_data; i++) {
unsigned int j;
input = _srv2av(aTHX_ ST(1 + i * 2), num_input, "input");
for (j = 0; j < num_input; j++) {
SV **svp = av_fetch(input, j, 0);
RETVAL->input[i][j] = SvNV(svp ? *svp : &PL_sv_undef);
}
output = _srv2av(aTHX_ ST(2 + i * 2), num_output, "output");
for (j = 0; j < num_output; j++) {
SV **svp = av_fetch(output, j, 0);
RETVAL->output[i][j] = SvNV(svp ? *svp : &PL_sv_undef);
}
}
}
# Destructor for AI::FANN::TrainData objects: releases the C structure.
void
fann_train_data_DESTROY(self)
struct fann_train_data * self;
CODE:
fann_destroy_train(self);
/* Strip the '~' magic -- presumably where the typemap stores the C
 * pointer -- so nothing can reach the freed structure afterwards;
 * TODO confirm against the typemap. */
sv_unmagic(SvRV(ST(0)), '~');
# $train_data->shuffle -- reorders the training pairs in place.
void
fann_train_data_shuffle(self)
struct fann_train_data *self;
CLEANUP:
/* propagate any pending libfann error as a Perl exception */
_check_error(aTHX_ (struct fann_error *)self);
# $train_data->scale_input($new_min, $new_max) -- rescales all input
# values into the given range, in place.
void
fann_train_data_scale_input(self, new_min, new_max)
struct fann_train_data *self;
fann_type new_min;
fann_type new_max;
CLEANUP:
/* propagate any pending libfann error as a Perl exception */
_check_error(aTHX_ (struct fann_error *)self);
# $train_data->scale_output($new_min, $new_max) -- rescales all output
# values into the given range, in place.
void
fann_train_data_scale_output(self, new_min, new_max)
struct fann_train_data *self;
fann_type new_min;
fann_type new_max;
CLEANUP:
/* propagate any pending libfann error as a Perl exception */
_check_error(aTHX_ (struct fann_error *)self);
# $train_data->scale($new_min, $new_max) -- rescales both input and
# output values into the given range, in place.
void
fann_train_data_scale(self, new_min, new_max)
struct fann_train_data *self;
fann_type new_min;
fann_type new_max;
CLEANUP:
/* propagate any pending libfann error as a Perl exception */
_check_error(aTHX_ (struct fann_error *)self);
# $train_data->subset($pos, $length) -- returns a new TrainData object
# holding $length training pairs starting at index $pos.
struct fann_train_data*
fann_train_data_subset(self, pos, length)
struct fann_train_data *self;
unsigned int pos;
unsigned int length;
CLEANUP:
/* check both the source object and the newly created subset
 * (croaks if the constructor returned NULL) */
_check_error(aTHX_ (struct fann_error *)self);
_check_error(aTHX_ (struct fann_error *)RETVAL);
INCLUDE: accessors.xsh