AI-FANN


Changes

Revision history for Perl extension AI::FANN.

0.10 Mar 10, 2009
    - syntax error in Makefile.PL (bug report by Michael Stevens)
    - backport OpenBSD patch for Makefile.PL

0.09 Oct 16, 2008
    - AI::FANN::TrainData->new_from_file was not working (bug reported
      by Renata Camargo)
    - constructors were not reporting errors via exceptions as
      documented
    - compiler warnings cleaned

0.08 Jul 5, 2007
    - use doubles instead of floats on accessors
    - MSE is a float (bug and patch by Alex Lang)
    - add support for FANN_SIN_SYMMETRIC, FANN_COS_SYMMETRIC,
      FANN_SIN and FANN_COS constants.

0.07 Nov 22, 2006

FANN.xs

    if (flags & WANT_MORTAL) SAVEFREEPV(fta);

    for (i = 0; i < len; i++) {
        SV ** svp = av_fetch(av, i, 0);
        fta[i] = SvNV(svp ? *svp : &PL_sv_undef);
    }
    return fta;
}

static void
_check_error(pTHX_ struct fann_error *self) {
    if (self) {
        if (fann_get_errno(self) != FANN_E_NO_ERROR) {
            ERRSV = newSVpv(self->errstr, strlen(self->errstr) - 2);
            fann_get_errstr(self); /* resets the FANN error state as a side effect */
            Perl_croak(aTHX_ Nullch);
        }
    }
    else {
        Perl_croak(aTHX_ "Constructor failed");
    }
}

static unsigned int
_sv2enum(pTHX_ SV *sv, unsigned int top, char * const name) {

FANN.xs

	if (value > top) {
		Perl_croak(aTHX_ "value %d is out of range for %s", value, name);
	}
	return value;
}

static SV *
_enum2sv(pTHX_ unsigned int value, char const * const * const names, unsigned int top, char const * const name) {
    SV *sv;
    if (value > top) {
        Perl_croak(aTHX_ "internal error: value %d out of range for %s", value, name);
    }
    sv = newSVpv(names[value], 0);
    SvUPGRADE(sv, SVt_PVIV);
    SvUV_set(sv, value);
    SvIOK_on(sv);
    SvIsUV_on(sv);
    return sv;
}

#define _sv2fann_train_enum(sv) _sv2enum(aTHX_ sv, FANN_TRAIN_QUICKPROP, "fann_train_enum")
#define _sv2fann_activationfunc_enum(sv) _sv2enum(aTHX_ sv, FANN_LINEAR_PIECE_SYMMETRIC, "fann_activationfunc_enum")
#define _sv2fann_errorfunc_enum(sv) _sv2enum(aTHX_ sv, FANN_ERRORFUNC_TANH, "fann_errorfunc_enum")
#define _sv2fann_stopfunc_enum(sv) _sv2enum(aTHX_ sv, FANN_STOPFUNC_BIT, "fann_stopfunc_enum")

#define _fann_train_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_TRAIN_NAMES, FANN_TRAIN_QUICKPROP, "fann_train_enum")
#define _fann_activationfunc_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_ACTIVATIONFUNC_NAMES, FANN_LINEAR_PIECE_SYMMETRIC, "fann_activationfunc_enum")
#define _fann_errorfunc_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_ERRORFUNC_NAMES, FANN_ERRORFUNC_TANH, "fann_errorfunc_enum")
#define _fann_stopfunc_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_STOPFUNC_NAMES, FANN_STOPFUNC_BIT, "fann_stopfunc_enum")



/* normalized names for train_data methods */

#define fann_train_data_create_from_file fann_read_train_from_file
#define fann_train_data_shuffle fann_shuffle_train_data
#define fann_train_data_scale_input fann_scale_input_train_data
#define fann_train_data_scale_output fann_scale_output_train_data

FANN.xs

#define fann_train_data_length fann_length_train_data
#define fann_train_data_num_input fann_num_input_train_data
#define fann_train_data_num_output fann_num_output_train_data
#define fann_train_data_save fann_save_train

MODULE = AI::FANN		PACKAGE = AI::FANN		PREFIX = fann_

PROTOTYPES: DISABLE

BOOT:
    fann_set_error_log(0, 0);

void
_constants()
  PREINIT:
    unsigned int i;
  PPCODE:
    for (i = 0; my_constant_names[i]; i++) {
        SV *sv = sv_2mortal(newSVpv(my_constant_names[i], 0));
        SvUPGRADE(sv, SVt_PVIV);
        SvUV_set(sv, my_constant_values[i]);

FANN.xs

    num_layers = items - 1;
    Newx(layers, num_layers, unsigned int);
    SAVEFREEPV(layers);
    for (i = 0; i < num_layers; i++) {
		layers[i] = SvIV(ST(i+1));
    }
    RETVAL = fann_create_standard_array(num_layers, layers);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)RETVAL);

struct fann *
fann_new_sparse(klass, connection_rate, ...)
    SV *klass;
    double connection_rate;
  PREINIT:
    unsigned int *layers;
    unsigned int i;
    unsigned int num_layers;
  CODE:
    num_layers = items - 2;
    Newx(layers, num_layers, unsigned int);
    SAVEFREEPV(layers);
    for (i = 0; i < num_layers; i++) {
		layers[i] = SvIV(ST(i+2));
    }
    RETVAL = fann_create_sparse_array(connection_rate, num_layers, layers);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)RETVAL);

struct fann *
fann_new_shortcut(klass, ...)
    SV *klass;
  PREINIT:
    unsigned int *layers;
    unsigned int i;
    unsigned int num_layers;
  CODE:
    num_layers = items - 1;
    Newx(layers, num_layers, unsigned int);
    SAVEFREEPV(layers);
    for (i = 0; i < num_layers; i++) {
		layers[i] = SvIV(ST(i+1));
    }
    RETVAL = fann_create_shortcut_array(num_layers, layers);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)RETVAL);

struct fann *
fann_new_from_file(klass, filename)
    SV *klass;
    char *filename;
  CODE:
    RETVAL = fann_create_from_file(filename);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)RETVAL);

void
fann_DESTROY(self)
    struct fann * self;
  CODE:
    fann_destroy(self);
    sv_unmagic(SvRV(ST(0)), '~');

int
fann_save(self, filename)
    struct fann *self;
    char * filename;
  CODE:
    RETVAL = !fann_save(self, filename);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

fta_output
fann_run(self, input)
    struct fann *self;
    fta_input input;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

void
fann_randomize_weights(self, min_weight, max_weight)
    struct fann *self;
    fann_type min_weight;
    fann_type max_weight;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

void
fann_train(self, input, desired_output)
    struct fann *self;
    fta_input input;
    fta_output desired_output;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

fta_output
fann_test(self, input, desired_output)
    struct fann *self;
    fta_input input;
    fta_output desired_output;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

void
fann_reset_MSE(self)
    struct fann * self;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

void
fann_train_on_file(self, filename, max_epochs, epochs_between_reports, desired_error) 
    struct fann *self;
    const char *filename;
    unsigned int max_epochs;
    unsigned int epochs_between_reports;
    double desired_error;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

void
fann_train_on_data(self, data, max_epochs, epochs_between_reports, desired_error)
    struct fann *self;
    struct fann_train_data *data;
    unsigned int max_epochs;
    unsigned int epochs_between_reports;
    double desired_error;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);
    _check_error(aTHX_ (struct fann_error *)data);

void
fann_cascadetrain_on_file(self, filename, max_neurons, neurons_between_reports, desired_error)
    struct fann *self;
	const char *filename;
    unsigned int max_neurons;
    unsigned int neurons_between_reports;
    double desired_error;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

void
fann_cascadetrain_on_data(self, data, max_neurons, neurons_between_reports, desired_error)
    struct fann *self;
    struct fann_train_data *data;
    unsigned int max_neurons;
    unsigned int neurons_between_reports;
    double desired_error;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);
    _check_error(aTHX_ (struct fann_error *)data);

double
fann_train_epoch(self, data)
    struct fann *self;
    struct fann_train_data *data;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);
    _check_error(aTHX_ (struct fann_error *)data);

void
fann_print_connections(self)
    struct fann * self;

void
fann_print_parameters(self)
    struct fann * self;

void

FANN.xs


struct fann_train_data *
fann_train_data_new_from_file(klass, filename)
    SV *klass;
    const char *filename;
  CODE:
    RETVAL = fann_train_data_create_from_file(filename);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)RETVAL);

struct fann_train_data *
fann_train_data_new_empty(klass, num_data, num_input, num_output)
    SV *klass;
    unsigned int num_data;
    unsigned int num_input;
    unsigned int num_output;
  CODE:
    RETVAL = fann_train_data_create(num_data, num_input, num_output);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)RETVAL);

void
fann_train_data_data(self, index, ...)
    struct fann_train_data *self;
    unsigned int index;
  PREINIT:
    AV *input;
    AV *output;
    unsigned int i;
  PPCODE:

FANN.xs

    num_input = av_len(input) + 1;
    if (!num_input)
        Perl_croak(aTHX_ "input array is empty");
    num_output = av_len(output) + 1;
    if (!num_output)
        Perl_croak(aTHX_ "output array is empty");
    RETVAL = fann_train_data_create(num_data, num_input, num_output);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)RETVAL);
    /* we do that at cleanup to ensure that the just created object is
     * freed if we croak */
    if (RETVAL) {
        for (i = 0; i < num_data; i++) {
            unsigned int j;
            input = _srv2av(aTHX_ ST(1 + i * 2), num_input, "input");
            for (j = 0; j < num_input; j++) {
                SV **svp = av_fetch(input, j, 0);
                RETVAL->input[i][j] = SvNV(svp ? *svp : &PL_sv_undef);
            }

FANN.xs

fann_train_data_DESTROY(self)
    struct fann_train_data * self;
  CODE:
    fann_destroy_train(self);
    sv_unmagic(SvRV(ST(0)), '~');

void
fann_train_data_shuffle(self)
    struct fann_train_data *self;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

void
fann_train_data_scale_input(self, new_min, new_max)
    struct fann_train_data *self;
    fann_type new_min;
    fann_type new_max;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

void
fann_train_data_scale_output(self, new_min, new_max)
    struct fann_train_data *self;
    fann_type new_min;
    fann_type new_max;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

void
fann_train_data_scale(self, new_min, new_max)
    struct fann_train_data *self;
    fann_type new_min;
    fann_type new_max;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

struct fann_train_data*
fann_train_data_subset(self, pos, length)
    struct fann_train_data *self;
    unsigned int pos;
    unsigned int length;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);
    _check_error(aTHX_ (struct fann_error *)RETVAL);


INCLUDE: accessors.xsh

accessors.xsh

    struct fann * self;
    enum fann_train_enum value
  CODE:
    if (items > 1) {
        fann_set_training_algorithm(self, value);
    }
    RETVAL = fann_get_training_algorithm(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

enum fann_errorfunc_enum
accessor_train_error_function(self, value = NO_INIT)
    struct fann * self;
    enum fann_errorfunc_enum value
  CODE:
    if (items > 1) {
        fann_set_train_error_function(self, value);
    }
    RETVAL = fann_get_train_error_function(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

enum fann_stopfunc_enum
accessor_train_stop_function(self, value = NO_INIT)
    struct fann * self;
    enum fann_stopfunc_enum value
  CODE:
    if (items > 1) {
        fann_set_train_stop_function(self, value);
    }
    RETVAL = fann_get_train_stop_function(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_learning_rate(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_learning_rate(self, value);
    }
    RETVAL = fann_get_learning_rate(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_learning_momentum(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_learning_momentum(self, value);
    }
    RETVAL = fann_get_learning_momentum(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

fann_type
accessor_bit_fail_limit(self, value = NO_INIT)
    struct fann * self;
    fann_type value
  CODE:
    if (items > 1) {
        fann_set_bit_fail_limit(self, value);
    }
    RETVAL = fann_get_bit_fail_limit(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_quickprop_decay(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_quickprop_decay(self, value);
    }
    RETVAL = fann_get_quickprop_decay(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_quickprop_mu(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_quickprop_mu(self, value);
    }
    RETVAL = fann_get_quickprop_mu(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_rprop_increase_factor(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_rprop_increase_factor(self, value);
    }
    RETVAL = fann_get_rprop_increase_factor(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_rprop_decrease_factor(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_rprop_decrease_factor(self, value);
    }
    RETVAL = fann_get_rprop_decrease_factor(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_rprop_delta_min(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_rprop_delta_min(self, value);
    }
    RETVAL = fann_get_rprop_delta_min(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_rprop_delta_max(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_rprop_delta_max(self, value);
    }
    RETVAL = fann_get_rprop_delta_max(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_num_inputs(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_num_input(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_num_outputs(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_num_output(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_total_neurons(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_total_neurons(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_total_connections(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_total_connections(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_connection_rate(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_connection_rate(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_MSE(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_MSE(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_bit_fail(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_bit_fail(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_cascade_output_change_fraction(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_cascade_output_change_fraction(self, value);
    }
    RETVAL = fann_get_cascade_output_change_fraction(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_cascade_output_stagnation_epochs(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_cascade_output_stagnation_epochs(self, value);
    }
    RETVAL = fann_get_cascade_output_stagnation_epochs(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_cascade_candidate_change_fraction(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_cascade_candidate_change_fraction(self, value);
    }
    RETVAL = fann_get_cascade_candidate_change_fraction(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_cascade_candidate_stagnation_epochs(self, value = NO_INIT)
    struct fann * self;
    unsigned int value
  CODE:
    if (items > 1) {
        fann_set_cascade_candidate_stagnation_epochs(self, value);
    }
    RETVAL = fann_get_cascade_candidate_stagnation_epochs(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

fann_type
accessor_cascade_weight_multiplier(self, value = NO_INIT)
    struct fann * self;
    fann_type value
  CODE:
    if (items > 1) {
        fann_set_cascade_weight_multiplier(self, value);
    }
    RETVAL = fann_get_cascade_weight_multiplier(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

fann_type
accessor_cascade_candidate_limit(self, value = NO_INIT)
    struct fann * self;
    fann_type value
  CODE:
    if (items > 1) {
        fann_set_cascade_candidate_limit(self, value);
    }
    RETVAL = fann_get_cascade_candidate_limit(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_cascade_max_out_epochs(self, value = NO_INIT)
    struct fann * self;
    unsigned int value
  CODE:
    if (items > 1) {
        fann_set_cascade_max_out_epochs(self, value);
    }
    RETVAL = fann_get_cascade_max_out_epochs(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_cascade_max_cand_epochs(self, value = NO_INIT)
    struct fann * self;
    unsigned int value
  CODE:
    if (items > 1) {
        fann_set_cascade_max_cand_epochs(self, value);
    }
    RETVAL = fann_get_cascade_max_cand_epochs(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_cascade_num_candidates(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_cascade_num_candidates(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_cascade_num_candidate_groups(self, value = NO_INIT)
    struct fann * self;
    unsigned int value
  CODE:
    if (items > 1) {
        fann_set_cascade_num_candidate_groups(self, value);
    }
    RETVAL = fann_get_cascade_num_candidate_groups(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

enum fann_activationfunc_enum
accessor_neuron_activation_function(self, layer, neuron_index, value = NO_INIT)
    struct fann * self;
    unsigned int layer;
    unsigned int neuron_index;
    enum fann_activationfunc_enum value
  CODE:
    if (items > 3) {
        fann_set_activation_function(self, value, layer, neuron_index);
    }
    RETVAL = fann_get_activation_function(self, layer, neuron_index);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_
void
accessor_layer_activation_function(self, layer, value)
    struct fann * self;
    unsigned int layer;
    enum fann_activationfunc_enum value;
  CODE:
    fann_set_activation_function_layer(self, value, layer);
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_
void
accessor_hidden_activation_function(self, value)
    struct fann * self;
    enum fann_activationfunc_enum value;
  CODE:
    fann_set_activation_function_hidden(self, value);
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_
void
accessor_output_activation_function(self, value)
    struct fann * self;
    enum fann_activationfunc_enum value;
  CODE:
    fann_set_activation_function_output(self, value);
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

fann_type
accessor_neuron_activation_steepness(self, layer, neuron, value = NO_INIT)
    struct fann * self;
    unsigned int layer;
    unsigned int neuron;
    fann_type value
  CODE:
    if (items > 3) {
        fann_set_activation_steepness(self, value, layer, neuron);
    }
    RETVAL = fann_get_activation_steepness(self, layer, neuron);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_
void
accessor_layer_activation_steepness(self, layer, value)
    struct fann * self;
    unsigned int layer;
    fann_type value;
  CODE:
    fann_set_activation_steepness_layer(self, value, layer);
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_
void
accessor_hidden_activation_steepness(self, value)
    struct fann * self;
    fann_type value;
  CODE:
    fann_set_activation_steepness_hidden(self, value);
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_
void
accessor_output_activation_steepness(self, value)
    struct fann * self;
    fann_type value;
  CODE:
    fann_set_activation_steepness_output(self, value);
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_layer_num_neurons(self, layer)
	struct fann * self;
    unsigned int layer;
  CODE:
    RETVAL = fann_get_num_neurons(self, layer);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_num_layers(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_num_layers(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN::TrainData    PREFIX = accessor_

unsigned int
accessor_num_inputs(self)
	struct fann_train_data * self;
  CODE:
    RETVAL = fann_train_data_num_input(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN::TrainData    PREFIX = accessor_

unsigned int
accessor_num_outputs(self)
	struct fann_train_data * self;
  CODE:
    RETVAL = fann_train_data_num_output(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN::TrainData    PREFIX = accessor_

unsigned int
accessor_length(self)
	struct fann_train_data * self;
  CODE:
    RETVAL = fann_train_data_length(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

genaccessors

    $setter;
EOA

    }
    else {
        die "both setter and getter are null"
    }

    print <<EOA;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

EOA
}


while(<DATA>) {
	chomp;
	next if /^\s*(?:#.*)?$/;
	my (@args) = split /\s*,\s*/;
	@args > 2 or die "wrong number of arguments: $_";

	accessor(@args);
}

__DATA__

training_algorithm, enum fann_train_enum, fann_get_training_algorithm, fann_set_training_algorithm
train_error_function, enum fann_errorfunc_enum, fann_get_train_error_function, fann_set_train_error_function
train_stop_function, enum fann_stopfunc_enum, fann_get_train_stop_function, fann_set_train_stop_function
learning_rate, double, fann_get_learning_rate, fann_set_learning_rate
learning_momentum, double, fann_get_learning_momentum, fann_set_learning_momentum
bit_fail_limit, fann_type, fann_get_bit_fail_limit, fann_set_bit_fail_limit
quickprop_decay, double, fann_get_quickprop_decay, fann_set_quickprop_decay
quickprop_mu, double, fann_get_quickprop_mu, fann_set_quickprop_mu
rprop_increase_factor, double, fann_get_rprop_increase_factor, fann_set_rprop_increase_factor
rprop_decrease_factor, double, fann_get_rprop_decrease_factor, fann_set_rprop_decrease_factor
rprop_delta_min, double, fann_get_rprop_delta_min, fann_set_rprop_delta_min
rprop_delta_max, double, fann_get_rprop_delta_max, fann_set_rprop_delta_max

genconstants

FANN_GAUSSIAN_STEPWISE
FANN_ELLIOT
FANN_ELLIOT_SYMMETRIC
FANN_LINEAR_PIECE
FANN_LINEAR_PIECE_SYMMETRIC
FANN_SIN_SYMMETRIC
FANN_COS_SYMMETRIC
FANN_SIN
FANN_COS

# enum fann_errorfunc_enum:
FANN_ERRORFUNC_LINEAR
FANN_ERRORFUNC_TANH

# enum fann_stopfunc_enum:
FANN_STOPFUNC_MSE
FANN_STOPFUNC_BIT

lib/AI/FANN.pm


=over 4

=item *

Two classes are used: C<AI::FANN> that wraps the C C<struct fann> type
and C<AI::FANN::TrainData> that wraps C<struct fann_train_data>.

=item *

Prefixes and common parts of the C function names referring to those
structures have been removed. For instance, the C function
C<fann_train_data_shuffle> becomes C<AI::FANN::TrainData::shuffle>, which
will usually be called as...

  $train_data->shuffle;

=item *

Pairs of C get/set functions are wrapped in Perl with dual accessor
methods named after the attribute (and without any C<set_>/C<get_>

lib/AI/FANN.pm

  fann_set_activation_function_layer  => layer_activation_function
  fann_set_activation_function_hidden => hidden_activation_function
  fann_set_activation_function_output => output_activation_function
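
For example, the dual accessors and the layer-wide setters might be used
like this (a minimal sketch; it assumes an AI::FANN object in C<$ann> and
the C<FANN_*> constants imported from this module):

  my $rate = $ann->learning_rate;      # get
  $ann->learning_rate(0.7);            # set
  $ann->hidden_activation_function(FANN_SIGMOID_SYMMETRIC);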

=item *

Boolean methods return true on success and undef on failure.
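
For instance, C<save> is one such method (a sketch; the file name is just
a placeholder):

  $ann->save('net.ann') or warn "unable to save the network";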

=item *

Any error reported from the C side is automatically converted to a Perl
exception. No manual error checking is required after calling FANN
functions.
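
That means failures can be trapped with C<eval> as usual (a sketch; the
file name is hypothetical):

  my $ann = eval { AI::FANN->new_from_file('missing.ann') };
  die "could not load network: $@" unless $ann;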

=item *

Memory management is automatic; there is no need to call destroy methods.

=item *

Doubles are used for computations (using floats or fixed
point types is not supported).

lib/AI/FANN.pm

  FANN_GAUSSIAN_STEPWISE
  FANN_ELLIOT
  FANN_ELLIOT_SYMMETRIC
  FANN_LINEAR_PIECE
  FANN_LINEAR_PIECE_SYMMETRIC
  FANN_SIN_SYMMETRIC
  FANN_COS_SYMMETRIC
  FANN_SIN
  FANN_COS

  # enum fann_errorfunc_enum:
  FANN_ERRORFUNC_LINEAR
  FANN_ERRORFUNC_TANH

  # enum fann_stopfunc_enum:
  FANN_STOPFUNC_MSE
  FANN_STOPFUNC_BIT

=head1 CLASSES

The classes defined by this package are:

lib/AI/FANN.pm

=item $ann->test($input, $desired_output)

C<$input> and C<$desired_output> are arrays.

It returns an array with the values of the output layer.
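
For example, passing the input and desired output as array references,
as in the bundled C<samples/add.pl> (a sketch):

  my $out = $ann->test([0, 1], [1]);
  printf "output: %f, MSE so far: %f\n", $out->[0], $ann->MSE;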

=item $ann->reset_MSE

-

=item $ann->train_on_file($filename, $max_epochs, $epochs_between_reports, $desired_error)

-
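
A typical call, assuming a training file in FANN's text format (the file
name is hypothetical):

  $ann->train_on_file('xor.data', 500_000, 1000, 0.001);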

=item $ann->train_on_data($train_data, $max_epochs, $epochs_between_reports, $desired_error)

C<$train_data> is an AI::FANN::TrainData object.
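
For example, building the data set in memory first (a sketch; the
constructor takes input/output array-reference pairs):

  my $data = AI::FANN::TrainData->new( [0, 0] => [0],
                                       [0, 1] => [1],
                                       [1, 0] => [1],
                                       [1, 1] => [0] );
  $ann->train_on_data($data, 500_000, 1000, 0.001);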

=item $ann->cascadetrain_on_file($filename, $max_neurons, $neurons_between_reports, $desired_error)

-

=item $ann->cascadetrain_on_data($train_data, $max_neurons, $neurons_between_reports, $desired_error)

C<$train_data> is an AI::FANN::TrainData object.

=item $ann->train_epoch($train_data)

C<$train_data> is an AI::FANN::TrainData object.
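
The return value is the MSE for that epoch, so it can drive a hand-rolled
training loop (a sketch):

  for my $epoch (1 .. 1000) {
      my $mse = $ann->train_epoch($data);
      last if $mse < 0.001;
  }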

=item $ann->print_connections

-

lib/AI/FANN.pm

=item $ann->cascade_activation_steepnesses(@activation_steepnesses)

Sets the list of activation steepnesses to use for cascade training.
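
For example (the values are purely illustrative):

  $ann->cascade_activation_steepnesses(0.25, 0.5, 0.75, 1.0);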

=item $ann->training_algorithm

=item $ann->training_algorithm($training_algorithm)

-
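
For example, switching to standard resilient backpropagation (assuming
the C<FANN_TRAIN_RPROP> constant is imported):

  $ann->training_algorithm(FANN_TRAIN_RPROP);
  my $algorithm = $ann->training_algorithm;   # read it back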

=item $ann->train_error_function

=item $ann->train_error_function($error_function)

-

=item $ann->train_stop_function

=item $ann->train_stop_function($stop_function)

-

=item $ann->learning_rate

morefann.c

		}
		return ptr;
	}
	return 0;
}

struct fann_train_data *
fann_train_data_create(unsigned int num_data, unsigned int num_input, unsigned int num_output) {
	struct fann_train_data *data = (struct fann_train_data *)calloc(1, sizeof(*data));
	if (data) {
		fann_init_error_data((struct fann_error *)data);
		data->input = allocvv(num_data, num_input);
		data->output = allocvv(num_data, num_output);
		if (data->input && data->output) {
			data->num_data = num_data;
			data->num_input = num_input;
			data->num_output = num_output;
			return data;
		}
	}
	return 0;
}

void
fann_train_data_set(struct fann_train_data *data, unsigned int ix,
					fann_type *input, fann_type *output ) {
	if (ix < data->num_data) {
		memcpy(data->input[ix], input, data->num_input * sizeof(fann_type));
		memcpy(data->output[ix], output, data->num_output * sizeof(fann_type));
	}
	else {
		fann_error((struct fann_error *)data, FANN_E_INDEX_OUT_OF_BOUND, ix);
	}
}

/*
enum fann_activationfunc_enum
fann_get_activation_function(struct fann *ann, unsigned int layer, int neuron_index) {
    struct fann_neuron *neuron = fann_get_neuron(ann, layer, neuron_index);
    if (neuron) {
        return neuron->activation_function;
    }

ppport.h


  --version                   show version

  --patch=file                write one patch file with changes
  --copy=suffix               write changed copies with suffix
  --diff=program              use diff program and options

  --compat-version=version    provide compatibility with Perl version
  --cplusplus                 accept C++ comments

  --quiet                     don't output anything except fatal errors
  --nodiag                    don't show diagnostics
  --nohints                   don't show hints
  --nochanges                 don't suggest changes
  --nofilter                  don't filter input files

  --strip                     strip all script and doc functionality from
                              ppport.h

  --list-provided             list provided API
  --list-unsupported          list unsupported API

ppport.h


=head2 --cplusplus

Usually, F<ppport.h> will detect C++ style comments and
replace them with C style comments for portability reasons.
Using this option instructs F<ppport.h> to leave C++
comments untouched.

=head2 --quiet

Be quiet. Don't print anything except fatal errors.

=head2 --nodiag

Don't output any diagnostic messages. Only portability
alerts will be printed.

=head2 --nohints

Don't output any hints. Hints often contain useful portability
notes. Warnings will still be displayed.

ppport.h

PL_Sv|5.005000||p
PL_compiling|5.004050||p
PL_copline|5.009005||p
PL_curcop|5.004050||p
PL_curstash|5.004050||p
PL_debstash|5.004050||p
PL_defgv|5.004050||p
PL_diehook|5.004050||p
PL_dirty|5.004050||p
PL_dowarn|||pn
PL_errgv|5.004050||p
PL_expect|5.009005||p
PL_hexdigit|5.005000||p
PL_hints|5.005000||p
PL_last_in_gv|||n
PL_laststatval|5.005000||p
PL_modglobal||5.005000|n
PL_na|5.004050||pn
PL_no_modify|5.006000||p
PL_ofs_sv|||n
PL_perl_destruct_level|5.004050||p

ppport.h

PTRV|5.006000||p
PUSHMARK|||
PUSH_MULTICALL||5.009005|
PUSHi|||
PUSHmortal|5.009002||p
PUSHn|||
PUSHp|||
PUSHs|||
PUSHu|5.004000||p
PUTBACK|||
PerlIO_clearerr||5.007003|
PerlIO_close||5.007003|
PerlIO_context_layers||5.009004|
PerlIO_eof||5.007003|
PerlIO_error||5.007003|
PerlIO_fileno||5.007003|
PerlIO_fill||5.007003|
PerlIO_flush||5.007003|
PerlIO_get_base||5.007003|
PerlIO_get_bufsiz||5.007003|
PerlIO_get_cnt||5.007003|
PerlIO_get_ptr||5.007003|
PerlIO_read||5.007003|
PerlIO_seek||5.007003|
PerlIO_set_cnt||5.007003|
PerlIO_set_ptrcnt||5.007003|
PerlIO_setlinebuf||5.007003|
PerlIO_stderr||5.007003|
PerlIO_stdin||5.007003|
PerlIO_stdout||5.007003|
PerlIO_tell||5.007003|
PerlIO_unread||5.007003|
PerlIO_write||5.007003|
Perl_signbit||5.009005|n
PoisonFree|5.009004||p
PoisonNew|5.009004||p
PoisonWith|5.009004||p
Poison|5.008000||p

ppport.h

ptr_table_free||5.009005|
ptr_table_new||5.009005|
ptr_table_split||5.009005|
ptr_table_store||5.009005|
push_scope|||
put_byte|||
pv_display||5.006000|
pv_escape||5.009004|
pv_pretty||5.009004|
pv_uni_display||5.007003|
qerror|||
qsortsvu|||
re_compile||5.009005|
re_croak2|||
re_dup|||
re_intuit_start||5.009005|
re_intuit_string||5.006000|
readpipe_override|||
realloc||5.007002|n
reentrant_free|||
reentrant_init|||
reentrant_retry|||vn
reentrant_size|||
ref_array_or_hash|||
refcounted_he_chain_2hv|||
refcounted_he_fetch|||
refcounted_he_free|||
refcounted_he_new|||

ppport.h

vwarner||5.006000|
vwarn||5.006000|
wait4pid|||
warn_nocontext|||vn
warner_nocontext|||vn
warner|5.006000|5.004000|pv
warn|||v
watch|||
whichsig|||
write_no_mem|||
write_to_stderr|||
xmldump_all|||
xmldump_attr|||
xmldump_eval|||
xmldump_form|||
xmldump_indent|||v
xmldump_packsubs|||
xmldump_sub|||
xmldump_vindent|||
yyerror|||
yylex|||
yyparse|||
yywarn|||
);

if (exists $opt{'list-unsupported'}) {
  my $f;
  for $f (sort { lc $a cmp lc $b } keys %API) {
    next unless $API{$f}{todo};
    print "$f ", '.'x(40-length($f)), " ", format_version($API{$f}{todo}), "\n";

ppport.h

  }

  my $s = $warnings != 1 ? 's' : '';
  my $warn = $warnings ? " ($warnings warning$s)" : '';
  info("Analysis completed$warn");

  if ($file{changes}) {
    if (exists $opt{copy}) {
      my $newfile = "$filename$opt{copy}";
      if (-e $newfile) {
        error("'$newfile' already exists, refusing to write copy of '$filename'");
      }
      else {
        local *F;
        if (open F, ">$newfile") {
          info("Writing copy of '$filename' with changes to '$newfile'");
          print F $c;
          close F;
        }
        else {
          error("Cannot open '$newfile' for writing: $!");
        }
      }
    }
    elsif (exists $opt{patch} || $opt{changes}) {
      if (exists $opt{patch}) {
        unless ($patch_opened) {
          if (open PATCH, ">$opt{patch}") {
            $patch_opened = 1;
          }
          else {
            error("Cannot open '$opt{patch}' for writing: $!");
            delete $opt{patch};
            $opt{changes} = 1;
            goto fallback;
          }
        }
        mydiff(\*PATCH, $filename, $c);
      }
      else {
fallback:
        info("Suggested changes:");

ppport.h


  if (!defined $diff) {
    $diff = run_diff('diff -u', $file, $str);
  }

  if (!defined $diff) {
    $diff = run_diff('diff', $file, $str);
  }

  if (!defined $diff) {
    error("Cannot generate a diff. Please install Text::Diff or use --copy.");
    return;
  }

  print F $diff;
}

sub run_diff
{
  my($prog, $file, $str) = @_;
  my $tmp = 'dppptemp';

ppport.h

        $diff .= $_;
      }
      close F;
      unlink $tmp;
      return $diff;
    }

    unlink $tmp;
  }
  else {
    error("Cannot open '$tmp' for writing: $!");
  }

  return undef;
}

sub rec_depend
{
  my($func, $seen) = @_;
  return () unless exists $depends{$func};
  $seen = {%{$seen||{}}};

ppport.h

  $opt{quiet} and return;
  $opt{diag} and print @_, "\n";
}

sub warning
{
  $opt{quiet} and return;
  print "*** ", @_, "\n";
}

sub error
{
  print "*** ERROR: ", @_, "\n";
}

my %given_hints;
my %given_warnings;
sub hint
{
  $opt{quiet} and return;
  my $func = shift;

ppport.h

#  endif
#endif

#define _dpppDEC2BCD(dec) ((((dec)/100)<<8)|((((dec)%100)/10)<<4)|((dec)%10))
#define PERL_BCDVERSION ((_dpppDEC2BCD(PERL_REVISION)<<24)|(_dpppDEC2BCD(PERL_VERSION)<<12)|_dpppDEC2BCD(PERL_SUBVERSION))

/* It is very unlikely that anyone will try to use this with Perl 6
   (or greater), but who knows.
 */
#if PERL_REVISION != 5
#  error ppport.h only works with Perl version 5
#endif /* PERL_REVISION != 5 */

#ifdef I_LIMITS
#  include <limits.h>
#endif

#ifndef PERL_UCHAR_MIN
#  define PERL_UCHAR_MIN ((unsigned char)0)
#endif

ppport.h

#  define PL_Sv                     Sv
#  define PL_compiling              compiling
#  define PL_copline                copline
#  define PL_curcop                 curcop
#  define PL_curstash               curstash
#  define PL_debstash               debstash
#  define PL_defgv                  defgv
#  define PL_diehook                diehook
#  define PL_dirty                  dirty
#  define PL_dowarn                 dowarn
#  define PL_errgv                  errgv
#  define PL_expect                 expect
#  define PL_hexdigit               hexdigit
#  define PL_hints                  hints
#  define PL_laststatval            laststatval
#  define PL_na                     na
#  define PL_perl_destruct_level    perl_destruct_level
#  define PL_perldb                 perldb
#  define PL_rsfp_filters           rsfp_filters
#  define PL_rsfp                   rsfp
#  define PL_stack_base             stack_base

ppport.h

#ifndef PERL_LOADMOD_IMPORT_OPS
#  define PERL_LOADMOD_IMPORT_OPS        0x4
#endif

/* Replace: 0 */

/* Replace perl_eval_pv with eval_pv */

#ifndef eval_pv
#if defined(NEED_eval_pv)
static SV* DPPP_(my_eval_pv)(char *p, I32 croak_on_error);
static
#else
extern SV* DPPP_(my_eval_pv)(char *p, I32 croak_on_error);
#endif

#ifdef eval_pv
#  undef eval_pv
#endif
#define eval_pv(a,b) DPPP_(my_eval_pv)(aTHX_ a,b)
#define Perl_eval_pv DPPP_(my_eval_pv)

#if defined(NEED_eval_pv) || defined(NEED_eval_pv_GLOBAL)

SV*
DPPP_(my_eval_pv)(char *p, I32 croak_on_error)
{
    dSP;
    SV* sv = newSVpv(p, 0);

    PUSHMARK(sp);
    eval_sv(sv, G_SCALAR);
    SvREFCNT_dec(sv);

    SPAGAIN;
    sv = POPs;
    PUTBACK;

    if (croak_on_error && SvTRUE(GvSV(errgv)))
	croak(SvPVx(GvSV(errgv), na));

    return sv;
}

#endif
#endif

#ifndef vload_module
#if defined(NEED_vload_module)
static void DPPP_(my_vload_module)(U32 flags, SV *name, SV *ver, va_list *args);

ppport.h

#ifndef ckWARN
#  ifdef G_WARN_ON
#    define  ckWARN(a)                  (PL_dowarn & G_WARN_ON)
#  else
#    define  ckWARN(a)                  PL_dowarn
#  endif
#endif

#if (PERL_BCDVERSION >= 0x5004000) && !defined(warner)
#if defined(NEED_warner)
static void DPPP_(my_warner)(U32 err, const char *pat, ...);
static
#else
extern void DPPP_(my_warner)(U32 err, const char *pat, ...);
#endif

#define Perl_warner DPPP_(my_warner)

#if defined(NEED_warner) || defined(NEED_warner_GLOBAL)

void
DPPP_(my_warner)(U32 err, const char *pat, ...)
{
  SV *sv;
  va_list args;

  PERL_UNUSED_ARG(err);

  va_start(args, pat);
  sv = vnewSVpvf(pat, &args);
  va_end(args);
  sv_2mortal(sv);
  warn("%s", SvPV_nolen(sv));
}

#define warner  Perl_warner

samples/add.pl


    my $ann = AI::FANN->new_from_file("add.ann");

    for (1..10) {
        my $a = rand(2) - 1;
        my $b = rand(2) - 1;

        my $c = 0.5 * ($a + $b);

        my $out = $ann->run([$a, $b]);
        printf "%f + %f = %f (good: %f, error: %4.1f%%)\n",
            $a, $b, $out->[0], $c, 50*abs($out->[0] - $c);
    }
}

else {
    die "bad action\n"
}

typemap

struct fann *	T_PTROBJ_MAGIC
struct fann_train_data *	T_PTROBJ_MAGIC

fann_type	T_DOUBLE

fta_input	T_FTA_INPUT
fta_output	T_FTA_OUTPUT

enum fann_train_enum	T_UV
enum fann_stopfunc_enum	T_UV
enum fann_errorfunc_enum	T_UV
enum fann_activationfunc_enum	T_UV

enum fann_train_enum    T_FANN_TRAIN_ENUM
enum fann_activationfunc_enum   T_FANN_ACTIVATIONFUNC_ENUM
enum fann_errorfunc_enum    T_FANN_ERRORFUNC_ENUM
enum fann_stopfunc_enum T_STOPFUNC_ENUM

INPUT

T_PTROBJ_MAGIC
	$var = ($type)_sv2obj(aTHX_ $arg, \"${type}\", 1);

T_FTA_INPUT
	$var = _sv2fta(aTHX_ $arg, self->num_input, WANT_MORTAL, \"${var}\");

T_FTA_OUTPUT
	$var = _sv2fta(aTHX_ $arg, self->num_output, WANT_MORTAL, \"${var}\");

T_FANN_TRAIN_ENUM
    $var = _sv2fann_train_enum($arg)

T_FANN_ACTIVATIONFUNC_ENUM
    $var = _sv2fann_activationfunc_enum($arg)

T_FANN_ERRORFUNC_ENUM
    $var = _sv2fann_errorfunc_enum($arg)

T_STOPFUNC_ENUM
    $var = _sv2fann_stopfunc_enum($arg)


OUTPUT

T_PTROBJ_MAGIC
	$arg = _obj2sv(aTHX_ $var, ST(0), "$type");

T_FTA_OUTPUT
	$arg = _fta2sv(aTHX_ $var, self->num_output);

T_FANN_TRAIN_ENUM
    $arg = _fann_train_enum2sv($var);

T_FANN_ACTIVATIONFUNC_ENUM
    $arg = _fann_activationfunc_enum2sv($var);

T_FANN_ERRORFUNC_ENUM
    $arg = _fann_errorfunc_enum2sv($var);

T_STOPFUNC_ENUM
    $arg = _fann_stopfunc_enum2sv($var);


