AI-FANN

 view release on metacpan or  search on metacpan

Changes  view on Meta::CPAN

Revision history for Perl extension AI::FANN.

0.10 Mar 10, 2009
    - syntax error in Makefile.PL (bug report by Michael Stevens)
    - backport OpenBSD patch for Makefile.PL

0.09 Oct 16, 2008
    - AI::FANN::TrainData->new_from_file was not working (bug reported
      by Renata Camargo)
    - constructors were not reporting errors via exceptions as
      documented
    - compiler warnings cleaned

0.08 Jul 5, 2007
    - use doubles instead of floats on accessors
    - MSE is a float (bug and patch by Alex Lang)
    - add support for FANN_SIN_SYMMETRIC, FANN_COS_SYMMETRIC,
      FANN_SIN and FANN_COS constants.

0.07 Nov 22, 2006
    - move to FANN 2.1

0.06 Sep 18, 2006
    - add support for FANN_LIB and FANN_INCLUDE to Makefile.PL

0.05 May 4, 2006
    - ppport.h regenerated with latest version of Devel::PPPort to
      make it work on perls older than 5.8.8 (bug reported by bodyn).

0.04 May 1, 2006
    - corrected bug on accessor generators with indexes
    - better accessor generation using default arguments XS feature

0.03 Apr 14, 2006
    - improved docs.

0.02 Apr 14, 2006
    - improved docs.

0.01 Apr 7, 2006
    - original version; created by h2xs 1.23 with options
        -An AI::FANN /usr/local/include/fann.h

FANN.xs  view on Meta::CPAN

#include "constants.h"

/* Flag for _sv2fta: schedule the allocated array for automatic release
 * at scope exit (SAVEFREEPV) instead of leaving ownership to the caller. */
#define WANT_MORTAL 1

typedef fann_type *fta; /* fta: fann_type array */
typedef fta fta_input;  /* distinct names so input/output arrays can be */
typedef fta fta_output; /* converted differently (presumably via the typemap, not shown here) */

/* Wrap a C pointer as a (possibly blessed) Perl object.
 *
 * The pointer is stored as the IV of a '~'-magic object attached to a
 * scalar whose string form ("ctype(0xADDR)") is cosmetic only; mg_ptr
 * records the C type name so _sv2obj can type-check it later.
 * Returns a reference to that scalar, or undef when ptr is NULL. */
static SV *
_obj2sv(pTHX_ void *ptr, SV * klass, char * ctype) {
    if (ptr) {
	SV *rv;
	SV *sv = newSVpvf("%s(0x%p)", ctype, ptr);
	/* mortal is safe: sv_magic takes its own reference to mgobj */
	SV *mgobj = sv_2mortal(newSViv(PTR2IV(ptr)));
	SvREADONLY_on(mgobj);
	sv_magic(sv, mgobj, '~', ctype, 0);
	/* SvREADONLY_on(sv); */
	rv = newRV_noinc(sv);
	if (SvOK(klass)) {
	    HV *stash;
	    if (SvROK(klass))
		stash = SvSTASH(klass); /* NOTE(review): SvSTASH on the RV itself, not SvRV(klass) -- verify this picks the intended stash */
	    else
		stash = gv_stashsv(klass, 1); /* class name; autocreate stash if missing */
	    
	    sv_bless(rv, stash);
	}
	return rv;
    }
    return &PL_sv_undef;
}

/* Recover the C pointer wrapped by _obj2sv.
 *
 * self must be a reference to a '~'-magic scalar whose mg_ptr matches
 * ctype.  Returns the pointer on success; otherwise croaks when
 * required is true, or returns NULL when it is false.
 *
 * Fixes vs. the original: SvRV() is only applied after an SvROK()
 * check (SvRV on a non-reference is undefined per perlapi), and
 * mg_ptr is NULL-checked before strcmp(). */
static void *
_sv2obj(pTHX_ SV* self, char * ctype, int required) {
    if (SvROK(self)) {
        SV *sv = SvRV(self);
        if (SvTYPE(sv) == SVt_PVMG) {
            MAGIC *mg = mg_find(sv, '~');
            if (mg && mg->mg_obj &&
                mg->mg_ptr && strcmp(ctype, mg->mg_ptr) == 0) {
                return INT2PTR(void *, SvIV(mg->mg_obj));
            }
        }
    }
    if (required) {
        Perl_croak(aTHX_ "object of class %s expected", ctype);
    }
    return NULL;
}

/* Convert a C array of len fann_type values into a reference to a new
 * Perl array of NVs.  Ownership of the AV passes to the returned RV.
 *
 * Fix vs. the original: av_extend was called unconditionally with
 * len - 1, which for len == 0 wraps to a huge positive index; the
 * pre-sizing is now skipped for empty arrays. */
static SV *
_fta2sv(pTHX_ fann_type *fta, unsigned int len) {
    unsigned int i;
    AV *av = newAV();
    if (len)
        av_extend(av, len - 1);  /* pre-size to the highest index */
    for (i = 0; i < len; i++) {
        SV *sv = newSVnv(fta[i]);
        av_store(av, i, sv);
    }
    return newRV_noinc((SV*)av);
}

/* Validate that sv is a reference to an array with exactly len
 * elements and return the underlying AV.  Never returns on failure:
 * croaks with name identifying the offending argument. */
static AV*
_srv2av(pTHX_ SV* sv, unsigned int len, char * const name) {
    if (SvROK(sv)) {
        AV *av = (AV*)SvRV(sv);
        if (SvTYPE((SV*)av)==SVt_PVAV) {
            if (av_len(av)+1 == len) {  /* av_len is the highest index, hence +1 */
                return av;
            }
            else {
                Perl_croak(aTHX_ "wrong number of elements in %s array, %d found when %d were required",
                           name, (unsigned int)(av_len(av)+1), len);
            }
        }
    }
    Perl_croak(aTHX_ "wrong type for %s argument, array reference expected", name);
}

/* Convert a Perl array reference (validated through _srv2av, which
 * croaks on mismatch) into a freshly allocated C array of len
 * fann_type values.  With WANT_MORTAL set in flags the buffer is
 * scheduled for automatic free (SAVEFREEPV); otherwise the caller
 * owns it.  Missing or undef elements numify to 0. */
static fann_type*
_sv2fta(pTHX_ SV *sv, unsigned int len, int flags, char * const name) {
    unsigned int i;
    fann_type *fta;
    AV *av = _srv2av(aTHX_ sv, len, name);

    Newx(fta, len, fann_type);
    if (flags & WANT_MORTAL) SAVEFREEPV(fta);

    for (i = 0; i < len; i++) {
        SV ** svp = av_fetch(av, i, 0);
        fta[i] = SvNV(svp ? *svp : &PL_sv_undef);
    }
    return fta;
}

/* Raise a Perl exception when the FANN error object carries an error,
 * or when the object itself is NULL (a failed constructor).  On error
 * the message is placed in $@ and the FANN error state is reset. */
static void
_check_error(pTHX_ struct fann_error *self) {
    if (self) {
        if (fann_get_errno(self) != FANN_E_NO_ERROR) {
            /* NOTE(review): direct assignment to ERRSV leaks the previous
             * $@ scalar; sv_setpvn(ERRSV, ...) would be the usual idiom.
             * The "- 2" presumably strips a trailing ".\n" from FANN's
             * message -- confirm against the FANN error source. */
            ERRSV = newSVpv(self->errstr, strlen(self->errstr) - 2);
            fann_get_errstr(self);  /* side effect: clears the error state */
            Perl_croak(aTHX_ Nullch);  /* croak with $@ already populated */
        }
    }
    else {
        Perl_croak(aTHX_ "Constructor failed");
    }
}

/* Numify sv and range-check the result against top (the highest valid
 * member of the enum); croaks, naming the enum, when out of range. */
static unsigned int
_sv2enum(pTHX_ SV *sv, unsigned int top, char * const name) {
	unsigned int value = SvUV(sv);
	if (value > top) {
		Perl_croak(aTHX_ "value %d is out of range for %s", value, name); /* NOTE(review): %u would match the unsigned type */
	}
	return value;
}

/* Build a dual-valued SV for an enum member: the string slot holds
 * names[value] and the numeric slot holds value itself, so the result
 * compares correctly both as a string and as a number. */
static SV *
_enum2sv(pTHX_ unsigned int value, char const * const * const names, unsigned int top, char const * const name) {
    SV *sv;
    if (value > top) {
        Perl_croak(aTHX_ "internal error: value %d out of range for %s", value, name);
    }
    sv = newSVpv(names[value], 0);
    SvUPGRADE(sv, SVt_PVIV);  /* make room for the IV slot */
    SvUV_set(sv, value);
    SvIOK_on(sv);             /* mark the numeric value as valid... */
    SvIsUV_on(sv);            /* ...and unsigned */
    return sv;
}

/* Range-checked SV -> enum converters; the second argument of _sv2enum
 * is the highest member of the corresponding FANN enum. */
#define _sv2fann_train_enum(sv) _sv2enum(aTHX_ sv, FANN_TRAIN_QUICKPROP, "fann_train_enum")
#define _sv2fann_activationfunc_enum(sv) _sv2enum(aTHX_ sv, FANN_LINEAR_PIECE_SYMMETRIC, "fann_activationfunc_enum")
#define _sv2fann_errorfunc_enum(sv) _sv2enum(aTHX_ sv, FANN_ERRORFUNC_TANH, "fann_errorfunc_enum")
#define _sv2fann_stopfunc_enum(sv) _sv2enum(aTHX_ sv, FANN_STOPFUNC_BIT, "fann_stopfunc_enum")

/* enum -> dualvar SV converters using the FANN_*_NAMES string tables. */
#define _fann_train_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_TRAIN_NAMES, FANN_TRAIN_QUICKPROP, "fann_train_enum")
#define _fann_activationfunc_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_ACTIVATIONFUNC_NAMES, FANN_LINEAR_PIECE_SYMMETRIC, "fann_activationfunc_enum")
#define _fann_errorfunc_enum2sv(sv) _enum2sv(aTHX_ sv, FANN_ERRORFUNC_NAMES, FANN_ERRORFUNC_TANH, "fann_errorfunc_enum")

Makefile.PL  view on Meta::CPAN

use ExtUtils::MakeMaker;

use strict;
use warnings;

my $fann_lib = '';   # extra -L flag pointing at the FANN library directory
my $fann_inc = '';   # extra -I flag pointing at the FANN header directory
my $prefix;

# Consume our private FANN_LIB=/FANN_INCLUDE= arguments so MakeMaker
# never sees them; remember PREFIX= (but leave it in @ARGV for
# MakeMaker) as a fallback location for the FANN installation.
@ARGV = map {
    if (/^FANN_LIB=(.*)/) {
        $fann_lib = "-L$1 ";
        ()                     # drop from @ARGV
    }
    elsif (/^FANN_INCLUDE=(.*)/) {
        $fann_inc = "-I$1 ";
        ()                     # drop from @ARGV
    }
    else {
	$prefix = $1 if /^PREFIX=(.*)/;
	$_                     # keep for MakeMaker
    }
} @ARGV;

# Derive library/include search paths from PREFIX when not given explicitly.
if (defined $prefix) {
    $fann_lib = "-L$prefix/lib " unless length $fann_lib;
    $fann_inc = "-I$prefix/include " unless length $fann_inc;
}

WriteMakefile( NAME => 'AI::FANN',
               VERSION_FROM => 'lib/AI/FANN.pm',
               PREREQ_PM => {},
               ABSTRACT_FROM => 'lib/AI/FANN.pm',
               AUTHOR => 'Salvador Fandiño <sfandino@yahoo.com>',
               LIBS => ["${fann_lib}-ldoublefann"], # double-precision FANN build
               DEFINE => '',
               INC => "${fann_inc}-I.",
               OBJECT => '$(BASEEXT)$(OBJ_EXT) morefann$(OBJ_EXT)',
               #OPTIMIZE => '-g -O0',
               depend => { '$(BASEEXT).c' => 'constants.h accessors.xsh' } # generated files, see postamble
             );

# Extra Makefile rules teaching make how to regenerate the two
# generated source files from their generator scripts.
sub MY::postamble {
    my @rules = (
        [ 'constants.h',   'genconstants' ],
        [ 'accessors.xsh', 'genaccessors' ],
    );
    my $frag = "\n";
    for my $rule (@rules) {
        my ($target, $script) = @$rule;
        $frag .= "$target: $script\n\t\$(PERL) $script > $target\n\n";
    }
    return $frag;
}

README  view on Meta::CPAN

This module requires the FANN library version 2.1.0beta or later
compiled to use doubles internally.

The module Test::More is also required for testing.


INSTALLATION

To install this module type the following:

   perl Makefile.PL
   make
   make test
   make install

You may need to add two extra parameters to the Makefile.PL script to
indicate where to find the FANN library and include files if they are
not installed on some standard locations. For instance:

   perl Makefile.PL                            \
       FANN_LIB=/usr/local/fann/lib            \
       FANN_INCLUDE=/usr/local/fann/include


COPYRIGHT AND LICENCE

Copyright (C) 2006 by Salvador Fandino (sfandino@yahoo.com).

This Perl module is free software; you can redistribute it and/or
modify it under the same terms as Perl itself, either Perl version
5.8.8 or, at your option, any later version of Perl 5 you may have
available.

accessors.xsh  view on Meta::CPAN


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

enum fann_train_enum
accessor_training_algorithm(self, value = NO_INIT)
    struct fann * self;
    enum fann_train_enum value
  CODE:
    if (items > 1) {
        fann_set_training_algorithm(self, value);
    }
    RETVAL = fann_get_training_algorithm(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

enum fann_errorfunc_enum
accessor_train_error_function(self, value = NO_INIT)
    struct fann * self;
    enum fann_errorfunc_enum value
  CODE:
    if (items > 1) {
        fann_set_train_error_function(self, value);
    }
    RETVAL = fann_get_train_error_function(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

enum fann_stopfunc_enum
accessor_train_stop_function(self, value = NO_INIT)
    struct fann * self;
    enum fann_stopfunc_enum value
  CODE:
    if (items > 1) {
        fann_set_train_stop_function(self, value);
    }
    RETVAL = fann_get_train_stop_function(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_learning_rate(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_learning_rate(self, value);
    }
    RETVAL = fann_get_learning_rate(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_learning_momentum(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_learning_momentum(self, value);
    }
    RETVAL = fann_get_learning_momentum(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

fann_type
accessor_bit_fail_limit(self, value = NO_INIT)
    struct fann * self;
    fann_type value
  CODE:
    if (items > 1) {
        fann_set_bit_fail_limit(self, value);
    }
    RETVAL = fann_get_bit_fail_limit(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_quickprop_decay(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_quickprop_decay(self, value);
    }
    RETVAL = fann_get_quickprop_decay(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_quickprop_mu(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_quickprop_mu(self, value);
    }
    RETVAL = fann_get_quickprop_mu(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_rprop_increase_factor(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_rprop_increase_factor(self, value);
    }
    RETVAL = fann_get_rprop_increase_factor(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_rprop_decrease_factor(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_rprop_decrease_factor(self, value);
    }
    RETVAL = fann_get_rprop_decrease_factor(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_rprop_delta_min(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_rprop_delta_min(self, value);
    }
    RETVAL = fann_get_rprop_delta_min(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_rprop_delta_max(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_rprop_delta_max(self, value);
    }
    RETVAL = fann_get_rprop_delta_max(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_num_inputs(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_num_input(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_num_outputs(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_num_output(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_total_neurons(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_total_neurons(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_total_connections(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_total_connections(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_connection_rate(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_connection_rate(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_MSE(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_MSE(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_bit_fail(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_bit_fail(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_cascade_output_change_fraction(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_cascade_output_change_fraction(self, value);
    }
    RETVAL = fann_get_cascade_output_change_fraction(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_cascade_output_stagnation_epochs(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_cascade_output_stagnation_epochs(self, value);
    }
    RETVAL = fann_get_cascade_output_stagnation_epochs(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

double
accessor_cascade_candidate_change_fraction(self, value = NO_INIT)
    struct fann * self;
    double value
  CODE:
    if (items > 1) {
        fann_set_cascade_candidate_change_fraction(self, value);
    }
    RETVAL = fann_get_cascade_candidate_change_fraction(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_cascade_candidate_stagnation_epochs(self, value = NO_INIT)
    struct fann * self;
    unsigned int value
  CODE:
    if (items > 1) {
        fann_set_cascade_candidate_stagnation_epochs(self, value);
    }
    RETVAL = fann_get_cascade_candidate_stagnation_epochs(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

fann_type
accessor_cascade_weight_multiplier(self, value = NO_INIT)
    struct fann * self;
    fann_type value
  CODE:
    if (items > 1) {
        fann_set_cascade_weight_multiplier(self, value);
    }
    RETVAL = fann_get_cascade_weight_multiplier(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

fann_type
accessor_cascade_candidate_limit(self, value = NO_INIT)
    struct fann * self;
    fann_type value
  CODE:
    if (items > 1) {
        fann_set_cascade_candidate_limit(self, value);
    }
    RETVAL = fann_get_cascade_candidate_limit(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_cascade_max_out_epochs(self, value = NO_INIT)
    struct fann * self;
    unsigned int value
  CODE:
    if (items > 1) {
        fann_set_cascade_max_out_epochs(self, value);
    }
    RETVAL = fann_get_cascade_max_out_epochs(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_cascade_max_cand_epochs(self, value = NO_INIT)
    struct fann * self;
    unsigned int value
  CODE:
    if (items > 1) {
        fann_set_cascade_max_cand_epochs(self, value);
    }
    RETVAL = fann_get_cascade_max_cand_epochs(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_cascade_num_candidates(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_cascade_num_candidates(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_cascade_num_candidate_groups(self, value = NO_INIT)
    struct fann * self;
    unsigned int value
  CODE:
    if (items > 1) {
        fann_set_cascade_num_candidate_groups(self, value);
    }
    RETVAL = fann_get_cascade_num_candidate_groups(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

enum fann_activationfunc_enum
accessor_neuron_activation_function(self, layer, neuron_index, value = NO_INIT)
    struct fann * self;
    unsigned int layer;
    unsigned int neuron_index;
    enum fann_activationfunc_enum value
  CODE:
    if (items > 3) {
        fann_set_activation_function(self, value, layer, neuron_index);
    }
    RETVAL = fann_get_activation_function(self, layer, neuron_index);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_
void
accessor_layer_activation_function(self, layer, value)
    struct fann * self;
    unsigned int layer;
    enum fann_activationfunc_enum value;
  CODE:
    fann_set_activation_function_layer(self, value, layer);
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_
void
accessor_hidden_activation_function(self, value)
    struct fann * self;
    enum fann_activationfunc_enum value;
  CODE:
    fann_set_activation_function_hidden(self, value);
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_
void
accessor_output_activation_function(self, value)
    struct fann * self;
    enum fann_activationfunc_enum value;
  CODE:
    fann_set_activation_function_output(self, value);
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

fann_type
accessor_neuron_activation_steepness(self, layer, neuron, value = NO_INIT)
    struct fann * self;
    unsigned int layer;
    unsigned int neuron;
    fann_type value
  CODE:
    if (items > 3) {
        fann_set_activation_steepness(self, value, layer, neuron);
    }
    RETVAL = fann_get_activation_steepness(self, layer, neuron);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_
void
accessor_layer_activation_steepness(self, layer, value)
    struct fann * self;
    unsigned int layer;
    fann_type value;
  CODE:
    fann_set_activation_steepness_layer(self, value, layer);
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_
void
accessor_hidden_activation_steepness(self, value)
    struct fann * self;
    fann_type value;
  CODE:
    fann_set_activation_steepness_hidden(self, value);
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_
void
accessor_output_activation_steepness(self, value)
    struct fann * self;
    fann_type value;
  CODE:
    fann_set_activation_steepness_output(self, value);
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_layer_num_neurons(self, layer)
	struct fann * self;
    unsigned int layer;
  CODE:
    RETVAL = fann_get_num_neurons(self, layer);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN    PREFIX = accessor_

unsigned int
accessor_num_layers(self)
	struct fann * self;
  CODE:
    RETVAL = fann_get_num_layers(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN::TrainData    PREFIX = accessor_

unsigned int
accessor_num_inputs(self)
	struct fann_train_data * self;
  CODE:
    RETVAL = fann_train_data_num_input(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN::TrainData    PREFIX = accessor_

unsigned int
accessor_num_outputs(self)
	struct fann_train_data * self;
  CODE:
    RETVAL = fann_train_data_num_output(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);


MODULE = AI::FANN    PACKAGE = AI::FANN::TrainData    PREFIX = accessor_

unsigned int
accessor_length(self)
	struct fann_train_data * self;
  CODE:
    RETVAL = fann_train_data_length(self);
  OUTPUT:
    RETVAL
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

constants.h  view on Meta::CPAN

static char * const my_constant_names[] = {
    "FANN_TRAIN_INCREMENTAL",
    "FANN_TRAIN_BATCH",
    "FANN_TRAIN_RPROP",
    "FANN_TRAIN_QUICKPROP",
    "FANN_LINEAR",
    "FANN_THRESHOLD",
    "FANN_THRESHOLD_SYMMETRIC",
    "FANN_SIGMOID",
    "FANN_SIGMOID_STEPWISE",
    "FANN_SIGMOID_SYMMETRIC",
    "FANN_SIGMOID_SYMMETRIC_STEPWISE",
    "FANN_GAUSSIAN",
    "FANN_GAUSSIAN_SYMMETRIC",
    "FANN_GAUSSIAN_STEPWISE",
    "FANN_ELLIOT",
    "FANN_ELLIOT_SYMMETRIC",
    "FANN_LINEAR_PIECE",
    "FANN_LINEAR_PIECE_SYMMETRIC",
    "FANN_SIN_SYMMETRIC",
    "FANN_COS_SYMMETRIC",
    "FANN_SIN",
    "FANN_COS",
    "FANN_ERRORFUNC_LINEAR",
    "FANN_ERRORFUNC_TANH",
    "FANN_STOPFUNC_MSE",
    "FANN_STOPFUNC_BIT",
    0,
};
static const unsigned int my_constant_values[] = {
    FANN_TRAIN_INCREMENTAL,
    FANN_TRAIN_BATCH,
    FANN_TRAIN_RPROP,
    FANN_TRAIN_QUICKPROP,
    FANN_LINEAR,
    FANN_THRESHOLD,
    FANN_THRESHOLD_SYMMETRIC,
    FANN_SIGMOID,
    FANN_SIGMOID_STEPWISE,
    FANN_SIGMOID_SYMMETRIC,
    FANN_SIGMOID_SYMMETRIC_STEPWISE,
    FANN_GAUSSIAN,
    FANN_GAUSSIAN_SYMMETRIC,
    FANN_GAUSSIAN_STEPWISE,
    FANN_ELLIOT,
    FANN_ELLIOT_SYMMETRIC,
    FANN_LINEAR_PIECE,
    FANN_LINEAR_PIECE_SYMMETRIC,
    FANN_SIN_SYMMETRIC,
    FANN_COS_SYMMETRIC,
    FANN_SIN,
    FANN_COS,
    FANN_ERRORFUNC_LINEAR,
    FANN_ERRORFUNC_TANH,
    FANN_STOPFUNC_MSE,
    FANN_STOPFUNC_BIT,
};

genaccessors  view on Meta::CPAN

#!/usr/bin/perl

use strict;
use warnings;

$| = 1;

my %struct = ( 'AI::FANN' => 'struct fann *',
               'AI::FANN::TrainData' => 'struct fann_train_data *' );

# Emit one XS dual accessor to STDOUT for the given attribute spec.
#
# Arguments: accessor name (optionally package-qualified, relative to
# AI::FANN), the XS return type, the C getter and setter (either may be
# empty; a '->field' form accesses the struct member directly), plus
# any extra index arguments (e.g. layer, neuron_index).
#
# The generated XSUB acts as a setter when called with a trailing value
# argument and always returns the current value; every variant ends
# with a _check_error() CLEANUP block.  NOTE: the heredocs below ARE
# the generated accessors.xsh text -- whitespace here is significant.
sub accessor {
    my ($name, $type, $getter, $setter, @ixs) = @_;

    # NOTE(review): this pattern can always match, so the "or die"
    # branch looks unreachable -- confirm before relying on it.
    my ($package, $method) = $name =~ /^(?:(.*)::)?(.*)$/
        or die "wrong accessor name $name";

    $package = $package ? "AI::FANN::$package" : 'AI::FANN';
    my $struct = $struct{$package}
        or die "wrong package name $package";

    # 'value' is always the trailing setter argument; @ixs1 holds the
    # index arguments only (what the getter receives).
    push @ixs, 'value' unless grep /^value$/, @ixs;
    my @ixs1 =  grep !/^value$/, @ixs;

    my $nixs = @ixs;   # "items > $nixs" means a value was supplied

    my $types = join("\n    ", "$struct self;", map "unsigned int $_;", @ixs1);
    my $args = join(', ', 'self', @ixs1);
    my $setargs = join(', ', 'self', @ixs);

    # Expand getter spec into either a struct member access or a C call.
    if ($getter) {
        if ($getter =~ /^->/) {
            $getter = "self->$getter"
        }
        else {
            $getter = "$getter($args)"
        }
    }

    # Same expansion for the setter.
    if ($setter) {
        if ($setter =~ /^->/) {
            $setter = "self->$setter = value"
        }
        else {
            $setter = "$setter($setargs)"
        }
    }


    print <<HEAD;

MODULE = AI::FANN    PACKAGE = $package    PREFIX = accessor_
HEAD

    # Dual getter/setter variant: optional trailing value argument.
    if ($setter and $getter) {
        print <<EOA

$type
accessor_$method($args, value = NO_INIT)
    $types
    $type value
  CODE:
    if (items > $nixs) {
        $setter;
    }
    RETVAL = $getter;
  OUTPUT:
    RETVAL
EOA

    }
    # Getter-only variant.
    elsif ($getter) {
        print <<EOA;

$type
accessor_$method($args)
	$types
  CODE:
    RETVAL = $getter;
  OUTPUT:
    RETVAL
EOA

    }
    # Setter-only variant.
    elsif ($setter) {
        print <<EOA;
void
accessor_$method($args, value)
    $types
    $type value;
  CODE:
    $setter;
EOA

    }
    else {
        die "both setter and getter are null"
    }

    # Every variant converts pending FANN errors into Perl exceptions.
    print <<EOA;
  CLEANUP:
    _check_error(aTHX_ (struct fann_error *)self);

EOA
}


# Drive the generator from the DATA section: one accessor spec per
# line, comma-separated as "name, type, getter, setter[, index...]";
# blank lines and #-comments are skipped.
while(<DATA>) {
	chomp;
	next if /^\s*(?:#.*)?$/;
	my (@args) = split /\s*,\s*/;
	@args > 2 or die "wrong number of arguments: $_";

	accessor(@args);
}

__DATA__

training_algorithm, enum fann_train_enum, fann_get_training_algorithm, fann_set_training_algorithm
train_error_function, enum fann_errorfunc_enum, fann_get_train_error_function, fann_set_train_error_function
train_stop_function, enum fann_stopfunc_enum, fann_get_train_stop_function, fann_set_train_stop_function
learning_rate, double, fann_get_learning_rate, fann_set_learning_rate
learning_momentum, double, fann_get_learning_momentum, fann_set_learning_momentum
bit_fail_limit, fann_type, fann_get_bit_fail_limit, fann_set_bit_fail_limit

genconstants  view on Meta::CPAN

#!/usr/bin/perl

# Generate constants.h: two parallel arrays mapping FANN constant
# names (from the DATA section below) to their C values.  Output must
# stay in sync with what the XS _constants() support code expects.

use strict;
use warnings;

# Collect constant names, skipping blank lines and #-comments.
my @names;
while (<DATA>) {
    chomp;
    next if /^\s*(?:#.*)?$/;
    push @names, $_;
}

print "static char * const my_constant_names[] = {\n";
print qq(    "$_",\n) for @names;
print qq(    0,\n);    # NULL terminator for name iteration
print "};\n";
print "static const unsigned int my_constant_values[] = {\n";
print qq(    $_,\n) for @names;
print "};\n";

__DATA__

# enum fann_train_enum:
FANN_TRAIN_INCREMENTAL
FANN_TRAIN_BATCH
FANN_TRAIN_RPROP
FANN_TRAIN_QUICKPROP

lib/AI/FANN.pm  view on Meta::CPAN

use strict;
use warnings;
use Carp;

require XSLoader;
XSLoader::load('AI::FANN', $VERSION);

use Exporter qw(import);

{
    # _constants() (defined in XS) returns the list of FANN constant
    # names; each is re-exported as a constant sub whose value is its
    # own name (presumably mapped back to the numeric enum value by the
    # XS conversion layer -- see constants.h).
    my @constants = _constants();

    our %EXPORT_TAGS = ( 'all' => [ @constants ] );
    our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );

    require constant;
    for my $constant (@constants) {
        constant->import($constant, $constant);  # e.g. FANN_LINEAR => "FANN_LINEAR"
    }
}

# In list context, returns the number of neurons on every layer; in
# scalar context, the total number of neurons in the network.
#
# Fix: the usage message named the wrong function ("get_neurons").
sub num_neurons {

    @_ == 1 or croak "Usage: AI::FANN::num_neurons(self)";

    my $self = shift;
    if (wantarray) {
        return map { $self->layer_num_neurons($_) } (0 .. $self->num_layers - 1);
    }
    else {
        return $self->total_neurons;
    }
}

1;
__END__

=head1 NAME

AI::FANN - Perl wrapper for the Fast Artificial Neural Network library

=head1 SYNOPSIS

Train...

  use AI::FANN qw(:all);

  # create an ANN with 2 inputs, a hidden layer with 3 neurons and an
  # output layer with 1 neuron:
  my $ann = AI::FANN->new_standard(2, 3, 1);

  $ann->hidden_activation_function(FANN_SIGMOID_SYMMETRIC);
  $ann->output_activation_function(FANN_SIGMOID_SYMMETRIC);

  # create the training data for a XOR operator:
  my $xor_train = AI::FANN::TrainData->new( [-1, -1], [-1],
                                            [-1, 1], [1],
                                            [1, -1], [1],
                                            [1, 1], [-1] );

  $ann->train_on_data($xor_train, 500000, 1000, 0.001);

  $ann->save("xor.ann");

Run...

  use AI::FANN;

  my $ann = AI::FANN->new_from_file("xor.ann");

  for my $a (-1, 1) {
    for my $b (-1, 1) {
      my $out = $ann->run([$a, $b]);
      printf "xor(%f, %f) = %f\n", $a, $b, $out->[0];
    }
  }

=head1 DESCRIPTION


  WARNING:  THIS IS A VERY EARLY RELEASE,
            MAY CONTAIN CRITICAL BUGS!!!

AI::FANN is a Perl wrapper for the Fast Artificial Neural Network
(FANN) Library available from L<http://fann.sourceforge.net>:

  Fast Artificial Neural Network Library is a free open source neural
  network library, which implements multilayer artificial neural
  networks in C with support for both fully connected and sparsely
  connected networks. Cross-platform execution in both fixed and
  floating point are supported. It includes a framework for easy
  handling of training data sets. It is easy to use, versatile, well
  documented, and fast. PHP, C++, .NET, Python, Delphi, Octave, Ruby,
  Pure Data and Mathematica bindings are available. A reference manual
  accompanies the library with examples and recommendations on how to
  use the library. A graphical user interface is also available for
  the library.

AI::FANN object oriented interface provides an almost direct map to
the C library API. Some differences have been introduced to make it
more perlish:

=over 4

=item *

Two classes are used: C<AI::FANN> that wraps the C C<struct fann> type
and C<AI::FANN::TrainData> that wraps C<struct fann_train_data>.

=item *

Prefixes and common parts on the C function names referring to those
structures have been removed. For instance
C<fann_train_data_shuffle> becomes C<AI::FANN::TrainData::shuffle> that
will be usually called as...

  $train_data->shuffle;

=item *

Pairs of C get/set functions are wrapped in Perl with dual accessor
methods named as the attribute (and without any C<set_>/C<get_>
prefix). For instance:

  $ann->bit_fail_limit($limit); # sets the bit_fail_limit

  $bfl = $ann->bit_fail_limit;  # gets the bit_fail_limit


Pairs of get/set functions requiring additional indexing arguments are
also wrapped inside dual accessors:

  # sets:
  $ann->neuron_activation_function($layer_ix, $neuron_ix, $actfunc);

  # gets:
  $af = $ann->neuron_activation_function($layer_ix, $neuron_ix);

Important: note that on the Perl version, the optional value argument
is moved to the last position (on the C version of the C<set_> method
it is usually the second argument).

=item *

Some functions have been renamed to make the naming more consistent
and to follow Perl conventions:

  C                                      Perl
  -----------------------------------------------------------
  fann_create_from_file               => new_from_file
  fann_create_standard                => new_standard
  fann_get_num_input                  => num_inputs
  fann_get_activation_function        => neuron_activation_function
  fann_set_activation_function        => ^^^
  fann_set_activation_function_layer  => layer_activation_function
  fann_set_activation_function_hidden => hidden_activation_function
  fann_set_activation_function_output => output_activation_function

=item *

Boolean methods return true on success and undef on failure.

=item *

Any error reported from the C side is automatically converted to a Perl
exception. No manual error checking is required after calling FANN
functions.

morefann.c  view on Meta::CPAN


#include "morefann.h"
#include <string.h>

/*
 * Allocate an n1 x n2 matrix of fann_type as a row-pointer array plus
 * one contiguous data buffer; row i is ptr[i] = v + i * n2, so the
 * whole matrix can later be released with free(ptr[0]); free(ptr).
 *
 * Returns NULL on allocation failure.  The previous version leaked
 * whichever of the two allocations succeeded when the other failed;
 * both are now released before returning.
 */
static fann_type **allocvv(unsigned int n1, unsigned int n2) {
	fann_type **ptr = (fann_type **)malloc(n1 * sizeof(fann_type *));
	fann_type *v = (fann_type *)malloc(n1 * n2 * sizeof(fann_type));
	if (ptr && v) {
		unsigned int i;
		for (i = 0; i < n1; i++) {
			ptr[i] = v + i * n2;
		}
		return ptr;
	}
	/* partial failure: free(NULL) is a no-op, so free both unconditionally */
	free(ptr);
	free(v);
	return 0;
}

/*
 * Allocate an empty training-data set with room for num_data samples
 * of num_input inputs and num_output outputs each.
 *
 * Returns NULL on allocation failure.  The previous version leaked the
 * struct (and any successfully allocated matrix) when one of the two
 * allocvv calls failed; everything is now released on that path.
 */
struct fann_train_data *
fann_train_data_create(unsigned int num_data, unsigned int num_input, unsigned int num_output) {
	struct fann_train_data *data = (struct fann_train_data *)calloc(1, sizeof(*data));
	if (data) {
		fann_init_error_data((struct fann_error *)data);
		data->input = allocvv(num_data, num_input);
		data->output = allocvv(num_data, num_output);
		if (data->input && data->output) {
			data->num_data = num_data;
			data->num_input = num_input;
			data->num_output = num_output;
			return data;
		}
		/* an allocvv matrix is a pointer array whose first row points
		   at the single data buffer, so two frees release it fully */
		if (data->input) {
			free(data->input[0]);
			free(data->input);
		}
		if (data->output) {
			free(data->output[0]);
			free(data->output);
		}
		free(data);
	}
	return 0;
}

/*
 * Store one training sample: copy `input` and `output` into row `ix`
 * of `data`.  An out-of-range index is reported through FANN's error
 * mechanism and no memory is touched.
 */
void
fann_train_data_set(struct fann_train_data *data, unsigned int ix,
					fann_type *input, fann_type *output ) {
	/* guard clause: reject a bad index up front */
	if (ix >= data->num_data) {
		fann_error((struct fann_error *)data, FANN_E_INDEX_OUT_OF_BOUND, ix);
		return;
	}
	memcpy(data->input[ix], input, data->num_input * sizeof(fann_type));
	memcpy(data->output[ix], output, data->num_output * sizeof(fann_type));
}

/*
enum fann_activationfunc_enum
fann_get_activation_function(struct fann *ann, unsigned int layer, int neuron_index) {
    struct fann_neuron *neuron = fann_get_neuron(ann, layer, neuron_index);
    if (neuron) {
        return neuron->activation_function;
    }
    return 0;
}
*/

/*
fann_type
fann_get_activation_steepness(struct fann *ann, unsigned int layer, int neuron_index) {
    struct fann_neuron *neuron = fann_get_neuron(ann, layer, neuron_index);
    if (neuron) {
        return neuron->activation_steepness;
    }
    return 0;
}
*/

/*
unsigned int
fann_get_num_layers(struct fann *ann) {
    return ann->last_layer - ann->first_layer;
}
*/

/*
 * Return the number of neurons in the given layer, or 0 when the layer
 * index is invalid (fann_get_layer reported the error in that case).
 */
unsigned int
fann_get_num_neurons(struct fann *ann, unsigned int layer_index) {
    struct fann_layer *l = fann_get_layer(ann, layer_index);
    if (!l)
        return 0;
    return l->last_neuron - l->first_neuron;
}

morefann.h  view on Meta::CPAN


/* NOTE(review): no include guard visible in this header; harmless while
   it is included only once, but worth adding if usage grows. */

#include <doublefann.h> /* double-precision FANN build */

/* Copy one input/output sample pair into row `ix` of `data`; an
   out-of-range index is reported through the struct's error channel.
   Defined in morefann.c. */
void
fann_train_data_set(struct fann_train_data *data, unsigned int ix,
					fann_type *input, fann_type *output );

/* Allocate an empty training-data set for `num_data` samples of
   `num_input` inputs and `num_output` outputs each; returns NULL on
   allocation failure.  Defined in morefann.c. */
struct fann_train_data *
fann_train_data_create(unsigned int num_data,
					   unsigned int num_input, unsigned int num_output);


/* NOTE(review): the three accessors below are not defined in
   morefann.c as shown here — presumably they live elsewhere in the
   distribution (FANN.xs?); confirm before relying on them. */
struct fann_layer*
fann_get_layer(struct fann *ann, int layer);

struct fann_neuron*
fann_get_neuron_layer(struct fann *ann, struct fann_layer* layer, int neuron);

struct fann_neuron*
fann_get_neuron(struct fann *ann, unsigned int layer, int neuron);

ppport.h  view on Meta::CPAN

#if 0
<<'SKIP';
#endif
/*
----------------------------------------------------------------------

    ppport.h -- Perl/Pollution/Portability Version 3.13

    Automatically created by Devel::PPPort running under perl 5.010000.

    Do NOT edit this file directly! -- Edit PPPort_pm.PL and the
    includes in parts/inc/ instead.

    Use 'perldoc ppport.h' to view the documentation below.

----------------------------------------------------------------------

SKIP

=pod

=head1 NAME

ppport.h - Perl/Pollution/Portability version 3.13

=head1 SYNOPSIS

  perl ppport.h [options] [source files]

  Searches current directory for files if no [source files] are given

  --help                      show short help

  --version                   show version

  --patch=file                write one patch file with changes
  --copy=suffix               write changed copies with suffix
  --diff=program              use diff program and options

  --compat-version=version    provide compatibility with Perl version
  --cplusplus                 accept C++ comments

  --quiet                     don't output anything except fatal errors
  --nodiag                    don't show diagnostics
  --nohints                   don't show hints
  --nochanges                 don't suggest changes
  --nofilter                  don't filter input files

  --strip                     strip all script and doc functionality from
                              ppport.h

  --list-provided             list provided API
  --list-unsupported          list unsupported API
  --api-info=name             show Perl API portability information

=head1 COMPATIBILITY

This version of F<ppport.h> is designed to support operation with Perl
installations back to 5.003, and has been tested up to 5.10.0.

=head1 OPTIONS

=head2 --help

samples/ox.pl  view on Meta::CPAN


# Sample script: train/test an ANN to distinguish O's from X's in small
# GD-generated images.  The rest of the file is already lexically clean,
# so enabling strictures is safe.
use strict;
use warnings;

use GD;
use AI::FANN qw(:all);

my $num  = 500;    # number of training images to generate
my $size = 16;     # image side length in pixels

$| = 1;            # autoflush so the progress dots appear immediately

# Draw a randomly sized and placed circle ("O") on $img in $ink.
# Returns the circle diameter relative to the image size (2*r/$size).
sub draw_o {
    my ($img, $ink) = @_;
    my $radius = int(0.2 + rand(0.25 * $size));
    my $cx     = $radius + int rand($size - 2 * $radius);
    my $cy     = $radius + int rand($size - 2 * $radius);
    $img->arc($cx, $cy, $radius, $radius, 0, 360, $ink);
    return 2 * $radius / $size;
}

# Draw a randomly sized and placed "X" (two crossing diagonals) on $img
# in $ink.  Returns the X's extent relative to the image size (2*r/$size).
sub draw_x {
    my ($img, $ink) = @_;
    my $radius = int(0.2 + rand(0.25 * $size));
    my $cx     = $radius + int rand($size - 2 * $radius);
    my $cy     = $radius + int rand($size - 2 * $radius);
    $img->line($cx - $radius, $cy - $radius, $cx + $radius, $cy + $radius, $ink);
    $img->line($cx - $radius, $cy + $radius, $cx + $radius, $cy - $radius, $ink);
    return 2 * $radius / $size;
}

# Flatten the image into a row-major list of pixel color indexes and
# return it as an array reference (the ANN input vector).  The old,
# never-used second argument ($type) has been dropped; both call sites
# pass only the image.
sub image_to_input {
    my ($im) = @_;
    my @data;
    for my $x (0 .. $size - 1) {
        push @data, map { $im->getPixel($x, $_) } 0 .. $size - 1;
    }
    return \@data;
}

# Generate $num random O/X images, build a training set from them and
# train a network with the layer sizes given in @_, saving it to
# "ox.ann" after each of 40 training rounds.
sub make_train {
    my $train = AI::FANN::TrainData->new_empty($num, $size * $size, 2);
    for (0 .. $num - 1) {
        print ".";
        my $im    = GD::Image->new($size, $size);
        my $white = $im->colorAllocate(255, 255, 255); # first allocation = background
        my $black = $im->colorAllocate(0, 0, 0);
        # Normalize the class label to 0/1: a raw "rand > .5" yields ''
        # for false, which warns and numifies oddly when stored as a
        # fann_type output.
        my $type = rand() > .5 ? 1 : 0;
        my $r = $type ? draw_x($im, $black) : draw_o($im, $black);
        $train->data($_, image_to_input($im), [$type, $r]);
    }
    print "\n";
    my $ann = AI::FANN->new_standard(@_);
    for (1 .. 40) {
        # max 100 epochs per round, report every epoch, target MSE 0.0001
        $ann->train_on_data($train, 100, 1, 0.0001);
        # $ann->print_connections;
        $ann->print_parameters;
        $ann->save("ox.ann");
    }
}

# Load the trained network from "ox.ann" and evaluate it on $rep freshly
# generated random images, printing expected vs. predicted class/size.
sub make_test {
    my $rep = shift;
    my $ann = AI::FANN->new_from_file("ox.ann");
    print "ann read\n";
    for (0 .. $rep - 1) {
        my $im    = GD::Image->new($size, $size);
        my $white = $im->colorAllocate(255, 255, 255); # first allocation = background
        my $black = $im->colorAllocate(0, 0, 0);
        # Normalize the class label to 0/1: a raw boolean is '' for
        # false, which is not a valid %f argument for printf below.
        my $type = rand() > .5 ? 1 : 0;
        my $r = $type ? draw_x($im, $black) : draw_o($im, $black);
        my $out = $ann->run(image_to_input($im));
        printf("type: %f, r: %4.2f out type: %f, r: %4.2f\n", $type, $r, $out->[0], $out->[1]);
    }
}

# Command-line dispatch.  Default the action to '' so that running the
# script with no arguments reaches the usage error instead of warning
# about an uninitialized value.
my $action = defined $ARGV[0] ? $ARGV[0] : '';
if ($action eq 'train') {
    make_train($size * $size, 4 * $size * $size, 240, 200, 60, 20, 2);
}
elsif ($action eq 'test') {
    make_test($ARGV[1] || 10);
}
else {
    die "wrong action"
}

samples/xor.pl  view on Meta::CPAN

#!/usr/bin/perl

use strict;
use warnings;

use AI::FANN qw(:all);

# Dispatch on the requested action; default to '' so a missing argument
# reaches the usage error instead of raising an "uninitialized value"
# warning (this script runs under "use warnings").
my $action = defined $ARGV[0] ? $ARGV[0] : '';

if ($action eq 'train') {

    # create an ANN with 2 inputs, a hidden layer with 3 neurons and an
    # output layer with 1 neuron:
    my $ann = AI::FANN->new_standard(2, 3, 1);

    $ann->hidden_activation_function(FANN_SIGMOID_SYMMETRIC);
    $ann->output_activation_function(FANN_SIGMOID_SYMMETRIC);

    # create the training data for a XOR operator:
    my $xor_train = AI::FANN::TrainData->new( [-1, -1], [-1],
                                              [-1, 1], [1],
                                              [1, -1], [1],
                                              [1, 1], [-1] );

    # up to 500000 epochs, report every 100, stop at MSE 0.0001
    $ann->train_on_data($xor_train, 500000, 100, 0.0001);

    $ann->save("xor.ann");

}
elsif ($action eq 'test') {

    my $ann = AI::FANN->new_from_file("xor.ann");

    # evaluate the trained network on the four corners of the input space
    for my $a (-1, 1) {
        for my $b (-1, 1) {
            my $out = $ann->run([$a, $b]);
            printf "xor(%f, %f) = %f\n", $a, $b, $out->[0];
        }
    }

}
else {
    die "bad action\n"
}

t/pods.t  view on Meta::CPAN

#!/usr/bin/perl

# Author-only POD sanity check: validates all POD files under blib.

use strict;
use warnings;
use Test::More;

# Gate on the author's login instead of running for every installer.
# getlogin() can fail or return undef (e.g. under cron); treat that as
# "not the author".  A block eval replaces the old string eval.
my $login = eval { getlogin() };
plan skip_all => "Only the author needs to check that POD docs are right"
    unless defined $login and $login eq 'salva';

# Test::Pod is optional; skip gracefully when it is not installed.
eval "use Test::Pod 1.00";
plan skip_all => "Test::Pod 1.00 required for testing POD" if $@;

all_pod_files_ok( all_pod_files( qw(blib) ) );

typemap  view on Meta::CPAN

enum fann_activationfunc_enum	T_UV

enum fann_train_enum    T_FANN_TRAIN_ENUM
enum fann_activationfunc_enum   T_FANN_ACTIVATIONFUNC_ENUM
enum fann_errorfunc_enum    T_FANN_ERRORFUNC_ENUM
enum fann_stopfunc_enum T_STOPFUNC_ENUM

INPUT

T_PTROBJ_MAGIC
	$var = ($type)_sv2obj(aTHX_ $arg, \"${type}\", 1);

T_FTA_INPUT
	$var = _sv2fta(aTHX_ $arg, self->num_input, WANT_MORTAL, \"${var}\");

T_FTA_OUTPUT
	$var = _sv2fta(aTHX_ $arg, self->num_output, WANT_MORTAL, \"${var}\");

T_FANN_TRAIN_ENUM
    $var = _sv2fann_train_enum($arg)

T_FANN_ACTIVATIONFUNC_ENUM
    $var = _sv2fann_activationfunc_enum($arg)

T_FANN_ERRORFUNC_ENUM
    $var = _sv2fann_errorfunc_enum($arg)

T_STOPFUNC_ENUM
    $var = _sv2fann_stopfunc_enum($arg)


OUTPUT

T_PTROBJ_MAGIC
	$arg = _obj2sv(aTHX_ $var, ST(0), "$type");

T_FTA_OUTPUT
	$arg = _fta2sv(aTHX_ $var, self->num_output);

T_FANN_TRAIN_ENUM
    $arg = _fann_train_enum2sv($var);

T_FANN_ACTIVATIONFUNC_ENUM
    $arg = _fann_activationfunc_enum2sv($var);

T_FANN_ERRORFUNC_ENUM
    $arg = _fann_errorfunc_enum2sv($var);

T_STOPFUNC_ENUM
    $arg = _fann_stopfunc_enum2sv($var);

 view all matches for this distribution
 view release on metacpan -  search on metacpan

( run in 0.807 second using v1.00-cache-2.02-grep-82fe00e-cpan-2c419f77a38b )