/*
 * AI-NeuralNet-Simple -- excerpt of the XS glue (Simple.xs).
 * (metacpan page header removed from the scraped source)
 */
/*
 * Build a reference (RV) to the supplied array.
 * This performs something like "$rv = \@av;" in Perl.
 */
SV *build_rv(AV *av)
{
	/*
	 * Return a new reference to `av' WITHOUT incrementing its
	 * reference count: the AV is expected to be owned solely by
	 * the reference we build here (see retrieve_ref() in
	 * Storable.xs for the same idiom).  --RAM
	 *
	 * newRV_noinc() performs exactly the manual
	 * NEWSV/sv_upgrade/SvRV/SvROK_on sequence this function used
	 * to spell out by hand, and avoids using SvRV() as an lvalue,
	 * which is discouraged by perlapi (use SvRV_set/newRV_noinc).
	 */
	return newRV_noinc((SV *) av);
}
/*
 * Build reference to a 2-dimensional array, implemented as an array
 * of array references. The holding array has `rows' rows and each array
 * reference has `columns' entries.
 *
 * The name "axa" denotes the "product" of 2 arrays.
 *
 * `arena' is a C matrix (double **) with at least `rows' row pointers,
 * each pointing at `columns' doubles.  Ownership of the returned RV
 * (and everything under it) passes to the caller.
 */
SV *build_axaref(void *arena, int rows, int columns)
{
	AV *av;
	int i;
	double **p;

	av = newAV();
	/*
	 * av_extend(av, key) reserves indices 0..key, so reserving
	 * `rows' slots needs key rows-1 (the original passed `rows',
	 * over-allocating one slot).
	 */
	av_extend(av, rows - 1);

	for (i = 0, p = arena; i < rows; i++, p++) {
		int j;
		double *q;
		AV *av2;

		av2 = newAV();
		av_extend(av2, columns - 1);	/* indices 0..columns-1 */

		for (j = 0, q = *p; j < columns; j++, q++)
			av_store(av2, j, newSVnv((NV) *q));

		av_store(av, i, build_rv(av2));
	}

	return build_rv(av);
}
#define EXPORT_VERSION 1
#define EXPORTED_ITEMS 9
/*
* Exports the C data structures to the Perl world for serialization
* by Storable. We don't want to duplicate the logic of Storable here
* even though we have to do some low-level Perl object construction.
*
* The structure we return is an array reference, which contains the
* following items:
*
* 0 the export version number, in case format changes later
* 1 the amount of neurons in the input layer
* 2 the amount of neurons in the hidden layer
* 3 the amount of neurons in the output layer
* 4 the learning rate
* 5 the sigmoid delta
* 6 whether to use a bipolar (tanh) routine instead of the sigmoid
* 7 [[weight.input_to_hidden[0]], [weight.input_to_hidden[1]], ...]
* 8 [[weight.hidden_to_output[0]], [weight.hidden_to_output[1]], ...]
*/
SV *c_export_network(int handle)
{
	NEURAL_NETWORK *n = c_get_network(handle);
	AV *av;
	int i = 0;

	/*
	 * NOTE(review): `n' is used unchecked -- this assumes
	 * c_get_network() croaks (or otherwise never returns NULL) for
	 * an invalid handle.  Confirm against its definition.
	 */

	av = newAV();
	/* Reserve indices 0..EXPORTED_ITEMS-1 (av_extend takes the
	 * highest index, not a count). */
	av_extend(av, EXPORTED_ITEMS - 1);

	av_store(av, i++, newSViv(EXPORT_VERSION));
	av_store(av, i++, newSViv(n->size.input));
	av_store(av, i++, newSViv(n->size.hidden));
	av_store(av, i++, newSViv(n->size.output));
	av_store(av, i++, newSVnv(n->learn_rate));
	av_store(av, i++, newSVnv(n->delta));
	av_store(av, i++, newSViv(n->use_bipolar));

	/*
	 * Weight matrices: the +1 on the source-layer dimensions
	 * presumably accounts for a bias unit -- confirm against the
	 * network allocation code.
	 */
	av_store(av, i++,
		build_axaref(n->weight.input_to_hidden,
			n->size.input + 1, n->size.hidden + 1));
	av_store(av, i++,
		build_axaref(n->weight.hidden_to_output,
			n->size.hidden + 1, n->size.output));

	/* Sanity check: the stores above must match EXPORTED_ITEMS. */
	if (i != EXPORTED_ITEMS)
		croak("BUG in c_export_network()");

	return build_rv(av);
}
/*
* Load a Perl array of array (a matrix) with "rows" rows and "columns" columns
* into the pre-allocated C array of arrays.
*
* The "hold" argument is an holding array and the Perl array of array which
* we expect is at index "idx" within that holding array.
*/
void c_load_axa(AV *hold, int idx, void *arena, int rows, int columns)
{
SV **sav;
SV *rv;
AV *av;
int i;
double **array = arena;
sav = av_fetch(hold, idx, 0);
/* (metacpan page footer removed from the scraped source; file truncated) */