AI-NeuralNet-Simple


MANIFEST

Changes
examples/game_ai.pl
examples/logical_or.pl
Makefile.PL
MANIFEST
META.yml                        Module meta-data (added by MakeMaker)
README
Simple.xs
lib/AI/NeuralNet/Simple.pm
t/10nn_simple.t
t/20nn_multi.t
t/30nn_storable.t
t/pod-coverage.t
t/pod.t

Simple.xs

        av_store(av, i, build_rv(av2));
    }
 
    return build_rv(av);
}
 
#define EXPORT_VERSION    1
#define EXPORTED_ITEMS    9
 
/*
 * Exports the C data structures to the Perl world for serialization
 * by Storable.  We don't want to duplicate the logic of Storable here
 * even though we have to do some low-level Perl object construction.
 *
 * The structure we return is an array reference, which contains the
 * following items:
 *
 *  0    the export version number, in case the format changes later
 *  1    the number of neurons in the input layer
 *  2    the number of neurons in the hidden layer
 *  3    the number of neurons in the output layer

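The comment above documents the start of the exported layout: a flat Perl
array reference whose first four slots hold the format version and the three
layer sizes (EXPORT_VERSION and EXPORTED_ITEMS above suggest nine slots in
total).  A minimal Perl sketch of reading the documented slots, assuming
$exported holds such a reference:

  my ($version, $input, $hidden, $output) = @{$exported}[0 .. 3];
  die "unsupported export version: $version"
      if $version != 1;    # EXPORT_VERSION in Simple.xs
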
Simple.xs

            croak("row %d of serialized item %d is not an array ref", i, idx);
 
        subav = get_array(rv);
 
        for (j = 0; j < columns; j++)
            row[j] = get_float_element(subav, j);
    }
}
 
/*
 * Create new network from a retrieved data structure, such as the one
 * produced by c_export_network().
 */
int c_import_network(SV *rv)
{
    NEURAL_NETWORK *n;
    int handle;
    SV **sav;
    AV *av;
    int i = 0;
 
    /*
     * Unfortunately, since this data comes from the outside, we need
     * to validate most of the structural information to make sure
     * we're not being fed garbage or something we cannot process, such
     * as a newer version of the serialized data.  This makes the code
     * heavy.
     *        --RAM
     */
 
    if (!is_array_ref(rv))
        croak("c_import_network() not given an array reference");
 
    av = get_array(rv);
 
    /* Check version number */
    sav = av_fetch(av, i++, 0);

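These export/import routines exist so a network can round-trip through
Storable; the distribution ships t/30nn_storable.t to exercise this.  A
minimal sketch, assuming an arbitrary file name:

  use AI::NeuralNet::Simple;
  use Storable qw(store retrieve);

  my $net  = AI::NeuralNet::Simple->new( 2, 1, 2 );
  store $net, 'net.storable';          # serialized via c_export_network()
  my $copy = retrieve 'net.storable';  # rebuilt via c_import_network()
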
Simple.xs

double *input, *output; /* C arrays */
double max_error = 0.0;
 
int set_length=0;
int i,j;
int index;
 
set_length = av_len(get_array(set))+1;
 
if (!set_length)
    croak("_train_set() array ref has no data");
if (set_length % 2)
    croak("_train_set array ref must have an even number of elements");
 
/* allocate memory for our input and output arrays; note that set_length
   counts inputs and outputs, so this over-allocates by a factor of two */
input_array    = get_array_from_aoa(set, 0);
input          = malloc(sizeof(double) * set_length * (av_len(input_array)+1));
 
output_array    = get_array_from_aoa(set, 1);
output          = malloc(sizeof(double) * set_length * (av_len(output_array)+1));
 
for (i=0; i < set_length; i += 2) {
    input_array = get_array_from_aoa(set, i);
     
    if (av_len(input_array)+1 != n->size.input)
        croak("Length of input data does not match");
     
    /* iterate over the input_array and assign the floats to input */
     
    for (j = 0; j < n->size.input; j++) {
        index = (i/2*n->size.input)+j;
        input[index] = get_float_element(input_array, j);
    }
     
    output_array = get_array_from_aoa(set, i+1);
    if (av_len(output_array)+1 != n->size.output)
        croak("Length of output data does not match");
 
    for (j = 0; j < n->size.output; j++) {
        index = (i/2*n->size.output)+j;
        output[index] = get_float_element(output_array, j);
    }
}
 
for (i = 0; i < iterations; i++) {
    max_error = 0.0;

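The validation above implies the shape of the Perl-side data set: one flat
array reference alternating input and output array refs, so even slots hold
inputs and odd slots hold the expected outputs.  A sketch using the
"logical or" data from elsewhere in the distribution:

  my @set = (
      [1,1] => [0,1],    # input pair, then its expected output
      [1,0] => [0,1],
      [0,1] => [0,1],
      [0,0] => [1,0],
  );
  $net->train_set(\@set);
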
Simple.xs

    return max_error;
}
 
SV* c_infer(int handle, SV *array_ref)
{
    NEURAL_NETWORK *n = c_get_network(handle);
    int    i;
    AV     *perl_array, *result = newAV();
 
    /* feed the data */
    perl_array = get_array(array_ref);
 
    for (i = 0; i < n->size.input; i++)
        n->tmp[i] = get_float_element(perl_array, i);
 
    c_feed(n, n->tmp, NULL, 0);
 
    /* read the results */
    for (i = 0; i < n->size.output; i++) {
        av_push(result, newSVnv(n->neuron.output[i]));

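c_infer() backs the Perl-level infer() method documented in the POD; a quick
sketch of the call from Perl:

  my $outputs = $net->infer([1, 0]);   # arrayref, one value per output neuron
  print "@$outputs\n";
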
examples/game_ai.pl

        print $message;
        chomp($response = <STDIN>);
        exit if substr(lc $response, 0, 1) eq 'q';
        $valid_response = $response =~ /$domain/;
    } until $valid_response;
    return $response;
}
 
sub display_result
{
    my ($net,@data) = @_;
    my $result      = $net->winner(\@data);
    my @health      = qw/Poor Average Good/;
    my @knife       = qw/No Yes/;
    my @gun         = qw/No Yes/;
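    # Note: $_[1] .. $_[4] below are the original arguments,
    # i.e. $data[0] .. $data[3].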
    printf $format,
        $health[$_[1]],
        $knife[$_[2]],
        $gun[$_[3]],
        $_[4],             # number of enemies
        $actions[$result];
}

lib/AI/NeuralNet/Simple.pm

}
 
sub use_bipolar {
    my ( $self, $bipolar ) = @_;
    return c_get_use_bipolar( $self->handle ) unless defined $bipolar;
    c_set_use_bipolar( $self->handle, $bipolar );
    return $self;
}
 
sub infer {
    my ( $self, $data ) = @_;
    c_infer( $self->handle, $data );
}
 
sub winner {
 
    # returns index of largest value in inferred answer
    my ( $self, $data ) = @_;
    my $arrayref = c_infer( $self->handle, $data );
 
    my $largest = 0;
    for ( 0 .. $#$arrayref ) {
        $largest = $_ if $arrayref->[$_] > $arrayref->[$largest];
    }
    return $largest;
}
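# Usage sketch (cf. t/10nn_simple.t): once the network has been trained on
# "logical or", $net->winner([1, 0]) returns 1, the index of the largest
# output value.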
 
sub learn_rate {
    my ( $self, $rate ) = @_;

lib/AI/NeuralNet/Simple.pm

=item 1 Designing
 
This is choosing the number of layers and the number of neurons per layer.  In
C<AI::NeuralNet::Simple>, the number of layers is fixed.
 
With more complete neural net packages, you can also pick which activation
functions you wish to use and the "learn rate" of the neurons.
 
=item 2 Training
 
This involves repeatedly feeding data to the neural network until its error
rate is low enough to be acceptable.  Often we have a large data set and
simply keep iterating until the desired error rate is achieved.
 
=item 3 Measuring results
 
One frequent mistake made with neural networks is failing to test the network
with data different from the training data.  It's quite possible for a
backpropagation network to settle into what is known as a "local minimum": a
set of weights that is better than its close alternatives but not the best
possible, which yields misleading results.  To check for this, after training
we often feed in other known-good data for verification, as sketched after
this list.  If the results are not satisfactory, perhaps a different number
of neurons per layer should be tried or a different set of training data
should be supplied.
 
=back
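
A minimal sketch of that train-then-verify workflow (the 0.01 MSE target and
the held-out C<@test_set> are illustrative assumptions):

  $net->train_set( \@training_set, 10_000, 0.01 );
  for my $case (@test_set) {
      my ( $input, $expected ) = @$case;
      warn "unexpected answer for [@$input]\n"
          if $net->winner($input) != $expected;
  }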
 
=head1 Programming C<AI::NeuralNet::Simple>
 
=head2 C<new($input, $hidden, $output)>
 
C<new()> accepts three integers.  These numbers represent the number of nodes
in the input, hidden, and output layers, respectively.  To create the
"logical or" network described earlier:
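
  # two inputs, one hidden node, two outputs, as in the distribution's
  # logical_or example
  my $net = AI::NeuralNet::Simple->new( 2, 1, 2 );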

lib/AI/NeuralNet/Simple.pm

=head2 C<use_bipolar($boolean)>
 
Called without an argument, this method returns whether the network currently
uses a bipolar activation function.  Called with an argument, it tells the
network whether or not to use a bipolar activation function and returns the
network object.

You should not change the activation function during training.
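
A quick sketch of both forms:

  $net->use_bipolar(1);             # switch to a bipolar activation function
  my $bipolar = $net->use_bipolar;  # query the current setting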
 
=head2 C<train(\@input, \@output)>
 
This method trains the network to associate the input data set with the
output data set.  Teaching it to represent "logical or" looks like this:
 
  $net->train([1,1] => [0,1]);
  $net->train([1,0] => [0,1]);
  $net->train([0,1] => [0,1]);
  $net->train([0,0] => [1,0]);
 
Note that one pass through the data is seldom sufficient to train a network.
In the example "logical or" program, we actually run this data through the
network ten thousand times.
 
  for (1 .. 10000) {
    $net->train([1,1] => [0,1]);
    $net->train([1,0] => [0,1]);
    $net->train([0,1] => [0,1]);
    $net->train([0,0] => [1,0]);
  }
 
The routine returns the Mean Squared Error (MSE), a measure of how far off
the network's answers were.

It is far preferable to use C<train_set()>, as this lets you control the MSE
over the training set and is more efficient because there are fewer memory
copies back and forth.
 
=head2 C<train_set(\@dataset, [$iterations, $mse])>
 
Similar to train, this method allows us to train an entire data set at once.
It is typically faster than calling individual "train" methods.  The first
argument is expected to be an array ref of pairs of input and output array
refs.
 
The second argument is the number of iterations to train the set.  If
this argument is not provided here, you may use the C<iterations()> method to
set it (prior to calling C<train_set()>, of course).  A default of 10,000 will
be provided if not set.
 
The third argument is the targeted Mean Square Error (MSE). When provided,

lib/AI/NeuralNet/Simple.pm

seen over the whole training set (and not an average MSE).
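
Putting the three arguments together (the 0.01 target below is an
illustrative choice):

  # run at most 10,000 passes, stopping early once the worst-case MSE
  # over the set drops below 0.01
  my $mse = $net->train_set( \@training_data, 10_000, 0.01 );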
 
=head2 C<iterations([$integer])>
 
If called with a positive integer argument, this method sets the number of
iterations that C<train_set()> will use, and returns the network object.  If
called without an argument, it returns the number of iterations it was set
to.
 
  $net->iterations;         # query the current number of iterations
  my @training_data = (
    [1,1] => [0,1],
    [1,0] => [0,1],
    [0,1] => [0,1],
    [0,0] => [1,0],
  );
  $net->iterations(100000) # let's have lots more iterations!
      ->train_set(\@training_data);
   
=head2 C<learn_rate($rate)>
 
This method, if called without an argument, will return the current learning
rate.  The default learning rate is .20.
 
If called with an argument, this argument must be greater than zero and less
than one.  This will set the learning rate and return the object.
   
  $net->learn_rate; # returns the learning rate
  $net->learn_rate(.1)
      ->iterations(100000)
      ->train_set(\@training_data);
 
If you choose a lower learning rate, the network will train more slowly, but
it may achieve better accuracy.  A higher learning rate will train the
network faster, but it has a tendency to "overshoot" the answer when learning
and not learn as accurately.
 
=head2 C<infer(\@input)>
 
This method, if provided with an input array reference, will return an array
reference containing the output values the network guesses.  Note that

t/10nn_simple.t

    '... and setting it outside of legal boundaries should die';
is(sprintf("%.1f", $net->learn_rate), "0.2", '... and it should have the correct learn rate');
isa_ok($net->learn_rate(.3), $CLASS => '... and setting it should return the object');
is(sprintf("%.1f", $net->learn_rate), "0.3", '... and should set it correctly');
$net->learn_rate(.2);
 
can_ok($net, 'train');
 
# teach the network logical 'or'
 
ok($net->train([1,1], [0,1]), 'Calling train() with valid data should succeed');
for (1 .. 10000) {
    $net->train([1,1],[0,1]);
    $net->train([1,0],[0,1]);
    $net->train([0,1],[0,1]);
    $net->train([0,0],[1,0]);
}
 
can_ok($net, 'winner');
is($net->winner([1,1]), 1, '... and it should return the index of the highest valued result');
is($net->winner([1,0]), 1, '... and it should return the index of the highest valued result');
