AI-Perceptron
Makefile.PL
# Bootstrap check: Makefile.PL defers to Module::Build; if
# Module::Build::Compat is not installed, offer to fetch it from CPAN.
unless (eval "use Module::Build::Compat 0.02; 1" ) {
    print "This module requires Module::Build to install itself.\n";
    require ExtUtils::MakeMaker;
    my $yn = ExtUtils::MakeMaker::prompt
        (' Install Module::Build now from CPAN?', 'y');
    unless ($yn =~ /^y/i) {
        warn " *** Cannot install without Module::Build. Exiting ...\n";
        exit 1;
    }
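The excerpt above stops before the file's ending. For context, the documented Module::Build::Compat "pass-through" idiom usually finishes a Makefile.PL like this; the sketch below is that generic idiom, not necessarily the exact remainder of this distribution's file:

    # Hedged sketch of the usual pass-through ending: hand control to
    # Build.PL, then write a stub Makefile that forwards targets to ./Build.
    Module::Build::Compat->run_build_pl( args => \@ARGV );
    Module::Build::Compat->write_makefile( build_class => 'Module::Build' );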
SYNOPSIS
    my $p = AI::Perceptron->new
                          ->num_inputs( 2 )
                          ->learning_rate( 0.04 )
                          ->threshold( 0.02 )
                          ->weights([ 0.1, 0.2 ]);

    my @inputs  = ( 1.3, -0.45 );  # input can be any number
    my $target  = 1;               # output is always -1 or 1

    my $current = $p->compute_output( @inputs );
    print "current output: $current, target: $target\n";

    $p->add_examples( [ $target, @inputs ] );

    $p->max_iterations( 10 )->train
        or warn "couldn't train in 10 iterations!";

    print "training until it gets it right\n";
    $p->max_iterations( -1 )->train;  # watch out for infinite loops
DESCRIPTION
This module is meant to show how a single node of a neural network
works.
Training is done by the *Stochastic Approximation of the
Gradient-Descent* model.
MODEL
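The body of the MODEL section is not reproduced in this excerpt. As a rough, hedged sketch of a generic perceptron of this kind (not necessarily the module's exact internals): the node takes the weighted sum of its inputs, compares it against the threshold, and outputs 1 or -1; a stochastic training step nudges each weight by learning_rate * (target - output) * input whenever an example is misclassified. In Perl terms, using only accessors shown in the SYNOPSIS and hypothetical helper names:

    # Generic perceptron sketch (hypothetical helpers, not module API):
    sub sketch_output {
        my ( $threshold, $weights, @inputs ) = @_;
        my $sum = 0;
        $sum += $weights->[$_] * $inputs[$_] for 0 .. $#inputs;
        return $sum > $threshold ? 1 : -1;   # step activation: always -1 or 1
    }

    sub sketch_train_step {                  # one stochastic update
        my ( $p, $target, @inputs ) = @_;
        my $output = $p->compute_output( @inputs );
        return if $output == $target;        # no error, no change
        my @w = @{ $p->weights };
        $w[$_] += $p->learning_rate * ( $target - $output ) * $inputs[$_]
            for 0 .. $#inputs;
        $p->weights( \@w );                  # chained setter, as in the SYNOPSIS
    }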
examples/and.pl
#
# And - and function using a perceptron
# Steve Purkis <spurkis@epn.nu>
# July 20, 1999
##
use Data::Dumper;
use AI::Perceptron;

print( "Example: training a perceptron to recognize an 'AND' function.\n",
       "usage: $0 [<threshold> <weight1> <weight2>]\n" );

my $p = AI::Perceptron->new
                      ->num_inputs( 2 )
                      ->learning_rate( 0.1 );

if (@ARGV) {
    $p->threshold( shift(@ARGV) )
      ->weights([ shift(@ARGV), shift(@ARGV) ]);
}

my @training_exs = (
    [-1 => -1, -1],
    [-1 =>  1, -1],
    [-1 => -1,  1],
    [ 1 =>  1,  1],
);

print "\nBefore Training\n";
dump_perceptron( $p );

print "\nTraining...\n";
$p->train( @training_exs );

print "\nAfter Training\n";
dump_perceptron( $p );

sub dump_perceptron {
    my $p = shift;
    print "\tThreshold: ", $p->threshold,
          " Weights: ", join(', ', @{ $p->weights }), "\n";
    foreach my $inputs (@training_exs) {
        my $target = $inputs->[0];
        print "\tInputs = {", join(',', @$inputs[1..2]), "}, target=$target",
              ", output=", $p->compute_output( @$inputs[1..2] ), "\n";
    }
}
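The example hands its training set directly to train(); the SYNOPSIS does the same job via add_examples() and a bounded max_iterations(). A hedged sketch of that alternative, assuming add_examples() accepts a list of [ $target, @inputs ] arrayrefs as its plural name and the SYNOPSIS suggest (the cap of 100 iterations is an arbitrary choice):

    # Alternative to $p->train( @training_exs ), in the chained SYNOPSIS style:
    $p->add_examples( @training_exs );   # each example is [ $target, $x1, $x2 ]
    $p->max_iterations( 100 )->train     # safety cap against non-convergence
        or warn "couldn't learn AND within 100 iterations\n";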
lib/AI/Perceptron.pm
package AI::Perceptron;
use strict;
use accessors qw( num_inputs learning_rate _weights threshold
                  training_examples max_iterations );
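The accessors pragma generates a get/set method for each listed field, and calling one as a setter returns the object itself, which is what makes the chained new->num_inputs(...)->learning_rate(...) style in the SYNOPSIS work. A small hedged illustration (values are arbitrary):

    my $p = AI::Perceptron->new;
    $p->learning_rate( 0.05 )       # setter returns $p ...
      ->threshold( 0.0 );           # ... so calls chain naturally
    print $p->learning_rate, "\n";  # getter: prints 0.05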