AI-NeuralNet-Simple
view release on metacpan - search on metacpan
view release on metacpan or search on metacpan
examples/game_ai.pl view on Meta::CPAN
$_ = substr $_, 0, 1;
tr/yn/10/;
}
printf "I think I will %s!\n\n", $actions[$net->winner([
$health,
$knife,
$gun,
$enemies])];
}
# Prompt the user until the reply matches $domain (a regex/pattern).
# A reply beginning with 'q' (any case) — or EOF on STDIN — exits the
# program.  Returns the first valid (chomped) response.
sub prompt
{
    my ($message, $domain) = @_;
    my $valid_response = 0;
    my $response;
    do {
        print $message;
        $response = <STDIN>;
        # Bug fix: on EOF (e.g. exhausted piped input) <STDIN> returns
        # undef, which previously caused an infinite loop of warnings.
        # Treat EOF the same as an explicit quit.
        exit unless defined $response;
        chomp $response;
        exit if substr(lc $response, 0, 1) eq 'q';
        $valid_response = $response =~ /$domain/;
    } until $valid_response;
    return $response;
}
sub display_result
{
my ($net,@data) = @_;
my $result = $net->winner(\@data);
my @health = qw/Poor Average Good/;
my @knife = qw/No Yes/;
my @gun = qw/No Yes/;
printf $format,
$health[$_[1]],
$knife[$_[2]],
$gun[$_[3]],
lib/AI/NeuralNet/Simple.pm view on Meta::CPAN
# Load the compiled XS half of this module.  Perl 5.6+ ships XSLoader,
# a lighter-weight loader than the classic DynaLoader bootstrap; older
# perls fall back to DynaLoader (which requires being in @ISA).
# NOTE(review): $VERSION and @ISA are declared earlier in the file,
# outside this excerpt.
if ( $] >= 5.006 ) {
require XSLoader;
XSLoader::load( 'AI::NeuralNet::Simple', $VERSION );
}
else {
require DynaLoader;
push @ISA, 'DynaLoader';
AI::NeuralNet::Simple->bootstrap($VERSION);
}
sub handle { $_[0]->{handle} }
# Constructor.  Takes exactly three positive integers: the sizes of the
# input, hidden and output layers.  Dies (via logdie) on bad arguments
# or if the C network cannot be allocated.  Returns the blessed object.
sub new {
    my ( $class, @args ) = @_;
    logdie "you must supply three positive integers to new()"
      unless 3 == @args;
    foreach (@args) {
        logdie "arguments to new() must be positive integers"
          unless defined $_ && /^\d+$/;
    }

    # rand() is called purely for its side effect: Perl invokes srand()
    # on the first call to rand(), so the C code sees a seeded RNG.
    my $seed   = rand(1);
    my $handle = c_new_network(@args);
    logdie "could not create new network" unless $handle >= 0;

    my $self = bless {
        input  => $args[0],
        hidden => $args[1],
        output => $args[2],
        handle => $handle,
    }, $class;
    $self->iterations(10000);    # set a reasonable default

    # Bug fix: previously the constructor ended with the iterations()
    # call and only worked because that method happens to return $self.
    # Return the object explicitly so the contract is not accidental.
    return $self;
}
# Run one training pass for a single input/output pair, delegating to
# the XS implementation.  Returns whatever c_train() returns.
sub train {
    my $self = shift;
    my ( $inputs, $outputs ) = @_;
    return c_train( $self->handle, $inputs, $outputs );
}
# Train on a whole data set.  $iterations falls back to the object's
# configured iteration count when false; $mse of -1.0 tells the C code
# "no early-stopping threshold".
sub train_set {
    my ( $self, $set, $iterations, $mse ) = @_;
    $iterations = $self->iterations unless $iterations;
    $mse        = -1.0 unless defined $mse;
    return c_train_set( $self->handle, $set, $iterations, $mse );
}
# Combined getter/setter for the default training iteration count.
# With no argument, returns the current value; with a positive integer,
# stores it and returns $self for chaining.
sub iterations {
    my ( $self, $count ) = @_;
    unless ( defined $count ) {
        return $self->{iterations};
    }
    logdie "iterations() value must be a positive integer."
      unless $count and $count =~ /^\d+$/;
    $self->{iterations} = $count;
    return $self;
}
# Combined getter/setter for the activation-function delta.  With no
# argument, returns the current value from the C side; with a positive
# number, stores it and returns $self for chaining.
sub delta {
    my ( $self, $delta ) = @_;
    if ( defined $delta ) {
        logdie "delta() value must be a positive number" unless $delta > 0.0;
        c_set_delta( $self->handle, $delta );
        return $self;
    }
    return c_get_delta( $self->handle );
}
# Combined getter/setter for the bipolar-activation flag.  With no
# argument, returns the current flag; otherwise stores it and returns
# $self for chaining.  No validation is performed on the flag value.
sub use_bipolar {
    my ( $self, $bipolar ) = @_;
    if ( defined $bipolar ) {
        c_set_use_bipolar( $self->handle, $bipolar );
        return $self;
    }
    return c_get_use_bipolar( $self->handle );
}
# Run the network forward on $data and return the raw inference result
# from the XS layer (an arrayref of output activations — see winner()).
sub infer {
    my ( $self, $data ) = @_;
    return c_infer( $self->handle, $data );
}
# Run inference on $data and return the index of the output neuron with
# the largest activation (ties keep the earliest index).
sub winner {
    my ( $self, $data ) = @_;
    my $outputs = c_infer( $self->handle, $data );
    my $best    = 0;
    my $index   = 0;
    for my $value (@$outputs) {
        $best = $index if $value > $outputs->[$best];
        $index++;
    }
    return $best;
}
# Combined getter/setter for the learning rate.  With no argument,
# returns the current rate; with a value strictly between 0 and 1,
# stores it and returns $self for chaining.
sub learn_rate {
    my ( $self, $rate ) = @_;
    if ( defined $rate ) {
        logdie "learn rate must be between 0 and 1, exclusive"
          unless $rate > 0 && $rate < 1;
        c_set_learn_rate( $self->handle, $rate );
        return $self;
    }
    return c_get_learn_rate( $self->handle );
}
# Destructor: release the C-side network when the Perl object is
# garbage-collected so the XS allocation does not leak.
sub DESTROY {
    my ($self) = @_;
    c_destroy_network( $self->handle );
}
#
# Serializing hook for Storable
#
# Storable serialization hook.  Exports the C network state, then
# freezes a copy of the object hash WITHOUT the handle — the handle is
# process-specific and gets rebuilt from $internal in STORABLE_thaw.
# Behaviour is deliberately the same whether we're cloning or not --RAM
sub STORABLE_freeze {
    my ( $self, $cloning ) = @_;
    my $internal = c_export_network( $self->handle );
    my %copy     = %$self;
    delete $copy{handle};
    return ( "", \%copy, $internal );
}
#
# Deserializing hook for Storable
#
# Storable deserialization hook: restore the plain hash fields, then
# recreate the C-side network from the exported state and stash the
# fresh handle.
sub STORABLE_thaw {
    my ( $self, $cloning, $serialized, $copy, $internal ) = @_;
    %$self = %$copy;
    $self->{handle} = c_import_network($internal);
}
1;
__END__
=head1 NAME
view all matches for this distribution — view release on metacpan - search on metacpan
( run in 0.614 second using v1.00-cache-2.02-grep-82fe00e-cpan-2c419f77a38b )