examples/game_ai.pl
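# Train a 4-input, 20-hidden, 4-output network to choose an action
# (attack, run, wander or hide) from health, weapons and enemy count.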
my $net = AI::NeuralNet::Simple->new(4,20,4);
$net->iterations(shift || 100000);
$net->train_set( [
# health knife gun enemy
[GOOD, YES, YES, 0], WANDER,
[GOOD, YES, NO, 2], HIDE,
[GOOD, YES, NO, 1], ATTACK,
[GOOD, YES, NO, 0], WANDER,
[GOOD, NO, YES, 2], ATTACK,
[GOOD, NO, YES, 1], ATTACK,
[GOOD, NO, NO, 3], HIDE,
[GOOD, NO, NO, 2], HIDE,
[GOOD, NO, NO, 1], RUN,
[GOOD, NO, NO, 0], WANDER,
[AVERAGE, YES, YES, 0], WANDER,
[AVERAGE, YES, NO, 2], HIDE,
[AVERAGE, YES, NO, 1], RUN,
[AVERAGE, NO, YES, 2], HIDE,
[AVERAGE, NO, YES, 1], ATTACK,
[AVERAGE, NO, NO, 3], HIDE,
[AVERAGE, NO, NO, 2], HIDE,
[AVERAGE, NO, NO, 1], RUN,
[AVERAGE, NO, NO, 0], WANDER,
[AVERAGE, NO, NO, 0], WANDER,
[POOR, YES, NO, 2], HIDE,
[POOR, YES, NO, 1], RUN,
[POOR, NO, YES, 2], HIDE,
[POOR, NO, YES, 1], RUN,
[POOR, NO, NO, 2], HIDE,
[POOR, NO, NO, 1], HIDE,
[POOR, NO, NO, 0], WANDER,
[POOR, YES, NO, 0], WANDER,
]);
my $format = "%8s %5s %3s %7s %6s\n";
my @actions = qw/attack run wander hide/;
display_result($net,1,1,0,0);
display_result($net,1,0,1,2);
display_result($net,0,1,0,3);
while (1) {
print "Type 'quit' to exit\n";
my $health = prompt("Am I in poor, average, or good health? ", qr/^(?i:[pag])/);
my $knife = prompt("Do I have a knife? ", qr/^(?i:[yn])/);
my $gun = prompt("Do I have a gun? ", qr/^(?i:[yn])/);
my $enemies = prompt("How many enemies can I see? ", qr/^\d+$/);
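    # encode the answers numerically (first letter only):
    # poor/average/good => 0/1/2, yes/no => 1/0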
$health = substr $health, 0, 1;
$health =~ tr/pag/012/;
foreach ($knife,$gun) {
$_ = substr $_, 0, 1;
tr/yn/10/;
}
printf "I think I will %s!\n\n", $actions[$net->winner([
$health,
$knife,
$gun,
$enemies])];
}
sub prompt
{
my ($message,$domain) = @_;
my $valid_response = 0;
my $response;
do {
print $message;
chomp($response = <STDIN>);
exit if substr(lc $response, 0, 1) eq 'q';
$valid_response = $response =~ /$domain/;
} until $valid_response;
return $response;
}
sub display_result
{
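    # Print one demo row: the inputs decoded back into words, plus the
    # action the trained network chooses for them.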
my ($net,@data) = @_;
my $result = $net->winner(\@data);
my @health = qw/Poor Average Good/;
my @knife = qw/No Yes/;
my @gun = qw/No Yes/;
printf $format,
$health[$_[1]],
$knife[$_[2]],
$gun[$_[3]],
$_[4], # number of enemies
$actions[$result];
}
lib/AI/Ollama/Client.pm
AI::Ollama::Client - Client for AI::Ollama
=head1 SYNOPSIS
use 5.020;
use AI::Ollama::Client;
my $client = AI::Ollama::Client->new(
server => 'https://example.com/',
);
my $res = $client->someMethod()->get;
say $res;
=head1 METHODS
=head2 C<< checkBlob >>
my $res = $client->checkBlob()->get;
Check to see if a blob exists on the Ollama server, which is useful when creating models.
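For example (a minimal sketch; the C<digest> parameter name is an assumption based on the Ollama blob API, which addresses blobs by SHA-256 digest):

  my $exists = $client->checkBlob(
      digest => 'sha256:...',    # digest of the blob to check
  )->get;
  say $exists ? 'blob exists' : 'blob missing';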
=cut
around 'checkBlob' => sub ( $super, $self, %options ) {
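    # Collapse the HTTP response into a boolean Future:
    # any 2xx status means the blob exists.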
$super->( $self, %options )->then( sub( $res ) {
if( $res->code =~ /^2\d\d$/ ) {
return Future->done( 1 )
} else {
return Future->done( 0 )
}
});
};
=head2 C<< createBlob >>
my $res = $client->createBlob()->get;
Create a blob from a file. Returns the server file path.
=cut
=head2 C<< generateChatCompletion >>
my $res = $client->generateChatCompletion()->get;
Generate the next message in a chat with a provided model.
Returns a L<< AI::Ollama::GenerateChatCompletionResponse >>.
=cut
=head2 C<< copyModel >>
my $res = $client->copyModel()->get;
Creates a model with another name from an existing model.
=cut
=head2 C<< createModel >>
my $res = $client->createModel()->get;
Create a model from a Modelfile.
Returns a L<< AI::Ollama::CreateModelResponse >>.
=cut
=head2 C<< deleteModel >>
my $res = $client->deleteModel()->get;
Delete a model and its data.
=cut
=head2 C<< generateEmbedding >>
my $res = $client->generateEmbedding()->get;
Generate embeddings from a model.
Returns a L<< AI::Ollama::GenerateEmbeddingResponse >>.
=cut
=head2 C<< generateCompletion >>
use Future::Utils 'repeat';
my $responses = $client->generateCompletion();
repeat {
my ($res) = $responses->shift;
if( $res ) {
my $str = $res->get;
say $str;
}
Future::Mojo->done( defined $res );
} until => sub($done) { $done->get };
Generate a response for a given prompt with a provided model.
Returns a L<< AI::Ollama::GenerateCompletionResponse >>.
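Images for multimodal models can be passed either as raw data or as C<< { filename => ... } >> hash references; the wrapper below base64-encodes them before the request is sent. A minimal sketch (the C<llava> model name and image file are only examples):

  my $responses = $client->generateCompletion(
      model  => 'llava',
      prompt => 'Describe this image.',
      images => [ { filename => 'photo.png' } ],
  );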
=cut
around 'generateCompletion' => sub ( $super, $self, %options ) {
# Encode images as base64, if images exist:
    # (but create a copy so we don't overwrite the input array)
if (my $images = $options{images}) {
# Allow { filename => '/etc/passwd' }
$options{images} = [
map {
my $item = $_;
if( ref($item) eq 'HASH' ) {
$item = Mojo::File->new($item->{filename})->slurp();
};
encode_base64($item)
} @$images ];
}
return $super->($self, %options);
};
=head2 C<< pullModel >>
my $res = $client->pullModel(
name => 'llama',
)->get;
Download a model from the ollama library.
Returns a L<< AI::Ollama::PullModelResponse >>.
=cut
=head2 C<< pushModel >>
my $res = $client->pushModel()->get;
Upload a model to a model library.
Returns a L<< AI::Ollama::PushModelResponse >>.
=cut
=head2 C<< showModelInfo >>
my $info = $client->showModelInfo()->get;
say $info->modelfile;
Show details about a model including modelfile, template, parameters, license, and system prompt.
Returns a L<< AI::Ollama::ModelInfo >>.
=cut
=head2 C<< listModels >>
my $info = $client->listModels()->get;
for my $model ($info->models->@*) {
say $model->model; # llama2:latest
}
List models that are available locally.
Returns a L<< AI::Ollama::ModelsResponse >>.
lib/AI/PBDD.pm
@ISA = qw(Exporter DynaLoader);
@EXPORT = qw(
BDD_REORDER_NONE
BDD_REORDER_WIN2
BDD_REORDER_WIN3
BDD_REORDER_SIFT
BDD_REORDER_RANDOM
);
@EXPORT_OK = (
    # setup and cleanup
    qw( init gc verbose kill ),
    # simple BDD operations
    qw( getOne getZero createBDD getVarCount getBDD ),
    # ref counting
    qw( ref deref localDeref ),
    # BDD operations
    qw( and or andTo orTo nand nor xor ite imp biimp not
        makeSet exists forall relProd restrict constrain ),
    # variable replacement
    qw( createPair deletePair replace showPair ),
    # BDD analysis
    qw( support nodeCount satOne satCount ),
    # printing
    qw( printDot printSet print ),
    # debugging
    qw( printStats checkPackage debugPackage debugBDD ),
    # low-level access
    qw( internal_index internal_refcount internal_isconst
        internal_constvalue internal_iscomplemented
        internal_then internal_else ),
    # dynamic variable ordering
    qw( reorder_setMethod reorder_now reorder_enableDynamic
        reorder_createVariableGroup ),
);
$VERSION = '0.01';
bootstrap AI::PBDD $VERSION;
sub satCount {
my ($bdd, $vars_ignored) = @_;
if (!defined($vars_ignored)) {
return satCount__I($bdd);
} else {
return satCount__II($bdd, $vars_ignored);
}
}
sub printDot {
my ($bdd, $filename) = @_;
if (!defined($filename)) {
printDot__I($bdd);
} else {
printDot__II($bdd, $filename);
}
}
sub makeSet {
my ($vars, $size, $offset) = @_;
if (!defined($offset)) {
return makeSetI($vars, $size);
} else {
return makeSetII($vars, $size, $offset);
}
}
sub createPair {
my ($old, $new) = @_;
my $size = @$old;
return createPairI($old, $new, $size);
}
1;
__END__
AI::PBDD - Perl wrapper for the BuDDy C library
=head1 SYNOPSIS
use AI::PBDD qw(init createBDD and printDot kill);
init(100, 100000);
my $var1 = createBDD();
my $var2 = createBDD();
my $bdd = and($var1, $var2);
printDot($bdd);
kill();
=head1 DESCRIPTION
Binary Decision Diagrams (BDDs) are used for efficient computation of many common problems. This is done by giving a compact representation and a set of efficient operations on boolean functions f: {0,1}^n --> {0,1}.
BDDs and their operations are described in many academic papers that can be found on the Internet. A good place to get started with BDDs is the wikipedia article L<http://en.wikipedia.org/wiki/Binary_decision_diagram>.
It can also be useful to look at the test code for this package in the C<t> directory, as well as at the JBDD documentation and examples at L<http://javaddlib.sourceforge.net/jbdd/>.
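A minimal sketch building on the SYNOPSIS above (assuming the same C<init> arguments; C<and> is written fully qualified here since C<and> is also a Perl operator, and C<satCount> counts assignments over all declared variables):

  use AI::PBDD qw(init createBDD satCount kill);

  init(100, 100000);                 # same arguments as in the SYNOPSIS
  my $x1 = createBDD();
  my $x2 = createBDD();
  my $f  = AI::PBDD::and($x1, $x2);  # f(x1,x2) = x1 AND x2
  print satCount($f), "\n";          # number of satisfying assignments
  kill();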
=head1 VERSION
This man page documents "PBDD" version 0.01.
=head1 AUTHOR
Gianluca Torta
mailto:torta@di.unito.it
=head1 COPYRIGHT
Copyright (c) 2011 by Gianluca Torta. All rights reserved.
examples/NeuralNet/pso_ann.pl
use strict;
use AI::PSO;
my %test_params = (
numParticles => 4,
numNeighbors => 3,
maxIterations => 1000,
dimensions => 8, # 8 since we have 8 network weights we want to optimize for a 3 input 2 hidden 1 output feed-forward neural net
deltaMin => -2.0,
deltaMax => 4.0,
meWeight => 2.0,
meMin => 0.0,
meMax => 1.0,
themWeight => 2.0,
themMin => 0.0,
themMax => 1.0,
exitFitness => 0.99,
verbose => 1,
);
my $numInputs = 3;
my $numHidden = 2;
my $xferFunc = "Logistic";
my $expectedValue = 3.5; # this is the value that we want to train the ANN to produce (just like the example in t/PSO.t)
sub test_fitness_function(@) {
my (@arr) = (@_);
&writeAnnConfig($annConfig, $numInputs, $numHidden, $xferFunc, @arr);
my $netValue = &runANN($annConfig, $annInputs);
print "network value = $netValue\n";
	# The closer the network value gets to our desired value,
	# the closer the fitness should get to 1.
	#
	# 2 / (1 + exp(d)) maps d = 0 to fitness 1 and large d toward 0;
	# it is equal to 1 - tanh(d/2), hence the resemblance to the
	# hyperbolic tangent ;)
	#
my $magnitudeFromBest = abs($expectedValue - $netValue);
return 2 / (1 + exp($magnitudeFromBest));
}
pso_set_params(\%test_params);
pso_register_fitness_function('test_fitness_function');
pso_optimize();
##### io #########
sub writeAnnConfig {
    my ($configFile, $inputs, $hidden, $func, @weights) = @_;
    open(my $ann, '>', $configFile)
        or die "can't write ANN config file '$configFile': $!";
    print $ann "$inputs $hidden\n";
    print $ann "$func\n";
    foreach my $weight (@weights) {
        print $ann "$weight ";
    }
    print $ann "\n";
    close($ann);
}
sub runANN($$) {
my ($configFile, $dataFile) = @_;
my $networkValue = `ann_compute $configFile $dataFile`;
chomp($networkValue);
return $networkValue;
}
example/PSOTest-MultiCore.pl
use AI::ParticleSwarmOptimization::MCE;
#use AI::ParticleSwarmOptimization::Pmap;
use Data::Dumper; $::Data::Dumper::Sortkeys = 1;
#=======================================================================
sub calcFit {
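	# Shifted paraboloid: the global minimum (fitness 0) is at
	# values[i] = int(-n/2) + i for an n-dimensional particle.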
my @values = @_;
my $offset = int (-@values / 2);
my $sum;
select( undef, undef, undef, 0.01 ); # Simulation of heavy processing...
$sum += ($_ - $offset++) ** 2 for @values;
return $sum;
}
#=======================================================================
++$|;
#-----------------------------------------------------------------------
#my $pso = AI::ParticleSwarmOptimization::Pmap->new( # Multi-core
my $pso = AI::ParticleSwarmOptimization::MCE->new( # Multi-core
#my $pso = AI::ParticleSwarmOptimization->new( # Single-core
-fitFunc => \&calcFit,
-dimensions => 10,
-iterations => 10,
-numParticles => 1000,
	# -workers is used only by the many-core versions; the best value
	# equals the number of CPU cores of your system, and a sensible
	# value is selected automatically if left undefined.
	-workers	=> 4,
);
my $beg = time;
example/PSOTest-MultiCore.pl
#use AI::ParticleSwarmOptimization::MCE;
use AI::ParticleSwarmOptimization::Pmap;
use Data::Dumper; $::Data::Dumper::Sortkeys = 1;
#=======================================================================
sub calcFit {
my @values = @_;
my $offset = int (-@values / 2);
my $sum;
select( undef, undef, undef, 0.01 ); # Simulation of heavy processing...
$sum += ($_ - $offset++) ** 2 for @values;
return $sum;
}
#=======================================================================
++$|;
#-----------------------------------------------------------------------
#my $pso = AI::ParticleSwarmOptimization->new( # Single-core
#my $pso = AI::ParticleSwarmOptimization::MCE->new( # Multi-core
my $pso = AI::ParticleSwarmOptimization::Pmap->new( # Multi-core
-fitFunc => \&calcFit,
-dimensions => 10,
-iterations => 10,
-numParticles => 1000,
	# -workers is used only by the many-core versions; the best value
	# equals the number of CPU cores of your system, and a sensible
	# value is selected automatically if left undefined.
	-workers	=> 4,
);
my $beg = time;
Samples/PSOPlatTest.pl
use AI::ParticleSwarmOptimization;
use Math::Random::MT qw();
++$|;
my $pso = AI::ParticleSwarmOptimization->new (
-fitFunc => \&calcFit,
-dimensions => 3,
-iterations => 500,
-exitPlateau => 1,
-exitPlateauDP => 3,
-exitPlateauBurnin => 100,
-exitPlateauWindow => 60,
);
$pso->init ();
my $fitValue = $pso->optimize ();
my ($fit, @values) = $pso->getParticleBestPos ($best);
my $iters = $pso->getIterationCount ();
print $pso->getSeed();
printf ",# Fit %.5f at (%s) after %d iterations\n",
$fit, join (', ', map {sprintf '%.4f', $_} @values), $iters;
sub calcFit {
my @values = @_;
my $offset = int (-@values / 2);
my $sum;
$sum += ($_ - $offset++)**2 for @values;
return $sum;
}
Benchmark/perl-vs-xs.pl
use AI::Pathfinding::AStar::Rectangle;
my $m = AI::Pathfinding::AStar::Rectangle->new({ width => WIDTH_X, heigth => WIDTH_Y });
for my $x (0 .. WIDTH_X - 1 )
{
for my $y (0 .. WIDTH_Y - 1 )
{
$map[$x][$y] = 1;
}
}
$map[5][$_] = 0 for 5 .. WIDTH_Y - 5;
$map[WIDTH_X - 5][$_] = 0 for 5 .. WIDTH_Y - 5;
$map[$_][5] = 0 for 5 .. WIDTH_X - 5;
$map[WIDTH_X - 15][$_] = 0 for 15 .. WIDTH_Y - 10;
$map[$_][15] = 0 for 15 .. WIDTH_X - 15;
for my $x (0 .. WIDTH_X - 1 )
{
for my $y (0 .. WIDTH_Y - 1 )
{
$m->set_passability($x, $y, $map[$x][$y]) ;
}
}
my ( $x_start, $y_start ) = ( WIDTH_X >> 1, WIDTH_Y >> 1 );
my ( $x_end, $y_end ) = ( 0, 0 );
my $t0 = [gettimeofday];
my $path;
my $r = timethese( -1, {Perl=>sub { astar( $x_start, $y_start, $x_end, $y_end ) },
XS=>sub {$m->astar($x_start, $y_start, $x_end, $y_end);}});
cmpthese($r);
die;	# benchmarking only: the path-drawing demo below is never reached
for (0..99) {
$path = &astar( $x_start, $y_start, $x_end, $y_end );
}
print "Elapsed: ".tv_interval ( $t0 )."\n";
print "Path length: ".length($path)."\n";
# start end points
$map[ $x_start ][ $y_start ] = 3;
$map[ $x_end ][ $y_end ] = 4;
# draw path
my %vect = (
# x y
1 => [-1, 1, '|/'],
2 => [ 0, 1, '.|'],
3 => [ 1, 1, '|\\'],
4 => [-1, 0, '|<'],
6 => [ 1, 0, '|>'],
7 => [-1,-1, '|\\'],
8 => [ 0,-1, '\'|'],
9 => [ 1,-1, '|/']
);
my ( $x, $y ) = ( $x_start, $y_start );
for ( split //, $path )
{
$map[$x][$y] = '|o';
$x += $vect{$_}->[0];
$y += $vect{$_}->[1];
$map[$x][$y] = '|o';
}
printf "%02d", $_ for 0 .. WIDTH_X - 1;
print "\n";
for my $y ( 0 .. WIDTH_Y - 1 )
{
for my $x ( 0 .. WIDTH_X - 1 )
{
print $map[$x][$y] eq
'1' ? "|_" : (
$map[$x][$y] eq '0' ? "|#" : (
$map[$x][$y] eq '3' ? "|S" : (
$map[$x][$y] eq '4' ? "|E" : $map[$x][$y] ) ) );
}
print "$y\n";
}
sub astar
{
my ( $xs, $ys, $xe, $ye ) = @_;
my %close;
my ( %open, @g, @h, @r, @open_idx );
for my $x (0 .. WIDTH_X - 1 )
{
for my $y (0 .. WIDTH_Y - 1 )
{
$g[$x][$y] = 0;
$r[$x][$y] = 0;
$h[$x][$y] = 0;
}
}
my %cost = (
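	# step costs keyed by "dx.dy": 5 for orthogonal moves,
	# 7 for diagonal moves (roughly 5 * sqrt(2))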
"0.-1" => 5, #|.
"1.-1" => 7, #/.
"1.0" => 5, #.-
"1.1" => 7, #`\
"0.1" => 5, #`|
"-1.1" => 7, #
"-1.0" => 5,
"-1.-1" => 7
);
my $it = 0;
my $oindx = 0;
my ( $x, $y ) = ( $xs, $ys );
while ( $x != $xe || $y != $ye )
{
$close{$x}{$y} = 1;
$open{$x}{$y} = 0;
for ( "0.-1", "-1.1", "0.1", "1.1", "-1.0", "1.-1", "1.0", "-1.-1" )
{
my ( $xd, $yd ) = split /\./, $_;
my ( $xn, $yn ) = ( $x + $xd, $y + $yd );
next if $xn == WIDTH_X ||
$xn < 0 ||
$yn == WIDTH_Y ||
$yn < 0 ||
$close{$xn}{$yn} ||
$map[$xn][$yn] == 0;
my $ng = $g[$x][$y] + $cost{$_};
if ( $open{$xn}{$yn} )
{
if ( $ng < $g[$xn][$yn] )
{
$r[$xn][$yn] = [$x,$y];
$g[$xn][$yn] = $ng;
}
}
else
{
$open{$xn}{$yn} = 1;
$g[$xn][$yn] = $ng;
my ( $xa, $ya ) = ( abs( $xn - $xe ), abs( $yn - $ye ) );
$h[$xn][$yn] = #( $xa > $ya ? $xa : $ya ) * 7;
( abs( $xn - $xe ) + abs( $yn - $ye ) ) * 7;
$r[$xn][$yn] = [$x,$y];
push @open_idx, [$xn, $yn, \$g[$xn][$yn], \$h[$xn][$yn]];
}
# deb($x, $y, $xn, $yn, \@g);
}
@open_idx = sort { ${$a->[2]} + ${$a->[3]} <=> ${$b->[2]} + ${$b->[3]} } @open_idx;
( $x, $y ) = @{ shift @open_idx };
$it++;
}
# print "Iterations: $it: $oindx\n";
my $path = "";
my %idx2path =
(
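	# map each "dx.dy" step back to the numeric-keypad direction digit
	# used to encode the path string (cf. the %vect table above)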
"0.-1" => 8, #|.
"1.-1" => 9, #/.
"1.0" => 6, #.-
"1.1" => 3, #`\
"0.1" => 2, #`|
"-1.1" => 1, #
"-1.0" => 4,
"-1.-1" => 7
);
while ( $x != $xs || $y != $ys )
{
# print "$x:$y\n";
my ($xp, $yp) = @{$r[$x][$y]};
$path = $idx2path{($x-$xp).".".($y-$yp)}.$path;
( $x, $y ) = ( $xp, $yp);
}
# print "Path: $path\n";
return $path;
}
sub calc_obstacle
{
my ( $x1, $y1, $x2, $y2 ) = @_;
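	# Bresenham-style walk from (x1,y1) to (x2,y2), counting impassable
	# cells; returns a score weighting obstacles against path length.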
my ( $x, $y, $Xend, $obstacle, $pixel);
my $dx = abs($x2 - $x1);
my $dy = abs($y2 - $y1);
my $d = ( $dy << 1 ) - $dx;
my $inc1 = $dy << 1;
my $inc2 = ($dy - $dx) << 1;
if ( $x1 > $x2)
{
$x = $x2;
$y = $y2;
$Xend = $x1;
}
else
{
$x = $x1;
$y = $y1;
$Xend = $x2;
};
$obstacle+=!$map[$x][$y];
$pixel+=5;
while ( $x < $Xend )
{
$x++;
if ($d < 0) {$d += $inc1}
else
{
$y++;
$d += $inc2;
};
$obstacle+=!$map[$x][$y];
$pixel += 5;
};
return ( $obstacle << 3 ) + $pixel;
}
sub deb
{
my ( $x, $y, $xn, $yn, $g) = @_;
for my $j ( 0 .. WIDTH_Y - 1 )
{
for my $i ( 0 .. WIDTH_X - 1 )
{
if ( !$map[$i][$j] )
{
print " ##"
}
else
{
if ( $x == $i && $y == $j)
{
print "c";
}
elsif ( $xn == $i && $yn == $j )
{
print "n";
}
else
{
print " ";
}
printf "%02d", $g->[$i]->[$j]
}
}
print "\n";
}
<>;
}
lib/AI/Pathfinding/AStar.pm
use AI::Pathfinding::AStar::AStarNode;
my $nodes;
sub _init {
my $self = shift;
croak "no getSurrounding() method defined" unless $self->can("getSurrounding");
return $self->SUPER::_init(@_);
}
sub doAStar
{
my ($map, $target, $open, $nodes, $max) = @_;
my $n = 0;
FLOOP: while ( (defined $open->top()) && ($open->top()->{id} ne $target) ) {
#allow incremental calculation
last FLOOP if (defined($max) and (++$n == $max));
my $curr_node = $open->extract_top();
$curr_node->{inopen} = 0;
my $G = $curr_node->{g};
#get surrounding squares
my $surr_nodes = $map->getSurrounding($curr_node->{id}, $target);
foreach my $node (@$surr_nodes) {
my ($surr_id, $surr_cost, $surr_h) = @$node;
#skip the node if it's in the CLOSED list
next if ( (exists $nodes->{$surr_id}) && (! $nodes->{$surr_id}->{inopen}) );
#add it if we haven't seen it before
if (! exists $nodes->{$surr_id}) {
my $surr_node = AI::Pathfinding::AStar::AStarNode->new($surr_id,$G+$surr_cost,$surr_h);
$surr_node->{parent} = $curr_node;
$surr_node->{cost} = $surr_cost;
$surr_node->{inopen} = 1;
$nodes->{$surr_id} = $surr_node;
$open->add($surr_node);
}
else {
#otherwise it's already in the OPEN list
#check to see if it's cheaper to go through the current
#square compared to the previous path
my $surr_node = $nodes->{$surr_id};
my $currG = $surr_node->{g};
my $possG = $G + $surr_cost;
if ($possG < $currG) {
#change the parent
$surr_node->{parent} = $curr_node;
$surr_node->{g} = $possG;
$open->decrease_key($surr_node);
}
}
}
}
}
sub fillPath
{
my ($map,$open,$nodes,$target) = @_;
my $path = [];
my $curr_node = (exists $nodes->{$target}) ? $nodes->{$target} : $open->top();
while (defined $curr_node) {
unshift @$path, $curr_node->{id};
$curr_node = $curr_node->{parent};
}
return $path;
}
sub findPath {
my ($map, $start, $target) = @_;
my $nodes = {};
my $curr_node = undef;
my $open = Heap::Binomial->new;
#add starting square to the open list
$curr_node = AI::Pathfinding::AStar::AStarNode->new($start,0,0); # AStarNode(id,g,h)
$curr_node->{parent} = undef;
$curr_node->{cost} = 0;
$curr_node->{g} = 0;
$curr_node->{h} = 0;
$curr_node->{inopen} = 1;
$nodes->{$start} = $curr_node;
$open->add($curr_node);
$map->doAStar($target,$open,$nodes,undef);
my $path = $map->fillPath($open,$nodes,$target);
return wantarray ? @{$path} : $path;
}
sub findPathIncr {
my ($map, $start, $target, $state, $max) = @_;
my $open = undef;
my $curr_node = undef;
my $nodes = {};
if (defined($state)) {
$nodes = $state->{'visited'};
$open = $state->{'open'};
}
else {
$open = Heap::Binomial->new;
#add starting square to the open list
$curr_node = AI::Pathfinding::AStar::AStarNode->new($start,0,0); # AStarNode(id,g,h)
$curr_node->{parent} = undef;
$curr_node->{cost} = 0;
$curr_node->{g} = 0;
$curr_node->{h} = 0;
$curr_node->{inopen} = 1;
$nodes->{$start} = $curr_node;
$open->add($curr_node);
}
$map->doAStar($target,$open,$nodes,$max);
my $path = $map->fillPath($open,$nodes,$target);
$state = {
'path' => $path,
'open' => $open,
'visited' => $nodes,
'done' => defined($nodes->{$target}),
};
return $state;
}
1;
__END__
AI::Pathfinding::AStar - Perl implementation of the A* pathfinding algorithm
=head1 SYNOPSIS
package My::Map::Package;
use base AI::Pathfinding::AStar;
# Methods required by AI::Pathfinding::AStar
sub getSurrounding { ... }
package main;
use My::Map::Package;
my $map = My::Map::Package->new or die "No map for you!";
my $path = $map->findPath($start, $target);
print join(', ', @$path), "\n";
#Or you can do it incrementally, say 3 nodes at a time
my $state = $map->findPathIncr($start, $target, undef, 3);
while ($state->{path}->[-1] ne $target) {
print join(', ', @{$state->{path}}), "\n";
$state = $map->findPathIncr($start, $target, $state, 3);
}
print "Completed Path: ", join(', ', @{$state->{path}}), "\n";
=head1 DESCRIPTION
This module implements the A* pathfinding algorithm. It acts as a base class from which a custom map object can be derived. It requires from the map object a subroutine named C<getSurrounding> (described below) and provides to the object two routines, C<findPath> and C<findPathIncr> (described below).
AI::Pathfinding::AStar requires that the map object define a routine named C<getSurrounding> which accepts the starting and target node ids for which you are calculating the path. In return it should provide an array reference containing, for each surrounding node, the node's ID, the cost of moving to it, and its heuristic estimate of the distance to the target.
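For example, a map class over a square grid with C<"x.y"> node ids might implement it like this (a minimal sketch; C<is_passable> is a hypothetical helper of your map class):

  sub getSurrounding {
      my ($self, $node, $target) = @_;
      my ($x,  $y)  = split /\./, $node;
      my ($tx, $ty) = split /\./, $target;
      my @surrounding;
      for my $d ([0, 1], [0, -1], [1, 0], [-1, 0]) {
          my ($nx, $ny) = ($x + $d->[0], $y + $d->[1]);
          next unless $self->is_passable($nx, $ny);   # hypothetical helper
          my $h = abs($tx - $nx) + abs($ty - $ny);    # Manhattan heuristic
          push @surrounding, [ "$nx.$ny", 1, $h ];    # [id, cost, heuristic]
      }
      return \@surrounding;
  }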
lib/AI/Pathfinding/OptimizeMultiple.pm
has _num_boards => ( isa => 'Int', is => 'ro', init_arg => 'num_boards', );
has _orig_scans_data => ( isa => 'PDL', is => 'rw' );
has _optimize_for => ( isa => 'Str', is => 'ro', init_arg => 'optimize_for', );
has _scans_data => ( isa => 'PDL', is => 'rw' );
has _selected_scans =>
( isa => 'ArrayRef', is => 'ro', init_arg => 'selected_scans', );
has _status => ( isa => 'Str', is => 'rw' );
has _quotas => ( isa => 'ArrayRef[Int]', is => 'ro', init_arg => 'quotas' );
has _total_boards_solved => ( isa => 'Int', is => 'rw' );
has _total_iters => ( is => 'rw' );
has _trace_cb =>
( isa => 'Maybe[CodeRef]', is => 'ro', init_arg => 'trace_cb' );
has _scans_meta_data => ( isa => 'ArrayRef', is => 'ro', init_arg => 'scans' );
has _scans_iters_pdls =>
( isa => 'HashRef', is => 'rw', init_arg => 'scans_iters_pdls' );
has _stats_factors => (
isa => 'HashRef',
is => 'ro',
init_arg => 'stats_factors',
default => sub { return +{}; },
);
sub BUILD
{
my $self = shift;
my $args = shift;
my $scans_data = PDL::cat(
map {
my $id = $_->id();
my $pdl = $self->_scans_iters_pdls()->{$id};
my $factor = $self->_stats_factors->{$id};
(
defined($factor)
? ( ( $pdl >= 0 ) * ( ( $pdl / $factor )->ceil() ) +
( $pdl < 0 ) * $pdl )
: $pdl
);
} @{ $self->_selected_scans() }
);
$self->_orig_scans_data($scans_data);
$self->_scans_data( $self->_orig_scans_data()->copy() );
return 0;
}
my $BOARDS_DIM = 0;
my $SCANS_DIM = 1;
my $STATISTICS_DIM = 2;
sub _next_iter_idx
{
my $self = shift;
my $ret = $self->_iter_idx();
$self->_iter_idx( $ret + 1 );
return $ret;
}
sub _get_next_quota
{
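    # A quota schedule may be an array reference (a fixed per-iteration
    # schedule) or a code reference called with the iteration index.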
my $self = shift;
my $iter = $self->_next_iter_idx();
if ( ref( $self->_quotas() ) eq "ARRAY" )
{
return $self->_quotas()->[$iter];
}
else
{
return $self->_quotas()->($iter);
}
}
sub _calc_get_iter_state_param_method
{
my $self = shift;
my $optimize_for = $self->_optimize_for();
my %resolve = (
len => "_get_iter_state_params_len",
minmax_len => "_get_iter_state_params_minmax_len",
speed => "_get_iter_state_params_speed",
);
return $resolve{$optimize_for};
}
sub _get_iter_state_params
{
my $self = shift;
my $method = $self->_calc_get_iter_state_param_method();
return $self->$method();
}
sub _my_sum_over
{
my $pdl = shift;
return $pdl->sumover()->slice(":,(0)");
}
sub _my_xchg_sum_over
{
my $pdl = shift;
return _my_sum_over( $pdl->xchg( 0, 1 ) );
}
sub _get_iter_state_params_len
{
my $self = shift;
my $iters_quota = 0;
my $num_solved_in_iter = 0;
my $selected_scan_idx;
# If no boards were solved, then try with a larger quota
while ( $num_solved_in_iter == 0 )
{
my $q_more = $self->_get_next_quota();
if ( !defined($q_more) )
{
AI::Pathfinding::OptimizeMultiple::Error::OutOfQuotas->throw(
error => "No q_more", );
}
$iters_quota += $q_more;
my $iters = $self->_scans_data()->slice(":,:,0");
my $solved = ( ( $iters <= $iters_quota ) & ( $iters > 0 ) );
my $num_moves = $self->_scans_data->slice(":,:,2");
my $solved_moves = $solved * $num_moves;
my $solved_moves_sums = _my_sum_over($solved_moves);
my $solved_moves_counts = _my_sum_over($solved);
my $solved_moves_avgs = $solved_moves_sums / $solved_moves_counts;
( undef, undef, $selected_scan_idx, undef ) =
$solved_moves_avgs->minmaximum();
$num_solved_in_iter = $solved_moves_counts->at($selected_scan_idx);
}
return {
quota => $iters_quota,
num_solved => $num_solved_in_iter,
scan_idx => $selected_scan_idx,
};
}
sub _get_iter_state_params_minmax_len
{
my $self = shift;
my $iters_quota = 0;
my $num_solved_in_iter = 0;
my $selected_scan_idx;
# If no boards were solved, then try with a larger quota
while ( $num_solved_in_iter == 0 )
{
my $q_more = $self->_get_next_quota();
if ( !defined($q_more) )
{
AI::Pathfinding::OptimizeMultiple::Error::OutOfQuotas->throw(
error => "No q_more", );
}
$iters_quota += $q_more;
my $iters = $self->_scans_data()->slice(":,:,0");
my $solved = ( ( $iters <= $iters_quota ) & ( $iters > 0 ) );
my $num_moves = $self->_scans_data->slice(":,:,2");
my $solved_moves = $solved * $num_moves;
my $solved_moves_maxima = $solved_moves->maximum()->slice(":,(0),(0)");
my $solved_moves_counts = _my_sum_over($solved);
( undef, undef, $selected_scan_idx, undef ) =
$solved_moves_maxima->minmaximum();
$num_solved_in_iter = $solved_moves_counts->at($selected_scan_idx);
}
return {
quota => $iters_quota,
num_solved => $num_solved_in_iter,
scan_idx => $selected_scan_idx,
};
}
sub _get_iter_state_params_speed
{
my $self = shift;
my $iters_quota = 0;
my $num_solved_in_iter = 0;
my $selected_scan_idx;
# If no boards were solved, then try with a larger quota
while ( $num_solved_in_iter == 0 )
{
my $q_more = $self->_get_next_quota();
if ( !defined($q_more) )
{
AI::Pathfinding::OptimizeMultiple::Error::OutOfQuotas->throw(
error => "No q_more" );
}
$iters_quota += $q_more;
( undef, $num_solved_in_iter, undef, $selected_scan_idx ) =
PDL::minmaximum(
PDL::sumover(
( $self->_scans_data() <= $iters_quota ) &
( $self->_scans_data() > 0 )
)
);
}
return {
quota => $iters_quota,
num_solved => $num_solved_in_iter->at(0),
scan_idx => $selected_scan_idx->at(0),
};
}
sub _get_selected_scan
{
my $self = shift;
my $iter_state =
AI::Pathfinding::OptimizeMultiple::IterState->new(
$self->_get_iter_state_params(), );
$iter_state->attach_to($self);
return $iter_state;
}
sub _inspect_quota
{
my $self = shift;
my $state = $self->_get_selected_scan();
$state->register_params();
$state->update_total_iters();
if ( $self->_total_boards_solved() == $self->_num_boards() )
{
$self->_status("solved_all");
}
else
{
$state->update_idx_slice();
}
$state->detach();
}
sub calc_meta_scan
{
my $self = shift;
$self->chosen_scans( [] );
$self->_total_boards_solved(0);
$self->_total_iters(0);
$self->_status("iterating");
# $self->_inspect_quota() throws ::Error::OutOfQuotas if
# it does not have any available quotas.
eval {
while ( $self->_status() eq "iterating" )
{
$self->_inspect_quota();
}
};
if (
my $err = Exception::Class->caught(
'AI::Pathfinding::OptimizeMultiple::Error::OutOfQuotas')
)
{
$self->_status("out_of_quotas");
}
else
{
$err = Exception::Class->caught();
if ($err)
{
if ( not( blessed $err && $err->can('rethrow') ) )
{
die $err;
}
$err->rethrow;
}
}
return;
}
sub _get_num_scans
{
my $self = shift;
return ( ( $self->_scans_data()->dims() )[$SCANS_DIM] );
}
sub _calc_chosen_scan
{
my ( $self, $selected_scan_idx, $iters_quota ) = @_;
return AI::Pathfinding::OptimizeMultiple::ScanRun->new(
{
iters => (
$iters_quota * (
$self->_stats_factors->{
( $self->_selected_scans->[$selected_scan_idx]->id() ),
} // 1
)
),
scan_idx => $selected_scan_idx,
}
);
}
sub calc_flares_meta_scan
{
my $self = shift;
$self->chosen_scans( [] );
$self->_total_boards_solved(0);
$self->_total_iters(0);
$self->_status("iterating");
my $iters_quota = 0;
my $flares_num_iters = PDL::Core::pdl( [ (0) x $self->_get_num_scans() ] );
my $ones_constant =
PDL::Core::pdl( [ map { [1] } ( 1 .. $self->_get_num_scans() ) ] );
my $next_num_iters_for_each_scan_x_scan =
( ( $ones_constant x $flares_num_iters ) );
my $num_moves = $self->_scans_data->slice(":,:,1");
# The number of moves for dimension 0,1,2 above.
my $num_moves_repeat = $num_moves->clump( 1 .. 2 )->xchg( 0, 1 )
->dummy( 0, $self->_get_num_scans() );
my $selected_scan_idx;
my $loop_iter_num = 0;
my $UNSOLVED_NUM_MOVES_CONSTANT = 64 * 1024 * 1024;
my $last_avg = $UNSOLVED_NUM_MOVES_CONSTANT;
FLARES_LOOP:
while ( my $q_more = $self->_get_next_quota() )
{
$iters_quota += $q_more;
# Next number of iterations for each scan x scan combination.
my $next_num_iters = (
( $ones_constant x $flares_num_iters ) + (
PDL::MatrixOps::identity( $self->_get_num_scans() ) *
$iters_quota
)
);
# print "\$next_num_iters = $next_num_iters\n";
my $iters = $self->_scans_data()->slice(":,:,0");
my $iters_repeat =
$iters->dummy( 0, $self->_get_num_scans() )->xchg( 1, 2 )
->clump( 2 .. 3 );
# print "\$iters_repeat =", join(",",$iters_repeat->dims()), "\n";
my $next_num_iters_repeat =
$next_num_iters->dummy( 0, $self->_num_boards() )->xchg( 0, 2 );
# print "\$next_num_iters_repeat =", join(",",$next_num_iters_repeat->dims()), "\n";
# A boolean tensor of which boards were solved:
# Dimension 0 - Which scan is it. - size - _get_num_scans()
# Dimension 1 - Which scan we added the quota to
# - size - _get_num_scans()
# Dimension 2 - Which board. - size - _num_boards()
my $solved =
( $iters_repeat >= 0 ) * ( $iters_repeat < $next_num_iters_repeat );
# print "\$num_moves_repeat =", join(",",$num_moves_repeat->dims()), "\n";
my $num_moves_solved =
( $solved * $num_moves_repeat ) +
( $solved->not() * $UNSOLVED_NUM_MOVES_CONSTANT );
my $minimal_num_moves_solved =
$num_moves_solved->xchg( 0, 1 )->minimum();
my $which_minima_are_solved =
( $minimal_num_moves_solved != $UNSOLVED_NUM_MOVES_CONSTANT );
my $minimal_with_zeroes =
$which_minima_are_solved * $minimal_num_moves_solved;
my $solved_moves_sums = _my_xchg_sum_over($minimal_with_zeroes);
my $solved_moves_counts = _my_xchg_sum_over($which_minima_are_solved);
my $solved_moves_avgs = $solved_moves_sums / $solved_moves_counts;
# print join(",", $solved_moves_avgs->minmaximum()), "\n";
my $min_avg;
( $min_avg, undef, $selected_scan_idx, undef ) =
$solved_moves_avgs->minmaximum();
$last_avg = $min_avg;
push @{ $self->chosen_scans() },
$self->_calc_chosen_scan( $selected_scan_idx, $iters_quota );
$flares_num_iters->set( $selected_scan_idx,
$flares_num_iters->at($selected_scan_idx) + $iters_quota );
$self->_selected_scans()->[$selected_scan_idx]->mark_as_used();
$iters_quota = 0;
my $num_solved = $solved_moves_counts->at($selected_scan_idx);
my $flares_num_iters_repeat =
$flares_num_iters->dummy( 0, $self->_num_boards() );
# A boolean tensor:
# Dimension 0 - board.
# Dimension 1 - scans.
my $solved_with_which_iter =
( $flares_num_iters_repeat >= $iters->clump( 1 .. 2 ) ) &
( $iters->clump( 1 .. 2 ) >= 0 );
my $total_num_iters = (
( $solved_with_which_iter * $flares_num_iters_repeat )->sum() + (
$solved_with_which_iter->not()->andover() *
$flares_num_iters->sum()
)->sum()
);
print "Finished ", $loop_iter_num++,
" ; #Solved = $num_solved ; Iters = $total_num_iters ; Avg = $min_avg\n";
STDOUT->flush();
}
}
sub calc_board_iters
{
my $self = shift;
my $board = shift;
my $board_iters = 0;
my @info = PDL::list( $self->_orig_scans_data()->slice("$board,:") );
my @orig_info = @info;
foreach my $s ( @{ $self->chosen_scans() } )
{
if ( ( $info[ $s->scan_idx() ] > 0 )
&& ( $info[ $s->scan_idx() ] <= $s->iters() ) )
{
            $board_iters += $info[ $s->scan_idx() ];
last;
}
else
{
if ( $info[ $s->scan_idx() ] > 0 )
{
$info[ $s->scan_idx() ] -= $s->iters();
}
$board_iters += $s->iters();
}
}
return {
'per_scan_iters' => \@orig_info,
'board_iters' => $board_iters,
};
}
sub get_final_status
{
my $self = shift;
return $self->_status();
}
sub simulate_board
{
my ( $self, $board_idx, $args ) = @_;
if ( $board_idx !~ /\A[0-9]+\z/ )
{
die "Board index '$board_idx' is not numeric!";
}
$args ||= {};
my $chosen_scans = ( $args->{chosen_scans} || $self->chosen_scans );
my @info = PDL::list( $self->_orig_scans_data()->slice("$board_idx,:") );
my $board_iters = 0;
my @scan_runs;
my $status = "Unsolved";
my $add_new_scan_run = sub {
my $scan_run = shift;
push @scan_runs, $scan_run;
$board_iters += $scan_run->iters();
return;
};
SCANS_LOOP:
foreach my $s (@$chosen_scans)
{
if ( ( $info[ $s->scan_idx() ] > 0 )
&& ( $info[ $s->scan_idx() ] <= $s->iters() ) )
{
$add_new_scan_run->(
AI::Pathfinding::OptimizeMultiple::ScanRun->new(
{
iters => $info[ $s->scan_idx() ],
scan_idx => $s->scan_idx(),
},
)
);
$status = "Solved";
last SCANS_LOOP;
}
else
{
if ( $info[ $s->scan_idx() ] > 0 )
{
$info[ $s->scan_idx() ] -= $s->iters();
}
$add_new_scan_run->(
AI::Pathfinding::OptimizeMultiple::ScanRun->new(
{
iters => $s->iters(),
scan_idx => $s->scan_idx(),
},
)
);
}
}
return AI::Pathfinding::OptimizeMultiple::SimulationResults->new(
{
status => $status,
scan_runs => \@scan_runs,
total_iters => $board_iters,
}
);
}
sub _trace
{
my ( $self, $args ) = @_;
if ( my $trace_callback = $self->_trace_cb() )
{
$trace_callback->($args);
}
return;
}
sub get_total_iters
{
my $self = shift;
return $self->_total_iters();
}
sub _add_to_total_iters
{
my $self = shift;
my $how_much = shift;
$self->_total_iters( $self->_total_iters() + $how_much );
return;
}
sub _add_to_total_boards_solved
{
my $self = shift;
my $how_much = shift;
$self->_total_boards_solved( $self->_total_boards_solved() + $how_much );
return;
}
1; # End of AI::Pathfinding::OptimizeMultiple
__END__
version 0.0.17
=head1 SYNOPSIS
use AI::Pathfinding::OptimizeMultiple;
my @scans =
(
{
name => "first_search"
},
{
name => "second_search",
},
{
name => "third_search",
},
);
my $obj = AI::Pathfinding::OptimizeMultiple->new(
{
scans => \@scans,
num_boards => 32_000,
optimize_for => 'speed',
scans_iters_pdls =>
{
first_search => $first_search_pdl,
second_search => $second_search_pdl,
},
quotas => [400, 300, 200],
selected_scans =>
[
AI::Pathfinding::OptimizeMultiple::Scan->new(
id => 'first_search',
cmd_line => "--preset first_search",
),
AI::Pathfinding::OptimizeMultiple::Scan->new(
id => 'second_search',
cmd_line => "--preset second_search",
),
AI::Pathfinding::OptimizeMultiple::Scan->new(
id => 'third_search',
cmd_line => "--preset third_search",
),
],
}
);
$obj->calc_meta_scan();
foreach my $scan_alloc (@{$obj->chosen_scans()})
{
printf "Run %s for %d iterations.\n",
$scans[$scan_alloc->scan_idx], $scan_alloc->iters;
}
=head1 DESCRIPTION
This CPAN distribution implements the algorithm described here:
with it, or whatever. If you want to contribute patches, please send me a diff or prod me to pull
from your repository :)
L<http://github.com/shlomif/fc-solve>
git clone ssh://git@github.com/shlomif/fc-solve.git
=head1 AUTHOR
Shlomi Fish <shlomif@cpan.org>
This software is Copyright (c) 2012 by Shlomi Fish.
This is free software, licensed under:
The MIT (X11) License
=cut
lib/AI/Pathfinding/SMAstar.pm
# This allows declaration use AI::Pathfinding::SMAstar ':all';
# If you do not need this, moving things directly into @EXPORT or @EXPORT_OK
# will save memory.
our %EXPORT_TAGS = ( 'all' => [ qw(
) ] );
our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );
our @EXPORT = qw(
);
our $VERSION = '0.07';
use AI::Pathfinding::SMAstar::PriorityQueue;
##################################################
# SMAstar constructor
##################################################
sub new {
my $invocant = shift;
my $class = ref($invocant) || $invocant;
my $self = {
_priority_queue => AI::Pathfinding::SMAstar::PriorityQueue->new(),
_state_eval_func => undef,
_state_goal_p_func => undef,
_state_num_successors_func => undef,
_state_successors_iterator => undef,
_show_prog_func => undef,
_state_get_data_func => undef,
@_, # attribute override
};
return bless $self, $class;
}
sub state_eval_func {
my $self = shift;
if (@_) { $self->{_state_eval_func} = shift }
return $self->{_state_eval_func};
}
sub state_goal_p_func {
my $self = shift;
if (@_) { $self->{_state_goal_p_func} = shift }
return $self->{_state_goal_p_func};
}
sub state_num_successors_func {
my $self = shift;
if (@_) { $self->{_state_num_successors_func} = shift }
return $self->{_state_num_successors_func};
}
sub state_successors_iterator {
my $self = shift;
if (@_) { $self->{_state_successors_iterator} = shift }
return $self->{_state_successors_iterator};
}
sub state_get_data_func {
my $self = shift;
if (@_) { $self->{_state_get_data_func} = shift }
return $self->{_state_get_data_func};
}
sub show_prog_func {
my $self = shift;
if (@_) { $self->{_show_prog_func} = shift }
return $self->{_show_prog_func};
}
###################################################################
# be multiple start-states.
#
###################################################################
sub add_start_state
{
my ($self, $state) = @_;
my $state_eval_func = $self->{_state_eval_func};
my $state_goal_p_func = $self->{_state_goal_p_func};
    my $state_num_successors_func = $self->{_state_num_successors_func};
    my $state_successors_iterator = $self->{_state_successors_iterator};
my $state_get_data_func = $self->{_state_get_data_func};
# make sure required functions have been defined
if(!defined($state_eval_func)){
croak "SMAstar: evaluation function is not defined\n";
}
if(!defined($state_goal_p_func)){
croak "SMAstar: goal function is not defined\n";
}
if(!defined($state_num_successors_func)){
croak "SMAstar: num successors function is not defined\n";
}
if(!defined($state_successors_iterator)){
croak "SMAstar: successor iterator is not defined\n";
}
# create a path object from this state
my $state_obj = AI::Pathfinding::SMAstar::Path->new(
_state => $state,
_eval_func => $state_eval_func,
_goal_p_func => $state_goal_p_func,
_num_successors_func => $state_num_successors_func,
_successors_iterator => $state_successors_iterator,
_get_data_func => $state_get_data_func,
);
my $fcost = AI::Pathfinding::SMAstar::Path::fcost($state_obj);
# check if the fcost of this node looks OK (is numeric)
unless(Scalar::Util::looks_like_number($fcost)){
croak "Error: f-cost of state is not numeric. Cannot add state to queue.\n";
}
$state_obj->f_cost($fcost);
# check if the num_successors function returns a number
my $num_successors = $state_obj->get_num_successors();
unless(Scalar::Util::looks_like_number($num_successors)){
croak "Error: Number of state successors is not numeric. Cannot add state to queue.\n";
}
# test out the iterator function to make sure it returns
# an object of the correct type
my $classname = ref($state);
my $test_successor_iterator = $state_obj->{_successors_iterator}->($state);
my $test_successor = $test_successor_iterator->($state);
my $succ_classname = ref($test_successor);
unless($succ_classname eq $classname){
croak "Error: Successor iterator method of object $classname does " .
"not return an object of type $classname.\n";
}
# add this node to the queue
$self->{_priority_queue}->insert($state_obj);
}
###################################################################
#
# start the SMAstar search process
#
###################################################################
sub start_search
{
my ($self,
$log_function,
$str_function,
$max_states_in_queue,
$max_cost,
) = @_;
if(!defined($str_function)){
croak "SMAstar start_search: str_function is not defined.\n";
}
sma_star_tree_search(\($self->{_priority_queue}),
\&AI::Pathfinding::SMAstar::Path::is_goal,
\&AI::Pathfinding::SMAstar::Path::get_descendants_iterator_smastar,
\&AI::Pathfinding::SMAstar::Path::fcost,
\&AI::Pathfinding::SMAstar::Path::backup_fvals,
$log_function,
$str_function,
\&AI::Pathfinding::SMAstar::Path::progress,
$self->{_show_prog_func},
$max_states_in_queue,
$max_cost,
);
}
#################################################################
#
#
#################################################################
sub sma_star_tree_search
{
my ($priority_queue,
$goal_p,
$successors_func,
$eval_func,
$backup_func,
$log_function, # debug string func; represent state object as a string.
$str_function,
$prog_function,
$show_prog_func,
$max_states_in_queue,
$max_cost,
) = @_;
my $iteration = 0;
my $num_states_in_queue = $$priority_queue->size();
my $max_extra_states_in_queue = $max_states_in_queue;
$max_states_in_queue = $num_states_in_queue + $max_extra_states_in_queue;
my $max_depth = ($max_states_in_queue - $num_states_in_queue);
my $best; # the best candidate for expansion
if(!$$priority_queue || $$priority_queue->is_empty()){
return;
}
else{
my $num_successors = 0;
# loop over the elements in the priority queue
while(!$$priority_queue->is_empty()){
# determine the current size of the queue
my $num_states_in_queue = $$priority_queue->{_size};
# get the best candidate for expansion from the queue
$best = $$priority_queue->deepest_lowest_cost_leaf_dont_remove();
#------------------------------------------------------
if(!$DEBUG){
my $str = $log_function->($best);
$show_prog_func->($iteration, $num_states_in_queue, $str);
}
else{
my $str = $log_function->($best);
print "best is: " . $str_function->($best) . ", cost: " . $best->{_f_cost} . "\n";
}
#------------------------------------------------------
if($best->$goal_p()) {
# goal achieved! iteration: $iteration, number of
# states in queue: $num_states_in_queue.
return $best;
}
elsif($best->{_f_cost} >= $max_cost){
croak "\n\nSearch unsuccessful. max_cost reached (cost: $max_cost).\n";
}
else{
my $successors_iterator = $best->$successors_func();
my $succ = $successors_iterator->();
if($succ){
# if succ is at max depth and is not a goal node, set succ->fcost to infinity
if($succ->depth() >= $max_depth && !$succ->$goal_p() ){
$succ->{_f_cost} = $max_cost;
}
else{
# calling eval for comparison, and maintaining pathmax property
$succ->{_f_cost} = max($eval_func->($succ), $eval_func->($best));
my $descendant_index = $succ->{_descendant_index};
$best->{_descendant_fcosts}->[$descendant_index] = $succ->{_f_cost};
}
}
# determine if $best is completed, and if so backup values
if($best->is_completed()){
# remove from queue first, back up fvals, then insert back on queue.
# this way, it gets placed in its rightful place on the queue.
my $fval_before_backup = $best->{_f_cost};
# STEPS:
# 1) remove best and all antecedents from queue, but only if they are
# going to be altered by backing-up fvals. This is because
# removing and re-inserting in queue changes temporal ordering,
# and we don't want to do that unless the node will be
# placed in a new cost-bucket/tree.
# 2) then backup fvals
# 3) then re-insert best and all antecedents back on queue.
# Check if need for backup fvals
$best->check_need_fval_change();
my $cmp_func = sub {
my ($str) = @_;
return sub{
my ($obj) = @_;
my $obj_path_str = $str_function->($obj);
if($obj_path_str eq $str){
return 1;
}
else{
return 0;
}
}
};
my $antecedent = $best->{_antecedent};
my %was_on_queue;
my $i = 0;
# Now remove the offending nodes from queue, if any
if($best->need_fval_change()){
# remove best from the queue
$best = $$priority_queue->deepest_lowest_cost_leaf();
while($antecedent){
my $path_str = $str_function->($antecedent);
if($antecedent->is_on_queue() && $antecedent->need_fval_change()){
$was_on_queue{$i} = 1;
$$priority_queue->remove($antecedent, $cmp_func->($path_str));
}
$antecedent = $antecedent->{_antecedent};
$i++;
}
}
# Backup fvals
if($best->need_fval_change()){
$best->$backup_func();
}
# Put everything back on the queue
if($best->need_fval_change()){
$$priority_queue->insert($best);
my $antecedent = $best->{_antecedent};
my $i = 0;
while($antecedent){
if($was_on_queue{$i} && $antecedent->need_fval_change()){
# the antecedent needed fval change too.
$$priority_queue->insert($antecedent);
}
if($antecedent->need_fval_change()){
# set need_fval_change back to 0, so it will not be automatically seen as
# needing changed in the future. This is important, since we do not want
# to remove an element from the queue *unless* we need to change the fcost.
# This is because when we remove it from the queue and re-insert it, it
# loses its seniority in the queue (it becomes the newest node at its cost
# and depth) and will not be removed at the right time when searching for
# deepest_lowest_cost_leafs or shallowest_highest_cost_leafs.
$antecedent->{_need_fcost_change} = 0;
}
$antecedent = $antecedent->{_antecedent};
$i++;
}
# Again, set need_fval_change back to 0, so it will not be automatically
# seen as needing changed in the future.
$best->{_need_fcost_change} = 0;
}
}
#
# If best's descendants are all in memory, mark best as completed.
#
if($best->all_in_memory()) {
if(!($best->is_completed())){
$best->is_completed(1);
}
my $cmp_func = sub {
my ($str) = @_;
return sub{
my ($obj) = @_;
my $obj_str = $str_function->($obj);
if($obj_str eq $str){
return 1;
}
else{
return 0;
}
}
};
my $best_str = $str_function->($best);
# If best is not a root node
if($best->{_depth} != 0){
# descendant index is the unique index indicating which descendant
# this node is of its antecedent.
my $descendant_index = $best->{_descendant_index};
my $antecedent = $best->{_antecedent};
$$priority_queue->remove($best, $cmp_func->($best_str));
if($antecedent){
$antecedent->{_descendants_produced}->[$descendant_index] = 0;
}
}
}
# there are no more successors of $best
if(!$succ){
next;
}
my $antecedent;
my @antecedents_that_need_to_be_inserted;
# If the maximum number of states in the queue has been reached,
# we need to remove the shallowest-highest-cost leaf to make room
# for more nodes. That means we have to make sure that the antecedent
# produces this descendant again at some point in the future if needed.
if($num_states_in_queue > $max_states_in_queue){
my $shcl_obj = $$priority_queue->shallowest_highest_cost_leaf($best, $succ, $str_function);
if(!$shcl_obj){
croak "Error while pruning queue: shallowest-highest-cost-leaf was null\n";
}
$antecedent = $shcl_obj->{_antecedent};
if($antecedent){
my $antecedent_successors = \$antecedent->{_descendants_list};
$antecedent->remember_forgotten_nodes_fcost($shcl_obj);
$antecedent->{_forgotten_nodes_num} = $antecedent->{_forgotten_nodes_num} + 1;
my $descendant_index = $shcl_obj->{_descendant_index};
# record the index of this descendant in the forgotten_nodes list
$antecedent->{_forgotten_nodes_offsets}->{$descendant_index} = 1;
# flag the antecedent as not having this descendant in the queue
$antecedent->{_descendants_produced}->[$descendant_index] = 0;
$antecedent->{_descendant_fcosts}->[$descendant_index] = -1;
# flag the ancestor node as having deleted a descendant
$antecedent->descendants_deleted(1);
# update the number of descendants this node has in memory
$antecedent->{_num_successors_in_mem} = $antecedent->{_num_successors_in_mem} - 1;
# update the total number of nodes in the queue.
$num_states_in_queue--;
}
} # end if (num_states_on_queue > max_states)
# if there is a successor to $best, insert it in the priority queue.
if($succ){
$$priority_queue->insert($succ);
$best->{_num_successors_in_mem} = $best->{_num_successors_in_mem} + 1;
}
else{
croak "Error: no successor to insert\n";
}
}
}
continue {
$iteration++;
}
print "\n\nreturning unsuccessfully. iteration: $iteration\n";
return;
}
}
sub max
{
my ($n1, $n2) = @_;
return ($n1 > $n2 ? $n1 : $n2);
}
sub fp_compare {
    my ($a, $b, $dp) = @_;
    # compare two floating-point values to $dp significant digits
    my $a_rounded = sprintf("%.${dp}g", $a);
    my $b_rounded = sprintf("%.${dp}g", $b);
    return $a_rounded <=> $b_rounded;
}
AI::Pathfinding::SMAstar - Simplified Memory-bounded A* Search
=head1 SYNOPSIS
use AI::Pathfinding::SMAstar;
=head2 EXAMPLE
##################################################################
#
# This example uses a hypothetical object called FrontierObj, and
# shows the functions that the FrontierObj class must feature in
# order to perform a path-search in a solution space populated by
# FrontierObj objects.
#
##################################################################
my $smastar = AI::Pathfinding::SMAstar->new(
# evaluates f(n) = g(n) + h(n), returns a number
_state_eval_func => \&FrontierObj::evaluate,
# when called on a node, returns 1 if it is a goal
_state_goal_p_func => \&FrontierObj::goal_test,
# must return the number of successors of a node
_state_num_successors_func => \&FrontierObj::get_num_successors,
# must return *one* successor at a time
_state_successors_iterator => \&FrontierObj::get_successors_iterator,
# can be any suitable string representation
_state_get_data_func => \&FrontierObj::string_representation,
# gets called once per iteration, useful for showing algorithm progress
_show_prog_func => \&FrontierObj::progress_callback,
);
# You can start the search from multiple start-states.
# Add the initial states to the smastar object before starting the search.
foreach my $frontierObj (@start_states){
$smastar->add_start_state($frontierObj);
}
#
# Start the search. If successful, $frontierGoalPath will contain the
# goal path. The optimal path to the goal node will be encoded in the
# ancestry of the goal path. $frontierGoalPath->antecedent() contains
# the goal path's parent path, and so forth back to the start path, which
# contains only the start state.
#
# $frontierGoalPath->state() contains the goal FrontierObj itself.
#
my $frontierGoalPath = $smastar->start_search(
\&log_function, # returns a string used for logging progress
\&str_function, # returns a string used to *uniquely* identify a node
$max_states_in_queue, # indicate the maximum states allowed in memory
$MAX_COST, # indicate the maximum cost allowed in search
);
In the example above, a hypothetical object, C<FrontierObj>, is used to
represent a state, or I<node>, in your search space. To use SMA* search,
your C<FrontierObj> class must provide the callback methods shown in the
constructor call above.
=head1 METHODS
=head2 new()
my $smastar = AI::Pathfinding::SMAstar->new();
Creates a new SMA* search object.
=head2 start_search()
my $frontierGoalObj = $smastar->start_search(
\&log_function, # returns a string used for logging progress
\&str_function, # returns a string used to *uniquely* identify a node
$max_states_in_queue, # indicate the maximum states allowed in memory
$MAX_COST, # indicate the maximum cost allowed in search
);
Initiates a memory-bounded search. When calling this function, pass a handle to
a function for recording current status (C<log_function> above; this can be
an empty subroutine if you don't care), a function that returns a *unique* string
representing a node in the search-space (this *cannot* be an empty subroutine),
a maximum number of states allowed in memory, and a maximum cost
value (beyond which the search will cease).
=head2 state_eval_func()
$smastar->state_eval_func(\&FrontierObj::evaluate);
Set or get the handle to the function that returns the cost of the object
argument (node) in the search space.
=head2 state_goal_p_func()
$smastar->state_goal_p_func(\&FrontierObj::goal_test);
Set/get the handle to the goal predicate function. This is a function
that returns 1 if the argument object is a goal node, or 0 otherwise.
=head2 state_num_successors_func()
$smastar->state_num_successors_func(\&FrontierObj::get_num_successors);
Set/get the handle to the function that returns the number of successors
of the object argument (node).
=head2 state_successors_iterator()
$smastar->state_successors_iterator(\&FrontierObj::get_successors_iterator);
Set/get the handle to the function that returns an iterator that produces the
next successor of this node.
=head2 state_get_data_func()
$smastar->state_get_data_func(\&FrontierObj::string_representation);
Set/get the handle to the function that returns a string
representation of this node.
=head2 show_prog_func()
$smatar->show_prog_func(\&FrontierObj::progress_callback);
Sets/gets the callback function for displaying the progress of the search.
It can be an empty callback (sub{}) if you do not need this output.
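A minimal sketch, assuming only the three arguments that C<sma_star_tree_search> passes to it (iteration count, queue size, and the string produced by your log function):

  sub progress_callback {
      my ($iteration, $num_states_in_queue, $str) = @_;
      print STDERR "iter $iteration (queue: $num_states_in_queue) $str\n";
  }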
=head2 DEPENDENCIES
Tree::AVL
Test::More
=head2 INCLUDED MODULES
AI::Pathfinding::SMAstar
AI::Pathfinding::SMAstar::Path
AI::Pathfinding::SMAstar::PriorityQueue
AI::Pathfinding::SMAstar::TreeOfQueues
=head2 EXPORT
lib/AI/Perceptron/Simple.pm
use constant TUNE_UP => 1;
use constant TUNE_DOWN => 0;
=head1 SYNOPSIS
#!/usr/bin/perl
use AI::Perceptron::Simple qw(...);
# create a new nerve / neuron / perceptron
$nerve = AI::Perceptron::Simple->new( {
initial_value => $size_of_each_dendrite,
learning_rate => 0.3, # optional
threshold => 0.85, # optional
attribs => \@dendrites,
} );
# train
$nerve->tame( ... );
$nerve->exercise( ... );
$nerve->train( $training_data_csv, $expected_column_name, $save_nerve_to );
# or
$nerve->train(
$training_data_csv, $expected_column_name, $save_nerve_to,
$show_progress, $identifier); # these two parameters must go together
# validate
$nerve->take_lab_test( ... );
$nerve->take_mock_exam( ... );
# fill results to original file
$nerve->validate( {
stimuli_validate => $validation_data_csv,
predicted_column_index => 4,
} );
# or
# fill results to a new file
$nerve->validate( {
stimuli_validate => $validation_data_csv,
predicted_column_index => 4,
results_write_to => $new_csv
} );
# test - see "validate" method, same usage
$nerve->take_real_exam( ... );
$nerve->work_in_real_world( ... );
$nerve->test( ... );
# confusion matrix
my %c_matrix = $nerve->get_confusion_matrix( {
full_data_file => $file_csv,
actual_output_header => $header_name,
predicted_output_header => $predicted_header_name,
more_stats => 1, # optional
} );
# accessing the confusion matrix
my @keys = qw( true_positive true_negative false_positive false_negative
total_entries accuracy sensitivity );
for ( @keys ) {
print $_, " => ", $c_matrix{ $_ }, "\n";
}
# output to console
$nerve->display_confusion_matrix( \%c_matrix, {
zero_as => "bad apples", # cat milk green etc.
one_as => "good apples", # dog honey pink etc.
} );
# saving and loading data of perceptron locally
# NOTE: nerve data is automatically saved after each training process
use AI::Perceptron::Simple ":local_data";
my $nerve_file = "apples.nerve";
preserve( ... );
save_perceptron( $nerve, $nerve_file );
# load data of perceptron for use in actual program
my $apple_nerve = revive( ... );
my $apple_nerve = load_perceptron( $nerve_file );
# for portability of nerve data
use AI::Perceptron::Simple ":portable_data";
my $yaml_nerve_file = "pearls.yaml";
preserve_as_yaml ( ... );
save_perceptron_yaml ( $nerve, $yaml_nerve_file );
# load nerve data on the other computer
my $pearl_nerve = revive_from_yaml ( ... );
my $pearl_nerve = load_perceptron_yaml ( $yaml_nerve_file );
# processing data
use AI::Perceptron::Simple ":process_data";
shuffle_stimuli ( ... )
shuffle_data ( ORIGINAL_STIMULI, $new_file_1, $new_file_2, ... );
shuffle_data ( $original_stimuli => $new_file_1, $new_file_2, ... );
=head1 EXPORT
None by default.
=cut
use Exporter qw( import );
our @EXPORT_OK = qw(
shuffle_data shuffle_stimuli
preserve save_perceptron revive load_perceptron
preserve_as_yaml save_perceptron_yaml revive_from_yaml load_perceptron_yaml
);
our %EXPORT_TAGS = (
process_data => [ qw( shuffle_data shuffle_stimuli ) ],
local_data => [ qw( preserve save_perceptron revive load_perceptron ) ],
portable_data => [ qw( preserve_as_yaml save_perceptron_yaml revive_from_yaml load_perceptron_yaml ) ],
);
=head1 DESCRIPTION
This module provides methods to build, train, validate and test a perceptron. It can also save the data of the perceptron for future use for any actual AI programs.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
Shuffles the data in C<$original_stimuli> or C<ORIGINAL_STIMULI> and saves the shuffled data to the specified output files.
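For example, a minimal sketch (the file names below are invented for illustration):
use AI::Perceptron::Simple ":process_data";
# split one raw dataset into two shuffled files, headers preserved
shuffle_data( "book_list.csv" => "train.csv", "validate.csv" );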
=cut
sub shuffle_stimuli {
shuffle_data( @_ );
}
sub shuffle_data {
my $stimuli = shift or croak "Please specify the original file name";
my @shuffled_stimuli_names = @_
or croak "Please specify the output files for the shuffled data";
my @aoa;
for ( @shuffled_stimuli_names ) {
# copied from _real_validate_or_test
# open for shuffling
my $aoa = csv (in => $stimuli, encoding => ":encoding(utf-8)");
my $attrib_array_ref = shift @$aoa; # 'remove' the header, it's annoying :)
@aoa = shuffle( @$aoa ); # this can only process actual array
unshift @aoa, $attrib_array_ref; # put back the headers before saving file
csv( in => \@aoa, out => $_, encoding => ":encoding(utf-8)" )
and
print "Saved shuffled data into ", basename($_), "!\n";
}
}
=head1 CREATION RELATED SUBROUTINES/METHODS
=head2 new ( \%options )
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=back
=cut
sub new {
my $class = shift;
my $data_ref = shift;
my %data = %{ $data_ref };
# check keys
$data{ learning_rate } = LEARNING_RATE if not exists $data{ learning_rate };
$data{ threshold } = THRESHOLD if not exists $data{ threshold };
#####
# don't pack this key checking process into a subroutine for now
# this is also used in &_real_validate_or_test
my @missing_keys;
for ( qw( initial_value attribs ) ) {
push @missing_keys, $_ unless exists $data{ $_ };
}
croak "Missing keys: @missing_keys" if @missing_keys;
#####
# continue to process the rest of the data
my %attributes;
for ( @{ $data{ attribs } } ) {
$attributes{ $_ } = $data{ initial_value };
}
my %processed_data = (
learning_rate => $data{ learning_rate },
threshold => $data{ threshold },
attributes_hash_ref => \%attributes,
);
bless \%processed_data, $class;
}
=head2 get_attributes
Obtains a hash of all the attributes of the perceptron, mapping each attribute (dendrite) name to its current weight.
=cut
sub get_attributes {
my $self = shift;
%{ $self->{attributes_hash_ref} };
}
=head2 learning_rate ( $value )
=head2 learning_rate
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
If C<$value> is given, sets the learning rate to C<$value>. If not, then it returns the learning rate.
=cut
sub learning_rate {
my $self = shift;
if ( @_ ) {
$self->{learning_rate} = shift;
} else {
$self->{learning_rate}
}
}
=head2 threshold ( $value )
=head2 threshold
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
If C<$value> is given, sets the threshold / passing rate to C<$value>. If not, then it returns the passing rate.
=cut
sub threshold {
my $self = shift;
if ( @_ ) {
$self->{ threshold } = shift;
} else {
$self->{ threshold };
}
}
=head1 TRAINING RELATED SUBROUTINES/METHODS
All the training methods here take the same parameters as the actual C<train> method and behave identically; they are simply different names for the same process.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
If C<$display_stats> is specified, i.e. set to C<1>, then you B<MUST> also specify C<$identifier>. C<$identifier> is the column / header name used to identify a specific row of data in C<$stimuli_train_csv>.
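For example, a sketch of a call with statistics enabled (the file and column names are invented):
# show tuning statistics, identifying each row by its "book_name" column
$nerve->train( "stimuli_train.csv", "brand", "apples.nerve", 1, "book_name" );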
=cut
sub tame {
train( @_ );
}
sub exercise {
train( @_ );
}
sub train {
my $self = shift;
my( $stimuli_train_csv, $expected_output_header, $save_nerve_to_file, $display_stats, $identifier ) = @_;
$display_stats = 0 if not defined $display_stats;
if ( $display_stats and not defined $identifier ) {
croak "Please specifiy a string for \$identifier if you are trying to display stats";
}
# CSV processing is all according to the documentation of Text::CSV
open my $data_fh, "<:encoding(UTF-8)", $stimuli_train_csv
or croak "Can't open $stimuli_train_csv: $!";
my $csv = Text::CSV->new( {auto_diag => 1, binary => 1} );
my $attrib = $csv->getline($data_fh);
$csv->column_names( $attrib );
# individual row
ROW: while ( my $row = $csv->getline_hr($data_fh) ) {
# print $row->{book_name}, " -> ";
# print $row->{$expected_output_header} ? "ææ\n" : "é
丽ä¼å\n";
# calculate the output and fine tune parameters if necessary
while (1) {
my $output = _calculate_output( $self, $row );
#print "Sum = ", $output, "\n";
# $expected_output_header to be checked together over here
# if output >= threshold
# then category/result aka output is considered 1
# else output considered 0
# output expected/actual tuning
# 0 0 -
# 1 0 down
# 0 1 up
# 1 1 -
if ( ($output >= $self->threshold) and ( $row->{$expected_output_header} eq 0 ) ) {
_tune( $self, $row, TUNE_DOWN );
if ( $display_stats ) {
print $row->{$identifier}, "\n";
print " -> TUNED DOWN";
print " Old sum = ", $output;
print " Threshold = ", $self->threshold;
print " New Sum = ", _calculate_output( $self, $row ), "\n";
}
} elsif ( ($output < $self->threshold) and ( $row->{$expected_output_header} eq 1 ) ) {
_tune( $self, $row, TUNE_UP );
if ( $display_stats ) {
print $row->{$identifier}, "\n";
print " -> TUNED UP";
print " Old sum = ", $output;
print " Threshold = ", $self->threshold;
print " New Sum = ", _calculate_output( $self, $row ), "\n";
}
} elsif ( ($output < $self->threshold) and ( $row->{$expected_output_header} eq 0 ) ) {
if ( $display_stats ) {
print $row->{$identifier}, "\n";
print " -> NO TUNING NEEDED";
print " Sum = ", _calculate_output( $self, $row );
print " Threshold = ", $self->threshold, "\n";
}
next ROW;
} elsif ( ($output >= $self->threshold) and ( $row->{$expected_output_header} eq 1 ) ) {
if ( $display_stats ) {
print $row->{$identifier}, "\n";
print " -> NO TUNING NEEDED";
print " Sum = ", _calculate_output( $self, $row );
print " Threshold = ", $self->threshold, "\n";
}
next ROW;
} #else { print "Something's not right\n'" }
}
}
close $data_fh;
save_perceptron( $self, $save_nerve_to_file ); # this doesn't return anything
}
=head2 &_calculate_output( $self, \%stimuli_hash )
Calculates and returns the C<sum(weightage*input)> for each individual row of data. In practice it just adds up all the active weights, since the C<input> is always 1 for now :)
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
This subroutine should be called in the procedural way for now.
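As a small worked example with made-up weights: if the dendrites are C<< { has_cover => 0.5, has_index => 0.35 } >> and the current row has C<has_cover = 1> and C<has_index = 0>, then the returned sum is C<0.5>.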
=cut
sub _calculate_output {
my $self = shift;
my $stimuli_hash_ref = shift;
my %dendrites = $self->get_attributes;
my $sum = 0; # this is the output; start at zero so rows with no active inputs do not yield undef
for ( keys %dendrites ) {
# if input is 1 for a dendrite, then calculate it
if ( $stimuli_hash_ref->{ $_ } ) {
# $sum += $dendrites{ $_ } * 1; # no need, if 1 then it is always the value itself
# this is very efficient, nice :)
$sum += $dendrites{ $_ };
}
}
$sum;
}
=head2 &_tune( $self, \%stimuli_hash, $tune_up_or_down )
Fine tunes the nerve. This will directly alter the attributes values in C<$self> according to the attributes / dendrites specified in C<new>.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
This subroutine should be called in the procedural way for now.
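Continuing the made-up example above: with a learning rate of C<0.3>, tuning up that row raises the C<has_cover> weight from C<0.5> to C<0.8>, while C<has_index> stays at C<0.35>, because only dendrites whose input is 1 are adjusted.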
=cut
sub _tune {
my $self = shift;
my ( $stimuli_hash_ref, $tuning_status ) = @_;
my %dendrites = $self->get_attributes;
for ( keys %dendrites ) {
if ( $tuning_status == TUNE_DOWN ) {
if ( $stimuli_hash_ref->{ $_ } ) { # must check this one, it must be 1 before we can alter the actual dendrite size in the nerve :)
$self->{ attributes_hash_ref }{ $_ } -= $self->learning_rate;
}
#print $_, ": ", $self->{ attributes_hash_ref }{ $_ }, "\n";
} elsif ( $tuning_status == TUNE_UP ) {
if ( $stimuli_hash_ref->{ $_ } ) {
$self->{ attributes_hash_ref }{ $_ } += $self->learning_rate;
}
#print $_, ": ", $self->{ attributes_hash_ref }{ $_ }, "\n";
}
}
}
=head1 VALIDATION RELATED METHODS
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
I<*This method will call C<_real_validate_or_test> to do the actual work.>
=cut
sub take_mock_exam {
my ( $self, $data_hash_ref ) = @_;
$self->_real_validate_or_test( $data_hash_ref );
}
sub take_lab_test {
my ( $self, $data_hash_ref ) = @_;
$self->_real_validate_or_test( $data_hash_ref );
}
sub validate {
my ( $self, $data_hash_ref ) = @_;
$self->_real_validate_or_test( $data_hash_ref );
}
=head1 TESTING RELATED SUBROUTINES/METHODS
All the testing methods here take the same parameters as the actual C<test> method and behave identically; they are simply different names for the same process.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=cut
# redirect to _real_validate_or_test
sub take_real_exam {
my ( $self, $data_hash_ref ) = @_;
$self->_real_validate_or_test( $data_hash_ref );
}
sub work_in_real_world {
my ( $self, $data_hash_ref ) = @_;
$self->_real_validate_or_test( $data_hash_ref );
}
sub test {
my ( $self, $data_hash_ref ) = @_;
$self->_real_validate_or_test( $data_hash_ref );
}
=head2 _real_validate_or_test ( $data_hash_ref )
This is where the actual validation or testing takes place.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=cut
sub _real_validate_or_test {
my ( $self, $data_hash_ref ) = @_;
#####
my @missing_keys;
for ( qw( stimuli_validate predicted_column_index ) ) {
push @missing_keys, $_ unless exists $data_hash_ref->{ $_ };
}
croak "Missing keys: @missing_keys" if @missing_keys;
#####
my $stimuli_validate = $data_hash_ref->{ stimuli_validate };
my $predicted_index = $data_hash_ref->{ predicted_column_index };
# actual processing starts here
my $output_file = defined $data_hash_ref->{ results_write_to }
? $data_hash_ref->{ results_write_to }
: $stimuli_validate;
# open for writing results
my $aoa = csv (in => $stimuli_validate, encoding => ":encoding(utf-8)");
my $attrib_array_ref = shift @$aoa; # 'remove' the header, it's annoying :)
$aoa = _fill_predicted_values( $self, $stimuli_validate, $predicted_index, $aoa );
# put back the array of headers before saving file
unshift @$aoa, $attrib_array_ref;
print "Saving data to $output_file\n";
csv( in => $aoa, out => $output_file, encoding => ":encoding(utf-8)" );
print "Done saving!\n";
}
=head2 &_fill_predicted_values ( $self, $stimuli_validate, $predicted_index, $aoa )
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
This subroutine should be called in the procedural way.
=cut
sub _fill_predicted_values {
my ( $self, $stimuli_validate, $predicted_index, $aoa ) = @_;
# CSV processing is all according to the documentation of Text::CSV
open my $data_fh, "<:encoding(UTF-8)", $stimuli_validate
or croak "Can't open $stimuli_validate: $!";
my $csv = Text::CSV->new( {auto_diag => 1, binary => 1} );
my $attrib = $csv->getline($data_fh);
$csv->column_names( $attrib );
# individual row
my $row = 0;
while ( my $data = $csv->getline_hr($data_fh) ) {
if ( _calculate_output( $self, $data ) >= $self->threshold ) {
# write 1 into aoa
$aoa->[ $row ][ $predicted_index ] = 1;
} else {
#write 0 into aoa
$aoa->[ $row ][ $predicted_index ] = 0;
}
$row++;
}
close $data_fh;
$aoa;
}
=head1 RESULTS RELATED SUBROUTINES/METHODS
This part is related to generating the confusion matrix.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=cut
sub get_exam_results {
my ( $self, $info ) = @_;
$self->get_confusion_matrix( $info );
}
sub get_confusion_matrix {
my ( $self, $info ) = @_;
my %c_matrix = _collect_stats( $info ); # processes total_entries, accuracy, sensitivity etc
%c_matrix;
}
=head2 &_collect_stats ( \%options )
Generates the confusion matrix hash based on the C<%options> given in the C<get_confusion_matrix> method.
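As a worked example with invented counts: given 40 true positives, 35 true negatives, 15 false positives and 10 false negatives, C<total_entries> is 100, C<accuracy> is (40 + 35) / 100 * 100 = 75%, and C<sensitivity> is 40 / (40 + 10) * 100 = 80%.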
=cut
sub _collect_stats {
my $info = shift;
my $file = $info->{ full_data_file };
my $actual_header = $info->{ actual_output_header };
my $predicted_header = $info->{ predicted_output_header };
my $more_stats = defined ( $info->{ more_stats } ) ? 1 : 0;
my %c_matrix = (
true_positive => 0, true_negative => 0, false_positive => 0, false_negative => 0,
accuracy => 0, sensitivity => 0
);
# CSV processing is all according to the documentation of Text::CSV
open my $data_fh, "<:encoding(UTF-8)", $file
or croak "Can't open $file: $!";
my $csv = Text::CSV->new( {auto_diag => 1, binary => 1} );
my $attrib = $csv->getline($data_fh); # get the row of headers, can't specify any column
# shouldn't be a problem, since we're reading line by line :)
$csv->column_names( $attrib );
# individual row
while ( my $row = $csv->getline_hr($data_fh) ) {
# don't pack this part into another subroutine, number of rows can be very big
if ( $row->{ $actual_header } == 1 and $row->{ $predicted_header } == 1 ) {
# true positive
$c_matrix{ true_positive }++;
} elsif ( $row->{ $actual_header } == 0 and $row->{ $predicted_header } == 0 ) {
# true negative
$c_matrix{ true_negative }++;
} elsif ( $row->{ $actual_header } == 1 and $row->{ $predicted_header } == 0 ) {
# false negative
$c_matrix{ false_negative }++;
} elsif ( $row->{ $actual_header } == 0 and $row->{ $predicted_header } == 1 ) {
# false positive
$c_matrix{ false_positive }++;
} else {
croak "Something's wrong!\n".
"Make sure that the actual and predicted values in your file are binary ie 0 or 1" ;
}
}
close $data_fh;
_calculate_total_entries( \%c_matrix );
_calculate_sensitivity( \%c_matrix );
_calculate_accuracy( \%c_matrix );
if ( $more_stats == 1 ) {
_calculate_precision( \%c_matrix );
_calculate_specificity( \%c_matrix );
_calculate_f1_score( \%c_matrix );
# some more stats could be added in the future
_calculate_negative_predicted_value( \%c_matrix );
_calculate_false_negative_rate( \%c_matrix );
_calculate_false_positive_rate( \%c_matrix );
_calculate_false_discovery_rate( \%c_matrix );
_calculate_false_omission_rate( \%c_matrix );
_calculate_balanced_accuracy( \%c_matrix );
}
%c_matrix;
}
=head2 &_calculate_total_entries ( $c_matrix_ref )
Calculates and adds the data for the C<total_entries> key in the confusion matrix hash.
=cut
sub _calculate_total_entries {
my $c_matrix = shift;
my $total = $c_matrix->{ true_negative } + $c_matrix->{ false_positive };
$total += $c_matrix->{ false_negative } + $c_matrix->{ true_positive };
$c_matrix->{ total_entries } = $total;
}
=head2 &_calculate_accuracy ( $c_matrix_ref )
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=cut
sub _calculate_accuracy {
my $c_matrix = shift;
my $numerator = $c_matrix->{ true_positive } + $c_matrix->{ true_negative };
my $denominator = $numerator + $c_matrix->{ false_positive } + $c_matrix->{ false_negative };
$c_matrix->{ accuracy } = $numerator / $denominator * 100;
# no need to return anything, we're using ref
}
=head2 &_calculate_sensitivity ( $c_matrix_ref )
Calculates and adds the data for the C<sensitivity> key in the confusion matrix hash.
=cut
sub _calculate_sensitivity {
my $c_matrix = shift;
my $numerator = $c_matrix->{ true_positive };
my $denominator = $numerator + $c_matrix->{ false_negative };
$c_matrix->{ sensitivity } = $numerator / $denominator * 100;
# no need to return anything, we're using ref
}
=head2 &_calculate_precision ( $c_matrix_ref )
Calculates and adds the data for the C<precision> key in the confusion matrix hash.
=cut
sub _calculate_precision {
my $c_matrix = shift;
my $numerator = $c_matrix->{ true_positive };
my $denominator = $numerator + $c_matrix->{ false_positive };
$c_matrix->{ precision } = $numerator / $denominator * 100;
}
=head2 &_calculate_specificity ( $c_matrix_ref )
Calculates and adds the data for the C<specificity> key in the confusion matrix hash.
=cut
sub _calculate_specificity {
my $c_matrix = shift;
my $numerator = $c_matrix->{ true_negative };
my $denominator = $numerator + $c_matrix->{ false_positive };
$c_matrix->{ specificity } = $numerator / $denominator * 100;
}
=head2 &_calculate_f1_score ( $c_matrix_ref )
Calculates and adds the data for the C<F1_Score> key in the confusion matrix hash.
=cut
sub _calculate_f1_score {
my $c_matrix = shift;
my $numerator = 2 * $c_matrix->{ true_positive };
my $denominator = $numerator + $c_matrix->{ false_positive } + $c_matrix->{ false_negative };
$c_matrix->{ F1_Score } = $numerator / $denominator * 100;
}
=head2 &_calculate_negative_predicted_value( $c_matrix_ref )
Calculates and adds the data for the C<negative_predicted_value> key in the confusion matrix hash.
=cut
sub _calculate_negative_predicted_value {
my $c_matrix = shift;
my $numerator = $c_matrix->{ true_negative };
my $denominator = $numerator + $c_matrix->{ false_negative };
$c_matrix->{ negative_predicted_value } = $numerator / $denominator * 100;
}
=head2 &_calculate_false_negative_rate( $c_matrix_ref )
Calculates and adds the data for the C<false_negative_rate> key in the confusion matrix hash.
=cut
sub _calculate_false_negative_rate {
my $c_matrix = shift;
my $numerator = $c_matrix->{ false_negative };
my $denominator = $numerator + $c_matrix->{ true_positive };
$c_matrix->{ false_negative_rate } = $numerator / $denominator * 100;
}
=head2 &_calculate_false_positive_rate( $c_matrix_ref )
Calculates and adds the data for the C<false_positive_rate> key in the confusion matrix hash.
=cut
sub _calculate_false_positive_rate {
my $c_matrix = shift;
my $numerator = $c_matrix->{ false_positive };
my $denominator = $numerator + $c_matrix->{ true_negative };
$c_matrix->{ false_positive_rate } = $numerator / $denominator * 100;
}
=head2 &_calculate_false_discovery_rate( $c_matrix_ref )
Calculates and adds the data for the C<false_discovery_rate> key in the confusion matrix hash.
=cut
sub _calculate_false_discovery_rate {
my $c_matrix = shift;
my $numerator = $c_matrix->{ false_positive };
my $denominator = $numerator + $c_matrix->{ true_positive };
$c_matrix->{ false_discovery_rate } = $numerator / $denominator * 100;
}
=head2 &_calculate_false_omission_rate( $c_matrix_ref )
Calculates and adds the data for the C<false_omission_rate> key in the confusion matrix hash.
=cut
sub _calculate_false_omission_rate {
my $c_matrix = shift;
my $numerator = $c_matrix->{ false_negative };
my $denominator = $numerator + $c_matrix->{ true_negative };
$c_matrix->{ false_omission_rate } = $numerator / $denominator * 100;
}
=head2 &_calculate_balanced_accuracy( $c_matrix_ref )
Calculates and adds the data for the C<balanced_accuracy> key in the confusion matrix hash.
=cut
sub _calculate_balanced_accuracy {
my $c_matrix = shift;
my $numerator = $c_matrix->{ sensitivity } + $c_matrix->{ specificity };
my $denominator = 2;
$c_matrix->{ balanced_accuracy } = $numerator / $denominator; # numerator already in %
}
=head2 display_exam_results ( ... )
The parameters are the same as C<display_confusion_matrix>. See the next method.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=cut
sub display_exam_results {
my ( $self, $c_matrix, $labels ) = @_;
$self->display_confusion_matrix( $c_matrix, $labels );
}
sub display_confusion_matrix {
my ( $self, $c_matrix, $labels ) = @_;
#####
my @missing_keys;
for ( qw( zero_as one_as ) ) {
push @missing_keys, $_ unless exists $labels->{ $_ };
}
croak "Missing keys: @missing_keys" if @missing_keys;
#####
_print_extended_matrix ( _build_matrix( $c_matrix, $labels ) );
}
=head2 &_build_matrix ( $c_matrix, $labels )
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=cut
sub _build_matrix {
my ( $c_matrix, $labels ) = @_;
my $predicted_columns = [ "P: ".$labels->{ zero_as }, "P: ".$labels->{ one_as }, "Sum" ];
my $actual_rows = [ "A: ".$labels->{ zero_as }, "A: ".$labels->{ one_as }, "Sum"];
# row sum
my $actual_0_sum = $c_matrix->{ true_negative } + $c_matrix->{ false_positive };
my $actual_1_sum = $c_matrix->{ false_negative } + $c_matrix->{ true_positive };
# column sum
my $predicted_0_sum = $c_matrix->{ true_negative } + $c_matrix->{ false_negative };
my $predicted_1_sum = $c_matrix->{ false_positive } + $c_matrix->{ true_positive };
my $data = [
[ $c_matrix->{ true_negative }, $c_matrix->{ false_positive }, $actual_0_sum ],
[ $c_matrix->{ false_negative }, $c_matrix->{ true_positive }, $actual_1_sum ],
[ $predicted_0_sum, $predicted_1_sum, $c_matrix->{ total_entries } ],
];
my $matrix = Text::Matrix->new(
rows => $actual_rows,
columns => $predicted_columns,
data => $data,
);
return ( $matrix, $c_matrix );
}
=head2 &_print_extended_matrix ( $matrix, $c_matrix )
Extends and outputs the matrix on the screen.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=cut
sub _print_extended_matrix {
my ( $matrix, $c_matrix ) = @_;
print "~~" x24, "\n";
print "CONFUSION MATRIX (A:actual P:predicted)\n";
print "~~" x24, "\n";
print $matrix->matrix();
print "~~" x24, "\n";
print "Total of ", $c_matrix->{ total_entries } , " entries\n";
print " Accuracy: $c_matrix->{ accuracy } %\n";
print " Sensitivity: $c_matrix->{ sensitivity } %\n";
# more stats
print " Precision: $c_matrix->{ precision } %\n" if exists $c_matrix->{ precision };
print " Specificity: $c_matrix->{ specificity } %\n" if exists $c_matrix->{ specificity };
print " F1 Score: $c_matrix->{ F1_Score } %\n" if exists $c_matrix->{ F1_Score };
print " Negative Predicted Value: $c_matrix->{ negative_predicted_value } %\n" if exists $c_matrix->{ negative_predicted_value };
print " False Negative Rate: $c_matrix->{ false_negative_rate } %\n" if exists $c_matrix->{ false_negative_rate };
print " False Positive Rate: $c_matrix->{ false_positive_rate } %\n" if exists $c_matrix->{ false_positive_rate };
print " False Discovery Rate: $c_matrix->{ false_discovery_rate } %\n" if exists $c_matrix->{ false_discovery_rate };
print " False Omission Rate: $c_matrix->{ false_omission_rate } %\n" if exists $c_matrix->{ false_omission_rate };
print " Balanced Accuracy: $c_matrix->{ balanced_accuracy } %\n" if exists $c_matrix->{ balanced_accuracy };
print "~~" x24, "\n";
}
=head1 NERVE DATA RELATED SUBROUTINES
This part is about saving the data of the nerve. These subroutines can be imported using the C<:local_data> tag.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
At the end of each training process, this will be called automatically.
=cut
sub preserve {
save_perceptron( @_ );
}
sub save_perceptron {
my $self = shift;
my $nerve_file = shift;
use Storable;
store $self, $nerve_file;
}
=head2 revive (...)
The parameters and usage are the same as C<load_perceptron>. See the next subroutine.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
Loads the data and turns it into a C<AI::Perceptron::Simple> object as the return value.
=cut
sub revive {
load_perceptron( @_ );
}
sub load_perceptron {
my $nerve_file_to_load = shift;
use Storable;
my $loaded_nerve = retrieve( $nerve_file_to_load );
$loaded_nerve;
}
=head1 NERVE PORTABILITY RELATED SUBROUTINES
These subroutines can be imported using the C<:portable_data> tag.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
Saves the C<AI::Perceptron::Simple> object into a C<YAML> file.
=cut
sub preserve_as_yaml {
save_perceptron_yaml( @_ );
}
sub save_perceptron_yaml {
my $self = shift;
my $nerve_file = shift;
use YAML;
YAML::DumpFile( $nerve_file, $self );
}
=head2 revive_from_yaml (...)
The parameters and usage are the same as C<load_perceptron_yaml>. See the next subroutine.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
Loads the YAML data and turns it into a C<AI::Perceptron::Simple> object as the return value.
=cut
sub revive_from_yaml {
load_perceptron_yaml( @_ );
}
sub load_perceptron_yaml {
my $nerve_file_to_load = shift;
use YAML;
local $YAML::LoadBlessed = 1;
my $loaded_nerve = YAML::LoadFile( $nerve_file_to_load );
$loaded_nerve;
}
=head1 TO DO
These are the to-do's that B<MIGHT> be done in the future. Don't put too much hope in them please :)
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=head1 SUPPORT
You can find documentation for this module with the perldoc command.
perldoc AI::Perceptron::Simple
You can also look for information at:
=over 4
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
This software is Copyright (c) 2021 by Raphael Jong Jun Jie.
This is free software, licensed under:
The Artistic License 2.0 (GPL Compatible)
=cut
1; # End of AI::Perceptron::Simple
view all matches for this distribution
view release on metacpan or search on metacpan
examples/and.pl view on Meta::CPAN
use Data::Dumper;
use AI::Perceptron;
print( "Example: training a perceptron to recognize an 'AND' function.\n",
"usage: $0 [<threshold> <weight1> <weight2>]\n" );
my $p = AI::Perceptron->new
->num_inputs( 2 )
->learning_rate( 0.1 );
if (@ARGV) {
$p->threshold( shift(@ARGV) )
->weights([ shift(@ARGV), shift(@ARGV) ]);
}
my @training_exs = (
[-1 => -1, -1],
[-1 => 1, -1],
[-1 => -1, 1],
[ 1 => 1, 1],
);
print "\nBefore Training\n";
dump_perceptron( $p );
print "\nTraining...\n";
examples/and.pl view on Meta::CPAN
print "\nAfter Training\n";
dump_perceptron( $p );
sub dump_perceptron {
my $p = shift;
print "\tThreshold: ", $p->threshold, " Weights: ", join(', ', @{ $p->weights }), "\n";
foreach my $inputs (@training_exs) {
my $target = $inputs->[0];
print "\tInputs = {", join(',', @$inputs[1..2]), "}, target=$target, output=", $p->compute_output( @$inputs[1..2] ), "\n";
}
}
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/PredictionClient/Alien/TensorFlowServingProtos.pm view on Meta::CPAN
=head1 SYNOPSIS
In your Build.PL:
use Module::Build;
use AI::PredictionClient::Alien::TensorFlowServingProtos;
my $builder = Module::Build->new(
...
configure_requires => {
'AI::PredictionClient::Alien::TensorFlowServingProtos' => '0',
...
},
extra_compiler_flags => AI::PredictionClient::Alien::TensorFlowServingProtos->cflags,
extra_linker_flags => AI::PredictionClient::Alien::TensorFlowServingProtos->libs,
...
);
$build->create_build_script;
In your Makefile.PL:
use ExtUtils::MakeMaker;
use Config;
use AI::PredictionClient::Alien::TensorFlowServingProtos;
WriteMakefile(
...
CONFIGURE_REQUIRES => {
'AI::PredictionClient::Alien::TensorFlowServingProtos' => '0',
},
CCFLAGS => AI::PredictionClient::Alien::TensorFlowServingProtos->cflags . " $Config{ccflags}",
LIBS => [ AI::PredictionClient::Alien::TensorFlowServingProtos->libs ],
...
);
=cut
=head1 DESCRIPTION
lib/AI/PredictionClient/Alien/TensorFlowServingProtos.pm view on Meta::CPAN
module will download and build a private copy.
The system dependencies needed for this module to build are most often already installed.
If not, the following dependencies need to be installed.
$ [sudo] apt-get install build-essential make g++
See Alien::Google::GRPC for potential additional build dependencies.
At this time only Linux builds are supported.
view all matches for this distribution
view release on metacpan or search on metacpan
bin/Inception.pl view on Meta::CPAN
my $default_port = '9000';
my $default_model = 'inception';
my $default_model_signature = 'predict_images';
option image_file => (
is => 'ro',
required => 1,
format => 's',
doc => '* Required: Path to image to be processed'
);
option host => (
is => 'ro',
required => 0,
format => 's',
default => $default_host,
doc => "IP address of the server [Default: $default_host]"
);
option port => (
is => 'ro',
required => 0,
format => 's',
default => $default_port,
doc => "Port number of the server [Default: $default_port]"
);
option model_name => (
is => 'ro',
required => 0,
format => 's',
default => $default_model,
doc => "Model to process image [Default: $default_model]"
);
option model_signature => (
is => 'ro',
required => 0,
format => 's',
default => $default_model_signature,
doc => "API signature for model [Default: $default_model_signature]"
);
option debug_verbose => (is => 'ro', doc => 'Verbose output');
option debug_loopback_interface => (
is => 'ro',
required => 0,
doc => "Test loopback through dummy server"
);
option debug_camel => (
is => 'ro',
required => 0,
doc => "Test using camel image"
);
sub run {
my ($self) = @_;
my $image_ref = $self->read_image($self->image_file);
my $client = AI::PredictionClient::InceptionClient->new(
host => $self->host,
port => $self->port
);
$client->model_name($self->model_name);
$client->model_signature($self->model_signature);
$client->debug_verbose($self->debug_verbose);
$client->loopback($self->debug_loopback_interface);
$client->camel($self->debug_camel);
printf("Sending image %s to server at host:%s port:%s\n",
$self->image_file, $self->host, $self->port);
if ($client->call_inception($image_ref)) {
my $results_ref = $client->inception_results;
my $classifications_ref = $results_ref->{'classes'};
my $scores_ref = $results_ref->{'scores'};
my $comments = 'Classification Results for ' . $self->image_file;
my $results_text
= form
'.===========================================================================.',
'| Class | Score |',
'|-----------------------------------------------------------+---------------|',
'| {[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[} |{]].[[[[[[[[} |',
$classifications_ref, $scores_ref,
'|===========================================================================|',
'| {[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[} |',
$comments,
"'==========================================================================='";
print $results_text;
} else {
printf("Failed. Status: %s, Status Code: %s, Status Message: %s \n",
$client->status, $client->status_code, $client->status_message);
return 1;
}
return 0;
}
sub read_image {
my $self = shift;
return \'' if $self->debug_camel;
my $file_name = shift;
my $max_file_size = 16 * 1000 * 1000; # A large but safe maximum
open(my $fh, '<:raw', $file_name)
or die "Could not open file: $file_name";
read($fh, my $buffer, $max_file_size);
close $fh;
return \$buffer;
}
exit main->new_with_options->run;
__END__
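A typical invocation might look like this (the host, port, and image file below are placeholders; omitted options fall back to the defaults shown in the option definitions above):
$ perl Inception.pl --image_file=my_image.jpg --host=127.0.0.1 --port=9000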
view all matches for this distribution
view release on metacpan or search on metacpan
examples/append.pl view on Meta::CPAN
END_PROLOG
print "Appending two lists 'append([a],[b,c,d],Z).'\n";
$prolog->query('append([a],[b,c,d],Z).');
while (my $result = $prolog->results) {
print Dumper($result),"\n";
}
print "\nWhich lists appends to a known list to form another known list?\n'append(X,[b,c,d],[a,b,c,d]).'\n";
$prolog->query('append(X,[b,c,d],[a,b,c,d]).');
while (my $result = $prolog->results) {
print Dumper($result),"\n";
}
print "\nWhich lists can be appended to form a given list?\n'append(X, Y, [foo, bar, 7, baz]).'\n";
my $list = $prolog->list(qw/foo bar 7 baz/);
$prolog->query("append(X,Y,[$list]).");
while (my $result = $prolog->results) {
print Dumper($result),"\n";
}
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/SimulatedAnnealing.pm view on Meta::CPAN
our $VERSION = '1.02';
# Specify default exports:
our @ISA = ("Exporter");
our @EXPORT = (
"anneal",
);
# Constants:
my $POUND = "#";
my $SQ = "'";
my $DQ = "\"";
lib/AI/SimulatedAnnealing.pm view on Meta::CPAN
# temperature during the annealing process.
#
# The function returns a reference to an array containing the
# optimized list of numbers.
sub anneal {
my $number_specs = validate_number_specs($_[0]);
my $cost_function = $_[1];
my $cycles_per_temperature = $_[2];
my $current_temperature;
my $lowest_cost;
my @integral_lower_bounds;
my @integral_upper_bounds;
my @optimized_list;
$current_temperature = 1;
for my $number_spec (@{ $number_specs }) {
push @integral_lower_bounds, int($number_spec->{"LowerBound"}
* (10 ** $number_spec->{"Precision"}));
push @integral_upper_bounds, int($number_spec->{"UpperBound"}
* (10 ** $number_spec->{"Precision"}));
if ($integral_upper_bounds[-1] - $integral_lower_bounds[-1]
> $current_temperature) {
$current_temperature
= $integral_upper_bounds[-1] - $integral_lower_bounds[-1];
} # end if
} # next $number_spec
while ($current_temperature > 0) {
my @adjusted_lower_bounds;
my @adjusted_upper_bounds;
# Calculate the temperature-adjusted bounds:
for my $dex (0..$#integral_lower_bounds) {
if ($current_temperature >= $integral_upper_bounds[$dex]
- $integral_lower_bounds[$dex] || !defined($lowest_cost)) {
push @adjusted_lower_bounds, $integral_lower_bounds[$dex];
push @adjusted_upper_bounds, $integral_upper_bounds[$dex];
}
else {
my $adjusted_lower_bound;
my $adjusted_upper_bound;
my $half_range = $current_temperature / 2.0;
if (floor($half_range) != $half_range) {
my $rand = rand();
if ($rand >= 0.5) {
$half_range = ceil($half_range);
}
else {
$half_range = floor($half_range);
} # end if
} # end if
$adjusted_lower_bound = int($optimized_list[$dex]
* (10 ** $number_specs->[$dex]->{"Precision"})
- $half_range);
if ($adjusted_lower_bound < $integral_lower_bounds[$dex]) {
$adjusted_lower_bound = $integral_lower_bounds[$dex];
}
elsif ($adjusted_lower_bound + $current_temperature
> $integral_upper_bounds[$dex]) {
$adjusted_lower_bound = $integral_upper_bounds[$dex]
- $current_temperature;
} # end if
$adjusted_upper_bound
= $adjusted_lower_bound + $current_temperature;
push @adjusted_lower_bounds, $adjusted_lower_bound;
push @adjusted_upper_bounds, $adjusted_upper_bound;
} # end if
} # next $dex
# Determine whether brute force is appropriate, and if so, use it:
my $combinations
= 1 + $adjusted_upper_bounds[0] - $adjusted_lower_bounds[0];
for my $dex (1..$#adjusted_upper_bounds) {
if ($combinations > $cycles_per_temperature) {
$combinations = 0;
last;
} # end if
$combinations *= (1 + $adjusted_upper_bounds[$dex]
- $adjusted_lower_bounds[$dex]);
} # next $dex
if ($combinations > 0 && $combinations <= $cycles_per_temperature) {
my @adjusted_number_specs;
# Create the adjusted number specifications:
for my $dex (0..$#{ $number_specs }) {
push @adjusted_number_specs, {
"LowerBound" => $adjusted_lower_bounds[$dex]
/ (10 ** $number_specs->[$dex]->{"Precision"}),
"UpperBound" => $adjusted_upper_bounds[$dex]
/ (10 ** $number_specs->[$dex]->{"Precision"}),
"Precision" => $number_specs->[$dex]->{"Precision"}};
} # next $dex
# Perform the brute-force analysis:
@optimized_list = @{ use_brute_force(
\@adjusted_number_specs, $cost_function) };
# Break out of the temperature-reduction loop:
last;
} # end if
# Perform randomization cycles:
for (1..$cycles_per_temperature) {
my @candidate_list;
my $cost;
for my $dex (0..$#adjusted_lower_bounds) {
my $rand = rand();
my $addend = floor($rand * (1 + $adjusted_upper_bounds[$dex]
- $adjusted_lower_bounds[$dex]));
push @candidate_list,
($adjusted_lower_bounds[$dex] + $addend)
/ (10 ** $number_specs->[$dex]->{"Precision"});
} # next $dex
$cost = $cost_function->(\@candidate_list);
unless (defined($lowest_cost) && $cost >= $lowest_cost) {
$lowest_cost = $cost;
@optimized_list = @candidate_list;
} # end unless
} # next cycle
# Reduce the temperature:
$current_temperature = floor(
$current_temperature * $TEMPERATURE_MULTIPLIER);
} # end while
return \@optimized_list;
} # end sub
####
# Private helper functions for use by this module:
lib/AI/SimulatedAnnealing.pm view on Meta::CPAN
# number representing a cost to be minimized). The method tests every
# possible combination of numbers matching the specifications and returns a
# reference to an array containing the optimal numbers, where "optimal"
# means producing the lowest cost.
sub use_brute_force {
my $number_specs = validate_number_specs($_[0]);
my $cost_function = $_[1];
my @optimized_list;
my @lists;
my @cursors;
# Populate the list of lists of numbers:
for my $number_spec (@{ $number_specs }) {
my @list;
my $num = $number_spec->{"LowerBound"};
while ($num <= $number_spec->{"UpperBound"}) {
push @list, $num;
$num += 1 / (10 ** $number_spec->{"Precision"});
} # end while
push @lists, \@list;
} # next $number_spec
# Populate @cursors with the starting position for each list of numbers:
for (0..$#lists) {
push @cursors, 0;
} # next
# Perform the tests:
my $lowest_cost = undef;
my $finished = $FALSE;
do {
# Perform a test using the current cursors:
my @candidate_list;
my $cost;
for my $dex (0..$#lists) {
push @candidate_list, $lists[$dex]->[$cursors[$dex]];
} # next $dex
$cost = $cost_function->(\@candidate_list);
unless (defined($lowest_cost) && $cost >= $lowest_cost) {
$lowest_cost = $cost;
@optimized_list = @candidate_list;
} # end unless
# Adjust the cursors for the next test if not finished:
for my $dex (reverse(0..$#lists)) {
my $cursor = $cursors[$dex];
if ($cursor < $#{ $lists[$dex] }) {
$cursor++;
$cursors[$dex] = $cursor;
last;
}
elsif ($dex == 0) {
$finished = $TRUE;
last;
}
else {
$cursors[$dex] = 0;
} # end if
} # next $dex
} until ($finished);
# Return the result:
return \@optimized_list;
} # end sub
# The validate_number_specs() function takes a reference to an array of
# number specifications (which are references to hashes with "LowerBound",
# "UpperBound", and "Precision" fields) and returns a reference to a version
# of the array in which bounds with higher precision than that specified
# have been rounded inward. If a number specification is not valid, the
# function calls "die" with an error message.
sub validate_number_specs {
my $raw_number_specs = $_[0];
my @processed_number_specs = @{ $raw_number_specs };
for my $number_spec (@processed_number_specs) {
my $lower_bound = $number_spec->{"LowerBound"};
my $upper_bound = $number_spec->{"UpperBound"};
my $precision = $number_spec->{"Precision"};
unless (looks_like_number($precision)
&& int($precision) == $precision
&& $precision >= 0 && $precision <= 4) {
die "ERROR: In a number specification, the precision must be "
. "an integer in the range 0 to 4.\n";
} # end unless
unless (looks_like_number($lower_bound)
&& looks_like_number($upper_bound)
&& $upper_bound > $lower_bound
&& $upper_bound <= 10 ** (4 - $precision)
&& $lower_bound >= -1 * (10 ** (4 - $precision))) {
die "ERROR: In a number specification, the lower and upper "
. "bounds must be numbers such that the upper bound is "
. "greater than the lower bound, the upper bound is not "
. "greater than 10 to the power of (4 - p) where p is the "
. "precision, and the lower bound is not less than -1 times "
. "the result of taking 10 to the power of (4 - p).\n";
} # end unless
# Round the bounds inward as necessary:
my $integral_lower_bound = ceil( $lower_bound * (10 ** $precision));
my $integral_upper_bound = floor($upper_bound * (10 ** $precision));
$number_spec->{"LowerBound"}
= $integral_lower_bound / (10 ** $precision);
$number_spec->{"UpperBound"}
= $integral_upper_bound / (10 ** $precision);
} # next $number_spec
return \@processed_number_specs;
} # end sub
# Module return value:
1;
__END__
lib/AI/SimulatedAnnealing.pm view on Meta::CPAN
AI::SimulatedAnnealing - optimize a list of numbers according to a specified
cost function.
=head1 SYNOPSIS
use AI::SimulatedAnnealing;
$optimized_list = anneal(
$number_specs, $cost_function, $cycles_per_temperature);
=head1 DESCRIPTION
This module provides a single public function, anneal(), that optimizes
a list of numbers according to a specified cost function.
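Here is a minimal sketch of a complete call; the number specification and cost function below are invented for illustration:
use AI::SimulatedAnnealing;
# optimize one number in [0, 3] with two decimal places of precision,
# minimizing its distance from an arbitrary target value
my $number_specs = [
    { "LowerBound" => 0, "UpperBound" => 3, "Precision" => 2 },
];
my $cost_function = sub {
    my $candidate_list = shift;              # reference to the candidate numbers
    return abs($candidate_list->[0] - 1.23); # cost is 0 at the target
};
my $optimized_list = anneal($number_specs, $cost_function, 100);
print "Optimized: $optimized_list->[0]\n";   # expect a value near 1.23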
lib/AI/SimulatedAnnealing.pm view on Meta::CPAN
reduction, the anneal() function multiplies the temperature by 0.95
and then rounds the result down to the nearest integer (if the result
isn't already an integer). When the temperature reaches zero,
annealing is immediately terminated.
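For example, a starting temperature of 100 drops through 95, 90, 85, 80, 76, 72, and so on, reaching zero after about 45 reductions.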
NOTE: Annealing can sometimes complete before the temperature
reaches zero if, after a particular temperature reduction, a
brute-force optimization approach (that is, testing every possible
combination of numbers within the subranges determined by the new
temperature) would produce a number of tests that is less than or
equal to the specified cycles per temperature. In that case, the
anneal() function performs the brute-force optimization to complete
the annealing process.
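For instance, if two numbers each have a temperature-adjusted subrange containing 11 integral values, there are only 11 * 11 = 121 combinations; with a cycles-per-temperature value of 121 or more, the function tests all of them directly and finishes early.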
After a temperature reduction, the anneal() function determines each
new subrange such that the current optimal integer from the total
range is as close as possible to the center of the new subrange.
When there is a tie between two possible positions for the subrange
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/TensorFlow/Libtensorflow.pm view on Meta::CPAN
FFI::C->ffi($ffi);
$ffi->mangler(AI::TensorFlow::Libtensorflow::Lib->mangler_default);
sub new {
my ($class) = @_;
bless {}, $class;
}
$ffi->attach( 'Version' => [], 'string' );
1;
lib/AI/TensorFlow/Libtensorflow.pm view on Meta::CPAN
=for html <a href="https://mybinder.org/v2/gh/EntropyOrg/perl-AI-TensorFlow-Libtensorflow/master"><img src="https://mybinder.org/badge_logo.svg" alt="Binder" /></a>
<a href="https://quay.io/repository/entropyorg/perl-ai-tensorflow-libtensorflow"><img src="https://img.shields.io/badge/quay.io-images-red.svg" alt="quay.io images" /></a>
=head1 SYNOPSIS
use aliased 'AI::TensorFlow::Libtensorflow' => 'Libtensorflow';
=head1 DESCRIPTION
The C<libtensorflow> library provides low-level C bindings
for TensorFlow with a stable ABI.
lib/AI/TensorFlow/Libtensorflow.pm view on Meta::CPAN
=head1 CLASS METHODS
=head2 Version
my $version = Libtensorflow->Version();
like $version, qr/(\d|\.)+/, 'Got version';
B<Returns>
=over 4
lib/AI/TensorFlow/Libtensorflow.pm view on Meta::CPAN
This software is Copyright (c) 2022-2023 by Auto-Parallel Technologies, Inc.
This is free software, licensed under:
The Apache License, Version 2.0, January 2004
=cut
view all matches for this distribution