view release on metacpan or search on metacpan
lib/AI/FANN/Evolving.pm view on Meta::CPAN
sub new {
my $class = shift;
my %args = @_;
my $self = {};
bless $self, $class;
$self->_init(%args);
# de-serialize from a file
if ( my $file = $args{'file'} ) {
$self->{'ann'} = AI::FANN->new_from_file($file);
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Fuzzy/Axis.pm view on Meta::CPAN
my ($class) = @_;
my $self = {};
$self->{labels} = {};
bless $self, $class;
return $self;
}
sub addlabel {
# adds a label for a range of values..
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/FuzzyEngine.pm view on Meta::CPAN
use AI::FuzzyEngine::Variable;
# Construct an empty fuzzy engine holding no variables yet.
sub new {
    my $class = shift;
    my $self  = { _variables => [] };    # variables added later via the Variable API
    return bless $self, $class;
}
view all matches for this distribution
view release on metacpan or search on metacpan
FuzzyInference.pm view on Meta::CPAN
# Constructor; callable as a class method or on an existing instance
# ($obj->new re-uses $obj's class). Delegates setup to _init().
sub new {
    my $proto = shift;
    my $class = ref($proto) || $proto;
    my $obj   = bless {} => $class;
    $obj->_init;
    return $obj;
}
view all matches for this distribution
view release on metacpan or search on metacpan
AI/Gene/Sequence.pm view on Meta::CPAN
# they will not be called by the 'sequence' methods.
# Default constructor
# Default constructor. A gene is a blessed pair:
#   [0] the sequence string (empty at birth)
#   [1] an array ref of tokens (empty at birth)
# Works as class or instance method via ref($proto) || $proto.
sub new {
    my $proto = $_[0];
    my $class = ref($proto) || $proto;
    return bless [ '', [] ], $class;
}
# remember that clone method may require deep copying depending on
# your specific needs
# Copy the gene: the sequence string and the token LIST are duplicated,
# but references held inside the token list are still shared with the
# original — deep-copy yourself if your genes need that (see note above
# the sub in the original distribution).
sub clone {
    my ($self) = @_;
    my $copy = [ $self->[0], [ @{ $self->[1] } ] ];
    return bless $copy, ref $self;
}
# You need some way to use the gene you've made and mutated, but
# this will let you have a look, if it starts being odd.
view all matches for this distribution
view release on metacpan or search on metacpan
our $VERSION = '0.01';
# Constructor: bless an empty array-based object, then hand it to
# dwim() for its one-time setup before returning it.
sub new {
    my $class = shift;
    my $self  = bless [], $class;
    $self->dwim('Implement self');
    return $self;
}
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Genetic/Pro.pm view on Meta::CPAN
sub new {
my ( $class, %args ) = ( shift, @_ );
#-------------------------------------------------------------------
my %opts = map { if(ref $_){$_}else{ /^-?(.*)$/o; $1 }} @_;
my $self = bless \%opts, $class;
#-------------------------------------------------------------------
$AI::Genetic::Pro::Array::Type::Native = 1 if $self->native;
#-------------------------------------------------------------------
lib/AI/Genetic/Pro.pm view on Meta::CPAN
my ( $self, $dump ) = @_;
if( my $typ = $self->_package ){
@{ $dump->{ chromosomes } } = map {
my $arr = $typ->make_with_packed( $_ );
bless $arr, q[AI::Genetic::Pro::Chromosome];
} @{ $dump->{ chromosomes } };
}
%$self = %$dump;
view all matches for this distribution
view release on metacpan or search on metacpan
# -terminate: set termination sub.
sub new {
my ($class, %args) = @_;
my $self = bless {
ADDSEL => {}, # user-defined selections
ADDCRS => {}, # user-defined crossovers
ADDMUT => {}, # user-defined mutations
ADDSTR => {}, # user-defined strategies
} => $class;
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Image.pm view on Meta::CPAN
$attr{'error'} = 'API Key missing' unless $attr{'key'};
$attr{'model'} = 'dall-e-2' unless $attr{'model'};
$attr{'size'} = '512x512' unless $attr{'size'};
return bless \%attr, $class;
}
# Define endpoints for APIs
my %url = (
'OpenAI' => 'https://api.openai.com/v1/images/generations',
view all matches for this distribution
view release on metacpan or search on metacpan
O_OBJECT
if( sv_isobject($arg) && (SvTYPE(SvRV($arg)) == SVt_PVMG) )
$var = ($type)SvIV((SV*)SvRV( $arg ));
else{
warn( \"${Package}::$func_name() -- $var is not a blessed SV reference\" );
XSRETURN_UNDEF;
}
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/ML/Expr.pm view on Meta::CPAN
package AI::ML::Expr;
use strict;
use warnings;
use Chart::Gnuplot;
use Scalar::Util 'blessed';
use AI::ML;
use Math::Lapack;
use aliased 'Math::Lapack::Matrix' => 'M';
use parent 'Exporter';
use parent 'Math::Lapack::Expr';
our @EXPORT = qw(mini_batch tanh sigmoid relu lrelu d_sigmoid d_relu d_lrelu d_tanh softmax sigmoid_cost plot plot_cost);
use Math::Lapack::Expr;
# Wrap a low-level matrix id in a Math::Lapack::Matrix object so the
# result of an XS call can flow back into the expression system.
sub _bless {
    my ($matrix_id) = @_;
    my %obj = ( _matrix => $matrix_id, type => 'matrix' );
    return bless \%obj, 'Math::Lapack::Matrix';
}
=head2 sigmoid
Allow apply the function sigmoid to every element of the matrix.
lib/AI/ML/Expr.pm view on Meta::CPAN
=cut
# Build a lazy 'sigmoid' expression node wrapping $self; the actual
# element-wise computation is deferred to eval_sigmoid.
sub sigmoid {
    my $operand = shift;
    my %node = ( package => __PACKAGE__, type => 'sigmoid', args => [$operand] );
    return bless \%node => __PACKAGE__;
}
# Evaluate a sigmoid node: apply the XS _sigmoid to a concrete matrix.
# Dies (reporting the actual ref type) when the operand has not been
# reduced to a Math::Lapack::Matrix yet.
sub eval_sigmoid {
    my ($tree) = @_;
    die "Sigmoid for non matrix: " . ref($tree)
        unless blessed($tree) && $tree->isa("Math::Lapack::Matrix");
    return _bless( _sigmoid( $tree->matrix_id ) );
}
lib/AI/ML/Expr.pm view on Meta::CPAN
=cut
# Build a lazy 'relu' expression node wrapping $self; evaluation is
# deferred to eval_relu.
sub relu {
    my $operand = shift;
    my %node = ( package => __PACKAGE__, type => 'relu', args => [$operand] );
    return bless \%node => __PACKAGE__;
}
# Evaluate a relu node: apply the XS _relu to a concrete matrix.
# Uses blessed()/isa() rather than a ref() string compare so that
# subclasses of Math::Lapack::Matrix are accepted, consistent with
# eval_sigmoid above.
sub eval_relu {
    my $tree = shift;
    if ( blessed($tree) && $tree->isa("Math::Lapack::Matrix") ) {
        return _bless( _relu( $tree->matrix_id ) );
    }
    die "ReLU for non matrix";
}
=head2 d_relu
lib/AI/ML/Expr.pm view on Meta::CPAN
=cut
# Build a lazy 'd_relu' (relu derivative) expression node; evaluation
# is deferred to eval_d_relu.
sub d_relu {
    my $operand = shift;
    my %node = ( package => __PACKAGE__, type => 'd_relu', args => [$operand] );
    return bless \%node => __PACKAGE__;
}
# Evaluate a d_relu node: apply the XS _d_relu to a concrete matrix.
# blessed()/isa() accepts subclasses of Math::Lapack::Matrix, unlike
# the previous ref() string compare; matches eval_sigmoid's check.
# NOTE(review): the error message says "ReLU" (same as eval_relu's) —
# kept byte-identical in case anything matches on it.
sub eval_d_relu {
    my $tree = shift;
    if ( blessed($tree) && $tree->isa("Math::Lapack::Matrix") ) {
        return _bless( _d_relu( $tree->matrix_id ) );
    }
    die "ReLU for non matrix";
}
=head2 lrelu
lib/AI/ML/Expr.pm view on Meta::CPAN
=cut
# Build a lazy 'lrelu' (leaky relu) node; $v is the leak coefficient
# and is carried in args alongside the operand.
sub lrelu {
    my ( $operand, $v ) = @_;
    my %node = ( package => __PACKAGE__, type => 'lrelu', args => [ $operand, $v ] );
    return bless \%node => __PACKAGE__;
}
# Evaluate an lrelu node with leak coefficient $v. blessed()/isa()
# accepts Math::Lapack::Matrix subclasses, unlike the previous ref()
# string compare; consistent with eval_sigmoid.
sub eval_lrelu {
    my ( $tree, $v ) = @_;
    if ( blessed($tree) && $tree->isa("Math::Lapack::Matrix") ) {
        return _bless( _lrelu( $tree->matrix_id, $v ) );
    }
    die "lReLU for non matrix";
}
=head2 d_lrelu
lib/AI/ML/Expr.pm view on Meta::CPAN
=cut
# Build a lazy 'd_lrelu' (leaky-relu derivative) node; $v is the leak
# coefficient, carried in args alongside the operand.
sub d_lrelu {
    my ( $operand, $v ) = @_;
    my %node = ( package => __PACKAGE__, type => 'd_lrelu', args => [ $operand, $v ] );
    return bless \%node => __PACKAGE__;
}
# Evaluate a d_lrelu node with leak coefficient $v. blessed()/isa()
# accepts Math::Lapack::Matrix subclasses, unlike the previous ref()
# string compare; consistent with eval_sigmoid.
sub eval_d_lrelu {
    my ( $tree, $v ) = @_;
    if ( blessed($tree) && $tree->isa("Math::Lapack::Matrix") ) {
        return _bless( _d_lrelu( $tree->matrix_id, $v ) );
    }
    die "lReLU for non matrix";
}
lib/AI/ML/Expr.pm view on Meta::CPAN
$m = $m->softmax();
=cut
# Build a lazy 'softmax' expression node wrapping $self; evaluation is
# deferred to eval_softmax.
sub softmax {
    my $operand = shift;
    my %node = ( package => __PACKAGE__, type => 'softmax', args => [$operand] );
    return bless \%node => __PACKAGE__;
}
sub eval_softmax {
my $tree = shift;
if (ref($tree) eq "Math::Lapack::Matrix") {
lib/AI/ML/Expr.pm view on Meta::CPAN
my $e_x = exp( $tree - $s );
my $div = sum( $e_x, 1 );
return $e_x / $div;
#use Data::Dumper;
#print STDERR Dumper $matrix;
# return _bless _softmax($tree->matrix_id);
}
die "softmax for non matrix";
}
=head2 d_softmax
lib/AI/ML/Expr.pm view on Meta::CPAN
$m = $m->d_softmax();
=cut
# Build a lazy 'd_softmax' (softmax derivative) node; evaluation is
# deferred to eval_d_softmax.
sub d_softmax {
    my $operand = shift;
    my %node = ( package => __PACKAGE__, type => 'd_softmax', args => [$operand] );
    return bless \%node => __PACKAGE__;
}
# Evaluate a d_softmax node: apply the XS _d_softmax to a concrete
# matrix. blessed()/isa() accepts Math::Lapack::Matrix subclasses,
# unlike the previous ref() string compare; consistent with
# eval_sigmoid.
sub eval_d_softmax {
    my $tree = shift;
    if ( blessed($tree) && $tree->isa("Math::Lapack::Matrix") ) {
        return _bless( _d_softmax( $tree->matrix_id ) );
    }
    die "d_softmax for non matrix";
}
=head2 tanh
lib/AI/ML/Expr.pm view on Meta::CPAN
$m = $m->tanh();
=cut
# Build a lazy 'tanh' expression node wrapping $self; evaluation is
# deferred to eval_tanh.
sub tanh {
    my $operand = shift;
    my %node = ( package => __PACKAGE__, type => 'tanh', args => [$operand] );
    return bless \%node => __PACKAGE__;
}
# Evaluate a tanh node: apply the XS _tanh to a concrete matrix.
# blessed()/isa() accepts Math::Lapack::Matrix subclasses, unlike the
# previous ref() string compare; consistent with eval_sigmoid.
sub eval_tanh {
    my $tree = shift;
    if ( blessed($tree) && $tree->isa("Math::Lapack::Matrix") ) {
        return _bless( _tanh( $tree->matrix_id ) );
    }
    die "tanh for non matrix";
}
=head2 d_tanh
lib/AI/ML/Expr.pm view on Meta::CPAN
$m = $m->d_tanh();
=cut
# Build a lazy 'd_tanh' (tanh derivative) node; evaluation is deferred
# to eval_d_tanh.
sub d_tanh {
    my $operand = shift;
    my %node = ( package => __PACKAGE__, type => 'd_tanh', args => [$operand] );
    return bless \%node => __PACKAGE__;
}
# Evaluate a d_tanh node: apply the XS _d_tanh to a concrete matrix.
# blessed()/isa() accepts Math::Lapack::Matrix subclasses, unlike the
# previous ref() string compare; consistent with eval_sigmoid.
sub eval_d_tanh {
    my $tree = shift;
    if ( blessed($tree) && $tree->isa("Math::Lapack::Matrix") ) {
        return _bless( _d_tanh( $tree->matrix_id ) );
    }
    die "d_tanh for non matrix";
}
lib/AI/ML/Expr.pm view on Meta::CPAN
=cut
# Build a lazy 'd_sigmoid' (sigmoid derivative) node; evaluation is
# deferred to eval_d_sigmoid.
sub d_sigmoid {
    my $operand = shift;
    my %node = ( package => __PACKAGE__, type => 'd_sigmoid', args => [$operand] );
    return bless \%node => __PACKAGE__;
}
# Evaluate a d_sigmoid node: apply the XS _d_sigmoid to a concrete
# matrix.
# FIX: the failure branch previously *returned* the error string
# instead of dying — callers silently received a plain string where
# every sibling eval_* function throws. Also switched to
# blessed()/isa() so Math::Lapack::Matrix subclasses are accepted,
# consistent with eval_sigmoid.
sub eval_d_sigmoid {
    my $tree = shift;
    if ( blessed($tree) && $tree->isa("Math::Lapack::Matrix") ) {
        return _bless( _d_sigmoid( $tree->matrix_id ) );
    }
    die "d_sigmoid for non matrix";
}
=head2 sigmoid_cost
lib/AI/ML/Expr.pm view on Meta::CPAN
=cut
# Slice a mini-batch of $size rows ($axis == 0, the default) or
# columns ($axis == 1) from the matrix, starting at offset $start.
sub mini_batch {
    my ( $self, $start, $size, $axis ) = @_;
    $axis = defined $axis ? $axis : 0;    # default axis: rows
    return _bless( _mini_batch( $self->matrix_id, $start, $size, $axis ) );
}
=head2 prediction
=cut
# Binary-classification prediction: threshold the matrix elementwise.
# Accepts an optional named argument (threshold => $t); defaults to 0.50.
sub prediction {
    my ( $self, %opts ) = @_;
    my $threshold = exists $opts{threshold} ? $opts{threshold} : 0.50;
    return _bless( _predict_binary_classification( $self->matrix_id, $threshold ) );
}
=head2 precision
=cut
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/MXNet/CachedOp.pm view on Meta::CPAN
sub call
{
my $self = shift;
my @args;
my %kwargs;
if(blessed $_[0] and $_[0]->isa('AI::MXNet::NDArray'))
{
while(blessed $_[0] and $_[0]->isa('AI::MXNet::NDArray'))
{
push @args, shift(@_);
}
%kwargs = @_;
}
lib/AI/MXNet/CachedOp.pm view on Meta::CPAN
}
my $original_output;
if(defined $out)
{
$original_output = $out;
if(blessed($out))
{
$out = [$out];
}
}
else
view all matches for this distribution
view release on metacpan or search on metacpan
inc/Module/Install.pm view on Meta::CPAN
$args{path} = $args{name};
$args{path} =~ s!::!/!g;
}
$args{file} ||= "$args{base}/$args{prefix}/$args{path}.pm";
bless( \%args, $class );
}
sub call {
my ($self, $method) = @_;
my $obj = $self->load($method) or return;
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/MegaHAL.pm view on Meta::CPAN
sub new {
my ($class,%args) = @_;
my $self;
# Bless ourselves into the AI::MegaHAL class.
$self = bless({ },$class);
# Make sure that we can find a brain or a training file somewhere
# else die with an error.
my $path = $args{'Path'} || ".";
if(-e "$path/megahal.brn" || -e "$path/megahal.trn") {
view all matches for this distribution
view release on metacpan or search on metacpan
bin/remote.pl view on Meta::CPAN
sub new {
my $class = shift;
no strict 'refs';
my $self = bless { @_, cache => [] }, $class;
# compute some defaults
$self->{category} ||= ${"$class\::Default"};
# fall back to last resort (FIXME should we carp()?)
bin/remote.pl view on Meta::CPAN
}
# Minimal constructor: a hash-based object holding an empty cache.
sub new {
    my ($class) = @_;
    return bless { cache => [] }, $class;
}
sub theme {
my $class = ref $_[0] || $_[0];
no strict 'refs';
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/NNEasy/NN.pm view on Meta::CPAN
$this->{learning_algorithm_class} = "AI::NNEasy::NN::" . $this->{learning_algorithm} ;
my $nntype_class = "AI::NNEasy::NN::" . $this->{networktype} . '_' . $this->{learning_algorithm} ;
bless($this , $nntype_class) ;
foreach my $i ( @$params ) {
next if !$$i{nodes} ;
my %layer = %{$i} ;
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/NNFlex.pm view on Meta::CPAN
###############################################################################
sub new
{
my $class = shift;
my $network={};
bless $network,$class;
# intercept the new style 'empty network' constructor call
# Maybe I should deprecate the old one, but its convenient, provided you
# can follow the mess of hashes
lib/AI/NNFlex.pm view on Meta::CPAN
foreach (keys %{$params})
{
$$layer{$_} = $$params{$_}
}
bless $layer,$class;
my $numNodes = $$params{'nodes'};
my @nodes;
lib/AI/NNFlex.pm view on Meta::CPAN
}
$$node{'active'} = 1;
$$node{'error'} = 0;
bless $node,$class;
AI::NNFlex::dbug($params,"Created node $node",2);
return $node;
}
##############################################################################
view all matches for this distribution
view release on metacpan or search on metacpan
NaiveBayes1.pm view on Meta::CPAN
# non-exported package globals go here
use vars qw();
sub new {
my $package = shift;
return bless {
attributes => [ ],
labels => [ ],
attvals => {},
real_stat => {},
numof_instances => 0,
NaiveBayes1.pm view on Meta::CPAN
# Alternate constructor: rebuild a classifier from a YAML string and
# re-bless the deserialized structure into this package.
sub import_from_YAML {
    my ( $package, $yaml ) = @_;
    my $self = YAML::Load($yaml);
    return bless $self, $package;
}
# Alternate constructor: rebuild a classifier from a YAML file on disk
# and re-bless the deserialized structure into this package.
sub import_from_YAML_file {
    my ( $package, $yaml_file ) = @_;
    my $self = YAML::LoadFile($yaml_file);
    return bless $self, $package;
}
# assume that the last header count means counts
# after optionally removing counts, the last header is label
sub add_table {
view all matches for this distribution
view release on metacpan or search on metacpan
BackProp.pm view on Meta::CPAN
use strict;
# Dummy constructor
# Dummy constructor: nothing but an empty blessed hash.
sub new {
    my $class = shift;
    return bless {}, $class;
}
# Rounds floats to ints
sub intr {
shift if(substr($_[0],0,4) eq 'AI::');
BackProp.pm view on Meta::CPAN
my $layers = shift;
my $size = shift;
my $out = shift || $size;
my $flag = shift || 0;
bless $self, $type;
# If $layers is a string, then it will be nummerically equal to 0, so try to load it
# as a network file.
if($layers == 0) {
# We use a "1" flag as the second argument to indicate that we want load()
BackProp.pm view on Meta::CPAN
use strict;
# Dummy constructor.
# Dummy constructor recording the parent network so the neuron
# interface is satisfied.
sub new {
    my ( $class, $parent ) = @_;
    return bless { PARENT => $parent }, $class;
}
# This is so we comply with the neuron interface.
# Deliberate no-ops: callers may invoke these uniformly on any node,
# but this node type has nothing to do for them.
sub weight {}
sub input {}
BackProp.pm view on Meta::CPAN
use strict;
# Dummy constructor.
# Dummy constructor recording the parent network so the neuron
# interface is satisfied.
sub new {
    my ( $class, $parent ) = @_;
    return bless { PARENT => $parent }, $class;
}
# Compliance with neuron interface
# Deliberate no-op: this node type has no weight to report or adjust.
sub weight {}
BackProp.pm view on Meta::CPAN
file => $_[1]
};
my (@a,@b)=load_pcx($_[1]);
$self->{image}=\@a;
$self->{palette}=\@b;
bless \%{$self}, $type;
}
# Returns a rectangular block defined by an array ref in the form of
# [$x1,$y1,$x2,$y2]
# Return value is an array ref
BackProp.pm view on Meta::CPAN
=item $net->load_pcx($filename);
Oh heres a treat... this routine will load a PCX-format file (yah, I know ... ancient format ... but
it is the only one I could find specs for to write it in Perl. If anyone can get specs for
any other formats, or could write a loader for them, I would be very grateful!) Anyways, a PCX-format
file that is exactly 320x200 with 8 bits per pixel, with pure Perl. It returns a blessed reference to
an AI::NeuralNet::BackProp::PCX object, which supports the following routines/members. See example
files ex_pcxl.pl and ex_pcx.pl in the ./examples/ directory.
view all matches for this distribution
view release on metacpan or search on metacpan
* References
* ==========
*
* Each of Rect, Map, Array, and Vector contains a member 'ref' which is
* an SV* pointing to an RV. The RV can be returned directly to perl-land
* after being blessed into its respective class.
*
* The RV references an SV containing an IV. The IV is set to the base
* address of its component structure. This is so the class code can know
* which instance of the class is being referred to on callback.
*
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/NeuralNet/Kohonen.pm view on Meta::CPAN
=cut
sub new {
my $class = shift;
my %args = @_;
my $self = bless \%args,$class;
$self->{missing_mask} = 'x' unless defined $self->{missing_mask};
$self->_process_table if defined $self->{table}; # Creates {input}
$self->load_input($self->{input_file}) if defined $self->{input_file}; # Creates {input}
if (not defined $self->{input}){
view all matches for this distribution
view release on metacpan or search on metacpan
my $layers = shift;
my $nodes = shift;
my $outputs = shift || $nodes;
my $inputs = shift || $nodes;
bless $self, $type;
# If $layers is a string, then it will be numerically equal to 0, so
# try to load it as a network file.
if($layers == 0) {
# We use a "1" flag as the second argument to indicate that we
# You can use "sigmoid_1" as a synonym to "sigmoid".
# Type can also be a CODE ref, ( ref($type) eq "CODE" ).
# If $type is a CODE ref, then the function is called in this form:
# $output = &$type($sum_of_inputs,$self);
# The code ref then has access to all the data in that node (thru the
# blessed refrence $self) and is expected to return the value to be used
# as the output for that node. The sum of all the inputs to that node
# is already summed and passed as the first argument.
sub activation {
my $self = shift;
my $layer = shift || 0;
my $self ={
_parent => shift,
_inputs => [],
_outputs => []
};
bless $self, $type;
}
# Receive inputs from other nodes, and also send
# outputs on.
sub input {
1;
# Internal usage, prevents recursion on empty nodes.
# A "cap" is a stand-in node whose methods all do nothing: it absorbs
# inputs and weight updates so real nodes never have to special-case a
# missing neighbour.
package AI::NeuralNet::Mesh::cap;
sub new { bless {}, shift }
sub input {}
sub adjust_weight {}
sub add_output_node {}
sub add_input_node {}
1;
my $type = shift;
my $self ={
_parent => shift,
_inputs => [],
};
bless $self, $type;
}
sub add_input_node {
my $self = shift;
return (++$self->{_inputs_size})-1;
=item AI::NeuralNet::Mesh->new($file);
This will automatically create a new network from the file C<$file>. It will
return undef if the file was of an incorrect format or non-existent. Otherwise,
it will return a blessed reference to a network completely restored from C<$file>.
=item AI::NeuralNet::Mesh->new(\@layer_sizes);
This constructor will make a network with the number of layers corresponding to the length
in elements of the array ref passed. Each element in the array ref passed is expected
$output = &$code_ref($sum_of_inputs, $self);
The code ref is expected to return a value to be used as the output of the node.
The code ref also has access to all the data of that node through the second argument,
a blessed hash refrence to that node.
See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions
other than the ones listed above.
Three of the activation syntaxes are shown in the first constructor above, the "linear",
$output = &$code_ref($sum_of_inputs, $self);
The code ref is expected to return a value to be used as the output of the node.
The code ref also has access to all the data of that node through the second argument,
a blessed hash refrence to that node.
See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions
other than the ones listed above.
The activation type for each layer is preserved across load/save calls.
This is a treat... this routine will load a PCX-format file (yah, I know ... ancient
format ... but it is the only one I could find specs for to write it in Perl. If
anyone can get specs for any other formats, or could write a loader for them, I
would be very grateful!) Anyways, a PCX-format file that is exactly 320x200 with 8 bits
per pixel, with pure Perl. It returns a blessed reference to a PCX::Loader object, which
supports the following routines/members. See example files ex_pcx.pl and ex_pcxl.pl in
the ./examples/ directory.
See C<perldoc PCX::Loader> for information on the methods of the object returned.
we simply round to an integer and pluck that index from the array and
use it as the output value for that node.
See? It's not that hard! Using custom activation functions, you could do
just about anything with the node that you want to, since you have
access to the node just as if you were a blessed member of that node's object.
=item ramp($r);
ramp() performs smooth ramp activation between 0 and 1 if $r is 1,
in the ALN example, I use a connector in the main package called tree() instead of
the default connector. Before I call the new() constructor, I use this line of code:
$AI::NeuralNet::Mesh::Connector = 'main::tree'
The tree() function is called as a blessed method when it is used internally, providing
access to the bless refrence in the first argument. See notes on CUSTOM NETWORK CONNECTORS,
below, for more information on creating your own custom connector.
=item $AI::NeuralNet::Mesh::DEBUG
in the ALN example, I use a connector in the main package called tree() instead of
the default connector. Before I call the new() constructor, I use this line of code:
$AI::NeuralNet::Mesh::Connector = 'main::tree'
The tree() function is called as a blessed method when it is used internally, providing
access to the bless refrence in the first argument.
Example connector:
sub connect_three {
my $self = shift;
$y + $r1a give the index into the mesh array of the current node to connect the output FROM.
We need to connect this nodes output lines to the next layers input nodes. We do this
with a simple method of the outputing node (the node at $y+$r1a), called add_output_node().
add_output_node() takes one simple argument: a blessed reference to a node that it is supposed
to output its final value TO. We get this blessed reference with more simple addition.
$y + $r2a gives us the node directly above the first node (supposedly...I'll get to the "supposedly"
part in a minute.) By adding or subtracting from this number we get the neighbor nodes.
In the above example you can see we check the $y index to see that we haven't come close to
any of the edges of the range.
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/NeuralNet/SOM/Hexa.pm view on Meta::CPAN
=cut
sub new {
my $class = shift;
my %options = @_;
my $self = bless { %options }, $class;
if ($self->{output_dim} > 0) {
$self->{_D} = $self->{output_dim};
} else {
die "output dimension must be positive integer";
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/NeuralNet/Simple.pm view on Meta::CPAN
unless defined $_ && /^\d+$/;
}
my $seed = rand(1); # Perl invokes srand() on first call to rand()
my $handle = c_new_network(@args);
logdie "could not create new network" unless $handle >= 0;
my $self = bless {
input => $args[0],
hidden => $args[1],
output => $args[2],
handle => $handle,
}, $class;
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/ParticleSwarmOptimization/MCE.pm view on Meta::CPAN
#=======================================================================
sub new {
my ($class, %params) = @_;
#-------------------------------------------------------------------
my $self = bless {}, $class;
$self->SUPER::setParams( %params );
#-------------------------------------------------------------------
$self->_init_mce( \%params );
$self->_init_pop( \%params );
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/ParticleSwarmOptimization.pm view on Meta::CPAN
use constant kLogDetail => 8;
use constant kLogIterDetail => (kLogIter | kLogDetail);
# Construct an optimizer and immediately apply the supplied
# parameters via setParams().
sub new {
    my ( $class, %params ) = @_;
    my $self = bless {}, $class;
    $self->setParams(%params);
    return $self;
}
view all matches for this distribution
view release on metacpan or search on metacpan
Rectangle.xs view on Meta::CPAN
string = SvRV(ST(0));
clone = sv_newmortal();
sv_setsv( clone, string );
clone = newRV_inc( clone );
sv_bless( clone, SvSTASH( string ));
XPUSHs( sv_2mortal(clone));
void
clone_rect(pmap self, IV begin_x, IV begin_y, IV end_x, IV end_y)
Rectangle.xs view on Meta::CPAN
};
/*Prepare for return full object */
clone = newRV_inc( clone );
sv_bless( clone, SvSTASH( SvRV(ST(0) )));
XPUSHs( sv_2mortal(clone));
void
new(self, options)
SV * self;
Rectangle.xs view on Meta::CPAN
newmap->width = re_map.width;
newmap->height = re_map.height;
SvCUR_set(object, map_size);
RETVALUE = sv_2mortal( newRV_inc(object ));
sv_bless(RETVALUE, gv_stashpv( SvPV_nolen( self ), GV_ADD));
XPUSHs(RETVALUE);
void
start_x(pmap self, int newpos_x = 0 )
PPCODE:
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Pathfinding/AStar/AStarNode.pm view on Meta::CPAN
$self->{parent} = undef;
$self->{cost} = 0;
$self->{inopen} = 0;
$self->{heap} = undef;
bless ($self, $class);
return $self;
}
sub heap {
my ($self, $val) = @_;
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
use AI::Pathfinding::OptimizeMultiple::SimulationResults ();
use MooX qw/late/;
use PDL;
use Scalar::Util qw/ blessed /;
has chosen_scans => ( isa => 'ArrayRef', is => 'rw' );
has _iter_idx => ( isa => 'Int', is => 'rw', default => sub { 0; }, );
has _num_boards => ( isa => 'Int', is => 'ro', init_arg => 'num_boards', );
has _orig_scans_data => ( isa => 'PDL', is => 'rw' );
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
else
{
$err = Exception::Class->caught();
if ($err)
{
if ( not( blessed $err && $err->can('rethrow') ) )
{
die $err;
}
$err->rethrow;
}
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Pathfinding/SMAstar.pm view on Meta::CPAN
_state_get_data_func => undef,
@_, # attribute override
};
return bless $self, $class;
}
sub state_eval_func {
my $self = shift;
view all matches for this distribution