view release on metacpan or search on metacpan
lib/AI/Genetic/Pro.pm view on Meta::CPAN
use constant GD => 'GD::Graph::linespoints';
#=======================================================================
my $_Cache = { };
my $_temp_chromosome;
#=======================================================================
sub new {
my ( $class, %args ) = ( shift, @_ );
#-------------------------------------------------------------------
my %opts = map { if(ref $_){$_}else{ /^-?(.*)$/o; $1 }} @_;
my $self = bless \%opts, $class;
view all matches for this distribution
view release on metacpan or search on metacpan
examples/image_classification.pl view on Meta::CPAN
GetOptions(
## my Pembroke Welsh Corgi Kyuubi, enjoying Solar eclipse of August 21, 2017
'image=s' => \(my $image = 'http://apache-mxnet.s3-accelerate.dualstack.amazonaws.com/'.
'gluon/dataset/kyuubi.jpg'),
'model=s' => \(my $model = 'resnet152_v2'),
'help' => sub { HelpMessage(0) },
) or HelpMessage(1);
## get a pretrained model (download parameters file if necessary)
my $net = get_model($model, pretrained => 1);
view all matches for this distribution
view release on metacpan or search on metacpan
examples/calculator.pl view on Meta::CPAN
use warnings;
use AI::MXNet ('mx');
## preparing the samples
## to train our network
sub samples {
my($batch_size, $func) = @_;
# get samples
my $n = 16384;
## creates a pdl with $n rows and two columns with random
## floats in the range between 0 and 1
view all matches for this distribution
view release on metacpan or search on metacpan
bin/from-folder.pl view on Meta::CPAN
GetOptions (\%opts, "cache_file=s");
sub translate
{
return unless -f;
(my $rel_name = $File::Find::name) =~ s{.*/}{}xs;
my $name = md5_hex($rel_name);
my $go = 0;
view all matches for this distribution
view release on metacpan or search on metacpan
examples/bp.pl view on Meta::CPAN
#==============================================================
#********** THIS IS THE MAIN PROGRAM **************************
#==============================================================
sub main
{
# initiate the weights
initWeights();
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/NaiveBayes.pm view on Meta::CPAN
with Storage(format => 'Storable', io => 'File');
has model => (is => 'ro', isa => 'HashRef[HashRef]', required => 1);
# Train a naive-Bayes classifier from a list of training examples.
# Each argument is a hashref whose key/value pairs are expanded into
# AI::NaiveBayes::Learner->add_example. Returns the finished classifier.
# Note: a fresh learner is built on every call — $self's existing model
# is not consulted or reused.
sub train {
    my ($self, @examples) = @_;
    my $learner = AI::NaiveBayes::Learner->new();
    $learner->add_example( %$_ ) for @examples;
    return $learner->classifier;
}
sub classify {
my ($self, $newattrs) = @_;
$newattrs or die "Missing parameter for classify()";
my $m = $self->model;
view all matches for this distribution
view release on metacpan or search on metacpan
examples/digits/digits.pl view on Meta::CPAN
show784($delta(:,6));
show784($delta(:,4));
}
#die join (',',$nncost->dims);
use PDL::Graphics2D;
# Render a 784-element PDL vector as a 28x28 image via PDL::Graphics2D's
# imag2d, after min/max-normalizing the values into [0,1].
# NOTE(review): `-=` and `/=` operate on the piddle in place; depending on
# whether squeeze() shares data with the caller's piddle, the caller's data
# may be modified — confirm against PDL docs.
# assumes $max is nonzero after the shift (i.e. the vector is not constant)
# — TODO confirm; a constant input would divide by zero here.
sub show784{
my $w = shift;
$w = $w->squeeze;
my $min = $w->minimum;
$w -= $min;
my $max = $w->maximum;
$w /= $max;
$w = $w->reshape(28,28);
imag2d $w;
}
# Standard logistic sigmoid: maps any real input into the open interval (0,1).
# E is a file-level constant (Euler's number) defined elsewhere in this file.
sub sigmoid{
    my $x = shift;
    my $denominator = 1 + E ** (-$x);
    return 1 / $denominator;
}
# Derivative of the logistic function, expressed via the sigmoid's output:
# if $s = sigmoid(z), then d/dz sigmoid(z) = $s * (1 - $s).
# As the original comments note, callers must pass the value that has
# already been run through sigmoid(), not the raw pre-activation.
sub logistic{
    my $s = shift;
    my $complement = 1 - $s;
    return $s * $complement;
}
view all matches for this distribution
view release on metacpan or search on metacpan
BackProp.pm view on Meta::CPAN
package AI::NeuralNet::BackProp::neuron;
use strict;
# Dummy constructor
# Dummy constructor: blesses an empty hash into the requested class.
# No state is initialized here; the neuron is populated later.
sub new {
    my $class = shift;
    my $self  = {};
    return bless $self, $class;
}
# Rounds floats to ints
# Round a float to the nearest integer (via sprintf "%.0f"); returns 0 if
# the conversion dies. Tolerates being called as a function, or as a
# class/instance method (a leading "AI::..."-prefixed invocant is dropped).
#
# Fix: the original used `try { return ... } catch { return 0 }`, but
# Try::Tiny is not loaded in this package, and even with Try::Tiny a
# `return` inside try/catch exits the anonymous block, not intr() — the
# code only worked by accident of try being the last statement. A block
# eval expresses the intent directly with no extra dependency.
sub intr {
	shift if(substr($_[0],0,4) eq 'AI::');
	my $rounded = eval { int(sprintf("%.0f", shift)) };
	return defined $rounded ? $rounded : 0;
}
# Receives input from other neurons. They must
# be registered as a synapse of this neuron to effectively
# input.
sub input {
my $self = shift;
my $sid = shift;
my $value = shift;
# We simply weight the value sent by the neuron. The neuron identifies itself to us
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/NeuralNet/Hopfield.pm view on Meta::CPAN
has 'matrix_rows' => ( is => 'rw', isa => 'Int');
has 'matrix_cols' => ( is => 'rw', isa => 'Int');
# Moose BUILD hook: after construction, allocate the backing sparse matrix
# sized by the caller-supplied {row} and {col} arguments and record the
# dimensions in the matrix_rows / matrix_cols attributes.
sub BUILD {
    my ($self, $args) = @_;
    my ($rows, $cols) = @{$args}{qw(row col)};
    $self->matrix( Math::SparseMatrix->new($rows, $cols) );
    $self->matrix_rows($rows);
    $self->matrix_cols($cols);
}
sub train() {
my $self = shift;
my @pattern = @_;
if ( ($#pattern + 1) != $self->matrix_rows) {
die "Can't train a pattern of size " . ($#pattern + 1) . " on a hopfield network of size " , $self->matrix_rows;
view all matches for this distribution
view release on metacpan or search on metacpan
examples/game_ai.pl view on Meta::CPAN
$knife,
$gun,
$enemies])];
}
sub prompt
{
my ($message,$domain) = @_;
my $valid_response = 0;
my $response;
do {
view all matches for this distribution
view release on metacpan or search on metacpan
examples/NeuralNet/pso_ann.pl view on Meta::CPAN
my $annInputs = "pso.dat";
my $expectedValue = 3.5; # this is the value that we want to train the ANN to produce (just like the example in t/PTO.t)
sub test_fitness_function(@) {
my (@arr) = (@_);
&writeAnnConfig($annConfig, $numInputs, $numHidden, $xferFunc, @arr);
my $netValue = &runANN($annConfig, $annInputs);
print "network value = $netValue\n";
view all matches for this distribution
view release on metacpan or search on metacpan
example/PSOTest-MultiCore.pl view on Meta::CPAN
#use AI::ParticleSwarmOptimization;
use AI::ParticleSwarmOptimization::MCE;
#use AI::ParticleSwarmOptimization::Pmap;
use Data::Dumper; $::Data::Dumper::Sortkeys = 1;
#=======================================================================
sub calcFit {
my @values = @_;
my $offset = int (-@values / 2);
my $sum;
select( undef, undef, undef, 0.01 ); # Simulation of heavy processing...
view all matches for this distribution
view release on metacpan or search on metacpan
Samples/PSOPlatTest.pl view on Meta::CPAN
printf ",# Fit %.5f at (%s) after %d iterations\n",
$fit, join (', ', map {sprintf '%.4f', $_} @values), $iters;
sub calcFit {
my @values = @_;
my $offset = int (-@values / 2);
my $sum;
$sum += ($_ - $offset++)**2 for @values;
view all matches for this distribution