examples/add.pl
$network->init();
# Taken from Mesh ex_add.pl
my $dataset = AI::NNFlex::Dataset->new([
[ 1, 1 ], [ 2 ],
[ 1, 2 ], [ 3 ],
[ 2, 2 ], [ 4 ],
[ 20, 20 ], [ 40 ],
[ 10, 10 ], [ 20 ],
]);
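# --- Not part of the original example: a minimal training-loop sketch. ---
# AI::NNFlex::Dataset objects are trained by calling learn() with the network;
# each call makes one pass over the data and returns the network error, so the
# loop below simply repeats until the error drops under an (arbitrary) threshold.
my $err = 10;
while ($err > 0.01) {
    $err = $dataset->learn($network);
    print "error: $err\n";
}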
examples/digits/deep_digits.pl
use FindBin qw($Bin);
chdir $Bin;
unless (-e "t10k-labels-idx1-ubyte.fits"){ die <<"NODATA";}
pull this data by running get_digits.sh
convert it to FITS by running idx_to_fits.pl
NODATA
my $images = rfits('t10k-images-idx3-ubyte.fits');
my $labels = rfits('t10k-labels-idx1-ubyte.fits');
my $y = identity(10)->range($labels->transpose)->sever; # one-hot encode the digit labels (0..9)
say 't10k data loaded';
my $nerl = AI::Nerl->new(
# type => image,dims=>[28,28],...
scale_input => 1/256,
);
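# --- Illustration (not in the original script): the one-hot idiom used above. ---
# identity(10)->range($labels->transpose) picks, for each label, the matching
# unit vector out of a 10x10 identity matrix. The same trick on a 3-class toy
# example, in plain PDL:
my $toy_labels = pdl(2, 0, 1);
my $toy_onehot = identity(3)->range($toy_labels->transpose)->sever;
print $toy_onehot; # one unit vector per label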
BackProp.pm
# Discard the class/package argument if called as a method.
shift if(substr($_[0],0,4) eq 'AI::');
# Absolute difference between $fa and $fb as a percentage of $fa, formatted
# to three decimal places.
my ($fa,$fb)=(shift,shift);
sprintf("%.3f",((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100);
}
# This sub will take an array ref of a data set, which it expects in this format:
# my @data_set = ( [ ...inputs... ], [ ...outputs ... ],
# ... rows ...
# );
#
# This sub returns the percentage of 'forgetfulness' when the net learns all the
# data in the set in order. Usage:
#
# learn_set(\@data,[ options ]);
#
# Options are passed in hash form and may be any options that $net->learn accepts.
#
# It returns a percentage string.
#
sub learn_set {
my $self = shift if(substr($_[0],0,4) eq 'AI::');
my $data = shift;
my %args = @_;
my $len = @{$data}/2 - 1; # index of the last input/output pair
my $inc = $args{inc};
my $max = $args{max};
my $error = $args{error};
my $p = (defined $args{flag}) ?$args{flag} :1;
my $row = (defined $args{pattern})?$args{pattern}*2+1:1;
my ($fa,$fb);
for my $x (0..$len) {
print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG);
my $str = $self->learn( $data->[$x*2], # The list of data to input to the net
$data->[$x*2+1], # The output desired
inc=>$inc, # The starting learning gradient
max=>$max, # The maximum num of loops allowed
error=>$error); # The maximum (%) error allowed
print $str if($AI::NeuralNet::BackProp::DEBUG);
}
my $res;
# If the expected output is still a plain string, convert it to the internal
# array form with crunch() (a string evaluates to 0 numerically; an array ref does not).
$data->[$row] = $self->crunch($data->[$row]) if($data->[$row] == 0);
if ($p) {
$res=pdiff($data->[$row],$self->run($data->[$row-1]));
} else {
$res=$data->[$row]->[0]-$self->run($data->[$row-1])->[0];
}
return $res;
}
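# Example usage (a sketch, not taken from the distribution): assuming a network
# already built with AI::NeuralNet::BackProp->new() as described in the POD,
# both learn_set() and learn_set_rand() (below) accept the same arguments:
#
#   my @set = (
#       [ 1, 1 ], [ 2 ],
#       [ 2, 2 ], [ 4 ],
#   );
#   my $forgetfulness = $net->learn_set(\@set,
#       inc   => 0.2,   # starting learning gradient
#       max   => 500,   # maximum loops per pattern
#       error => 0.01,  # maximum (%) error allowed
#   );
#   print "Net forgot $forgetfulness% of the first pattern.\n";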
# This sub will take an array ref of a data set, which it expects in this format:
# my @data_set = ( [ ...inputs... ], [ ...outputs ... ],
# ... rows ...
# );
#
# This sub returns the percentage of 'forgetfulness' when the net learns all the
# data in the set in RANDOM order. Usage:
#
# learn_set_rand(\@data,[ options ]);
#
# Options are passed in hash form and may be any options that $net->learn accepts.
#
# It returns a true value.
#
sub learn_set_rand {
my $self = shift if(substr($_[0],0,4) eq 'AI::');
my $data = shift;
my %args = @_;
my $len = @{$data}/2 - 1; # index of the last input/output pair
my $inc = $args{inc};
my $max = $args{max};
my $error = $args{error};
my @learned;
my $learned_count = 0;
while($learned_count <= $len) {
my $x = int(rand($len+1)); # pick a random pair index, 0..$len
next if($learned[$x]); # already trained on this pair; pick another index
$learned[$x]=1;
$learned_count++;
print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG);
my $str = $self->learn($data->[$x*2], # The list of data to input to the net
$data->[$x*2+1], # The output desired
inc=>$inc, # The starting learning gradient
max=>$max, # The maximum num of loops allowed
error=>$error); # The maximum (%) error allowed
print $str if($AI::NeuralNet::BackProp::DEBUG);
}
README
t/00-load.t
t/manifest.t
t/pod-coverage.t
t/pod.t
META.yml Module YAML meta-data (added by MakeMaker)
META.json Module JSON meta-data (added by MakeMaker)
Makefile.PL
MANIFEST
README
t/AI-NeuralNet-Kohonen-Visual.t
lib/AI/NeuralNet/Kohonen/Visual.pm
META.yml Module meta-data (added by MakeMaker)
lib/AI/NeuralNet/Kohonen.pm
1 .5 0 orange
1 .5 1 pink"
);
$_->train;
$_->save_file('mydata.txt');
exit;
=head1 DESCRIPTION
An illustrative implementation of Kohonen's Self-organising Feature Maps (SOMs)
# First create the individual nodes
for my $x (0..$tmp-1) {
$self->{mesh}->[$x] = AI::NeuralNet::Mesh::node->new($self);
}
# Get an instance of an output (data collector) node
$self->{output} = AI::NeuralNet::Mesh::output->new($self);
# Connect the output layer to the data collector
for my $x (0..$outputs-1) {
$self->{mesh}->[$tmp-$outputs+$x]->add_output_node($self->{output});
}
# Now we use the _c() method to connect the layers together.
lib/AI/NeuralNet/SOM.pm
[ 0, 4, -3]);
my @mes = $nn->train (30, ...); # learn about the smallest errors
# during training
print $nn->as_data; # dump the raw data
print $nn->as_string; # dump a formatted string representation
use AI::NeuralNet::SOM::Torus;
# similar to above
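For context, a minimal end-to-end sketch assembled from the synopsis above (the Rect
topology and the training vectors are illustrative, not part of this excerpt):
use AI::NeuralNet::SOM::Rect;
my $nn = AI::NeuralNet::SOM::Rect->new(
    output_dim => "5x6", # width x height of the map
    input_dim  => 3);    # length of each training vector
$nn->initialize;
my @mes = $nn->train(30, [ 3, 2, 4 ], [ -1, -1, -1 ], [ 0, 4, -3 ]);
print $nn->as_data;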
examples/game_ai.pl
return $response;
}
sub display_result
{
my ($net,@data) = @_;
my $result = $net->winner(\@data); # winner() returns the index of the highest-valued output neuron
my @health = qw/Poor Average Good/;
my @knife = qw/No Yes/;
my @gun = qw/No Yes/;
printf $format,
$health[$_[1]],
lib/AI/Ollama/Client.pm
=head2 C<< deleteModel >>
my $res = $client->deleteModel()->get;
Delete a model and its data.
=cut
=head2 C<< generateEmbedding >>
Makefile.PL
MANIFEST
README
t/PBDD.t
XS.xs
META.yml Module YAML meta-data (added by MakeMaker)
META.json Module JSON meta-data (added by MakeMaker)
examples/NeuralNet/pso_ann.pl
print ANN "\n";
close(ANN);
}
sub runANN($$) {
my ($configFile, $dataFile) = @_;
# Run the external ann_compute helper on the given config/data files and
# capture its single-line output value.
my $networkValue = `ann_compute $configFile $dataFile`;
chomp($networkValue);
return $networkValue;
}
distributed under the terms of this Lesser General Public
License (also called "this License"). Each licensee is
addressed as "you".
A "library" means a collection of software functions and/or
data prepared so as to be conveniently linked with
application programs (which use some of those functions
and data) to form executables.
The "Library", below, refers to any such software library or
work which has been distributed under these terms. A "work
based on the Library" means either the Library or any
derivative work under copyright law: that is to say, a work
t/06-setstart.t
t/07-dastar.t
examples/snake_labirint.pl
Benchmark/perl-vs-xs.pl
META.yml Module meta-data (added by MakeMaker)
lib/AI/Pathfinding/OptimizeMultiple.pm
use Scalar::Util qw/ blessed /;
has chosen_scans => ( isa => 'ArrayRef', is => 'rw' );
has _iter_idx => ( isa => 'Int', is => 'rw', default => sub { 0; }, );
has _num_boards => ( isa => 'Int', is => 'ro', init_arg => 'num_boards', );
has _orig_scans_data => ( isa => 'PDL', is => 'rw' );
has _optimize_for => ( isa => 'Str', is => 'ro', init_arg => 'optimize_for', );
has _scans_data => ( isa => 'PDL', is => 'rw' );
has _selected_scans =>
( isa => 'ArrayRef', is => 'ro', init_arg => 'selected_scans', );
has _status => ( isa => 'Str', is => 'rw' );
has _quotas => ( isa => 'ArrayRef[Int]', is => 'ro', init_arg => 'quotas' );
has _total_boards_solved => ( isa => 'Int', is => 'rw' );
has _total_iters => ( is => 'rw' );
has _trace_cb =>
( isa => 'Maybe[CodeRef]', is => 'ro', init_arg => 'trace_cb' );
has _scans_meta_data => ( isa => 'ArrayRef', is => 'ro', init_arg => 'scans' );
has _scans_iters_pdls =>
( isa => 'HashRef', is => 'rw', init_arg => 'scans_iters_pdls' );
has _stats_factors => (
isa => 'HashRef',
is => 'ro',
lib/AI/Perceptron/Simple.pm
} );
# train
$nerve->tame( ... );
$nerve->exercise( ... );
$nerve->train( $training_data_csv, $expected_column_name, $save_nerve_to );
# or
$nerve->train(
$training_data_csv, $expected_column_name, $save_nerve_to,
$show_progress, $identifier); # these two parameters must go together
# validate
$nerve->take_lab_test( ... );
$nerve->take_mock_exam( ... );
# fill results to original file
$nerve->validate( {
stimuli_validate => $validation_data_csv,
predicted_column_index => 4,
} );
# or
# fill results to a new file
$nerve->validate( {
stimuli_validate => $validation_data_csv,
predicted_column_index => 4,
results_write_to => $new_csv
} );
alien_packages/tds/base64.proto
option cc_enable_arenas = true;
option java_outer_classname = "base64";
option java_multiple_files = true;
option java_package = "org.tds";
// Protocol buffer to encode/decode base64 data for JSON transport.
// Protocol Buffers encode bytes to base64 when transforming to JSON.
message Base64Proto {
repeated bytes base64 = 1;
};
lib/AI/PredictionClient.pm
If you don't have a server to talk to, but want to check that everything else is
working, use the --debug_loopback_interface option. It provides a sample response
you can test the client with.
The module can use the same loopback interface for debugging your bespoke clients.
The --debug_verbose option will dump the data structures of the request and response to allow
you to see what is going on.
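For example, the two debug switches can be combined with the built-in camel test image
for a loopback run (an illustrative command, not taken from the distribution's docs):
Inception.pl --debug_camel --debug_loopback_interface --debug_verbose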
=head3 The response from a live server to the camel image looks like this:
Inception.pl --image_file=zzzzz --debug_camel --host=107.170.xx.xxx --port=9000
t/annealing_tests.t
when ($Text::BSV::Exception::IO_ERROR) {
say STDERR "Couldn't open $DQ$bsv_file_path$DQ for reading.";
exit(1);
}
when ($Text::BSV::Exception::INVALID_DATA_FORMAT) {
say STDERR "Invalid BSV data: " . $exception->get_message();
exit(1);
}
default {
say STDERR $exception->get_message();
exit(1);
} # end when
} # end given
} # end if
# Generate a list of distances for each probability from the data in the
# BSV file:
my $field_names = $bsv_file_reader->get_field_names();
my @mapped_distances; # indexes 2-5 = Probability constants;
# values = references to number arrays
Makefile.PL
MANIFEST This list of files
README
samples/termites.pl
t/AI-Termites.t
META.yml Module meta-data (added by MakeMaker)
examples/basic.pl
use 5.010; # for say()
use AI::XGBoost qw(train);
use aliased 'AI::XGBoost::DMatrix'; # lets the example refer to the matrix class as plain DMatrix
# We are going to solve a binary classification problem:
# Mushroom poisonous or not
my $train_data = DMatrix->From(file => 'agaricus.txt.train');
my $test_data = DMatrix->From(file => 'agaricus.txt.test');
# With XGBoost we can solve this problem using 'gbtree' booster
# and as loss function a logistic regression 'binary:logistic'
# (Gradient Boosting Regression Tree)
# XGBoost Tree Booster has a lot of parameters that we can tune
# (https://github.com/dmlc/xgboost/blob/master/doc/parameter.md)
my $booster = train(data => $train_data, number_of_rounds => 10, params => {
objective => 'binary:logistic',
eta => 1.0,
max_depth => 2,
silent => 1
});
# For binary classification, predictions are probability confidence scores in [0, 1]
# indicating that the label is positive (1 in the first column of agaricus.txt.test)
my $predictions = $booster->predict(data => $test_data);
say join "\n", @$predictions[0 .. 10];
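# --- Not part of the original example: thresholding the probability scores at 0.5
# (an arbitrary, illustrative cutoff) turns them into hard 0/1 labels.
my @predicted_labels = map { $_ > 0.5 ? 1 : 0 } @$predictions;
say join "\n", @predicted_labels[0 .. 10];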
MANIFEST
ppport.h
README
t/AIIA-GMT.t
lib/AIIA/GMT.pm
META.yml Module meta-data (added by MakeMaker)