lib/AI/Nerl/Network.pm
=head2 train($x,$y, %params)
Train with backpropagation using $x as input & $y as target.
$x and $y are both pdls. If there are multiple cases, each one will
occupy a column (the second dimension) of the pdl. If your dimensions are off,
you will get a PDL error of some sort.
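For example (a sketch; the network construction is elided, and the option value is illustrative):

    use PDL;
    # four hypothetical 2-input/1-output cases; each inner arrayref is one
    # case, so the cases lie along the pdl's second dimension
    my $x = pdl([ [0,0], [0,1], [1,0], [1,1] ]);
    my $y = pdl([ [0],   [1],   [1],   [0]   ]);
    $net->train($x, $y, passes => 10);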
=head3 %params
=head4 passes
BackProp.pm
package AI::NeuralNet::BackProp;
use Benchmark;
use strict;
# Returns the number of elements in an array ref, undef on error
sub _FETCHSIZE {
    my $a=$_[0];
    my ($b,$x);
    # a stringified array ref looks like "ARRAY(0x...)"
    return undef if(substr($a,0,5) ne "ARRAY");
    foreach $b (@{$a}) { $x++ };
    return $x;
}
BackProp.pm
my $data = shift;
my %args = @_;
my $len = $#{$data}/2-1;
my $inc = $args{inc};
my $max = $args{max};
my $error = $args{error};
my $p = (defined $args{flag}) ?$args{flag} :1;
my $row = (defined $args{pattern})?$args{pattern}*2+1:1;
my ($fa,$fb);
for my $x (0..$len) {
print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG);
my $str = $self->learn( $data->[$x*2], # The list of data to input to the net
$data->[$x*2+1], # The output desired
inc=>$inc, # The starting learning gradient
max=>$max, # The maximum num of loops allowed
error=>$error); # The maximum (%) error allowed
print $str if($AI::NeuralNet::BackProp::DEBUG);
}
my $res;
BackProp.pm
my $data = shift;
my %args = @_;
my $len = $#{$data}/2-1;
my $inc = $args{inc};
my $max = $args{max};
my $error = $args{error};
my @learned;
while(1) {
_GET_X:
my $x=$self->intr(rand()*$len);
goto _GET_X if($learned[$x]);
BackProp.pm
print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG);
my $str = $self->learn($data->[$x*2], # The list of data to input to the net
$data->[$x*2+1], # The output desired
inc=>$inc, # The starting learning gradient
max=>$max, # The maximum num of loops allowed
error=>$error); # The maximum (%) error allowed
print $str if($AI::NeuralNet::BackProp::DEBUG);
}
return 1;
BackProp.pm
my %args = @_;
my $inc = $args{inc} || 0.20;
my $max = $args{max} || 1024;
my $_mx = intr($max/10);
my $_mi = 0;
my $error = ($args{error}>-1 && defined $args{error}) ? $args{error} : -1;
my $div = $self->{DIV};
my $size = $self->{SIZE};
my $out = $self->{OUT};
my $divide = AI::NeuralNet::BackProp->intr($div/$out);
my ($a,$b,$y,$flag,$map,$loop,$diff,$pattern,$value);
BackProp.pm
$loop = 0;
my $ldiff = 0;
my $dinc = 0.0001;
my $cdiff = 0;
$diff = 100;
$error = ($error>-1)?$error:-1;
# $flag only goes high when all neurons in output map compare exactly with
# desired result map or $max loops is reached
#
while(!$flag && ($max ? $loop<$max : 1)) {
BackProp.pm
}
# Save last $diff
$ldiff = $diff;
# This catches a max error argument and handles it
if(!($error>-1 ? $diff>$error : 1)) {
$flag=1;
last;
}
# Debugging
AI::NeuralNet::BackProp::out4 "Difference: $diff\%\t Increment: $inc\tMax Error: $error\%\n";
AI::NeuralNet::BackProp::out1 "\n\nMapping results from $map:\n";
# This loop compares each element of the output map with the desired result map.
# If they don't match exactly, we call weight() on the offending output neuron
# and tell it what it should be aiming for, and then the offending neuron will
BackProp.pm
Options should be written in hash form. There are three options:
    inc   => $learning_gradient
    max   => $maximum_iterations
    error => $maximum_allowable_percentage_of_error
$learning_gradient is an optional value used to adjust the weights of the internal
connections. If $learning_gradient is omitted, it defaults to 0.20.
$maximum_iterations is the maximum number of iterations the loop should do.
It defaults to 1024. Set it to 0 if you never want the loop to quit before
the pattern is perfectly learned.
$maximum_allowable_percentage_of_error is the maximum allowable error. If
this is set, then learn() will return when the percentage difference between the
actual results and desired results falls below $maximum_allowable_percentage_of_error.
If you do not include 'error', or $maximum_allowable_percentage_of_error is set to -1,
then learn() will not return until it gets an exact match for the desired result OR it
reaches $maximum_iterations.
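For example (a sketch; the network and the training pair are hypothetical):

    my $str = $net->learn([ 0, 1 ], [ 1 ],
                          inc   => 0.20,   # learning gradient
                          max   => 1024,   # maximum number of loops
                          error => 5);     # return once error falls below 5%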
=item $net->learn_set(\@set, [ options ]);
BackProp.pm
and 0.42, where no 0s were allowed because the learning would never finish completely
with a 0 in the input.
Yet with the allowance of 0s, one of two factors is required to learn correctly. Either you
must enable randomness with $net->random(0.0001) (any value other than 0 works; see random() ),
or you must set an error minimum with the 'error => 5' option (you can use some other error value
as well).
When randomness is enabled (that is, when you call random() with a value other than 0), it injects
a bit of randomness into the output of every neuron in the network, except for the input and output
neurons. The randomness is injected with rand()*$rand, where $rand is the value that was
BackProp.pm
bad to have a pure 0 internally, because the weights cannot change a 0: when multiplied by a 0, the
product stays a 0. Yet when a weight is multiplied by 0.00001, eventually, with enough weight, it will
be able to learn. With a 0 value instead of 0.00001 or the like, it would never be able
to add enough weight to get anything other than a 0.
The second option for allowing 0s is to enable a maximum error with the 'error' option in
learn(), learn_set(), and learn_set_rand(). This allows the network to not worry about
learning an output perfectly.
For accuracy reasons, it is recommended that you work with 0s using the random() method.
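A minimal sketch of the first approach (the network and data are hypothetical):

    $net->random(0.0001);                       # inject tiny noise into hidden neurons
    $net->learn([ 0, 0 ], [ 1 ], error => 5);   # 0s in the input can now be learned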
examples/eigenvector_initialization.pl
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
$nn->initialize; # random
my @mes = $nn->train ($epochs, @vs);
warn "random: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # constant initialisation
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
$nn->initialize ($vs[-1]);
my @mes = $nn->train ($epochs, @vs);
warn "constant: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # eigenvector initialisation
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
examples/eigenvector_initialization.pl
}
$nn->initialize (@training_vectors[0..0]); # take only the biggest ones (the eigenvalues are big, actually)
#warn $nn->as_string;
my @mes = $nn->train ($epochs, @vs);
warn "eigen: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
__END__
lib/AI/NeuralNet/Kohonen.pm
sub _gauss_weight { my ($r, $sigma) = (shift,shift);
    # Gaussian neighbourhood kernel: falls off as exp(-r^2 / (2*sigma^2))
    return exp( -($r**2) / (2 * $sigma**2) );
}
=head1 PUBLIC METHOD quantise_error
Returns the quantisation error for either the supplied points,
or those in the C<input> field.
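For example, given a trained map C<$som>:

    my $qe  = $som->quantise_error;             # error over the 'input' field
    my $qe2 = $som->quantise_error(\@targets);  # error over supplied points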
=cut
sub quantise_error { my ($self,$targets) = (shift,shift);
my $qerror=0;
if (not defined $targets){
$targets = $self->{input};
} else {
foreach (@$targets){
if (not ref $_ or ref $_ ne 'ARRAY'){
lib/AI/NeuralNet/Kohonen.pm
confess "target input and map dimensions differ";
}
for my $i (0..$#bmu){
foreach my $w (0..$self->{weight_dim}){
$qerror += $targets->[$i]->{values}->[$w]
- $self->{map}->[$bmu[$i]->[1]]->[$bmu[$i]->[2]]->{weight}->[$w];
}
}
$qerror /= scalar @$targets;
return $qerror;
}
=head1 PRIVATE METHOD _add_input_from_str
my $layers = shift;
# Looks like we got ourselves a layer specs array
if(ref($layers) eq "ARRAY") {
if($self->{total_layers}!=$#{$layers}) {
$self->{error} = "extend(): Cannot add new layers. Create a new network to add layers.\n";
return undef;
}
if(ref($layers->[0]) eq "HASH") {
$self->{total_nodes} = 0;
$self->{inputs} = $layers->[0]->{nodes};
for (0..$self->{total_layers}){$self->extend_layer($_,$layers->[$_])}
$self->{layers} = $layers;
for (0..$self->{total_layers}){$self->{total_nodes}+= $self->{layers}->[$_]}
}
} else {
$self->{error} = "extend(): Invalid argument type.\n";
return undef;
}
return 1;
}
sub extend_layer {
my $self = shift;
my $layer = shift || 0;
my $specs = shift;
if(!$specs) {
$self->{error} = "extend_layer(): You must provide specs to extend layer $layer with.\n";
return undef;
}
if(ref($specs) eq "HASH") {
$self->activation($layer,$specs->{activation}) if($specs->{activation});
$self->threshold($layer,$specs->{threshold}) if($specs->{threshold});
my $n = 0;
my $more = $nodes - $self->{layers}->[$layer] - 1;
d("Checking on extending layer $layer to $nodes nodes (check:$self->{layers}->[$layer]).\n",9);
return 1 if ($nodes == $self->{layers}->[$layer]);
if ($self->{layers}->[$layer]>$nodes) {
$self->{error} = "add_nodes(): I cannot remove nodes from the network with this version of my module. You must create a new network to remove nodes.\n";
return undef;
}
d("Extending layer $layer by $more.\n",9);
for (0..$more){$self->{mesh}->[$#{$self->{mesh}}+1]=AI::NeuralNet::Mesh::node->new($self)}
for(0..$layer-2){$n+=$self->{layers}->[$_]}
my $outputs = shift; # target outputs
my %args = @_; # get args into hash
my $inc = $args{inc} || 0.002; # learning gradient
my $max = $args{max} || 1024; # max iterations
my $degrade = $args{degrade} || 0; # enable gradient degrading
my $error = ($args{error}>-1 && defined $args{error}) ? $args{error} : -1;
my $dinc = 0.0002; # amount to adjust gradient by
my $diff = 100; # error margin between results
my $start = new Benchmark;
$inputs = $self->crunch($inputs) if($inputs == 0);
$outputs = $self->crunch($outputs) if($outputs == 0);
my ($flag,$ldiff,$cdiff,$_mi,$loop,$y);
while(!$flag && ($max ? $loop<$max : 1)) {
my $b = new Benchmark;
my $got = $self->run($inputs);
$diff = pdiff($got,$outputs);
$flag = 1;
if(($error>-1 ? $diff<$error : 0) || !$diff) {
$flag=1;
last;
}
if($degrade) {
my $data = shift;
my %args = @_;
my $len = $#{$data}/2;
my $inc = $args{inc};
my $max = $args{max};
my $error = $args{error};
my $degrade = $args{degrade};
my $p = (defined $args{flag}) ?$args{flag} :1;
my $row = (defined $args{row}) ?$args{row}+1:1;
my $leave = (defined $args{leave})?$args{leave}:0;
for my $x (0..$len-$leave) {
d("Learning set $x...\n",4);
my $str = $self->learn( $data->[$x*2],
$data->[$x*2+1],
inc=>$inc,
max=>$max,
error=>$error,
degrade=>$degrade);
}
if ($p) {
return pdiff($data->[$row],$self->run($data->[$row-1]));
}
close(FILE);
if(!(-f $file)) {
$self->{error} = "Error writing to \"$file\".";
return undef;
}
return $self;
}
my ($a,$b) = split /=/, $line;
$db{$a}=$b;
}
if(!$db{"header"}) {
$self->{error} = "Invalid format.";
return undef;
}
return $self->load_old($file) if($self->version($db{"header"})<0.21);
my $self = shift;
my $file = shift;
my $load_flag = shift;
if(!(-f $file)) {
$self->{error} = "File \"$file\" does not exist.";
return undef;
}
open(FILE,"$file");
my @lines=<FILE>;
my ($a,$b) = split /=/, $line;
$db{$a}=$b;
}
if(!$db{"header"}) {
$self->{error} = "Invalid format.";
return undef;
}
if($load_flag) {
undef $self;
sub load_pcx {
    my $self = shift;
    my $file = shift;
    eval('use PCX::Loader');
    if($@) {
        $self->{error}="Cannot load PCX::Loader module: $@";
        return undef;
    }
    return PCX::Loader->new($self,$file);
}
sub crunched {
my $self = shift;
for my $a (0..$self->{_crunched}->{_length}-1) {
return $a+1 if($self->{_crunched}->{list}->[$a] eq $_[0]);
}
$self->{error} = "Word \"$_[0]\" not found.";
return undef;
}
# Alias for crunched(), above
sub word { crunched(@_) }
# Same as benchmark()
sub benchmarked {
benchmark(shift);
}
# Return the last error in the mesh, or undef if no error.
sub error {
my $self = shift;
return undef if !$self->{error};
chomp($self->{error});
return $self->{error}."\n";
}
# Used to format array ref into columns
# Usage:
# join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
well as basic package inheritance, you can simulate many different
types of neural network structures with very little new code written
by you.
In this module I have included a more accurate form of "learning" for the
mesh. This form performs descent toward a local error minimum (0) on a
directional delta, rather than the desired value for that node. This allows
for better and more accurate results with larger datasets. This module also
uses a simpler recursion technique which, surprisingly, is more accurate than
the original technique that I've used in other ANNs.
Options should be written in hash form. There are four options:
    inc     => $learning_gradient
    max     => $maximum_iterations
    error   => $maximum_allowable_percentage_of_error
    degrade => $degrade_increment_flag
$learning_gradient is an optional value used to adjust the weights of the internal
connections. If $learning_gradient is omitted, it defaults to 0.002.
$maximum_iterations is the maximum number of iterations the loop should do.
It defaults to 1024. Set it to 0 if you never want the loop to quit before
the pattern is perfectly learned.
$maximum_allowable_percentage_of_error is the maximum allowable error. If
this is set, then learn() will return when the percentage difference between the
actual results and desired results falls below $maximum_allowable_percentage_of_error.
If you do not include 'error', or $maximum_allowable_percentage_of_error is set to -1,
then learn() will not return until it gets an exact match for the desired result OR it
reaches $maximum_iterations.
$degrade_increment_flag is a simple flag used to allow/disallow increment degrading
during learning, based on a product of the error difference with several other factors.
$degrade_increment_flag is off by default. Setting $degrade_increment_flag to a true
value turns increment degrading on.
In previous module releases $degrade_increment_flag was not used, as increment degrading
was always on. In this release I have looked at several other network types as well
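For example (a sketch; the data is hypothetical and the option values are just the documented defaults):

    $net->learn([ 0.5, 0.9 ], [ 1 ],
                inc     => 0.002,   # learning gradient
                max     => 1024,    # maximum iterations
                error   => -1,      # no maximum-error cutoff
                degrade => 1);      # enable increment degrading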
level 4.
Level 3 ($level = 3) : I don't think I included any level 3 debugs in this version.
Level 4 ($level = 4) : This level is the one I use most. It is only used during learning. It
displays the current error (difference between actual outputs and the target outputs you
asked for), as well as the current loop number and the benchmark time for the last learn cycle.
Also printed are the actual outputs and the target outputs below the benchmark times.
Level 12 ($level = 12) : Level 12 prints a dot (period) [.] after each learning loop is
complete. This is useful for letting the user know that stuff is happening, but without
This uses a simple flat-file text storage format, and therefore the network files should
be fairly portable.
This method will return undef if there was a problem with writing the file. If there is an
error, it will set the internal error message, which you can retrieve with the error() method,
below.
If there were no errors, it will return a reference to $net.
=item $net->load($filename);
This will load from disk any network saved by save() and completely restore the internal
state at the point at which save() was called.
If the file is of an invalid file type, then load() will
return undef. Use the error() method, below, to print the error message.
If there were no errors, it will return a reference to $net.
UPDATE: $filename can now be a newline-separated set of mesh data. This enables you
to do $net->load(join("\n",<DATA>)) and other fun things. I added this mainly
for a demo I'm writing but am not quite done with yet. So, Cheers!
=item $net->crunched($word);
This will return undef if the word is not in the internal crunch list, or it will return the
index of the word if it exists in the crunch list.
If the word is not in the list, it will set the internal error value with a text message
that you can retrieve with the error() method, below.
=item $net->word($word);
A function alias for crunched().
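For example (assuming the net has already crunch()ed some text):

    my $index = $net->crunched("hello")
        or warn $net->error;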
network from hanging on a 0 value. When called with no arguments, it returns the current
const. value. It defaults to 0.0001 on a newly-created network. The run const. value
is preserved across load() and save() calls.
=item $net->error();
Returns the last error message which occurred in the mesh, or undef if no errors have
occurred.
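For example:

    $net->save("mesh.net") or die $net->error;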
=item $net->load_pcx($filename);
NOTE: To use this function, you must have PCX::Loader installed. If you do not have
PCX::Loader installed, it will return undef and store an error for you to retrieve with
the error() method, below.
This is a treat... this routine will load a PCX-format file (yah, I know ... ancient
format ... but it is the only one I could find specs for to write it in Perl. If
anyone can get specs for any other formats, or could write a loader for them, I
would be very grateful!) Anyways, a PCX-format file that is exactly 320x200 with 8 bits
name a few.
One of the first impressive neural networks was NetTalk, which read in ASCII
text and correctly pronounced the words (producing phonemes which drove a
speech chip), even those it had never seen before. Designed by Johns Hopkins
biophysicist Terry Sejnowski and Charles Rosenberg of Princeton in 1986,
this application made the Backpropagation training algorithm famous. Using
the same paradigm, a neural network has been trained to classify sonar
returns from undersea mines and rocks. This classifier, designed by
Sejnowski and R. Paul Gorman, performed better than a nearest-neighbor
classifier.
Neural networks are poor at precise calculations and serial processing. They
are also unable to predict or recognize anything that does not inherently
contain some sort of pattern. For example, they cannot predict the lottery,
since this is a random process. It is unlikely that a neural network could
be built which has the capacity to think as well as a person does, for two
reasons: neural networks are terrible at deduction, or logical thinking, and
the human brain is just too complex to simulate completely. Also, some
problems are too difficult for present technology. Real vision, for
example, is a long way off.
In short, Neural Networks are poor at precise calculations, but good at
examples/eigenvector_initialization.pl
my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
input_dim => $dim);
$nn->initialize; # random
my @mes = $nn->train ($epochs, @vs);
warn "random: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # constant initialisation
my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
input_dim => $dim);
$nn->initialize ($vs[-1]);
my @mes = $nn->train ($epochs, @vs);
warn "constant: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # eigenvector initialisation
my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
input_dim => $dim);
examples/eigenvector_initialization.pl
}
$nn->initialize (@training_vectors[0..0]); # take only the biggest ones (the eigenvalues are big, actually)
#warn $nn->as_string;
my @mes = $nn->train ($epochs, @vs);
warn "eigen: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
__END__
lib/AI/NeuralNet/Simple.pm
c_infer( $self->handle, $data );
}
sub winner {
# returns index of largest value in inferred answer
my ( $self, $data ) = @_;
my $arrayref = c_infer( $self->handle, $data );
my $largest = 0;
for ( 0 .. $#$arrayref ) {
lib/AI/NeuralNet/Simple.pm
=head1 DESCRIPTION
=head2 The Disclaimer
Please note that the following information is terribly incomplete. That's
deliberate. Anyone familiar with neural networks is going to laugh themselves
silly at how simplistic the following information is and the astute reader will
notice that I've raised far more questions than I've answered.
So why am I doing this? Because I'm giving I<just enough> information for
lib/AI/NeuralNet/Simple.pm
1 1    0 1
1 0    0 1
0 1    0 1
0 0    1 0
The type of network we use is a forward-feed back error propagation network,
referred to as a back-propagation network, for short. The way it works is
simple. When we feed in our input, it travels from the input to hidden layers
and then to the output layers. This is the "feed forward" part. We then
compare the output to the expected results and measure how far off we are. We
then adjust the weights on the "output to hidden" synapses, measure the error
on the hidden nodes and then adjust the weights on the "hidden to input"
synapses. This is what is referred to as "back error propagation".
We continue this process until the amount of error is small enough that we are
satisfied. In reality, we will rarely if ever get precise results from the
network, but we learn various strategies to interpret the results. In the
example above, we use a "winner takes all" strategy. Whichever of the output
nodes has the greatest value will be the "winner", and thus the answer.
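A minimal sketch of that workflow (the layer sizes and iteration count are arbitrary choices; the four training pairs come from the table above):

    use AI::NeuralNet::Simple;
    my $net = AI::NeuralNet::Simple->new(2, 10, 2);  # 2 inputs, 10 hidden, 2 outputs
    for (1 .. 10_000) {
        $net->train([1,1], [0,1]);
        $net->train([1,0], [0,1]);
        $net->train([0,1], [0,1]);
        $net->train([0,0], [1,0]);
    }
    print $net->winner([1,1]);  # likely prints "1"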
lib/AI/NeuralNet/Simple.pm
With more complete neural net packages, you can also pick which activation
functions you wish to use and the "learn rate" of the neurons.
=item 2 Training
This involves feeding the neural network enough data until the error rate is
low enough to be acceptable. Often we have a large data set and merely keep
iterating until the desired error rate is achieved.
=item 3 Measuring results
One frequent mistake made with neural networks is failing to test the network
with different data from the training data. It's quite possible for a
lib/AI/NeuralNet/Simple.pm
That clearly has the second output item being close to 1, so as a helper method
for use with a winner take all strategy, we have ...
=head2 C<winner(\@input)>
This method returns the index of the highest value from inferred results:
print $net->winner([1,1]); # will likely print "1"
For a more comprehensive example of how this is used, see the
"examples/game_ai.pl" program.
lib/AI/NeuralNet/Simple.pm
"Naturally Intelligent Systems", by Maureen Caudill and Charles Butler,
copyright (c) 1990 by Massachussetts Institute of Technology.
This book is a decent introduction to neural networks in general. The forward
feed back error propogation is but one of many types.
=head1 AUTHORS
Curtis "Ovid" Poe, C<ovid [at] cpan [dot] org>
Multiple network support, persistence, export of MSE (mean squared error),
training until MSE below a given threshold and customization of the
activation function added by Raphael Manfredi C<Raphael_Manfredi@pobox.com>.
=head1 COPYRIGHT AND LICENSE
lib/AI/Ollama/Client/Impl.pm
$res->done($resp);
} elsif( $resp->code == 404 ) {
# Blob was not found
$res->done($resp);
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
$self->emit(request => $tx);
$tx = $self->ua->start_p($tx)->then(sub($tx) {
$r1->resolve( $tx );
undef $r1;
})->catch(sub($err) {
$self->emit(response => $tx, $err);
$r1->fail( $err => $tx );
undef $r1;
});
return $res
}
lib/AI/Ollama/Client/Impl.pm
# Should we validate using OpenAPI::Modern here?!
if( $resp->code == 201 ) {
# Blob was successfully created
$res->done($resp);
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
$self->emit(request => $tx);
$tx = $self->ua->start_p($tx)->then(sub($tx) {
$r1->resolve( $tx );
undef $r1;
})->catch(sub($err) {
$self->emit(response => $tx, $err);
$r1->fail( $err => $tx );
undef $r1;
});
return $res
}
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d", $resp->code ), $resp);
}
});
my $_tx;
lib/AI/Ollama/Client/Impl.pm
# Should we validate using OpenAPI::Modern here?!
if( $resp->code == 200 ) {
# Successful operation.
$res->done($resp);
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
$self->emit(request => $tx);
$tx = $self->ua->start_p($tx)->then(sub($tx) {
$r1->resolve( $tx );
undef $r1;
})->catch(sub($err) {
$self->emit(response => $tx, $err);
$r1->fail( $err => $tx );
undef $r1;
});
return $res
}
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d", $resp->code ), $resp);
}
});
my $_tx;
lib/AI/Ollama/Client/Impl.pm
# Should we validate using OpenAPI::Modern here?!
if( $resp->code == 200 ) {
# Successful operation.
$res->done($resp);
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
$self->emit(request => $tx);
$tx = $self->ua->start_p($tx)->then(sub($tx) {
$r1->resolve( $tx );
undef $r1;
})->catch(sub($err) {
$self->emit(response => $tx, $err);
$r1->fail( $err => $tx );
undef $r1;
});
return $res
}
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
$self->emit(request => $tx);
$tx = $self->ua->start_p($tx)->then(sub($tx) {
$r1->resolve( $tx );
undef $r1;
})->catch(sub($err) {
$self->emit(response => $tx, $err);
$r1->fail( $err => $tx );
undef $r1;
});
return $res
}
lib/AI/Ollama/Client/Impl.pm
If C<false> the response will be returned as a single response object, otherwise the response will be streamed as a series of objects.
=item C<< system >>
The system prompt (overrides what is defined in the Modelfile).
=item C<< template >>
The full prompt or prompt template (overrides what is defined in the Modelfile).
=back
Returns a L<< AI::Ollama::GenerateCompletionResponse >> on success.
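A rough sketch of a call (the server URL and model name are assumptions, not defaults of this module):

    use AI::Ollama::Client;
    my $client = AI::Ollama::Client->new(
        server => 'http://127.0.0.1:11434/api',
    );
    my $res = $client->generateCompletion(
        model  => 'llama2',
        prompt => 'Why is the sky blue?',
    )->get;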
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d", $resp->code ), $resp);
}
});
my $_tx;
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d", $resp->code ), $resp);
}
});
my $_tx;
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
$self->emit(request => $tx);
$tx = $self->ua->start_p($tx)->then(sub($tx) {
$r1->resolve( $tx );
undef $r1;
})->catch(sub($err) {
$self->emit(response => $tx, $err);
$r1->fail( $err => $tx );
undef $r1;
});
return $res
}
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
$self->emit(request => $tx);
$tx = $self->ua->start_p($tx)->then(sub($tx) {
$r1->resolve( $tx );
undef $r1;
})->catch(sub($err) {
$self->emit(response => $tx, $err);
$r1->fail( $err => $tx );
undef $r1;
});
return $res
}
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
$self->emit(request => $tx);
$tx = $self->ua->start_p($tx)->then(sub($tx) {
$r1->resolve( $tx );
undef $r1;
})->catch(sub($err) {
$self->emit(response => $tx, $err);
$r1->fail( $err => $tx );
undef $r1;
});
return $res
}
lib/AI/Ollama/Client/Impl.pm
sub validate_response( $self, $payload, $tx ) {
if( $self->validate_responses
and my $openapi = $self->openapi ) {
my $results = $openapi->validate_response($payload, { request => $tx->req });
if( $results->{error}) {
say $results;
say $tx->res->to_string;
};
};
}
sub validate_request( $self, $tx ) {
if( $self->validate_requests
and my $openapi = $self->openapi ) {
my $results = $openapi->validate_request($tx->req);
if( $results->{error}) {
say $results;
say $tx->req->to_string;
};
};
}
void dumpBDD_info(int bdd)
{
BddNode *node;
node = &bddnodes[bdd];
fprintf (stderr,
"BDD %d: RefCount %d, Level %-3d, IF %-3d, ELSE %-3d, HASH %d\n",
bdd, node->refcou, node->level, node->high, node->low, node->hash);
fflush(stderr);
}
long createPair(int *old, int *new_, int size) {
bddPair *pair = bdd_newpair ();
int checkBuddy() {
int i;
if (!checkBDD (0))
{
fprintf (stderr, "bdd FALSE failed sanity check\n");
dumpBDD_info (0);
return 0;
}
if (!checkBDD (1))
{
fprintf (stderr, "bdd TRUE failed sanity check\n");
dumpBDD_info (1);
return 0;
}
for (i = 0; i < bddvarnum; i++)
int checkBDD(int bdd)
{
if (!bddrunning)
{
bdd_error (BDD_RUNNING);
return 0;
}
if (bdd < 0 || (bdd) >= bddnodesize)
{
bdd_error (BDD_ILLBDD);
return 0;
}
if (bdd >= 2 && LOW (bdd) == -1)
{
bdd_error (BDD_ILLBDD);
return 0;
}
if (bddnodes[bdd].refcou == 0)
{
fprintf (stderr, "ERROR: refcount is zero\n");
return 0;
}
// refcount for TRUE and FALSE is saturated by default, ignore them!
if (bdd > 2 && bddnodes[bdd].refcou == MAXREF)
{
fprintf (stderr, "ERROR: refcount is saturated\n");
return 0;
}
return 1;
}
}
txt = (char *) malloc (bddvarnum + 2);
if (!txt)
{
bdd_error (BDD_MEMORY);
return;
}
for (i = 0; i < varcount; i++)
txt[i] = '-';
if (node_count < MIN_NODES)
node_count = MIN_NODES;
else if (node_count > MAX_NODES)
{
fprintf (stderr,
"[JBDD:init()] Number of nodes should be between %d and %d nodes\n",
MIN_NODES, MAX_NODES);
exit (20);
}
cachesize = nodenum / 8; // WAS: 10
if (has_bdd)
{
fprintf (stderr,
"A BDD package exist while you are trying to create another one\n"
"The BDD package is SINGLETON!\n");
exit (20);
}
has_bdd = 1;
if (bdd_false () != 0 || bdd_true () != 1)
{
fprintf (stderr, " INTERNAL ERROR : false = %d, true = %d\n",
bdd_false (), bdd_true ());
exit (20);
}
}
else
{
fprintf (stderr, "bdd_init(%ld,%ld) Failed\n (error code %d)\n",
nodenum, cachesize, ok);
exit (20);
}
}
{
bdd_done ();
has_bdd = 0;
}
else
fprintf (stderr, "Killing already dead BDD class :(\n");
}
int getOne()
CODE:
{
BDD ret;
if (varcount >= varnum)
{
fprintf (stderr, "Maximum var count (%d) reached!!\n", varnum);
exit (20);
}
ret = bdd_ithvar (varcount++);
// bddnodes[ret].refcou = 1; // why does BuDDy set the initial
int index
CODE:
{
if (index < 0 || index >= varcount)
{
fprintf (stderr, "[JBUDDY.getBDD] requested bad BDD: %d\n", index);
RETVAL=bdd_false();
}
RETVAL=bdd_ithvar (index);
}
OUTPUT:
// See init for an explanation about 2 + varnum...
div = pow (2, 2 + varnum);
sat = bdd_satcount (bdd);
// fprintf(stderr, "sat = %lf, div = %lf or 2^%ld\n", sat, div, ( 2 +
// varnum));
sat /= div;
RETVAL=sat;
lib/AI/PSO.pm
# no strict 'refs';
# if(defined(&{"main::$user_fitness_function"})) {
# $return_fitness = &$user_fitness_function(@values);
# } else {
# warn "error running user_fitness_function\n";
# exit 1;
# }
# use strict 'refs';
$return_fitness = $user_fitness_function->call(@values);
lib/AI/PSO.pm
# So, particle 1 has neighbors 2, 3, 4, 5 if K = 4. particle 4 has neighbors 5, 6, 7, 8
# ...
#
sub get_index_of_neighbor($$) {
my ($particleIndex, $neighborNum) = @_;
# TODO: insert error checking code / defensive programming
return ($particleIndex + $neighborNum) % $numParticles;
}
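# For example, with $numParticles == 8, particle 6's 3rd neighbor wraps
# around the ring: get_index_of_neighbor(6, 3) == (6 + 3) % 8 == 1.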
#
lib/AI/PSO.pm
if(&compute_fitness(@{$particles[$particleNeighborIndex]{bestPos}}) > $bestNeighborFitness) {
$bestNeighborFitness = &compute_fitness(@{$particles[$particleNeighborIndex]{bestPos}});
$bestNeighborIndex = $particleNeighborIndex;
}
}
# TODO: insert error checking code / defensive programming
return $bestNeighborIndex; # the index of the fittest neighbor
}
#
# clamp_velocity
who decide to use it. You can use it too, but we suggest you
first think carefully about whether this license or the ordinary
General Public License is the better strategy to use in any
particular case, based on the explanations below.
When we speak of free software, we are referring to freedom
of use, not price. Our General Public Licenses are designed
to make sure that you have the freedom to distribute copies
of free software (and charge for this service if you wish); that
you receive source code or can get it if you want it; that you
can change the software and use pieces of it in new free
containing the Library or a portion of it, either verbatim or with
modifications and/or translated straightforwardly into another
language. (Hereinafter, translation is included without
limitation in the term "modification".)
"Source code" for a work means the preferred form of the
work for making modifications to it. For a library, complete
source code means all the source code for all modules it
contains, plus any associated interface definition files, plus
the scripts used to control compilation and installation of the
library.
publish on each copy an appropriate copyright notice and
disclaimer of warranty; keep intact all the notices that refer
to this License and to the absence of any warranty; and
distribute a copy of this License along with the Library.
You may charge a fee for the physical act of transferring a
copy, and you may at your option offer warranty protection in
exchange for a fee.
2. You may modify your copy or copies of the Library or any
portion of it, thus forming a work based on the Library, and
the Free Software Foundation's software and to any other program whose
authors commit to using it. (Some other Free Software Foundation software is
covered by the GNU Library General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not price. Our
General Public Licenses are designed to make sure that you have the freedom
to distribute copies of free software (and charge for this service if you wish), that
you receive source code or can get it if you want it, that you can change the
software or use pieces of it in new free programs; and that you know you can do
these things.
publish on each copy an appropriate copyright notice and disclaimer of warranty;
keep intact all the notices that refer to this License and to the absence of any
warranty; and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and you may at
your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion of it, thus
forming a work based on the Program, and copy and distribute such
modifications or work under the terms of Section 1 above, provided that you also
c) Accompany it with the information you received as to the offer to distribute
corresponding source code. (This alternative is allowed only for noncommercial
distribution and only if you received the program in object code or executable
form with such an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for making
modifications to it. For an executable work, complete source code means all the
source code for all modules it contains, plus any associated interface definition
files, plus the scripts used to control compilation and installation of the
executable. However, as a special exception, the source code distributed need
not include anything that is normally distributed (in either source or binary form)
Rectangle.xs
goto last_op;
}
weigth+= path_weigths[ *position - '0' ];
++position;
}
// fprintf( stderr, "ok");
gimme = GIMME_V;
if (gimme == G_ARRAY){
int x,y;
int norm;
norm = offset ;
lib/AI/Pathfinding/OptimizeMultiple.pm
{
my $q_more = $self->_get_next_quota();
if ( !defined($q_more) )
{
AI::Pathfinding::OptimizeMultiple::Error::OutOfQuotas->throw(
error => "No q_more", );
}
$iters_quota += $q_more;
my $iters = $self->_scans_data()->slice(":,:,0");
lib/AI/Pathfinding/OptimizeMultiple.pm
{
my $q_more = $self->_get_next_quota();
if ( !defined($q_more) )
{
AI::Pathfinding::OptimizeMultiple::Error::OutOfQuotas->throw(
error => "No q_more", );
}
$iters_quota += $q_more;
my $iters = $self->_scans_data()->slice(":,:,0");
lib/AI/Pathfinding/OptimizeMultiple.pm
{
my $q_more = $self->_get_next_quota();
if ( !defined($q_more) )
{
AI::Pathfinding::OptimizeMultiple::Error::OutOfQuotas->throw(
error => "No q_more" );
}
$iters_quota += $q_more;
( undef, $num_solved_in_iter, undef, $selected_scan_idx ) =
lib/AI/Pathfinding/OptimizeMultiple.pm
{
$self->_inspect_quota();
}
};
if (
my $err = Exception::Class->caught(
'AI::Pathfinding::OptimizeMultiple::Error::OutOfQuotas')
)
{
$self->_status("out_of_quotas");
}
else
{
$err = Exception::Class->caught();
if ($err)
{
if ( not( blessed $err && $err->can('rethrow') ) )
{
die $err;
}
$err->rethrow;
}
}
return;
}
lib/AI/Pathfinding/SMAstar.pm
_state_successors_iterator => undef,
_show_prog_func => undef,
_state_get_data_func => undef,
@_, # attribute override
};
return bless $self, $class;
}
docs/specifications.t
# these three steps could be done in separate scripts if necessary
# &train and &validate could be put inside a loop or something
# the parameters make more sense when they are taken from @ARGV
# so when it's the first time training, it will create the nerve_file,
# the second time and up it will directly override that file since everything is read from it
# ... anyway :) after all, the training stage wasn't meant to be a fully working program, so it shouldn't be a problem
# just assume that
$perceptron->train( $stimuli_train, $save_nerve_to_file );
# reads training stimuli from csv
# tune attributes based on csv data
software--to make sure the software is free for all its users. The
General Public License applies to the Free Software Foundation's
software and to any other program whose authors commit to using it.
You can use it for your programs, too.
When we speak of free software, we are referring to freedom, not
price. Specifically, the General Public License is designed to make
sure that you have the freedom to give away or sell copies of free
software, that you receive source code or can get it if you want it,
that you can change the software or use pieces of it in new free
programs; and that you know you can do these things.
appropriately publish on each copy an appropriate copyright notice and
disclaimer of warranty; keep intact all the notices that refer to this
General Public License and to the absence of any warranty; and give any
other recipients of the Program a copy of this General Public License
along with the Program. You may charge a fee for the physical act of
transferring a copy.
2. You may modify your copy or copies of the Program or any portion of
it, and copy and distribute such modifications under the terms of Paragraph
1 above, provided that you also do the following:
that there is no warranty (or else, saying that you provide a
warranty) and that users may redistribute the program under these
conditions, and telling the user how to view a copy of this General
Public License.
d) You may charge a fee for the physical act of transferring a
copy, and you may at your option offer warranty protection in
exchange for a fee.
Mere aggregation of another independent work with the Program (or its
derivative) on a volume of a storage or distribution medium does not bring
c) accompany it with the information you received as to where the
corresponding source code may be obtained. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form alone.)
Source code for a work means the preferred form of the work for making
modifications to it. For an executable file, complete source code means
all the source code for all modules it contains; but, as a special
exception, it need not include source code for modules which are standard
libraries that accompany the operating system on which the executable
file runs, or for standard header files or definitions files that
lib/AI/PredictionClient/CPP/PredictionGrpcCpp.pm
google::protobuf::util::Status request_serialized_status =
google::protobuf::util::JsonStringToMessage(
serialized_request_object, &predictRequest, jparse_options);
if (!request_serialized_status.ok()) {
std::string error_result =
"{\"Status\": \"Error:object:request_deserialization:protocol_buffers\", ";
error_result += "\"StatusCode\": \"" +
std::to_string(request_serialized_status.error_code()) +
"\", ";
error_result += "\"StatusMessage\":" +
to_base64(request_serialized_status.error_message()) +
"}";
return error_result;
}
Status status = stub_->Predict(&context, predictRequest, &response);
if (status.ok()) {
google::protobuf::util::Status response_serialize_status =
google::protobuf::util::MessageToJsonString(
response, &serialized_result_object, jprint_options);
if (!response_serialize_status.ok()) {
std::string error_result =
"{\"Status\": \"Error:object:response_serialization:protocol_buffers\", ";
error_result += "\"StatusCode\": \"" +
std::to_string(response_serialize_status.error_code()) +
"\", ";
error_result += "\"StatusMessage\":" +
to_base64(response_serialize_status.error_message()) +
"}";
return error_result;
}
std::string success_result = "{\"Status\": \"OK\", ";
success_result += "\"StatusCode\": \"\", ";
success_result += "\"StatusMessage\": \"\", ";
success_result += "\"Result\": " + serialized_result_object + "}";
return success_result;
} else {
std::string error_result = "{\"Status\": \"Error:transport:grpc\", ";
error_result +=
"\"StatusCode\": \"" + std::to_string(status.error_code()) + "\", ";
error_result += "\"StatusMessage\":" + to_base64(status.error_message()) + "}";
return error_result;
}
}
std::string PredictionClient::to_base64(std::string text) {
lib/AI/Prolog.pm
owns(merlyn,gold).
owns(ovid,rubies).
knows(badguy,merlyn).
END_PROLOG
Side note: in Prolog, programs are often referred to as databases.
=head2 Creating a query
To create a query for the database, use C<query>.
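For example, using the database above (a sketch; C<$database> holds the program string, and the query/results pair follows the usual interface):

    my $prolog = AI::Prolog->new($database);
    $prolog->query('owns(Who, gold).');
    while (my $result = $prolog->results) {
        print "@$result\n";   # e.g. "owns merlyn gold"
    }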
lib/AI/Prolog.pm
=item * Tutorial.
=item * Data structure cookbook.
=item * Better error reporting.
=back
=head1 EXPORT
lib/AI/SimulatedAnnealing.pm
# The validate_number_specs() function takes a reference to an array of
# number specifications (which are references to hashes with "LowerBound",
# "UpperBound", and "Precision" fields) and returns a reference to a version
# of the array in which bounds with higher precision than that specified
# have been rounded inward. If a number specification is not valid, the
# function calls "die" with an error message.
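# For example, a (hypothetical) spec such as
#   { LowerBound => -1.0005, UpperBound => 1.0005, Precision => 3 }
# would have its bounds rounded inward to -1.000 and 1.000.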
sub validate_number_specs {
my $raw_number_specs = $_[0];
my @processed_number_specs = @{ $raw_number_specs };
for my $number_spec (@processed_number_specs) {
lib/AI/TensorFlow/Libtensorflow/Session.pm
=over 4
=item Maybe[TFSession]
A new execution session with the associated graph, or C<undef> on
error.
=back
B<C API>: L<< C<TF_NewSession>|AI::TensorFlow::Libtensorflow::Manual::CAPI/TF_NewSession >>
lib/AI/XGBoost/CAPI.pm
sub XGDMatrixCreateFromFile {
my ( $filename, $silent ) = @_;
$silent //= 1;
my $matrix = 0;
my $error = AI::XGBoost::CAPI::RAW::XGDMatrixCreateFromFile( $filename, $silent, \$matrix );
_CheckCall($error);
return $matrix;
}
sub XGDMatrixCreateFromMat {
my ( $data, $missing ) = @_;
lib/AI/XGBoost/CAPI.pm
# TODO: adapters?
my $data_adapter = [ map { @$_ } @$data ];
my $nrows = scalar @$data;
my $ncols = scalar @{ $data->[0] };
my $matrix = 0;
my $error = AI::XGBoost::CAPI::RAW::XGDMatrixCreateFromMat( $data_adapter, $nrows, $ncols, $missing, \$matrix );
_CheckCall($error);
return $matrix;
}
sub XGDMatrixNumRow {
my ($matrix) = @_;
lib/AI/XGBoost/CAPI.pm
}
sub XGDMatrixSliceDMatrix {
my ( $matrix, $list_of_indices ) = @_;
my $new_matrix = 0;
my $error = AI::XGBoost::CAPI::RAW::XGDMatrixSliceDMatrix( $matrix, $list_of_indices, scalar @$list_of_indices,
\$new_matrix );
_CheckCall($error);
return $new_matrix;
}
sub XGDMatrixFree {
my ($matrix) = @_;
lib/AI/XGBoost/CAPI.pm view on Meta::CPAN
# Check the return code and, if necessary, throw an exception
#
sub _CheckCall {
my ($return_code) = @_;
if ($return_code) {
my $error_message = AI::XGBoost::CAPI::RAW::XGBGetLastError();
XGBoostException->throw( error => $error_message );
}
}
1;
lib/AI/XGBoost/CAPI.pm
Perlified wrapper for the C API
=head2 Error handling
XGBoost C API functions return an int to signal the presence/absence of an error.
In this module that is surfaced as exceptions built with L<Exception::Class>.
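For example (a sketch; the file name is hypothetical, and Try::Tiny is just one way to catch the exception):

    use Try::Tiny;
    try {
        my $matrix = AI::XGBoost::CAPI::XGDMatrixCreateFromFile('train.libsvm');
    } catch {
        warn "XGBoost error: " . $_->error;
    };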
=head1 FUNCTIONS
=head2 XGDMatrixCreateFromFile
--diff=program use diff program and options
--compat-version=version provide compatibility with Perl version
--cplusplus accept C++ comments
--quiet don't output anything except fatal errors
--nodiag don't show diagnostics
--nohints don't show hints
--nochanges don't suggest changes
--nofilter don't filter input files
Using this option instructs F<ppport.h> to leave C++
comments untouched.
=head2 --quiet
Be quiet. Don't print anything except fatal errors.
=head2 --nodiag
Don't output any diagnostic messages. Only portability
alerts will be printed.
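A typical invocation (the file list is illustrative):

    perl ppport.h --compat-version=5.005 --quiet *.c *.xs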
PL_debstash|5.004050||p
PL_defgv|5.004050||p
PL_diehook|5.004050||p
PL_dirty|5.004050||p
PL_dowarn|||pn
PL_errgv|5.004050||p
PL_hexdigit|5.005000||p
PL_hints|5.005000||p
PL_last_in_gv|||n
PL_modglobal||5.005000|n
PL_na|5.004050||pn
PUSHn|||
PUSHp|||
PUSHs|||
PUSHu|5.004000||p
PUTBACK|||
PerlIO_clearerr||5.007003|
PerlIO_close||5.007003|
PerlIO_eof||5.007003|
PerlIO_error||5.007003|
PerlIO_fileno||5.007003|
PerlIO_fill||5.007003|
PerlIO_flush||5.007003|
PerlIO_get_base||5.007003|
PerlIO_get_bufsiz||5.007003|
PerlIO_read||5.007003|
PerlIO_seek||5.007003|
PerlIO_set_cnt||5.007003|
PerlIO_set_ptrcnt||5.007003|
PerlIO_setlinebuf||5.007003|
PerlIO_stderr||5.007003|
PerlIO_stdin||5.007003|
PerlIO_stdout||5.007003|
PerlIO_tell||5.007003|
PerlIO_unread||5.007003|
PerlIO_write||5.007003|
ptr_table_store|||
push_scope|||
put_byte|||
pv_display||5.006000|
pv_uni_display||5.007003|
qerror|||
re_croak2|||
re_dup|||
re_intuit_start||5.006000|
re_intuit_string||5.006000|
realloc||5.007002|n
regwhite|||
reg|||
repeatcpy|||
report_evil_fh|||
report_uninit|||
require_errno|||
require_pv||5.006000|
rninstr|||
rsignal_restore|||
rsignal_save|||
rsignal_state||5.004000|
warner_nocontext|||vn
warner||5.006000|v
warn|||v
watch|||
whichsig|||
write_to_stderr|||
yyerror|||
yylex|||
yyparse|||
yywarn|||
);
if ($file{changes}) {
if (exists $opt{copy}) {
my $newfile = "$filename$opt{copy}";
if (-e $newfile) {
error("'$newfile' already exists, refusing to write copy of '$filename'");
}
else {
local *F;
if (open F, ">$newfile") {
info("Writing copy of '$filename' with changes to '$newfile'");
print F $c;
close F;
}
else {
error("Cannot open '$newfile' for writing: $!");
}
}
}
elsif (exists $opt{patch} || $opt{changes}) {
if (exists $opt{patch}) {
unless ($patch_opened) {
if (open PATCH, ">$opt{patch}") {
$patch_opened = 1;
}
else {
error("Cannot open '$opt{patch}' for writing: $!");
delete $opt{patch};
$opt{changes} = 1;
goto fallback;
}
}
if (!defined $diff) {
$diff = run_diff('diff', $file, $str);
}
if (!defined $diff) {
error("Cannot generate a diff. Please install Text::Diff or use --copy.");
return;
}
print F $diff;
}
unlink $tmp;
}
else {
error("Cannot open '$tmp' for writing: $!");
}
return undef;
}
{
$opt{quiet} and return;
print "*** ", @_, "\n";
}
sub error
{
print "*** ERROR: ", @_, "\n";
}
my %given_hints;
/* It is very unlikely that anyone will try to use this with Perl 6
(or greater), but who knows.
*/
#if PERL_REVISION != 5
# error ppport.h only works with Perl version 5
#endif /* PERL_REVISION != 5 */
#ifdef I_LIMITS
# include <limits.h>
#endif
# define PL_debstash debstash
# define PL_defgv defgv
# define PL_diehook diehook
# define PL_dirty dirty
# define PL_dowarn dowarn
# define PL_errgv errgv
# define PL_hexdigit hexdigit
# define PL_hints hints
# define PL_na na
# define PL_no_modify no_modify
# define PL_perl_destruct_level perl_destruct_level
/* Replace perl_eval_pv with eval_pv */
/* eval_pv depends on eval_sv */
#ifndef eval_pv
#if defined(NEED_eval_pv)
static SV* DPPP_(my_eval_pv)(char *p, I32 croak_on_error);
static
#else
extern SV* DPPP_(my_eval_pv)(char *p, I32 croak_on_error);
#endif
#ifdef eval_pv
# undef eval_pv
#endif
#define Perl_eval_pv DPPP_(my_eval_pv)
#if defined(NEED_eval_pv) || defined(NEED_eval_pv_GLOBAL)
SV*
DPPP_(my_eval_pv)(char *p, I32 croak_on_error)
{
dSP;
SV* sv = newSVpv(p, 0);
PUSHMARK(sp);
SPAGAIN;
sv = POPs;
PUTBACK;
if (croak_on_error && SvTRUE(GvSV(errgv)))
croak(SvPVx(GvSV(errgv), na));
return sv;
}
#endif
select $ofh;
my $Query = join("\r\n", # "CRLF"
"GET $Desired HTTP/1.1",
# Do we need a Host: header with an "AbsoluteURI?"
# not needed: http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.2
# but this is trumped by an Apache error message invoking RFC2068 sections 9 and 14.23
"Host: $HostName",
"User-Agent: $agent",
"Connection: close",
'','') ;
print SOCK $Query or croak "could not print to miniget socket";
$Sessions{$Coo}->{AISXML} = $AISXML;
# parse AISXML...
my %aisvar;
foreach (qw{
identity
error
aissri
user_remote_addr
}
# ,@{$Param{XML}}
){
};
print <<EOF;
Content-Type: text/plain
internal AIS module logic error
EOF
exit;
$Identity = $Sessions{$Coo}->{identity};
if($Identity eq 'ERROR'){
print <<EOF;
Content-type: text/plain
There was an error with the authentication layer
of this web service: $Sessions{$Coo}->{error}
please contact $ENV{SERVER_ADMIN} to report this.
EOF
exit;
lib/AIX/LVM.pm
sub init
{
my $self = shift;
my ($result, %lslv, %lspv, %lsvg, @lslv, @lsvg, @lspv);
my ($lsvg, $lsvg_error) = $self->_exec_open3("lsvg -o");
croak "Error found during execution of lsvg -o: $lsvg_error\n" if $lsvg_error;
@lsvg = $self->_splitter($lsvg, qr'\n+');
foreach my $lvg (@lsvg) {
$self->{$lvg}= $self->_get_lv_pv_props($lvg); #Hierarchy is lsvg -> lslv and lspv
}
return $self;
lib/AIX/LVM.pm
{
my $self = shift;
my $lvg = shift;
croak "Logical volume group is not found\n" unless $lvg;
my (@lv, @pv, %lvg_hash);
my ($lslv, $lslv_error) = $self->_exec_open3("lsvg -l $lvg"); # Populate LV Values
croak "Error found during execution of lsvg -l $lvg: $lslv_error\n" if $lslv_error;
my @lslv = $self->_splitter($lslv, qr'\n+');
foreach my $lslv_l (@lslv[2..$#lslv]) {
push @lv, $1 if ($lslv_l=~/^(\S+)/);
}
foreach my $lv (@lv) {
$lvg_hash{lvol}->{$lv}= $self->_get_lslv_l_m_prop($lv);
}
my ($lspv, $lspv_error) = $self->_exec_open3("lsvg -p $lvg"); # Populate PV Values
croak "Error found during execution of lsvg -p $lvg: $lspv_error\n" if $lspv_error;
my @lspv = $self->_splitter($lspv, qr'\n+');
foreach my $lspv_l (@lspv[2..$#lspv]) {
push @pv, $1 if ($lspv_l=~/^(\S+)/);
}
foreach my $pv (@pv) {
$lvg_hash{pvol}->{$pv}= $self->_get_lspv_l_m_prop($pv);
}
my ($prop, $prop_error) = $self->_exec_open3("lsvg $lvg"); # Populate Properties
croak "Error found during execution of lsvg $lvg: $prop_error\n" if $prop_error;
$lvg_hash{prop} = $self->_parse_properties($prop, @lsvg_prop);
return \%lvg_hash;
}
# This subroutine is used to populate LV Logical Values, LV Physical Values and Properties of Logical Volumes
lib/AIX/LVM.pm view on Meta::CPAN
{
my $self = shift;
my $lv = shift;
croak "Logical volume is not found\n" unless $lv;
my (@lv, @pv, %lslv);
my ($lslv, $lslv_error) = $self->_exec_open3("lslv -l $lv"); # Populate LV Logical Values
croak "Error found during execution of lslv -l $lv: $lslv_error\n" if $lslv_error;
$lslv{"LV_LOGICAL_CMD_OUT"} = $lslv;
my ($lspv, $lspv_error) = $self->_exec_open3("lslv -m $lv"); # Populate LV Mirror Values
croak "Error found during execution of lslv -m $lv: $lspv_error\n" if $lspv_error;
$lslv{"LV_MIRROR_CMD_OUT"} = $lspv;
my ($prop, $prop_error) = $self->_exec_open3("lslv $lv"); # Populate LV Properties
croak "Error found during execution of lslv $lv: $prop_error\n" if $prop_error;
$lslv{prop} = $self->_parse_properties($prop, @lslv_prop);
return \%lslv;
}
# # This subroutine is used to populate PV Logical Values, PV PP Values and Properties of Physical Volumes
lib/AIX/LVM.pm
{
my $self = shift;
my $pv = shift;
croak "Physical volume is not found\n" unless $pv;
my (@lv, @pv, %lspv);
my ($lslv, $lslv_error) = $self->_exec_open3("lspv -l $pv"); # Populate PV Logical Values
croak "Error found during execution of lspv -l $pv: $lslv_error\n" if $lslv_error;
$lspv{"PV_LOGICAL_CMD_OUT"} = $lslv;
my ($lspv, $lspv_error) = $self->_exec_open3("lspv -M $pv"); # Populate PV in LV Values
croak "Error found during execution of lspv -M $pv: $lspv_error\n" if $lspv_error;
$lspv{"PV_LV_CMD_OUT"} = $lspv;
my ($lspp, $lspp_error) = $self->_exec_open3("lspv -p $pv"); # Populate PV Physical Partitions Values
croak "Error found during execution of lspv -p $pv: $lspp_error\n" if $lspp_error;
$lspv{"PV_PP_CMD_OUT"} = $lspp;
my ($prop, $prop_error) = $self->_exec_open3("lspv $pv"); # Populate PV Properties
croak "Error found during execution of lspv $pv: $prop_error\n" if $prop_error;
$lspv{prop} = $self->_parse_properties($prop, @lspv_prop);
return \%lspv;
}
# This subroutine performs parsing the output of the commands for passed array values.
lib/AIX/LVM.pm
# This subroutine executes commands via open3, capturing the output and error streams separately.
sub _exec_open3
{
my $self = shift;
my ($result, $error);
my $writer_h = new IO::Handle;
my $reader_h = new IO::Handle;
my $error_h = new IO::Handle;
my $pid = open3($writer_h, $reader_h, $error_h, @_) or croak "Not able to open3: $! \n";
$reader_h->autoflush();
$error_h->autoflush();
my $selector = IO::Select->new();
$selector->add($reader_h, $error_h); ## Add the handlers to select call ##
while( my @ready = $selector->can_read ){
foreach my $fh ( @ready ){
if( fileno($fh) == fileno($reader_h) ){
my $ret = $reader_h->sysread($_, 1024);
$result .= $_;
$selector->remove($fh) unless $ret;
}
if( fileno($fh) == fileno($error_h) ){
my $ret = $error_h->sysread($_, 1024);
$error .= $_;
$selector->remove($fh) unless $ret;
}
}
}
$reader_h->autoflush();
$error_h->autoflush();
waitpid $pid, 0;
my $rc = $? >> 8;
carp "Error in executing the command\n" if ($rc);
return $result, $error;
}
# Splitter based on pattern
sub _splitter
inc/Devel/CheckLib.pm
To avoid exporting them, C<use Devel::CheckLib ()>.
=head2 assert_lib
This takes several named parameters, all of which are optional, and dies
with an error message if any of the libraries listed can
not be found. B<Note>: dying in a Makefile.PL or Build.PL may provoke
a 'FAIL' report from CPAN Testers' automated smoke testers. Use
C<check_lib_or_exit> instead.
The named parameters are:
inc/Devel/CheckLib.pm
=back
=head2 check_lib_or_exit
This behaves exactly the same as C<assert_lib()> except that instead of
dying, it warns (with exactly the same error message) and exits.
This is intended for use in Makefile.PL / Build.PL
when you might want to prompt the user for various paths and
things before checking that what they've told you is sane.
If any library or header is missing, it exits with an exit value of 0 to avoid
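A typical use in a Makefile.PL (the library and header names are illustrative):

    use Devel::CheckLib;
    check_lib_or_exit( lib => 'jpeg', header => 'jpeglib.h' );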
inc/Devel/CheckLib.pm
=item IBM's tools on AIX
=item Microsoft's tools on Windows
=item MinGW on Windows (with Strawberry Perl)
=item Borland's tools on Windows
=back
lib/ALBD.pm
# input: $scoresRef <- a reference to a hash of scores (hash{CUI}=score)
# $ranksRef <- a reference to an array of CUIs ranked by their score
# $printTo <- optional, outputs the $printTo top ranked terms. If not
# specified, all terms are output
# output: a newline-separated string containing ranked terms, scores, and their
# preferred terms
sub _rankedTermsToString {
my $self = shift;
my $scoresRef = shift;
my $ranksRef = shift;
my $printTo = shift;
lib/ALBD.pm
#add the score
$string .= sprintf "%.5f\t", ${$scoresRef}{${$ranksRef}[$i]};
#add the CUI
$string .= "${$ranksRef}[$i]\t";
#add the name
my $name = $umls_interface->getPreferredTerm(${$ranksRef}[$i]);
#if no preferred name, get anything
if (!defined $name || $name eq '') {
my $termListRef = $umls_interface->getTermList('C0440102');
if (scalar @{$termListRef} > 0) {
$name = '.**'.${$termListRef}[0];
}
lib/ALPM/Conf.pm
chomp;
$line = $_;
$parser->($_);
}
};
my $err = $@;
close $if;
if($err){
# Print the offending file and line number along with any errors...
# (This is why we use dies with newlines, for cascading error msgs)
die "$@$path:$. $line\n"
}
return;
}
lib/ALPM/Conf.pm
my $alpm = ALPM->new($root, $dbpath);
_setarch($opts);
while(my ($opt, $val) = each %$opts){
# The SetOption type in typemap croaks on error, no need to check.
_setopt($alpm, $opt, $val);
}
my $usesl = grep { /signatures/ } $alpm->caps;
for my $db (@$dbs){
lib/ALPM/Conf.pm
next if(!@$mirs);
_expurls($mirs, $opts->{'arch'}, $r);
$sl = 'default' if(!$usesl);
my $x = $alpm->register($r, $sl)
or die "Failed to register $r database: " . $alpm->strerror;
$x->add_server($_) for(@$mirs);
}
return $alpm;
}
lib/AMF/Connection.pm
$class->{'endpoint'}.$class->{'append_to_endpoint'}, # TODO - check whether appending to the URL really works for HTTP POST
Content_Type => "application/x-amf",
Content => $request_stream->getStreamData()
);
croak "HTTP POST error: ".$http_response->status_line."\n"
unless($http_response->is_success);
my $response_stream = new AMF::Connection::InputStream( $http_response->decoded_content, $class->{'input_amf_options'});
my $response = new AMF::Connection::Message;
$response->deserialize( $response_stream );
view all matches for this distribution