lib/AI/FANN/Evolving.pm
'FANN_SIN_SYMMETRIC' => FANN_SIN_SYMMETRIC,
'FANN_COS_SYMMETRIC' => FANN_COS_SYMMETRIC,
# 'FANN_SIN' => FANN_SIN, # range is between 0 and 1
# 'FANN_COS' => FANN_COS, # range is between 0 and 1
},
'errorfunc' => {
'FANN_ERRORFUNC_LINEAR' => FANN_ERRORFUNC_LINEAR,
'FANN_ERRORFUNC_TANH' => FANN_ERRORFUNC_TANH,
},
'stopfunc' => {
'FANN_STOPFUNC_MSE' => FANN_STOPFUNC_MSE,
lib/AI/FANN/Evolving.pm
$constant{$k} = $v;
}
}
my %default = (
'error' => 0.0001,
'epochs' => 5000,
'train_type' => 'ordinary',
'epoch_printfreq' => 100,
'neuron_printfreq' => 0,
'neurons' => 15,
lib/AI/FANN/Evolving.pm
}
sub _scalar_properties {
(
training_algorithm => 'train',
train_error_function => 'errorfunc',
train_stop_function => 'stopfunc',
learning_rate => \&_mutate_double,
learning_momentum => \&_mutate_double,
quickprop_decay => \&_mutate_double,
quickprop_mu => \&_mutate_double,
lib/AI/FANN/Evolving.pm
}
sub _init {
my $self = shift;
my %args = @_;
for ( qw(error epochs train_type epoch_printfreq neuron_printfreq neurons activation_function) ) {
$self->{$_} = $args{$_} // $default{$_};
}
return $self;
}
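# For illustration (a sketch; assuming the public constructor forwards its
# arguments to _init):
#   my $ann = AI::FANN::Evolving->new( 'error' => 0.001, 'epochs' => 1000 );
# Keys that are not passed (e.g. 'neurons') fall back to the %default values above.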
lib/AI/FANN/Evolving.pm
# train
$self->{'ann'}->cascadetrain_on_data(
$data,
$self->neurons,
$self->neuron_printfreq,
$self->error,
);
}
else {
$log->debug("normal training");
lib/AI/FANN/Evolving.pm
# train
$self->{'ann'}->train_on_data(
$data,
$self->epochs,
$self->epoch_printfreq,
$self->error,
);
}
}
=item enum_properties
lib/AI/FANN/Evolving.pm
Returns a hash whose keys are the names of enums and whose values are the
possible states for each enum.
=cut
=item error
Getter/setter for the error threshold. Default is 0.0001.
=cut
sub error {
my $self = shift;
if ( @_ ) {
my $value = shift;
$log->debug("setting error threshold to $value");
return $self->{'error'} = $value;
}
else {
$log->debug("getting error threshold");
return $self->{'error'};
}
}
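# Example usage of the accessor above (sketch):
#   $ann->error(0.0001);          # set the error threshold
#   my $threshold = $ann->error;  # read it back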
=item epochs
lib/AI/FANN/Evolving.pm
}
# This is here so that we can trap method calls that need to be
# delegated to the FANN object. At this point we're not even
# going to care whether the FANN object implements these methods:
# if it doesn't, we get the normal error for unknown methods, which
# the user will then have to resolve.
sub AUTOLOAD {
my $self = shift;
my $method = $AUTOLOAD;
$method =~ s/.+://;
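# A typical delegation body (sketch; assuming the wrapped FANN object
# lives in $self->{'ann'} as elsewhere in this module):
#   return $self->{'ann'}->$method(@_);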
lib/AI/FANN.pm
Boolean methods return true on success and undef on failure.
=item *
Any error reported from the C side is automatically converted to a Perl
exception. No manual error checking is required after calling FANN
functions (see the sketch after this list).
=item *
Memory management is automatic, no need to call destroy methods.
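For instance, a minimal sketch of trapping such an exception (the layer
sizes here are illustrative):

    use AI::FANN;
    my $ann = eval { AI::FANN->new_standard( 2, 3, 1 ) };
    die "could not create network: $@" unless $ann;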
lib/AI/FANN.pm
FANN_SIN_SYMMETRIC
FANN_COS_SYMMETRIC
FANN_SIN
FANN_COS
# enum fann_errorfunc_enum:
FANN_ERRORFUNC_LINEAR
FANN_ERRORFUNC_TANH
# enum fann_stopfunc_enum:
FANN_STOPFUNC_MSE
lib/AI/FANN.pm
=item $ann->reset_MSE
-
=item $ann->train_on_file($filename, $max_epochs, $epochs_between_reports, $desired_error)
-
=item $ann->train_on_data($train_data, $max_epochs, $epochs_between_reports, $desired_error)
C<$train_data> is an AI::FANN::TrainData object.
=item $ann->cascadetrain_on_file($filename, $max_neurons, $neurons_between_reports, $desired_error)
-
=item $ann->cascadetrain_on_data($train_data, $max_neurons, $neurons_between_reports, $desired_error)
C<$train_data> is an AI::FANN::TrainData object.
=item $ann->train_epoch($train_data)
lib/AI/FANN.pm
=item $ann->training_algorithm($training_algorithm)
-
=item $ann->train_error_function
=item $ann->train_error_function($error_function)
-
=item $ann->train_stop_function
lib/AI/FuzzyEngine.pm
my $val = $var_a->defuzzify(); # $var_a returns a 1dim piddle with two elements
So do the fuzzy operations as provided by the fuzzy engine C<$fe> itself.
Any operation on more than one piddle expands them to common
dimensions, if possible, or throws a PDL error otherwise.
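For instance, under PDL's usual expansion (broadcasting) rules, and as a toy
sketch rather than the module's internal code:

    use PDL;
    my $a = pdl( [ 1, 2, 3 ] );     # dims: [3]
    my $b = pdl( [ [1], [2] ] );    # dims: [1,2]
    my $c = $a + $b;                # both expand to a common [3,2] shape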
The way expansion is done is best explained by code
(see C<< AI::FuzzyEngine->_cat_array_of_piddles(@pdls) >>).
Assuming all piddles are in C<@pdls>,
calculation goes as follows:
Added: Extensive test suite (tgene.t)
mutate_overwrite added
BUGFIXES: Most methods have more gene-length-related sanity checking.
So long as positive integers are used as args, there should
be no fatal errors from running past the end of substrings.
mutate, when called with a ref of probs, only worked with keys
of the generic probs hash; this is now fixed.
0.10 Wed Dec 27 21:00:00 2000
lib/AI/Genetic/Pro.pm
#=======================================================================
sub as_array {
my ($self, $chromosome) = @_;
if($self->type eq q/bitvector/){
# This could lead to an internal error, because of the underlying Tie::Array::Packed
#return @$chromosome if wantarray;
#return $chromosome;
my @chr = @$chromosome;
return @chr if wantarray;
# sub init():
# This method initializes the population to completely
# random individuals. It deletes all current individuals!!!
# It also examines the type of individuals we want, and
# require()s the proper class. Throws an error if it can't.
# Must pass to it an anon list that will be passed to the
# newRandom method of the individual.
# In case of bitvector, $newArgs is length of bitvector.
# In case of rangevector, $newArgs is anon list of anon lists.
# Takes a variable number of arguments. The first argument is the
# total number, N, of new individuals to add. The remaining arguments
# are genomes to inject. There must be at most N genomes to inject.
# If the number, n, of genomes to inject is less than N, N - n random
# genomes are added. Perhaps an example will help?
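# For example ($ga and the genomes are hypothetical): with N = 5,
#   $ga->inject(5, $genomeA, $genomeB);
# injects the two given genomes and adds 5 - 2 = 3 random individuals.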
# returns 1 on success and undef on error.
sub inject {
my ($self, $count, @genomes) = @_;
unless ($self->{INIT}) {
lib/AI/Image.pm
# Create Image object
sub new {
my $class = shift;
my %attr = @_;
$attr{'error'} = '';
$attr{'api'} = 'OpenAI' unless $attr{'api'};
$attr{'error'} = 'Invalid API' unless $attr{'api'} eq 'OpenAI';
$attr{'error'} = 'API Key missing' unless $attr{'key'};
$attr{'model'} = 'dall-e-2' unless $attr{'model'};
$attr{'size'} = '512x512' unless $attr{'size'};
return bless \%attr, $class;
lib/AI/Image.pm
);
# Returns true if the last operation was successful
sub success {
my $self = shift;
return !$self->{'error'};
}
# Returns error if last operation failed
sub error {
my $self = shift;
return $self->{'error'};
}
# Header for calling OpenAI
sub _get_header_openai {
my $self = shift;
lib/AI/Image.pm
The size for the generated image (default: '512x512').
=item debug
Used for testing. If set to any true value, the image method
will return details of the error encountered instead of C<undef>.
=back
=head2 image
lib/AI/Image.pm
my $success = $ai->success();
Returns true if the last operation was successful.
=head2 error
my $error = $ai->error();
Returns the error message if the last operation failed.
=head1 EXAMPLE
It is common to want to save the generated image as a file. This can be easily achieved
using the C<getstore> function of L<LWP::Simple>.
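A minimal sketch (assuming C<image> returns the URL of the generated image):

    use AI::Image;
    use LWP::Simple qw(getstore);
    my $ai  = AI::Image->new( key => 'YOUR_API_KEY' );
    my $url = $ai->image( 'A cat riding a bicycle' );
    getstore( $url, 'cat.png' ) if $ai->success;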
LibNeural.pm
=item $nn->train([I1,I2,...],[O1,O2,...],MINERR,TRAINRATE)
Completes a training cycle for the given inputs I1-IN, with the expected
results of O1-OM, where N is the number of inputs and M is the number of
outputs. MINERR is the mean squared error at the output that you wish to achieve. TRAINRATE is the learning rate to be used.
=item (O1,O2) = $nn->run([I1,I2,...])
Calculate the corresponding outputs (O1-OM) for the given inputs (I1-IN) based
on the previous training. Should only be called after the network has been
scripts/mnist.pl
my $res;
for my $key ( keys %opt ) {
my $file = "$url/$opt{$key}.gz";
my $ff = File::Fetch->new(uri => $file);
my $aux = $ff->fetch() or die $ff->error;
#print "$file\n";
#$res = $http->get("$file");
#my $content = $res->{content};
# # $res = $http->get("$route/".$opt{$key});
#print STDERR Dumper $content;
lib/AI/MXNet/Base.pm
=head2 assert
Parameters
-----------
Bool $input
Str $error_str
Calls Carp::confess with $error_str//"AssertionError" if $input is false.
=cut
sub assert
{
my ($input, $error_str) = @_;
local($Carp::CarpLevel) = 1;
Carp::confess($error_str//'AssertionError')
unless $input;
}
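# Usage sketch:
#   assert( defined $x, "x must be defined" );   # confesses unless $x is defined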
=head2 check_call
Checks the return value of a C API call.
This function will raise an exception when an error occurs.
Every API call is wrapped with this function.
Returns the C API call's return values stripped of the first return value;
it checks the calling context and returns the first element of the
values list when called in scalar context.
void* p_list_arguments;
void* p_declare_backward_dependency;
};
/*!
* \brief return str message of the last error
* all functions in this file will return 0 on success
* and -1 when an error occurred,
* MXGetLastError can be called to retrieve the error
*
* this function is thread-safe and can be called from different threads
* \return error info
*/
const char *MXGetLastError();
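/* Usage sketch (the API call shown is hypothetical):
 *   if (MXSomeCall(handle) != 0) {
 *     fprintf(stderr, "mxnet error: %s\n", MXGetLastError());
 *   }
 */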
//-------------------------------------
// Part 0: Global State setups
inc/Module/AutoInstall.pm
return
unless system( 'sudo', $^X, $0, "--config=$config",
"--installdeps=$missing" );
print << ".";
*** The 'sudo' command exited with error! Resuming...
.
}
return _prompt(
qq(
lib/AI/MegaHAL.pm
use vars qw(@EXPORT @ISA $VERSION $AUTOLOAD);
@EXPORT = qw(megahal_setnoprompt
megahal_setnowrap
megahal_setnobanner
megahal_seterrorfile
megahal_setstatusfile
megahal_initialize
megahal_initial_greeting
megahal_command
megahal_do_reply
lib/AI/MegaHAL.pm
# Bless ourselves into the AI::MegaHAL class.
$self = bless({ },$class);
# Make sure that we can find a brain or a training file somewhere,
# or else die with an error.
my $path = $args{'Path'} || ".";
if(-e "$path/megahal.brn" || -e "$path/megahal.trn") {
chdir($path) || die("Error: chdir: $!\n");
} else {
die("Error: unable to locate megahal.brn or megahal.trn\n");
bin/from-folder.pl
our $cache = {};
our @target = split("\/",$opts{cache_file});
my $set = AI::MicroStructure::ObjectSet->new();
eval {
local $^W = 0; # because otherwise it doesn't pass errors
#`rm $opts{cache_file}`;
$cache = lock_retrieve($opts{cache_file});
$cache = {} unless $cache;
lib/AI/NNEasy.pm
sub NNEasy {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $file = shift(@_) ;
my @out_types = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
my $error_ok = shift(@_) ;
my $in = shift(@_) ;
my $out = shift(@_) ;
my @layers = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
my $conf = shift(@_) ;
lib/AI/NNEasy.pm
@out_types = sort {$a <=> $b} @out_types ;
$this->{OUT_TYPES} = \@out_types ;
if ( $error_ok <= 0 ) {
my ($min_dif , $last) ;
my $i = -1 ;
foreach my $out_types_i ( @out_types ) {
++$i ;
if ($i > 0) {
my $dif = $out_types_i - $last ;
$min_dif = $dif if !defined $min_dif || $dif < $min_dif ;
}
$last = $out_types_i ;
}
$error_ok = $min_dif / 2 ;
$error_ok -= $error_ok*0.1 ;
}
$this->{ERROR_OK} = $error_ok ;
return $this ;
}
sub _layer_conf {
lib/AI/NNEasy.pm
$err *= -1 if $err < 0 ;
return $err ;
}
*_learn_set_get_output_error = \&_learn_set_get_output_error_c ;
sub _learn_set_get_output_error_pl {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my $set = shift(@_) ;
my $error_ok = shift(@_) ;
my $ins_ok = shift(@_) ;
my $verbose = shift(@_) ;
for (my $i = 0 ; $i < @$set ; $i+=2) {
$this->{NN}->run($$set[$i]) ;
lib/AI/NNEasy.pm
my ($err,$learn_ok,$print) ;
for (my $i = 0 ; $i < @$set ; $i+=2) {
$this->{NN}->run($$set[$i]) ;
my $er = $this->{NN}->RMSErr($$set[$i+1]) ;
$er *= -1 if $er < 0 ;
++$learn_ok if $er < $error_ok ;
$err += $er ;
$print .= join(' ',@{$$set[$i]}) ." => ". join(' ',@{$$set[$i+1]}) ." > $er\n" if $verbose ;
}
$err /= $ins_ok ;
lib/AI/NNEasy.pm
}
$limit ||= 30000 ;
$err_static_limit_positive ||= $err_static_limit/2 ;
my $error_ok = $this->{ERROR_OK} ;
my $check_diff_count = 1000 ;
my ($learn_ok,$counter,$err,$err_last,$err_count,$err_static, $reset_count1 , $reset_count2 ,$print) ;
$err_static = 0 ;
while ( ($learn_ok < $ins_ok) && ($counter < $limit) ) {
($err , $learn_ok , $print) = $this->_learn_set_get_output_error(\@set , $error_ok , $ins_ok , $verbose) ;
++$counter ;
if ( !($counter % 100) || $learn_ok == $ins_ok ) {
my $err_diff = $err_last - $err ;
lib/AI/NNEasy.pm
$err_count = 0 ;
}
if ( $verbose ) {
print "\nepoch $counter : error_ok = $error_ok : error = $err : err_diff = $err_diff : err_static = $err_static : ok = $learn_ok\n" ;
print $print ;
}
}
print "epoch $counter : error = $err : ok = $learn_ok\n" if $verbose > 1 ;
}
}
sub get_set_error {
my $this = ref($_[0]) ? shift : undef ;
my $CLASS = ref($this) || __PACKAGE__ ;
my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
my $ins_ok = shift(@_) ;
lib/AI/NNEasy.pm
sv_catsv(ret , elem) ;
}
return ret ;
}
void _learn_set_get_output_error_c( SV* self , SV* set , double error_ok , int ins_ok , bool verbose ) {
dXSARGS;
STRLEN len;
int i ;
HV* self_hv = OBJ_HV( self );
lib/AI/NNEasy.pm
SPAGAIN ;
ret = POPs ;
er = SvNV(ret) ;
if (er < 0) er *= -1 ;
if (er < error_ok) ++learn_ok ;
err += er ;
if ( verbose ) sv_catpvf(print_verbose , "%s => %s > %f\n" ,
SvPV( _av_join( OBJ_AV(set_in) ) , len) ,
SvPV( _av_join( OBJ_AV(set_out) ) , len) ,
lib/AI/NNEasy.pm
Here's an example of a NN to compute XOR:
use AI::NNEasy ;
## Our maximal error for the output calculation.
my $ERR_OK = 0.1 ;
## Create the NN:
my $nn = AI::NNEasy->new(
'xor.nne' , ## file to save the NN.
[0,1] , ## Output types of the NN.
$ERR_OK , ## Maximal error for output.
2 , ## Number of inputs.
1 , ## Number of outputs.
[3] , ## Hidden layers. (this is setting 1 hidden layer with 3 nodes).
) ;
lib/AI/NNEasy.pm
[0,1] => [1],
[1,0] => [1],
[1,1] => [0],
);
## Calculate the actual error for the set:
my $set_err = $nn->get_set_error(\@set) ;
## If the set error is bigger than the maximal error, let's learn this set:
if ( $set_err > $ERR_OK ) {
$nn->learn_set( \@set ) ;
## Save the NN:
$nn->save ;
}
lib/AI/NNEasy.pm
An array of outputs that the NN can have, so the NN can find the nearest number in this
list to give you the right output.
=item ERROR_OK
The maximal error of the calculated output.
If not defined, ERROR_OK will be calculated from the minimal difference between 2 types in
@OUTPUT_TYPES divided by 2:
@OUTPUT_TYPES = [0 , 0.5 , 1] ;
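Here the minimal difference is 0.5, so ERROR_OK becomes 0.5 / 2 = 0.25, reduced
by 10% to 0.225 (mirroring the C<$error_ok> computation in C<NNEasy()> above).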
lib/AI/NNEasy.pm
Here's a complete example of use:
my $nn = AI::NNEasy->new(
'xor.nne' , ## file to save the NN.
[0,1] , ## Output types of the NN.
0.1 , ## Maximal error for output.
2 , ## Number of inputs.
1 , ## Number of outputs.
[3] , ## Hidden layers. (this is setting 1 hidden layer with 3 nodes).
{random_connections=>0 , networktype=>'feedforward' , random_weights=>1 , learning_algorithm=>'backprop' , learning_rate=>0.1 , bias=>1} ,
) ;
lib/AI/NNEasy.pm
=back
=head2 learn_set (@SET , OK_OUTPUTS , LIMIT , VERBOSE)
Learn a set of inputs until it gets the right error for the outputs.
=over 4
=item @SET
lib/AI/NNEasy.pm
If TRUE, turns verbose mode ON when learning.
=back
=head2 get_set_error (@SET , OK_OUTPUTS)
Get the actual error of a set in the NN. If the returned error is bigger than
the I<ERROR_OK> defined at I<new()>, you should learn or relearn the set.
=head2 run (@INPUT)
Run an input and return the output calculated by the NN based on what it has already learned.
lib/AI/NNEasy.pm
Same as I<run()>, but the output returned is the nearest output value from the
I<@OUTPUT_TYPES> defined at I<new()>.
For example, an input I<[0,1]> that has learned
the output I<[1]> will actually return something like 0.98324 as output and
not 1, since the error should never be 0. So, with I<run_get_winner()>
we take the output of I<run()>, say 0.98324, and find which declared output
is nearest to this number, in this case 1. An output of [0] will be returned
by I<run()> as something like 0.078964, and I<run_get_winner()> returns 0.
=head1 Samples
lib/AI/NNEasy.pm
Some functions of this module have I<Inline> versions written in C.
I have made a C version only for the functions that are called heavily, like:
AI::NNEasy::_learn_set_get_output_error
AI::NNEasy::NN::tanh
AI::NNEasy::NN::feedforward::run
examples/add.pl
my $err = 10;
# Stop after 4096 epochs -- don't want to wait more than that
for ( my $i = 0; ($err > 0.0001) && ($i < 4096); $i++ ) {
$err = $dataset->learn($network);
print "Epoch = $i error = $err\n";
}
foreach (@{$dataset->run($network)})
{
foreach (@$_){print $_}
/*! \brief handle to a symbol that can be bind as operator */
typedef NNSymbol *SymbolHandle;
/*! \brief handle to Graph */
typedef NNGraph *GraphHandle;
/*!
* \brief Set the last error message needed by C API
* \param msg The error message to set.
*/
void NNAPISetLastError(const char* msg);
/*!
* \brief return str message of the last error
* all functions in this file will return 0 on success
* and -1 when an error occurred,
* NNGetLastError can be called to retrieve the error
*
* this function is thread-safe and can be called from different threads
* \return error info
*/
const char *NNGetLastError(void);
/*!
* \brief list all the available operator names, include entries.
* \param out_size the size of returned array
- fixing lexical sorting of version numbers
1.10 Tue Feb 22 09:31:12 AST 2011
- fixed testing problems due to differences in precision
- fixed podchecker warning (some space)
- better test error reporting in 2.t
- added t/pod.t, thanks to Michael Stevens
1.9 Tue Aug 31 09:27:51 ADT 2010
- fixed testing problems due to differences in precision in t/2.t
1.8 Fri Aug 21 06:36:34 ADT 2009
- fixed a pod documentation error
1.7 Thu Aug 20 14:20:15 ADT 2009
- improvements in documentation
- added method add_csv_file
- added method drop_attributes
- removed real_attr and added attribute_type field
1.6 Wed Aug 19 09:09:57 ADT 2009
- improved an error message
- fixed some testing problems due to whitespace
- small improvement in generating documentation
1.5 Wed Jan 30 08:06:22 AST 2008
- fixed testing problems due to differences in the lowest
lib/AI/Nerl/Network.pm
=head2 train($x,$y, %params)
Train with backpropagation using $x as input & $y as target.
$x and $y are both pdls. If there are multiple cases, each one will
occupy a column (dimension 2) of the pdl. If your dimensions are off,
you will experience a PDL error of some sort.
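For example, the four XOR cases, one per column (sketch; C<$network> is a
placeholder for a constructed AI::Nerl::Network object):

    use PDL;
    my $x = pdl( [ [0,0], [0,1], [1,0], [1,1] ] );   # each inner list is one case
    my $y = pdl( [ [0],   [1],   [1],   [0]   ] );
    $network->train( $x, $y, passes => 100 );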
=head3 %params
=head4 passes
BackProp.pm
package AI::NeuralNet::BackProp;
use Benchmark;
use strict;
# Returns the number of elements in an array ref, undef on error
sub _FETCHSIZE {
my $a=$_[0];
my ($b,$x);
return undef if(substr($a,0,5) ne "ARRAY");
foreach $b (@{$a}) { $x++ };
BackProp.pm
my $data = shift;
my %args = @_;
my $len = $#{$data}/2-1;
my $inc = $args{inc};
my $max = $args{max};
my $error = $args{error};
my $p = (defined $args{flag}) ?$args{flag} :1;
my $row = (defined $args{pattern})?$args{pattern}*2+1:1;
my ($fa,$fb);
for my $x (0..$len) {
print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG);
my $str = $self->learn( $data->[$x*2], # The list of data to input to the net
$data->[$x*2+1], # The output desired
inc=>$inc, # The starting learning gradient
max=>$max, # The maximum num of loops allowed
error=>$error); # The maximum (%) error allowed
print $str if($AI::NeuralNet::BackProp::DEBUG);
}
my $res;
BackProp.pm
my $data = shift;
my %args = @_;
my $len = $#{$data}/2-1;
my $inc = $args{inc};
my $max = $args{max};
my $error = $args{error};
my @learned;
while(1) {
_GET_X:
my $x=$self->intr(rand()*$len);
goto _GET_X if($learned[$x]);
BackProp.pm
print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG);
my $str = $self->learn($data->[$x*2], # The list of data to input to the net
$data->[$x*2+1], # The output desired
inc=>$inc, # The starting learning gradient
max=>$max, # The maximum num of loops allowed
error=>$error); # The maximum (%) error allowed
print $str if($AI::NeuralNet::BackProp::DEBUG);
}
return 1;
BackProp.pm
my %args = @_;
my $inc = $args{inc} || 0.20;
my $max = $args{max} || 1024;
my $_mx = intr($max/10);
my $_mi = 0;
my $error = ($args{error}>-1 && defined $args{error}) ? $args{error} : -1;
my $div = $self->{DIV};
my $size = $self->{SIZE};
my $out = $self->{OUT};
my $divide = AI::NeuralNet::BackProp->intr($div/$out);
my ($a,$b,$y,$flag,$map,$loop,$diff,$pattern,$value);
BackProp.pm
$loop = 0;
my $ldiff = 0;
my $dinc = 0.0001;
my $cdiff = 0;
$diff = 100;
$error = ($error>-1)?$error:-1;
# $flag only goes high when all neurons in output map compare exactly with
# desired result map or $max loops is reached
#
while(!$flag && ($max ? $loop<$max : 1)) {
BackProp.pm
}
# Save last $diff
$ldiff = $diff;
# This catches a max error argument and handles it
if(!($error>-1 ? $diff>$error : 1)) {
$flag=1;
last;
}
# Debugging
AI::NeuralNet::BackProp::out4 "Difference: $diff\%\t Increment: $inc\tMax Error: $error\%\n";
AI::NeuralNet::BackProp::out1 "\n\nMapping results from $map:\n";
# This loop compares each element of the output map with the desired result map.
# If they don't match exactly, we call weight() on the offending output neuron
# and tell it what it should be aiming for, and then the offending neuron will
BackProp.pm
Options should be written in hash form. There are three options:
inc => $learning_gradient
max => $maximum_iterations
error => $maximum_allowable_percentage_of_error
$learning_gradient is an optional value used to adjust the weights of the internal
connections. If $learning_gradient is omitted, it defaults to 0.20.
$maximum_iterations is the maximum numbers of iteration the loop should do.
It defaults to 1024. Set it to 0 if you never want the loop to quit before
the pattern is perfectly learned.
$maximum_allowable_percentage_of_error is the maximum allowable error to have. If
this is set, then learn() will return when the percentage difference between the
actual results and desired results falls below $maximum_allowable_percentage_of_error.
If you do not include 'error', or $maximum_allowable_percentage_of_error is set to -1,
then learn() will not return until it gets an exact match for the desired result OR it
reaches $maximum_iterations.
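For example, a call using all three options might look like this (the two map
refs are placeholders):

    $net->learn( $input_map_ref, $desired_result_ref,
                 inc   => 0.20,
                 max   => 2048,
                 error => 5 );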
=item $net->learn_set(\@set, [ options ]);
BackProp.pm
and 0.42, where no 0s were allowed because the learning would never finish learning completely
with a 0 in the input.
Yet with the allowance of 0s, it requires one of two factors to learn correctly. Either you
must enable randomness with $net->random(0.0001) (Any values work [other than 0], see random() ),
or you must set an error-minimum with the 'error => 5' option (you can use some other error value
as well).
When randomness is enabled (that is, when you call random() with a value other than 0), it interjects
a bit of randomness into the output of every neuron in the network, except for the input and output
neurons. The randomness is interjected with rand()*$rand, where $rand is the value that was
BackProp.pm
bad to have a pure 0 internally because the weights cannot change a 0 when multiplied by a 0, the
product stays a 0. Yet when a weight is multiplied by 0.00001, eventually with enough weight, it will
be able to learn. With a 0 value instead of 0.00001 or whatever, then it would never be able
to add enough weight to get anything other than a 0.
The second option to allow for 0s is to enable a maximum error with the 'error' option in
learn() , learn_set() , and learn_set_rand() . This allows the network to not worry about
learning an output perfectly.
For accuracy reasons, it is recommended that you work with 0s using the random() method.
examples/eigenvector_initialization.pl
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
$nn->initialize; # random
my @mes = $nn->train ($epochs, @vs);
warn "random: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # constant initialisation
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
$nn->initialize ($vs[-1]);
my @mes = $nn->train ($epochs, @vs);
warn "constant: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # eigenvector initialisation
my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
input_dim => $dim);
examples/eigenvector_initialization.pl
}
$nn->initialize (@training_vectors[0..0]); # take only the biggest ones (the eigenvalues are big, actually)
#warn $nn->as_string;
my @mes = $nn->train ($epochs, @vs);
warn "eigen: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
__END__
lib/AI/NeuralNet/Kohonen.pm
sub _gauss_weight { my ($r, $sigma) = (shift,shift);
return exp( -($r**2) / (2 * $sigma**2) );
}
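# (_gauss_weight above computes the Gaussian neighbourhood kernel
#  w(r) = exp( -r**2 / (2 * sigma**2) ), which peaks at the winning node
#  and decays with the distance $r.)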
=head1 PUBLIC METHOD quantise_error
Returns the quantisation error for either the supplied points,
or those in the C<input> field.
=cut
sub quantise_error { my ($self,$targets) = (shift,shift);
my $qerror=0;
if (not defined $targets){
$targets = $self->{input};
} else {
foreach (@$targets){
if (not ref $_ or ref $_ ne 'ARRAY'){
lib/AI/NeuralNet/Kohonen.pm
confess "target input and map dimensions differ";
}
for my $i (0..$#bmu){
foreach my $w (0..$self->{weight_dim}){
$qerror += $targets->[$i]->{values}->[$w]
- $self->{map}->[$bmu[$i]->[1]]->[$bmu[$i]->[2]]->{weight}->[$w];
}
}
$qerror /= scalar @$targets;
return $qerror;
}
=head1 PRIVATE METHOD _add_input_from_str
my $layers = shift;
# Looks like we got ourselves a layer specs array
if(ref($layers) eq "ARRAY") {
if($self->{total_layers}!=$#{$layers}) {
$self->{error} = "extend(): Cannot add new layers. Create a new network to add layers.\n";
return undef;
}
if(ref($layers->[0]) eq "HASH") {
$self->{total_nodes} = 0;
$self->{inputs} = $layers->[0]->{nodes};
for (0..$self->{total_layers}){$self->extend_layer($_,$layers->[$_])}
$self->{layers} = $layers;
for (0..$self->{total_layers}){$self->{total_nodes}+= $self->{layers}->[$_]}
}
} else {
$self->{error} = "extend(): Invalid argument type.\n";
return undef;
}
return 1;
}
sub extend_layer {
my $self = shift;
my $layer = shift || 0;
my $specs = shift;
if(!$specs) {
$self->{error} = "extend_layer(): You must provide specs to extend layer $layer with.\n";
return undef;
}
if(ref($specs) eq "HASH") {
$self->activation($layer,$specs->{activation}) if($specs->{activation});
$self->threshold($layer,$specs->{threshold}) if($specs->{threshold});
my $n = 0;
my $more = $nodes - $self->{layers}->[$layer] - 1;
d("Checking on extending layer $layer to $nodes nodes (check:$self->{layers}->[$layer]).\n",9);
return 1 if ($nodes == $self->{layers}->[$layer]);
if ($self->{layers}->[$layer]>$nodes) {
$self->{error} = "add_nodes(): I cannot remove nodes from the network with this version of my module. You must create a new network to remove nodes.\n";
return undef;
}
d("Extending layer $layer by $more.\n",9);
for (0..$more){$self->{mesh}->[$#{$self->{mesh}}+1]=AI::NeuralNet::Mesh::node->new($self)}
for(0..$layer-2){$n+=$self->{layers}->[$_]}
my $outputs = shift; # target outputs
my %args = @_; # get args into hash
my $inc = $args{inc} || 0.002; # learning gradient
my $max = $args{max} || 1024; # max iteterations
my $degrade = $args{degrade} || 0; # enable gradient degrading
my $error = ($args{error}>-1 && defined $args{error}) ? $args{error} : -1;
my $dinc = 0.0002; # amount to adjust gradient by
my $diff = 100; # error margin between results
my $start = new Benchmark;
$inputs = $self->crunch($inputs) if($inputs == 0);
$outputs = $self->crunch($outputs) if($outputs == 0);
my ($flag,$ldiff,$cdiff,$_mi,$loop,$y);
while(!$flag && ($max ? $loop<$max : 1)) {
my $b = new Benchmark;
my $got = $self->run($inputs);
$diff = pdiff($got,$outputs);
$flag = 1;
if(($error>-1 ? $diff<$error : 0) || !$diff) {
$flag=1;
last;
}
if($degrade) {
my $data = shift;
my %args = @_;
my $len = $#{$data}/2;
my $inc = $args{inc};
my $max = $args{max};
my $error = $args{error};
my $degrade = $args{degrade};
my $p = (defined $args{flag}) ?$args{flag} :1;
my $row = (defined $args{row}) ?$args{row}+1:1;
my $leave = (defined $args{leave})?$args{leave}:0;
for my $x (0..$len-$leave) {
d("Learning set $x...\n",4);
my $str = $self->learn( $data->[$x*2],
$data->[$x*2+1],
inc=>$inc,
max=>$max,
error=>$error,
degrade=>$degrade);
}
if ($p) {
return pdiff($data->[$row],$self->run($data->[$row-1]));
}
close(FILE);
if(!(-f $file)) {
$self->{error} = "Error writing to \"$file\".";
return undef;
}
return $self;
}
my ($a,$b) = split /=/, $line;
$db{$a}=$b;
}
if(!$db{"header"}) {
$self->{error} = "Invalid format.";
return undef;
}
return $self->load_old($file) if($self->version($db{"header"})<0.21);
my $self = shift;
my $file = shift;
my $load_flag = shift;
if(!(-f $file)) {
$self->{error} = "File \"$file\" does not exist.";
return undef;
}
open(FILE,"$file");
my @lines=<FILE>;
my ($a,$b) = split /=/, $line;
$db{$a}=$b;
}
if(!$db{"header"}) {
$self->{error} = "Invalid format.";
return undef;
}
if($load_flag) {
undef $self;
sub load_pcx {
my $self = shift;
my $file = shift;
eval('use PCX::Loader');
if($@) {
$self->{error}="Cannot load PCX::Loader module: $@";
return undef;
}
return PCX::Loader->new($self,$file);
}
sub crunched {
my $self = shift;
for my $a (0..$self->{_crunched}->{_length}-1) {
return $a+1 if($self->{_crunched}->{list}->[$a] eq $_[0]);
}
$self->{error} = "Word \"$_[0]\" not found.";
return undef;
}
# Alias for crunched(), above
sub word { crunched(@_) }
# Same as benchmark()
sub benchmarked {
benchmark(shift);
}
# Return the last error in the mesh, or undef if no error.
sub error {
my $self = shift;
return undef if !$self->{error};
chomp($self->{error});
return $self->{error}."\n";
}
# Used to format array ref into columns
# Usage:
# join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
well as basic package inheritance, you can simulate many different
types of neural network structures with very little new code written
by you.
In this module I have included a more accurate form of "learning" for the
mesh. This form performs descent toward a local error minimum (0) on a
directional delta, rather than toward the desired value for that node. This allows
for better and more accurate results with larger datasets. This module also
uses a simpler recursion technique which, surprisingly, is more accurate than
the original technique that I've used in other ANNs.
Options should be written in hash form. There are four options:
inc => $learning_gradient
max => $maximum_iterations
error => $maximum_allowable_percentage_of_error
degrade => $degrade_increment_flag
$learning_gradient is an optional value used to adjust the weights of the internal
connections. If $learning_gradient is omitted, it defaults to 0.002.
$maximum_iterations is the maximum numbers of iteration the loop should do.
It defaults to 1024. Set it to 0 if you never want the loop to quit before
the pattern is perfectly learned.
$maximum_allowable_percentage_of_error is the maximum allowable error to have. If
this is set, then learn() will return when the percentage difference between the
actual results and desired results falls below $maximum_allowable_percentage_of_error.
If you do not include 'error', or $maximum_allowable_percentage_of_error is set to -1,
then learn() will not return until it gets an exact match for the desired result OR it
reaches $maximum_iterations.
$degrade_increment_flag is a simple flag used to allow/disallow increment degrading
during learning based on a product of the error difference with several other factors.
$degrade_increment_flag is off by default. Setting $degrade_increment_flag to a true
value turns increment degrading on.
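A call exercising all four options might look like this (sketch):

    $net->learn( \@inputs, \@outputs,
                 inc     => 0.002,
                 max     => 1024,
                 error   => 5,
                 degrade => 1 );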
In previous module releases $degrade_increment_flag was not used, as increment degrading
was always on. In this release I have looked at several other network types as well
level 4.
Level 3 ($level = 3) : I don't think I included any level 3 debugs in this version.
Level 4 ($level = 4) : This level is the one I use most. It is only used during learning. It
displays the current error (difference between actual outputs and the target outputs you
asked for), as well as the current loop number and the benchmark time for the last learn cycle.
Also printed are the actual outputs and the target outputs below the benchmark times.
Level 12 ($level = 12) : Level 12 prints a dot (period) [.] after each learning loop is
complete. This is useful for letting the user know that stuff is happening, but without
This uses a simple flat-file text storage format, and therefore the network files should
be fairly portable.
This method will return undef if there was a problem with writing the file. If there is an
error, it will set the internal error message, which you can retrieve with the error() method,
below.
If there were no errors, it will return a reference to $net.
=item $net->load($filename);
This will load from disk any network saved by save() and completly restore the internal
state at the point it was save() was called at.
If the file is of an invalid file type, then load() will
return undef. Use the error() method, below, to print the error message.
If there were no errors, it will return a reference to $net.
UPDATE: $filename can now be a newline-separated set of mesh data. This enables you
to do $net->load(join("\n",<DATA>)) and other fun things. I added this mainly
for a demo I'm writing but not quite done with yet. So, Cheers!
=item $net->crunched($word);
This will return undef if the word is not in the internal crunch list, or it will return the
index of the word if it exists in the crunch list.
If the word is not in the list, it will set the internal error value with a text message
that you can retrieve with the error() method, below.
=item $net->word($word);
A function alias for crunched().
network from hanging on a 0 value. When called with no arguments, it returns the current
const. value. It defaults to 0.0001 on a newly-created network. The run const. value
is preserved across load() and save() calls.
=item $net->error();
Returns the last error message which occurred in the mesh, or undef if no errors have
occurred.
=item $net->load_pcx($filename);
NOTE: To use this function, you must have PCX::Loader installed. If you do not have
PCX::Loader installed, it will return undef and store an error for you to retrieve with
the error() method, below.
This is a treat... this routine will load a PCX-format file (yah, I know ... ancient
format ... but it is the only one I could find specs for to write it in Perl. If
anyone can get specs for any other formats, or could write a loader for them, I
would be very grateful!) Anyways, a PCX-format file that is exactly 320x200 with 8 bits
examples/eigenvector_initialization.pl
my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
input_dim => $dim);
$nn->initialize; # random
my @mes = $nn->train ($epochs, @vs);
warn "random: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # constant initialisation
my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
input_dim => $dim);
$nn->initialize ($vs[-1]);
my @mes = $nn->train ($epochs, @vs);
warn "constant: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
{ # eigenvector initialisation
my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
input_dim => $dim);
examples/eigenvector_initialization.pl
}
$nn->initialize (@training_vectors[0..0]); # take only the biggest ones (the eigenvalues are big, actually)
#warn $nn->as_string;
my @mes = $nn->train ($epochs, @vs);
warn "eigen: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}
__END__
lib/AI/NeuralNet/Simple.pm
1 1 0 1
1 0 0 1
0 1 0 1
0 0 1 0
The type of network we use is a feed-forward back error propagation network,
referred to as a back-propagation network, for short. The way it works is
simple. When we feed in our input, it travels from the input to hidden layers
and then to the output layers. This is the "feed forward" part. We then
compare the output to the expected results and measure how far off we are. We
then adjust the weights on the "output to hidden" synapses, measure the error
on the hidden nodes and then adjust the weights on the "hidden to input"
synapses. This is what is referred to as "back error propagation".
We continue this process until the amount of error is small enough that we are
satisfied. In reality, we will rarely if ever get precise results from the
network, but we learn various strategies to interpret the results. In the
example above, we use a "winner takes all" strategy. Which ever of the output
nodes has the greatest value will be the "winner", and thus the answer.
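For example, a sketch of reading the answer with the module's C<winner>
method, which returns the index of the output node with the greatest value:

    my $net = AI::NeuralNet::Simple->new( 2, 1, 2 );
    $net->train( [1, 1], [0, 1] ) for 1 .. 10_000;
    my $answer = $net->winner( [1, 1] );   # index of the winning output node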
lib/AI/NeuralNet/Simple.pm
With more complete neural net packages, you can also pick which activation
functions you wish to use and the "learn rate" of the neurons.
=item 2 Training
This involves feeding the neural network enough data until the error rate is
low enough to be acceptable. Often we have a large data set and merely keep
iterating until the desired error rate is achieved.
=item 3 Measuring results
One frequent mistake made with neural networks is failing to test the network
with different data from the training data. It's quite possible for a
lib/AI/NeuralNet/Simple.pm
"Naturally Intelligent Systems", by Maureen Caudill and Charles Butler,
copyright (c) 1990 by Massachusetts Institute of Technology.
This book is a decent introduction to neural networks in general. The feed-forward
back error propagation network is but one of many types.
=head1 AUTHORS
Curtis "Ovid" Poe, C<ovid [at] cpan [dot] org>
Multiple network support, persistence, export of MSE (mean squared error),
training until MSE below a given threshold and customization of the
activation function added by Raphael Manfredi C<Raphael_Manfredi@pobox.com>.
=head1 COPYRIGHT AND LICENSE
lib/AI/Ollama/Client/Impl.pm
$res->done($resp);
} elsif( $resp->code == 404 ) {
# Blob was not found
$res->done($resp);
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
lib/AI/Ollama/Client/Impl.pm
# Should we validate using OpenAPI::Modern here?!
if( $resp->code == 201 ) {
# Blob was successfully created
$res->done($resp);
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d", $resp->code ), $resp);
}
});
my $_tx;
lib/AI/Ollama/Client/Impl.pm
# Should we validate using OpenAPI::Modern here?!
if( $resp->code == 200 ) {
# Successful operation.
$res->done($resp);
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d", $resp->code ), $resp);
}
});
my $_tx;
lib/AI/Ollama/Client/Impl.pm
# Should we validate using OpenAPI::Modern here?!
if( $resp->code == 200 ) {
# Successful operation.
$res->done($resp);
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d", $resp->code ), $resp);
}
});
my $_tx;
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d", $resp->code ), $resp);
}
});
my $_tx;
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
lib/AI/Ollama/Client/Impl.pm
} else {
# Unknown/unhandled content type
$res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
}
} else {
# An unknown/unhandled response, likely an error
$res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
}
})->retain;
# Start our transaction
lib/AI/Ollama/Client/Impl.pm
sub validate_response( $self, $payload, $tx ) {
if( $self->validate_responses
and my $openapi = $self->openapi ) {
my $results = $openapi->validate_response($payload, { request => $tx->req });
if( $results->{error}) {
say $results;
say $tx->res->to_string;
};
};
}
sub validate_request( $self, $tx ) {
if( $self->validate_requests
and my $openapi = $self->openapi ) {
my $results = $openapi->validate_request($tx->req);
if( $results->{error}) {
say $results;
say $tx->req->to_string;
};
};
}
int checkBDD(int bdd)
{
if (!bddrunning)
{
bdd_error (BDD_RUNNING);
return 0;
}
if (bdd < 0 || (bdd) >= bddnodesize)
{
bdd_error (BDD_ILLBDD);
return 0;
}
if (bdd >= 2 && LOW (bdd) == -1)
{
bdd_error (BDD_ILLBDD);
return 0;
}
if (bddnodes[bdd].refcou == 0)
{
}
txt = (char *) malloc (bddvarnum + 2);
if (!txt)
{
bdd_error (BDD_MEMORY);
return;
}
for (i = 0; i < varcount; i++)
txt[i] = '-';
}
}
else
{
fprintf (stderr, "bdd_init(%ld,%ld) Failed\n (error code %d)\n",
nodenum, cachesize, ok);
exit (20);
}
}
lib/AI/PSO.pm
# no strict 'refs';
# if(defined(&{"main::$user_fitness_function"})) {
# $return_fitness = &$user_fitness_function(@values);
# } else {
# warn "error running user_fitness_function\n";
# exit 1;
# }
# use strict 'refs';
$return_fitness = $user_fitness_function->call(@values);
lib/AI/PSO.pm
# So, particle 1 has neighbors 2, 3, 4, 5 if K = 4. Particle 4 has neighbors 5, 6, 7, 8
# ...
#
sub get_index_of_neighbor($$) {
my ($particleIndex, $neighborNum) = @_;
# TODO: insert error checking code / defensive programming
return ($particleIndex + $neighborNum) % $numParticles;
}
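# e.g. with $numParticles = 10, get_index_of_neighbor(8, 3) returns
# (8 + 3) % 10 = 1, i.e. the neighborhood wraps around the end of the swarm.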
#
lib/AI/PSO.pm
if(&compute_fitness(@{$particles[$particleNeighborIndex]{bestPos}}) > $bestNeighborFitness) {
$bestNeighborFitness = &compute_fitness(@{$particles[$particleNeighborIndex]{bestPos}});
$bestNeighborIndex = $particleNeighborIndex;
}
}
# TODO: insert error checking code / defensive programming
return $bestNeighborIndex;
}
#
# clamp_velocity
--diff=program use diff program and options
--compat-version=version provide compatibility with Perl version
--cplusplus accept C++ comments
--quiet don't output anything except fatal errors
--nodiag don't show diagnostics
--nohints don't show hints
--nochanges don't suggest changes
--nofilter don't filter input files
Using this option instructs F<ppport.h> to leave C++
comments untouched.
=head2 --quiet
Be quiet. Don't print anything except fatal errors.
=head2 --nodiag
Don't output any diagnostic messages. Only portability
alerts will be printed.
PL_defgv|5.004050||p
PL_diehook|5.004050||p
PL_dirty|5.004050||p
PL_dowarn|||pn
PL_errgv|5.004050||p
PL_error_count|5.011000||p
PL_expect|5.011000||p
PL_hexdigit|5.005000||p
PL_hints|5.005000||p
PL_in_my_stash|5.011000||p
PL_in_my|5.011000||p
PUTBACK|||
PerlIO_clearerr||5.007003|
PerlIO_close||5.007003|
PerlIO_context_layers||5.009004|
PerlIO_eof||5.007003|
PerlIO_error||5.007003|
PerlIO_fileno||5.007003|
PerlIO_fill||5.007003|
PerlIO_flush||5.007003|
PerlIO_get_base||5.007003|
PerlIO_get_bufsiz||5.007003|
put_byte|||
pv_display|5.006000||p
pv_escape|5.009004||p
pv_pretty|5.009004||p
pv_uni_display||5.007003|
qerror|||
qsortsvu|||
re_compile||5.009005|
re_croak2|||
re_dup_guts|||
re_intuit_start||5.009005|
xmldump_form|||
xmldump_indent|||v
xmldump_packsubs|||
xmldump_sub|||
xmldump_vindent|||
yyerror|||
yylex|||
yyparse|||
yywarn|||
);
if ($file{changes}) {
if (exists $opt{copy}) {
my $newfile = "$filename$opt{copy}";
if (-e $newfile) {
error("'$newfile' already exists, refusing to write copy of '$filename'");
}
else {
local *F;
if (open F, ">$newfile") {
info("Writing copy of '$filename' with changes to '$newfile'");
print F $c;
close F;
}
else {
error("Cannot open '$newfile' for writing: $!");
}
}
}
elsif (exists $opt{patch} || $opt{changes}) {
if (exists $opt{patch}) {
unless ($patch_opened) {
if (open PATCH, ">$opt{patch}") {
$patch_opened = 1;
}
else {
error("Cannot open '$opt{patch}' for writing: $!");
delete $opt{patch};
$opt{changes} = 1;
goto fallback;
}
}
if (!defined $diff) {
$diff = run_diff('diff', $file, $str);
}
if (!defined $diff) {
error("Cannot generate a diff. Please install Text::Diff or use --copy.");
return;
}
print F $diff;
}
}
unlink $tmp;
}
else {
error("Cannot open '$tmp' for writing: $!");
}
return undef;
}
{
$opt{quiet} and return;
print "*** ", @_, "\n";
}
sub error
{
print "*** ERROR: ", @_, "\n";
}
my %given_hints;
/* It is very unlikely that anyone will try to use this with Perl 6
(or greater), but who knows.
*/
#if PERL_REVISION != 5
# error ppport.h only works with Perl version 5
#endif /* PERL_REVISION != 5 */
#ifndef dTHR
# define dTHR dNOOP
#endif
#ifndef dTHX
# define PL_defgv defgv
# define PL_diehook diehook
# define PL_dirty dirty
# define PL_dowarn dowarn
# define PL_errgv errgv
# define PL_error_count error_count
# define PL_expect expect
# define PL_hexdigit hexdigit
# define PL_hints hints
# define PL_in_my in_my
# define PL_laststatval laststatval
# define PL_lex_state D_PPP_my_PL_parser_var(lex_state)
# define PL_lex_stuff D_PPP_my_PL_parser_var(lex_stuff)
# define PL_tokenbuf D_PPP_my_PL_parser_var(tokenbuf)
# define PL_in_my D_PPP_my_PL_parser_var(in_my)
# define PL_in_my_stash D_PPP_my_PL_parser_var(in_my_stash)
# define PL_error_count D_PPP_my_PL_parser_var(error_count)
#else
/* ensure that PL_parser != NULL and cannot be dereferenced */
/* Replace perl_eval_pv with eval_pv */
#ifndef eval_pv
#if defined(NEED_eval_pv)
static SV* DPPP_(my_eval_pv)(char *p, I32 croak_on_error);
static
#else
extern SV* DPPP_(my_eval_pv)(char *p, I32 croak_on_error);
#endif
#ifdef eval_pv
# undef eval_pv
#endif
#define Perl_eval_pv DPPP_(my_eval_pv)
#if defined(NEED_eval_pv) || defined(NEED_eval_pv_GLOBAL)
SV*
DPPP_(my_eval_pv)(char *p, I32 croak_on_error)
{
dSP;
SV* sv = newSVpv(p, 0);
PUSHMARK(sp);
SPAGAIN;
sv = POPs;
PUTBACK;
if (croak_on_error && SvTRUE(GvSV(errgv)))
croak(SvPVx(GvSV(errgv), na));
return sv;
}
lib/AI/Pathfinding/OptimizeMultiple.pm
{
my $q_more = $self->_get_next_quota();
if ( !defined($q_more) )
{
AI::Pathfinding::OptimizeMultiple::Error::OutOfQuotas->throw(
error => "No q_more", );
}
$iters_quota += $q_more;
my $iters = $self->_scans_data()->slice(":,:,0");
lib/AI/Pathfinding/OptimizeMultiple.pm
{
my $q_more = $self->_get_next_quota();
if ( !defined($q_more) )
{
AI::Pathfinding::OptimizeMultiple::Error::OutOfQuotas->throw(
error => "No q_more", );
}
$iters_quota += $q_more;
my $iters = $self->_scans_data()->slice(":,:,0");
lib/AI/Pathfinding/OptimizeMultiple.pm
{
my $q_more = $self->_get_next_quota();
if ( !defined($q_more) )
{
AI::Pathfinding::OptimizeMultiple::Error::OutOfQuotas->throw(
error => "No q_more" );
}
$iters_quota += $q_more;
( undef, $num_solved_in_iter, undef, $selected_scan_idx ) =