Result:
found more than 265 distributions - search limited to the first 2001 files matching your query (run in 0.757)


AI-Genetic-Pro


lib/AI/Genetic/Pro.pm

use constant GD 		=> 'GD::Graph::linespoints'; 
#=======================================================================
my $_Cache = { };
my $_temp_chromosome;
#=======================================================================
sub new {
	my ( $class, %args ) = ( shift, @_ );
	
	#-------------------------------------------------------------------
	my %opts = map { if(ref $_){$_}else{ /^-?(.*)$/o; $1 }} @_;
	my $self = bless \%opts, $class;

lib/AI/Genetic/Pro.pm

	'AI::Genetic::Pro::MCE'->use or die q[Cannot raise multicore support: ] . $@;
	
	return AI::Genetic::Pro::MCE->new( $self, \%args );
}
#=======================================================================
sub _Cache { $_Cache; }
#=======================================================================
# INIT #################################################################
#=======================================================================
sub _set_strict {
	my ($self) = @_;
	
	# fitness
	my $fitness = $self->fitness();
	my $replacement = sub {
		my @tmp = @{$_[1]};
		my $ret = $fitness->(@_);
		my @cmp = @{$_[1]};
		die qq/Chromosome was modified in a fitness function from "@tmp" to "@{$_[1]}"!\n/ unless compare(\@tmp, \@cmp);
		return $ret;
	};
	$self->fitness($replacement);
}
#=======================================================================
sub _fitness_cached {
	my ($self, $chromosome) = @_;
	
	#my $key = md5_hex(${tied(@$chromosome)});
	my $key = md5_hex( $self->_package ? md5_hex( ${ tied( @$chromosome ) } ) : join( q[:], @$chromosome ) );
	return $_Cache->{$key} if exists $_Cache->{$key};
	
	$_Cache->{$key} = $self->_fitness_real->($self, $chromosome);
	return $_Cache->{$key};
}
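# hedged illustration (not in the original source): for a packed (typed)
# population, the cache key above hashes the underlying packed string;
# otherwise it hashes the joined genes, e.g. md5_hex('0:1:1:0')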
#=======================================================================
sub _init_cache {
	my ($self) = @_;
		
	$self->_fitness_real($self->fitness);
	$self->fitness(\&_fitness_cached);
	return;
}
#=======================================================================
sub _check_data_ref {
	my ($self, $data_org) = @_;
	my $data = clone($data_org);
	my $ars;
	for(0..$#$data){
		next if $ars->{$data->[$_]};

lib/AI/Genetic/Pro.pm

	return $data;
}
#=======================================================================
# we have to find a constant C to (in some cases) increase the value of the range
# due to the design model
sub _find_fix_range {
	my ($self, $data) = @_;

	for my $idx (0..$#$data){
		if($data->[$idx]->[1] < 1){ 
			my $const = 1 - $data->[$idx]->[1];

lib/AI/Genetic/Pro.pm

	}

	return $data;
}
#=======================================================================
sub init { 
	my ( $self, $data ) = @_;
	
	croak q/You have to pass some data to "init"!/ unless $data;
	#-------------------------------------------------------------------
	$self->generation(0);

lib/AI/Genetic/Pro.pm

	else{ for(@{$self->_translations}){ $size = $_->[2] if $_->[2] > $size; } }		# Provisional patch for rangevector values truncated to signed  8-bit quantities. Thx to Tod Hagan

	my $package = get_package_by_element_size($size);
	$self->_package($package);

	my $length = ref $data ? sub { $#$data; } : sub { $data - 1 };
	if($self->variable_length){
		$length = ref $data ? sub { 1 + int( rand( $#{ $self->_init } ) ); } : sub { 1 + int( rand( $self->_init - 1) ); };
	}

	$self->_length( $length );

	$self->chromosomes( [ ] );

lib/AI/Genetic/Pro.pm

	$self->_calculate_fitness_all();
}
#=======================================================================
# SAVE / LOAD ##########################################################
#=======================================================================
sub spew {
	#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
	STORABLE->use( qw( store retrieve freeze thaw ) ) or croak(q/You need "/.STORABLE.q/" module to save a state of "/.__PACKAGE__.q/"!/);
	$Storable::Deparse = 1;
	$Storable::Eval = 1;
	#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

lib/AI/Genetic/Pro.pm

	}
	
	return $clone;
}
#=======================================================================
sub slurp {
	my ( $self, $dump ) = @_;

	if( my $typ = $self->_package ){ 
		@{ $dump->{ chromosomes } } = map {
			my $arr = $typ->make_with_packed( $_ );

lib/AI/Genetic/Pro.pm

    %$self = %$dump;
    
	return 1;
}
#=======================================================================
sub save { 
	my ( $self, $file ) = @_;
	
	croak(q/You have to specify file!/) unless defined $file;
	
	store( $self->spew, $file );
}
#=======================================================================
sub load { 
	#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
	STORABLE->use( qw( store retrieve freeze thaw ) ) or croak(q/You need "/.STORABLE.q/" module to load a state of "/.__PACKAGE__.q/"!/);	
	$Storable::Deparse = 1;
	$Storable::Eval = 1;
	#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

lib/AI/Genetic/Pro.pm

	return $self->slurp( $clone );
}
#=======================================================================
# CHARTS ###############################################################
#=======================================================================
sub chart { 
	GD->require or croak(q/You need "/.GD.q/" module to draw chart of evolution!/);	
	my ($self, %params) = (shift, @_);

	my $graph = GD()->new(($params{-width} || 640), ($params{-height} || 480));

lib/AI/Genetic/Pro.pm

    return 1;
}
#=======================================================================
# TRANSLATIONS #########################################################
#=======================================================================
sub as_array_def_only {
	my ($self, $chromosome) = @_;
	
	return $self->as_array($chromosome) 
		if not $self->variable_length or $self->variable_length < 2;
	

lib/AI/Genetic/Pro.pm

		return @array if wantarray;
		return \@array;
	}
}
#=======================================================================
sub as_array {
	my ($self, $chromosome) = @_;
	
	if($self->type eq q/bitvector/){
		# This could lead to an internal error, because of the underlying Tie::Array::Packed
		#return @$chromosome if wantarray;

lib/AI/Genetic/Pro.pm

		return @array if wantarray;
		return \@array;
	}
}
#=======================================================================
sub as_string_def_only {	
	my ($self, $chromosome) = @_;
	
	return $self->as_string($chromosome) 
		if not $self->variable_length or $self->variable_length < 2;

lib/AI/Genetic/Pro.pm

	
	return join(q//, @$array) if $self->type eq q/bitvector/;
	return join(q/___/, @$array);
}
#=======================================================================
sub as_string {	
	return join(q//, @{$_[1]}) if $_[0]->type eq q/bitvector/;
	return 	join(q/___/, map { defined $_ ? $_ : q/ / } $_[0]->as_array($_[1]));
}
#=======================================================================
sub as_value { 
	my ($self, $chromosome) = @_;
	croak(q/You MUST call 'as_value' as method of 'AI::Genetic::Pro' object./)
		unless defined $_[0] and ref $_[0] and ( ref $_[0] eq 'AI::Genetic::Pro' or ref $_[0] eq 'AI::Genetic::Pro::MCE');
	croak(q/You MUST pass 'AI::Genetic::Pro::Chromosome' object to 'as_value' method./) 
		unless defined $_[1] and ref $_[1] and ref $_[1] eq 'AI::Genetic::Pro::Chromosome';
	return $self->fitness->($self, $chromosome);  
}
#=======================================================================
# ALGORITHM ############################################################
#=======================================================================
sub _calculate_fitness_all {
	my ($self) = @_;
	
	$self->_fitness( { } );
	$self->_fitness->{$_} = $self->fitness()->($self, $self->chromosomes->[$_]) 
		for 0..$#{$self->chromosomes};

lib/AI/Genetic/Pro.pm

#	$self->chromosomes(\@chromosomes);

	return;
}
#=======================================================================
sub _select_parents {
	my ($self) = @_;
	unless($self->_selector){
		croak "You must specify a selection strategy!"
			unless defined $self->selection;
		my @tmp = @{$self->selection};

lib/AI/Genetic/Pro.pm

	$self->_parents($self->_selector->run($self));
	
	return;
}
#=======================================================================
sub _crossover {
	my ($self) = @_;
	
	unless($self->_strategist){
		my @tmp = @{$self->strategy};
		my $strategist = q/AI::Genetic::Pro::Crossover::/ . shift @tmp;

lib/AI/Genetic/Pro.pm

	$self->chromosomes( $a );
	
	return;
}
#=======================================================================
sub _mutation {
	my ($self) = @_;
	
	unless($self->_mutator){
		my $mutator = q/AI::Genetic::Pro::Mutation::/ . ucfirst(lc($self->type));
		unless($mutator->require){

lib/AI/Genetic/Pro.pm

	}
	
	return $self->_mutator->run($self);
}
#=======================================================================
sub _save_history {
	my @tmp;
	if($_[0]->history){ @tmp = $_[0]->getAvgFitness; }
	else { @tmp = (undef, undef, undef); }
	
	push @{$_[0]->_history->[0]}, $tmp[0]; 
	push @{$_[0]->_history->[1]}, $tmp[1];
	push @{$_[0]->_history->[2]}, $tmp[2];
	return 1;
}
#=======================================================================
sub inject {
	my ($self, $candidates) = @_;
	
	for(@$candidates){
		push @{$self->chromosomes}, 
			AI::Genetic::Pro::Chromosome->new_from_data($self->_translations, $self->type, $self->_package, $_, $self->_fix_range);

lib/AI/Genetic/Pro.pm

	$self->population( $self->population + scalar( @$candidates ) );

	return 1;
}
#=======================================================================
sub _state {
	my ( $self ) = @_;
	
	my @res;
	
	if( $self->_package ){

lib/AI/Genetic/Pro.pm

	}
	
	return \@res;
}
#=======================================================================
sub evolve {
	my ($self, $generations) = @_;

	# generations must be defined; fall back to -1 when none is given
	$generations ||= -1;
	

lib/AI/Genetic/Pro.pm

	}
}
#=======================================================================
# ALIASES ##############################################################
#=======================================================================
sub people { $_[0]->chromosomes() }
#=======================================================================
sub getHistory { $_[0]->_history()  }
#=======================================================================
sub mutProb { shift->mutation(@_) }
#=======================================================================
sub crossProb { shift->crossover(@_) }
#=======================================================================
sub intType { shift->type() }
#=======================================================================
# STATS ################################################################
#=======================================================================
sub getFittest_as_arrayref { 
	my ($self, $n, $uniq) = @_;
	$n ||= 1;
	
	$self->_calculate_fitness_all() unless scalar %{ $self->_fitness };
	my @keys = sort { $self->_fitness->{$a} <=> $self->_fitness->{$b} } 0..$#{$self->chromosomes};

lib/AI/Genetic/Pro.pm

	
	$n = scalar @keys if $n > scalar @keys;
	return [ reverse @{$self->chromosomes}[ splice @keys, $#keys - $n + 1, $n ] ];
}
#=======================================================================
sub getFittest { return wantarray ? @{ shift->getFittest_as_arrayref(@_) } : shift @{ shift->getFittest_as_arrayref(@_) }; }
#=======================================================================
sub getAvgFitness {
	my ($self) = @_;
	
	my @minmax = minmax values %{$self->_fitness};
	my $mean = sum(values %{$self->_fitness}) / scalar values %{$self->_fitness};
	return $minmax[1], int($mean), $minmax[0];

lib/AI/Genetic/Pro.pm


=head1 SYNOPSIS

    use AI::Genetic::Pro;
    
    sub fitness {
        my ($ga, $chromosome) = @_;
        return oct('0b' . $ga->as_string($chromosome)); 
    }
    
    sub terminate {
        my ($ga) = @_;
        my $result = oct('0b' . $ga->as_string($ga->getFittest));
        return $result == 4294967295 ? 1 : 0;
    }
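
A hedged completion of the truncated synopsis, based on the constructor options and methods visible in the code above (the option values are illustrative, not canonical):

    my $ga = AI::Genetic::Pro->new(
        -fitness    => \&fitness,        # fitness function
        -terminate  => \&terminate,      # terminate function
        -type       => 'bitvector',      # type of chromosomes
        -population => 1000,             # population size
        -crossover  => 0.9,              # probability of crossover
        -mutation   => 0.01,             # probability of mutation
        -selection  => [ 'Roulette' ],   # selection strategy
        -strategy   => [ 'Points', 2 ],  # crossover strategy
        -cache      => 1,                # cache fitness results
    );

    $ga->init(32);       # population of 32-bit vectors
    $ga->evolve(10);     # evolve 10 generations
    print "Best: ", $ga->as_value($ga->getFittest), "\n";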
    



AI-Image


lib/AI/Image.pm

$VERSION = eval $VERSION;

my $http = HTTP::Tiny->new;

# Create Image object
sub new {
    my $class = shift;
    my %attr  = @_;

    $attr{'error'}      = '';

lib/AI/Image.pm

my %header = (
    'OpenAI' => \&_get_header_openai,   # code ref; calling the sub here would run it without an object
);

# Returns true if last operation was success
sub success {
    my $self = shift;
    return !$self->{'error'};
}

# Returns error if last operation failed
sub error {
    my $self = shift;
    return $self->{'error'};
}

# Header for calling OpenAI
sub _get_header_openai {
    my $self = shift;
    $self->{'key'} = '' unless defined $self->{'key'};
    return {
         'Authorization' => 'Bearer ' . $self->{'key'},
         'Content-type'  => 'application/json'
     };
 }
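
# Hedged usage sketch (not part of this file): ties together new(),
# image(), success() and error(); 'key' and 'api' are attributes visible
# in the code above, but the exact constructor arguments are an assumption.
#
#   my $ai = AI::Image->new(
#       'key' => $ENV{OPENAI_API_KEY},
#       'api' => 'OpenAI',
#   );
#   my $url = $ai->image('A corgi watching a solar eclipse');
#   print $ai->success ? $url : $ai->error;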

# Get URL from image prompt
sub image {
    my ($self, $prompt) = @_;

    my $response = $http->post($url{$self->{'api'}}, {
         'headers' => {
             'Authorization' => 'Bearer ' . $self->{'key'},



AI-LibNeural


LibNeural.pm

our @EXPORT = qw(
);

our $VERSION = '0.02';

sub AUTOLOAD {
    # This AUTOLOAD is used to 'autoload' constants from the constant()
    # XS function.  If a constant is not found then control is passed
    # to the AUTOLOAD in AutoLoader.
    my $constname;
    our $AUTOLOAD;

LibNeural.pm

    }
    {
	no strict 'refs';
	# Fixed between 5.005_53 and 5.005_61
#	if ($] >= 5.00561) {
#	    *$AUTOLOAD = sub () { $val };
#	}
#	else {
	    *$AUTOLOAD = sub { $val };
#	}
    }
    goto &$AUTOLOAD;
}



AI-Logic-AnswerSet


lib/AI/Logic/AnswerSet.pm

	
);

our $VERSION = '0.02';

sub executeFromFileAndSave {		#Executes DLV with a file as input and saves the output in another file

	open DLVW, ">>", "$_[1]";
	print DLVW $_[2];
	close DLVW;

lib/AI/Logic/AnswerSet.pm

	open(STDOUT,">&SAVESTDOUT"); #close file and restore STDOUT
	close OUTPUT;

}

sub executeAndSave {	#Executes DLV and saves the output of the program written by the user in a file

	open(SAVESTDOUT, ">&STDOUT") or die "Can't save STDOUT: $!\n";
	open(STDOUT, ">$_[0]") or die "Can't open STDOUT to $_[0]", "$!\n";

	my @args = ("./dlv --");

lib/AI/Logic/AnswerSet.pm



}


sub iterativeExec {	# Executes an input program with several instances and stores them in a bidimensional array

	my @input = @_;

	my @returned_value;

lib/AI/Logic/AnswerSet.pm


	return @returned_value;

}

sub singleExec {	 # Executes a single input program or opens the DLV terminal and stores it in an array

	my @input = @_;
	my @returned_value;

	if(@input) {

lib/AI/Logic/AnswerSet.pm

	}

	return @returned_value;
}

sub selectOutput {	# Select one of the outputs returned by the iterative execution of multiple input programs

	my @stdoutput = @{$_[0]};
	my $n = $_[1];

	return @{$stdoutput[$n]};
	
}

sub getFacts {	# Return the facts of the input program

	my $input = shift;

	my @isAFile = stat($input);

lib/AI/Logic/AnswerSet.pm

	}
	return @facts;
	
}

sub addCode {	#Adds code to input

	my $program = $_[0];
	my $code = $_[1];
	my @isAFile = stat($program);

lib/AI/Logic/AnswerSet.pm

		$$program = "$$program $code";
	}
		
}

sub getASFromFile {	#Gets the Answer Set from the file where the output was saved

	open RESULT, "<", "$_[0]" or die $!;
	my @result = <RESULT>;
	my @arr;
	foreach my $line (@result) {

lib/AI/Logic/AnswerSet.pm


	close RESULT;
	return @arr;
}

sub getAS { #Returns the Answer Sets from the array where the output was saved

	my @result = @_;
	my @arr;

	foreach my $line (@result) {

lib/AI/Logic/AnswerSet.pm

	}

	return @arr;
}

sub statistics {	# Returns an array of hashes in which the statistics of every predicate of every answer set are stored
			# If a comparison condition (on the number of predicates) is specified, it returns the answer sets
			# that satisfy that condition

	my @as = @{$_[0]};
	my @pred = @{$_[1]};

lib/AI/Logic/AnswerSet.pm

	}

	return @stat;
}

sub _evaluate {		#private use only

	my $value = shift;
	my $num = shift;
	my $operator = shift;

lib/AI/Logic/AnswerSet.pm

		return 0;
	}
	return 0;
}

sub mapAS {	#Mapping of the Answer Sets in an array of hashes

	my $countAS = 0;

	my @answerSets = @{$_[0]};

lib/AI/Logic/AnswerSet.pm

	}
	return @pred;

}

sub _existsPred {	#Verifies the existence of a predicate (private use only)

	my $pred = $_[0];
	my @predList = @{$_[1]};

	my $size = @predList;

lib/AI/Logic/AnswerSet.pm

	}
	return 0;
		
}

sub getPred {	#Returns the predicates from the array of hashes

	my @pr = @{$_[0]};
	return @{$pr[$_[1]]{$_[2]}};
}

sub getProjection {	#Returns the values selected by the user

	my @pr = @{$_[0]};
	my @projection;

	my @res = @{$pr[$_[1]]{$_[2]}};

lib/AI/Logic/AnswerSet.pm

	}

	return @projection;
}

sub createNewFile {

	my $file = $_[0];
	my $code = $_[1];

	open FILE, ">", $file;
	print FILE "$code\n";
	close FILE;

}
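
# Hedged usage sketch combining the subs above (the program file name and
# the 'person' predicate are hypothetical; DLV must be available to run):
#
#   my @out    = AI::Logic::AnswerSet::singleExec('program.dl');
#   my @models = AI::Logic::AnswerSet::getAS(@out);
#   my @mapped = AI::Logic::AnswerSet::mapAS(\@models);
#   my @people = AI::Logic::AnswerSet::getPred(\@mapped, 0, 'person');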

sub addFacts {

	my $name = $_[0];
	my @facts = @{$_[1]};
	my $append = $_[2];
	my $filename = $_[3];



AI-ML


inc/MyBuilder.pm

use Path::Tiny;

my $EXTRA_O_FLAGS = "";
my $EXTRA_FLAGS = "-lblas -llapack";

sub ACTION_code {
    my $self = shift;

    $EXTRA_O_FLAGS .= " -DUSE_REAL" unless exists $self->args->{'with-float'};

    $self->update_XS("XS/ML.xs.inc");

inc/MyBuilder.pm

    $self->dispatch("compile_xs");

    $self->SUPER::ACTION_code;
}

sub update_XS {
    my ($self, $file) = @_;
    my $output = $file;
    $output =~ s/\.inc$//;

    open my $i_fh, "<", $file   or die "$!";

inc/MyBuilder.pm

    }
    close $o_fh;
    close $i_fh;
}

sub ACTION_create_objects {
    my $self = shift;
    my $cbuilder = $self->cbuilder;

    my $c_progs = $self->rscan_dir("C", qr/\.c$/);
    for my $file (@$c_progs) {

inc/MyBuilder.pm

            include_dirs => ["."]
        );
    }
}
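
# Hedged sketch of how a Build.PL would typically wire up this subclass
# (standard Module::Build usage; the module_name/license values are assumptions):
#
#   use MyBuilder;
#   MyBuilder->new(
#       module_name => 'AI::ML',
#       license     => 'perl',
#   )->create_build_script;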

sub ACTION_compile_xs {
    my $self = shift;
    my $cbuilder = $self->cbuilder;

    my $archdir = path($self->blib, "arch", "auto", "AI", "ML");
    $archdir->mkpath unless -d $archdir;



AI-MXNet-Gluon-Contrib


lib/AI/MXNet/Gluon/Contrib/NN/BasicLayers.pm


    Example:

        $net = nn->Concurrent();
        # use net's name_scope to give child blocks appropriate names.
        $net->name_scope(sub {
            $net->add(nn->Dense(10, activation=>'relu'));
            $net->add(nn->Dense(20));
            $net->add(nn->Identity());
        });

lib/AI/MXNet/Gluon/Contrib/NN/BasicLayers.pm


    Example:

        $net = nn->HybridConcurrent();
        # use net's name_scope to give child blocks appropriate names.
        $net->name_scope(sub {
            $net->add(nn->Dense(10, activation=>'relu'));
            $net->add(nn->Dense(20));
            $net->add(nn->Identity());
        });

lib/AI/MXNet/Gluon/Contrib/NN/BasicLayers.pm


    Example:

        $net = nn->HybridConcurrent();
        # use net's name_scope to give child Blocks appropriate names.
        $net->name_scope(sub {
            $net->add(nn->Dense(10, activation=>'relu'));
            $net->add(nn->Dense(20));
            $net->add(nn->Identity());
        });
=cut

lib/AI/MXNet/Gluon/Contrib/NN/BasicLayers.pm

has 'output_dim'         => (is => 'ro', isa => 'Int', required => 1);
has 'dtype'              => (is => 'ro', isa => 'Dtype', default => 'float32');
has 'weight_initializer' => (is => 'ro', isa => 'Maybe[Initializer]');
method python_constructor_arguments() { [qw/input_dim output_dim dtype weight_initializer/] }

sub BUILD
{
    my $self = shift;
    $self->_kwargs({
        input_dim => $self->input_dim, 
        output_dim => $self->output_dim,

lib/AI/MXNet/Gluon/Contrib/NN/BasicLayers.pm

{
    my $weight = $self->weight->row_sparse_data($x);
    return AI::MXNet::NDArray->Embedding($x, $weight, { name=>'fwd', %{ $self->_kwargs } });
}

use overload '""' => sub {
    my $self = shift;
    $self->_class_name.'('.$self->input_dim.' -> '.$self->output_dim.', '.$self->dtype.')';
};

__PACKAGE__->register('AI::MXNet::Gluon::NN');



AI-MXNet-Gluon-ModelZoo


examples/image_classification.pl

GetOptions(
    ## my Pembroke Welsh Corgi Kyuubi, enjoying the solar eclipse of August 21, 2017
    'image=s' => \(my $image = 'http://apache-mxnet.s3-accelerate.dualstack.amazonaws.com/'.
                               'gluon/dataset/kyuubi.jpg'),
    'model=s' => \(my $model = 'resnet152_v2'),
    'help'    => sub { HelpMessage(0) },
) or HelpMessage(1);

## get a pretrained model (download parameters file if necessary)
my $net = get_model($model, pretrained => 1);



AI-MXNet


examples/calculator.pl

use warnings;
use AI::MXNet ('mx');

## preparing the samples
## to train our network
sub samples {
    my($batch_size, $func) = @_;
    # get samples
    my $n = 16384;
    ## creates a pdl with $n rows and two columns with random
    ## floats in the range between 0 and 1

examples/calculator.pl

        label => $validation_label,
    ));
}

## the network model
sub nn_fc {
    my $data = mx->sym->Variable('data');
    my $ln = mx->sym->exp(mx->sym->FullyConnected(
        data => mx->sym->log($data),
        num_hidden => 1,
    ));

examples/calculator.pl

	num_hidden => 1
    );
    return mx->sym->MAERegressionOutput(data => $fc, name => 'softmax');
}

sub learn_function {
    my(%args) = @_;
    my $func = $args{func};
    my $batch_size = $args{batch_size}//128;
    my($train_iter, $eval_iter) = samples($batch_size, $func);
    my $sym = nn_fc();

examples/calculator.pl

    my ($arg_params) = $model->get_params;
    for my $k (sort keys %$arg_params)
    {
	print "$k -> ". $arg_params->{$k}->aspdl."\n";
    }
    return sub {
        my($n, $m) = @_;
        return $model->predict(mx->io->NDArrayIter(
            batch_size => 1,
            data => PDL->new([[ $n, $m ]]),
        ))->aspdl->list;
    };
}

my $add = learn_function(func => sub {
    my($n, $m) = @_;
    return $n + $m;
});
my $sub = learn_function(func => sub {
    my($n, $m) = @_;
    return $n - $m;
}, batch_size => 50, epoch => 40);
my $mul = learn_function(func => sub {
    my($n, $m) = @_;
    return $n * $m;
}, batch_size => 50, epoch => 40);
my $div = learn_function(func => sub {
    my($n, $m) = @_;
    return $n / $m;
}, batch_size => 10, epoch => 80);
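
## hedged usage sketch: each closure returned by learn_function() above
## takes two inputs (the nets are trained on values in [0,1]) and returns
## the model's prediction, so the learned operations can be called directly
printf "0.3 + 0.4 ~ %.3f\n", $add->(0.3, 0.4);
printf "0.8 - 0.5 ~ %.3f\n", $sub->(0.8, 0.5);
printf "0.5 * 0.4 ~ %.3f\n", $mul->(0.5, 0.4);
printf "0.2 / 0.4 ~ %.3f\n", $div->(0.2, 0.4);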




AI-MaxEntropy


inc/Module/AutoInstall.pm


# See if it's a testing or non-interactive session
_accept_default( $ENV{AUTOMATED_TESTING} or ! -t STDIN ); 
_init();

sub _accept_default {
    $AcceptDefault = shift;
}

sub missing_modules {
    return @Missing;
}

sub do_install {
    __PACKAGE__->install(
        [
            $Config
            ? ( UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} )
            : ()

inc/Module/AutoInstall.pm

        @Missing,
    );
}

# initialize various flags, and/or perform install
sub _init {
    foreach my $arg (
        @ARGV,
        split(
            /[\s\t]+/,
            $ENV{PERL_AUTOINSTALL} || $ENV{PERL_EXTUTILS_AUTOINSTALL} || ''

inc/Module/AutoInstall.pm

        }
    }
}

# overrides MakeMaker's prompt() to automatically accept the default choice
sub _prompt {
    goto &ExtUtils::MakeMaker::prompt unless $AcceptDefault;

    my ( $prompt, $default ) = @_;
    my $y = ( $default =~ /^[Yy]/ );

inc/Module/AutoInstall.pm

    print "$default\n";
    return $default;
}

# the workhorse
sub import {
    my $class = shift;
    my @args  = @_ or return;
    my $core_all;

    print "*** $class version " . $class->VERSION . "\n";

inc/Module/AutoInstall.pm

    *{'main::WriteMakefile'} = \&Write if caller(0) eq 'main';
}

# Check to see if we are currently running under CPAN.pm and/or CPANPLUS;
# if we are, then we simply let it take care of our dependencies
sub _check_lock {
    return unless @Missing;

    if ($ENV{PERL5_CPANPLUS_IS_RUNNING}) {
        print <<'END_MESSAGE';

inc/Module/AutoInstall.pm


    close LOCK;
    return;
}

sub install {
    my $class = shift;

    my $i;    # used below to strip leading '-' from config keys
    my @config = ( map { s/^-// if ++$i; $_ } @{ +shift } );

inc/Module/AutoInstall.pm

    close FAILED if $args{do_once};

    return @installed;
}

sub _install_cpanplus {
    my @modules   = @{ +shift };
    my @config    = _cpanplus_config( @{ +shift } );
    my $installed = 0;

    require CPANPLUS::Backend;

inc/Module/AutoInstall.pm

    }

    return $installed;
}

sub _cpanplus_config {
	my @config = ();
	while ( @_ ) {
		my ($key, $value) = (shift(), shift());
		if ( $key eq 'prerequisites_policy' ) {
			if ( $value eq 'follow' ) {

inc/Module/AutoInstall.pm

		}
	}
	return @config;
}

sub _install_cpan {
    my @modules   = @{ +shift };
    my @config    = @{ +shift };
    my $installed = 0;
    my %args;

inc/Module/AutoInstall.pm

    }

    return $installed;
}

sub _has_cpanplus {
    return (
        $HasCPANPLUS = (
            $INC{'CPANPLUS/Config.pm'}
              or _load('CPANPLUS::Shell::Default')
        )
    );
}

# make guesses on whether we're under the CPAN installation directory
sub _under_cpan {
    require Cwd;
    require File::Spec;

    my $cwd  = File::Spec->canonpath( Cwd::cwd() );
    my $cpan = File::Spec->canonpath( $CPAN::Config->{cpan_home} );

    return ( index( $cwd, $cpan ) > -1 );
}

sub _update_to {
    my $class = __PACKAGE__;
    my $ver   = shift;

    return
      if defined( _version_check( _load($class), $ver ) );  # no need to upgrade

inc/Module/AutoInstall.pm

*** Cannot bootstrap myself. :-( Installation terminated.
.
}

# check if we're connected to some host, using inet_aton
sub _connected_to {
    my $site = shift;

    return (
        ( _load('Socket') and Socket::inet_aton($site) ) or _prompt(
            qq(

inc/Module/AutoInstall.pm

          ) =~ /^[Yy]/
    );
}

# check if a directory is writable; may create it on demand
sub _can_write {
    my $path = shift;
    mkdir( $path, 0755 ) unless -e $path;

    return 1 if -w $path;

inc/Module/AutoInstall.pm

==> Should we try to install the required module(s) anyway?), 'n'
    ) =~ /^[Yy]/;
}

# load a module and return the version it reports
sub _load {
    my $mod  = pop;    # class/instance doesn't matter
    my $file = $mod;

    $file =~ s|::|/|g;
    $file .= '.pm';

inc/Module/AutoInstall.pm

    local $@;
    return eval { require $file; $mod->VERSION } || ( $@ ? undef: 0 );
}

# Load CPAN.pm and its configuration
sub _load_cpan {
    return if $CPAN::VERSION;
    require CPAN;
    if ( $CPAN::HandleConfig::VERSION ) {
        # Newer versions of CPAN have a HandleConfig module
        CPAN::HandleConfig->load;

inc/Module/AutoInstall.pm

        CPAN::Config->load;
    }
}

# compare two versions, either use Sort::Versions or plain comparison
sub _version_check {
    my ( $cur, $min ) = @_;
    return unless defined $cur;

    $cur =~ s/\s+$//;

inc/Module/AutoInstall.pm

    local $^W = 0;    # shuts off 'not numeric' bugs
    return ( $cur >= $min ? $cur : undef );
}
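
# hedged illustration of the plain-comparison branch above:
# _version_check('0.45', '0.40') returns '0.45', while
# _version_check('0.35', '0.40') returns undef (installed version too old)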

# nothing; this usage is deprecated.
sub main::PREREQ_PM { return {}; }

sub _make_args {
    my %args = @_;

    $args{PREREQ_PM} = { %{ $args{PREREQ_PM} || {} }, @Existing, @Missing }
      if $UnderCPAN or $TestOnly;

inc/Module/AutoInstall.pm


    return %args;
}

# a wrapper to ExtUtils::MakeMaker::WriteMakefile
sub Write {
    require Carp;
    Carp::croak "WriteMakefile: Need even number of args" if @_ % 2;

    if ($CheckOnly) {
        print << ".";

inc/Module/AutoInstall.pm

.

    return 1;
}

sub postamble {
    $PostambleUsed = 1;

    return << ".";

config :: installdeps



AI-MegaHAL


lib/AI/MegaHAL.pm

	     megahal_cleanup);

@ISA = qw(Exporter DynaLoader);
$VERSION = '0.08';

sub AUTOLOAD {
    # This AUTOLOAD is used to 'autoload' constants from the constant()
    # XS function.  If a constant is not found then control is passed
    # to the AUTOLOAD in AutoLoader.

    my $constname;

lib/AI/MegaHAL.pm

    }
    {
	no strict 'refs';
	# Fixed between 5.005_53 and 5.005_61
	if ($] >= 5.00561) {
	    *$AUTOLOAD = sub () { $val };
	}
	else {
	    *$AUTOLOAD = sub { $val };
	}
    }
    goto &$AUTOLOAD;
}

sub new {
    my ($class,%args) = @_;
    my $self;

    # Bless ourselves into the AI::MegaHAL class.
    $self = bless({ },$class);

lib/AI/MegaHAL.pm

    $self->_initialize();

    return $self;
}

sub initial_greeting {
    my $self = shift;

    return megahal_initial_greeting();
}

sub do_reply {
    my ($self,$text) = @_;

    return megahal_do_reply($text,0);
}

sub learn {
    my ($self,$text) = @_;

    return megahal_learn($text,0);
}

sub _initialize {
    my $self = shift;

    megahal_initialize();
    return;
}

sub _cleanup {
    my $self = shift;

    megahal_cleanup();
    return;
}

sub DESTROY {
    my $self = shift;

    $self->_cleanup() if($self->{'AutoSave'});
    return;
}
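
# Hedged usage sketch based on the methods above ('AutoSave' is the only
# constructor option visible in this excerpt; other options may exist):
#
#   my $megahal = AI::MegaHAL->new('AutoSave' => 1);
#   print $megahal->initial_greeting(), "\n";
#   print $megahal->do_reply('Hello!'), "\n";
#   $megahal->learn('MegaHAL builds its replies from what it has read.');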



AI-MicroStructure


bin/from-folder.pl


GetOptions (\%opts, "cache_file=s");



sub translate
{
  return unless -f;
  (my $rel_name = $File::Find::name) =~ s{.*/}{}xs;
   my $name = md5_hex($rel_name);
    my $go = 0;

bin/from-folder.pl




find(\&translate, "$TOP/./");

sub translate {
  return unless -f;
  (my $rel_name = $File::Find::name) =~ s{.*/}{}xs;

  $set->insert(AI::MicroStructure::Object->new($rel_name));



AI-NNEasy


lib/AI/NNEasy.pm


  
@ISA = qw(Class::HPLOO::Base UNIVERSAL) ;

  
my $CLASS = 'AI::NNEasy' ; sub __CLASS__ { 'AI::NNEasy' } ;

  
use Class::HPLOO::Base ;

  use AI::NNEasy::NN ;
  use Storable qw(freeze thaw) ;
  use Data::Dumper ;
  


  sub NNEasy { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $file = shift(@_) ;
    my @out_types = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $error_ok = shift(@_) ;

lib/AI/NNEasy.pm

    $this->{ERROR_OK} = $error_ok ;
    
    return $this ;
  }
  
  sub _layer_conf { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $def = shift(@_) ;
    my $conf = shift(@_) ;
    

lib/AI/NNEasy.pm

    foreach my $Key ( keys %$layer_conf ) { $$layer_conf{$Key} = $$conf{$Key} if exists $$conf{$Key} ;}

    return $layer_conf ;
  }
  
  sub reset_nn { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    
    $this->{NN} = AI::NNEasy::NN->new( @{ $this->{NN_ARGS} } ) ;
  }
  
  sub load { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $file = shift(@_) ;
    
    $file ||= $this->{FILE} ;

lib/AI/NNEasy.pm

      }
    }
    return ;
  }
  
  sub save { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $file = shift(@_) ;
    
    $file ||= $this->{FILE} ;

lib/AI/NNEasy.pm

    open (my $fh,">$this->{FILE}") ;
    print $fh $dump ;
    close ($fh) ;
  }
  
  sub learn { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $in = shift(@_) ;
    my $out = shift(@_) ;
    my $n = shift(@_) ;

lib/AI/NNEasy.pm

    return $err ;
  }
  
  *_learn_set_get_output_error = \&_learn_set_get_output_error_c ;
  
  sub _learn_set_get_output_error_pl { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $set = shift(@_) ;
    my $error_ok = shift(@_) ;
    my $ins_ok = shift(@_) ;

lib/AI/NNEasy.pm

  
  
  
  
    
  sub learn_set { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $ins_ok = shift(@_) ;
    my $limit = shift(@_) ;

lib/AI/NNEasy.pm

      
    }
    
  }
  
  sub get_set_error { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $ins_ok = shift(@_) ;
    

lib/AI/NNEasy.pm

    
    $err /= $ins_ok ;
    return $err ;
  }
  
  sub run { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $in = shift(@_) ;
    
    $this->{NN}->run($in) ;
    my $out = $this->{NN}->output() ;
    return $out ;
  }
  
  sub run_get_winner { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    
    my $out = $this->run(@_) ;
    

lib/AI/NNEasy.pm

    }
    
    return $out ;
  }
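
  # Hedged usage sketch (XOR is this distribution's stock example; the
  # first three arguments follow NNEasy() above: file, output types,
  # acceptable error; the trailing network dimensions are an assumption):
  #
  #   my $nn  = AI::NNEasy->new('xor.nne', [0, 1], 0.1, 2, 1);
  #   $nn->learn_set([ [0,0] => [0], [0,1] => [1], [1,0] => [1], [1,1] => [0] ]);
  #   my $out = $nn->run_get_winner([1, 0]);   # should approach [1]
  #   $nn->save;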
  
  sub out_type_winner { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $val = shift(@_) ;
    
    my ($out_type , %err) ;

lib/AI/NNEasy.pm


L<Class::HPLOO> enables this kind of syntax for Perl classes:

  class Foo {
    
    sub bar($x , $y) {
      $this->add($x , $y) ;
    }
    
    sub[C] int add( int x , int y ) {
      int res = x + y ;



AI-NNFlex


examples/bp.pl


#==============================================================
#********** THIS IS THE MAIN PROGRAM **************************
#==============================================================

sub main
 {

 # initiate the weights
  initWeights();

examples/bp.pl





#***********************************
sub calcNet()
 {
    #calculate the outputs of the hidden neurons
    #the hidden neurons are tanh

    for(my $i = 0;$i<$numHidden;$i++)

examples/bp.pl

    $errThisPat = $outPred - $trainOutput[$patNum];
 }


#************************************
 sub WeightChangesHO()
 #adjust the weights hidden-output
 {
   for(my $k = 0;$k<$numHidden;$k++)
   {
    $weightChange = $LR_HO * $errThisPat * $hiddenVal[$k];

examples/bp.pl

   }
 }
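
# hedged note: the update above is the classic delta rule for the
# hidden-to-output weights (weightChange = learning rate * output error
# * hidden activation), i.e. one gradient-descent step on the output error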


#************************************
 sub WeightChangesIH()
 #adjust the weights input-hidden
 {
  for(my $i = 0;$i<$numHidden;$i++)
  {
   for(my $k = 0;$k<$numInputs;$k++)

examples/bp.pl

  }
 }


#************************************
 sub initWeights()
 {

  for(my $j = 0;$j<$numHidden;$j++)
  {
    $weightsHO[$j] = (rand() - 0.5)/2;

examples/bp.pl


 }


#************************************
 sub initData()
 {

    print "initialising data\n";

    # the data here is the XOR data

examples/bp.pl


 }


#************************************
 sub tanh()
 {


	my $x = shift;

examples/bp.pl

        }
 }


#************************************
 sub displayResults()
    {
     for(my $i = 0;$i<$numPatterns;$i++)
        {
        $patNum = $i;
        calcNet();

examples/bp.pl

        }
    }


#************************************
sub calcOverallError()
    {
     $RMSerror = 0.0;
     for(my $i = 0;$i<$numPatterns;$i++)
        {
        $patNum = $i;



AI-NaiveBayes


lib/AI/NaiveBayes.pm


with Storage(format => 'Storable', io => 'File');

has model   => (is => 'ro', isa => 'HashRef[HashRef]', required => 1);

sub train {
    my $self = shift;
    my $learner = AI::NaiveBayes::Learner->new();
    for my $example ( @_ ){
        $learner->add_example( %$example );
    }
    return $learner->classifier;
}


sub classify {
    my ($self, $newattrs) = @_;
    $newattrs or die "Missing parameter for classify()";

    my $m = $self->model;

lib/AI/NaiveBayes.pm

    rescale(\%scores);

    return AI::NaiveBayes::Classification->new( label_sums => \%scores, features => \%features );
}
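
# Hedged usage sketch of the train()/classify() API above (attribute and
# label names are illustrative):
#
#   my $classifier = AI::NaiveBayes->train(
#       { attributes => { sheep => 1, farming => 1 }, labels => ['farming'] },
#       { attributes => { vampires => 1, cult => 1 }, labels => ['vampire'] },
#   );
#   my $result = $classifier->classify({ farming => 1, sheep => 1 });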

sub rescale {
    my ($scores) = @_;

    # Scale everything back to a reasonable area in logspace (near zero), un-loggify, and normalize
    my $total = 0;
    my $max = max(values %$scores);



AI-NaiveBayes1


NaiveBayes1.pm

use vars @EXPORT_OK;

# non-exported package globals go here
use vars qw();

sub new {
  my $package = shift;
  return bless {
                attributes => [ ],
		labels     => [ ],
		attvals    => {},

NaiveBayes1.pm

		smoothing => {},
		attribute_type => {},
	       }, $package;
}

sub set_real {
    my ($self, @attr) = @_;
    foreach my $a (@attr) { $self->{attribute_type}{$a} = 'real' }
}

sub import_from_YAML {
    my $package = shift;
    my $yaml = shift;
    my $self = YAML::Load($yaml);
    return bless $self, $package;
}

sub import_from_YAML_file {
    my $package = shift;
    my $yamlf = shift;
    my $self = YAML::LoadFile($yamlf);
    return bless $self, $package;
}

# assume that a last header named 'count' holds the case counts;
# after optionally removing counts, the last header is the label
sub add_table {
    my $self = shift;
    my @atts = (); my $lbl=''; my $cnt = '';
    while (@_) {
	my $table = shift;
	if ($table =~ /^(.*)\n[ \t]*-+\n/) {

NaiveBayes1.pm

} # end of add_table

# Simplified; not generally compatible.
# Assume that the last header is the label.  The first row contains
# attribute names.
sub add_csv_file {
    my $self = shift; my $fn = shift; local *F;
    open(F,$fn) or die "Cannot open CSV file `$fn': $!";
    local $_ = <F>; my @atts = (); my $lbl=''; my $cnt = '';
    chomp; @atts = split(/\s*,\s*/, $_); $lbl = pop @atts;
    while (<F>) {

NaiveBayes1.pm

			     cases=>($cnt?$v[1]:1) );
    }
    close(F);
} # end of add_csv_file

sub drop_attributes {
    my $self = shift;
    foreach my $a (@_) {
	my @tmp = grep { $a ne $_ } @{ $self->{attributes} };
	$self->{attributes} = \@tmp;
	delete($self->{attvals}{$a});

NaiveBayes1.pm

	delete($self->{real_stat}{$a});
	delete($self->{smoothing}{$a});
    }
} # end of drop_attributes

sub add_instances {
  my ($self, %params) = @_;
  for ('attributes', 'label', 'cases') {
      die "Missing required '$_' parameter" unless exists $params{$_};
  }

NaiveBayes1.pm

      }
      $self->{stat_attributes}{$a}{$attval}{$params{label}} += $params{cases};
  }
}

sub add_instance {
    my ($self, %params) = @_; $params{cases} = 1;
    $self->add_instances(%params);
}

sub train {
    my $self = shift;
    my $m = $self->{model} = {};
    
    $m->{labelprob} = {};
    foreach my $label (keys(%{$self->{stat_labels}}))

NaiveBayes1.pm

		sqrt($m->{real_stat}{$att}{$label}{stddev} /
		     ($m->{real_stat}{$att}{$label}{count}-1)
		     );
	}
    }				# foreach real attribute
}				# end of sub train

sub predict {
  my ($self, %params) = @_;
  my $newattrs = $params{attributes} or die "Missing 'attributes' parameter for predict()";
  my $m = $self->{model};  # For convenience
  
  my %scores;

NaiveBayes1.pm

  $sumPx += $scores{$_} foreach (keys(%scores));
  $scores{$_} /= $sumPx foreach (keys(%scores));
  return \%scores;
}

sub print_model {
    my $self = shift;
    my $withcounts = '';
    if ($#_>-1 && $_[0] eq 'with counts')
    { shift @_; $withcounts = 1; }
    my $m = $self->{model};

NaiveBayes1.pm

    }

    return $r;
}

sub _append_lines {
    my @l = @_;
    my $m = 0;
    foreach (@l) { $m = length($_) if length($_) > $m }
    @l = map 
    { while (length($_) < $m) { $_.=substr($_,length($_)-1) }; $_ }
    @l;
    return @l;
}

sub labels {
  my $self = shift;
  return @{ $self->{labels} };
}

sub attributes {
  my $self = shift;
  return keys %{ $self->{stat_attributes} };
}

sub export_to_YAML {
    my $self = shift;
    require YAML;
    return YAML::Dump($self);
}

sub export_to_YAML_file {
    my $self = shift;
    my $file = shift;
    require YAML;
    YAML::DumpFile($file, $self);
}



AI-Nerl


examples/digits/digits.pl

   show784($delta(:,6));
   show784($delta(:,4));
}
#die join (',',$nncost->dims);
use PDL::Graphics2D;
sub show784{
   my $w = shift;
   $w = $w->squeeze;
   my $min = $w->minimum;
   $w -= $min;
   my $max = $w->maximum;
   $w /= $max;
   $w = $w->reshape(28,28);
   imag2d $w;
}
sub sigmoid{
   my $foo = shift;
   return 1/(1+E**-$foo);
}

sub logistic{
   #find sigmoid before calling this.
   #grad=logistic(sigmoid(foo))
   my $foo = shift;
   return $foo * (1-$foo);
}
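
# hedged sanity check of the helpers above: logistic() expects the value
# sigmoid() already produced, since d/dx sigmoid(x) = s * (1 - s)
# (E is assumed to be the constant e = exp(1), defined in elided code)
my $s = sigmoid(0);      # 0.5
my $g = logistic($s);    # 0.25, the sigmoid slope at x = 0
print "sigmoid(0) = $s, gradient = $g\n";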



AI-NeuralNet-BackProp


BackProp.pm

package AI::NeuralNet::BackProp::neuron;
	
	use strict;
	
	# Dummy constructor
    sub new {
    	bless {}, shift
	}	
	
	# Rounds floats to ints
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
      	try   { return int(sprintf("%.0f",shift)) }
      	catch { return 0 }
	}
    
	
	# Receives input from other neurons. They must
	# be registered as a synapse of this neuron for their
	# input to take effect.
	sub input {
		my $self 	 =	shift;
		my $sid		 =	shift;
		my $value	 =	shift;
		
		# We simply weight the value sent by the neuron. The neuron identifies itself to us

BackProp.pm

		$self->output() if($self->input_complete());
	}
	
	# Loops through and outputs to every neuron that this
	# neuron is registered as a synapse of.
	sub output {
		my $self	=	shift;
		my $size	=	$self->{OUTPUTS}->{SIZE} || 0;
		my $value	=	$self->get_output();
		for (0..$size-1) {
			AI::NeuralNet::BackProp::out1("Outputing to $self->{OUTPUTS}->{LIST}->[$_]->{PKG}, index $_, a value of $value with ID $self->{OUTPUTS}->{LIST}->[$_]->{ID}.\n");
			$self->{OUTPUTS}->{LIST}->[$_]->{PKG}->input($self->{OUTPUTS}->{LIST}->[$_]->{ID},$value);
		}
	}
	
	# Used internally by output().
	sub get_output {
		my $self		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $value		=	0;
		my $state		= 	0;
		my (@map,@weight);

BackProp.pm

		# Just return the $state
		return $state;
	}
	
	# Used by input() to check if all registered synapses have fired.
	sub input_complete {
		my $self		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $retvalue	=	1;
		
		# Very simple loop. Doesn't need explaining.

BackProp.pm

	}
	
	# Used to recursively adjust the weights of synapse input channels
	# to give a desired value. Designed to be called via 
	# AI::NeuralNet::BackProp::NeuralNetwork::learn().
	sub weight	{                
		my $self		=	shift;
		my $ammount		=	shift;
		my $what		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $value;
		AI::NeuralNet::BackProp::out1("Weight: ammount is $ammount, what is $what with size at $size.\n");
		      
		# Now this sub is the main cog in the learning wheel. It is called recursively on 
		# each neuron that has been bad (given incorrect output.)
		for my $i (0..$size-1) {
			$value		=	$self->{SYNAPSES}->{LIST}->[$i]->{VALUE};

if(0) {

BackProp.pm

	}
	
	# Registers some neuron as a synapse of this neuron.           
	# This is called exclusively by connect(), except
	# in initialize_group() to connect the _map() package.
	sub register_synapse {
		my $self	=	shift;
		my $synapse	=	shift;
		my $sid		=	$self->{SYNAPSES}->{SIZE} || 0;
		$self->{SYNAPSES}->{LIST}->[$sid]->{PKG}		=	$synapse;
		$self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT}		=	1.00		if(!$self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT});

BackProp.pm

	# Called via AI::NeuralNet::BackProp::NeuralNetwork::initialize_group() to 
	# form the neuron grids.
	# This just registers another synapes as a synapse to output to from this one, and
	# then we ask that synapse to let us register as an input connection and we
	# save the sid that the ouput synapse returns.
	sub connect {
		my $self	=	shift;
		my $to		=	shift;
		my $oid		=	$self->{OUTPUTS}->{SIZE} || 0;
		AI::NeuralNet::BackProp::out1("Connecting $self to $to at $oid...\n");
		$self->{OUTPUTS}->{LIST}->[$oid]->{PKG}	=	$to;

BackProp.pm

	
	use Benchmark;          
	use strict;
	
	# Returns the number of elements in an array ref, undef on error
	sub _FETCHSIZE {
		my $a=$_[0];
		my ($b,$x);
		return undef if(substr($a,0,5) ne "ARRAY");
		foreach $b (@{$a}) { $x++ };
		return $x;
	}

	# Debugging subs
	$AI::NeuralNet::BackProp::DEBUG  = 0;
	sub whowasi { (caller(1))[3] . '()' }
	sub debug { shift; $AI::NeuralNet::BackProp::DEBUG = shift || 0; } 
	sub out1  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG eq 1) }
	sub out2  { print  shift() if (($AI::NeuralNet::BackProp::DEBUG eq 1) || ($AI::NeuralNet::BackProp::DEBUG eq 2)) }
	sub out3  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG) }
	sub out4  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG eq 4) }
	
	# Rounds a floating-point to an integer with int() and sprintf()
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
      	try   { return int(sprintf("%.0f",shift)) }
      	catch { return 0 }
	}
    
	# Used to format array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $a		=	shift;

BackProp.pm

	}
	
	# Returns percentage difference between all elements of two
	# array refs of the exact same length (in elements).
	# Now calculates actual difference in numerical value.
	sub pdiff {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $a1	=	shift;
		my $a2	=	shift;
		my $a1s	=	$#{$a1}; #AI::NeuralNet::BackProp::_FETCHSIZE($a1);

BackProp.pm

		$a1s = 1 if(!$a1s);
		return sprintf("%.10f",($diff/$a1s));
	}
	
	# Returns the percentage difference between $fa and $fb, relative to $fa
	sub p {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my ($fa,$fb)=(shift,shift);
		sprintf("%.3f",((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100);
	}
	
	# This sub will take an array ref of a data set, which it expects in this format:
	#   my @data_set = (	[ ...inputs... ], [ ...outputs ... ],
	#				   				   ... rows ...
	#				   );
	#
	# This sub returns the percentage of 'forgetfulness' when the net learns all the
	# data in the set in order. Usage:
	#
	#	 learn_set(\@data,[ options ]);
	#
	# Options are options in hash form. They can be of any form that $net->learn takes.
	#
	# It returns a percentage string.
	#
	sub learn_set {
		my $self	=	shift if(substr($_[0],0,4) eq 'AI::'); 
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2-1;
		my $inc		=	$args{inc};

BackProp.pm

			$res=$data->[$row]->[0]-$self->run($data->[$row-1])->[0];
		}
		return $res;
	}
	
	# This sub will take an array ref of a data set, which it expects in this format:
	#   my @data_set = (	[ ...inputs... ], [ ...outputs ... ],
	#				   				   ... rows ...
	#				   );
	#
	# This sub returns the percentage of 'forgetfulness' when the net learns all the
	# data in the set in RANDOM order. Usage:
	#
	#	 learn_set_rand(\@data,[ options ]);
	#
	# Options are options in hash form. They can be of any form that $net->learn takes.
	#
	# It returns a true value.
	#
	sub learn_set_rand {
		my $self	=	shift if(substr($_[0],0,4) eq 'AI::'); 
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2-1;
		my $inc		=	$args{inc};

BackProp.pm

		
		return 1; 
	}

	# Returns the index of the element in array REF passed with the highest comparative value
	sub high {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1	=	shift;
		
		my ($el,$len,$tmp);
		foreach $el (@{$ref1}) {

BackProp.pm

		}
		return $tmp;
	}
	
	# Returns the index of the element in array REF passed with the lowest comparative value
	sub low {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1	=	shift;
		
		my ($el,$len,$tmp);
		foreach $el (@{$ref1}) {

BackProp.pm

		}
		return $tmp;
	}  
	
	# Returns a pcx object
	sub load_pcx {
		my $self	=	shift;
		return AI::NeuralNet::BackProp::PCX->new($self,shift);
	}	
	
	# Crunch a string of words into a map
	sub crunch {
		my $self	=	shift;
		my (@map,$ic);
		my @ws 		=	split(/[\s\t]/,shift);
		for my $a (0..$#ws) {
			$ic=$self->crunched($ws[$a]);

BackProp.pm

		return \@map;
	}
	
	# Finds if a word has been crunched.
	# Returns undef on failure, word index for success.
	sub crunched {
		my $self	=	shift;
		for my $a (0..$self->{_CRUNCHED}->{_LENGTH}-1) {
			return $a+1 if($self->{_CRUNCHED}->{LIST}->[$a] eq $_[0]);
		}
		return undef;
	}
	
	# Alias for crunched(), above
	sub word { crunched(@_) }
	
	# Uncrunches a map (array ref) into an array of words (not an array ref) and returns array
	sub uncrunch {
		my $self	=	shift;
		my $map = shift;
		my ($c,$el,$x);
		foreach $el (@{$map}) {
			$c .= $self->{_CRUNCHED}->{LIST}->[$el-1].' ';
		}
		return $c;
	}
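
	# Hedged usage sketch of crunch()/uncrunch() above ($net stands for an
	# AI::NeuralNet::BackProp object; the sentence is illustrative):
	#
	#	my $map = $net->crunch("do you like my cat");   # words -> indices
	#	print $net->uncrunch($map);                     # indices -> words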
	
	# Sets/gets randomness facter in the network. Setting a value of 0 disables random factors.
	sub random {
		my $self	=	shift;
		my $rand	=	shift;
		return $self->{random}	if(!(defined $rand));
		$self->{random}	=	$rand;
	}
	
	# Sets/gets column width for printing lists in debug modes 1,3, and 4.
	sub col_width {
		my $self	=	shift;
		my $width	=	shift;
		return $self->{col_width}	if(!$width);
		$self->{col_width}	=	$width;
	}
	
	# Sets/Removes value ranging
	sub range {
		my $self	=	shift;
		my $ref		=	shift;
		my $b		=	shift;
		if(substr($ref,0,5) ne "ARRAY") {
			if(($ref == 0) && (!defined $b)) {

BackProp.pm

		$self->{rRef} = $ref;
		return $ref;
	}
	
	# Used internally to scale outputs to fit range
	sub _range {
		my $self	=	shift;  
		my $in		=	shift;
		my $rA		=	$self->{rA};
		my $rB		=	$self->{rB};
		my $rS		=	$self->{rS};

BackProp.pm

			
		
	# Initializes the base for a new neural network.
	# It is recommended that you call learn() before run()ing a pattern.
	# See documentation above for usage.
	sub new {
    	no strict;
    	my $type	=	shift;
		my $self	=	{};
		my $layers	=	shift;
		my $size	=	shift;

BackProp.pm

		
		return $self;
	}	

	# Save entire network state to disk.
	sub save {
		my $self	=	shift;
		my $file	=	shift;
		my $size	=	$self->{SIZE};
		my $div		=	$self->{DIV};
		my $out		=	$self->{OUT};

BackProp.pm

	    
	    return $self;
	}
        
	# Load entire network state from disk.
	sub load {
		my $self	=	shift;
		my $file	=	shift;
		my $load_flag = shift || 0;
	    
	    return undef if(!(-f $file));

BackProp.pm

	
	    return $self;
	}

	# Dumps the complete weight matrix of the network to STDIO
	sub show {
		my $self	=	shift;
		for my $a (0..$self->{SIZE}-1) {
			print "Neuron $a: ";
			for my $b (0..$self->{DIV}-1) {
				print $self->{NET}->[$a]->{SYNAPSES}->{LIST}->[$b]->{WEIGHT},"\t";

BackProp.pm

			print "\n";
		}
	}
	
	# Used internally by new() and learn().
	# This is the sub block that actually creates
	# the connections between the synapse chains and
	# also connects the run packages and the map packages
	# to the appropriate ends of the neuron grids.
	sub initialize_group() {
		my $self	=	shift;
		my $size	=	$self->{SIZE};
		my $div		=	$self->{DIV};
		my $out		=	$self->{OUT};
		my $flag	=	$self->{FLAG};

BackProp.pm

	}
	

	# When called with an array reference to a pattern, returns a reference
	# to an array associated with that pattern. See usage in documentation.
	sub run {
		my $self	 =	  shift;
		my $map		 =	  shift;
		my $t0 		 =	new Benchmark;
        $self->{RUN}->run($map);
		$self->{LAST_TIME}=timestr(timediff(new Benchmark, $t0));
        return $self->map();
	}
    
    # This automatically uncrunches a response after running it
	sub run_uc {
    	$_[0]->uncrunch(run(@_));
    }

	# Returns benchmark and loop's ran or learned
	# for last run(), or learn()
	# operation preformed.
	#
	sub benchmarked {
		my $self	=	shift;
		return $self->{LAST_TIME};
	}
	    
	# Used to retrieve map from last internal run operation.
	sub map {
		my $self	 =	  shift;
		$self->{MAP}->map();
	}
	
	# Forces network to learn pattern passed and give desired
	# results. See usage in POD.
	sub learn {
		my $self	=	shift;
		my $omap	=	shift;
		my $res		=	shift;
		my %args    =   @_;
		my $inc 	=	$args{inc} || 0.20;

BackProp.pm

package AI::NeuralNet::BackProp::_run;
	
	use strict;
	
	# Dummy constructor.
	sub new {
		bless { PARENT => $_[1] }, $_[0]
	}
	
	# This is so we comply with the neuron interface.
	sub weight {}
	sub input  {}
	
	# Again, compliance with neuron interface.
	sub register_synapse {
		my $self	=	shift;		
		my $sid		=	$self->{REGISTRATION} || 0;
		$self->{REGISTRATION}	=	++$sid;
		$self->{RMAP}->{$sid-1}	= 	$self->{PARENT}->{_tmp_synapse};
		return $sid-1;
	}
	
	# Here is the real meat of this package.
	# run() does one thing: It fires values
	# into the first layer of the network.
	sub run {
		my $self	=	shift;
		my $map		=	shift;
		my $x		=	0;
		$map = $self->{PARENT}->crunch($map) if($map == 0);
		return undef if(substr($map,0,5) ne "ARRAY");

BackProp.pm

package AI::NeuralNet::BackProp::_map;
	
	use strict;
	
	# Dummy constructor.
	sub new {
		bless { PARENT => $_[1] }, $_[0]
	}
	
	# Compliance with neuron interface
	sub weight {}
	
	# Compliance with neuron interface
	sub register_synapse {
		my $self	=	shift;		
		my $sid		=	$self->{REGISTRATION} || 0;
		$self->{REGISTRATION}	=	++$sid;
		$self->{RMAP}->{$sid-1} = 	$self->{PARENT}->{_tmp_synapse};
		return $sid-1;

BackProp.pm

	
	# This acts just like a regular neuron by receiving
	# values from input synapses. Yet, unlike a regular
	# neuron, it doesn't weight the values, just stores
	# them to be retrieved by a call to map().
	sub input  {
		no strict 'refs';             
		my $self	=	shift;
		my $sid		=	shift;
		my $value	=	shift;
		my $size	=	$self->{PARENT}->{DIV};

BackProp.pm

		AI::NeuralNet::BackProp::out1 "Received value $self->{OUTPUT}->[$sid]->{VALUE} and sid $sid, self $self.\n";
	}
	
	# Here we simply collect the value of every neuron connected to this
	# one from the layer below us and return an array ref to the final map.
	sub map {
		my $self	=	shift;
		my $size	=	$self->{PARENT}->{DIV};
		my $out		=	$self->{PARENT}->{OUT};
		my $divide  =	AI::NeuralNet::BackProp->intr($size/$out);
		my @map = ();

BackProp.pm  view on Meta::CPAN

			      
# load_pcx() wrapper package
package AI::NeuralNet::BackProp::PCX;

	# Called by load_pcx in AI::NeuralNet::BackProp;
	sub new {
		my $type	=	shift;
		my $self	=	{ 
			parent  => $_[0],
			file    => $_[1]
		};

BackProp.pm  view on Meta::CPAN

	}

	# Returns a rectangular block defined by an array ref in the form of
	# 		[$x1,$y1,$x2,$y2]
	# Return value is an array ref
	sub get_block {
		my $self	=	shift;
		my $ref		=	shift;
		my ($x1,$y1,$x2,$y2)	=	@{$ref};
		my @block	=	();
		my $count	=	0;

BackProp.pm  view on Meta::CPAN

		}
		return \@block;
	}
			
	# Returns pixel at $x,$y
	sub get {
		my $self	=	shift;
		my ($x,$y)  =	(shift,shift);
		return $self->{image}->[$y*320+$x];
	}
	
	# Returns an array of the (r,g,b) values for the palette index passed
	sub rgb {
		my $self	=	shift;
		my $color	=	shift;
		return ($self->{palette}->[$color]->{red},$self->{palette}->[$color]->{green},$self->{palette}->[$color]->{blue});
	}
		
	# Returns the mean of the (r,g,b) values for the palette index passed
	sub avg {
		my $self	=	shift;
		my $color	=	shift;
		return $self->{parent}->intr(($self->{palette}->[$color]->{red}+$self->{palette}->[$color]->{green}+$self->{palette}->[$color]->{blue})/3);
	}
	
	# Loads and decompresses a PCX-format 320x200, 8-bit image file and returns
	# two arrays: the first is a 64000-element array in which each element is a
	# palette index, and the second is a 256-element palette array in which each
	# element is a hash ref with the keys 'red', 'green', and 'blue', holding the
	# respective color components for that palette index.
	sub load_pcx {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		
		# open the file
		open(FILE, '<', $_[0]) or return undef;	# three-arg open; the original never checked for failure
		binmode(FILE);

 view all matches for this distribution


AI-NeuralNet-FastSOM

 view release on metacpan or  search on metacpan

examples/eigenvector_initialization.pl  view on Meta::CPAN

	my @es_desc = sort { $b <=> $a } @es;                            # eigenvalues sorted desc
#	warn "desc: ".Dumper \@es_desc;
	my @es_idx  = map { _find_num ($_, \@es) } @es_desc;             # eigenvalue indices sorted by eigenvalue (desc)
#	warn "idx: ".Dumper \@es_idx;

sub _find_num {
    my $v = shift;
    my $l = shift;
    for my $i (0..$#$l) {
	return $i if $v == $l->[$i];
    }

 view all matches for this distribution


AI-NeuralNet-Hopfield

 view release on metacpan or  search on metacpan

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN


has 'matrix_rows'   => ( is => 'rw', isa => 'Int');

has 'matrix_cols'   => ( is => 'rw', isa => 'Int');

sub BUILD {
	my $self = shift;
	my $args = shift;
	my $matrix = Math::SparseMatrix->new($args->{row}, $args->{col});
	$self->matrix($matrix);	
	$self->matrix_rows($args->{row});
	$self->matrix_cols($args->{col});
}

sub train {
	my $self = shift;
	my @pattern = @_;	

	if ( ($#pattern + 1) != $self->matrix_rows) {
		die "Can't train a pattern of size " . ($#pattern + 1) . " on a Hopfield network of size " . $self->matrix_rows;

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

	my $m5 = &add($self->matrix, $m4);
	
	$self->matrix($m5);	
}

sub evaluate {
	my $self = shift;
	my @pattern = @_;

	my @output = ();

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return @output;
}
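# Putting BUILD(), train() and evaluate() together -- a minimal usage
# sketch (the 4-neuron pattern is illustrative):
#   my $net = AI::NeuralNet::Hopfield->new(row => 4, col => 4);
#   $net->train(1, 0, 1, 0);               # pattern size must equal matrix_rows
#   my @out = $net->evaluate(1, 0, 1, 0);  # recalls the stored pattern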

sub convert_array {
	my $rows    = shift;
	my $cols 	= shift;
	my @pattern = @_;
	my $result  = Math::SparseMatrix->new(1, $cols);

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return $result;
}

sub transpose {
	my $matrix  = shift;
	my $rows = $matrix->{_rows};
	my $cols = $matrix->{_cols};

	my $inverse = Math::SparseMatrix->new($cols, $rows);

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return $inverse;
}

sub multiply {
	my $matrix_a  = shift;
	my $matrix_b  = shift;

	my $a_rows = $matrix_a->{_rows};
	my $a_cols = $matrix_a->{_cols};

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return $result;
}

sub identity {
	my $size = shift;

	if ($size < 1) {
		die "Identity matrix must be at least of size 1.";
	}

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		$result->set($i, $i, 1);
	}
	return $result;
}

sub subtract {
	my $matrix_a = shift;
	my $matrix_b = shift;

    my $a_rows = $matrix_a->{_rows};
    my $a_cols = $matrix_a->{_cols};

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return $result;
}

sub add {
	# weight matrix
	my $matrix_a = shift;
	# identity matrix
	my $matrix_b = shift;

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return $result;
}

sub dot_product {
	my $matrix_a = shift;
	my $matrix_b = shift;
	
	my $a_rows = $matrix_a->{_rows};
	my $a_cols = $matrix_a->{_cols};

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		$result += $array_a[$i] * $array_b[$i];
	}
	return $result;
}

sub packed_array {
	my $matrix = shift;
	my @result = ();

	for (my $r = 1; $r <= $matrix->{_rows}; $r++) {
		for (my $c = 1; $c <= $matrix->{_cols}; $c++) {

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return @result;
}

sub get_col {
	my $self = shift;
	my $col  = shift;

	my $matrix = $self->matrix();
	

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		$new_matrix->set($row, 1, $value);
	}
	return $new_matrix;
}

sub print_matrix {
    my $matrix  = shift;
    my $rs = $matrix->{_rows};
    my $cs = $matrix->{_cols};

	for (my $i = 1; $i <= $rs; $i++) {

 view all matches for this distribution


AI-NeuralNet-Kohonen-Demo-RGB

 view release on metacpan or  search on metacpan

RGB.pm  view on Meta::CPAN

use Tk qw/DoOneEvent DONT_WAIT/;

#
# Used only by &tk_train
#
sub tk_show { my $self=shift;
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			my $colour = sprintf("#%02x%02x%02x",
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[0])),
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[1])),

RGB.pm  view on Meta::CPAN


Overrides the base class to provide Tk displays of the map

=cut

sub train { my ($self,$epochs) = (shift,shift);
	my $label_txt;

	$epochs = $self->{epochs} unless defined $epochs;
	$self->{display_scale} = 10 if not defined $self->{display_scale};

	$self->{mw} = MainWindow->new(
		-width	=> 200+($self->{map_dim_x} * $self->{display_scale}),
		-height	=> 200+($self->{map_dim_y} * $self->{display_scale}),
	);
    my $quit_flag = 0;
    my $quit_code = sub {$quit_flag = 1};
    $self->{mw}->protocol('WM_DELETE_WINDOW' => $quit_code);

	$self->{c} = $self->{mw}->Canvas(
		-width	=> 50+($self->{map_dim_x} * $self->{display_scale}),
		-height	=> 50+($self->{map_dim_y} * $self->{display_scale}),

 view all matches for this distribution


AI-NeuralNet-Kohonen-Visual

 view release on metacpan or  search on metacpan

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN

Try the test file in this distribution, or:

	package YourClass;
	use base "AI::NeuralNet::Kohonen::Visual";

	sub get_colour_for { my ($self,$x,$y) = (shift,shift,shift);
	# From here you return a Tk colour name.
		# Get it as you please; for example, values of a 3D map:
		return sprintf("#%02x%02x%02x",
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[0])),
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[1])),

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN


Overrides the base class to provide Tk displays of the map.

=cut

sub train { my ($self,$epochs) = (shift,shift);
	$epochs = $self->{epochs} unless defined $epochs;
	$self->{display_scale} = 10 if not defined $self->{display_scale};

	&{$self->{train_start}} if exists $self->{train_start};

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN


Accepts: C<x> and C<y> co-ordinates in the map.

=cut

sub get_colour_for { my ($self,$x,$y) = (shift,shift,shift);
	my $_0 = $self->{map}->[$x]->[$y]->{weight}->[0];
	$_0 = $self->{missing_colour} || 0 if $_0 eq $self->{missing_mask};
	my $_1 = $self->{map}->[$x]->[$y]->{weight}->[1];
	$_1 = $self->{missing_colour} || 0 if $_1 eq $self->{missing_mask};
	my $_2 = $self->{map}->[$x]->[$y]->{weight}->[2];

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN


Deprecated: see L<METHOD create_empty_map>.

=cut

sub prepare_display {
	return $_[0]->create_empty_map;
}

=head1 METHOD create_empty_map

Sets up a Tk C<MainWindow> and C<Canvas> to
act as an empty map.

=cut

sub create_empty_map { my $self = shift;
	my ($w,$h);
	if ($self->{display} and $self->{display} eq 'hex'){
		$w = ($self->{map_dim_x}+1) * ($self->{display_scale}+2);
		$h = ($self->{map_dim_y}+1) * ($self->{display_scale}+2);
	} else {

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN

		-height	=> $h + 20,
	);
	$self->{_mw}->fontCreate(qw/TAG -family verdana -size 8 -weight bold/);
	$self->{_mw}->resizable( 0, 0);
    $self->{_quit_flag} = 0;
    $self->{_mw}->protocol('WM_DELETE_WINDOW' => sub {$self->{_quit_flag}=1});
	$self->{_canvas} = $self->{_mw}->Canvas(
		-width	=> $w,
		-height	=> $h,
		-relief	=> 'raised',
		-border => 2,
	);
	$self->{_canvas}->pack(-side=>'top');
	$self->{_label} = $self->{_mw}->Button(
		-command      => sub { $self->{_mw}->destroy;$self->{_mw} = undef; },
		-relief       => 'groove',
		-text         => ' ',
		-wraplength   => $w,
		-textvariable => \$self->{_label_txt}
	);

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN


See also L<METHOD get_colour_for>.

=cut

sub plot_map { my ($self,$args) = (shift,{@_});
	$self->{plotted} = 1;
	# MW may have been destroyed
	$self->prepare_display if not defined $self->{_mw};
	my $yo = 5+($self->{display_scale}/2);
	for my $x (0..$self->{map_dim_x}){

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN


Very naive: no attempt is made to check that the text will appear on the map.

=cut

sub label_map { my ($self,$x,$y,$t) = (shift,shift,shift,shift);
	$self->{_canvas}->createText(
		$x*$self->{display_scale}+($self->{display_scale}),
		$y*$self->{display_scale}+($self->{display_scale}),
		-text	=> $t,
		-anchor => 'w',

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN


Calls Tk's C<MainLoop> to keep a window open until the user closes it.

=cut

sub main_loop { my $self = shift;
	$self->plot_map unless $self->{plotted};
	MainLoop;
}


 view all matches for this distribution


AI-NeuralNet-Kohonen

 view release on metacpan or  search on metacpan

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


=back

=cut

sub new {
	my $class					= shift;
	my %args					= @_;
	my $self 					= bless \%args,$class;

	$self->{missing_mask}		= 'x' unless defined $self->{missing_mask};
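	# A constructor call in the shape this new() expects; the keys come from
	# fields referenced elsewhere in this file, the values are illustrative:
	#   my $net = AI::NeuralNet::Kohonen->new(
	#       map_dim_x => 14,
	#       map_dim_y => 10,
	#       epochs    => 100,
	#       table     => "3\n1 0 0 red\n0 1 0 yellow\n0 0 1 blue\n",
	#   );
	#   $net->train;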

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


See L<AI::NeuralNet::Kohonen::Node/CONSTRUCTOR new>.

=cut

sub randomise_map { my $self=shift;
	confess "{weight_dim} not set" unless $self->{weight_dim};
	confess "{map_dim_x} not set" unless $self->{map_dim_x};
	confess "{map_dim_y} not set" unless $self->{map_dim_y};
	for my $x (0..$self->{map_dim_x}){
		$self->{map}->[$x] = [];

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

As L<METHOD randomise_map> but sets all C<map> nodes to
either the value supplied as the only parameter, or C<undef>.

=cut

sub clear_map { my $self=shift;
	confess "{weight_dim} not set" unless $self->{weight_dim};
	confess "{map_dim_x} not set" unless $self->{map_dim_x};
	confess "{map_dim_y} not set" unless $self->{map_dim_y};
	my $val = shift || $self->{missing_mask};
	my $w = [];

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


Returns a true value.

=cut

sub train { my ($self,$epochs) = (shift,shift);
	$epochs = $self->{epochs} unless defined $epochs;
	&{$self->{train_start}} if exists $self->{train_start};
	for my $epoch (1..$epochs){
		$self->{t} = $epoch;
		&{$self->{epoch_start}} if exists $self->{epoch_start};

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

and L<AI::NeuralNet::Kohonen::Node/distance_from>,

=cut


sub find_bmu { my ($self,$target) = (shift,shift);
	my $closest = [];	# [value, x,y] value and co-ords of closest match
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			my $distance = $self->{map}->[$x]->[$y]->distance_from( $target );
			$closest = [$distance,0,0] if $x==0 and $y==0;

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

Returns: reference to an array that is the weight of the node, or
C<undef> on failure.

=cut

sub get_weight_at { my ($self,$x,$y) = (shift,shift,shift);
	return undef if $x<0 or $y<0 or $x>$self->{map_dim_x} or $y>$self->{map_dim_y};
	return $self->{map}->[$x]->[$y]->{weight};
}


lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


See L<METHOD find_bmu>, and L</METHOD get_weight_at>.

=cut

sub get_results { my ($self,$targets)=(shift,shift);
	$self->{results} = [];
	if (not defined $targets){
		$targets = $self->{input};
	} elsif (not $targets eq $self->{input}){
		foreach (@$targets){

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


This may change, as it seems misleading to re-use that field.

=cut

sub map_results { my $self=shift;

}


=head1 METHOD dump

Print the current weight values to the screen.

=cut

sub dump { my $self=shift;
	print "    ";
	for my $x (0..$self->{map_dim_x}){
		printf ("  %02d ",$x);
	}
	print "\n", "-" x 107, "\n";

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


Returns: a true value.

=cut

sub smooth { my ($self,$smooth) = (shift,shift);
	$smooth = $self->{smoothing} if not $smooth and defined $self->{smoothing};
	return unless $smooth;
	# NB: the value passed in (or {smoothing}) acts only as an on/off flag;
	# the smoothing radius is always derived from the map size below.
	$smooth = int( sqrt $self->{map_dim_a} );
	my $mask = _make_gaussian_mask($smooth);

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


See L</FILE FORMAT>.

=cut

sub load_input { my ($self,$path) = (shift,shift);
	local *IN;
	if (not open IN, '<', $path){
		warn "Could not open file <$path>: $!";
		return undef;
	}

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


Return C<undef> on failure, a true value on success.

=cut

sub save_file { my ($self,$path) = (shift,shift);
	local *OUT;
	if (not open OUT, '>', $path){
		warn "Could not open file for writing <$path>: $!";
		return undef;
	}

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


#
# Process ASCII from table field or input file
# Accepts: ASCII as array or array ref
#
sub _process_input_text { my ($self) = (shift);
	if (not defined $_[1]){
		if (ref $_[0] eq 'ARRAY'){
			@_ = @{$_[0]};
		} else {
			@_ = split/[\n\r\f]+/,$_[0];

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

unless the C<targeting> field is defined, when the targets are
iterated over.

=cut

sub _select_target { my $self=shift;
	if (not $self->{targeting}){
		return $self->{input}->[
			(int rand(scalar @{$self->{input}}))
		];
	}

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

is calculated by the C<Node> class - see
L<AI::NeuralNet::Kohonen::Node/distance_effect>.

=cut

sub _adjust_neighbours_of { my ($self,$bmu,$target) = (shift,shift,shift);
	my $neighbour_radius = int (
		($self->{map_dim_a}/$self->{neighbour_factor}) * exp(- $self->{t} / $self->{time_constant})
	);

	# Distance from co-ord vector (0,0) as integer

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

	L(t) = L_0 * exp( -t / lambda )

=cut

sub _decay_learning_rate { my $self=shift;
	$self->{l} =  (
		$self->{learning_rate} * exp(- $self->{t} / $self->{time_constant})
	);
}
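# Sanity check of the decay above: with learning_rate 0.5, time_constant 100
# and t 100, l = 0.5 * exp(-1), roughly 0.184.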

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


Returns: reference to a 2d array that is the mask.

=cut

sub _make_gaussian_mask { my ($smooth) = (shift);
	my $f = 4; # Cut-off threshold
	my $g_mask_2d = [];
	for my $x (0..$smooth){
		$g_mask_2d->[$x] = [];
		for my $y (0..$smooth){

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


See also L<_decay_learning_rate>.

=cut

sub _gauss_weight { my ($r, $sigma) = (shift,shift);
	return exp( -($r**2) / (2 * $sigma**2) );
}
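# Quick numerical illustration (the sigma of 2 is arbitrary):
#   _gauss_weight($r, 2) for $r = 0..4 gives
#   1.0000, 0.8825, 0.6065, 0.3247, 0.1353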


=head1 PUBLIC METHOD quantise_error

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

or those in the C<input> field.

=cut


sub quantise_error { my ($self,$targets) = (shift,shift);
	my $qerror=0;
	if (not defined $targets){
		$targets = $self->{input};
	} else {
		foreach (@$targets){

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

the data passed was a comment, or the C<weight_dim> flag was
not set); a true value on success.

=cut

sub _add_input_from_str { my ($self) = (shift);
	$_ = shift;
	s/#.*$//g;
	return undef if /^$/ or not defined $self->{weight_dim};
	my @i = split /\s+/,$_;
	return undef if $#i < $self->{weight_dim}; # catch bad lines

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN



#
# Processes the 'table' parameter to the constructor
#
sub _process_table { my $self = shift;
	$_ = $self->_process_input_text( $self->{table} );
	undef $self->{table};
	return $_;
}

 view all matches for this distribution


AI-NeuralNet-Mesh

 view release on metacpan or  search on metacpan

Mesh.pm  view on Meta::CPAN

   	# See POD for usage of this variable.
	$AI::NeuralNet::Mesh::Connector = '_c';
	
	# Debugging subs
	$AI::NeuralNet::Mesh::DEBUG  = 0;
	sub whowasi { (caller(1))[3] . '()' }
	sub debug { shift; $AI::NeuralNet::Mesh::DEBUG = shift || 0; } 
	sub d { shift if(substr($_[0],0,4) eq 'AI::'); my ($a,$b,$c)=(shift,shift,$AI::NeuralNet::Mesh::DEBUG); print $a if($c == $b); return $c }
	sub verbose {debug @_};
	sub verbosity {debug @_};
	sub v {debug @_};
	
	
	# Return version of ::ID string passed or current version of this
	# module if no string is passed. Used in load() to detect file versions.
	sub version {
		shift if(substr($_[0],0,4) eq 'AI::');
		substr((split(/\s/,(shift || $AI::NeuralNet::Mesh::ID)))[2],1);
	}                                  
	
	# Rounds a floating-point to an integer with int() and sprintf()
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
    	# try/catch were never imported here; use eval instead
    	my $int = eval { int(sprintf("%.0f",shift)) };
    	return defined $int ? $int : 0;
	}
    
	# Package constructor
	sub new {
		no strict 'refs';
		my $type	=	shift;
		my $self	=	{};
		my $layers	=	shift;
		my $nodes	=	shift;
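		# Hedged construction sketch based on the shifts above
		# (the exact defaults live in the truncated body):
		#   my $net = AI::NeuralNet::Mesh->new(3, 2, 1); # 3 layers, 2 nodes each, 1 output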

Mesh.pm  view on Meta::CPAN

	}	
    

    # Internal usage
    # Connects one range of nodes to another range
    sub _c {
    	my $self	=	shift;
    	my $r1a		=	shift;
    	my $r1b		=	shift;
    	my $r2a		=	shift;
    	my $r2b		=	shift;

Mesh.pm  view on Meta::CPAN

		}
	}
    
    # Internal usage
    # Creates the mesh of neurons
    sub _init {
    	my $self		=	shift;
    	my $nodes		=	$self->{nodes};
    	my $outputs		=	$self->{outputs} || $nodes;
    	my $inputs		=	$self->{inputs}  || $nodes;
    	my $layers		=	$self->{total_layers};

Mesh.pm  view on Meta::CPAN

				$self->{mesh}->[$x]->add_input_node($self->{input}->{cap});
		}
	}
    
    # See POD for usage
    sub extend {
    	my $self	=	shift;
    	my $layers	=	shift;
    
    	# Looks like we got ourselves a layer specs array
		if(ref($layers) eq "ARRAY") { 

Mesh.pm  view on Meta::CPAN

		}
		return 1;
	}
    
    # See POD for usage
    sub extend_layer {
    	my $self	=	shift;
    	my $layer	=	shift || 0;
    	my $specs	=	shift;
    	if(!$specs) {
    		$self->{error} = "extend_layer(): You must provide specs to extend layer $layer with.\n";

Mesh.pm  view on Meta::CPAN

    	}
    	return 1;
    }
    
    # Pseudo-internal usage
    sub add_nodes {
    	no strict 'refs';
		my $self	=	shift;
    	my $layer	=	shift;
    	my $nodes	=	shift;
    	my $n		=	0;

Mesh.pm  view on Meta::CPAN

		$self->_c($#{$self->{mesh}}-$more+1,$#{$self->{mesh}},$n+$self->{layers}->[$layer],$n+$self->{layers}->[$layer]+$self->{layers}->[$layer+1]);
    }
        
        
    # See POD for usage
    sub run {
    	my $self	=	shift;
    	my $inputs	=	shift;
    	my $const	=	$self->{const};
    	#my $start	=	new Benchmark;
    	$inputs		=	$self->crunch($inputs) if($inputs == 0);

Mesh.pm  view on Meta::CPAN

    	#$self->{benchmark} = timestr(timediff(new Benchmark, $start));
    	return $self->{output}->get_outputs();
    }    
    
    # See POD for usage
    sub run_uc {
    	$_[0]->uncrunch(run(@_));
    }

	# See POD for usage
	sub learn {
    	my $self	=	shift;					
    	my $inputs	=	shift;					# input set
    	my $outputs	=	shift;					# target outputs
    	my %args	=	@_;						# get args into hash
    	my $inc		=	$args{inc} || 0.002;	# learning gradient

Mesh.pm  view on Meta::CPAN

   		return $str;
   	}


	# See POD for usage
	sub learn_set {
		my $self	=	shift;
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2;
		my $inc		=	$args{inc};

Mesh.pm  view on Meta::CPAN

			return $data->[$row]->[0]-$self->run($data->[$row-1])->[0];
		}
	}
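	# Hedged sketch of both calls, following the alternating input/output
	# layout implied by $#{$data}/2 above:
	#   $net->learn([1,1] => [1], inc => 0.002);
	#   $net->learn_set([ [1,1] => [1],  [1,0] => [0],
	#                     [0,1] => [0],  [0,0] => [0] ]);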
	
	# See POD for usage
	sub run_set {
		my $self	=	shift;
		my $data	=	shift;
		my $len		=	$#{$data}/2;
		my (@results,$res);
		for my $x (0..$len) {

Mesh.pm  view on Meta::CPAN

	# $sep is an optional argument specifying the separator
	# character between values. $sep defaults to ',' (a single comma).
	# NOTE: This does not handle quoted fields, or any record
	# separator other than "\n".
	#
	sub load_set {
		my $self	=	shift;
		my $file	=	shift;
		my $attr	=	shift || 0;
		my $sep		=	shift || ',';
		my $data	=	[];

Mesh.pm  view on Meta::CPAN

		}
		return $data;
	}
	
	# See POD for usage
	sub get_outs {
		my $self	=	shift;
		my $data	=	shift;
		my $len		=	$#{$data}/2;
		my $outs	=	[];
		for my $x (0..$len) {

Mesh.pm  view on Meta::CPAN

		}
		return $outs;
	}
	
	# Save entire network state to disk.
	sub save {
		my $self	=	shift;
		my $file	=	shift;
		no strict 'refs';
		
		open(FILE, '>', $file);

Mesh.pm  view on Meta::CPAN

	    
	    return $self;
	}
        
	# Load entire network state from disk.
	sub load {
		my $self		=	shift;
		my $file		=	shift;  
		my $load_flag   =	shift;
		
	    my @lines;

Mesh.pm  view on Meta::CPAN

		
		return $self;
	}
	
	# Load entire network state from disk (old file format).
	sub load_old {
		my $self		=	shift;
		my $file		=	shift;  
		my $load_flag   =	shift;
		
	    if(!(-f $file)) {

Mesh.pm  view on Meta::CPAN

		
		return $self;
	}

	# Dumps the complete weight matrix of the network to STDOUT
	sub show {
		my $self	=	shift;
		my $n 		=	0;    
		no strict 'refs';
		for my $x (0..$self->{total_layers}) {
			for my $y (0..$self->{layers}->[$x]-1) {

Mesh.pm  view on Meta::CPAN

	# 	$output	= &$type($sum_of_inputs,$self);
	# The code ref then has access to all the data in that node (through the
	# blessed reference $self) and is expected to return the value to be used
	# as the output for that node. The sum of all the inputs to that node
	# is already summed and passed as the first argument.
	sub activation {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $value	=	shift || 'linear';
		my $n 		=	0;    
		no strict 'refs';

Mesh.pm  view on Meta::CPAN

			$self->{mesh}->[$_]->{activation} = $value; 
		}
	}
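	# Usage sketch (layer numbers illustrative):
	#   $net->activation(1, 'sigmoid');         # a name, or a code ref as above
	#   $net->threshold(1, 0.75);               # see threshold() below
	#   $net->node_activation(1, 0, 'linear');  # override a single node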
	
	# Applies an activation type to a specific node
	sub node_activation {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $node	=	shift || 0;
		my $value	=	shift || 'linear';
		my $n 		=	0;    

Mesh.pm  view on Meta::CPAN

	}
	
	# Set the activation threshold for a specific layer.
	# Only applicable if that layer uses "sigmoid" or "sigmoid_2"
	# usage: $net->threshold($layer,$threshold);
	sub threshold {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $value	=	shift || 0.5; 
		my $n		=	0;
		no strict 'refs';

Mesh.pm  view on Meta::CPAN

			$self->{mesh}->[$_]->{threshold} = $value;
		}
	}
	
	# Applies a threshold to a specific node     
	sub node_threshold {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $node	=	shift || 0;
		my $value	=	shift || 0.5; 
		my $n		=	0;

Mesh.pm  view on Meta::CPAN

	
	# Set mean (avg.) flag for a layer.
	# usage: $net->mean($layer,$flag);
	# If $flag is true, it enables finding the mean for that layer;
	# if $flag is false, it disables it.
	sub mean {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $value	=	shift || 0;
		my $n		=	0;
		no strict 'refs';

Mesh.pm  view on Meta::CPAN

		}
	}
	
	  
	# Returns a PCX::Loader object
	sub load_pcx {
		my $self	=	shift;
		my $file	=	shift;
		eval('use PCX::Loader');
		if($@) {
			$self->{error}="Cannot load PCX::Loader module: $@";

Mesh.pm  view on Meta::CPAN

		}
		return PCX::Loader->new($self,$file);
	}	
	
	# Crunch a string of words into a map
	sub crunch {
		my $self	=	shift;
		my @ws 		=	split(/[\s\t]/,shift);
		my (@map,$ic);
		for my $a (0..$#ws) {
			$ic=$self->crunched($ws[$a]);

Mesh.pm  view on Meta::CPAN

		return \@map;
	}
	
	# Finds if a word has been crunched.
	# Returns undef on failure, word index for success.
	sub crunched {
		my $self	=	shift;
		for my $a (0..$self->{_crunched}->{_length}-1) {
			return $a+1 if($self->{_crunched}->{list}->[$a] eq $_[0]);
		}
		$self->{error} = "Word \"$_[0]\" not found.";
		return undef;
	}
	
	# Alias for crunched(), above
	sub word { crunched(@_) }
	
	# Uncrunches a map (array ref) into a string of words
	# and returns the string
	sub uncrunch {
		my $self	=	shift;
		my $map = shift;
		my ($c,$el,$x);
		foreach $el (@{$map}) {
			$c .= $self->{_crunched}->{list}->[$el-1].' ';

Mesh.pm  view on Meta::CPAN

		return $c;
	}
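	# Round-trip sketch: crunch() maps words to 1-based indices and
	# uncrunch() maps them back (appending a space per word):
	#   my $map = $net->crunch("do you love me");  # e.g. [1, 2, 3, 4]
	#   print $net->uncrunch($map);                # "do you love me "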
	
	# Sets/gets the randomness factor in the network. Setting a value of 0
	# disables random factors.
	sub random {
		my $self	=	shift;
		my $rand	=	shift;
		return $self->{random}	if(!(defined $rand));
		$self->{random}	=	$rand;
	}
	
	# Sets/gets column width for printing lists in debug modes 1,3, and 4.
	sub col_width {
		my $self	=	shift;
		my $width	=	shift;
		return $self->{col_width}	if(!$width);
		$self->{col_width}	=	$width;
	} 

	# Sets/gets the run constant factor in the network. Setting a value
	# of 0 disables the run constant factor.
	sub const {
		my $self	=	shift;
		my $const	=	shift;
		return $self->{const}	if(!(defined $const));
		$self->{const}	=	$const;
	}
	
	# Return benchmark time from last learn() operation.
	sub benchmark {
		shift->{benchmarked};
	}
	
	# Same as benchmark()
	sub benchmarked {
		benchmark(shift);
	}
	
	# Return the last error in the mesh, or undef if no error.
	sub error {
		my $self = shift;
		return undef if !$self->{error};
		chomp($self->{error});
		return $self->{error}."\n";
	}

Mesh.pm  view on Meta::CPAN

	# Used to format array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $a		=	shift;

Mesh.pm  view on Meta::CPAN

	}
	
	# Returns percentage difference between all elements of two
	# array refs of exact same length (in elements).
	# Now calculates actual difference in numerical value.
	sub pdiff {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $a1	=	shift;
		my $a2	=	shift;
		my $a1s	=	$#{$a1};

Mesh.pm  view on Meta::CPAN

		$a1s = 1 if(!$a1s);
		return sprintf("%.10f",($diff/$a1s));
	}
	
	# Returns $fa as a percentage of $fb
	sub p {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my ($fa,$fb)=(shift,shift); 
		sprintf("%.3f",$fa/$fb*100); #((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100
	}
	
	# Returns the index of the element in array REF passed with the highest 
	# comparative value
	sub high {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1 = shift; my ($el,$len,$tmp); $tmp=0;
		foreach $el (@{$ref1}) { $len++ }
		for my $x (0..$len-1) { $tmp = $x if($ref1->[$x] > $ref1->[$tmp]) }
		return $tmp;
	}
	
	# Returns the index of the element in array REF passed with the lowest 
	# comparative value
	sub low {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1 = shift; my ($el,$len,$tmp); $tmp=0;
		foreach $el (@{$ref1}) { $len++ }
		for my $x (0..$len-1) { $tmp = $x if($ref1->[$x] < $ref1->[$tmp]) }
		return $tmp;

Mesh.pm  view on Meta::CPAN

	# net TWICE on the data set, because the first time
	# the range() function searches for the top value in
	# the inputs, and therefore, results could fluctuate.
	# The second learning cycle guarantees more accuracy.
	#	
	sub range {
		my @r=@_;
		sub{$_[1]->{t}=$_[0]if($_[0]>$_[1]->{t});$r[intr($_[0]/$_[1]->{t}*$#r)]}
	}
	
	#

Mesh.pm  view on Meta::CPAN

	# net at least TWICE on the data set, because the first 
	# time the ramp() function searches for the top value in
	# the inputs, and therefore, results could fluctuate.
	# The second learning cycle guarantees more accuracy.
	#
	sub ramp {
		my $r=shift||1;my $t=($r<2)?0:-1;
		# $t (not the undefined $b of the original) is the offset that
		# shifts the output range to [-1,1] when $r >= 2
		sub{$_[1]->{t}=$_[0]if($_[0]>$_[1]->{t});$_[0]/$_[1]->{t}*$r+$t}
	}

	# Self-explanatory, pretty much. $threshold is used to decide if an input 
	# is true or false (1 or 0). If an input is below $threshold, it is false.
	sub and_gate {
		my $threshold = shift || 0.5;
		sub {
			my $sum  = shift;
			my $self = shift;
			# precedence fix: the original "if !$value < $threshold" parsed as "(!$value) < $threshold"
			for my $x (0..$self->{_inputs_size}-1) { return $self->{_parent}->{const} if($self->{_inputs}->[$x]->{value} < $threshold) }
			return $sum/$self->{_inputs_size};
		}
	}
	
	# Self-explanatory; $threshold is used the same as above.
	sub or_gate {
		my $threshold = shift || 0.5;
		sub {
			my $sum  = shift;
			my $self = shift;
			# precedence fix: fire when any input is at or above the threshold
			for my $x (0..$self->{_inputs_size}-1) { return $sum/$self->{_inputs_size} if($self->{_inputs}->[$x]->{value} >= $threshold) }
			return $self->{_parent}->{const};
		}
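	# Since activation() accepts a code ref, the gate constructors above can
	# be plugged in directly (hedged sketch; qualify the names if they are
	# not exported):
	#   $net->activation(1, AI::NeuralNet::Mesh::and_gate(0.5));
	#   $net->activation(2, AI::NeuralNet::Mesh::or_gate(0.5));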

Mesh.pm  view on Meta::CPAN

package AI::NeuralNet::Mesh::node;
	
	use strict;

	# Node constructor
	sub new {
		my $type		=	shift;
		my $self		={ 
			_parent		=>	shift,
			_inputs		=>	[],
			_outputs	=>	[]

Mesh.pm  view on Meta::CPAN

		bless $self, $type;
	}

	# Receive inputs from other nodes, and also send
	# outputs on.	
	sub input {
		my $self	=	shift;
		my $input	=	shift;
		my $from_id	=	shift;
		
		$self->{_inputs}->[$from_id]->{value} = $input * $self->{_inputs}->[$from_id]->{weight};

Mesh.pm  view on Meta::CPAN

		} else {
			$self->{_parent}->d("all inputs have NOT fired for $self.\n",1);
		}
	}

	sub add_input_node {
		my $self	=	shift;
		my $node	=	shift;
		my $i		=	$self->{_inputs_size} || 0;
		$self->{_inputs}->[$i]->{node}	 = $node;
		$self->{_inputs}->[$i]->{value}	 = 0;

Mesh.pm  view on Meta::CPAN

		$self->{_inputs}->[$i]->{fired}	 = 0;
		$self->{_inputs_size} = ++$i;
		return $i-1;
	}
	
	sub add_output_node {
		my $self	=	shift;
		my $node	=	shift;
		my $i		=	$self->{_outputs_size} || 0;
		$self->{_outputs}->[$i]->{node}		= $node;
		$self->{_outputs}->[$i]->{from_id}	= $node->add_input_node($self);
		$self->{_outputs_size} = ++$i;
		return $i-1;
	}     
	
	sub adjust_weight {
		my $self	=	shift;
		my $inc		=	shift;
		for my $i (@{$self->{_inputs}}) {
			$i->{weight} += $inc * $i->{weight};
			$i->{node}->adjust_weight($inc) if($i->{node});

Mesh.pm  view on Meta::CPAN


1;	
	
# Internal usage, prevents recursion on empty nodes.
package AI::NeuralNet::Mesh::cap;
	sub new     { bless {}, shift }
	sub input           {}
	sub adjust_weight   {}
	sub add_output_node {}
	sub add_input_node  {}
1;

# Internal usage, collects data from output layer.
package AI::NeuralNet::Mesh::output;
	
	use strict;
	
	sub new {
		my $type		=	shift;
		my $self		={ 
			_parent		=>	shift,
			_inputs		=>	[],
		};
		bless $self, $type;
	}
	
	sub add_input_node {
		my $self	=	shift;
		return (++$self->{_inputs_size})-1;
	}
	
	sub input {
		my $self	=	shift;
		my $input	=	shift;
		my $from_id	=	shift;
		$self->{_parent}->d("GOT INPUT [$input] FROM [$from_id]\n",1);
		$self->{_inputs}->[$from_id] = $self->{_parent}->intr($input);
	}
	
	sub get_outputs {
		my $self	=	shift;
		return $self->{_inputs};
	}

1;

Mesh.pm  view on Meta::CPAN

		    nodes        => 2,
		    activation   => linear
		},
		{
		    nodes        => 3,
		    activation   => sub {
		        my $sum  =  shift;
		        return $sum + rand()*1;
		    }
		},
		{

Mesh.pm  view on Meta::CPAN


The actual code that implements the range closure is
a bit convoluted, so I will expand on it here as a simple
tutorial for custom activation functions.

	= line 1 = 	sub {
	= line 2 =		my @values = ( 6..10 );
	= line 3 =		my $sum   = shift;
	= line 4 =		my $self  = shift;
	= line 5 =		$self->{top_value}=$sum if($sum>$self->{top_value});
	= line 6 =		my $index = intr($sum/$self->{top_value}*$#values);

Mesh.pm  view on Meta::CPAN

	use AI::NeuralNet::Mesh ':acts';

Let's look at the code real quick, as it shows how to get at the individual
input connections:

	= line 1 =	sub {
	= line 2 =		my $sum  = shift;
	= line 3 =		my $self = shift;
	= line 4 =		my $threshold = 0.50;
	= line 5 =		for my $x (0..$self->{_inputs_size}-1) { 
	= line 6 =			return 0.000001 if($self->{_inputs}->[$x]->{value} < $threshold)

Mesh.pm  view on Meta::CPAN

The tree() function is called as a blessed method when it is used internally, providing
access to the blessed reference in the first argument. 

Example connector:

	sub connect_three {
    	my $self	=	shift;
    	my $r1a		=	shift;
    	my $r1b		=	shift;
    	my $r2a		=	shift;
    	my $r2b		=	shift;

Mesh.pm  view on Meta::CPAN


For the fun of it, we'll take a quick look at the default connector.
Below is the actual default connector code, albeit a bit cleaned up, with
line numbers added.

	= line 1  =	sub _c {
	= line 2  =    	my $self	=	shift;
	= line 3  =    	my $r1a		=	shift;
	= line 4  =    	my $r1b		=	shift;
	= line 5  =    	my $r2a		=	shift;
	= line 6  =    	my $r2b		=	shift;

 view all matches for this distribution


AI-NeuralNet-SOM

 view release on metacpan or  search on metacpan

examples/eigenvector_initialization.pl  view on Meta::CPAN

	my @es_desc = sort { $b <=> $a } @es;                            # eigenvalues sorted desc
#	warn "desc: ".Dumper \@es_desc;
	my @es_idx  = map { _find_num ($_, \@es) } @es_desc;             # eigenvalue indices sorted by eigenvalue (desc)
#	warn "idx: ".Dumper \@es_idx;

sub _find_num {
    my $v = shift;
    my $l = shift;
    for my $i (0..$#$l) {
	return $i if $v == $l->[$i];
    }

 view all matches for this distribution


AI-NeuralNet-Simple

 view release on metacpan or  search on metacpan

examples/game_ai.pl  view on Meta::CPAN

        $knife, 
        $gun, 
        $enemies])];
}

sub prompt 
{
    my ($message,$domain) = @_;
    my $valid_response = 0;
    my $response;
    do {

examples/game_ai.pl  view on Meta::CPAN

        $valid_response = $response =~ /$domain/;
    } until $valid_response;
    return $response;
}
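# Illustrative call: keeps asking until the answer matches the pattern.
#   my $health = prompt("Health (1=Poor 2=Average 3=Good)?", '^[123]$');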

sub display_result
{
    my ($net,@data) = @_;
    my $result      = $net->winner(\@data);
    my @health      = qw/Poor Average Good/;
    my @knife       = qw/No Yes/;

 view all matches for this distribution


AI-Ollama-Client

 view release on metacpan or  search on metacpan

lib/AI/Ollama/Client.pm  view on Meta::CPAN


Check to see if a blob exists on the Ollama server, which is useful when creating models.

=cut

around 'checkBlob' => sub ( $super, $self, %options ) {
    $super->( $self, %options )->then( sub( $res ) {
        if( $res->code =~ /^2\d\d$/ ) {
            return Future->done( 1 )
        } else {
            return Future->done( 0 )

lib/AI/Ollama/Client.pm  view on Meta::CPAN


Returns a L<< AI::Ollama::GenerateCompletionResponse >>.

=cut

around 'generateCompletion' => sub ( $super, $self, %options ) {
    # Encode images as base64, if images exist:
    # (but create a copy so we don't overwrite the input array)
    if (my $images = $options{images}) {

        # Allow { filename => '/etc/passwd' }

 view all matches for this distribution


AI-PBDD

 view release on metacpan or  search on metacpan

lib/AI/PBDD.pm  view on Meta::CPAN


$VERSION = '0.01';

bootstrap AI::PBDD $VERSION;

sub satCount {
  my ($bdd, $vars_ignored) = @_;

  if (!defined($vars_ignored)) {
    return satCount__I($bdd);
  } else {
    return satCount__II($bdd, $vars_ignored);
  }
}

sub printDot {
  my ($bdd, $filename) = @_;

  if (!defined($filename)) {
      printDot__I($bdd);
  } else {
      printDot__II($bdd, $filename);
  }
}

sub makeSet {
  my ($vars, $size, $offset) = @_;

  if (!defined($offset)) {
      return makeSetI($vars, $size);
  } else {
      return makeSetII($vars, $size, $offset);
  }
}

sub createPair {
  my ($old, $new) = @_;
  my $size = @$old;

  return createPairI($old, $new, $size);
}
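# These wrappers share one pattern: dispatch to a different XS entry point
# depending on how many optional arguments were supplied. Illustrative calls
# ($bdd and @vars would come from earlier AI::PBDD setup):
#   my $n = AI::PBDD::satCount($bdd);       # dispatches to satCount__I
#   my $m = AI::PBDD::satCount($bdd, 3);    # dispatches to satCount__II
#   AI::PBDD::printDot($bdd, "out.dot");    # printDot__II writes to a file
#   my $set = AI::PBDD::makeSet(\@vars, scalar @vars);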

 view all matches for this distribution


AI-PSO

 view release on metacpan or  search on metacpan

examples/NeuralNet/pso_ann.pl  view on Meta::CPAN

my $annInputs = "pso.dat";

my $expectedValue = 3.5;	# this is the value that we want to train the ANN to produce (just like the example in t/PSO.t)


sub test_fitness_function(@) {
    my (@arr) = (@_);
	&writeAnnConfig($annConfig, $numInputs, $numHidden, $xferFunc, @arr);
	my $netValue = &runANN($annConfig, $annInputs);
	print "network value = $netValue\n";

examples/NeuralNet/pso_ann.pl  view on Meta::CPAN




##### io #########

sub writeAnnConfig {
	my ($configFile, $inputs, $hidden, $func, @weights) = (@_);

	open(ANN, '>', $configFile) or die "Cannot open $configFile: $!";
	print ANN "$inputs $hidden\n";
	print ANN "$func\n";

examples/NeuralNet/pso_ann.pl  view on Meta::CPAN

	}
	print ANN "\n";
	close(ANN);
}

sub runANN($$) {
	my ($configFile, $dataFile) = @_;
	my $networkValue = `ann_compute $configFile $dataFile`;
	chomp($networkValue);
	return $networkValue;
}

 view all matches for this distribution


AI-ParticleSwarmOptimization-MCE

 view release on metacpan or  search on metacpan

example/PSOTest-MultiCore.pl  view on Meta::CPAN

#use AI::ParticleSwarmOptimization;
use AI::ParticleSwarmOptimization::MCE;
#use AI::ParticleSwarmOptimization::Pmap;
use Data::Dumper; $::Data::Dumper::Sortkeys = 1;
#=======================================================================
sub calcFit {
    my @values = @_;
    my $offset = int (-@values / 2);
    my $sum;

	select( undef, undef, undef, 0.01 );	# Simulation of heavy processing...

 view all matches for this distribution


AI-ParticleSwarmOptimization

 view release on metacpan or  search on metacpan

Samples/PSOPlatTest.pl  view on Meta::CPAN


printf ",# Fit %.5f at (%s) after %d iterations\n",
    $fit, join (', ', map {sprintf '%.4f', $_} @values), $iters;


sub calcFit {
    my @values = @_;
    my $offset = int (-@values / 2);
    my $sum;

    $sum += ($_ - $offset++)**2 for @values;
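	# Note: this test fitness is a shifted paraboloid, the sum over i of
	# ($values[$i] - ($offset + $i))**2; it is zero exactly when each value
	# equals its shifted index, e.g. (-1, 0, 1) for three values.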

 view all matches for this distribution


( run in 0.757 second using v1.01-cache-2.11-cpan-a5abf4f5562 )