Result:
found more than 191 distributions - search limited to the first 2001 files matching your query (run in 0.605 seconds)


AI-Gene-Sequence


AI/Gene/Sequence.pm  view on Meta::CPAN

##
# calls mutation method at random
# 0: number of mutations to perform
# 1: ref to hash of probs to use (otherwise uses default mutations and probs)
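# e.g. (a hedged sketch): $gene->mutate(3);   # three mutations with the default probabilities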

sub mutate {
  my $self = shift;
  my $num_mutates = +$_[0] || 1;
  my $rt = 0;
  my ($hr_probs, $muts);
  if (ref $_[1] eq 'HASH') { # use non standard mutations or probs

AI/Gene/Sequence.pm  view on Meta::CPAN


##
# creates a normalised and cumulative prob distribution for the
# keys of the referenced hash

sub _normalise {
  my $hr = $_[0];
  my $h2 = {};
  my $muts = [keys %{$hr}];
  my $sum = 0;
  foreach (values %{$hr}) {

AI/Gene/Sequence.pm  view on Meta::CPAN

##
# inserts one element into the sequence
# 0: number to perform ( or 1)
# 1: position to mutate (undef for random)

sub mutate_insert {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  for (1..$num) {
    my $length = length $self->[0];
    my $pos = defined($_[1]) ? $_[1] : int rand $length;
    next if $pos > $length; # further than 1 place after gene
    my @token = $self->generate_token;
    my $new = $self->[0];
    substr($new, $pos, 0) = $token[0];
    next unless $self->valid_gene($new, $pos);
    $self->[0] = $new;
    splice @{$self->[1]}, $pos, 0, $token[1];
    $rt++;
  }

AI/Gene/Sequence.pm  view on Meta::CPAN

# removes element(s) from sequence
# 0: number of times to perform
# 1: position to affect (undef for rand)
# 2: length to affect, undef => 1, 0 => random length

sub mutate_remove {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  for (1..$num) {
    my $length = length $self->[0];
    my $len = !defined($_[2]) ? 1 : ($_[2] || int rand $length);
    next if ($length - $len) <= 0;
    my $pos = defined($_[1]) ? $_[1] : int rand $length;
    next if $pos >= $length; # outside of gene
    my $new = $self->[0];
    substr($new, $pos, $len) = '';
    next unless $self->valid_gene($new, $pos);
    $self->[0] = $new;
    splice @{$self->[1]}, $pos, $len;
    $rt++;
  }

AI/Gene/Sequence.pm  view on Meta::CPAN

# 0: number to perform (or 1)
# 1: posn to copy from (undef for rand)
# 2: posn to splice in (undef for rand)
# 3: length            (undef for 1, 0 for random)

sub mutate_duplicate {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  for (1..$num) {
    my $length = length $self->[0];

AI/Gene/Sequence.pm  view on Meta::CPAN

    my $pos1 = defined($_[1]) ? $_[1] : int rand $length;
    my $pos2 = defined($_[2]) ? $_[2] : int rand $length;
    my $new = $self->[0];
    next if ($pos1 + $len) > $length;
    next if $pos2 > $length;
    my $seq = substr($new, $pos1, $len);
    substr($new, $pos2,0) = $seq;
    next unless $self->valid_gene($new);
    $self->[0] = $new;
    splice @{$self->[1]}, $pos2, 0, @{$self->[1]}[$pos1..($pos1+$len-1)];
    $rt++;
  }

AI/Gene/Sequence.pm  view on Meta::CPAN

# 0: num to perform  (or 1)
# 1: pos to get from          (undef for rand)
# 2: pos to start replacement (undef for rand)
# 3: length to operate on     (undef => 1, 0 => rand)

sub mutate_overwrite {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  
  for (1..$num) {

AI/Gene/Sequence.pm  view on Meta::CPAN

    my $len = !defined($_[3]) ? 1 : ($_[3] || int rand $length);
    my $pos1 = defined($_[1]) ? $_[1] : int rand $length;
    my $pos2 = defined($_[2]) ? $_[2] : int rand $length;
    next if ( ($pos1 + $len) >= $length
	      or $pos2 > $length);
    substr($new, $pos2, $len) = substr($new, $pos1, $len);
    next unless $self->valid_gene($new);
    $self->[0] = $new;
    splice (@{$self->[1]}, $pos2, $len,
	    @{$self->[1]}[$pos1..($pos1+$len-1)] );
    $rt++;

AI/Gene/Sequence.pm  view on Meta::CPAN

# Takes a run of tokens and reverses their order; a no-op with one item
# 0: number to perform
# 1: posn to start from (undef for rand)
# 2: length             (undef=>1, 0=>rand)

sub mutate_reverse {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  
  for (1..$num) {

AI/Gene/Sequence.pm  view on Meta::CPAN

    my $len = !defined($_[2]) ? 1 : ($_[2] || int rand $length);

    next if ($pos >= $length
	    or $pos + $len > $length);

    my $chunk = reverse split('', substr($new, $pos, $len));
    substr($new, $pos, $len) = join('', $chunk);
    next unless $self->valid_gene($new);
    $self->[0] = $new;
    splice (@{$self->[1]}, $pos, $len,
	    reverse( @{$self->[1]}[$pos..($pos+$len-1)] ));
    $rt++;

AI/Gene/Sequence.pm  view on Meta::CPAN

##
# Changes a token into one of the same type (i.e. passes the type to generate_token)
# 0: number to perform
# 1: position to affect (undef for rand)

sub mutate_minor {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  for (1..$num) {
    my $pos = defined $_[1] ? $_[1] : int rand length $self->[0];
    next if $pos >= length($self->[0]); # pos lies outside of gene
    my $type = substr($self->[0], $pos, 1);
    my @token = $self->generate_token($type, $self->[1][$pos]);
    # still need to check for niceness, just in case
    if ($token[0] eq $type) {
      $self->[1][$pos] = $token[1];
    }
    else {
      my $new = $self->[0];
      substr($new, $pos, 1) = $token[0];
      next unless $self->valid_gene($new, $pos);
      $self->[0] = $new;
      $self->[1][$pos] = $token[1];
    }
    $rt++;

AI/Gene/Sequence.pm  view on Meta::CPAN

##
# Changes one token into some other token
# 0: number to perform
# 1: position to affect (undef for random)

sub mutate_major {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  for (1..$num) {
    my $pos = defined $_[1] ? $_[1] : int rand length $self->[0];
    next if $pos >= length($self->[0]); # outside of gene
    my @token = $self->generate_token();
    my $new = $self->[0];
    substr($new, $pos, 1) = $token[0];
    next unless $self->valid_gene($new, $pos);
    $self->[0] = $new;
    $self->[1][$pos] = $token[1];
    $rt++;
  }

AI/Gene/Sequence.pm  view on Meta::CPAN

# 1: start of first sequence   (undef for rand)
# 2: start of second sequence  (undef for rand)
# 3: length of first sequence  (undef for 1, 0 for rand)
# 4: length of second sequence (undef for 1, 0 for rand)

sub mutate_switch {
  my $self = shift;
  my $num = $_[0] || 1;
  my $rt = 0;
  for (1..$num) {
    my $length = length $self->[0];

AI/Gene/Sequence.pm  view on Meta::CPAN

    if ( ($pos1 + $len1) > $pos2 # ensure no overlaps
	 or ($pos2 + $len2) > $length
	 or $pos1 >= $length ) {
      next;
    }
    my $chunk1 = substr($new, $pos1, $len1, substr($new, $pos2, $len2,''));
    substr($new,$pos2 -$len1 + $len2,0) = $chunk1;
    next unless $self->valid_gene($new);
    $self->[0]= $new;
    my @chunk1 = splice(@{$self->[1]}, $pos1, $len1,
			splice(@{$self->[1]}, $pos2, $len2) );
    splice @{$self->[1]}, $pos2 + $len2 - $len1,0, @chunk1;

AI/Gene/Sequence.pm  view on Meta::CPAN

# 0: number to perform
# 1: posn to get from   (undef for rand)
# 2: posn to put        (undef for rand)
# 3: length of sequence (undef for 1, 0 for rand)

sub mutate_shuffle {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  
  for (1..$num) {

AI/Gene/Sequence.pm  view on Meta::CPAN

	or $pos2 >= $length      # outside gene
	or ($pos2 < ($pos1 + $len) and $pos2 > $pos1)) { # overlap
      next;
    }
    if ($pos1 < $pos2) {
      substr($new, $pos2-$len,0) = substr($new, $pos1, $len, '');
    }
    else {
      substr($new, $pos2, 0) = substr($new, $pos1, $len, '');
    }
    next unless $self->valid_gene($new);
    $self->[0] = $new;
    if ($pos1 < $pos2) {
      splice (@{$self->[1]}, $pos2-$len, 0, 

AI/Gene/Sequence.pm  view on Meta::CPAN

# can be called with a token type to produce, or with none.
# if called with a token type, it will also be passed the original
# token as the second argument.
# should return a two element list of the token type followed by the token itself.

sub generate_token {
  my $self = shift;
  my $token_type = $_[0];
  my $letter = ('a'..'z')[rand 26]; # 26 letters; 'rand 25' could never pick 'z'
  unless ($token_type) {
    return ($letter) x 2;

AI/Gene/Sequence.pm  view on Meta::CPAN

}

# takes a string of token types to be checked for validity.
# If a mutation affects only one place, then the position of the
# mutation can be passed as a second argument.
sub valid_gene {1}

## You might also want to have methods like the following,
# they will not be called by the 'sequence' methods.

# Default constructor
sub new {
  my $gene = ['',[]];
  return bless $gene, ref $_[0] || $_[0];
}

# remember that the clone method may require deep copying, depending on
# your specific needs (a deep-copy sketch follows the sub below)

sub clone {
  my $self = shift;
  my $new = [$self->[0]];
  $new->[1] = [@{$self->[1]}];
  return bless $new, ref $self;
}
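
# A deep-copy variant (a hedged sketch; "clone_deep" is a hypothetical
# name, for when the token list holds nested references):

sub clone_deep {
  require Storable;
  my $self = shift;
  my $new = [ $self->[0], Storable::dclone($self->[1]) ];
  return bless $new, ref $self;
}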

# You need some way to use the gene you've made and mutated, but
# this will let you have a look, if it starts being odd.

sub render_gene {
  my $self = shift;
  my $return =  "$self\n";
  $return .= $self->[0] . "\n";
  $return .= (join ',', @{$self->[1]}). "\n";
  return $return;
}

# used for testing

sub _test_dump {
  my $self = shift;
  my @rt = ($self->[0], join('',@{$self->[1]}));
  return @rt;
}
1;

AI/Gene/Sequence.pm  view on Meta::CPAN

 our @ISA = qw(AI::Gene::Sequence);

 my %things = ( a => [qw(a1 a2 a3 a4 a5)],
	       b => [qw(b1 b2 b3 b4 b5)],);

 sub generate_token {
  my $self = shift;
  my ($type, $prev) = @_;
  if ($type) {
    $prev = ${ $things{$type} }[rand @{ $things{$type} }];
  } 

AI/Gene/Sequence.pm  view on Meta::CPAN

    $prev = ${$things{$type}}[rand @{$things{$type}}];
  }
  return ($type, $prev); 
 }

 sub valid_gene {
   my $self = shift;
   return 0 if $_[0] =~ /(.)\1/;
   return 1;
 }

 sub seed {
   my $self = shift;
   $self->[0] = 'ababab';
   @{$self->[1]} = qw(A1 B1 A2 B2 A3 B3);
 }

 sub render {
   my $self = shift;
   return join(' ', @{$self->[1]});
 } 

 # elsewhere
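
 # A hedged usage sketch (assuming the subclass above is packaged as, say,
 # My::Gene; the expected output follows seed() and render() above):

 my $gene = My::Gene->new;
 $gene->seed;
 print $gene->render, "\n";   # A1 B1 A2 B2 A3 B3
 $gene->mutate(5);            # five random mutations
 print $gene->render, "\n";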



AI-General


General.pm  view on Meta::CPAN



our $VERSION = '0.01';


sub new {
	my ( $class ) = @_;
	my $self = bless [], $class;
	$self->dwim( "Implement self" );
	return $self;
}


sub dwim {
	my ( $self, $args ) = @_;

	#... TO DO
}
	



AI-Genetic-Pro


lib/AI/Genetic/Pro.pm  view on Meta::CPAN

use constant GD 		=> 'GD::Graph::linespoints'; 
#=======================================================================
my $_Cache = { };
my $_temp_chromosome;
#=======================================================================
sub new {
	my ( $class, %args ) = ( shift, @_ );
	
	#-------------------------------------------------------------------
	my %opts = map { if(ref $_){$_}else{ /^-?(.*)$/o; $1 }} @_;
	my $self = bless \%opts, $class;

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	'AI::Genetic::Pro::MCE'->use or die q[Cannot raise multicore support: ] . $@;
	
	return AI::Genetic::Pro::MCE->new( $self, \%args );
}
#=======================================================================
sub _Cache { $_Cache; }
#=======================================================================
# INIT #################################################################
#=======================================================================
sub _set_strict {
	my ($self) = @_;
	
	# fitness
	my $fitness = $self->fitness();
	my $replacement = sub {
		my @tmp = @{$_[1]};
		my $ret = $fitness->(@_);
		my @cmp = @{$_[1]};
		die qq/Chromosome was modified in a fitness function from "@tmp" to "@{$_[1]}"!\n/ unless compare(\@tmp, \@cmp);
		return $ret;
	};
	$self->fitness($replacement);
}
#=======================================================================
sub _fitness_cached {
	my ($self, $chromosome) = @_;
	
	#my $key = md5_hex(${tied(@$chromosome)});
	my $key = md5_hex( $self->_package ? md5_hex( ${ tied( @$chromosome ) } ) : join( q[:], @$chromosome ) );
	return $_Cache->{$key} if exists $_Cache->{$key};
	
	$_Cache->{$key} = $self->_fitness_real->($self, $chromosome);
	return $_Cache->{$key};
}
#=======================================================================
sub _init_cache {
	my ($self) = @_;
		
	$self->_fitness_real($self->fitness);
	$self->fitness(\&_fitness_cached);
	return;
}
#=======================================================================
sub _check_data_ref {
	my ($self, $data_org) = @_;
	my $data = clone($data_org);
	my $ars;
	for(0..$#$data){
		next if $ars->{$data->[$_]};

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	return $data;
}
#=======================================================================
# we have to find a constant C to (in some cases) increase the value of
# the range, due to the design model
sub _find_fix_range {
	my ($self, $data) = @_;

	for my $idx (0..$#$data){
		if($data->[$idx]->[1] < 1){ 
			my $const = 1 - $data->[$idx]->[1];

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	}

	return $data;
}
#=======================================================================
sub init { 
	my ( $self, $data ) = @_;
	
	croak q/You have to pass some data to "init"!/ unless $data;
	#-------------------------------------------------------------------
	$self->generation(0);

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	else{ for(@{$self->_translations}){ $size = $_->[2] if $_->[2] > $size; } }		# Provisional patch for rangevector values truncated to signed  8-bit quantities. Thx to Tod Hagan

	my $package = get_package_by_element_size($size);
	$self->_package($package);

	my $length = ref $data ? sub { $#$data; } : sub { $data - 1 };
	if($self->variable_length){
		$length = ref $data ? sub { 1 + int( rand( $#{ $self->_init } ) ); } : sub { 1 + int( rand( $self->_init - 1) ); };
	}

	$self->_length( $length );

	$self->chromosomes( [ ] );

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	$self->_calculate_fitness_all();
}
#=======================================================================
# SAVE / LOAD ##########################################################
#=======================================================================
sub spew {
	#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
	STORABLE->use( qw( store retrieve freeze thaw ) ) or croak(q/You need "/.STORABLE.q/" module to save a state of "/.__PACKAGE__.q/"!/);
	$Storable::Deparse = 1;
	$Storable::Eval = 1;
	#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	}
	
	return $clone;
}
#=======================================================================
sub slurp {
	my ( $self, $dump ) = @_;

	if( my $typ = $self->_package ){ 
		@{ $dump->{ chromosomes } } = map {
			my $arr = $typ->make_with_packed( $_ );

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

    %$self = %$dump;
    
	return 1;
}
#=======================================================================
sub save { 
	my ( $self, $file ) = @_;
	
	croak(q/You have to specify file!/) unless defined $file;
	
	store( $self->spew, $file );
}
#=======================================================================
sub load { 
	#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
	STORABLE->use( qw( store retrieve freeze thaw ) ) or croak(q/You need "/.STORABLE.q/" module to load a state of "/.__PACKAGE__.q/"!/);	
	$Storable::Deparse = 1;
	$Storable::Eval = 1;
	#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	return $self->slurp( $clone );
}
#=======================================================================
# CHARTS ###############################################################
#=======================================================================
sub chart { 
	GD->require or croak(q/You need "/.GD.q/" module to draw chart of evolution!/);	
	my ($self, %params) = (shift, @_);

	my $graph = GD()->new(($params{-width} || 640), ($params{-height} || 480));

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

    return 1;
}
#=======================================================================
# TRANSLATIONS #########################################################
#=======================================================================
sub as_array_def_only {
	my ($self, $chromosome) = @_;
	
	return $self->as_array($chromosome) 
		if not $self->variable_length or $self->variable_length < 2;
	

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

		return @array if wantarray;
		return \@array;
	}
}
#=======================================================================
sub as_array {
	my ($self, $chromosome) = @_;
	
	if($self->type eq q/bitvector/){
		# This could lead to an internal error, because of the underlying Tie::Array::Packed
		#return @$chromosome if wantarray;

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

		return @array if wantarray;
		return \@array;
	}
}
#=======================================================================
sub as_string_def_only {	
	my ($self, $chromosome) = @_;
	
	return $self->as_string($chromosome) 
		if not $self->variable_length or $self->variable_length < 2;

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	
	return join(q//, @$array) if $self->type eq q/bitvector/;
	return join(q/___/, @$array);
}
#=======================================================================
sub as_string {	
	return join(q//, @{$_[1]}) if $_[0]->type eq q/bitvector/;
	return 	join(q/___/, map { defined $_ ? $_ : q/ / } $_[0]->as_array($_[1]));
}
#=======================================================================
sub as_value { 
	my ($self, $chromosome) = @_;
	croak(q/You MUST call 'as_value' as method of 'AI::Genetic::Pro' object./)
		unless defined $_[0] and ref $_[0] and ( ref $_[0] eq 'AI::Genetic::Pro' or ref $_[0] eq 'AI::Genetic::Pro::MCE');
	croak(q/You MUST pass 'AI::Genetic::Pro::Chromosome' object to 'as_value' method./) 
		unless defined $_[1] and ref $_[1] and ref $_[1] eq 'AI::Genetic::Pro::Chromosome';
	return $self->fitness->($self, $chromosome);  
}
#=======================================================================
# ALGORITHM ############################################################
#=======================================================================
sub _calculate_fitness_all {
	my ($self) = @_;
	
	$self->_fitness( { } );
	$self->_fitness->{$_} = $self->fitness()->($self, $self->chromosomes->[$_]) 
		for 0..$#{$self->chromosomes};

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

#	$self->chromosomes(\@chromosomes);

	return;
}
#=======================================================================
sub _select_parents {
	my ($self) = @_;
	unless($self->_selector){
		croak "You must specify a selection strategy!"
			unless defined $self->selection;
		my @tmp = @{$self->selection};

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	$self->_parents($self->_selector->run($self));
	
	return;
}
#=======================================================================
sub _crossover {
	my ($self) = @_;
	
	unless($self->_strategist){
		my @tmp = @{$self->strategy};
		my $strategist = q/AI::Genetic::Pro::Crossover::/ . shift @tmp;

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	$self->chromosomes( $a );
	
	return;
}
#=======================================================================
sub _mutation {
	my ($self) = @_;
	
	unless($self->_mutator){
		my $mutator = q/AI::Genetic::Pro::Mutation::/ . ucfirst(lc($self->type));
		unless($mutator->require){

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	}
	
	return $self->_mutator->run($self);
}
#=======================================================================
sub _save_history {
	my @tmp;
	if($_[0]->history){ @tmp = $_[0]->getAvgFitness; }
	else { @tmp = (undef, undef, undef); }
	
	push @{$_[0]->_history->[0]}, $tmp[0]; 
	push @{$_[0]->_history->[1]}, $tmp[1];
	push @{$_[0]->_history->[2]}, $tmp[2];
	return 1;
}
#=======================================================================
sub inject {
	my ($self, $candidates) = @_;
	
	for(@$candidates){
		push @{$self->chromosomes}, 
			AI::Genetic::Pro::Chromosome->new_from_data($self->_translations, $self->type, $self->_package, $_, $self->_fix_range);

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	$self->population( $self->population + scalar( @$candidates ) );

	return 1;
}
#=======================================================================
sub _state {
	my ( $self ) = @_;
	
	my @res;
	
	if( $self->_package ){

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	}
	
	return \@res;
}
#=======================================================================
sub evolve {
	my ($self, $generations) = @_;

	# generations must be defined
	$generations ||= -1; 	 
	

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	}
}
#=======================================================================
# ALIASES ##############################################################
#=======================================================================
sub people { $_[0]->chromosomes() }
#=======================================================================
sub getHistory { $_[0]->_history()  }
#=======================================================================
sub mutProb { shift->mutation(@_) }
#=======================================================================
sub crossProb { shift->crossover(@_) }
#=======================================================================
sub intType { shift->type() }
#=======================================================================
# STATS ################################################################
#=======================================================================
sub getFittest_as_arrayref { 
	my ($self, $n, $uniq) = @_;
	$n ||= 1;
	
	$self->_calculate_fitness_all() unless scalar %{ $self->_fitness };
	my @keys = sort { $self->_fitness->{$a} <=> $self->_fitness->{$b} } 0..$#{$self->chromosomes};

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	
	$n = scalar @keys if $n > scalar @keys;
	return [ reverse @{$self->chromosomes}[ splice @keys, $#keys - $n + 1, $n ] ];
}
#=======================================================================
sub getFittest { return wantarray ? @{ shift->getFittest_as_arrayref(@_) } : shift @{ shift->getFittest_as_arrayref(@_) }; }
#=======================================================================
sub getAvgFitness {
	my ($self) = @_;
	
	my @minmax = minmax values %{$self->_fitness};
	my $mean = sum(values %{$self->_fitness}) / scalar values %{$self->_fitness};
	return $minmax[1], int($mean), $minmax[0];

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


=head1 SYNOPSIS

    use AI::Genetic::Pro;
    
    sub fitness {
        my ($ga, $chromosome) = @_;
        return oct('0b' . $ga->as_string($chromosome)); 
    }
    
    sub terminate {
        my ($ga) = @_;
        my $result = oct('0b' . $ga->as_string($ga->getFittest));
        return $result == 4294967295 ? 1 : 0;
    }
    
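    # A constructor call along these lines typically completes the synopsis
    # (a hedged sketch; option values are illustrative, option names are the
    # ones documented below):
    my $ga = AI::Genetic::Pro->new(
        -fitness   => \&fitness,      # fitness function
        -terminate => \&terminate,    # terminate function
        -type      => 'bitvector',    # type of chromosomes
    );
    $ga->init(32);     # each chromosome gets 32 genes
    $ga->evolve(10);   # evolve for up to 10 generations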

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


=over 8

=item -fitness

This defines a I<fitness> function. It expects a reference to a subroutine.

=item -terminate 

This defines a I<terminate> function. It expects a reference to a subroutine.

=item -type

This defines the type of chromosomes. Currently, C<AI::Genetic::Pro> supports four types:

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


This initializes a population where each individual/chromosome has 10 genes.

=item B<listvector>

For listvectors, the argument is an anonymous list of lists. The number of sub-lists is equal to the number of genes of each individual/chromosome. Each sub-list defines the possible string values that the corresponding gene can assume.

    $ga->init([
               [qw/red blue green/],
               [qw/big medium small/],
               [qw/very_fat fat fit thin very_thin/],

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


This initializes a population where each individual/chromosome has 3 genes and each gene can assume one of the given values.

=item B<rangevector>

For rangevectors, the argument is an anonymous list of lists. The number of sub-lists is equal to the number of genes of each individual/chromosome. Each sub-list defines the minimum and maximum integer values that the corresponding gene can assume.

    $ga->init([
               [1, 5],
               [0, 20],
               [4, 9],



AI-Genetic


Genetic.pm  view on Meta::CPAN

		     listvector  => 'AI::Genetic::IndListVector',
		    );

##################

# sub new():
# This is the constructor. It creates a new AI::Genetic
# object. Options are:
# -population: set the population size
# -crossover:  set the crossover probability
# -mutation:   set the mutation probability
# -fitness:    set the fitness function
# -type:       set the genome type. See docs.
# -terminate:  set termination sub.

sub new {
  my ($class, %args) = @_;

  my $self = bless {
		    ADDSEL => {},   # user-defined selections
		    ADDCRS => {},   # user-defined crossovers
		    ADDMUT => {},   # user-defined mutations
		    ADDSTR => {},   # user-defined strategies
		   } => $class;

  $self->{FITFUNC}    = $args{-fitness}    || sub { 1 };
  $self->{CROSSRATE}  = $args{-crossover}  || 0.95;
  $self->{MUTPROB}    = $args{-mutation}   || 0.05;
  $self->{POPSIZE}    = $args{-population} || 100;
  $self->{TYPE}       = $args{-type}       || 'bitvector';
  $self->{TERM}       = $args{-terminate}  || sub { 0 };

  $self->{PEOPLE}     = [];   # list of individuals
  $self->{GENERATION} = 0;    # current gen.

  $self->{INIT}       = 0;    # whether pop is initialized or not.

Genetic.pm  view on Meta::CPAN

  $self->{INDIVIDUAL} = '';   # name of individual class to use().

  return $self;
}
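
# A typical construction (a hedged sketch; values are illustrative, the
# option names are the ones listed above):
#
#   my $ga = AI::Genetic->new(
#       -fitness    => \&fitnessFunc,
#       -type       => 'bitvector',
#       -population => 500,
#       -crossover  => 0.9,
#       -mutation   => 0.01,
#       -terminate  => \&terminateFunc,
#   );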

# sub createStrategy():
# This method creates a new strategy.
# It takes two arguments: name of strategy, and
# anon sub that implements it.

sub createStrategy {
  my ($self, $name, $sub) = @_;

  if (ref($sub) eq 'CODE') {
    $self->{ADDSTR}{$name} = $sub;
  } else {
    # we don't know what this operation is.
    carp <<EOC;
ERROR: Must specify anonymous subroutine for strategy.
       Strategy '$name' will be deleted.
EOC
    ;
    delete $self->{ADDSTR}{$name};
    return undef;
  }

  return $name;
}

# sub evolve():
# This method evolves the population using a specific strategy
# for a specific number of generations.

sub evolve {
  my ($self, $strategy, $gens) = @_;

  unless ($self->{INIT}) {
    carp "can't evolve() before init()";
    return undef;

Genetic.pm  view on Meta::CPAN

#      print STDERR "    Genes are: @{$f->genes}.\n";
#    }
  }
}

# sub sortIndividuals():
# This method takes as input an anon list of individuals, and returns
# another anon list of the same individuals but sorted in decreasing
# score.

sub sortIndividuals {
  my ($self, $list) = @_;

  # make sure all scores are calculated.
  # This avoids a bug in Perl where, if a sort is called from within another
  # sort and the two are in different packages, you get a 'use of uninit value'

Genetic.pm  view on Meta::CPAN

  $_->score for @$list;

  return [sort {$b->score <=> $a->score} @$list];
}

# sub sortPopulation():
# This method sorts the population of individuals.

sub sortPopulation {
  my $self = shift;

  return if $self->{SORTED};

  $self->{PEOPLE} = $self->sortIndividuals($self->{PEOPLE});
  $self->{SORTED} = 1;
}

# sub getFittest():
# This method returns the fittest individuals.

sub getFittest {
  my ($self, $N) = @_;

  $N ||= 1;
  $N = 1 if $N < 1;

Genetic.pm  view on Meta::CPAN

  return $r[0] if $N == 1 && not wantarray;

  return @r;
}

# sub init():
# This method initializes the population to completely
# random individuals. It deletes all current individuals!!!
# It also examines the type of individuals we want, and
# require()s the proper class. Throws an error if it can't.
# Must pass to it an anon list that will be passed to the
# newRandom method of the individual.

# In case of bitvector, $newArgs is length of bitvector.
# In case of rangevector, $newArgs is anon list of anon lists.
# each sub-anon list has two elements, min number and max number.
# In case of listvector, $newArgs is anon list of anon lists.
# Each sub-anon list contains possible values of gene.

sub init {
  my ($self, $newArgs) = @_;

  $self->{INIT} = 0;

  my $ind;

Genetic.pm  view on Meta::CPAN

  $_->fitness($self->{FITFUNC}) for @{$self->{PEOPLE}};

  $self->{INIT} = 1;
}

# sub people():
# returns the current list of individuals in the population.
# note: this returns the actual array ref, so any changes
# made to it (ex, shift/pop/etc) will be reflected in the
# population.

sub people {
  my $self = shift;

  if (@_) {
    $self->{PEOPLE} = shift;
    $self->{SORTED} = 0;

Genetic.pm  view on Meta::CPAN


  $self->{PEOPLE};
}

# useful little methods to set/query parameters.
sub size       { $_[0]{POPSIZE}    = $_[1] if defined $_[1]; $_[0]{POPSIZE}   }
sub crossProb  { $_[0]{CROSSRATE}  = $_[1] if defined $_[1]; $_[0]{CROSSRATE} }
sub mutProb    { $_[0]{MUTPROB}    = $_[1] if defined $_[1]; $_[0]{MUTPROB}   }
sub indType    { $_[0]{INDIVIDUAL} }
sub generation { $_[0]{GENERATION} }

# sub inject():
# This method is used to add individuals to the current population.
# The point of it is that sometimes the population gets stagnant,
# so it could be useful to add some "fresh blood".
# Takes a variable number of arguments. The first argument is the
# total number, N, of new individuals to add. The remaining arguments
# are genomes to inject. There must be at most N genomes to inject.
# If the number, n, of genomes to inject is less than N, N - n random
# genomes are added. Perhaps an example will help (see the sketch below)?
# Returns 1 on success and undef on error.
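# For example (a hedged sketch, assuming a bitvector population whose
# genomes are anonymous lists of bits):
#   $ga->inject(5, [1,0,1,0], [0,0,1,1]);
# adds the two given genomes plus three randomly generated individuals.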

sub inject {
  my ($self, $count, @genomes) = @_;

  unless ($self->{INIT}) {
    carp "can't inject() before init()";
    return undef;

Genetic.pm  view on Meta::CPAN


     $ga->init(10);
     $ga->evolve('rouletteTwoPoint', 100);
     print "Best score = ", $ga->getFittest->score, ".\n";

     sub fitnessFunc {
         my $genes = shift;

         my $fitness;
         # assign a number to $fitness based on the @$genes
         # ...

         return $fitness;
      }

      sub terminateFunc {
         my $ga = shift;

         # terminate if reached some threshold.
         return 1 if $ga->getFittest->score > $THRESHOLD;
         return 0;

Genetic.pm  view on Meta::CPAN


This defines the mutation rate. Defaults to 0.05.

=item I<-fitness>

This defines a fitness function. It expects a reference to a subroutine.
More details are given in L</"FITNESS FUNCTION">.

=item I<-type>

This defines the type of the genome. Currently, AI::Genetic

Genetic.pm  view on Meta::CPAN


Defaults to I<bitvector>.

=item I<-terminate>

This option allows the definition of a termination subroutine.
It expects a subroutine reference. This sub will be called at
the end of each generation with one argument: the AI::Genetic
object. Evolution terminates if the sub returns a true value.

=back

=item I<$ga>-E<gt>B<createStrategy>(I<strategy_name>, I<sub_ref>)

This method allows the creation of a custom-made strategy to be used
during evolution. It expects a unique strategy name, and a subroutine
reference as arguments. The subroutine will be called with one argument:
the AI::Genetic object. It is expected to alter the population at each
generation. See L</"STRATEGIES"> for more information.
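
For example, registering a minimal strategy might look like this (a hedged
sketch; a real strategy would select, recombine and mutate individuals):

    $ga->createStrategy(myStrategy => sub {
        my $ga = shift;
        # rework the population returned by $ga->people() in place
    });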

=item I<$ga>-E<gt>B<init>(I<initArgs>)

Genetic.pm  view on Meta::CPAN

this initializes a population where each individual has 10 genes.

=item o

For listvectors, the argument is an anonymous list of lists. The
number of sub-lists is equal to the number of genes of each individual.
Each sub-list defines the possible string values that the corresponding gene
can assume.

    $ga->init([
               [qw/red blue green/],
               [qw/big medium small/],

Genetic.pm  view on Meta::CPAN

can assume one of the given values.

=item o

For rangevectors, the argument is an anonymous list of lists. The
number of sub-lists is equal to the number of genes of each individual.
Each sub-list defines the minimum and maximum integer values that the
corresponding gene can assume.

    $ga->init([
               [1, 5],
               [0, 20],

Genetic.pm  view on Meta::CPAN


The population is sorted according to the individuals' fitnesses.

=item o

The subroutine corresponding to the named strategy is called with one argument,
the AI::Genetic object. This subroutine is expected to alter the object itself.

=item o

If a termination subroutine is given, it is executed and the return value is
checked. Evolution terminates if this sub returns a true value.

=back

=item I<$ga>-E<gt>B<getFittest>(?I<N>?)

Genetic.pm  view on Meta::CPAN

own custom-made strategy. Consult their manpages for more info.
A custom-made strategy can be defined using the I<createStrategy()>
method and is called at the beginning of each generation. The only
argument to it is the AI::Genetic object itself. Note that the
population at this point is sorted according to each individual's
fitness score. It is expected that the strategy sub will modify
the population stored in the AI::Genetic object. Here's the
pseudo-code of events:

    for (1 .. num_generations) {
      sort population;
      call strategy_sub;
      if (termination_sub exists) {
        call termination_sub;
        last if returned true value;
      }
    }

=head1 A NOTE ON SPEED/EFFICIENCY



AI-Image


lib/AI/Image.pm  view on Meta::CPAN

$VERSION = eval $VERSION;

my $http = HTTP::Tiny->new;

# Create Image object
sub new {
    my $class = shift;
    my %attr  = @_;

    $attr{'error'}      = '';

lib/AI/Image.pm  view on Meta::CPAN

my %header = (
    'OpenAI' => &_get_header_openai,
);

# Returns true if the last operation succeeded
sub success {
    my $self = shift;
    return !$self->{'error'};
}

# Returns the error message if the last operation failed
sub error {
    my $self = shift;
    return $self->{'error'};
}
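
# A hedged usage sketch (the 'key' and 'api' attributes are the ones read
# by the code below; the key value is a placeholder):
#
#   my $ai  = AI::Image->new('key' => 'sk-XXXX', 'api' => 'OpenAI');
#   my $url = $ai->image('A Pembroke Welsh Corgi on a beach');
#   print $ai->error unless $ai->success;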

# Header for calling OpenAI
sub _get_header_openai {
    my $self = shift;
    $self->{'key'} = '' unless defined $self->{'key'};
    return {
         'Authorization' => 'Bearer ' . $self->{'key'},
         'Content-type'  => 'application/json'
     };
 }

# Get the URL of a generated image from a text prompt
sub image {
    my ($self, $prompt) = @_;

    my $response = $http->post($url{$self->{'api'}}, {
         'headers' => {
             'Authorization' => 'Bearer ' . $self->{'key'},



AI-LibNeural


LibNeural.pm  view on Meta::CPAN

our @EXPORT = qw(
);

our $VERSION = '0.02';

sub AUTOLOAD {
    # This AUTOLOAD is used to 'autoload' constants from the constant()
    # XS function.  If a constant is not found then control is passed
    # to the AUTOLOAD in AutoLoader.
    my $constname;
    our $AUTOLOAD;

LibNeural.pm  view on Meta::CPAN

    }
    {
	no strict 'refs';
	# Fixed between 5.005_53 and 5.005_61
#	if ($] >= 5.00561) {
#	    *$AUTOLOAD = sub () { $val };
#	}
#	else {
	    *$AUTOLOAD = sub { $val };
#	}
    }
    goto &$AUTOLOAD;
}



AI-Logic-AnswerSet


lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

	
);

our $VERSION = '0.02';

sub executeFromFileAndSave {		#Executes DLV with a file as input and saves the output in another file

	open DLVW, ">>", "$_[1]";
	print DLVW $_[2];
	close DLVW;

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

	open(STDOUT,">&SAVESTDOUT"); #close file and restore STDOUT
	close OUTPUT;

}

sub executeAndSave {	#Executes DLV and saves the output of the program written by the user in a file

	open(SAVESTDOUT, ">&STDOUT") or die "Can't save STDOUT: $!\n";
	open(STDOUT, ">$_[0]") or die "Can't open STDOUT to $_[0]", "$!\n";

	my @args = ("./dlv --");

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN



}


sub iterativeExec {	# Executes an input program with several instances and stores the results in a two-dimensional array

	my @input = @_;

	my @returned_value;

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN


	return @returned_value;

}

sub singleExec {	 # Executes a single input program or opens the DLV terminal and stores it in an array

	my @input = @_;
	my @returned_value;

	if(@input) {

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

	}

	return @returned_value;
}

sub selectOutput {	# Selects one of the outputs returned by the iterative execution of multiple input programs

	my @stdoutput = @{$_[0]};
	my $n = $_[1];

	return @{$stdoutput[$n]};
	
}

sub getFacts {	# Return the facts of the input program

	my $input = shift;

	my @isAFile = stat($input);

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

	}
	return @facts;
	
}

sub addCode {	#Adds code to input

	my $program = $_[0];
	my $code = $_[1];
	my @isAFile = stat($program);

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

		$$program = "$$program $code";
	}
		
}

sub getASFromFile {	#Gets the Answer Set from the file where the output was saved

	open RESULT, "<", "$_[0]" or die $!;
	my @result = <RESULT>;
	my @arr;
	foreach my $line (@result) {

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN


	close RESULT;
	return @arr;
}

sub getAS { #Returns the Answer Sets from the array where the output was saved

	my @result = @_;
	my @arr;

	foreach my $line (@result) {

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

	}

	return @arr;
}

sub statistics {	# Returns an array of hashes in which the statistics of every predicate of every answer set are stored.
			# If a comparison condition (on the number of predicates) is specified, it returns the answer sets
			# that satisfy that condition

	my @as = @{$_[0]};
	my @pred = @{$_[1]};

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

	}

	return @stat;
}

sub _evaluate {		#private use only

	my $value = shift;
	my $num = shift;
	my $operator = shift;

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

		return 0;
	}
	return 0;
}

sub mapAS {	#Mapping of the Answer Sets in an array of hashes

	my $countAS = 0;

	my @answerSets = @{$_[0]};

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

	}
	return @pred;

}

sub _existsPred {	#Verifies the existence of a predicate (private use only)

	my $pred = $_[0];
	my @predList = @{$_[1]};

	my $size = @predList;

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

	}
	return 0;
		
}

sub getPred {	#Returns the predicates from the array of hashes

	my @pr = @{$_[0]};
	return @{$pr[$_[1]]{$_[2]}};
}

sub getProjection {	#Returns the values selected by the user

	my @pr = @{$_[0]};
	my @projection;

	my @res = @{$pr[$_[1]]{$_[2]}};

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

	}

	return @projection;
}
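
# A typical pipeline through the helpers above (a hedged sketch; the file
# name and the predicate name are illustrative):
#
#   my @out    = AI::Logic::AnswerSet::singleExec("program.dlv");
#   my @as     = AI::Logic::AnswerSet::getAS(@out);
#   my @mapped = AI::Logic::AnswerSet::mapAS(\@as);
#   my @nodes  = AI::Logic::AnswerSet::getPred(\@mapped, 0, "node");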

sub createNewFile {

	my $file = $_[0];
	my $code = $_[1];

	open FILE, ">", $file;
	print FILE "$code\n";
	close FILE;

}

sub addFacts {

	my $name = $_[0];
	my @facts = @{$_[1]};
	my $append = $_[2];
	my $filename = $_[3];



AI-ML


inc/MyBuilder.pm  view on Meta::CPAN

use Path::Tiny;

my $EXTRA_O_FLAGS = "";
my $EXTRA_FLAGS = "-lblas -llapack";

sub ACTION_code {
    my $self = shift;

    $EXTRA_O_FLAGS .= " -DUSE_REAL" unless exists $self->args->{'with-float'};

    $self->update_XS("XS/ML.xs.inc");

inc/MyBuilder.pm  view on Meta::CPAN

    $self->dispatch("compile_xs");

    $self->SUPER::ACTION_code;
}

sub update_XS {
    my ($self, $file) = @_;
    my $output = $file;
    $output =~ s/\.inc$//;

    open my $i_fh, "<", $file   or die "$!";

inc/MyBuilder.pm  view on Meta::CPAN

    }
    close $o_fh;
    close $i_fh;
}

sub ACTION_create_objects {
    my $self = shift;
    my $cbuilder = $self->cbuilder;

    my $c_progs = $self->rscan_dir("C", qr/\.c$/);
    for my $file (@$c_progs) {

inc/MyBuilder.pm  view on Meta::CPAN

            include_dirs => ["."]
        );
    }
}

sub ACTION_compile_xs {
    my $self = shift;
    my $cbuilder = $self->cbuilder;

    my $archdir = path($self->blib, "arch", "auto", "AI", "ML");
    $archdir->mkpath unless -d $archdir;



AI-MXNet-Gluon-Contrib


lib/AI/MXNet/Gluon/Contrib/NN/BasicLayers.pm  view on Meta::CPAN


    Example:

        $net = nn->Concurrent();
        # use net's name_scope to give children blocks appropriate names.
        $net->name_scope(sub {
            $net->add(nn->Dense(10, activation=>'relu'));
            $net->add(nn->Dense(20));
            $net->add(nn->Identity());
        });

lib/AI/MXNet/Gluon/Contrib/NN/BasicLayers.pm  view on Meta::CPAN


    Example:

        $net = nn->HybridConcurrent();
        # use net's name_scope to give children blocks appropriate names.
        $net->name_scope(sub {
            $net->add(nn->Dense(10, activation=>'relu'));
            $net->add(nn->Dense(20));
            $net->add(nn->Identity());
        });

lib/AI/MXNet/Gluon/Contrib/NN/BasicLayers.pm  view on Meta::CPAN


    Example:

        $net = nn->HybridConcurrent();
        # use net's name_scope to give child Blocks appropriate names.
        $net->name_scope(sub {
            $net->add(nn->Dense(10, activation=>'relu'));
            $net->add(nn->Dense(20));
            $net->add(nn->Identity());
        });
=cut

lib/AI/MXNet/Gluon/Contrib/NN/BasicLayers.pm  view on Meta::CPAN

has 'output_dim'         => (is => 'ro', isa => 'Int', required => 1);
has 'dtype'              => (is => 'ro', isa => 'Dtype', default => 'float32');
has 'weight_initializer' => (is => 'ro', isa => 'Maybe[Initializer]');
method python_constructor_arguments() { [qw/input_dim output_dim dtype weight_initializer/] }

sub BUILD
{
    my $self = shift;
    $self->_kwargs({
        input_dim => $self->input_dim, 
        output_dim => $self->output_dim,

lib/AI/MXNet/Gluon/Contrib/NN/BasicLayers.pm  view on Meta::CPAN

{
    my $weight = $self->weight->row_sparse_data($x);
    return AI::MXNet::NDArray->Embedding($x, $weight, { name=>'fwd', %{ $self->_kwargs } });
}

use overload '""' => sub {
    my $self = shift;
    $self->_class_name.'('.$self->input_dim.' -> '.$self->output_dim.', '.$self->dtype.')';
};

__PACKAGE__->register('AI::MXNet::Gluon::NN');



AI-MXNet-Gluon-ModelZoo


examples/image_classification.pl  view on Meta::CPAN

GetOptions(
    ## my Pembroke Welsh Corgi Kyuubi, enjoying the solar eclipse of August 21, 2017
    'image=s' => \(my $image = 'http://apache-mxnet.s3-accelerate.dualstack.amazonaws.com/'.
                               'gluon/dataset/kyuubi.jpg'),
    'model=s' => \(my $model = 'resnet152_v2'),
    'help'    => sub { HelpMessage(0) },
) or HelpMessage(1);

## get a pretrained model (download parameters file if necessary)
my $net = get_model($model, pretrained => 1);



AI-MXNet


examples/calculator.pl  view on Meta::CPAN

use warnings;
use AI::MXNet ('mx');

## preparing the samples
## to train our network
sub samples {
    my($batch_size, $func) = @_;
    # get samples
    my $n = 16384;
    ## creates a pdl with $n rows and two columns with random
    ## floats in the range between 0 and 1

examples/calculator.pl  view on Meta::CPAN

        label => $validation_label,
    ));
}

## the network model
sub nn_fc {
    my $data = mx->sym->Variable('data');
    my $ln = mx->sym->exp(mx->sym->FullyConnected(
        data => mx->sym->log($data),
        num_hidden => 1,
    ));

examples/calculator.pl  view on Meta::CPAN

	num_hidden => 1
    );
    return mx->sym->MAERegressionOutput(data => $fc, name => 'softmax');
}

sub learn_function {
    my(%args) = @_;
    my $func = $args{func};
    my $batch_size = $args{batch_size}//128;
    my($train_iter, $eval_iter) = samples($batch_size, $func);
    my $sym = nn_fc();

examples/calculator.pl  view on Meta::CPAN

    my ($arg_params) = $model->get_params;
    for my $k (sort keys %$arg_params)
    {
	print "$k -> ". $arg_params->{$k}->aspdl."\n";
    }
    return sub {
        my($n, $m) = @_;
        return $model->predict(mx->io->NDArrayIter(
            batch_size => 1,
            data => PDL->new([[ $n, $m ]]),
        ))->aspdl->list;
    };
}

my $add = learn_function(func => sub {
    my($n, $m) = @_;
    return $n + $m;
});
my $sub = learn_function(func => sub {
    my($n, $m) = @_;
    return $n - $m;
}, batch_size => 50, epoch => 40);
my $mul = learn_function(func => sub {
    my($n, $m) = @_;
    return $n * $m;
}, batch_size => 50, epoch => 40);
my $div = learn_function(func => sub {
    my($n, $m) = @_;
    return $n / $m;
}, batch_size => 10, epoch => 80);


print "12345 + 54321 ≈ ", $add->(12345, 54321), "\n";
print "188 - 88 ≈ ", $sub->(188, 88), "\n";
print "250 * 2 ≈ ", $mul->(250, 2), "\n";
print "250 / 2 ≈ ", $div->(250, 2), "\n";



AI-MaxEntropy


inc/Module/AutoInstall.pm  view on Meta::CPAN


# See if it's a testing or non-interactive session
_accept_default( $ENV{AUTOMATED_TESTING} or ! -t STDIN ); 
_init();

sub _accept_default {
    $AcceptDefault = shift;
}

sub missing_modules {
    return @Missing;
}

sub do_install {
    __PACKAGE__->install(
        [
            $Config
            ? ( UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} )
            : ()

inc/Module/AutoInstall.pm  view on Meta::CPAN

        @Missing,
    );
}

# initialize various flags, and/or perform install
sub _init {
    foreach my $arg (
        @ARGV,
        split(
            /[\s\t]+/,
            $ENV{PERL_AUTOINSTALL} || $ENV{PERL_EXTUTILS_AUTOINSTALL} || ''

inc/Module/AutoInstall.pm  view on Meta::CPAN

        }
    }
}

# overrides MakeMaker's prompt() to automatically accept the default choice
sub _prompt {
    goto &ExtUtils::MakeMaker::prompt unless $AcceptDefault;

    my ( $prompt, $default ) = @_;
    my $y = ( $default =~ /^[Yy]/ );

inc/Module/AutoInstall.pm  view on Meta::CPAN

    print "$default\n";
    return $default;
}

# the workhorse
sub import {
    my $class = shift;
    my @args  = @_ or return;
    my $core_all;

    print "*** $class version " . $class->VERSION . "\n";

inc/Module/AutoInstall.pm  view on Meta::CPAN

    *{'main::WriteMakefile'} = \&Write if caller(0) eq 'main';
}

# Check to see if we are currently running under CPAN.pm and/or CPANPLUS;
# if we are, then we simply let it take care of our dependencies
sub _check_lock {
    return unless @Missing;

    if ($ENV{PERL5_CPANPLUS_IS_RUNNING}) {
        print <<'END_MESSAGE';

inc/Module/AutoInstall.pm  view on Meta::CPAN


    close LOCK;
    return;
}

sub install {
    my $class = shift;

    my $i;    # used below to strip leading '-' from config keys
    my @config = ( map { s/^-// if ++$i; $_ } @{ +shift } );

inc/Module/AutoInstall.pm  view on Meta::CPAN

    close FAILED if $args{do_once};

    return @installed;
}

sub _install_cpanplus {
    my @modules   = @{ +shift };
    my @config    = _cpanplus_config( @{ +shift } );
    my $installed = 0;

    require CPANPLUS::Backend;

inc/Module/AutoInstall.pm  view on Meta::CPAN

    }

    return $installed;
}

sub _cpanplus_config {
	my @config = ();
	while ( @_ ) {
		my ($key, $value) = (shift(), shift());
		if ( $key eq 'prerequisites_policy' ) {
			if ( $value eq 'follow' ) {

inc/Module/AutoInstall.pm  view on Meta::CPAN

		}
	}
	return @config;
}

sub _install_cpan {
    my @modules   = @{ +shift };
    my @config    = @{ +shift };
    my $installed = 0;
    my %args;

inc/Module/AutoInstall.pm  view on Meta::CPAN

    }

    return $installed;
}

sub _has_cpanplus {
    return (
        $HasCPANPLUS = (
            $INC{'CPANPLUS/Config.pm'}
              or _load('CPANPLUS::Shell::Default')
        )
    );
}

# make guesses on whether we're under the CPAN installation directory
sub _under_cpan {
    require Cwd;
    require File::Spec;

    my $cwd  = File::Spec->canonpath( Cwd::cwd() );
    my $cpan = File::Spec->canonpath( $CPAN::Config->{cpan_home} );

    return ( index( $cwd, $cpan ) > -1 );
}

sub _update_to {
    my $class = __PACKAGE__;
    my $ver   = shift;

    return
      if defined( _version_check( _load($class), $ver ) );  # no need to upgrade

inc/Module/AutoInstall.pm  view on Meta::CPAN

*** Cannot bootstrap myself. :-( Installation terminated.
.
}

# check if we're connected to some host, using inet_aton
sub _connected_to {
    my $site = shift;

    return (
        ( _load('Socket') and Socket::inet_aton($site) ) or _prompt(
            qq(

inc/Module/AutoInstall.pm  view on Meta::CPAN

          ) =~ /^[Yy]/
    );
}

# check if a directory is writable; may create it on demand
sub _can_write {
    my $path = shift;
    mkdir( $path, 0755 ) unless -e $path;

    return 1 if -w $path;

inc/Module/AutoInstall.pm  view on Meta::CPAN

==> Should we try to install the required module(s) anyway?), 'n'
    ) =~ /^[Yy]/;
}

# load a module and return the version it reports
sub _load {
    my $mod  = pop;    # class/instance doesn't matter
    my $file = $mod;

    $file =~ s|::|/|g;
    $file .= '.pm';

inc/Module/AutoInstall.pm  view on Meta::CPAN

    local $@;
    return eval { require $file; $mod->VERSION } || ( $@ ? undef: 0 );
}

# Load CPAN.pm and its configuration
sub _load_cpan {
    return if $CPAN::VERSION;
    require CPAN;
    if ( $CPAN::HandleConfig::VERSION ) {
        # Newer versions of CPAN have a HandleConfig module
        CPAN::HandleConfig->load;

inc/Module/AutoInstall.pm  view on Meta::CPAN

        CPAN::Config->load;
    }
}

# compare two versions, either use Sort::Versions or plain comparison
sub _version_check {
    my ( $cur, $min ) = @_;
    return unless defined $cur;

    $cur =~ s/\s+$//;

inc/Module/AutoInstall.pm  view on Meta::CPAN

    local $^W = 0;    # shuts off 'not numeric' warnings
    return ( $cur >= $min ? $cur : undef );
}

# nothing; this usage is deprecated.
sub main::PREREQ_PM { return {}; }

sub _make_args {
    my %args = @_;

    $args{PREREQ_PM} = { %{ $args{PREREQ_PM} || {} }, @Existing, @Missing }
      if $UnderCPAN or $TestOnly;

inc/Module/AutoInstall.pm  view on Meta::CPAN


    return %args;
}

# a wrapper to ExtUtils::MakeMaker::WriteMakefile
sub Write {
    require Carp;
    Carp::croak "WriteMakefile: Need even number of args" if @_ % 2;

    if ($CheckOnly) {
        print << ".";

inc/Module/AutoInstall.pm  view on Meta::CPAN

.

    return 1;
}

sub postamble {
    $PostambleUsed = 1;

    return << ".";

config :: installdeps



AI-MegaHAL


lib/AI/MegaHAL.pm  view on Meta::CPAN

	     megahal_cleanup);

@ISA = qw(Exporter DynaLoader);
$VERSION = '0.08';

sub AUTOLOAD {
    # This AUTOLOAD is used to 'autoload' constants from the constant()
    # XS function.  If a constant is not found then control is passed
    # to the AUTOLOAD in AutoLoader.

    my $constname;

lib/AI/MegaHAL.pm  view on Meta::CPAN

    }
    {
	no strict 'refs';
	# Fixed between 5.005_53 and 5.005_61
	if ($] >= 5.00561) {
	    *$AUTOLOAD = sub () { $val };
	}
	else {
	    *$AUTOLOAD = sub { $val };
	}
    }
    goto &$AUTOLOAD;
}

sub new {
    my ($class,%args) = @_;
    my $self;

    # Bless ourselves into the AI::MegaHAL class.
    $self = bless({ },$class);

lib/AI/MegaHAL.pm  view on Meta::CPAN

    $self->_initialize();

    return $self;
}

sub initial_greeting {
    my $self = shift;

    return megahal_initial_greeting();
}

sub do_reply {
    my ($self,$text) = @_;

    return megahal_do_reply($text,0);
}

sub learn {
    my ($self,$text) = @_;

    return megahal_learn($text,0);
}

sub _initialize {
    my $self = shift;

    megahal_initialize();
    return;
}

sub _cleanup {
    my $self = shift;

    megahal_cleanup();
    return;
}

sub DESTROY {
    my $self = shift;

    $self->_cleanup() if($self->{'AutoSave'});
    return;
}
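
# A hedged usage sketch ('AutoSave' is the only constructor option the
# code above references; others may exist):
#
#   my $megahal = AI::MegaHAL->new('AutoSave' => 1);
#   print $megahal->initial_greeting(), "\n";
#   print $megahal->do_reply('Hello there!'), "\n";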



AI-MicroStructure


bin/from-folder.pl  view on Meta::CPAN


GetOptions (\%opts, "cache_file=s");



sub translate
{
  return unless -f;
  (my $rel_name = $File::Find::name) =~ s{.*/}{}xs;
   my $name = md5_hex($rel_name);
    my $go = 0;

bin/from-folder.pl  view on Meta::CPAN




find(\&translate, "$TOP/./");

sub translate {
  return unless -f;
  (my $rel_name = $File::Find::name) =~ s{.*/}{}xs;

  $set->insert(AI::MicroStructure::Object->new($rel_name));



AI-NNEasy


lib/AI/NNEasy.pm  view on Meta::CPAN


  
@ISA = qw(Class::HPLOO::Base UNIVERSAL) ;

  
my $CLASS = 'AI::NNEasy' ; sub __CLASS__ { 'AI::NNEasy' } ;

  
use Class::HPLOO::Base ;

  use AI::NNEasy::NN ;
  use Storable qw(freeze thaw) ;
  use Data::Dumper ;
  


  sub NNEasy { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $file = shift(@_) ;
    my @out_types = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $error_ok = shift(@_) ;

lib/AI/NNEasy.pm  view on Meta::CPAN

    $this->{ERROR_OK} = $error_ok ;
    
    return $this ;
  }
  
  sub _layer_conf { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $def = shift(@_) ;
    my $conf = shift(@_) ;
    

lib/AI/NNEasy.pm  view on Meta::CPAN

    foreach my $Key ( keys %$layer_conf ) { $$layer_conf{$Key} = $$conf{$Key} if exists $$conf{$Key} ;}

    return $layer_conf ;
  }
  
  sub reset_nn { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    
    $this->{NN} = AI::NNEasy::NN->new( @{ $this->{NN_ARGS} } ) ;
  }
  
  sub load { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $file = shift(@_) ;
    
    $file ||= $this->{FILE} ;

lib/AI/NNEasy.pm  view on Meta::CPAN

      }
    }
    return ;
  }
  
  sub save { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $file = shift(@_) ;
    
    $file ||= $this->{FILE} ;

lib/AI/NNEasy.pm  view on Meta::CPAN

    open (my $fh,">$this->{FILE}") ;
    print $fh $dump ;
    close ($fh) ;
  }
  
  sub learn { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $in = shift(@_) ;
    my $out = shift(@_) ;
    my $n = shift(@_) ;

lib/AI/NNEasy.pm  view on Meta::CPAN

    return $err ;
  }
  
  *_learn_set_get_output_error = \&_learn_set_get_output_error_c ;
  
  sub _learn_set_get_output_error_pl { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $set = shift(@_) ;
    my $error_ok = shift(@_) ;
    my $ins_ok = shift(@_) ;

lib/AI/NNEasy.pm  view on Meta::CPAN

  
  
  
  
    
  sub learn_set { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $ins_ok = shift(@_) ;
    my $limit = shift(@_) ;

lib/AI/NNEasy.pm  view on Meta::CPAN

      
    }
    
  }
  
  sub get_set_error { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $ins_ok = shift(@_) ;
    

lib/AI/NNEasy.pm  view on Meta::CPAN

    
    $err /= $ins_ok ;
    return $err ;
  }
  
  sub run { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $in = shift(@_) ;
    
    $this->{NN}->run($in) ;
    my $out = $this->{NN}->output() ;
    return $out ;
  }
  
  sub run_get_winner { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    
    my $out = $this->run(@_) ;
    

lib/AI/NNEasy.pm  view on Meta::CPAN

    }
    
    return $out ;
  }
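
  # A hedged usage sketch (argument order follows the NNEasy() constructor
  # above: save file, output types, acceptable error; the trailing network
  # dimensions are assumptions, not shown in the excerpt):
  #
  #   my $nn  = AI::NNEasy->new('xor.nne', [0, 1], 0.1, 2, 1);
  #   my $out = $nn->run_get_winner([0, 1]);   # e.g. [1]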
  
  sub out_type_winner { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $val = shift(@_) ;
    
    my ($out_type , %err) ;

lib/AI/NNEasy.pm  view on Meta::CPAN


L<Class::HPLOO> enables this kind of syntax for Perl classes:

  class Foo {
    
    sub bar($x , $y) {
      $this->add($x , $y) ;
    }
    
    sub[C] int add( int x , int y ) {
      int res = x + y ;
      return res ;
    }
    
  }



AI-NNFlex


examples/bp.pl  view on Meta::CPAN


#==============================================================
#********** THIS IS THE MAIN PROGRAM **************************
#==============================================================

sub main
 {

 # initialise the weights
  initWeights();

examples/bp.pl  view on Meta::CPAN





#***********************************
sub calcNet()
 {
    #calculate the outputs of the hidden neurons
    #the hidden neurons are tanh

    for(my $i = 0;$i<$numHidden;$i++)

examples/bp.pl  view on Meta::CPAN

    $errThisPat = $outPred - $trainOutput[$patNum];
 }


#************************************
 sub WeightChangesHO()
 #adjust the weights hidden-output
 {
   for(my $k = 0;$k<$numHidden;$k++)
   {
    $weightChange = $LR_HO * $errThisPat * $hiddenVal[$k];

examples/bp.pl  view on Meta::CPAN

   }
 }


#************************************
 sub WeightChangesIH()
 #adjust the weights input-hidden
 {
  for(my $i = 0;$i<$numHidden;$i++)
  {
   for(my $k = 0;$k<$numInputs;$k++)

examples/bp.pl  view on Meta::CPAN

  }
 }


#************************************
 sub initWeights()
 {

  for(my $j = 0;$j<$numHidden;$j++)
  {
    $weightsHO[$j] = (rand() - 0.5)/2;

examples/bp.pl  view on Meta::CPAN


 }


#************************************
 sub initData()
 {

    print "initialising data\n";

    # the data here is the XOR data

examples/bp.pl  view on Meta::CPAN


 }


#************************************
 sub tanh()
 {


	my $x = shift;

examples/bp.pl  view on Meta::CPAN

        }
 }


#************************************
 sub displayResults()
    {
     for(my $i = 0;$i<$numPatterns;$i++)
        {
        $patNum = $i;
        calcNet();

examples/bp.pl  view on Meta::CPAN

        }
    }


#************************************
sub calcOverallError()
    {
     $RMSerror = 0.0;
     for(my $i = 0;$i<$numPatterns;$i++)
        {
        $patNum = $i;

 view all matches for this distribution


AI-NaiveBayes

 view release on metacpan or  search on metacpan

lib/AI/NaiveBayes.pm  view on Meta::CPAN


with Storage(format => 'Storable', io => 'File');

has model   => (is => 'ro', isa => 'HashRef[HashRef]', required => 1);

sub train {
    my $self = shift;
    my $learner = AI::NaiveBayes::Learner->new();
    for my $example ( @_ ){
        $learner->add_example( %$example );
    }
    return $learner->classifier;
}


sub classify {
    my ($self, $newattrs) = @_;
    $newattrs or die "Missing parameter for classify()";

    my $m = $self->model;

lib/AI/NaiveBayes.pm  view on Meta::CPAN

    rescale(\%scores);

    return AI::NaiveBayes::Classification->new( label_sums => \%scores, features => \%features );
}
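
# A minimal usage sketch for train()/classify() above. The example
# hashes are made up; best_category() is assumed from the
# AI::NaiveBayes::Classification interface:
#
#   my $classifier = AI::NaiveBayes->train(
#       { attributes => { sheep => 1, valuable => 1 }, labels => ['farming'] },
#       { attributes => { vampires => 1, garlic => 1 }, labels => ['vampire'] },
#   );
#   my $result = $classifier->classify({ sheep => 1 });
#   print $result->best_category, "\n";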

sub rescale {
    my ($scores) = @_;

    # Scale everything back to a reasonable area in logspace (near zero), un-loggify, and normalize
    my $total = 0;
    my $max = max(values %$scores);

lib/AI/NaiveBayes.pm  view on Meta::CPAN

probability of each word appearing in a document is unaffected by the
presence or absence of each other word in the document.  We assume
this even though we know this isn't true: for example, the word
"iodized" is far more likely to appear in a document that contains the
word "salt" than it is to appear in a document that contains the word
"subroutine".  Luckily, as it turns out, making this assumption even
when it isn't true may have little effect on our results, as the
following paper by Pedro Domingos argues:
L<"http://www.cs.washington.edu/homes/pedrod/mlj97.ps.gz">

=head1 SEE ALSO

 view all matches for this distribution


AI-NaiveBayes1

 view release on metacpan or  search on metacpan

NaiveBayes1.pm  view on Meta::CPAN

use vars @EXPORT_OK;

# non-exported package globals go here
use vars qw();

sub new {
  my $package = shift;
  return bless {
                attributes => [ ],
		labels     => [ ],
		attvals    => {},

NaiveBayes1.pm  view on Meta::CPAN

		smoothing => {},
		attribute_type => {},
	       }, $package;
}

sub set_real {
    my ($self, @attr) = @_;
    foreach my $a (@attr) { $self->{attribute_type}{$a} = 'real' }
}

sub import_from_YAML {
    my $package = shift;
    my $yaml = shift;
    my $self = YAML::Load($yaml);
    return bless $self, $package;
}

sub import_from_YAML_file {
    my $package = shift;
    my $yamlf = shift;
    my $self = YAML::LoadFile($yamlf);
    return bless $self, $package;
}

# Assume that a last header named 'count' holds case counts;
# after optionally removing the count column, the last header is the label
sub add_table {
    my $self = shift;
    my @atts = (); my $lbl=''; my $cnt = '';
    while (@_) {
	my $table = shift;
	if ($table =~ /^(.*)\n[ \t]*-+\n/) {

NaiveBayes1.pm  view on Meta::CPAN

} # end of add_table

# Simplified; not generally compatible.
# Assume that the last header is the label.  The first row contains
# attribute names.
sub add_csv_file {
    my $self = shift; my $fn = shift; local *F;
    open(F,$fn) or die "Cannot open CSV file `$fn': $!";
    local $_ = <F>; my @atts = (); my $lbl=''; my $cnt = '';
    chomp; @atts = split(/\s*,\s*/, $_); $lbl = pop @atts;
    while (<F>) {

NaiveBayes1.pm  view on Meta::CPAN

			     cases=>($cnt?$v[1]:1) );
    }
    close(F);
} # end of add_csv_file

sub drop_attributes {
    my $self = shift;
    foreach my $a (@_) {
	my @tmp = grep { $a ne $_ } @{ $self->{attributes} };
	$self->{attributes} = \@tmp;
	delete($self->{attvals}{$a});

NaiveBayes1.pm  view on Meta::CPAN

	delete($self->{real_stat}{$a});
	delete($self->{smoothing}{$a});
    }
} # end of drop_attributes

sub add_instances {
  my ($self, %params) = @_;
  for ('attributes', 'label', 'cases') {
      die "Missing required '$_' parameter" unless exists $params{$_};
  }

NaiveBayes1.pm  view on Meta::CPAN

      }
      $self->{stat_attributes}{$a}{$attval}{$params{label}} += $params{cases};
  }
}

sub add_instance {
    my ($self, %params) = @_; $params{cases} = 1;
    $self->add_instances(%params);
}

sub train {
    my $self = shift;
    my $m = $self->{model} = {};
    
    $m->{labelprob} = {};
    foreach my $label (keys(%{$self->{stat_labels}}))

NaiveBayes1.pm  view on Meta::CPAN

		sqrt($m->{real_stat}{$att}{$label}{stddev} /
		     ($m->{real_stat}{$att}{$label}{count}-1)
		     );
	}
    }				# foreach real attribute
}				# end of sub train

sub predict {
  my ($self, %params) = @_;
  my $newattrs = $params{attributes} or die "Missing 'attributes' parameter for predict()";
  my $m = $self->{model};  # For convenience
  
  my %scores;

NaiveBayes1.pm  view on Meta::CPAN

  $sumPx += $scores{$_} foreach (keys(%scores));
  $scores{$_} /= $sumPx foreach (keys(%scores));
  return \%scores;
}
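
# A minimal end-to-end sketch using the methods above; attribute and
# label names are made up:
#
#   my $nb = AI::NaiveBayes1->new;
#   $nb->add_instance(attributes => { outlook => 'sunny', windy => 'N' },
#                     label      => 'play=no');
#   $nb->add_instance(attributes => { outlook => 'rainy', windy => 'N' },
#                     label      => 'play=yes');
#   $nb->train;
#   my $ph = $nb->predict(attributes => { outlook => 'sunny', windy => 'N' });
#   # $ph is a hash ref of normalized per-label probabilities, as returned above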

sub print_model {
    my $self = shift;
    my $withcounts = '';
    if ($#_>-1 && $_[0] eq 'with counts')
    { shift @_; $withcounts = 1; }
    my $m = $self->{model};

NaiveBayes1.pm  view on Meta::CPAN

    my @lines;
    push @lines, 'category ', '-';
    push @lines, "$_ " foreach @labels;
    @lines = _append_lines(@lines);
    @lines = map { $_.='| ' } @lines;
    $lines[1] = substr($lines[1],0,length($lines[1])-2).'+-';
    $lines[0] .= "P(category) ";
    foreach my $i (2..$#lines) {
	my $label = $labels[$i-2];
	$lines[$i] .= $m->{labelprob}{$label} .' ';
	if ($withcounts) {

NaiveBayes1.pm  view on Meta::CPAN

    }

    return $r;
}

sub _append_lines {
    my @l = @_;
    my $m = 0;
    foreach (@l) { $m = length($_) if length($_) > $m }
    @l = map 
    { while (length($_) < $m) { $_.=substr($_,length($_)-1) }; $_ }
    @l;
    return @l;
}

sub labels {
  my $self = shift;
  return @{ $self->{labels} };
}

sub attributes {
  my $self = shift;
  return keys %{ $self->{stat_attributes} };
}

sub export_to_YAML {
    my $self = shift;
    require YAML;
    return YAML::Dump($self);
}

sub export_to_YAML_file {
    my $self = shift;
    my $file = shift;
    require YAML;
    YAML::DumpFile($file, $self);
}

 view all matches for this distribution


AI-Nerl

 view release on metacpan or  search on metacpan

examples/digits/digits.pl  view on Meta::CPAN

   show784($delta(:,6));
   show784($delta(:,4));
}
#die join (',',$nncost->dims);
use PDL::Graphics2D;
sub show784{
   my $w = shift;
   $w = $w->squeeze;
   my $min = $w->minimum;
   $w -= $min;
   my $max = $w->maximum;
   $w /= $max;
   $w = $w->reshape(28,28);
   imag2d $w;
}
sub sigmoid{
   my $foo = shift;
   return 1/(1+E**-$foo);
}

sub logistic{
   #apply sigmoid first: the gradient is logistic(sigmoid($foo)),
   #i.e. the derivative s'(x) = s(x)*(1-s(x))
   my $foo = shift;
   return $foo * (1-$foo);
}
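
# The pair above uses the standard identity s'(x) = s(x)*(1 - s(x)).
# A scalar illustration, assuming the E constant this file already uses
# (the PDL version works the same way element-wise):
#
#   my $x    = 0.5;
#   my $sig  = sigmoid($x);        # 1/(1+E**-0.5) ~= 0.6225
#   my $grad = logistic($sig);     # $sig*(1-$sig) ~= 0.2350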

 view all matches for this distribution


AI-NeuralNet-BackProp

 view release on metacpan or  search on metacpan

BackProp.pm  view on Meta::CPAN

package AI::NeuralNet::BackProp::neuron;
	
	use strict;
	
	# Dummy constructor
    sub new {
    	bless {}, shift
	}	
	
	# Rounds floats to ints
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
      	try   { return int(sprintf("%.0f",shift)) }
      	catch { return 0 }
	}
    
	
	# Receives input from other neurons. They must be
	# registered as synapses of this neuron for their
	# input to take effect.
	sub input {
		my $self 	 =	shift;
		my $sid		 =	shift;
		my $value	 =	shift;
		
		# We simply weight the value sent by the neuron. The neuron identifies itself to us

BackProp.pm  view on Meta::CPAN

		$self->output() if($self->input_complete());
	}
	
	# Loops through and outputs to every neuron that this
	# neuron is registered as a synapse of.
	sub output {
		my $self	=	shift;
		my $size	=	$self->{OUTPUTS}->{SIZE} || 0;
		my $value	=	$self->get_output();
		for (0..$size-1) {
			AI::NeuralNet::BackProp::out1("Outputing to $self->{OUTPUTS}->{LIST}->[$_]->{PKG}, index $_, a value of $value with ID $self->{OUTPUTS}->{LIST}->[$_]->{ID}.\n");
			$self->{OUTPUTS}->{LIST}->[$_]->{PKG}->input($self->{OUTPUTS}->{LIST}->[$_]->{ID},$value);
		}
	}
	
	# Used internally by output().
	sub get_output {
		my $self		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $value		=	0;
		my $state		= 	0;
		my (@map,@weight);

BackProp.pm  view on Meta::CPAN

		# Just return the $state
		return $state;
	}
	
	# Used by input() to check if all registered synapses have fired.
	sub input_complete {
		my $self		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $retvalue	=	1;
		
		# Very simple loop. Doesn't need explaining.

BackProp.pm  view on Meta::CPAN

	}
	
	# Used to recursively adjust the weights of synapse input channels
	# to give a desired value. Designed to be called via 
	# AI::NeuralNet::BackProp::NeuralNetwork::learn().
	sub weight	{                
		my $self		=	shift;
		my $ammount		=	shift;
		my $what		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $value;
		AI::NeuralNet::BackProp::out1("Weight: ammount is $ammount, what is $what with size at $size.\n");
		      
		# Now this sub is the main cog in the learning wheel. It is called recursively on 
		# each neuron that has been bad (given incorrect output).
		for my $i (0..$size-1) {
			$value		=	$self->{SYNAPSES}->{LIST}->[$i]->{VALUE};

if(0) {

BackProp.pm  view on Meta::CPAN

	}
	
	# Registers some neuron as a synapse of this neuron.           
	# This is called exclusively by connect(), except
	# from initialize_group() to connect the _map() package.
	sub register_synapse {
		my $self	=	shift;
		my $synapse	=	shift;
		my $sid		=	$self->{SYNAPSES}->{SIZE} || 0;
		$self->{SYNAPSES}->{LIST}->[$sid]->{PKG}		=	$synapse;
		$self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT}		=	1.00		if(!$self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT});

BackProp.pm  view on Meta::CPAN

	# Called via AI::NeuralNet::BackProp::NeuralNetwork::initialize_group() to 
	# form the neuron grids.
	# This just registers another synapse to output to from this one, and
	# then we ask that synapse to let us register as an input connection and we
	# save the sid that the output synapse returns.
	sub connect {
		my $self	=	shift;
		my $to		=	shift;
		my $oid		=	$self->{OUTPUTS}->{SIZE} || 0;
		AI::NeuralNet::BackProp::out1("Connecting $self to $to at $oid...\n");
		$self->{OUTPUTS}->{LIST}->[$oid]->{PKG}	=	$to;

BackProp.pm  view on Meta::CPAN

	
	use Benchmark;          
	use strict;
	
	# Returns the number of elements in an array ref, undef on error
	sub _FETCHSIZE {
		my $a=$_[0];
		my ($b,$x);
		return undef if(substr($a,0,5) ne "ARRAY");
		foreach $b (@{$a}) { $x++ };
		return $x;
	}

	# Debugging subs
	$AI::NeuralNet::BackProp::DEBUG  = 0;
	sub whowasi { (caller(1))[3] . '()' }
	sub debug { shift; $AI::NeuralNet::BackProp::DEBUG = shift || 0; } 
	sub out1  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG eq 1) }
	sub out2  { print  shift() if (($AI::NeuralNet::BackProp::DEBUG eq 1) || ($AI::NeuralNet::BackProp::DEBUG eq 2)) }
	sub out3  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG) }
	sub out4  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG eq 4) }
	
	# Rounds a floating-point to an integer with int() and sprintf()
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
      	try   { return int(sprintf("%.0f",shift)) }
      	catch { return 0 }
	}
    
	# Used to format array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $a		=	shift;
		my $b		=	shift;
		my $x;

BackProp.pm  view on Meta::CPAN

	}
	
	# Returns percentage difference between all elements of two
	# array refs of exact same length (in elements).
	# Now calculates actual difference in numerical value.
	sub pdiff {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $a1	=	shift;
		my $a2	=	shift;
		my $a1s	=	$#{$a1}; #AI::NeuralNet::BackProp::_FETCHSIZE($a1);
		my $a2s	=	$#{$a2}; #AI::NeuralNet::BackProp::_FETCHSIZE($a2);
		my ($a,$b,$diff,$t);

BackProp.pm  view on Meta::CPAN

		$a1s = 1 if(!$a1s);
		return sprintf("%.10f",($diff/$a1s));
	}
	
	# Returns $fa as a percentage of $fb
	sub p {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my ($fa,$fb)=(shift,shift);
		sprintf("%.3f",((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100);
	}
	
	# This sub will take an array ref of a data set, which it expects in this format:
	#   my @data_set = (	[ ...inputs... ], [ ...outputs ... ],
	#				   				   ... rows ...
	#				   );
	#
	# This sub will return the percentage of 'forgetfulness' when the net learns all the
	# data in the set in order. Usage:
	#
	#	 learn_set(\@data,[ options ]);
	#
	# Options are options in hash form. They can be of any form that $net->learn takes.
	#
	# It returns a percentage string.
	#
	sub learn_set {
		my $self	=	shift if(substr($_[0],0,4) eq 'AI::'); 
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2-1;
		my $inc		=	$args{inc};
		my $max		=	$args{max};

BackProp.pm  view on Meta::CPAN

			$res=$data->[$row]->[0]-$self->run($data->[$row-1])->[0];
		}
		return $res;
	}
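	
	# A hypothetical call, given the format described above (option
	# names follow those taken by $net->learn):
	#
	#	my @data_set = ( [0,0], [0],	# inputs, then outputs, row by row
	#			 [0,1], [1],
	#			 [1,0], [1],
	#			 [1,1], [0] );
	#	my $forgetfulness = $net->learn_set(\@data_set, inc => 0.20);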
	
	# This sub will take an array ref of a data set, which it expects in this format:
	#   my @data_set = (	[ ...inputs... ], [ ...outputs ... ],
	#				   				   ... rows ...
	#				   );
	#
	# This sub will return the percentage of 'forgetfulness' when the net learns all the
	# data in the set in RANDOM order. Usage:
	#
	#	 learn_set_rand(\@data,[ options ]);
	#
	# Options are options in hash form. They can be of any form that $net->learn takes.
	#
	# It returns a true value.
	#
	sub learn_set_rand {
		my $self	=	shift if(substr($_[0],0,4) eq 'AI::'); 
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2-1;
		my $inc		=	$args{inc};
		my $max		=	$args{max};

BackProp.pm  view on Meta::CPAN

		
		return 1; 
	}

	# Returns the index of the element with the highest comparative value in the array ref passed
	sub high {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1	=	shift;
		
		my ($el,$len,$tmp);
		foreach $el (@{$ref1}) {
			$len++;

BackProp.pm  view on Meta::CPAN

		}
		return $tmp;
	}
	
	# Returns the index of the element with the lowest comparative value in the array ref passed
	sub low {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1	=	shift;
		
		my ($el,$len,$tmp);
		foreach $el (@{$ref1}) {
			$len++;

BackProp.pm  view on Meta::CPAN

		}
		return $tmp;
	}  
	
	# Returns a pcx object
	sub load_pcx {
		my $self	=	shift;
		return AI::NeuralNet::BackProp::PCX->new($self,shift);
	}	
	
	# Crunch a string of words into a map
	sub crunch {
		my $self	=	shift;
		my (@map,$ic);
		my @ws 		=	split(/[\s\t]/,shift);
		for my $a (0..$#ws) {
			$ic=$self->crunched($ws[$a]);

BackProp.pm  view on Meta::CPAN

		return \@map;
	}
	
	# Finds if a word has been crunched.
	# Returns undef on failure, word index for success.
	sub crunched {
		my $self	=	shift;
		for my $a (0..$self->{_CRUNCHED}->{_LENGTH}-1) {
			return $a+1 if($self->{_CRUNCHED}->{LIST}->[$a] eq $_[0]);
		}
		return undef;
	}
	
	# Alias for crunched(), above
	sub word { crunched(@_) }
	
	# Uncrunches a map (array ref) into an array of words (not an array ref) and returns array
	sub uncrunch {
		my $self	=	shift;
		my $map = shift;
		my ($c,$el,$x);
		foreach $el (@{$map}) {
			$c .= $self->{_CRUNCHED}->{LIST}->[$el-1].' ';
		}
		return $c;
	}
	
	# Sets/gets randomness factor in the network. Setting a value of 0 disables random factors.
	sub random {
		my $self	=	shift;
		my $rand	=	shift;
		return $self->{random}	if(!(defined $rand));
		$self->{random}	=	$rand;
	}
	
	# Sets/gets column width for printing lists in debug modes 1,3, and 4.
	sub col_width {
		my $self	=	shift;
		my $width	=	shift;
		return $self->{col_width}	if(!$width);
		$self->{col_width}	=	$width;
	}
	
	# Sets/Removes value ranging
	sub range {
		my $self	=	shift;
		my $ref		=	shift;
		my $b		=	shift;
		if(substr($ref,0,5) ne "ARRAY") {
			if(($ref == 0) && (!defined $b)) {
				$ref	= $self->crunch($ref);
				#print "\$ref is a string, crunching to ",join(',',@{$ref}),"\n";
			} else {
    			my $a	= $ref;

BackProp.pm  view on Meta::CPAN

		$self->{rRef} = $ref;
		return $ref;
	}
	
	# Used internally to scale outputs to fit range
	sub _range {
		my $self	=	shift;  
		my $in		=	shift;
		my $rA		=	$self->{rA};
		my $rB		=	$self->{rB};
		my $rS		=	$self->{rS};

BackProp.pm  view on Meta::CPAN

			
		
	# Initializes the base for a new neural network.
	# It is recommended that you call learn() before run()ing a pattern.
	# See documentation above for usage.
	sub new {
    	no strict;
    	my $type	=	shift;
		my $self	=	{};
		my $layers	=	shift;
		my $size	=	shift;

BackProp.pm  view on Meta::CPAN

		
		return $self;
	}	

	# Save entire network state to disk.
	sub save {
		my $self	=	shift;
		my $file	=	shift;
		my $size	=	$self->{SIZE};
		my $div		=	$self->{DIV};
		my $out		=	$self->{OUT};

BackProp.pm  view on Meta::CPAN

	    
	    return $self;
	}
        
	# Load entire network state from disk.
	sub load {
		my $self	=	shift;
		my $file	=	shift;
		my $load_flag = shift || 0;
	    
	    return undef if(!(-f $file));

BackProp.pm  view on Meta::CPAN

	
	    return $self;
	}

	# Dumps the complete weight matrix of the network to STDIO
	sub show {
		my $self	=	shift;
		for my $a (0..$self->{SIZE}-1) {
			print "Neuron $a: ";
			for my $b (0..$self->{DIV}-1) {
				print $self->{NET}->[$a]->{SYNAPSES}->{LIST}->[$b]->{WEIGHT},"\t";

BackProp.pm  view on Meta::CPAN

			print "\n";
		}
	}
	
	# Used internally by new() and learn().
	# This is the sub block that actually creates
	# the connections between the synapse chains and
	# also connects the run packages and the map packages
	# to the appropriate ends of the neuron grids.
	sub initialize_group() {
		my $self	=	shift;
		my $size	=	$self->{SIZE};
		my $div		=	$self->{DIV};
		my $out		=	$self->{OUT};
		my $flag	=	$self->{FLAG};

BackProp.pm  view on Meta::CPAN

	}
	

	# When called with an array reference to a pattern, returns a reference
	# to an array associated with that pattern. See usage in documentation.
	sub run {
		my $self	 =	  shift;
		my $map		 =	  shift;
		my $t0 		 =	new Benchmark;
        $self->{RUN}->run($map);
		$self->{LAST_TIME}=timestr(timediff(new Benchmark, $t0));
        return $self->map();
	}
    
    # This automatically uncrunches a response after running it
	sub run_uc {
    	$_[0]->uncrunch(run(@_));
    }

	# Returns the benchmark time and number of loops run or learned
	# for the last run() or learn()
	# operation performed.
	#
	sub benchmarked {
		my $self	=	shift;
		return $self->{LAST_TIME};
	}
	    
	# Used to retrieve map from last internal run operation.
	sub map {
		my $self	 =	  shift;
		$self->{MAP}->map();
	}
	
	# Forces network to learn pattern passed and give desired
	# results. See usage in POD.
	sub learn {
		my $self	=	shift;
		my $omap	=	shift;
		my $res		=	shift;
		my %args    =   @_;
		my $inc 	=	$args{inc} || 0.20;

BackProp.pm  view on Meta::CPAN

package AI::NeuralNet::BackProp::_run;
	
	use strict;
	
	# Dummy constructor.
	sub new {
		bless { PARENT => $_[1] }, $_[0]
	}
	
	# This is so we comply with the neuron interface.
	sub weight {}
	sub input  {}
	
	# Again, compliance with neuron interface.
	sub register_synapse {
		my $self	=	shift;		
		my $sid		=	$self->{REGISTRATION} || 0;
		$self->{REGISTRATION}	=	++$sid;
		$self->{RMAP}->{$sid-1}	= 	$self->{PARENT}->{_tmp_synapse};
		return $sid-1;
	}
	
	# Here is the real meat of this package.
	# run() does one thing: It fires values
	# into the first layer of the network.
	sub run {
		my $self	=	shift;
		my $map		=	shift;
		my $x		=	0;
		$map = $self->{PARENT}->crunch($map) if($map == 0);
		return undef if(substr($map,0,5) ne "ARRAY");
		foreach my $el (@{$map}) {
			# Catch ourselves if we try to run more inputs than neurons
			return $x if($x>$self->{PARENT}->{DIV}-1);
			
			# Here we add a small amount of randomness to the network.

BackProp.pm  view on Meta::CPAN

package AI::NeuralNet::BackProp::_map;
	
	use strict;
	
	# Dummy constructor.
	sub new {
		bless { PARENT => $_[1] }, $_[0]
	}
	
	# Compliance with neuron interface
	sub weight {}
	
	# Compliance with neuron interface
	sub register_synapse {
		my $self	=	shift;		
		my $sid		=	$self->{REGISTRATION} || 0;
		$self->{REGISTRATION}	=	++$sid;
		$self->{RMAP}->{$sid-1} = 	$self->{PARENT}->{_tmp_synapse};
		return $sid-1;

BackProp.pm  view on Meta::CPAN

	
	# This acts just like a regular neuron by receiving
	# values from input synapses. Yet, unlike a regular
	# neuron, it doesn't weight the values, just stores
	# them to be retrieved by a call to map().
	sub input  {
		no strict 'refs';             
		my $self	=	shift;
		my $sid		=	shift;
		my $value	=	shift;
		my $size	=	$self->{PARENT}->{DIV};

BackProp.pm  view on Meta::CPAN

		AI::NeuralNet::BackProp::out1 "Received value $self->{OUTPUT}->[$sid]->{VALUE} and sid $sid, self $self.\n";
	}
	
	# Here we simply collect the value of every neuron connected to this
	# one from the layer below us and return an array ref to the final map.
	sub map {
		my $self	=	shift;
		my $size	=	$self->{PARENT}->{DIV};
		my $out		=	$self->{PARENT}->{OUT};
		my $divide  =	AI::NeuralNet::BackProp->intr($size/$out);
		my @map = ();

BackProp.pm  view on Meta::CPAN

			      
# load_pcx() wrapper package
package AI::NeuralNet::BackProp::PCX;

	# Called by load_pcx in AI::NeuralNet::BackProp;
	sub new {
		my $type	=	shift;
		my $self	=	{ 
			parent  => $_[0],
			file    => $_[1]
		};

BackProp.pm  view on Meta::CPAN

	}

	# Returns a rectangular block defined by an array ref in the form of
	# 		[$x1,$y1,$x2,$y2]
	# Return value is an array ref
	sub get_block {
		my $self	=	shift;
		my $ref		=	shift;
		my ($x1,$y1,$x2,$y2)	=	@{$ref};
		my @block	=	();
		my $count	=	0;

BackProp.pm  view on Meta::CPAN

		}
		return \@block;
	}
			
	# Returns pixel at $x,$y
	sub get {
		my $self	=	shift;
		my ($x,$y)  =	(shift,shift);
		return $self->{image}->[$y*320+$x];
	}
	
	# Returns array of (r,g,b) values from the palette index passed
	sub rgb {
		my $self	=	shift;
		my $color	=	shift;
		return ($self->{palette}->[$color]->{red},$self->{palette}->[$color]->{green},$self->{palette}->[$color]->{blue});
	}
		
	# Returns mean of (r,g,b) values of the palette index passed
	sub avg {
		my $self	=	shift;
		my $color	=	shift;
		return $self->{parent}->intr(($self->{palette}->[$color]->{red}+$self->{palette}->[$color]->{green}+$self->{palette}->[$color]->{blue})/3);
	}
	
	# Loads and decompresses a PCX-format 320x200, 8-bit image file and returns 
	# two arrays: the first is a 64000-element array, where each element contains a palette
	# index, and the second is a 255-element array, where each element is a hash
	# ref with the keys 'red', 'green', and 'blue', each key containing the respective color
	# component for that color index in the palette.
	sub load_pcx {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		
		# open the file
		open(FILE, "$_[0]");
		binmode(FILE);
		

BackProp.pm  view on Meta::CPAN


The words are not duplicated internally. For example:

	$net->crunch("How are you?");

Will probably return an array ref containing 1,2,3. A subsequent call of:

    $net->crunch("How is Jane?");

Will probably return an array ref containing 1,4,5. Notice, the first element stayed
the same. That is because it already stored the word "How". So, each word is stored

BackProp.pm  view on Meta::CPAN

neural net related topics as they pertain to AI::NeuralNet::BackProp. I will also 
announce in the group each time a new release of AI::NeuralNet::BackProp is available.

The list address is at: ai-neuralnet-backprop@egroups.com 

To subscribe, send a blank email to: ai-neuralnet-backprop-subscribe@egroups.com  


=cut

 view all matches for this distribution


AI-NeuralNet-FastSOM

 view release on metacpan or  search on metacpan

examples/eigenvector_initialization.pl  view on Meta::CPAN

	my @es_desc = sort { $b <=> $a } @es;                            # eigenvalues sorted desc
#	warn "desc: ".Dumper \@es_desc;
	my @es_idx  = map { _find_num ($_, \@es) } @es_desc;             # eigenvalue indices sorted by eigenvalue (desc)
#	warn "idx: ".Dumper \@es_idx;

sub _find_num {
    my $v = shift;
    my $l = shift;
    for my $i (0..$#$l) {
	return $i if $v == $l->[$i];
    }

 view all matches for this distribution


AI-NeuralNet-Hopfield

 view release on metacpan or  search on metacpan

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN


has 'matrix_rows'   => ( is => 'rw', isa => 'Int');

has 'matrix_cols'   => ( is => 'rw', isa => 'Int');

sub BUILD {
	my $self = shift;
	my $args = shift;
	my $matrix = Math::SparseMatrix->new($args->{row}, $args->{col});
	$self->matrix($matrix);	
	$self->matrix_rows($args->{row});
	$self->matrix_cols($args->{col});
}

sub train() {	
	my $self = shift;
	my @pattern = @_;	

	if ( ($#pattern + 1) != $self->matrix_rows) {
		die "Can't train a pattern of size " . ($#pattern + 1) . " on a hopfield network of size " , $self->matrix_rows;

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN


	my $m3 = &multiply($m1, $m2);

	my $identity = &identity($m3->{_rows});

	my $m4 = &subtract($m3, $identity);

	my $m5 = &add($self->matrix, $m4);
	
	$self->matrix($m5);	
}

sub evaluate() {
	my $self = shift;
	my @pattern = @_;

	my @output = ();

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return @output;
}
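
# A minimal usage sketch for train()/evaluate() above, assuming a
# 4-neuron network and binary patterns of matching size (argument
# values are made up):
#
#   my $net = AI::NeuralNet::Hopfield->new(row => 4, col => 4);
#   $net->train(1, 0, 1, 0);                   # store one pattern
#   my @out = $net->evaluate(1, 0, 0, 0);      # recall from a partial cue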

sub convert_array() {
	my $rows    = shift;
	my $cols 	= shift;
	my @pattern = @_;
	my $result  = Math::SparseMatrix->new(1, $cols);

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return $result;
}

sub transpose() {
	my $matrix  = shift;
	my $rows = $matrix->{_rows};
	my $cols = $matrix->{_cols};

	my $inverse = Math::SparseMatrix->new($cols, $rows);

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return $inverse;
}

sub multiply() {
	my $matrix_a  = shift;
	my $matrix_b  = shift;

	my $a_rows = $matrix_a->{_rows};
	my $a_cols = $matrix_a->{_cols};

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return $result;
}

sub identity() {
	my $size = shift;

	if ($size < 1) {
		die "Identity matrix must be at least of size 1.";
	}

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		$result->set($i, $i, 1);
	}
	return $result;
}

sub subtract() {
	my $matrix_a = shift;
	my $matrix_b = shift;

    my $a_rows = $matrix_a->{_rows};
    my $a_cols = $matrix_a->{_cols};

	my $b_rows = $matrix_b->{_rows};
	my $b_cols = $matrix_b->{_cols};

	if ($a_rows != $b_rows) {
		die "To subtract the matrixes they must have the same number of rows and columns.";
	}

	if ($a_cols != $b_cols) {
		die "To subtract the matrixes they must have the same number of rows and columns.  Matrix a has ";
	}

	my $result = Math::SparseMatrix->new($a_rows, $a_cols);

	for (my $result_row = 1; $result_row <= $a_rows; $result_row++) {

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return $result;
}

sub add() {
	#weight matrix.
    my $matrix_a = shift;
	#identity matrix.
    my $matrix_b = shift;

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return $result;
}

sub dot_product() {
	my $matrix_a = shift;
	my $matrix_b = shift;
	
	my $a_rows = $matrix_a->{_rows};
	my $a_cols = $matrix_a->{_cols};

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		$result += $array_a[$i] * $array_b[$i];
	}
	return $result;
}

sub packed_array() {
	my $matrix = shift;
	my @result = ();

	for (my $r = 1; $r <= $matrix->{_rows}; $r++) {
		for (my $c = 1; $c <= $matrix->{_cols}; $c++) {

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		}
	}
	return @result;
}

sub get_col() {
	my $self = shift;
	my $col  = shift;

	my $matrix = $self->matrix();
	

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		$new_matrix->set($row, 1, $value);
	}
	return $new_matrix;
}

sub print_matrix() {
    my $matrix  = shift;
    my $rs = $matrix->{_rows};
    my $cs = $matrix->{_cols};

	for (my $i = 1; $i <= $rs; $i++) {

 view all matches for this distribution


AI-NeuralNet-Kohonen-Demo-RGB

 view release on metacpan or  search on metacpan

RGB.pm  view on Meta::CPAN

	exit;


=head1 DESCRIPTION

A sub-class of C<AI::NeuralNet::Kohonen>
that implements extra methods to make use of TK
in a very slow demonstration of how a SOM can collapse
a three-dimensional space (RGB colour values) into a
two-dimensional space (the display). See L<SYNOPSIS>.

RGB.pm  view on Meta::CPAN

use Tk qw/DoOneEvent DONT_WAIT/;

#
# Used only by &tk_train
#
sub tk_show { my $self=shift;
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			my $colour = sprintf("#%02x%02x%02x",
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[0])),
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[1])),

RGB.pm  view on Meta::CPAN


Over-rides the base class to provide TK displays of the map

=cut

sub train { my ($self,$epochs) = (shift,shift);
	my $label_txt;

	$epochs = $self->{epochs} unless defined $epochs;
	$self->{display_scale} = 10 if not defined 	$self->{display_scale};

	$self->{mw} = MainWindow->new(
		-width	=> 200+($self->{map_dim_x} * $self->{display_scale}),
		-height	=> 200+($self->{map_dim_y} * $self->{display_scale}),
	);
    my $quit_flag = 0;
    my $quit_code = sub {$quit_flag = 1};
    $self->{mw}->protocol('WM_DELETE_WINDOW' => $quit_code);

	$self->{c} = $self->{mw}->Canvas(
		-width	=> 50+($self->{map_dim_x} * $self->{display_scale}),
		-height	=> 50+($self->{map_dim_y} * $self->{display_scale}),

 view all matches for this distribution


AI-NeuralNet-Kohonen-Visual

 view release on metacpan or  search on metacpan

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN

Test the test file in this distribution, or:

	package YourClass;
	use base "AI::NeuralNet::Kohonen::Visual";

	sub get_colour_for { my ($self,$x,$y) = (shift,shift,shift);
		# From here you return a TK colour name.
		# Get it as you please; for example, values of a 3D map:
		return sprintf("#%02x%02x%02x",
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[0])),
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[1])),

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN

=head1 DESCRIPTION

Provides TK-based visualisation routines for C<AI::NeuralNet::Kohonen>.
Replaces the earlier C<AI::NeuralNet::Kohonen::Demo::RGB>.

This is a sub-class of C<AI::NeuralNet::Kohonen>
that implements extra methods to make use of TK.

This module is itself intended to be sub-classed by you,
where you provide a version of the method C<get_colour_for>:
see L<METHOD get_colour_for> and L<SYNOPSIS> for details.


=head1 CONSTRUCTOR (new)

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN


Over-rides the base class to provide TK displays of the map.

=cut

sub train { my ($self,$epochs) = (shift,shift);
	$epochs = $self->{epochs} unless defined $epochs;
	$self->{display_scale} = 10 if not defined 	$self->{display_scale};

	&{$self->{train_start}} if exists $self->{train_start};

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN

	return 1;
}

=head1 METHOD get_colour_for

This method is intended to be sub-classed.

Currently it only operates on the first three elements
of a weight vector, turning them into RGB values.

It returns a TK colour for a node at position C<x>,C<y> in the

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN


Accepts: C<x> and C<y> co-ordinates in the map.

=cut

sub get_colour_for { my ($self,$x,$y) = (shift,shift,shift);
	my $_0 = $self->{map}->[$x]->[$y]->{weight}->[0];
	$_0 = $self->{missing_colour} || 0 if $_0 eq $self->{missing_mask};
	my $_1 = $self->{map}->[$x]->[$y]->{weight}->[1];
	$_1 = $self->{missing_colour} || 0 if $_1 eq $self->{missing_mask};
	my $_2 = $self->{map}->[$x]->[$y]->{weight}->[2];

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN


Deprecated: see L<METHOD create_empty_map>.

=cut

sub prepare_display {
	return $_[0]->create_empty_map;
}

=head1 METHOD create_empty_map

Sets up a TK C<MainWindow> and C<Canvas> to
act as an empty map.

=cut

sub create_empty_map { my $self = shift;
	my ($w,$h);
	if ($self->{display} and $self->{display} eq 'hex'){
		$w = ($self->{map_dim_x}+1) * ($self->{display_scale}+2);
		$h = ($self->{map_dim_y}+1) * ($self->{display_scale}+2);
	} else {

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN

		-height	=> $h + 20,
	);
	$self->{_mw}->fontCreate(qw/TAG -family verdana -size 8 -weight bold/);
	$self->{_mw}->resizable( 0, 0);
    $self->{_quit_flag} = 0;
    $self->{_mw}->protocol('WM_DELETE_WINDOW' => sub {$self->{_quit_flag}=1});
	$self->{_canvas} = $self->{_mw}->Canvas(
		-width	=> $w,
		-height	=> $h,
		-relief	=> 'raised',
		-border => 2,
	);
	$self->{_canvas}->pack(-side=>'top');
	$self->{_label} = $self->{_mw}->Button(
		-command      => sub { $self->{_mw}->destroy;$self->{_mw} = undef; },
		-relief       => 'groove',
		-text         => ' ',
		-wraplength   => $w,
		-textvariable => \$self->{_label_txt}
	);

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN


See also L<METHOD get_colour_for>.

=cut

sub plot_map { my ($self,$args) = (shift,{@_});
	$self->{plotted} = 1;
	# MW may have been destroyed
	$self->prepare_display if not defined $self->{_mw};
	my $yo = 5+($self->{display_scale}/2);
	for my $x (0..$self->{map_dim_x}){

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN


Very naive: no attempt is made to check that the text will appear on the map.

=cut

sub label_map { my ($self,$x,$y,$t) = (shift,shift,shift,shift);
	$self->{_canvas}->createText(
		$x*$self->{display_scale}+($self->{display_scale}),
		$y*$self->{display_scale}+($self->{display_scale}),
		-text	=> $t,
		-anchor => 'w',

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN


Calls TK's C<MainLoop> to keep a window open until the user closes it.

=cut

sub main_loop { my $self = shift;
	$self->plot_map unless $self->{plotted};
	MainLoop;
}


 view all matches for this distribution


AI-NeuralNet-Kohonen

 view release on metacpan or  search on metacpan

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


=back

=cut

sub new {
	my $class					= shift;
	my %args					= @_;
	my $self 					= bless \%args,$class;

	$self->{missing_mask}		= 'x' unless defined $self->{missing_mask};

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


See L<AI::NeuralNet::Kohonen::Node/CONSTRUCTOR new>.

=cut

sub randomise_map { my $self=shift;
	confess "{weight_dim} not set" unless $self->{weight_dim};
	confess "{map_dim_x} not set" unless $self->{map_dim_x};
	confess "{map_dim_y} not set" unless $self->{map_dim_y};
	for my $x (0..$self->{map_dim_x}){
		$self->{map}->[$x] = [];

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

As L<METHOD randomise_map> but sets all C<map> nodes to
either the value supplied as the only parameter, or C<undef>.

=cut

sub clear_map { my $self=shift;
	confess "{weight_dim} not set" unless $self->{weight_dim};
	confess "{map_dim_x} not set" unless $self->{map_dim_x};
	confess "{map_dim_y} not set" unless $self->{map_dim_y};
	my $val = shift || $self->{missing_mask};
	my $w = [];

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


Returns a true value.

=cut

sub train { my ($self,$epochs) = (shift,shift);
	$epochs = $self->{epochs} unless defined $epochs;
	&{$self->{train_start}} if exists $self->{train_start};
	for my $epoch (1..$epochs){
		$self->{t} = $epoch;
		&{$self->{epoch_start}} if exists $self->{epoch_start};

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

and L<AI::NeuralNet::Kohonen::Node/distance_from>,

=cut


sub find_bmu { my ($self,$target) = (shift,shift);
	my $closest = [];	# [value, x,y] value and co-ords of closest match
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			my $distance = $self->{map}->[$x]->[$y]->distance_from( $target );
			$closest = [$distance,0,0] if $x==0 and $y==0;

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

Returns: reference to an array that is the weight of the node, or
C<undef> on failure.

=cut

sub get_weight_at { my ($self,$x,$y) = (shift,shift,shift);
	return undef if $x<0 or $y<0 or $x>$self->{map_dim_x} or $y>$self->{map_dim_y};
	return $self->{map}->[$x]->[$y]->{weight};
}


lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


See L<METHOD find_bmu>, and L</METHOD get_weight_at>.

=cut

sub get_results { my ($self,$targets)=(shift,shift);
	$self->{results} = [];
	if (not defined $targets){
		$targets = $self->{input};
	} elsif (not $targets eq $self->{input}){
		foreach (@$targets){

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


This may change, as it seems misleading to re-use that field.

=cut

sub map_results { my $self=shift;

}


=head1 METHOD dump

Print the current weight values to the screen.

=cut

sub dump { my $self=shift;
	print "    ";
	for my $x (0..$self->{map_dim_x}){
		printf ("  %02d ",$x);
	}
	print"\n","-"x107,"\n";

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


Returns: a true value.

=cut

sub smooth { my ($self,$smooth) = (shift,shift);
	$smooth = $self->{smoothing} if not $smooth and defined $self->{smoothing};
	return unless $smooth;
	$smooth = int( sqrt $self->{map_dim_a} );
	my $mask = _make_gaussian_mask($smooth);

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


See L</FILE FORMAT>.

=cut

sub load_input { my ($self,$path) = (shift,shift);
	local *IN;
	if (not open IN,$path){
		warn "Could not open file <$path>: $!";
		return undef;
	}

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


Return C<undef> on failure, a true value on success.

=cut

sub save_file { my ($self,$path) = (shift,shift);
	local *OUT;
	if (not open OUT,">$path"){
		warn "Could not open file for writing <$path>: $!";
		return undef;
	}

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


#
# Process ASCII from table field or input file
# Accepts: ASCII as array or array ref
#
sub _process_input_text { my ($self) = (shift);
	if (not defined $_[1]){
		if (ref $_[0] eq 'ARRAY'){
			@_ = @{$_[0]};
		} else {
			@_ = split/[\n\r\f]+/,$_[0];

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

unless the C<targeting> field is defined, in which case the targets are
iterated over.

=cut

sub _select_target { my $self=shift;
	if (not $self->{targeting}){
		return $self->{input}->[
			(int rand(scalar @{$self->{input}}))
		];
	}

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

is calculated by the C<Node> class - see
L<AI::NeuralNet::Kohonen::Node/distance_effect>.

=cut

sub _adjust_neighbours_of { my ($self,$bmu,$target) = (shift,shift,shift);
	my $neighbour_radius = int (
		($self->{map_dim_a}/$self->{neighbour_factor}) * exp(- $self->{t} / $self->{time_constant})
	);

	# Distance from co-ord vector (0,0) as integer

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

	              (      t    )
	L(t) = L  exp ( -  ------ )
	        0     (    lambda )

=cut

sub _decay_learning_rate { my $self=shift;
	$self->{l} =  (
		$self->{learning_rate} * exp(- $self->{t} / $self->{time_constant})
	);
}
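
# Numerically, with an initial rate of 0.5 and a time constant of 100
# (illustrative values only):
#
#   t = 1     L(t) = 0.5 * exp(-1/100)   = 0.4950
#   t = 10    L(t) = 0.5 * exp(-10/100)  = 0.4524
#   t = 100   L(t) = 0.5 * exp(-100/100) = 0.1839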

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


Returns: reference to a 2d array that is the mask.

=cut

sub _make_gaussian_mask { my ($smooth) = (shift);
	my $f = 4; # Cut-off threshold
	my $g_mask_2d = [];
	for my $x (0..$smooth){
		$g_mask_2d->[$x] = [];
		for my $y (0..$smooth){

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN


See also L<_decay_learning_rate>.

=cut

sub _gauss_weight { my ($r, $sigma) = (shift,shift);
	return exp( -($r**2) / (2 * $sigma**2) );
}
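
# For a feel of the fall-off, sigma = 2 gives roughly these weights by
# distance r from the centre (illustrative values only):
#
#   r=0: 1.000   r=1: 0.882   r=2: 0.607   r=3: 0.325   r=4: 0.135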


=head1 PUBLIC METHOD quantise_error

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

or those in the C<input> field.

=cut


sub quantise_error { my ($self,$targets) = (shift,shift);
	my $qerror=0;
	if (not defined $targets){
		$targets = $self->{input};
	} else {
		foreach (@$targets){

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

the data passed was a comment, or the C<weight_dim> flag was
not set); a true value on success.

=cut

sub _add_input_from_str { my ($self) = (shift);
	$_ = shift;
	s/#.*$//g;
	return undef if /^$/ or not defined $self->{weight_dim};
	my @i = split /\s+/,$_;
	return undef if $#i < $self->{weight_dim}; # catch bad lines

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN



#
# Processes the 'table' parameter to the constructor
#
sub _process_table { my $self = shift;
	$_ = $self->_process_input_text( $self->{table} );
	undef $self->{table};
	return $_;
}

 view all matches for this distribution


AI-NeuralNet-Mesh

 view release on metacpan or  search on metacpan

Mesh.pm  view on Meta::CPAN

    use Benchmark; 

   	# See POD for usage of this variable.
	$AI::NeuralNet::Mesh::Connector = '_c';
	
	# Debugging subs
	$AI::NeuralNet::Mesh::DEBUG  = 0;
	sub whowasi { (caller(1))[3] . '()' }
	sub debug { shift; $AI::NeuralNet::Mesh::DEBUG = shift || 0; } 
	sub d { shift if(substr($_[0],0,4) eq 'AI::'); my ($a,$b,$c)=(shift,shift,$AI::NeuralNet::Mesh::DEBUG); print $a if($c == $b); return $c }
	sub verbose {debug @_};
	sub verbosity {debug @_};
	sub v {debug @_};
	
	
	# Return version of ::ID string passed or current version of this
	# module if no string is passed. Used in load() to detect file versions.
	sub version {
		shift if(substr($_[0],0,4) eq 'AI::');
		substr((split(/\s/,(shift || $AI::NeuralNet::Mesh::ID)))[2],1);
	}                                  
	
	# Rounds a floating-point to an integer with int() and sprintf()
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
      	try   { return int(sprintf("%.0f",shift)) }
      	catch { return 0 }
	}
    
	# Package constructor
	sub new {
		no strict 'refs';
		my $type	=	shift;
		my $self	=	{};
		my $layers	=	shift;
		my $nodes	=	shift;

Mesh.pm  view on Meta::CPAN

	}	
    

    # Internal usage
    # Connects one range of nodes to another range
    sub _c {
    	my $self	=	shift;
    	my $r1a		=	shift;
    	my $r1b		=	shift;
    	my $r2a		=	shift;
    	my $r2b		=	shift;

Mesh.pm  view on Meta::CPAN

		}
	}
    
    # Internal usage
    # Creates the mesh of neurons
    sub _init {
    	my $self		=	shift;
    	my $nodes		=	$self->{nodes};
    	my $outputs		=	$self->{outputs} || $nodes;
    	my $inputs		=	$self->{inputs}  || $nodes;
    	my $layers		=	$self->{total_layers};

Mesh.pm  view on Meta::CPAN

				$self->{mesh}->[$x]->add_input_node($self->{input}->{cap});
		}
	}
    
    # See POD for usage
    sub extend {
    	my $self	=	shift;
    	my $layers	=	shift;
    
    	# Looks like we got ourselves a layer specs array
		if(ref($layers) eq "ARRAY") { 

Mesh.pm  view on Meta::CPAN

		}
		return 1;
	}
    
    # See POD for usage
    sub extend_layer {
    	my $self	=	shift;
    	my $layer	=	shift || 0;
    	my $specs	=	shift;
    	if(!$specs) {
    		$self->{error} = "extend_layer(): You must provide specs to extend layer $layer with.\n";

Mesh.pm  view on Meta::CPAN

    	}
    	return 1;
    }
    
    # Pseudo-internal usage
    sub add_nodes {
    	no strict 'refs';
		my $self	=	shift;
    	my $layer	=	shift;
    	my $nodes	=	shift;
    	my $n		=	0;

Mesh.pm  view on Meta::CPAN

		$self->_c($#{$self->{mesh}}-$more+1,$#{$self->{mesh}},$n+$self->{layers}->[$layer],$n+$self->{layers}->[$layer]+$self->{layers}->[$layer+1]);
    }
        
        
    # See POD for usage
    sub run {
    	my $self	=	shift;
    	my $inputs	=	shift;
    	my $const	=	$self->{const};
    	#my $start	=	new Benchmark;
    	$inputs		=	$self->crunch($inputs) if($inputs == 0);

Mesh.pm  view on Meta::CPAN

    	#$self->{benchmark} = timestr(timediff(new Benchmark, $start));
    	return $self->{output}->get_outputs();
    }    
    
    # See POD for usage
    sub run_uc {
    	$_[0]->uncrunch(run(@_));
    }

	# See POD for usage
	sub learn {
    	my $self	=	shift;					
    	my $inputs	=	shift;					# input set
    	my $outputs	=	shift;					# target outputs
    	my %args	=	@_;						# get args into hash
    	my $inc		=	$args{inc} || 0.002;	# learning gradient

Mesh.pm  view on Meta::CPAN

   		return $str;
   	}


	# See POD for usage
	sub learn_set {
		my $self	=	shift;
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2;
		my $inc		=	$args{inc};

Mesh.pm  view on Meta::CPAN

			return $data->[$row]->[0]-$self->run($data->[$row-1])->[0];
		}
	}
	
	# See POD for usage
	sub run_set {
		my $self	=	shift;
		my $data	=	shift;
		my $len		=	$#{$data}/2;
		my (@results,$res);
		for my $x (0..$len) {

Mesh.pm  view on Meta::CPAN

	# $separator is an optional variable specifying the separator
	# character between values. $separator defaults to ',' (a single comma). 
	# NOTE: This does not handle quoted fields, or any other record
	# separator other than "\n".
	#
	sub load_set {
		my $self	=	shift;
		my $file	=	shift;
		my $attr	=	shift || 0;
		my $sep		=	shift || ',';
		my $data	=	[];

Mesh.pm  view on Meta::CPAN

		}
		return $data;
	}
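	
	# A hypothetical call, given the description above (the file name
	# and class-attribute column are made up):
	#
	#	my $data = $net->load_set('data.csv', 0, ',');
	#	$net->learn_set($data);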
	
	# See POD for usage
	sub get_outs {
		my $self	=	shift;
		my $data	=	shift;
		my $len		=	$#{$data}/2;
		my $outs	=	[];
		for my $x (0..$len) {

Mesh.pm  view on Meta::CPAN

		}
		return $outs;
	}
	
	# Save entire network state to disk.
	sub save {
		my $self	=	shift;
		my $file	=	shift;
		no strict 'refs';
		
		open(FILE,">$file");

Mesh.pm  view on Meta::CPAN

	    
	    return $self;
	}
        
	# Load entire network state from disk.
	sub load {
		my $self		=	shift;
		my $file		=	shift;  
		my $load_flag   =	shift;
		
	    my @lines;

Mesh.pm  view on Meta::CPAN

		
		return $self;
	}
	
	# Load entire network state from disk.
	sub load_old {
		my $self		=	shift;
		my $file		=	shift;  
		my $load_flag   =	shift;
		
	    if(!(-f $file)) {

Mesh.pm  view on Meta::CPAN

		
		return $self;
	}

	# Dumps the complete weight matrix of the network to STDIO
	sub show {
		my $self	=	shift;
		my $n 		=	0;    
		no strict 'refs';
		for my $x (0..$self->{total_layers}) {
			for my $y (0..$self->{layers}->[$x]-1) {

Mesh.pm  view on Meta::CPAN

	# 	$output	= &$type($sum_of_inputs,$self);
	# The code ref then has access to all the data in that node (through the
	# blessed reference $self) and is expected to return the value to be used
	# as the output for that node. The sum of all the inputs to that node
	# is already computed and passed as the first argument.
	sub activation {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $value	=	shift || 'linear';
		my $n 		=	0;    
		no strict 'refs';

Mesh.pm  view on Meta::CPAN

			$self->{mesh}->[$_]->{activation} = $value; 
		}
	}
	
	# Applies an activation type to a specific node
	sub node_activation {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $node	=	shift || 0;
		my $value	=	shift || 'linear';
		my $n 		=	0;    

Mesh.pm  view on Meta::CPAN

	}
	
	# Set the activation threshold for a specific layer.
	# Only applicable if that layer uses "sigmoid" or "sigmoid_2"
	# usage: $net->threshold($layer,$threshold);
	sub threshold {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $value	=	shift || 0.5; 
		my $n		=	0;
		no strict 'refs';

Mesh.pm  view on Meta::CPAN

			$self->{mesh}->[$_]->{threshold} = $value;
		}
	}
	
	# Applies a threshold to a specific node     
	sub node_threshold {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $node	=	shift || 0;
		my $value	=	shift || 0.5; 
		my $n		=	0;

Mesh.pm  view on Meta::CPAN

	
	# Set mean (avg.) flag for a layer.
	# usage: $net->mean($layer,$flag);
	# If $flag is true, it enables finding the mean for that layer,
	# If $flag is false, disables mean.
	sub mean {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $value	=	shift || 0;
		my $n		=	0;
		no strict 'refs';

Mesh.pm  view on Meta::CPAN

		}
	}
	
	  
	# Returns a pcx object
	sub load_pcx {
		my $self	=	shift;
		my $file	=	shift;
		eval('use PCX::Loader');
		if(@_) {
			$self->{error}="Cannot load PCX::Loader module: @_";

Mesh.pm  view on Meta::CPAN

		}
		return PCX::Loader->new($self,$file);
	}	
	
	# Crunch a string of words into a map
	sub crunch {
		my $self	=	shift;
		my @ws 		=	split(/[\s\t]/,shift);
		my (@map,$ic);
		for my $a (0..$#ws) {
			$ic=$self->crunched($ws[$a]);

Mesh.pm  view on Meta::CPAN

		return \@map;
	}
	
	# Finds if a word has been crunched.
	# Returns undef on failure, word index for success.
	sub crunched {
		my $self	=	shift;
		for my $a (0..$self->{_crunched}->{_length}-1) {
			return $a+1 if($self->{_crunched}->{list}->[$a] eq $_[0]);
		}
		$self->{error} = "Word \"$_[0]\" not found.";
		return undef;
	}
	
	# Alias for crunched(), above
	sub word { crunched(@_) }
	
	# Uncrunches a map (array ref) into an array of words (not an array ref) 
	# and returns array
	sub uncrunch {
		my $self	=	shift;
		my $map = shift;
		my ($c,$el,$x);
		foreach $el (@{$map}) {
			$c .= $self->{_crunched}->{list}->[$el-1].' ';

Mesh.pm  view on Meta::CPAN

		return $c;
	}
	
	# Sets/gets randomness factor in the network. Setting a value of 0 
	# disables random factors.
	sub random {
		my $self	=	shift;
		my $rand	=	shift;
		return $self->{random}	if(!(defined $rand));
		$self->{random}	=	$rand;
	}
	
	# Sets/gets column width for printing lists in debug modes 1,3, and 4.
	sub col_width {
		my $self	=	shift;
		my $width	=	shift;
		return $self->{col_width}	if(!$width);
		$self->{col_width}	=	$width;
	} 

	# Sets/gets run const. factor in the network. Setting a value of 0 
	# disables run const. factor. 
	sub const {
		my $self	=	shift;
		my $const	=	shift;
		return $self->{const}	if(!(defined $const));
		$self->{const}	=	$const;
	}
	
	# Return benchmark time from last learn() operation.
	sub benchmark {
		shift->{benchmarked};
	}
	
	# Same as benchmark()
	sub benchmarked {
		benchmark(shift);
	}
	
	# Return the last error in the mesh, or undef if no error.
	sub error {
		my $self = shift;
		return undef if !$self->{error};
		chomp($self->{error});
		return $self->{error}."\n";
	}

Mesh.pm  view on Meta::CPAN

	# Used to format array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $a		=	shift;
		my $b		=	shift;
		my $x;

Mesh.pm  view on Meta::CPAN

	}
	
	# Returns percentage difference between all elements of two
	# array refs of exact same length (in elements).
	# Now calculates actual difference in numerical value.
	sub pdiff {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $a1	=	shift;
		my $a2	=	shift;
		my $a1s	=	$#{$a1};
		my $a2s	=	$#{$a2};
		my ($a,$b,$diff,$t);

Mesh.pm  view on Meta::CPAN

		$a1s = 1 if(!$a1s);
		return sprintf("%.10f",($diff/$a1s));
	}
	
	# Returns $fa as a percentage of $fb
	sub p {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my ($fa,$fb)=(shift,shift); 
		sprintf("%.3f",$fa/$fb*100); #((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100
	}
	
	# Returns the index of the element with the highest 
	# comparative value in the array ref passed
	sub high {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1 = shift; my ($el,$len,$tmp); $tmp=0;
		foreach $el (@{$ref1}) { $len++ }
		for my $x (0..$len-1) { $tmp = $x if($ref1->[$x] > $ref1->[$tmp]) }
		return $tmp;
	}
	
	# Returns the index of the element with the lowest 
	# comparative value in the array ref passed
	sub low {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1 = shift; my ($el,$len,$tmp); $tmp=0;
		foreach $el (@{$ref1}) { $len++ }
		for my $x (0..$len-1) { $tmp = $x if($ref1->[$x] < $ref1->[$tmp]) }
		return $tmp;
	}  

Mesh.pm  view on Meta::CPAN

	# net TWICE on the data set, because the first time
	# the range() function searches for the top value in
	# the inputs, and therefore, results could fluctuate.
	# The second learning cycle guarantees more accuracy.
	#	
	sub range {
		my @r=@_;
		sub{$_[1]->{t}=$_[0]if($_[0]>$_[1]->{t});$r[intr($_[0]/$_[1]->{t}*$#r)]}
	}
	
	#
	# ramp() performs smooth ramp activation between 0 and 1 if $r is 1, 
	# or between -1 and 1 if $r is 2. $r defaults to 1, as you can see.	

Mesh.pm  view on Meta::CPAN

	# net at least TWICE on the data set, because the first 
	# time the ramp() function searches for the top value in
	# the inputs, and therefore, results could fluctuate.
	# The second learning cycle guarantees more accuracy.
	#
	sub ramp {
		my $r=shift||1;my $t=($r<2)?0:-1;
		sub{$_[1]->{t}=$_[0]if($_[0]>$_[1]->{t});$_[0]/$_[1]->{t}*$r+$t}
	}

	# Self-explanatory, pretty much. $threshold is used to decide if an input 
	# is true or false (1 or 0). If an input is below $threshold, it is false.
	sub and_gate {
		my $threshold = shift || 0.5;
		sub {
			my $sum  = shift;
			my $self = shift;
			for my $x (0..$self->{_inputs_size}-1) { return $self->{_parent}->{const} if($self->{_inputs}->[$x]->{value}<$threshold) }
			return $sum/$self->{_inputs_size};
		}
	}
	
	# Self-explanatory; $threshold is used the same as above.
	sub or_gate {
		my $threshold = shift || 0.5;
		sub {
			my $sum  = shift;
			my $self = shift;
			for my $x (0..$self->{_inputs_size}-1) { return $sum/$self->{_inputs_size} if($self->{_inputs}->[$x]->{value}>=$threshold) }
			return $self->{_parent}->{const};
		}

Mesh.pm  view on Meta::CPAN

package AI::NeuralNet::Mesh::node;
	
	use strict;

	# Node constructor
	sub new {
		my $type		=	shift;
		my $self		={ 
			_parent		=>	shift,
			_inputs		=>	[],
			_outputs	=>	[]

Mesh.pm  view on Meta::CPAN

		bless $self, $type;
	}

	# Receive inputs from other nodes, and also send
	# outputs on.	
	sub input {
		my $self	=	shift;
		my $input	=	shift;
		my $from_id	=	shift;
		
		$self->{_inputs}->[$from_id]->{value} = $input * $self->{_inputs}->[$from_id]->{weight};

Mesh.pm  view on Meta::CPAN

		} else {
			$self->{_parent}->d("all inputs have NOT fired for $self.\n",1);
		}
	}

	sub add_input_node {
		my $self	=	shift;
		my $node	=	shift;
		my $i		=	$self->{_inputs_size} || 0;
		$self->{_inputs}->[$i]->{node}	 = $node;
		$self->{_inputs}->[$i]->{value}	 = 0;

Mesh.pm  view on Meta::CPAN

		$self->{_inputs}->[$i]->{fired}	 = 0;
		$self->{_inputs_size} = ++$i;
		return $i-1;
	}
	
	sub add_output_node {
		my $self	=	shift;
		my $node	=	shift;
		my $i		=	$self->{_outputs_size} || 0;
		$self->{_outputs}->[$i]->{node}		= $node;
		$self->{_outputs}->[$i]->{from_id}	= $node->add_input_node($self);
		$self->{_outputs_size} = ++$i;
		return $i-1;
	}     
	
	sub adjust_weight {
		my $self	=	shift;
		my $inc		=	shift;
		for my $i (@{$self->{_inputs}}) {
			$i->{weight} += $inc * $i->{weight};
			$i->{node}->adjust_weight($inc) if($i->{node});

Mesh.pm  view on Meta::CPAN


1;	
	
# Internal usage, prevents recursion on empty nodes.
package AI::NeuralNet::Mesh::cap;
	sub new     { bless {}, shift }
	sub input           {}
	sub adjust_weight   {}
	sub add_output_node {}
	sub add_input_node  {}
1;

# Internal usage, collects data from output layer.
package AI::NeuralNet::Mesh::output;
	
	use strict;
	
	sub new {
		my $type		=	shift;
		my $self		={ 
			_parent		=>	shift,
			_inputs		=>	[],
		};
		bless $self, $type;
	}
	
	sub add_input_node {
		my $self	=	shift;
		return (++$self->{_inputs_size})-1;
	}
	
	sub input {
		my $self	=	shift;
		my $input	=	shift;
		my $from_id	=	shift;
		$self->{_parent}->d("GOT INPUT [$input] FROM [$from_id]\n",1);
		$self->{_inputs}->[$from_id] = $self->{_parent}->intr($input);
	}
	
	sub get_outputs {
		my $self	=	shift;
		return $self->{_inputs};
	}

1;

Mesh.pm  view on Meta::CPAN

It was designed with accuracy and speed in mind. 

This network model is very flexible. It will allow for classic binary
operation or any range of integer or floating-point inputs you care
to provide. With this you can change activation types on a per node or
per layer basis (you can even include your own anonymous subs as 
activation types). You can add sigmoid transfer functions and control
the threshold. You can learn data sets in batch, and load CSV data
set files. You can do almost anything you need to with this module.
This code is designed to be flexible. Any new ideas for this module?
See AUTHOR, below, for contact info.

Mesh.pm  view on Meta::CPAN



=item AI::NeuralNet::Mesh->new(\@array_of_hashes);

Another dandy constructor...this is my favorite. It allows you to tailor the number of layers,
the size of the layers, the activation type (you can even add anonymous inline subs with this one),
and even the threshold, all with one array ref-ed constructor.

Example:

	my $net = AI::NeuralNet::Mesh->new([

Mesh.pm  view on Meta::CPAN

		    nodes        => 2,
		    activation   => 'linear'
		},
		{
		    nodes        => 3,
		    activation   => sub {
		        my $sum  =  shift;
		        return $sum + rand()*1;
		    }
		},
		{

Mesh.pm  view on Meta::CPAN

See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions
other than the ones listed above.

The activation type for each layer is preserved across load/save calls. 

EXCEPTION: Due to the constraints of Perl, I cannot load/save the actual subs that the code
ref option points to. Therefore, you must re-apply any code ref activation types after a 
load() call.
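
For example (a minimal sketch; the file name, topology, and sub are all
made up for illustration):

	my $net = AI::NeuralNet::Mesh->new(2,2);
	$net->load("mesh.net");
	# load() cannot restore the code ref, so re-apply it by hand:
	$net->activation(1, sub { my $sum = shift; $sum + rand() });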

=item $net->node_activation($layer,$node,$type);

Mesh.pm  view on Meta::CPAN


The words are not duplicated internally. For example:

	$net->crunch("How are you?");

Will probably return an array ref containing 1,2,3. A subsequent call of:

    $net->crunch("How is Jane?");

Will probably return an array ref containing 1,4,5. Notice, the first element stayed
the same. That is because it already stored the word "How". So, each word is stored

Mesh.pm  view on Meta::CPAN


The actual code that implements the range closure is
a bit convoluted, so I will expand on it here as a simple
tutorial for custom activation functions.

	= line 1 = 	sub {
	= line 2 =		my @values = ( 6..10 );
	= line 3 =		my $sum   = shift;
	= line 4 =		my $self  = shift;
	= line 5 =		$self->{top_value}=$sum if($sum>$self->{top_value});
	= line 6 =		my $index = intr($sum/$self->{top_value}*$#values);

Mesh.pm  view on Meta::CPAN

	use AI::NeuralNet::Mesh ':acts';

Let's look at the code real quick, as it shows how to get at the individual
input connections:

	= line 1 =	sub {
	= line 2 =		my $sum  = shift;
	= line 3 =		my $self = shift;
	= line 4 =		my $threshold = 0.50;
	= line 5 =		for my $x (0..$self->{_inputs_size}-1) { 
	= line 6 =			return 0.000001 if($self->{_inputs}->[$x]->{value}<$threshold)

Mesh.pm  view on Meta::CPAN

The tree() function is called as a blessed method when it is used internally, providing
access to the blessed reference in the first argument. 

Example connector:

	sub connect_three {
    	my $self	=	shift;
    	my $r1a		=	shift;
    	my $r1b		=	shift;
    	my $r2a		=	shift;
    	my $r2b		=	shift;

Mesh.pm  view on Meta::CPAN


add_output_node() takes one simple argument: a blessed reference to a node that it is supposed
to output its final value TO. We get this blessed reference with more simple addition.

$y + $r2a gives us the node directly above the first node (supposedly...I'll get to the "supposedly"
part in a minute.) By adding or subtracting from this number we get the neighbor nodes.
In the above example you can see we check the $y index to see that we haven't come close to
any of the edges of the range.

Using $y+$r2a we get the index of the node to pass to add_output_node() on the first node at
$y+B<$r1a>. 
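
Putting this together, a hypothetical inner loop for such a connector
might look like the sketch below (the $self->{mesh} layout and the exact
bounds test are assumptions based on the argument names above):

	for my $y (0..($r1b-$r1a-1)) {
	    next if $y == 0 or $y == ($r1b-$r1a-1);   # stay off the edges
	    $self->{mesh}->[$y+$r1a]->add_output_node($self->{mesh}->[$y+$r2a-1]);
	    $self->{mesh}->[$y+$r1a]->add_output_node($self->{mesh}->[$y+$r2a+1]);
	}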

Mesh.pm  view on Meta::CPAN


For the fun of it, we'll take a quick look at the default connector.
Below is the actual default connector code, albeit a bit cleaned up, as well as
line numbers added.

	= line 1  =	sub _c {
	= line 2  =    	my $self	=	shift;
	= line 3  =    	my $r1a		=	shift;
	= line 4  =    	my $r1b		=	shift;
	= line 5  =    	my $r2a		=	shift;
	= line 6  =    	my $r2b		=	shift;

Mesh.pm  view on Meta::CPAN

each time a new release of AI::NeuralNet::Mesh is available.

The list address is at:
	 ai-neuralnet-backprop@egroups.com 
	 
To subscribe, send a blank email:
	ai-neuralnet-backprop-subscribe@egroups.com  


=cut


 view all matches for this distribution


AI-NeuralNet-SOM

 view release on metacpan or  search on metacpan

examples/eigenvector_initialization.pl  view on Meta::CPAN

	my @es_desc = sort { $b <=> $a } @es;                            # eigenvalues sorted desc
#	warn "desc: ".Dumper \@es_desc;
	my @es_idx  = map { _find_num ($_, \@es) } @es_desc;             # eigenvalue indices sorted by eigenvalue (desc)
#	warn "idx: ".Dumper \@es_idx;

sub _find_num {
    my $v = shift;
    my $l = shift;
    for my $i (0..$#$l) {
	return $i if $v == $l->[$i];
    }

 view all matches for this distribution


AI-NeuralNet-Simple

 view release on metacpan or  search on metacpan

examples/game_ai.pl  view on Meta::CPAN

    my $health  = prompt("Am I in poor, average, or good health? ", qr/^(?i:[pag])/);
    my $knife   = prompt("Do I have a knife? ", qr/^(?i:[yn])/);
    my $gun     = prompt("Do I have a gun? ", qr/^(?i:[yn])/);
    my $enemies = prompt("How many enemies can I see? ", qr/^\d+$/);
    
    $health = substr $health, 0, 1;
    $health =~ tr/pag/012/;
    foreach ($knife,$gun) {
        $_ = substr $_, 0, 1;
        tr/yn/10/;
    }
    printf "I think I will %s!\n\n", $actions[$net->winner([
        $health, 
        $knife, 
        $gun, 
        $enemies])];
}

sub prompt 
{
    my ($message,$domain) = @_;
    my $valid_response = 0;
    my $response;
    do {
        print $message;
        chomp($response = <STDIN>);
        exit if substr(lc $response, 0, 1) eq 'q';
        $valid_response = $response =~ /$domain/;
    } until $valid_response;
    return $response;
}
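# Hypothetical extra call, following the same pattern as above
# (note that any reply starting with 'q' quits the script):
#   my $fight = prompt("Should I fight? ", qr/^(?i:[yn])/);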

sub display_result
{
    my ($net,@data) = @_;
    my $result      = $net->winner(\@data);
    my @health      = qw/Poor Average Good/;
    my @knife       = qw/No Yes/;

 view all matches for this distribution


AI-Ollama-Client

 view release on metacpan or  search on metacpan

lib/AI/Ollama/Client.pm  view on Meta::CPAN


Check to see if a blob exists on the Ollama server, which is useful when creating models.

=cut

around 'checkBlob' => sub ( $super, $self, %options ) {
    $super->( $self, %options )->then( sub( $res ) {
        if( $res->code =~ /^2\d\d$/ ) {
            return Future->done( 1 )
        } else {
            return Future->done( 0 )
        }

lib/AI/Ollama/Client.pm  view on Meta::CPAN

          my $str = $res->get;
          say $str;
      }

      Future::Mojo->done( defined $res );
  } until => sub($done) { $done->get };

Generate a response for a given prompt with a provided model.

Returns a L<< AI::Ollama::GenerateCompletionResponse >>.
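
A minimal non-streaming sketch (the server URL, model name, and the
C<response> accessor are assumptions, not taken from this file):

  my $client = AI::Ollama::Client->new(
      server => 'http://127.0.0.1:11434/api',
  );
  my $res = $client->generateCompletion(
      model  => 'llama2',
      prompt => 'Why is the sky blue?',
  )->get;
  say $res->response;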

=cut

around 'generateCompletion' => sub ( $super, $self, %options ) {
    # Encode images as base64, if images exist:
    # (but create a copy so we don't overwrite the input array)
    if (my $images = $options{images}) {

        # Allow { filename => '/etc/passwd' }

 view all matches for this distribution


AI-PBDD

 view release on metacpan or  search on metacpan

lib/AI/PBDD.pm  view on Meta::CPAN


$VERSION = '0.01';

bootstrap AI::PBDD $VERSION;

sub satCount {
  my ($bdd, $vars_ignored) = @_;

  if (!defined($vars_ignored)) {
    return satCount__I($bdd);
  } else {
    return satCount__II($bdd, $vars_ignored);
  }
}

sub printDot {
  my ($bdd, $filename) = @_;

  if (!defined($filename)) {
      printDot__I($bdd);
  } else {
      printDot__II($bdd, $filename);
  }
}

sub makeSet {
  my ($vars, $size, $offset) = @_;

  if (!defined($offset)) {
      return makeSetI($vars, $size);
  } else {
      return makeSetII($vars, $size, $offset);
  }
}

sub createPair {
  my ($old, $new) = @_;
  my $size = @$old;

  return createPairI($old, $new, $size);
}
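
# Hypothetical calls illustrating the optional-argument dispatch above
# ($bdd, @vars, @old and @new are assumed to exist already):
#
#   my $n1 = satCount($bdd);             # consider all variables
#   my $n2 = satCount($bdd, 2);          # ignore 2 variables
#   printDot($bdd);                      # write DOT to stdout
#   printDot($bdd, "bdd.dot");           # ...or to a file
#   my $set  = makeSet(\@vars, scalar @vars);
#   my $pair = createPair(\@old, \@new);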

 view all matches for this distribution

