

AI-FuzzyInference


FuzzyInference.pm

############################################

# this hash defines the possible interpretations of the
# standard fuzzy logic operations.
my %_operations = (
		   '&' => {
		       min     => sub { (sort {$a <=> $b} @_)[0] },
		       product => sub { my $p = 1; $p *= $_ for @_; $p },
		       default => 'min',
		   },
		   '|'  => {
		       max     => sub { (sort {$a <=> $b} @_)[-1] },
		       sum     => sub { my $s = 0; $s += $_ for @_; $s > 1 ? 1 : $s },
		       default => 'max',
		   },
		   '!' => {
		       complement => sub { 1 - $_[0] },
		       custom  => sub {},
		       default    => 'complement',
		   },
		   );
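
# Illustrative only (not part of the module): each operator maps to its
# possible interpretations, each of which is a code ref over membership
# degrees in [0, 1].
#   $_operations{'&'}{min}->(0.3, 0.7);       # 0.3  -- fuzzy AND as minimum
#   $_operations{'|'}{sum}->(0.6, 0.8);       # 1    -- bounded sum caps at 1
#   $_operations{'!'}{complement}->(0.2);     # 0.8  -- standard negation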

# this hash defines the currently implemented implication methods.
my %_implication     = qw(
			  clip    1
			  scale   1
			  default clip
			  );

# this hash defines the currently implemented aggregation methods.
my %_aggregation     = qw(
			  max     1
			  default max
			  );

# this hash defines the currently implemented defuzzification methods.
my %_defuzzification = qw(
			  centroid 1
			  default  centroid
			  );

# sub new() - constructor.
# 
# doesn't take any arguments. Returns an initialized AI::FuzzyInference object.

sub new {
    my $self  = shift;
    my $class = ref($self) || $self;

    my $obj = bless {} => $class;

    $obj->_init;

    return $obj;
}

# sub _init() - private method.
#
# no arguments. Initializes the data structures we will need.
# It also defines the default logic operations we might need.

sub _init {
    my $self = shift;

    $self->{SET}     = new AI::FuzzyInference::Set;
    $self->{INVARS}  = {};
    $self->{OUTVARS} = {};
    $self->{RULES}   = [];
    $self->{RESULTS} = {};

    $self->{IMPLICATION}     = $_implication{default};
    $self->{AGGREGATION}     = $_aggregation{default};
    $self->{DEFUZZIFICATION} = $_defuzzification{default};

    for my $op (qw/& | !/) {
	$self->{OPERATIONS}{$op} = $_operations{$op}{default};
    }
}

# sub implication() - public method.
#
# one optional argument: has to match one of the keys of the %_implication hash.
# used to query/set the implication method.

sub implication {
    my ($self,
	$new,
	) = @_;

    if (defined $new and exists $_implication{$new}) {
	$self->{IMPLICATION} = $new;
    }

    return $self->{IMPLICATION};
}

# sub aggregation() - public method.
#
# one optional argument: has to match one of the keys of the %_aggregation hash.
# used to query/set the aggregation method.

sub aggregation {
    my ($self,
	$new,
	) = @_;

    if (defined $new and exists $_aggregation{$new}) {
	$self->{AGGREGATION} = $new;
    }

    return $self->{AGGREGATION};
}

# sub defuzzification() - public method.
#
# one optional argument: has to match one of the keys of the %_defuzzification hash.
# used to query/set the defuzzification method.

sub defuzzification {
    my ($self,
	$new,
	) = @_;

    if (defined $new and exists $_defuzzification{$new}) {
	$self->{DEFUZZIFICATION} = $new;
    }

    return $self->{DEFUZZIFICATION};
}

# sub operation() - public method.
#
# two arguments: first one mandatory and specifies the logic operation
# in question. Second one is optional and has to match one of the keys
# of the %{$_operations{$first_arg}} hash.
# Used to query/set the logic operations method.

sub operation {
    my ($self,
	$op,
	$new,
	) = @_;

    return unless defined $op && exists $_operations{$op};

    if (defined $new and exists $_operations{$op}{$new}) {
	$self->{OPERATIONS}{$op} = $new;
    }

    return $self->{OPERATIONS}{$op};
}
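
# Example usage (illustrative):
#   my $and = $obj->operation('&');      # query, e.g. 'min'
#   $obj->operation('&', 'product');     # switch fuzzy AND to product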

# sub inVar() - public method.
#
# Takes 4 or more arguments: the first is the name of a new input variable.

FuzzyInference.pm

# ex. $obj->inVar('x', 0, 10,
#                 'low'  => [0, 0,
#                            5, 1,
#                            10,0],
#                  ....);

sub inVar {
    my ($self,
	$var,
	$xmin,
	$xmax,
	@sets,
	) = @_;

    $self->{INVARS}{$var} = [$xmin, $xmax];

    while (@sets) {
	my $s = shift @sets;
	my $c = shift @sets;

	$self->{SET}->add("$var:$s", $xmin, $xmax, @$c);
    }
}

# sub outVar() - public method.
#
# Takes 4 or more arguments: the first is the name of a new output variable.

FuzzyInference.pm

# Additional arguments constitute a hash. The keys of the hash
# are term set names defined for the given variable. The values
# are the coordinates of the vertices of the term sets.

sub outVar {
    my ($self,
	$var,
	$xmin,
	$xmax,
	@sets,
	) = @_;

    $self->{OUTVARS}{$var} = [$xmin, $xmax];

    while (@sets) {
	my $s = shift @sets;
	my $c = shift @sets;

	$self->{SET}->add("$var:$s", $xmin, $xmax, @$c);
    }
}

# sub addRule() - public method.
#
# Adds fuzzy if-then inference rules.

FuzzyInference.pm

#               'x=low  & y=small' => 'z = fast',
#               'x=high & y=tiny'  => 'z=veryfast');
# spaces are optional. The characters [&=|] are special.

sub addRule {
    my ($self, %rules) = @_;

    for my $k (keys %rules) {
	my $v = $rules{$k};
	s/\s+//g for $v, $k;

	push @{$self->{RULES}} => [$k, $v];
    }

    return 1;
}

# sub show() - public method.
#
# This method displays the computed values of all
# output variables.
# It is ugly, and will be removed. Here for debugging.

sub show {
    my $self = shift;

    for my $var (keys %{$self->{RESULTS}}) {
	print "Var $var = $self->{RESULTS}{$var}.\n";
    }
}

# sub value() - public method.
#
# one argument: the name of an output variable.
# This method returns the computed value of a given output var.

sub value {
    my ($self,
	$var,
	) = @_;

    return undef unless exists $self->{RESULTS}{$var};
    return $self->{RESULTS}{$var};
}

# sub reset() - public method
# 
# cleans the data structures used.

sub reset {
    my $self = shift;

    my @list   =  $self->{SET}->listMatching(q|:implicated$|);
    push @list => $self->{SET}->listMatching(q|:aggregated$|);

    $self->{SET}->delete($_) for @list;

    $self->{RESULTS} = {};
}

# sub compute() - public method
#
# This method takes as input crisp values for each

FuzzyInference.pm

# ex.
# $z = $obj->compute(x => 5,
#                    y => 24);

sub compute {
    my ($self,
	%vars,
	) = @_;

    $self->reset();

    # First thing we do is to fuzzify the inputs.
    $self->_fuzzify(%vars);

    # Now, apply the rules to see which ones fire.
    $self->_infer;

    # implicate
    $self->_implicate;

    # aggregate
    $self->_aggregate;

    # defuzzify .. final step.
    $self->_defuzzify;

    return 1;
}

# sub _defuzzify() - private method.
#
# no arguments. This method applies the defuzzification technique
# to get a crisp value out of the aggregated set of each output
# var.

sub _defuzzify {
    my $self = shift;

    my $_defuzzification = $self->{DEFUZZIFICATION};

    # iterate through all output vars.
    for my $var (keys %{$self->{OUTVARS}}) {

	my $result = 0;
	if ($self->{SET}->exists("$var:aggregated")) {
	    $result = $self->{SET}->$_defuzzification("$var:aggregated");
	}

	$self->{RESULTS}{$var} = $result;
    }
}

# sub _aggregate() - private method.
#
# no arguments. This method applies the aggregation technique to get
# one fuzzy set out of the implicated sets of each output var.

sub _aggregate {
    my $self = shift;

    my $_aggregation = $self->{AGGREGATION};

    # iterate through all output vars.
    for my $var (keys %{$self->{OUTVARS}}) {

	# get implicated sets.
	my @list = $self->{SET}->listMatching("\Q$var\E:.*:implicated\$");

	next unless @list;

	my $i = 0;
	my $current = shift @list;

	# aggregate everything together.
	while (@list) {
	    my $new  = shift @list;
	    my $name = "temp" . $i++;

	    my @c = $self->{SET}->$_aggregation($current, $new);
	    $self->{SET}->add($name, @{$self->{OUTVARS}{$var}}, @c);
	    $current = $name;
	}

	# rename the final aggregated set.
	my @c = $self->{SET}->coords($current);
	$self->{SET}->add("$var:aggregated", @{$self->{OUTVARS}{$var}}, @c);

	# delete the temporary sets.
	for my $j (0 .. $i - 1) {
	    $self->{SET}->delete("temp$j");
	}
    }
}

# sub _implicate() - private method.
#
# no arguments. This method applies the implication technique
# to all the fired rules to find a support value for each
# output variable.

sub _implicate {
  my $self = shift;

  my $_implication = $self->{IMPLICATION};

  my %ind;

  for my $ref (@{$self->{FIRED}}) {
    my ($i, $val) = @$ref;
    my ($var, $ts) = split /=/, $self->{RULES}[$i][1];

    if ($val > 0) {
      $ind{$var}{$ts}++;
      my @c = $self->{SET}->$_implication("$var:$ts", $val);
      my @u = @{$self->{OUTVARS}{$var}}; # the universe
      $self->{SET}->add("$var:$ts:$ind{$var}{$ts}:implicated", @u, @c);
    }
  }
}
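
# Note: 'clip' truncates the consequent set at the rule's support value,
# while 'scale' multiplies its membership function by that value. (This is
# an assumption based on the standard Mamdani-style methods the names
# suggest; the implementations live in AI::FuzzyInference::Set.)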

# sub _fuzzify() - private method.
#
# one argument: a hash. The keys are input variables. The
# values are the crisp values of the input variables (same arguments
# as compute()). It finds the degree of membership of each input
# variable in each of its term sets.

sub _fuzzify {
    my ($self, %vars) = @_;

    my %terms;

    for my $var (keys %vars) {
	my $val = $vars{$var};

	for my $ts ($self->{SET}->listMatching("\Q$var\E")) {
	    my $deg = $self->{SET}->membership($ts, $val);

	    $terms{$var}{$ts} = $deg;
	}
    }

    $self->{FUZZIFY} = \%terms;
}

# sub _infer() - private method.
#
# no arguments. This method applies the logic operations to combine
# multiple parts of the antecedent of a rule to get one crisp value 
# that is the degree of support of that rule.
# Rules with positive support "fire".

sub _infer {
    my $self = shift;

    my @fired; # keep list of fired rules.

    for my $i (0 .. $#{$self->{RULES}}) {
	my $r = $self->{RULES}[$i][0];   # antecedent

	my $val = 0;
	while ($r =~ /([&|]?)([^&|]+)/g) {
	    my ($op, $ant) = ($1, $2);
	    my ($var, $ts) = split /=/ => $ant;

	    $ts = "$var:$ts";

	    if ($op) {
		#$val = $self->{LOGIC}{$op}{SUB}->($val, $self->{FUZZIFY}{$var}{$ts});
		$val = $_operations{$op}{$self->{OPERATIONS}{$op}}->($val, $self->{FUZZIFY}{$var}{$ts});
	    } else {
		$val = $self->{FUZZIFY}{$var}{$ts};
	    }
	}

	# Record the rule's support value; rules with zero support are
	# filtered out later in _implicate().
	push @fired => [$i, $val];
    }

    $self->{FIRED} = \@fired;
}
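
# Illustrative trace of the antecedent parser above: for 'x=low&y=small'
# the regex yields ('', 'x=low') first, setting $val to the membership
# degree of x in 'low', then ('&', 'y=small'), which combines it with the
# degree of y in 'small' via the current AND interpretation.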

__END__

=pod

FuzzyInference.pm


AI::FuzzyInference - A module to implement a Fuzzy Inference System.

=head1 SYNOPSIS

    use AI::FuzzyInference;

    my $s = new AI::FuzzyInference;

    $s->inVar('service', 0, 10,
	  poor      => [0, 0,
			2, 1,
			4, 0],
	  good      => [2, 0,
			4, 1,
			6, 0],
	  excellent => [4, 0,
			6, 1,
			8, 0],
	  amazing   => [6, 0,
			8, 1,
			10, 0],
	  );

    $s->inVar('food', 0, 10,
	  poor      => [0, 0,
			2, 1,
			4, 0],
	  good      => [2, 0,
			4, 1,
			6, 0],
	  excellent => [4, 0,
			6, 1,
			8, 0],
	  amazing   => [6, 0,
			8, 1,
			10, 0],
	  );

    $s->outVar('tip', 5, 30,
	   poor      => [5, 0,
			 10, 1,
			 15, 0],
	   good      => [10, 0,
			 15, 1,
			 20, 0],
	   excellent => [15, 0,
			 20, 1,
			 25, 0],
	   amazing   => [20, 0,
			 25, 1,
			 30, 0],
	   );

    $s->addRule(
	    'service=poor      & food=poor'      => 'tip=poor',
	    'service=good      & food=poor'      => 'tip=poor',
	    'service=excellent & food=poor'      => 'tip=good',
	    'service=amazing   & food=poor'      => 'tip=good',

	    'service=poor      & food=good'      => 'tip=poor',
	    'service=good      & food=good'      => 'tip=good',
	    'service=excellent & food=good'      => 'tip=good',
	    'service=amazing   & food=good'      => 'tip=excellent',

	    'service=poor      & food=excellent' => 'tip=good',
	    'service=good      & food=excellent' => 'tip=excellent',
	    'service=excellent & food=excellent' => 'tip=excellent',
	    'service=amazing   & food=excellent' => 'tip=amazing',

	    'service=poor      & food=amazing'   => 'tip=good',
	    'service=good      & food=amazing'   => 'tip=excellent',
	    'service=excellent & food=amazing'   => 'tip=amazing',
	    'service=amazing   & food=amazing'   => 'tip=amazing',

	    );

    $s->compute(service => 2,
	    food    => 7);
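
    # illustrative: read the computed crisp value back out via value()
    my $tip = $s->value('tip');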

=head1 DESCRIPTION

This module implements a fuzzy inference system. Very briefly, an FIS
is a system defined by a set of input and output variables, and a set
of fuzzy if-then rules that map input values to output values.

FuzzyInference.pm

=item inVar()

This method defines an input variable, along with its universe of
discourse, and its term sets. Here's an example:

      $obj->inVar('height',
                  5, 8,   # xmin, xmax (in feet, say)
                  'short'  => [5,   0,
                               5.5, 1,
                               6,   0],
                  'medium' => [5.5, 0,
                               6.5, 1,
                               7,   0],
                  'tall'   => [6.5, 0,
                               7,   1]
                  );

This example defines an input variable called I<height>. The minimum
possible value for height is 5, and the maximum is 8. It also defines
3 term sets associated with height: I<short>, I<medium> and I<tall>.
The shape of each of these triangular term sets is completely
specified by the coordinates of its vertices.

FuzzyInference.pm

strings have to be separated by C<&> or C<|> indicating the fuzzy
I<AND> and I<OR> operations respectively. Each consequent must be a
single string. Each string has the form: C<var = term_set>. Spaces
are completely optional. Example:

    $obj->addRule('height=short & weight=big' => 'diet = necessary',
		  'height=tall & weight=tiny' => 'diet = are_you_kidding_me');

The first rule basically says I<If the height is short, and the weight is
big, then diet is necessary>.

=item compute()

FuzzyInference.pm

runs those values through the FIS, generating corresponding values for
the output variables. It always returns a true value. To get the actual
values of the output variables, look at the C<value()> method below.
Example:

    $obj->compute(x => 5,
		  y => 24);

Note that any subsequent call to C<compute()> will implicitly clear out
the old computed values before recomputing the new ones. This is done
through a call to the C<reset()> method below.

FuzzyInference.pm


But, if you insist, here's the traditional way:

To install this module type the following:

   perl Makefile.PL
   make
   make test
   make install


=head1 AUTHOR

Copyright 2002, Ala Qumsieh. All rights reserved.



AI-Gene-Sequence


AI/Gene/Sequence.pm

require 5.6.0;
use strict;
use warnings;

BEGIN {
  use Exporter   ();
  our ($VERSION, @ISA, @EXPORT, @EXPORT_OK, %EXPORT_TAGS);
  $VERSION     = 0.22;
  @ISA         = qw(Exporter);
  @EXPORT      = ();
  %EXPORT_TAGS = ();
  @EXPORT_OK   = qw();
}
our @EXPORT_OK;

my ($probs,$mut_keys) = _normalise( { map {$_ => 1} 
				      qw(insert remove overwrite 
					 duplicate minor major 
					 switch shuffle reverse) } );

##
# calls mutation method at random
# 0: number of mutations to perform
# 1: ref to hash of probs to use (otherwise uses default mutations and probs)

sub mutate {
  my $self = shift;
  my $num_mutates = +$_[0] || 1;
  my $rt = 0;
  my ($hr_probs, $muts);
  if (ref $_[1] eq 'HASH') { # use non standard mutations or probs
    ($hr_probs, $muts) = _normalise($_[1]);
  }
  else {                     # use standard mutations and probs
    $hr_probs = $probs;
    $muts = $mut_keys;
  }

 MUT_CYCLE: for (1..$num_mutates) {
    my $rand = rand;
    foreach my $mutation (@{$muts}) {
      next unless $rand < $hr_probs->{$mutation};
      my $mut = 'mutate_' . $mutation;
      $rt += $self->$mut(1);
      next MUT_CYCLE;
    }
  }
  return $rt;
}
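
# Example usage (illustrative): perform 5 mutations drawn only from
# insertions and removals, weighted 1:3.
#   $gene->mutate(5, { insert => 1, remove => 3 });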

##
# creates a normalised and cumulative prob distribution for the
# keys of the referenced hash

sub _normalise {
  my $hr = $_[0];
  my $h2 = {};
  my $muts = [keys %{$hr}];
  my $sum = 0;
  foreach (values %{$hr}) {
    $sum += $_;
  }
  if ($sum <= 0) {
    die "Cannot randomly mutate with bad probability distribution";
  }
  else {
    my $cum;
    @{$h2}{ @{$muts} } = map {$cum +=$_; $cum / $sum} @{$hr}{ @{$muts} };
    return ($h2, $muts);
  }
}
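
# Illustrative: _normalise({ insert => 1, remove => 3 }) returns a hash
# of cumulative probabilities ending at 1 -- e.g. { insert => 0.25,
# remove => 1 } if keys() happens to yield 'insert' first (hash key
# order is not guaranteed) -- plus the key list in that same order.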

##
# inserts one element into the sequence
# 0: number to perform ( or 1)
# 1: position to mutate (undef for random)

sub mutate_insert {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  for (1..$num) {
    my $length = length $self->[0];
    my $pos = defined($_[1]) ? $_[1] : int rand $length;
    next if $pos > $length; # further than 1 place after gene
    my @token = $self->generate_token;
    my $new = $self->[0];
    substr($new, $pos, 0) = $token[0];
    next unless $self->valid_gene($new, $pos);
    $self->[0] = $new;
    splice @{$self->[1]}, $pos, 0, $token[1];
    $rt++;
  }
  return $rt;
}

##
# removes element(s) from sequence
# 0: number of times to perform
# 1: position to affect (undef for rand)
# 2: length to affect, undef => 1, 0 => random length

sub mutate_remove {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  for (1..$num) {
    my $length = length $self->[0];
    my $len = !defined($_[2]) ? 1 : ($_[2] || int rand $length);
    next if ($length - $len) <= 0;
    my $pos = defined($_[1]) ? $_[1] : int rand $length;
    next if $pos >= $length; # outside of gene
    my $new = $self->[0];
    substr($new, $pos, $len) = '';
    next unless $self->valid_gene($new, $pos);
    $self->[0] = $new;
    splice @{$self->[1]}, $pos, $len;
    $rt++;
  }
  return $rt;
}

##
# copies an element or run of elements into a random place in the gene
# 0: number to perform (or 1)
# 1: posn to copy from (undef for rand)
# 2: posn to splice in (undef for rand)
# 3: length            (undef for 1, 0 for random)

sub mutate_duplicate {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  for (1..$num) {
    my $length = length $self->[0];
    my $len = !defined($_[3]) ? 1 : ($_[3] || int rand $length);
    my $pos1 = defined($_[1]) ? $_[1] : int rand $length;
    my $pos2 = defined($_[2]) ? $_[2] : int rand $length;
    my $new = $self->[0];
    next if ($pos1 + $len) > $length;
    next if $pos2 > $length;
    my $seq = substr($new, $pos1, $len);
    substr($new, $pos2,0) = $seq;
    next unless $self->valid_gene($new);
    $self->[0] = $new;
    splice @{$self->[1]}, $pos2, 0, @{$self->[1]}[$pos1..($pos1+$len-1)];
    $rt++;
  }
  return $rt;
}

##
# Duplicates a sequence and writes it on top of some other position
# 0: num to perform  (or 1)
# 1: pos to get from          (undef for rand)
# 2: pos to start replacement (undef for rand)
# 3: length to operate on     (undef => 1, 0 => rand)

sub mutate_overwrite {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  
  for (1..$num) {
    my $new = $self->[0];
    my $length = length $self->[0];
    my $len = !defined($_[3]) ? 1 : ($_[3] || int rand $length);
    my $pos1 = defined($_[1]) ? $_[1] : int rand $length;
    my $pos2 = defined($_[2]) ? $_[2] : int rand $length;
    next if ( ($pos1 + $len) >= $length
	      or $pos2 > $length);
    substr($new, $pos2, $len) = substr($new, $pos1, $len);
    next unless $self->valid_gene($new);
    $self->[0] = $new;
    splice (@{$self->[1]}, $pos2, $len,
	    @{$self->[1]}[$pos1..($pos1+$len-1)] );
    $rt++;
  }

  return $rt;
}

##
# Takes a run of tokens and reverses their order, is a noop with 1 item
# 0: number to perform
# 1: posn to start from (undef for rand)
# 2: length             (undef=>1, 0=>rand)

sub mutate_reverse {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  
  for (1..$num) {
    my $length = length $self->[0];
    my $new = $self->[0];
    my $pos = defined($_[1]) ? $_[1] : int rand $length;
    my $len = !defined($_[2]) ? 1 : ($_[2] || int rand $length);

    next if ($pos >= $length
	    or $pos + $len > $length);

    my $chunk = reverse split('', substr($new, $pos, $len));
    substr($new, $pos, $len) = join('', $chunk);
    next unless $self->valid_gene($new);
    $self->[0] = $new;
    splice (@{$self->[1]}, $pos, $len,
	    reverse( @{$self->[1]}[$pos..($pos+$len-1)] ));
    $rt++;
  }
  return $rt;
}

##
# Changes token into one of same type (ie. passes type to generate..)
# 0: number to perform
# 1: position to affect (undef for rand)

sub mutate_minor {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  for (1..$num) {
    my $pos = defined $_[1] ? $_[1] : int rand length $self->[0];
    next if $pos >= length($self->[0]); # pos lies outside of gene
    my $type = substr($self->[0], $pos, 1);
    my @token = $self->generate_token($type, $self->[1][$pos]);
    # still need to check for niceness, just in case
    if ($token[0] eq $type) {
      $self->[1][$pos] = $token[1];
    }
    else {
      my $new = $self->[0];
      substr($new, $pos, 1) = $token[0];
      next unless $self->valid_gene($new, $pos);
      $self->[0] = $new;
      $self->[1][$pos] = $token[1];
    }
    $rt++;
  }
  return $rt;
}

##
# Changes one token into some other token
# 0: number to perform
# 1: position to affect (undef for random)

sub mutate_major {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  for (1..$num) {
    my $pos = defined $_[1] ? $_[1] : int rand length $self->[0];
    next if $pos >= length($self->[0]); # outside of gene
    my @token = $self->generate_token();
    my $new = $self->[0];
    substr($new, $pos, 1) = $token[0];
    next unless $self->valid_gene($new, $pos);
    $self->[0] = $new;
    $self->[1][$pos] = $token[1];
    $rt++;
  }
  return $rt;
}

##
# swaps over two sequences within the gene
# any sort of oddness can occur if regions overlap

AI/Gene/Sequence.pm

# 2: start of second sequence  (undef for rand)
# 3: length of first sequence  (undef for 1, 0 for rand)
# 4: length of second sequence (undef for 1, 0 for rand)

sub mutate_switch {
  my $self = shift;
  my $num = $_[0] || 1;
  my $rt = 0;
  for (1..$num) {
    my $length = length $self->[0];
    my $pos1 = defined $_[1] ? $_[1] : int rand $length;
    my $pos2 = defined $_[2] ? $_[2] : int rand $length;
    my $len1 = !defined($_[3]) ? 1 : ($_[3] || int rand $length);
    my $len2 = !defined($_[4]) ? 1 : ($_[4] || int rand $length);

    my $new = $self->[0];
    next if $pos1 == $pos2;
    if ($pos1 > $pos2) { # ensure $pos1 comes first
      ($pos1, $pos2) = ($pos2, $pos1);
      ($len1, $len2) = ($len2, $len1);
    }
    if ( ($pos1 + $len1) > $pos2 # ensure no overlaps
	 or ($pos2 + $len2) > $length
	 or $pos1 >= $length ) {
      next;
    }
    my $chunk1 = substr($new, $pos1, $len1, substr($new, $pos2, $len2,''));
    substr($new,$pos2 -$len1 + $len2,0) = $chunk1;
    next unless $self->valid_gene($new);
    $self->[0]= $new;
    my @chunk1 = splice(@{$self->[1]}, $pos1, $len1,
			splice(@{$self->[1]}, $pos2, $len2) );
    splice @{$self->[1]}, $pos2 + $len2 - $len1,0, @chunk1;
    $rt++;
  }
  return $rt;
}

##
# takes a sequence, removes it, then inserts it at another position
# odd things might occur if posn to replace to lies within area taken from

AI/Gene/Sequence.pm

# 1: posn to get from   (undef for rand)
# 2: posn to put        (undef for rand)
# 3: length of sequence (undef for 1, 0 for rand)

sub mutate_shuffle {
  my $self = shift;
  my $num = +$_[0] || 1;
  my $rt = 0;
  
  for (1..$num) {
    my $length = length $self->[0];
    my $pos1 = defined($_[1]) ? $_[1] : int rand $length;
    my $pos2 = defined($_[2]) ? $_[2] : int rand $length;
    my $len = !defined($_[3]) ? 1 : ($_[3] || int rand $length);

    my $new = $self->[0];
    if ($pos1 +$len > $length   # outside gene
	or $pos2 >= $length      # outside gene
	or ($pos2 < ($pos1 + $len) and $pos2 > $pos1)) { # overlap
      next;
    }
    if ($pos1 < $pos2) {
      substr($new, $pos2-$len,0) = substr($new, $pos1, $len, '');
    }
    else {
      substr($new, $pos2, 0) = substr($new, $pos1, $len, '');
    }
    next unless $self->valid_gene($new);
    $self->[0] = $new;
    if ($pos1 < $pos2) {
      splice (@{$self->[1]}, $pos2-$len, 0, 
	      splice(@{$self->[1]}, $pos1, $len) );
    }
    else {
      splice(@{$self->[1]}, $pos2, 0,
	     splice(@{$self->[1]}, $pos1, $len) );
    }
    $rt++;
  }
  return $rt;
}

# These are intended to be overridden; simple versions are
# provided for the sake of testing.

AI/Gene/Sequence.pm

# if called with a token type, it will also be passed the original
# token as the second argument.
# should return a two element list of the token type followed by the token itself.

sub generate_token {
  my $self = shift;
  my $token_type = $_[0];
  my $letter = ('a'..'z')[rand 26];  # rand 26, so 'z' can be chosen too
  unless ($token_type) {
    return ($letter) x 2;
  }
  return ($token_type) x 2;
}

# takes a string of token types to be checked for validity.
# If a mutation affects only one place, then the position of the
# mutation can be passed as a second argument.

AI/Gene/Sequence.pm

## You might also want to have methods like the following,
# they will not be called by the 'sequence' methods.

# Default constructor
sub new {
  my $gene = ['',[]];
  return bless $gene, ref $_[0] || $_[0];
}

# remember that the clone method may require deep copying depending on
# your specific needs

sub clone {
  my $self = shift;
  my $new = [$self->[0]];
  $new->[1] = [@{$self->[1]}];
  return bless $new, ref $self;
}

# You need some way to use the gene you've made and mutated; this
# will at least let you have a look if it starts behaving oddly.

sub render_gene {
  my $self = shift;
  my $return =  "$self\n";
  $return .= $self->[0] . "\n";
  $return .= (join ',', @{$self->[1]}). "\n";
  return $return;
}

# used for testing

sub _test_dump {
  my $self = shift;
  my @rt = ($self->[0], join('',@{$self->[1]}));
  return @rt;
}
1;

__END__

=pod

=head1 NAME

AI::Gene::Sequence - a base class for storing and mutating genetic sequences

=head1 SYNOPSIS

 package Somegene;
 use AI::Gene::Sequence;
 our @ISA = qw(AI::Gene::Sequence);

 my %things = ( a => [qw(a1 a2 a3 a4 a5)],
	       b => [qw(b1 b2 b3 b4 b5)],);

 sub generate_token {
  my $self = shift;
  my ($type, $prev) = @_;
  if ($type) {
    $prev = ${ $things{$type} }[rand @{ $things{$type} }];
  } 
  else {
    $type = ('a','b')[rand 2];
    $prev = ${$things{$type}}[rand @{$things{$type}}];
  }
  return ($type, $prev); 
 }

 sub valid_gene {
   my $self = shift;
   return 0 if $_[0] =~ /(.)\1/;
   return 1;
 }

 sub seed {
   my $self = shift;
   $self->[0] = 'ababab';
   @{$self->[1]} = qw(A1 B1 A2 B2 A3 B3);
 }

 sub render {
   my $self = shift;
   return join(' ', @{$self->[1]});
 } 

 # elsewhere
 package main;

 my $gene = Somegene->new;
 $gene->seed;
 print $gene->render, "\n";
 $gene->mutate(5);
 print $gene->render, "\n";
 $gene->mutate(5);
 print $gene->render, "\n";

=head1 DESCRIPTION

This is a class which provides generic methods for the
creation and mutation of genetic sequences.  Various mutations

AI/Gene/Sequence.pm

tokens themselves; this allows for arbitrary data to be
stored as a token in a gene.

For instance, a regular expression could be encoded as:

 $self = ['ccartm',['a', 'b', '|', '[A-Z]', '\W', '*?'] ]

Using a string to indicate the sort of thing held at the
corresponding part of the gene allows for a simple test
of the validity of a proposed gene by using a regular
expression.
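
A minimal sketch of such a test (illustrative; it hypothetically treats
'm' in the type string above as marking a quantifier token):

 sub valid_gene {
   my ($self, $types) = @_;         # type string, e.g. 'ccartm'
   return 0 if $types =~ /^m|mm/;   # no leading or doubled quantifiers
   return 1;
 }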

AI/Gene/Sequence.pm

length I<len>.

=item C<mutate_shuffle([num, pos1, pos2, len])>

This takes a sequence (starting at I<pos1>, length I<len>) from within
a gene and moves it to another position (starting at I<pos2>).  Odd
things might occur if the position to move the sequence into lies
within the section to be moved, but the module will try its hardest
to cause a mutation.



AI-General


General.pm


our $VERSION = '0.01';


sub new {
	my ( $class ) = @_;
	my $self = bless [], $class;
	$self->dwim( "Implement self" );
	return $self;
}


sub dwim {
	my ( $self, $args ) = @_;

	#... TO DO
}
	

1;


__END__

General.pm


AI::General - A general-purpose artificial intelligence

=head1 SYNOPSIS

  use AI::General;
  
  AI::General->dwim( "Prove NP != P" ) or
  	die $AI::General::excuse;
  	
  
  
=head1 DESCRIPTION

This module is a general-purpose artificial intelligence. It consists
of one method, dwim ('Do what I mean'), which can take any number of
arguments.



AI-Genetic-Pro


lib/AI/Genetic/Pro.pm

#-----------------------------------------------------------------------
use AI::Genetic::Pro::Array::Type 	qw( get_package_by_element_size );
use AI::Genetic::Pro::Chromosome;
#-----------------------------------------------------------------------
__PACKAGE__->mk_accessors(qw(
	mce
	type
	population
	terminate
	chromosomes 
	crossover 
	native
	parents 		_parents 
	history 		_history
	fitness 		_fitness 		_fitness_real
	cache
	mutation 		_mutator
	strategy 		_strategist
	selection 		_selector 
	_translations
	generation
	preserve		
	variable_length
	_fix_range
	_package
	_length
	strict			_strict
	workers
	size
	_init
));
#=======================================================================
# Additional modules
use constant STORABLE	=> 'Storable';
use constant GD 		=> 'GD::Graph::linespoints'; 
#=======================================================================
my $_Cache = { };
my $_temp_chromosome;
#=======================================================================
sub new {
	my ( $class, %args ) = ( shift, @_ );
	
	#-------------------------------------------------------------------
	my %opts = map { if(ref $_){$_}else{ /^-?(.*)$/o; $1 }} @_;
	my $self = bless \%opts, $class;
	
	#-------------------------------------------------------------------
	$AI::Genetic::Pro::Array::Type::Native = 1 if $self->native;
	
	#-------------------------------------------------------------------
	croak(q/Type of chromosomes cannot be "combination" if "variable length" feature is active!/)
		if $self->type eq q/combination/ and $self->variable_length;
	croak(q/You must specify a crossover strategy with -strategy!/)
		unless defined ($self->strategy);
	croak(q/Type of chromosomes cannot be "combination" if strategy is not one of: OX, PMX!/)
		if $self->type eq q/combination/ and ($self->strategy->[0] ne q/OX/ and $self->strategy->[0] ne q/PMX/);
	croak(q/Strategy cannot be "/,$self->strategy->[0],q/" if "variable length" feature is active!/ )
		if ($self->strategy->[0] eq 'PMX' or $self->strategy->[0] eq 'OX') and $self->variable_length;
	
	#-------------------------------------------------------------------
	$self->_set_strict if $self->strict;

	#-------------------------------------------------------------------
	return $self unless $self->mce;

	#-------------------------------------------------------------------
	delete $self->{ mce };
	'AI::Genetic::Pro::MCE'->use or die q[Cannot raise multicore support: ] . $@;
	
	return AI::Genetic::Pro::MCE->new( $self, \%args );
}
#=======================================================================
sub _Cache { $_Cache; }
#=======================================================================
# INIT #################################################################
#=======================================================================
sub _set_strict {
	my ($self) = @_;
	
	# fitness
	my $fitness = $self->fitness();
	my $replacement = sub {
		my @tmp = @{$_[1]};
		my $ret = $fitness->(@_);
		my @cmp = @{$_[1]};
		die qq/Chromosome was modified in a fitness function from "@tmp" to "@{$_[1]}"!\n/ unless compare(\@tmp, \@cmp);
		return $ret;
	};
	$self->fitness($replacement);
}
#=======================================================================
sub _fitness_cached {
	my ($self, $chromosome) = @_;
	
	#my $key = md5_hex(${tied(@$chromosome)});
	my $key = md5_hex( $self->_package ? md5_hex( ${ tied( @$chromosome ) } ) : join( q[:], @$chromosome ) );
	return $_Cache->{$key} if exists $_Cache->{$key};
	
	$_Cache->{$key} = $self->_fitness_real->($self, $chromosome);
	return $_Cache->{$key};
}
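# Note: the cache key is an md5 of the packed chromosome string (when a
# packed array type is in use) or of the joined gene values, so identical
# chromosomes share a single fitness evaluation.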
#=======================================================================
sub _init_cache {
	my ($self) = @_;
		
	$self->_fitness_real($self->fitness);
	$self->fitness(\&_fitness_cached);
	return;
}
#=======================================================================
sub _check_data_ref {
	my ($self, $data_org) = @_;
	my $data = clone($data_org);
	my $ars;
	for(0..$#$data){
		next if $ars->{$data->[$_]};
		$ars->{$data->[$_]} = 1;
		unshift @{$data->[$_]}, undef;
	}
	return $data;
}
#=======================================================================
# in some cases we have to find a constant C to increase the value
# of a range, due to the design model
sub _find_fix_range {
	my ($self, $data) = @_;

	for my $idx (0..$#$data){
		if($data->[$idx]->[1] < 1){ 
			my $const = 1 - $data->[$idx]->[1];
			push @{$self->_fix_range}, $const; 
			$data->[$idx]->[1] += $const;
			$data->[$idx]->[2] += $const;
		}else{ push @{$self->_fix_range}, 0; }
	}

	return $data;
}
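# Illustrative: a range of [-5, 5] gives $const = 1 - (-5) = 6, so the
# range is stored as [1, 11] and 6 is pushed onto _fix_range; as_array()
# later subtracts the constant again when reporting gene values.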
#=======================================================================
sub init { 
	my ( $self, $data ) = @_;
	
	croak q/You have to pass some data to "init"!/ unless $data;
	#-------------------------------------------------------------------
	$self->generation(0);
	$self->_init( $data );
	$self->_fitness( { } );
	$self->_fix_range( [ ] );
	$self->_history( [  [ ], [ ], [ ] ] );
	$self->_init_cache if $self->cache;
	#-------------------------------------------------------------------
	
	if($self->type eq q/listvector/){
		croak(q/You have to pass array reference if "type" is set to "listvector"/) unless ref $data eq 'ARRAY';
		$self->_translations( $self->_check_data_ref($data) );
	}elsif($self->type eq q/bitvector/){
		croak(q/You have to pass integer if "type" is set to "bitvector"/) if $data !~ /^\d+$/o;
		$self->_translations( [ [ 0, 1 ] ] );
		$self->_translations->[$_] = $self->_translations->[0] for 1..$data-1;
	}elsif($self->type eq q/combination/){
		croak(q/You have to pass array reference if "type" is set to "combination"/) unless ref $data eq 'ARRAY';
		$self->_translations( [ clone($data) ] );
		$self->_translations->[$_] = $self->_translations->[0] for 1..$#$data;
	}elsif($self->type eq q/rangevector/){
		croak(q/You have to pass array reference if "type" is set to "rangevector"/) unless ref $data eq 'ARRAY';
		$self->_translations( $self->_find_fix_range( $self->_check_data_ref($data) ));
	}else{
		croak(q/You have to specify first "type" of vector!/);
	}
	
	my $size = 0;

	if($self->type ne q/rangevector/){ for(@{$self->_translations}){ $size = $#$_ if $#$_ > $size; } }
#	else{ for(@{$self->_translations}){ $size = $_->[1] if $_->[1] > $size; } }
	else{ for(@{$self->_translations}){ $size = $_->[2] if $_->[2] > $size; } }		# Provisional patch for rangevector values truncated to signed  8-bit quantities. Thx to Tod Hagan

	my $package = get_package_by_element_size($size);
	$self->_package($package);

	my $length = ref $data ? sub { $#$data; } : sub { $data - 1 };
	if($self->variable_length){
		$length = ref $data ? sub { 1 + int( rand( $#{ $self->_init } ) ); } : sub { 1 + int( rand( $self->_init - 1) ); };
	}

	$self->_length( $length );

	$self->chromosomes( [ ] );
	push @{$self->chromosomes}, 
		AI::Genetic::Pro::Chromosome->new($self->_translations, $self->type, $package, $length->())
			for 1..$self->population;
	
	$self->_calculate_fitness_all();
}
#=======================================================================
# SAVE / LOAD ##########################################################
#=======================================================================
sub spew {
	#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
	STORABLE->use( qw( store retrieve freeze thaw ) ) or croak(q/You need "/.STORABLE.q/" module to save a state of "/.__PACKAGE__.q/"!/);
	$Storable::Deparse = 1;
	$Storable::Eval = 1;
	#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
	my ( $self ) = @_;
 	
	my $clone = { 
		_selector	=> undef,
		_strategist	=> undef,
		_mutator	=> undef,
	};
	
	$clone->{ chromosomes } = [ map { ${ tied( @$_ ) } } @{ $self->chromosomes } ] 
		if $self->_package;
	
	foreach my $key(keys %$self){
		next if exists $clone->{$key};
		$clone->{$key} = $self->{$key};
	}
	
	return $clone;
}
#=======================================================================
sub slurp {
	my ( $self, $dump ) = @_;

	if( my $typ = $self->_package ){ 
		@{ $dump->{ chromosomes } } = map {
			my $arr = $typ->make_with_packed( $_ );
			bless $arr, q[AI::Genetic::Pro::Chromosome];
		} @{ $dump->{ chromosomes } };
	}
    
    %$self = %$dump;
    
	return 1;
}
#=======================================================================
sub save { 
	my ( $self, $file ) = @_;
	
	croak(q/You have to specify file!/) unless defined $file;
	
	store( $self->spew, $file );
}
#=======================================================================
sub load { 
	#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
	STORABLE->use( qw( store retrieve freeze thaw ) ) or croak(q/You need "/.STORABLE.q/" module to load a state of "/.__PACKAGE__.q/"!/);	
	$Storable::Deparse = 1;
	$Storable::Eval = 1;
	#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
	my ($self, $file) = @_;
	croak(q/You have to specify file!/) unless defined $file;

	my $clone = retrieve($file);
	return carp('Incorrect file!') unless $clone;
	
	return $self->slurp( $clone );
}
#=======================================================================
# CHARTS ###############################################################
#=======================================================================
sub chart { 
	GD->require or croak(q/You need "/.GD.q/" module to draw chart of evolution!/);	
	my ($self, %params) = (shift, @_);

	my $graph = GD()->new(($params{-width} || 640), ($params{-height} || 480));

	my $data = $self->getHistory;

	if(defined $params{-font}){
    	$graph->set_title_font  ($params{-font}, 12);
    	$graph->set_x_label_font($params{-font}, 10);
    	$graph->set_y_label_font($params{-font}, 10);
    	$graph->set_legend_font ($params{-font},  8);
	}
	
    $graph->set_legend(
    	$params{legend1} || q/Max value/,
    	$params{legend2} || q/Mean value/,
    	$params{legend3} || q/Min value/,
    );

    $graph->set(
        x_label_skip        => int(($data->[0]->[-1]*4)/100),
        x_labels_vertical   => 1,
        x_label_position    => .5,
        y_label_position    => .5,
        y_long_ticks        => 1,   # horizontal lines
        x_ticks             => 1,   # horizontal lines

        l_margin            => 10,
        b_margin            => 10,
        r_margin            => 10,
        t_margin            => 10,

        show_values         => (defined $params{-show_values} ? 1 : 0),
        values_vertical     => 1,
        values_format       => ($params{-format} || '%.2f'),

        zero_axis           => 1,
        #interlaced          => 1,
        logo_position       => 'BR',
        legend_placement    => 'RT',

        bgclr               => 'white',
        boxclr              => '#FFFFAA',
        transparent         => 0,

        title       		=> ($params{'-title'}   || q/Evolution/ ),
        x_label     		=> ($params{'-x_label'} || q/Generation/),
        y_label     		=> ($params{'-y_label'} || q/Value/     ),
        
        ( $params{-logo} && -f $params{-logo} ? ( logo => $params{-logo} ) : ( ) )
    );
	
	
    my $gd = $graph->plot( [ [ 0..$#{$data->[0]} ], @$data ] ) or croak($@);
    open(my $fh, '>', $params{-filename}) or croak($@);
    binmode $fh;
    print $fh $gd->png;
    close $fh;
    
    return 1;
}
#=======================================================================
# TRANSLATIONS #########################################################
#=======================================================================
sub as_array_def_only {
	my ($self, $chromosome) = @_;
	
	return $self->as_array($chromosome) 
		if not $self->variable_length or $self->variable_length < 2;
	
	if( $self->type eq q/bitvector/ ){
		return $self->as_array($chromosome);
	}else{
		my $ar = $self->as_array($chromosome);
		my $idx = first_index { $_ } @$ar;
		my @array = @$ar[$idx..$#$chromosome];
		return @array if wantarray;
		return \@array;
	}
}
#=======================================================================
sub as_array {
	my ($self, $chromosome) = @_;
	
	if($self->type eq q/bitvector/){
		# This could lead to an internal error, because of the underlying Tie::Array::Packed
		#return @$chromosome if wantarray;
		#return $chromosome;
		
		my @chr = @$chromosome;
		return @chr if wantarray;
		return \@chr;
		
	}elsif($self->type eq q/rangevector/){
		my $fix_range = $self->_fix_range;
		my $c = -1;
		#my @array = map { $c++; warn "WARN: $c | ",scalar @$chromosome,"\n" if not defined $fix_range->[$c]; $_ ? $_ - $fix_range->[$c] : undef } @$chromosome;
		my @array = map { $c++; $_ ? $_ - $fix_range->[$c] : undef } @$chromosome;
		
		return @array if wantarray;
		return \@array;
	}else{
		my $cnt = 0;
		my @array = map { $self->_translations->[$cnt++]->[$_] } @$chromosome;
		return @array if wantarray;
		return \@array;
	}
}
#=======================================================================
sub as_string_def_only {	
	my ($self, $chromosome) = @_;
	
	return $self->as_string($chromosome) 
		if not $self->variable_length or $self->variable_length < 2;

	my $array = $self->as_array_def_only($chromosome);
	
	return join(q//, @$array) if $self->type eq q/bitvector/;
	return join(q/___/, @$array);
}
#=======================================================================
sub as_string {	
	return join(q//, @{$_[1]}) if $_[0]->type eq q/bitvector/;
	return 	join(q/___/, map { defined $_ ? $_ : q/ / } $_[0]->as_array($_[1]));
}
#=======================================================================
sub as_value { 
	my ($self, $chromosome) = @_;
	croak(q/You MUST call 'as_value' as method of 'AI::Genetic::Pro' object./)
		unless defined $_[0] and ref $_[0] and ( ref $_[0] eq 'AI::Genetic::Pro' or ref $_[0] eq 'AI::Genetic::Pro::MCE');
	croak(q/You MUST pass 'AI::Genetic::Pro::Chromosome' object to 'as_value' method./) 
		unless defined $_[1] and ref $_[1] and ref $_[1] eq 'AI::Genetic::Pro::Chromosome';
	return $self->fitness->($self, $chromosome);  
}
#=======================================================================
# ALGORITHM ############################################################
#=======================================================================
sub _calculate_fitness_all {
	my ($self) = @_;
	
	$self->_fitness( { } );
	$self->_fitness->{$_} = $self->fitness()->($self, $self->chromosomes->[$_]) 
		for 0..$#{$self->chromosomes};

# sorting the population is not necessary	
#	my (@chromosomes, %fitness);
#	for my $idx (sort { $self->_fitness->{$a} <=> $self->_fitness->{$b} } keys %{$self->_fitness}){
#		push @chromosomes, $self->chromosomes->[$idx];

lib/AI/Genetic/Pro.pm

#	}
#	
#	$self->_fitness(\%fitness);
#	$self->chromosomes(\@chromosomes);

	return;
}
#=======================================================================
sub _select_parents {
	my ($self) = @_;
	unless($self->_selector){
		croak "You must specify a selection strategy!"
			unless defined $self->selection;
		my @tmp = @{$self->selection};
		my $selector = q/AI::Genetic::Pro::Selection::/ . shift @tmp;
		$selector->require or die $!;
		$self->_selector($selector->new(@tmp));
	}
	
	$self->_parents($self->_selector->run($self));
	
	return;
}
#=======================================================================
sub _crossover {
	my ($self) = @_;
	
	unless($self->_strategist){
		my @tmp = @{$self->strategy};
		my $strategist = q/AI::Genetic::Pro::Crossover::/ . shift @tmp;
		$strategist->require or die $!;
		$self->_strategist($strategist->new(@tmp));
	}

	my $a = $self->_strategist->run($self);
	$self->chromosomes( $a );
	
	return;
}
#=======================================================================
sub _mutation {
	my ($self) = @_;
	
	unless($self->_mutator){
		my $mutator = q/AI::Genetic::Pro::Mutation::/ . ucfirst(lc($self->type));
		unless($mutator->require){
			$mutator = q/AI::Genetic::Pro::Mutation::Listvector/;
			$mutator->require;
		}
		$self->_mutator($mutator->new);
	}
	
	return $self->_mutator->run($self);
}
#=======================================================================
sub _save_history {
	my @tmp;
	if($_[0]->history){ @tmp = $_[0]->getAvgFitness; }
	else { @tmp = (undef, undef, undef); }
	
	push @{$_[0]->_history->[0]}, $tmp[0]; 
	push @{$_[0]->_history->[1]}, $tmp[1];
	push @{$_[0]->_history->[2]}, $tmp[2];
	return 1;
}
#=======================================================================
sub inject {
	my ($self, $candidates) = @_;
	
	for(@$candidates){
		push @{$self->chromosomes}, 
			AI::Genetic::Pro::Chromosome->new_from_data($self->_translations, $self->type, $self->_package, $_, $self->_fix_range);
		$self->_fitness->{$#{$self->chromosomes}} = $self->fitness()->($self, $self->chromosomes->[-1]);

	}			
	$self->_strict( [ ] );
	$self->population( $self->population + scalar( @$candidates ) );

	return 1;
}
#=======================================================================
sub _state {
	my ( $self ) = @_;
	
	my @res;
	
	if( $self->_package ){
		@res = map { 
			[
				${ tied( @{ $self->chromosomes->[ $_ ] } ) },
				$self->_fitness->{ $_ },
			]
		} 0 .. $self->population - 1
	}else{
		@res = map { 
			[
				$self->chromosomes->[ $_ ],
				$self->_fitness->{ $_ },
			]
		} 0 .. $self->population - 1
	}
	
	return \@res;
}
#=======================================================================
sub evolve {
	my ($self, $generations) = @_;

	# default to -1, i.e. evolve until the terminate() callback stops us
	$generations ||= -1;
	
	if($self->strict and $self->_strict){
		for my $idx (0..$#{$self->chromosomes}){
			croak(q/Chromosomes were modified outside the 'evolve' function!/) unless $self->chromosomes->[$idx] and $self->_strict->[$idx];
			my @tmp0 = @{$self->chromosomes->[$idx]};
			my @tmp1 = @{$self->_strict->[$idx]};
			croak(qq/Chromosome was modified outside the 'evolve' function from "@tmp0" to "@tmp1"!/) unless compare(\@tmp0, \@tmp1);
		}
	}
	
	# split into two loops just for speed
	unless($self->preserve){
		for(my $i = 0; $i != $generations; $i++){
			# terminate ----------------------------------------------------
			last if $self->terminate and $self->terminate->($self);
			# update generation --------------------------------------------
			$self->generation($self->generation + 1);
			# update history -----------------------------------------------
			$self->_save_history;
			# selection ----------------------------------------------------
			$self->_select_parents();
			# crossover ----------------------------------------------------
			$self->_crossover();
			# mutation -----------------------------------------------------
			$self->_mutation();
		}
	}else{
		croak('You cannot preserve more chromosomes than exist in the population!') if $self->preserve > $self->population;
		my @preserved;
		for(my $i = 0; $i != $generations; $i++){
			# terminate ----------------------------------------------------
			last if $self->terminate and $self->terminate->($self);
			# update generation --------------------------------------------
			$self->generation($self->generation + 1);
			# update history -----------------------------------------------
			$self->_save_history;
			#---------------------------------------------------------------
			# preservation of N unique chromosomes
			@preserved = map { clone($_) } @{ $self->getFittest_as_arrayref($self->preserve - 1, 1) };
			# selection ----------------------------------------------------
			$self->_select_parents();
			# crossover ----------------------------------------------------
			$self->_crossover();
			# mutation -----------------------------------------------------
			$self->_mutation();
			#---------------------------------------------------------------
			for(@preserved){
				my $idx = int rand @{$self->chromosomes};
				$self->chromosomes->[$idx] = $_;
				$self->_fitness->{$idx} = $self->fitness()->($self, $_);
			}
		}
	}
	
	if($self->strict){
		$self->_strict( [ ] );
		push @{$self->_strict}, $_->clone for @{$self->chromosomes};
	}
}
#=======================================================================
# ALIASES ##############################################################
#=======================================================================
sub people { $_[0]->chromosomes() }

lib/AI/Genetic/Pro.pm

sub intType { shift->type() }
#=======================================================================
# STATS ################################################################
#=======================================================================
sub getFittest_as_arrayref { 
	my ($self, $n, $uniq) = @_;
	$n ||= 1;
	
	$self->_calculate_fitness_all() unless scalar %{ $self->_fitness };
	my @keys = sort { $self->_fitness->{$a} <=> $self->_fitness->{$b} } 0..$#{$self->chromosomes};
	
	if($uniq){
		my %grep;
		my $chromosomes = $self->chromosomes;
		if( my $pkg = $self->_package ){
			my %tmp;
			@keys = grep { 
				my $key = ${ tied( @{ $chromosomes->[ $_ ] } ) };
				#my $key = md5_hex( ${ tied( @{ $chromosomes->[ $_ ] } ) } ); # ?
				$tmp{ $key } && 0 or $tmp{ $key } = 1;
			} @keys;
			#@keys = grep { 
			#		my $add_to_list = 0;
			#		my $key = md5_hex(${tied(@{$chromosomes->[$_]})});
			#		unless($grep{$key}) { 
			#			$grep{$key} = 1; 
			#			$add_to_list = 1;
			#		}
			#		$add_to_list;
			#	} @keys;
		}else{
			my %tmp;
			@keys = grep { 
				my $key = md5_hex( join( q[:], @{ $chromosomes->[ $_ ] } ) );
				$tmp{ $key } && 0 or $tmp{ $key } = 1;
			} @keys;
		}
	}
	
	$n = scalar @keys if $n > scalar @keys;
	return [ reverse @{$self->chromosomes}[ splice @keys, $#keys - $n + 1, $n ] ];
}
#=======================================================================
sub getFittest { return wantarray ? @{ shift->getFittest_as_arrayref(@_) } : shift @{ shift->getFittest_as_arrayref(@_) }; }
#=======================================================================
sub getAvgFitness {
	my ($self) = @_;
	
	my @minmax = minmax values %{$self->_fitness};
	my $mean = sum(values %{$self->_fitness}) / scalar values %{$self->_fitness};
	return $minmax[1], int($mean), $minmax[0];
}
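# Example usage (illustrative):
#   my ($max, $mean, $min) = $ga->getAvgFitness();
# Note that the mean is truncated to an integer by int().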
#=======================================================================
1;


lib/AI/Genetic/Pro.pm


AI::Genetic::Pro - Efficient genetic algorithms for professional purposes, with support for multiprocessing.

=head1 SYNOPSIS

    use AI::Genetic::Pro;
    
    sub fitness {
        my ($ga, $chromosome) = @_;
        return oct('0b' . $ga->as_string($chromosome)); 
    }
    
    sub terminate {
        my ($ga) = @_;
        my $result = oct('0b' . $ga->as_string($ga->getFittest));
        return $result == 4294967295 ? 1 : 0;
    }
    
    my $ga = AI::Genetic::Pro->new(        
        -fitness         => \&fitness,        # fitness function
        -terminate       => \&terminate,      # terminate function
        -type            => 'bitvector',      # type of chromosomes
        -population      => 1000,             # population
        -crossover       => 0.9,              # probab. of crossover
        -mutation        => 0.01,             # probab. of mutation
        -parents         => 2,                # number  of parents
        -selection       => [ 'Roulette' ],   # selection strategy
        -strategy        => [ 'Points', 2 ],  # crossover strategy
        -cache           => 0,                # cache results
        -history         => 1,                # remember best results
        -preserve        => 3,                # remember the bests
        -variable_length => 1,                # turn variable length ON
        -mce             => 1,                # optional MCE support
        -workers         => 3,                # number of workers (MCE)
    );
	
    # init population of 32-bit vectors
    $ga->init(32);
	
    # evolve 10 generations
    $ga->evolve(10);
    
    # best score
    print "SCORE: ", $ga->as_value($ga->getFittest), ".\n";
    
    # save evolution path as a chart
    $ga->chart(-filename => 'evolution.png');
     
    # save state of GA
    $ga->save('genetic.sga');
    
    # load state of GA
    $ga->load('genetic.sga');

=head1 DESCRIPTION

This module provides an efficient implementation of a genetic algorithm for
professional use, with support for multiprocessing. It was designed to operate as fast as possible

lib/AI/Genetic/Pro.pm


=item -preserve

This defines the injection of the best chromosomes into the next generation (elitism). It causes a slight slowdown; however, much better results are often achieved. You can specify how many chromosomes will be preserved, e.g.

    -preserve => 1, # only one chromosome will be preserved
    # or
    -preserve => 9, # 9 chromosomes will be preserved
    # and so on...

Attention! You cannot preserve more chromosomes than exist in your population.

=item -variable_length

lib/AI/Genetic/Pro.pm


=item level 0

The feature is inactive (default). Example:

	-variable_length => 0
	
    # chromosomes (i.e. bitvectors)
    0 1 0 0 1 1 0 1 1 1 0 1 0 1
    0 0 1 1 0 1 1 1 1 0 0 1 1 0
    0 1 1 1 0 1 0 0 1 1 0 1 1 1
    0 1 0 0 1 1 0 1 1 1 1 0 1 0
    # ...and so on

=item level 1 

The feature is active, but chromosome lengths can vary B<only on the right side>. Example:

	-variable_length => 1
	
    # chromosomes (i.e. bitvectors)
    0 1 0 0 1 1 0 1 1 1 
    0 0 1 1 0 1 1 1 1
    0 1 1 1 0 1 0 0 1 1 0 1 1 1
    0 1 0 0 1 1 0 1 1 1
    # ...and so on
	
=item level 2 

The feature is active and chromosome lengths can vary B<on the left side and on
the right side>; unwanted values/genes on the left side are replaced with C<undef>, e.g.
 
	-variable_length => 2
 
    # chromosomes (i.e. bitvectors)
    x x x 0 1 1 0 1 1 1 
    x x x x 0 1 1 1 1
    x 1 1 1 0 1 0 0 1 1 0 1 1 1
    0 1 0 0 1 1 0 1 1 1
    # where 'x' means 'undef'
    # ...and so on

In this situation, chromosomes returned in an array context ($ga-E<gt>as_array($chromosome))
can have B<undef> values on the left side (only). In a scalar context each
undefined value is replaced with a single space. If you don't want to see
any C<undef> or space, just use C<as_array_def_only> and C<as_string_def_only>

lib/AI/Genetic/Pro.pm


=item -selection

This defines how individuals/chromosomes are selected to crossover. It expects an array reference listed below:

    -selection => [ $type, @params ]

where type is one of:

=over 8

lib/AI/Genetic/Pro.pm


=item C<-selection =E<gt> [ 'RouletteDistribution', 'beta', $aa, $bb ]>

I<Beta> distribution.  The density of the beta is:

    X^($aa - 1) * (1 - X)^($bb - 1) / B($aa , $bb) for 0 < X < 1.

C<$aa> and C<$bb> are set by default to the number of parents.

B<Argument restrictions:> Both $aa and $bb must not be less than 1.0E-37.

lib/AI/Genetic/Pro.pm


=item C<-selection =E<gt> [ 'Distribution', 'beta', $aa, $bb ]>

I<Beta> distribution.  The density of the beta is:

    X^($aa - 1) * (1 - X)^($bb - 1) / B($aa , $bb) for 0 < X < 1.

C<$aa> and C<$bb> are set by default to the number of parents.

B<Argument restrictions:> Both $aa and $bb must not be less than 1.0E-37.

lib/AI/Genetic/Pro.pm

=item -strategy 

This defines the strategy of the crossover operation. It expects an array
reference as listed below:

    -strategy => [ $type, @params ]

where type is one of:

=over 4

=item PointsSimple

Simple crossover in one or many points. The best chromosomes/individuals are
selected for the new generation. For example:

    -strategy => [ 'PointsSimple', $n ]

where C<$n> is the number of points for crossing.

=item PointsBasic

Crossover in one or many points. In basic crossover selected parents are
crossed and one (randomly-chosen) child is moved to the new generation. For
example:

    -strategy => [ 'PointsBasic', $n ]

where C<$n> is the number of points for crossing.

=item Points

Crossover in one or many points. In normal crossover selected parents are crossed and the best child is moved to the new generation. For example:

    -strategy => [ 'Points', $n ]

where C<$n> is number of points for crossing.

=item PointsAdvanced

Crossover in one or many points. After crossover, the best
chromosomes/individuals from all parents and children are selected for the new
generation. For example:

    -strategy => [ 'PointsAdvanced', $n ]

where C<$n> is the number of points for crossing.
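
Putting it together, a minimal constructor sketch using this strategy (the fitness callback and parameter values are illustrative assumptions, not module defaults):

    my $ga = AI::Genetic::Pro->new(
        -fitness  => \&fitness,                  # your fitness function
        -type     => 'bitvector',
        -strategy => [ 'PointsAdvanced', 2 ],    # two-point advanced crossover
    );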

=item Distribution

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


=item C<-strategy =E<gt> [ 'Distribution', 'beta', $aa, $bb ]>

I<Beta> distribution.  The density of the beta is:

    X^($aa - 1) * (1 - X)^($bb - 1) / B($aa , $bb) for 0 < X < 1.

C<$aa> and C<$bb> are set by default to the number of parents.

B<Argument restrictions:> Both $aa and $bb must not be less than 1.0E-37.

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


=item I<$ga>-E<gt>B<inject>($chromosomes)

Inject new, user-defined chromosomes into the current population. See the example below:

    # example for bitvector
    my $chromosomes = [
        [ 1, 1, 0, 1, 0, 1 ],
        [ 0, 0, 0, 1, 0, 1 ],
        [ 0, 1, 0, 1, 0, 0 ],
        ...
    ];
    
    # inject
    $ga->inject($chromosomes);

If you want to delete some chromosomes from the population, just C<splice> them
(in descending index order, so earlier removals don't shift later indices):

    my @remove = qw(1 2 3 9 12);
    for my $idx (sort { $b <=> $a } @remove) {
        splice @{$ga->chromosomes}, $idx, 1;
    }

=item I<$ga>-E<gt>B<population>($population)

Set/get the size of the population, i.e. how many chromosomes exist simultaneously in each generation.
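
A minimal usage sketch:

    # enlarge the population to 200 chromosomes
    $ga->population(200);

    # query the current population size
    my $size = $ga->population;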

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


=back

For example:

    my $type = $ga->type();

=item I<$ga>-E<gt>B<type>()

Alias for C<indType>.

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


=item B<bitvector>

For bitvectors, the argument is simply the length of the bitvector.

    $ga->init(10);

This initializes a population where each individual/chromosome has 10 genes.

=item B<listvector>

For listvectors, the argument is an anonymous list of lists. The number of sub-lists is equal to the number of genes of each individual/chromosome. Each sub-list defines the possible string values that the corresponding gene can assume.

    $ga->init([
               [qw/red blue green/],
               [qw/big medium small/],
               [qw/very_fat fat fit thin very_thin/],
              ]);

This initializes a population where each individual/chromosome has 3 genes and each gene can assume one of the given values.

=item B<rangevector>

For rangevectors, the argument is an anonymous list of lists. The number of sub-lists is equal to the number of genes of each individual/chromosome. Each sub-list defines the minimum and maximum integer values that the corresponding gene can assume.

    $ga->init([
               [1, 5],
               [0, 20],
               [4, 9],
              ]);

This initializes a population where each individual/chromosome has 3 genes and each gene can assume an integer within the corresponding range.

=item B<combination>

For combination, the argument is an anonymous list of the possible gene values.

    $ga->init( [ 'a', 'b', 'c' ] );

This initializes a population where each chromosome has 3 genes and each
chromosome is a unique arrangement (permutation) of 'a', 'b' and 'c'. For
example, chromosomes look something like this:

    [ 'a', 'b', 'c' ]    # chromosome 1
    [ 'c', 'a', 'b' ]    # chromosome 2
    [ 'b', 'c', 'a' ]    # chromosome 3
    # ...and so on...

=back

=item I<$ga>-E<gt>B<evolve>($n)

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


=item I<$ga>-E<gt>B<getHistory>()

Get the history of the evolution. It is returned in the format shown below:

	[
		# gen0   gen1   gen2   ...          # generations
		[ max0,  max1,  max2,  ... ],       # max values
		[ mean0, mean1, mean2, ... ],       # mean values
		[ min0,  min1,  min2,  ... ],       # min values
	]
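
For example, a minimal sketch of reading this structure (assuming C<getHistory> returns a reference to the array shown above):

    my ($max, $mean, $min) = @{ $ga->getHistory };
    print "Generation 0: max score was $max->[0]\n";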

=item I<$ga>-E<gt>B<getAvgFitness>()

Get the I<max>, I<mean> and I<min> scores of the current generation. For example:

    my ($max, $mean, $min) = $ga->getAvgFitness();

=item I<$ga>-E<gt>B<getFittest>($n, $unique)

This function returns a list of the fittest chromosomes from the current
population.  You can specify how many chromosomes should be returned and if
the returned chromosomes should be unique. See example below.

    # only one - the best
    my ($best) = $ga->getFittest;

    # or the 5 best chromosomes, NOT unique
    my @bests = $ga->getFittest(5);

    # or the 7 best, UNIQUE chromosomes
    my @bests = $ga->getFittest(7, 1);

If you want to get a large number of chromosomes, try to use the
C<getFittest_as_arrayref> function instead (for efficiency).

=item I<$ga>-E<gt>B<getFittest_as_arrayref>($n, $unique)
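
By analogy with C<getFittest> (the argument meanings are assumed to match):

    # arrayref of the 100 best, unique chromosomes
    my $bests = $ga->getFittest_as_arrayref(100, 1);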

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


Path to logo (png/jpg image) to embed in a chart (default: none).

=item For example:

	$ga->chart(-width => 480, -height => 320, -filename => 'chart.png');

=back

=item I<$ga>-E<gt>B<save>($file)

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

chromosome. If I<variable_length> is turned off, this function is just an
alias for C<as_array>. If I<variable_length> is turned on and is set to 
level 2, this function will return only the defined (not-C<undef>) values from the chromosome. 
See example below:

    # -variable_length => 2, -type => 'bitvector'
	
    my @chromosome = $ga->as_array($chromosome);
    # @chromosome looks something like this
    # ( undef, undef, undef, 1, 0, 1, 1, 1, 0 )

    @chromosome = $ga->as_array_def_only($chromosome);
    # @chromosome looks something like this
    # ( 1, 0, 1, 1, 1, 0 )

=item I<$ga>-E<gt>B<as_string>($chromosome)

Return a string representation of the specified chromosome. See the example below:

	# -type => 'bitvector'

	my $string = $ga->as_string($chromosome);
	# $string looks something like this
	# 1___0___1___1___1___0

	# or

	# -type => 'listvector'

	$string = $ga->as_string($chromosome);
	# $string looks something like this
	# element0___element1___element2___element3...

Attention! If I<variable_length> is turned on and is set to level 2, it is 
possible to get C<undef> values on the left side of the vector. In the returned
string C<undef> values will be replaced with B<spaces>. If you don't want
to see any I<spaces>, use C<as_string_def_only> instead of C<as_string>.

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

Return a string representation of the specified chromosome. If I<variable_length> 
is turned off, this function is just an alias for C<as_string>. If I<variable_length> 
is turned on and is set to level 2, this function will return a string without
C<undef> values. See the example below:

	# -variable_length => 2, -type => 'bitvector'

	my $string = $ga->as_string($chromosome);
	# $string looks something like this
	#  ___ ___ ___1___1___0

	$string = $ga->as_string_def_only($chromosome);
	# $string looks something like this
	# 1___1___0

=item I<$ga>-E<gt>B<as_value>($chromosome)

Return the score of the specified chromosome. The value of I<chromosome> is 
calculated by the fitness function.
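
A minimal sketch:

    my ($best) = $ga->getFittest;
    my $score  = $ga->as_value($best);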

 view all matches for this distribution


AI-Genetic

 view release on metacpan or  search on metacpan

Genetic.pm  view on Meta::CPAN


##### Shared private vars
# this hash predefines some strategies

my %_strategy = (
		 rouletteSinglePoint => \&AI::Genetic::Defaults::rouletteSinglePoint,
		 rouletteTwoPoint    => \&AI::Genetic::Defaults::rouletteTwoPoint,
		 rouletteUniform     => \&AI::Genetic::Defaults::rouletteUniform,

		 tournamentSinglePoint => \&AI::Genetic::Defaults::tournamentSinglePoint,
		 tournamentTwoPoint    => \&AI::Genetic::Defaults::tournamentTwoPoint,
		 tournamentUniform     => \&AI::Genetic::Defaults::tournamentUniform,

		 randomSinglePoint => \&AI::Genetic::Defaults::randomSinglePoint,
		 randomTwoPoint    => \&AI::Genetic::Defaults::randomTwoPoint,
		 randomUniform     => \&AI::Genetic::Defaults::randomUniform,
		);

# this hash maps the genome types to the
# classes they're defined in.

my %_genome2class = (
		     bitvector   => 'AI::Genetic::IndBitVector',
		     rangevector => 'AI::Genetic::IndRangeVector',
		     listvector  => 'AI::Genetic::IndListVector',
		    );

##################

# sub new():
# This is the constructor. It creates a new AI::Genetic

Genetic.pm  view on Meta::CPAN

# -fitness:    set the fitness function
# -type:       set the genome type. See docs.
# -terminate:  set termination sub.

sub new {
  my ($class, %args) = @_;

  my $self = bless {
		    ADDSEL => {},   # user-defined selections
		    ADDCRS => {},   # user-defined crossovers
		    ADDMUT => {},   # user-defined mutations
		    ADDSTR => {},   # user-defined strategies
		   } => $class;

  $self->{FITFUNC}    = $args{-fitness}    || sub { 1 };
  $self->{CROSSRATE}  = $args{-crossover}  || 0.95;
  $self->{MUTPROB}    = $args{-mutation}   || 0.05;
  $self->{POPSIZE}    = $args{-population} || 100;
  $self->{TYPE}       = $args{-type}       || 'bitvector';
  $self->{TERM}       = $args{-terminate}  || sub { 0 };

  $self->{PEOPLE}     = [];   # list of individuals
  $self->{GENERATION} = 0;    # current gen.

  $self->{INIT}       = 0;    # whether pop is initialized or not.
  $self->{SORTED}     = 0;    # whether the population is sorted by score or not.
  $self->{INDIVIDUAL} = '';   # name of individual class to use().

  return $self;
}

# sub createStrategy():
# This method creates a new strategy.
# It takes two arguments: name of strategy, and
# anon sub that implements it.

sub createStrategy {
  my ($self, $name, $sub) = @_;

  if (ref($sub) eq 'CODE') {
    $self->{ADDSTR}{$name} = $sub;
  } else {
    # we don't know what this operation is.
    carp <<EOC;
ERROR: Must specify anonymous subroutine for strategy.
       Strategy '$name' will be deleted.
EOC
    delete $self->{ADDSTR}{$name};
    return undef;
  }

  return $name;
}

# sub evolve():
# This method evolves the population using a specific strategy
# for a specific number of generations.

sub evolve {
  my ($self, $strategy, $gens) = @_;

  unless ($self->{INIT}) {
    carp "can't evolve() before init()";
    return undef;
  }

  my $strSub;
  if      (exists $self->{ADDSTR}{$strategy}) {
    $strSub = $self->{ADDSTR}{$strategy};
  } elsif (exists $_strategy{$strategy}) {
    $strSub = $_strategy{$strategy};
  } else {
    carp "ERROR: Do not know what strategy '$strategy' is,";
    return undef;
  }

  $gens ||= 1;

  for my $i (1 .. $gens) {
    $self->sortPopulation;
    $strSub->($self);

    $self->{GENERATION}++;
    $self->{SORTED} = 0;

    last if $self->{TERM}->($self);

#    my @f = $self->getFittest(10);
#    for my $f (@f) {
#      print STDERR "    Fitness = ", $f->score, "..\n";
#      print STDERR "    Genes are: @{$f->genes}.\n";
#    }
  }
}

# sub sortIndividuals():
# This method takes as input an anon list of individuals, and returns
# another anon list of the same individuals but sorted in decreasing
# score.

sub sortIndividuals {
  my ($self, $list) = @_;

  # make sure all score's are calculated.
  # This is to avoid a bug in Perl where a sort is called from within another
  # sort, and they are in different packages, then you get a use of uninit value
  # warning. See http://rt.perl.org/rt3/Ticket/Display.html?id=7063
  $_->score for @$list;

  return [sort {$b->score <=> $a->score} @$list];
}

# sub sortPopulation():
# This method sorts the population of individuals.

sub sortPopulation {
  my $self = shift;

  return if $self->{SORTED};

  $self->{PEOPLE} = $self->sortIndividuals($self->{PEOPLE});
  $self->{SORTED} = 1;
}

# sub getFittest():
# This method returns the fittest individuals.

sub getFittest {
  my ($self, $N) = @_;

  $N ||= 1;
  $N = 1 if $N < 1;

  $N = @{$self->{PEOPLE}} if $N > @{$self->{PEOPLE}};

  $self->sortPopulation;

  my @r = @{$self->{PEOPLE}}[0 .. $N-1];

  return $r[0] if $N == 1 && not wantarray;

  return @r;
}

# sub init():
# This method initializes the population to completely
# random individuals. It deletes all current individuals!!!

Genetic.pm  view on Meta::CPAN

# each sub-anon list has two elements, min number and max number.
# In case of listvector, $newArgs is anon list of anon lists.
# Each sub-anon list contains possible values of gene.

sub init {
  my ($self, $newArgs) = @_;

  $self->{INIT} = 0;

  my $ind;
  if (exists $_genome2class{$self->{TYPE}}) {
    $ind = $_genome2class{$self->{TYPE}};
  } else {
    $ind = $self->{TYPE};
  }

  eval "use $ind";  # does this work if package is in same file?
  if ($@) {
    carp "ERROR: Init failed. Can't require '$ind': $@,";
    return undef;
  }

  $self->{INDIVIDUAL} = $ind;
  $self->{PEOPLE}     = [];
  $self->{SORTED}     = 0;
  $self->{GENERATION} = 0;
  $self->{INITARGS}   = $newArgs;

  push @{$self->{PEOPLE}} =>
    $ind->newRandom($newArgs) for 1 .. $self->{POPSIZE};

  $_->fitness($self->{FITFUNC}) for @{$self->{PEOPLE}};

  $self->{INIT} = 1;
}

# sub people():
# returns the current list of individuals in the population.
# note: this returns the actual array ref, so any changes
# made to it (ex, shift/pop/etc) will be reflected in the
# population.

sub people {
  my $self = shift;

  if (@_) {
    $self->{PEOPLE} = shift;
    $self->{SORTED} = 0;
  }

  $self->{PEOPLE};
}

# useful little methods to set/query parameters.
sub size       { $_[0]{POPSIZE}    = $_[1] if defined $_[1]; $_[0]{POPSIZE}   }
sub crossProb  { $_[0]{CROSSRATE}  = $_[1] if defined $_[1]; $_[0]{CROSSRATE} }

Genetic.pm  view on Meta::CPAN

# If the number, n, of genomes to inject is less than N, N - n random
# genomes are added. Perhaps an example will help?
# returns 1 on success and undef on error.

sub inject {
  my ($self, $count, @genomes) = @_;

  unless ($self->{INIT}) {
    carp "can't inject() before init()";
    return undef;
  }

  my $ind = $self->{INDIVIDUAL};

  my @newInds;
  for my $i (1 .. $count) {
    my $genes = shift @genomes;

    if ($genes) {
      push @newInds => $ind->newSpecific($genes, $self->{INITARGS});
    } else {
      push @newInds => $ind->newRandom  ($self->{INITARGS});      
    }
  }

  $_->fitness($self->{FITFUNC}) for @newInds;

  push @{$self->{PEOPLE}} => @newInds;

  return 1;
}

__END__

=head1 NAME

AI::Genetic - A pure Perl genetic algorithm implementation.

=head1 SYNOPSIS

    use AI::Genetic;
    my $ga = new AI::Genetic(
        -fitness    => \&fitnessFunc,
        -type       => 'bitvector',
        -population => 500,
        -crossover  => 0.9,
        -mutation   => 0.01,
        -terminate  => \&terminateFunc,
       );

     $ga->init(10);
     $ga->evolve('rouletteTwoPoint', 100);
     print "Best score = ", $ga->getFittest->score, ".\n";

     sub fitnessFunc {
         my $genes = shift;

         my $fitness;
         # assign a number to $fitness based on the @$genes
         # ...

         return $fitness;
      }

      sub terminateFunc {
         my $ga = shift;

         # terminate if reached some threshold.
         return 1 if $ga->getFittest->score > $THRESHOLD;
         return 0;
      }

=head1 DESCRIPTION

This module implements a Genetic Algorithm (GA) in pure Perl.
Other Perl modules that achieve the same thing (perhaps better,

Genetic.pm  view on Meta::CPAN


=item o

For bitvectors, the argument is simply the length of the bitvector.

    $ga->init(10);

this initializes a population where each individual has 10 genes.

=item o

For listvectors, the argument is an anonymous list of lists. The
number of sub-lists is equal to the number of genes of each individual.
Each sub-list defines the possible string values that the corresponding gene
can assume.

    $ga->init([
               [qw/red blue green/],
               [qw/big medium small/],
               [qw/very_fat fat fit thin very_thin/],
              ]);

this initializes a population where each individual has 3 genes, and each gene
can assume one of the given values.

=item o

Genetic.pm  view on Meta::CPAN

For rangevectors, the argument is an anonymous list of lists. The
number of sub-lists is equal to the number of genes of each individual.
Each sub-list defines the minimum and maximum integer values that the
corresponding gene can assume.

    $ga->init([
               [1, 5],
               [0, 20],
               [4, 9],
              ]);

this initializes a population where each individual has 3 genes, and each gene
can assume an integer within the corresponding range.

=back

Genetic.pm  view on Meta::CPAN

single individual to add. If the number of genomes given, I<n>, is less than I<N>, then
I<N> - I<n> random individuals are added for a total of I<N> new individuals. Random
individuals are generated using the same arguments passed to the I<init()> method.
For example:

  $ga->inject(5,
              [qw/red big thin/],
              [qw/blue small fat/],
             );

this adds 5 new individuals, 2 with the specified genetic coding, and 3 randomly
generated.

=item I<$ga>-E<gt>B<evolve>(I<strategy>, ?I<num_generations>?)

Genetic.pm  view on Meta::CPAN

population at this point is sorted according to each individual's
fitness score. It is expected that the strategy sub will modify
the population stored in the AI::Genetic object. Here's the
pseudo-code of events:

    for (1 .. num_generations) {
      sort population;
      call strategy_sub;
      if (termination_sub exists) {
        call termination_sub;
        last if returned true value;
      }
    }
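
To make the contract concrete, here is a minimal sketch of a custom strategy registered with C<createStrategy> (the strategy name and culling logic are illustrative assumptions; a real strategy would also breed offspring to refill the population):

    $ga->createStrategy('cullHalf', sub {
        my $ga     = shift;          # the AI::Genetic object
        my $people = $ga->people;    # already sorted by decreasing score
        # keep only the top half; a real strategy would now create
        # offspring of the survivors to restore the population size
        splice @$people, int(@$people / 2);
        $ga->people($people);
    });

    $ga->evolve('cullHalf', 10);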

=head1 A NOTE ON SPEED/EFFICIENCY

Genetic algorithms are inherently slow.
Perl can be pretty fast, but will never reach the speed of optimized

Genetic.pm  view on Meta::CPAN


=head1 INSTALLATION

Either the usual:

    perl Makefile.PL
    make
    make install

or just stick it somewhere in @INC where perl can find it. It is in pure Perl.

=head1 AUTHOR & CREDITS

 view all matches for this distribution


AI-Image

 view release on metacpan or  search on metacpan

lib/AI/Image.pm  view on Meta::CPAN

package AI::Image;

use strict;
use warnings;

use Carp;
use HTTP::Tiny;
use JSON::PP;

our $VERSION = '0.1';
$VERSION = eval $VERSION;

my $http = HTTP::Tiny->new;

# Create Image object
sub new {
    my $class = shift;
    my %attr  = @_;

    $attr{'error'}      = '';

    $attr{'api'}        = 'OpenAI' unless $attr{'api'};
    $attr{'error'}      = 'Invalid API' unless $attr{'api'} eq 'OpenAI';
    $attr{'error'}      = 'API Key missing' unless $attr{'key'};

    $attr{'model'}      = 'dall-e-2' unless $attr{'model'};
    $attr{'size'}       = '512x512'  unless $attr{'size'};

    return bless \%attr, $class;
}

# Define endpoints for APIs
my %url    = (
    'OpenAI' => 'https://api.openai.com/v1/images/generations',
);

# Define HTTP Headers for APIs
my %header = (
    'OpenAI' => &_get_header_openai,
);

# Returns true if last operation was success
sub success {
    my $self = shift;
    return !$self->{'error'};
}

# Returns error if last operation failed
sub error {
    my $self = shift;
    return $self->{'error'};
}

# Header for calling OpenAI
sub _get_header_openai {
    my $self = shift;
    $self->{'key'} = '' unless defined $self->{'key'};
    return {
         'Authorization' => 'Bearer ' . $self->{'key'},
         'Content-type'  => 'application/json'
     };
 }

# Get URL from image prompt
sub image {
    my ($self, $prompt) = @_;

    my $response = $http->post($url{$self->{'api'}}, {
         'headers' => {
             'Authorization' => 'Bearer ' . $self->{'key'},
             'Content-type'  => 'application/json'
         },
         content => encode_json {
             model          => $self->{'model'},
             size           => $self->{'size'},
             prompt         => $prompt,
         }
     });
     if ($response->{'content'} =~ 'invalid_api_key') {
         croak 'Incorrect API Key - check your API Key is correct';
     }
     
     if ($self->{'debug'} and !$response->{'success'}) {
         croak $response if $self->{'debug'} eq 'verbose';
         croak $response->{'content'};
     }

     my $reply = decode_json($response->{'content'});

     return $reply->{'data'}[0]->{'url'};
}

__END__

=head1 NAME

AI::Image - Generate images using OpenAI's DALL-E

=head1 VERSION

Version 0.1

=head1 SYNOPSIS

    use AI::Image;

    my $ai = AI::Image->new(
        'key'   => 'sk-......',
    );
    
    my $image_url = $ai->image("A photorealistic image of a cat wearing a top hat and monocle.");

    print $image_url;

=head1 DESCRIPTION

This module provides a simple interface to generate images using OpenAI's DALL-E API.

=head1 API KEYS

A free OpenAI API key can be obtained from L<https://platform.openai.com/account/api-keys>

=head1 MODELS

Although the API key is free, each use incurs a cost.  This is dependent on the
model chosen and the size.  The 'dall-e-3' model produces better images but at a
higher cost.  Likewise, bigger images cost more.
The default model C<dall-e-2> with the default size of C<512x512> produces reasonable
results at a low cost and is a good place to start using this module.

See also L<https://platform.openai.com/docs/models/overview>

=head1 METHODS

=head2 new

  my $ai = AI::Image->new(%params);

Creates a new AI::Image object.

=head3 Parameters

=over 4

=item key

C<required> Your OpenAI API key.

=item api

The API to use (currently only 'OpenAI' is supported).

=item model

The language model to use (default: 'dall-e-2').

See L<https://platform.openai.com/docs/models/overview>

=item size

The size for the generated image (default: '512x512').

=item debug

Used for testing.  If set to any true value, the image method
will return details of the error encountered instead of C<undef>.

=back
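
For example, a sketch overriding the documented defaults (the key is a placeholder and the size is assumed to be a resolution supported by the chosen model):

    my $ai = AI::Image->new(
        'key'   => 'sk-......',
        'model' => 'dall-e-3',
        'size'  => '1024x1024',
    );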

=head2 image

  my $url = $ai->image($prompt);

Generates an image based on the provided prompt and returns the URL of the generated image.  The URL is valid for 1 hour.

=head3 Parameters

=over 4

=item prompt

The textual description of the desired image.

=back

=head2 success

  my $success = $ai->success();

Returns true if the last operation was successful.

=head2 error

  my $error = $ai->error();

Returns the error message if the last operation failed.
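
A minimal sketch combining the two accessors:

    my $url = $ai->image('A watercolour of a lighthouse');
    if ($ai->success) {
        print "$url\n";
    } else {
        warn 'Image generation failed: ' . $ai->error;
    }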

=head1 EXAMPLE

Commonly the generated image will need to be saved as a file.  This can easily be achieved
using the C<getstore> function of L<LWP::Simple>.

    use strict;
    use warnings;
    
    use LWP::Simple;
    use AI::Image;
    
    my $ai = AI::Image->new(
        'key'   => 'sk-......',
    );
    
    my $image_url = $ai->image("A dog reading a newspaper");
    
    getstore( $image_url, 'my_ai_image.png' );


=head1 SEE ALSO

L<https://openai.com> - OpenAI official website

=head1 AUTHOR

Ian Boddison <ian at boddison.com>

=head1 BUGS

Please report any bugs or feature requests to C<bug-ai-image at rt.cpan.org>, or through
the web interface at L<https://rt.cpan.org/NoAuth/ReportBug.html?Queue=bug-ai-image>.  I will be notified, and then you'll
automatically be notified of progress on your bug as I make changes.

=head1 SUPPORT

You can find documentation for this module with the perldoc command.

    perldoc AI::Image

You can also look for information at:

=over 4

=item * RT: CPAN's request tracker (report bugs here)

L<https://rt.cpan.org/NoAuth/Bugs.html?Dist=AI-Image>

=item * Search CPAN

L<https://metacpan.org/release/AI::Image>

=back

=head1 COPYRIGHT AND LICENSE

Copyright (C) 2024 by Ian Boddison

This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself.

=cut

 view all matches for this distribution


AI-LibNeural

 view release on metacpan or  search on metacpan

LibNeural.pm  view on Meta::CPAN


our @ISA = qw(Exporter DynaLoader);

# This allows declaration	use AI::LibNeural ':all';
our %EXPORT_TAGS = ( 'all' => [ qw(
	ALL
	HIDDEN
	INPUT
	OUTPUT
) ] );

our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );

our @EXPORT = qw(
);

our $VERSION = '0.02';

sub AUTOLOAD {
    # This AUTOLOAD is used to 'autoload' constants from the constant()
    # XS function.  If a constant is not found then control is passed
    # to the AUTOLOAD in AutoLoader.
    my $constname;
    our $AUTOLOAD;
    ($constname = $AUTOLOAD) =~ s/.*:://;
    croak "& not defined" if $constname eq 'constant';
    my $val = constant($constname, @_ ? $_[0] : 0);
    if ($! != 0) {
	if ($! =~ /Invalid/ || $!{EINVAL}) {
	    $AutoLoader::AUTOLOAD = $AUTOLOAD;
	    goto &AutoLoader::AUTOLOAD;
	}
	else {
	    croak "Your vendor has not defined AI::LibNeural macro $constname";
	}
    }
    {
	no strict 'refs';
	# Fixed between 5.005_53 and 5.005_61
#	if ($] >= 5.00561) {
#	    *$AUTOLOAD = sub () { $val };
#	}
#	else {
	    *$AUTOLOAD = sub { $val };
#	}
    }
    goto &$AUTOLOAD;
}

bootstrap AI::LibNeural $VERSION;

# Preloaded methods go here.

LibNeural.pm  view on Meta::CPAN


AI::LibNeural - Perl extension libneural

=head1 SYNOPSIS

  use AI::LibNeural;

  my $nn = AI::LibNeural->new( 2, 4, 1 );

  # teach it the logical AND
  $nn->train( [ 0, 0 ], [ 0.05 ], 0.0000000005, 0.2 );
  $nn->train( [ 0, 1 ], [ 0.05 ], 0.0000000005, 0.2 );
  $nn->train( [ 1, 0 ], [ 0.05 ], 0.0000000005, 0.2 );
  $nn->train( [ 1, 1 ], [ 0.95 ], 0.0000000005, 0.2 );

  my $result = $nn->run( [ 1, 1 ] );
  # result should be ~ 0.95
  $result = $nn->run( [ 0, 1 ] );
  # result should be ~ 0.05

  $nn->save('and.mem');

=head1 ABSTRACT

  Perl bindings for the libneural C++ neural network library.

=head1 DESCRIPTION

Provides accessors for the libneural library as a Perl object. libneural is a
C++ library that implements a feed-forward back-propagation neural network.

LibNeural.pm  view on Meta::CPAN


=over

=item ALL

  The total number of nodes on all three layers

=item INPUT

  The number of nodes on the input layer

=item HIDDEN

  The number of nodes on the hidden layer

=item OUTPUT

  The number of nodes on the output layer

=back

=back
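
A sketch of how these constants are typically used with the node-count accessor (the C<get_layersize> method name is an assumption based on the libneural bindings; check your installed version):

  # assumes the get_layersize() accessor provided by AI::LibNeural
  my $inputs  = $nn->get_layersize(INPUT);
  my $hidden  = $nn->get_layersize(HIDDEN);
  my $outputs = $nn->get_layersize(OUTPUT);
  my $total   = $nn->get_layersize(ALL);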

 view all matches for this distribution


AI-Logic-AnswerSet

 view release on metacpan or  search on metacpan

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

require Exporter;

our @ISA = qw(Exporter);

our %EXPORT_TAGS = ( 'all' => [ qw(
	
) ] );

our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );

our @EXPORT = qw(
	
);

our $VERSION = '0.02';

sub executeFromFileAndSave {		#Executes DLV with a file as input and saves the output in another file

	open DLVW, ">>", "$_[1]" or die "Can't open $_[1]: $!\n";
	print DLVW $_[2];
	close DLVW;

	open(SAVESTDOUT, ">&STDOUT") or die "Can't save STDOUT: $!\n";
	open(STDOUT, ">$_[0]") or die "Can't open STDOUT to $_[0]", "$!\n";


	my @args = ("./dlv", "$_[1]");
	system(@args) == 0
		or die "system @args failed: $?";

	open(STDOUT,">&SAVESTDOUT"); # restore STDOUT
	close SAVESTDOUT;

}

sub executeAndSave {	#Executes DLV and saves the output of the program written by the user in a file

	open(SAVESTDOUT, ">&STDOUT") or die "Can't save STDOUT: $!\n";
	open(STDOUT, ">$_[0]") or die "Can't open STDOUT to $_[0]", "$!\n";

	my @args = ("./dlv --");
	system(@args) == 0 or die "system @args failed: $?";

	open(STDOUT,">&SAVESTDOUT"); # restore STDOUT
	close SAVESTDOUT;


}


sub iterativeExec {	# Executes an input program with several instances and stores them in a bidimensional array

	my @input = @_;

	my @returned_value;

	if(@input) {
		
		my $option = $input[$#input];

		if($option =~ /^-/) {
			pop(@input);
		}
		else {
			$option = "";
		}

		my $dir = pop(@input);
		my @files = qx(ls $dir);
			
		my $size = @files;

		for(my $i = 0; $i < $size; $i++) {

			my $elem = $files[$i];
			chomp $elem;
			my @args = ("./dlv", "@input", "$dir$elem", "$option");
			my (@out) = `@args`;
			push @{$returned_value[$i]}, @out;
		}
		
	}

	else {
		print "INPUT ERROR\n";
	}

	return @returned_value;

}

sub singleExec {	 # Executes a single input program or opens the DLV terminal and stores it in an array

	my @input = @_;
	my @returned_value;

	if(@input) {


		my @args = ("./dlv", "@input");
		(@returned_value) = `@args`;
		
	}

	else {
		my $command = "./dlv --";
		(@returned_value) = `$command`;		
	}

	return @returned_value;
}

sub selectOutput {	# Selects one of the outputs returned by the iterative execution of several input programs

	my @stdoutput = @{$_[0]};
	my $n = $_[1];

	return @{$stdoutput[$n]};
	
}

sub getFacts {	# Return the facts of the input program

	my $input = shift;

	my @isAFile = stat($input);

	my @facts;

	if(@isAFile) {

		open INPUT, "<", "$input";
		my @rows = <INPUT>;
		foreach my $row (@rows) {
			if($row =~ /^(\w+)(\(((\w|\d|\.)+,?)*\))?\./) {
				push @facts, $row;
			}
		}
		close INPUT;

	}
	else {
		my @str = split /\. /,$input;
		foreach my $elem (@str) {

			if($elem =~ /^(\w+)(\(((\w|\d|\.)+,?)*\))?\.?$/) {
				push @facts, $elem;
			}
		}
	}
	return @facts;
	
}

sub addCode {	#Adds code to input

	my $program = $_[0];
	my $code = $_[1];
	my @isAFile = stat($program);

	if(@isAFile) {
		open PROGRAM, ">>", $program;
		print PROGRAM "$code\n";
		close PROGRAM;
	}

	else {
		$program = \($_[0]);
		$$program = "$$program $code";
	}
		
}

sub getASFromFile {	#Gets the Answer Set from the file where the output was saved

	open RESULT, "<", "$_[0]" or die $!;
	my @result = <RESULT>;
	my @arr;
	foreach my $line (@result) {

		if($line =~ /\{\w*/) {
			$line =~ s/(\{|\})//g;
			#$line =~ s/\n//g;  # delete \n from $line
		        my @tmp = split(', ', $line);
			push @arr, @tmp;
		}

	}

	close RESULT;
	return @arr;
}

sub getAS { #Returns the Answer Sets from the array where the output was saved

	my @result = @_;
	my @arr;

	foreach my $line (@result) {


		if($line =~ /\{\w*/) {
			$line =~ s/(\{|\})//g;
			$line =~ s/(Best model:)//g;
		        my @tmp = split(', ', $line);
			push @arr, @tmp;
		}

	}

	return @arr;
}

sub statistics {	# Returns an array of hashes in which the statistics of every predicate of every answer set are stored.
			# If a comparison condition is specified (number of predicates), it returns the answer sets
			# that satisfy that condition

	my @as = @{$_[0]};
	my @pred = @{$_[1]};
	my @num = @{$_[2]};
	my @operators = @{$_[3]};

	my @sets;
	my @ans;
	
	my $countAS = 0;
	my @stat;

	my $countPred;

	foreach my $elem (@as) {

		if($elem =~ /(\w+).*\n/) {
			push @{$sets[$countAS]}, $elem;
			if(_existsPred($1,\@pred)) {
				$stat[$countAS]{$1} += 1;
				$countAS += 1;
			}
		}

		elsif($elem =~ /(\w+).*/) {
			push @{$sets[$countAS]}, $elem;
			if(_existsPred($1,\@pred)) {
				$stat[$countAS]{$1} += 1;
			}
		}
	}

	my $comparison = 0;
	if(@num and @operators) {
		$comparison = 1;
	}
	elsif(@num and !@operators) {
		print "Error: comparison element missing";
		return @ans;
	}
	
	

	if($comparison) {
		my $size = @pred;
		my $statSize = @stat;

		for(my $j = 0; $j < $statSize; $j++) {
			for(my $i = 0; $i < $size; $i++) {

				my $t = $stat[$j]{$pred[$i]};

				if(_evaluate($t,$num[$i],$operators[$i])) {
					$countPred++;
				}
				else {
					$countPred = 0;
					last;  # 'break' is not a Perl keyword
				}
			}

			if($countPred == $size) {
				push @ans , $sets[$j];
			}
			$countPred = 0;
		}
		return @ans;

	}

	return @stat;
}

sub _evaluate {		#private use only

	my $value = shift;
	my $num = shift;
	my $operator = shift;

	if($operator eq "==") {
		if($value == $num) {
			return 1;
		}
		return 0;
	}
	elsif($operator eq "!=") {
		if($value != $num) {
			return 1;
		}
		return 0;		
	}
	elsif($operator eq ">") {
		if($value > $num) {
			return 1;
		}
		return 0;
	}
	elsif($operator eq ">=") {
		if($value >= $num) {
			return 1;
		}
		return 0;
	}
	elsif($operator eq "<") {
		if($value < $num) {
			return 1;
		}
		return 0;
	}
	elsif($operator eq "<=") {
		if($value <= $num) {
			return 1;
		}
		return 0;
	}
	return 0;
}

sub mapAS {	#Mapping of the Answer Sets in an array of hashes

	my $countAS = 0;

	my @answerSets = @{$_[0]};

	my @second;
	if($_[1]) {
		@second = @{$_[1]};
	}

	my @third;
	if($_[2]) {
		@third = @{$_[2]};
	}

	my @selectedAS;
	
	my @predList;

	my @pred;

	if(@second) {
		if($second[0] =~ /\d+/) {

			@selectedAS = @second;
			if(@third) {
				@predList = @third;
			}

		}

		else {
			@predList = @second;
			if(@third) {
				@selectedAS = @third;
			}
		}
	}


	foreach my $elem (@answerSets) {


		if($elem =~ /(\w+).*\n/){
			if(@predList) {
				if(_existsPred($1,\@predList)) {
					push @{$pred[$countAS]{$1}}, $elem;
				}
			}
			else {
				push @{$pred[$countAS]{$1}}, $elem;
			}
			$countAS = $countAS + 1;
			
		}

		elsif($elem =~ /(\w+).*/) {
			if(@predList) {
				if(_existsPred($1,\@predList)) {
					push @{$pred[$countAS]{$1}}, $elem;
				}
			}
			else {
				push @{$pred[$countAS]{$1}}, $elem;
			}
		}
		
	}

	if(@selectedAS) {
		
		my $size = @selectedAS;

		my @selectedPred;


		for(my $i = 0; $i < $size; $i++) {
			my $as = $selectedAS[$i];
			push @selectedPred, $pred[$as];
		}

		return @selectedPred;
	}
	return @pred;

}

sub _existsPred {	#Verifies the existence of a predicate (private use only)

	my $pred = $_[0];
	my @predList = @{$_[1]};

	my $size = @predList;

	for(my $i = 0; $i < $size; $i++) {
		if($pred eq $predList[$i]) {
			return 1;
		}
	}
	return 0;
		
}

sub getPred {	#Returns the predicates from the array of hashes

	my @pr = @{$_[0]};
	return @{$pr[$_[1]]{$_[2]}};
}

sub getProjection {	#Returns the values selected by the user

	my @pr = @{$_[0]};
	my @projection;

	my @res = @{$pr[$_[1]]{$_[2]}};
	
	my $size = @res;
	my $fieldsStr;

	for(my $i = 0; $i < $size; $i++) {
		my $pred = @{$pr[$_[1]]{$_[2]}}[$i];
		if($pred =~ /(\w+)\((.+)\)/) {
			$fieldsStr = $2;
		}
		my @fields = split(',',$fieldsStr);
		push @projection , $fields[$_[3]-1];		
			
	}

	return @projection;
}

sub createNewFile {

	my $file = $_[0];
	my $code = $_[1];

	open FILE, ">", $file;
	print FILE "$code\n";
	close FILE;

}

sub addFacts {

	my $name = $_[0];
	my @facts = @{$_[1]};
	my $append = $_[2];
	my $filename = $_[3];
	
	open FILE, $append, $filename;

	foreach my $f (@facts) {
		print FILE "$name($f).\n";
	}
	close FILE;
}


1;
__END__

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

AI::Logic::AnswerSet - Perl extension for embedding ASP (Answer Set Programming) programs in Perl.


=head1 SYNOPSIS

  use AI::Logic::AnswerSet;
  
  # invoke DLV (an Answer-Set-Programming-based system) and save the stdout
  my @stdoutput = AI::Logic::AnswerSet::singleExec("3-colorability.txt");

  # parse the output
  my @res = AI::Logic::AnswerSet::getAS(@stdoutput);

  # map the results
  my @mappedAS = AI::Logic::AnswerSet::mapAS(\@res);

  # get a predicate from the results
  my @col = AI::Logic::AnswerSet::getPred(\@mappedAS,1,"col");

  # get a term of a predicate
  my @term = AI::Logic::AnswerSet::getProjection(\@mappedAS,1,"col",2);


=head1 DESCRIPTION

This extension allows you to interact with DLV, an Artificial Intelligence system

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN


=head3 executeFromFileAndSave

This method allows you to execute DLV with an input file and save the output in another file.

	AI::Logic::AnswerSet::executeFromFileAndSave("outprog.txt","dlvprog.txt","");

In this case the file "outprog.txt" will contain the result of the DLV invocation 
on the file "dlvprog.txt".
No code is specified in the third argument of the method. It can be used to add code 
to an existing file or to a new one.

	AI::Logic::AnswerSet::executeFromFileAndSave("outprog.txt","dlvprog.txt",
	"b(X):-a(X). a(1).");
  
=head3 executeAndSave

To call DLV without an input file, directly writing the ASP code from the terminal, 
use this method, passing only the name of the output file.

	AI::Logic::AnswerSet::executeAndSave("outprog.txt");

Press Ctrl+D to stop using the DLV terminal and execute the program.

=head3 singleExec

Use this method to execute DLV with several input files, optionally including
DLV options like "-nofacts".
The output will be stored inside an array.

	my @out = AI::Logic::AnswerSet::singleExec("3col.txt","nodes.txt","edges.txt","-nofacts");

Another way to use this method:

	my @out = AI::Logic::AnswerSet::singleExec();

In this way it will work like C<executeAndSave()> without saving the output to a file.

=head3 iterativeExec

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

one might have more than one graph, and each graph instance can be stored in a different file.
A Perl programmer might want to work with the results of all the graphs she has in her files,
so this function is useful for this purpose.
Use it as follows:

	my @outputs = AI::Logic::AnswerSet::iterativeExec("3col.txt","nodes.txt","./instances");

In this case the nodes of each graph are the same, but not the edges.
Notice that in order to correctly use this method, the user must specify the path 
to the instances (the edges, in this case).

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN


=head3 selectOutput

This method allows you to get one of the results of C<iterativeExec>.

	my @outputs = AI::Logic::AnswerSet::iterativeExec("3col.txt","nodes.txt","./instances");
	my @out = AI::Logic::AnswerSet::selectOutput(\@outputs,0);

In this case the first output is selected.

=head3 getASFromFile

Parses the output of a DLV execution saved in a file and gathers the answer sets.

	AI::Logic::AnswerSet::executeFromFileAndSave("outprog.txt","dlvprog.txt","");
	my @result = AI::Logic::AnswerSet::getASFromFile("outprog.txt");

=head3 getAS

Parses the output of a DLV execution and gather the answer sets.

	my @out = AI::Logic::AnswerSet::singleExec("3col.txt","nodes.txt","edges.txt","-nofacts");
	my @result = AI::Logic::AnswerSet::getAS(@out);

=head3 mapAS

Parses the output in order to save and organize the results into a hashmap.

	my @out = AI::Logic::AnswerSet::singleExec("3col.txt","nodes.txt","edges.txt","-nofacts");
	my @result = AI::Logic::AnswerSet::getAS(@out);
	my @mappedAS = AI::Logic::AnswerSet::mapAS(@result);

The user can set some constraints on the data to be saved in the hashmap, such as predicates, or answer sets, or both.

	my @mappedAS = AI::Logic::AnswerSet::mapAS(@result,@predicates,@answerSets);

For instance, think about the 3-colorability problem: imagine having the 
edges in the hashmap and wanting to print the edges contained in the third answer set
returned by DLV; this example of the print instruction is useful for
understanding how the hashmap works:

	print "Edges: @{$mappedAS[2]{edge}}\n";

In this case, we are printing the array containing the predicate "edge".

=head3 getPred

Easily manage the hashmap and get the desired predicate(see the print example
described in the method above):

	my @edges = AI::Logic::AnswerSet::getPred(\@mappedAS,3,"edge");

=head3 getProjection

Returns the projection of the n-th term of a specified predicate.
Suppose that we have the predicate "person" C<person(Name,Surname);> and
that we just want the surnames of all the instances of "person":

	my @surnames = AI::Logic::AnswerSet::getProjection(\@mappedAS,3,"person",2);

The parameters are, respectively: hashmap, number of the answer set, name of the predicate,
position of the term.

=head3 statistics

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

This method returns an array of hashes with some stats of every predicate of every answer set,
namely the number of occurrences of the specified predicates of each answer set.
If a condition is specified (on the number of predicates), only the answer sets that satisfy
the condition are returned.

	my @res = AI::Logic::AnswerSet::getAS(@output);
	my @predicates = ("node","edge");
	my @stats = AI::Logic::AnswerSet::statistics(\@res,\@predicates);

In this case the data structure returned is the same as the one returned by C<mapAS()>.
Hence, for each answer set (each element of the array of hashes), the hashmap will appear 
like this:

	{
		node => 6
		edge => 9
	}

This means that for a particular answer set we have 6 nodes and 9 edges.
In addition, this method can be used with some constraints:

	my @res = AI::Logic::AnswerSet::getAS(@output);
	my @predicates = ("node,"edge");
	my @numbers = (4,15);
	my @operators = (">","<");
	my @stats = AI::Logic::AnswerSet::statistics(\@res,\@predicates,\@numbers,\@operators);

Now the function returns the answer sets that satisfy the conditions, i.e., an answer set
is returned only if the number of occurrences of the predicate "node" is higher than 4 and the number of occurrences of the predicate "edge" is less than 15.

=head3 getFacts

Get the logic program facts from a file or a string.

	my @facts = AI::Logic::AnswerSet::getFacts($inputFile);

or

	my $code = "a(X):-b(X). b(1). b(2).";
	my @facts = AI::Logic::AnswerSet::getFacts($code);

DLV code can be freely exploited, with the only constraint of putting a space between rules
or facts.
This is an example of wrong input code:

	my $code = "a(X):-b(X).b(1).b(2).";

=head3 addCode

Use this method to quickly add new code to a string or a file.

	my $code = "a(X):-b(X). b(1). b(2).";
	AI::Logic::AnswerSet::addCode($code,"b(3). b(4).");

or

	my $file = "myfile.txt";
	AI::Logic::AnswerSet::addCode($file,"b(3). b(4).");

=head3 createNewFile

Creates a new file with some code.

	AI::Logic::AnswerSet::createNewFile($file,"b(3). b(4).");

=head3 addFacts

Quickly adds facts to a file. Imagine having some data (representing facts) 
stored inside an array; just use this method to put it in a file and give the facts a name.

	AI::Logic::AnswerSet::addFacts("villagers",\@villagers,">","villagersFile.txt");

In the example above, "villagers" will be the name of the facts; C<@villagers> is the array 
containing the data; ">" is the file operator (which will create a new file, in this case); 
"villagersFile.txt" is the filename. The file will contain facts of the form "villagers(X)",
for each "X" appearing in the array C<@villagers>.

 view all matches for this distribution


AI-ML

 view release on metacpan or  search on metacpan

inc/MyBuilder.pm  view on Meta::CPAN


my $EXTRA_O_FLAGS = "";
my $EXTRA_FLAGS = "-lblas -llapack";

sub ACTION_code {
    my $self = shift;

    $EXTRA_O_FLAGS .= " -DUSE_REAL" unless exists $self->args->{'with-float'};

    $self->update_XS("XS/ML.xs.inc");

    $self->dispatch("create_objects");
    $self->dispatch("compile_xs");

    $self->SUPER::ACTION_code;
}

sub update_XS {
    my ($self, $file) = @_;
    my $output = $file;
    $output =~ s/\.inc$//;

    open my $i_fh, "<", $file   or die "$!";
    open my $o_fh, ">", $output or die "$!";
    while (<$i_fh>) {
        s/REAL/float/g;
        print {$o_fh} $_;
    }
    close $o_fh;
    close $i_fh;
}

sub ACTION_create_objects {
    my $self = shift;
    my $cbuilder = $self->cbuilder;

    my $c_progs = $self->rscan_dir("C", qr/\.c$/);
    for my $file (@$c_progs) {
        my $object = $file;
        $object =~ s/\.c$/.o/;
        next if $self->up_to_date($file, $object);
        $cbuilder->compile(
            object_file => $object,
            extra_compiler_flags => $EXTRA_O_FLAGS,
            source => $file,
            include_dirs => ["."]
        );
    }
}

sub ACTION_compile_xs {
    my $self = shift;
    my $cbuilder = $self->cbuilder;

    my $archdir = path($self->blib, "arch", "auto", "AI", "ML");
    $archdir->mkpath unless -d $archdir;

    my $xs = path("XS", "ML.xs");
    my $xs_c = path("XS", "ML.c");

    if (!$self->up_to_date($xs, $xs_c)) {
        ExtUtils::ParseXS::process_file(
            filename => $xs->stringify, 
            prototypes => 0,
            output => $xs_c->stringify
        );
    }

    my $xs_o = path("XS", "ML.o");
    if (!$self->up_to_date($xs_c, $xs_o)) {
        $cbuilder->compile(
            source => $xs_c,
            extra_compiler_flags => $EXTRA_O_FLAGS,
            include_dirs => ["."], 
            object_file => $xs_o
        );
    }
    my $bs_file = path( $archdir, "ML.bs");
    if (!$self->up_to_date($xs_o, $bs_file) ) {
        ExtUtils::Mkbootstrap::Mkbootstrap($bs_file);
        if (!-f $bs_file) {
            $bs_file->touch;
        }
    }

    my $objects = $self->rscan_dir("C", qr/\.o$/);
    push @$objects, $xs_o;
    my $lib_file = path($archdir, "ML.$Config{dlext}");
    if (!$self->up_to_date( $objects, $lib_file )) {
        $cbuilder->link(
            module_name => 'AI::ML',
            extra_linker_flags => $EXTRA_FLAGS,
            objects => $objects,
            lib_file => $lib_file,
        );
    }
}

1;

 view all matches for this distribution


AI-MXNet-Gluon-Contrib

 view release on metacpan or  search on metacpan

lib/AI/MXNet/Gluon/Contrib.pm  view on Meta::CPAN

use AI::MXNet;
use AI::MXNet::Gluon::Contrib::NN::BasicLayers;
our $VERSION = '1.33';
=head1 NAME 

    AI::MXNet::Gluon::Contrib - A collection of supplemental Gluon blocks.
=cut

1;

=head1 AUTHOR

    Sergey Kolychev, <sergeykolychev.github@gmail.com>

=head1 COPYRIGHT & LICENSE

    This library is licensed under Apache 2.0 license L<https://www.apache.org/licenses/LICENSE-2.0>

=cut

 view all matches for this distribution


AI-MXNet-Gluon-ModelZoo

 view release on metacpan or  search on metacpan

examples/image_classification.pl  view on Meta::CPAN

use AI::MXNet::Gluon::ModelZoo 'get_model';
use AI::MXNet::Gluon::Utils 'download';
use Getopt::Long qw(HelpMessage);

GetOptions(
    ## my Pembroke Welsh Corgi Kyuubi, enjoying the solar eclipse of August 21, 2017
    'image=s' => \(my $image = 'http://apache-mxnet.s3-accelerate.dualstack.amazonaws.com/'.
                               'gluon/dataset/kyuubi.jpg'),
    'model=s' => \(my $model = 'resnet152_v2'),
    'help'    => sub { HelpMessage(0) },
) or HelpMessage(1);

## get a pretrained model (download parameters file if necessary)
my $net = get_model($model, pretrained => 1);

examples/image_classification.pl  view on Meta::CPAN

my @text_labels = map { chomp; s/^\S+\s+//; $_ } IO::File->new($fname)->getlines;

## get the image from the disk or net
if($image =~ /^https/)
{
    eval { require IO::Socket::SSL; };
    die "Need to have IO::Socket::SSL installed for https images" if $@;
}
$image = $image =~ /^https?/ ? download($image) : $image;

# Following the conventional way of preprocessing ImageNet data:
# Resize the short edge to 256 pixels,

examples/image_classification.pl  view on Meta::CPAN

# We perform an additional softmax on the output to obtain probability scores.
# And then print the top-5 recognized objects.
my $prob = $net->($image)->softmax;
for my $idx (@{ $prob->topk(k=>5)->at(0) })
{
    my $i = $idx->asscalar;
    printf(
        "With prob = %.5f, it contains %s\n",
        $prob->at(0)->at($i)->asscalar, $text_labels[$i]
    );
}

 view all matches for this distribution


AI-MXNet

 view release on metacpan or  search on metacpan

examples/calculator.pl  view on Meta::CPAN

use AI::MXNet ('mx');

## preparing the samples
## to train our network
sub samples {
    my($batch_size, $func) = @_;
    # get samples
    my $n = 16384;
    ## creates a pdl with $n rows and two columns with random
    ## floats in the range between 0 and 1
    my $data = PDL->random(2, $n);
    ## creates the pdl with $n rows and one column with labels;
    ## each label is the sum, product, etc. of the
    ## two random values in the corresponding row of the data pdl
    my $label = $func->($data->slice('0,:'), $data->slice('1,:'));
    # partition into train/eval sets
    my $edge = int($n / 8);
    my $validation_data = $data->slice(":,0:@{[ $edge - 1 ]}");
    my $validation_label = $label->slice(":,0:@{[ $edge - 1 ]}");
    my $train_data = $data->slice(":,$edge:");
    my $train_label = $label->slice(":,$edge:");
    # build iterators around the sets
    return(mx->io->NDArrayIter(
        batch_size => $batch_size,
        data => $train_data,
        label => $train_label,
    ), mx->io->NDArrayIter(
        batch_size => $batch_size,
        data => $validation_data,
        label => $validation_label,
    ));
}

## the network model
sub nn_fc {
    my $data = mx->sym->Variable('data');
    my $ln = mx->sym->exp(mx->sym->FullyConnected(
        data => mx->sym->log($data),
        num_hidden => 1,
    ));
    my $wide = mx->sym->Concat($data, $ln);
    my $fc = mx->sym->FullyConnected(
	$wide,
	num_hidden => 1
    );
    return mx->sym->MAERegressionOutput(data => $fc, name => 'softmax');
}

sub learn_function {
    my(%args) = @_;
    my $func = $args{func};
    my $batch_size = $args{batch_size}//128;
    my($train_iter, $eval_iter) = samples($batch_size, $func);
    my $sym = nn_fc();

    ## call as ./calculator.pl 1 to just print model and exit
    if($ARGV[0]) {
        my @dsz = @{$train_iter->data->[0][1]->shape};
        my @lsz = @{$train_iter->label->[0][1]->shape};
        my $shape = {
            data          => [ $batch_size, splice @dsz,  1 ],
            softmax_label => [ $batch_size, splice @lsz, 1 ],
        };
        print mx->viz->plot_network($sym, shape => $shape)->graph->as_png;
        exit;
    }

    my $model = mx->mod->Module(
        symbol => $sym,
        context => mx->cpu(),
    );
    $model->fit($train_iter,
        eval_data => $eval_iter,
        optimizer => 'adam',
        optimizer_params => {
            learning_rate => $args{lr}//0.01,
            rescale_grad => 1/$batch_size,
            lr_scheduler  => AI::MXNet::FactorScheduler->new(
        	step => 100,
        	factor => 0.99
            )
        },
        eval_metric => 'mse',
        num_epoch => $args{epoch}//25,
    );

    # refit the model for calling on 1 sample at a time
    my $iter = mx->io->NDArrayIter(
        batch_size => 1,
        data => PDL->pdl([[ 0, 0 ]]),
        label => PDL->pdl([[ 0 ]]),
    );
    $model->reshape(
        data_shapes => $iter->provide_data,
        label_shapes => $iter->provide_label,
    );

    # wrap a helper around making predictions
    my ($arg_params) = $model->get_params;
    for my $k (sort keys %$arg_params)
    {
	print "$k -> ". $arg_params->{$k}->aspdl."\n";
    }
    return sub {
        my($n, $m) = @_;
        return $model->predict(mx->io->NDArrayIter(
            batch_size => 1,
            data => PDL->new([[ $n, $m ]]),
        ))->aspdl->list;
    };
}

my $add = learn_function(func => sub {
    my($n, $m) = @_;
    return $n + $m;
});
my $sub = learn_function(func => sub {
    my($n, $m) = @_;
    return $n - $m;
}, batch_size => 50, epoch => 40);
my $mul = learn_function(func => sub {
    my($n, $m) = @_;
    return $n * $m;
}, batch_size => 50, epoch => 40);
my $div = learn_function(func => sub {
    my($n, $m) = @_;
    return $n / $m;
}, batch_size => 10, epoch => 80);


print "12345 + 54321 ≈ ", $add->(12345, 54321), "\n";
print "188 - 88 ≈ ", $sub->(188, 88), "\n";

 view all matches for this distribution


AI-MXNetCAPI

 view release on metacpan or  search on metacpan

lib/AI/MXNetCAPI.pm  view on Meta::CPAN


AI::MXNetCAPI - Swig interface to the mxnet C API

=head1 SYNOPSIS

 use AI::MXNetCAPI;

=head1 DESCRIPTION

This module provides an interface to mxnet
via its C API.

 view all matches for this distribution


AI-MaxEntropy

 view release on metacpan or  search on metacpan

inc/Module/AutoInstall.pm  view on Meta::CPAN

use Cwd                 ();
use ExtUtils::MakeMaker ();

use vars qw{$VERSION};
BEGIN {
	$VERSION = '1.03';
}

# special map on pre-defined feature sets
my %FeatureMap = (
    ''      => 'Core Features',    # XXX: deprecated
    '-core' => 'Core Features',
);

# various lexical flags
my ( @Missing, @Existing,  %DisabledTests, $UnderCPAN,     $HasCPANPLUS );
my ( $Config,  $CheckOnly, $SkipInstall,   $AcceptDefault, $TestOnly );

inc/Module/AutoInstall.pm  view on Meta::CPAN

# See if it's a testing or non-interactive session
_accept_default( $ENV{AUTOMATED_TESTING} or ! -t STDIN ); 
_init();

sub _accept_default {
    $AcceptDefault = shift;
}

sub missing_modules {
    return @Missing;
}

sub do_install {
    __PACKAGE__->install(
        [
            $Config
            ? ( UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} )
            : ()
        ],
        @Missing,
    );
}

# initialize various flags, and/or perform install
sub _init {
    foreach my $arg (
        @ARGV,
        split(
            /[\s\t]+/,
            $ENV{PERL_AUTOINSTALL} || $ENV{PERL_EXTUTILS_AUTOINSTALL} || ''
        )
      )
    {
        if ( $arg =~ /^--config=(.*)$/ ) {
            $Config = [ split( ',', $1 ) ];
        }
        elsif ( $arg =~ /^--installdeps=(.*)$/ ) {
            __PACKAGE__->install( $Config, @Missing = split( /,/, $1 ) );
            exit 0;
        }
        elsif ( $arg =~ /^--default(?:deps)?$/ ) {
            $AcceptDefault = 1;
        }
        elsif ( $arg =~ /^--check(?:deps)?$/ ) {
            $CheckOnly = 1;
        }
        elsif ( $arg =~ /^--skip(?:deps)?$/ ) {
            $SkipInstall = 1;
        }
        elsif ( $arg =~ /^--test(?:only)?$/ ) {
            $TestOnly = 1;
        }
    }
}

# overrides MakeMaker's prompt() to automatically accept the default choice
sub _prompt {
    goto &ExtUtils::MakeMaker::prompt unless $AcceptDefault;

    my ( $prompt, $default ) = @_;
    my $y = ( $default =~ /^[Yy]/ );

    print $prompt, ' [', ( $y ? 'Y' : 'y' ), '/', ( $y ? 'n' : 'N' ), '] ';
    print "$default\n";
    return $default;
}

# the workhorse
sub import {
    my $class = shift;
    my @args  = @_ or return;
    my $core_all;

    print "*** $class version " . $class->VERSION . "\n";
    print "*** Checking for Perl dependencies...\n";

    my $cwd = Cwd::cwd();

    $Config = [];

    my $maxlen = length(
        (
            sort   { length($b) <=> length($a) }
              grep { /^[^\-]/ }
              map  {
                ref($_)
                  ? ( ( ref($_) eq 'HASH' ) ? keys(%$_) : @{$_} )
                  : ''
              }
              map { +{@args}->{$_} }
              grep { /^[^\-]/ or /^-core$/i } keys %{ +{@args} }
        )[0]
    );

    while ( my ( $feature, $modules ) = splice( @args, 0, 2 ) ) {
        my ( @required, @tests, @skiptests );
        my $default  = 1;
        my $conflict = 0;

        if ( $feature =~ m/^-(\w+)$/ ) {
            my $option = lc($1);

            # check for a newer version of myself
            _update_to( $modules, @_ ) and return if $option eq 'version';

            # sets CPAN configuration options
            $Config = $modules if $option eq 'config';

            # promote every features to core status
            $core_all = ( $modules =~ /^all$/i ) and next
              if $option eq 'core';

            next unless $option eq 'core';
        }

        print "[" . ( $FeatureMap{ lc($feature) } || $feature ) . "]\n";

        $modules = [ %{$modules} ] if UNIVERSAL::isa( $modules, 'HASH' );

        unshift @$modules, -default => &{ shift(@$modules) }
          if ( ref( $modules->[0] ) eq 'CODE' );    # XXX: bugward combatability

        while ( my ( $mod, $arg ) = splice( @$modules, 0, 2 ) ) {
            if ( $mod =~ m/^-(\w+)$/ ) {
                my $option = lc($1);

                $default   = $arg    if ( $option eq 'default' );
                $conflict  = $arg    if ( $option eq 'conflict' );
                @tests     = @{$arg} if ( $option eq 'tests' );
                @skiptests = @{$arg} if ( $option eq 'skiptests' );

                next;
            }

            printf( "- %-${maxlen}s ...", $mod );

            if ( $arg and $arg =~ /^\D/ ) {
                unshift @$modules, $arg;
                $arg = 0;
            }

            # XXX: check for conflicts and uninstalls(!) them.
            if (
                defined( my $cur = _version_check( _load($mod), $arg ||= 0 ) ) )
            {
                print "loaded. ($cur" . ( $arg ? " >= $arg" : '' ) . ")\n";
                push @Existing, $mod => $arg;
                $DisabledTests{$_} = 1 for map { glob($_) } @skiptests;
            }
            else {
                print "missing." . ( $arg ? " (would need $arg)" : '' ) . "\n";
                push @required, $mod => $arg;
            }
        }

        next unless @required;

        my $mandatory = ( $feature eq '-core' or $core_all );

        if (
            !$SkipInstall
            and (
                $CheckOnly
                or _prompt(
                    qq{==> Auto-install the }
                      . ( @required / 2 )
                      . ( $mandatory ? ' mandatory' : ' optional' )
                      . qq{ module(s) from CPAN?},
                    $default ? 'y' : 'n',
                ) =~ /^[Yy]/
            )
          )
        {
            push( @Missing, @required );
            $DisabledTests{$_} = 1 for map { glob($_) } @skiptests;
        }

        elsif ( !$SkipInstall
            and $default
            and $mandatory
            and
            _prompt( qq{==> The module(s) are mandatory! Really skip?}, 'n', )
            =~ /^[Nn]/ )
        {
            push( @Missing, @required );
            $DisabledTests{$_} = 1 for map { glob($_) } @skiptests;
        }

        else {
            $DisabledTests{$_} = 1 for map { glob($_) } @tests;
        }
    }

    $UnderCPAN = _check_lock();    # check for $UnderCPAN

    if ( @Missing and not( $CheckOnly or $UnderCPAN ) ) {
        require Config;
        print
"*** Dependencies will be installed the next time you type '$Config::Config{make}'.\n";

        # make an educated guess of whether we'll need root permission.
        print "    (You may need to do that as the 'root' user.)\n"
          if eval '$>';
    }
    print "*** $class configuration finished.\n";

    chdir $cwd;

    # import to main::
    no strict 'refs';
    *{'main::WriteMakefile'} = \&Write if caller(0) eq 'main';
}

# Check to see if we are currently running under CPAN.pm and/or CPANPLUS;
# if we are, then we simply let it take care of our dependencies
sub _check_lock {
    return unless @Missing;

    if ($ENV{PERL5_CPANPLUS_IS_RUNNING}) {
        print <<'END_MESSAGE';

*** Since we're running under CPANPLUS, I'll just let it take care
    of the dependencies' installation later.
END_MESSAGE
        return 1;
    }

    _load_cpan();

    # Find the CPAN lock-file
    my $lock = MM->catfile( $CPAN::Config->{cpan_home}, ".lock" );
    return unless -f $lock;

    # Check the lock
    local *LOCK;
    return unless open(LOCK, $lock);

    if (
            ( $^O eq 'MSWin32' ? _under_cpan() : <LOCK> == getppid() )
        and ( $CPAN::Config->{prerequisites_policy} || '' ) ne 'ignore'
    ) {
        print <<'END_MESSAGE';

*** Since we're running under CPAN, I'll just let it take care
    of the dependencies' installation later.
END_MESSAGE
        return 1;
    }

    close LOCK;
    return;
}

sub install {
    my $class = shift;

    my $i;    # used below to strip leading '-' from config keys
    my @config = ( map { s/^-// if ++$i; $_ } @{ +shift } );

    my ( @modules, @installed );
    while ( my ( $pkg, $ver ) = splice( @_, 0, 2 ) ) {

        # grep out those already installed
        if ( defined( _version_check( _load($pkg), $ver ) ) ) {
            push @installed, $pkg;
        }
        else {
            push @modules, $pkg, $ver;
        }
    }

    return @installed unless @modules;  # nothing to do
    return @installed if _check_lock(); # defer to the CPAN shell

    print "*** Installing dependencies...\n";

    return unless _connected_to('cpan.org');

    my %args = @config;
    my %failed;
    local *FAILED;
    if ( $args{do_once} and open( FAILED, '.#autoinstall.failed' ) ) {
        while (<FAILED>) { chomp; $failed{$_}++ }
        close FAILED;

        my @newmod;
        while ( my ( $k, $v ) = splice( @modules, 0, 2 ) ) {
            push @newmod, ( $k => $v ) unless $failed{$k};
        }
        @modules = @newmod;
    }

    if ( _has_cpanplus() ) {
        _install_cpanplus( \@modules, \@config );
    } else {
        _install_cpan( \@modules, \@config );
    }

    print "*** $class installation finished.\n";

    # see if we have successfully installed them
    while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) {
        if ( defined( _version_check( _load($pkg), $ver ) ) ) {
            push @installed, $pkg;
        }
        elsif ( $args{do_once} and open( FAILED, '>> .#autoinstall.failed' ) ) {
            print FAILED "$pkg\n";
        }
    }

    close FAILED if $args{do_once};

    return @installed;
}

sub _install_cpanplus {
    my @modules   = @{ +shift };
    my @config    = _cpanplus_config( @{ +shift } );
    my $installed = 0;

    require CPANPLUS::Backend;
    my $cp   = CPANPLUS::Backend->new;
    my $conf = $cp->configure_object;

    return unless $conf->can('conf') # 0.05x+ with "sudo" support
               or _can_write($conf->_get_build('base'));  # 0.04x

    # if we're root, set UNINST=1 to avoid trouble unless user asked for it.
    my $makeflags = $conf->get_conf('makeflags') || '';
    if ( UNIVERSAL::isa( $makeflags, 'HASH' ) ) {
        # 0.03+ uses a hashref here
        $makeflags->{UNINST} = 1 unless exists $makeflags->{UNINST};

    } else {
        # 0.02 and below uses a scalar
        $makeflags = join( ' ', split( ' ', $makeflags ), 'UNINST=1' )
          if ( $makeflags !~ /\bUNINST\b/ and eval qq{ $> eq '0' } );

    }
    $conf->set_conf( makeflags => $makeflags );
    $conf->set_conf( prereqs   => 1 );

    while ( my ( $key, $val ) = splice( @config, 0, 2 ) ) {
        $conf->set_conf( $key, $val );
    }

    my $modtree = $cp->module_tree;
    while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) {
        print "*** Installing $pkg...\n";

        MY::preinstall( $pkg, $ver ) or next if defined &MY::preinstall;

        my $success;
        my $obj = $modtree->{$pkg};

        if ( $obj and defined( _version_check( $obj->{version}, $ver ) ) ) {
            my $pathname = $pkg;
            $pathname =~ s/::/\\W/;

            foreach my $inc ( grep { m/$pathname.pm/i } keys(%INC) ) {
                delete $INC{$inc};
            }

            my $rv = $cp->install( modules => [ $obj->{module} ] );

            if ( $rv and ( $rv->{ $obj->{module} } or $rv->{ok} ) ) {
                print "*** $pkg successfully installed.\n";
                $success = 1;
            } else {
                print "*** $pkg installation cancelled.\n";
                $success = 0;
            }

            $installed += $success;
        } else {
            print << ".";
*** Could not find a version $ver or above for $pkg; skipping.
.
        }

        MY::postinstall( $pkg, $ver, $success ) if defined &MY::postinstall;
    }

    return $installed;
}

sub _cpanplus_config {
	my @config = ();
	while ( @_ ) {
		my ($key, $value) = (shift(), shift());
		if ( $key eq 'prerequisites_policy' ) {
			if ( $value eq 'follow' ) {
				$value = CPANPLUS::Internals::Constants::PREREQ_INSTALL();
			} elsif ( $value eq 'ask' ) {
				$value = CPANPLUS::Internals::Constants::PREREQ_ASK();
			} elsif ( $value eq 'ignore' ) {
				$value = CPANPLUS::Internals::Constants::PREREQ_IGNORE();
			} else {
				die "*** Cannot convert option $key = '$value' to CPANPLUS version.\n";
			}
			# hand the converted value to CPANPLUS under its 'prereqs' key
			push @config, 'prereqs', $value;
		} else {
			die "*** Cannot convert option $key to CPANPLUS version.\n";
		}
	}
	return @config;
}
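
# Illustrative (assumed) usage of the helper above:
#   _cpanplus_config( prerequisites_policy => 'follow' )
# returns ( prereqs => PREREQ_INSTALL() ), ready to be passed to
# $conf->set_conf() in _install_cpanplus().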

sub _install_cpan {
    my @modules   = @{ +shift };
    my @config    = @{ +shift };
    my $installed = 0;
    my %args;

    _load_cpan();
    require Config;

    if (CPAN->VERSION < 1.80) {
        # no "sudo" support, probe for writableness
        return unless _can_write( MM->catfile( $CPAN::Config->{cpan_home}, 'sources' ) )
                  and _can_write( $Config::Config{sitelib} );
    }

    # if we're root, set UNINST=1 to avoid trouble unless user asked for it.
    my $makeflags = $CPAN::Config->{make_install_arg} || '';
    $CPAN::Config->{make_install_arg} =
      join( ' ', split( ' ', $makeflags ), 'UNINST=1' )
      if ( $makeflags !~ /\bUNINST\b/ and eval qq{ $> eq '0' } );

    # don't show start-up info
    $CPAN::Config->{inhibit_startup_message} = 1;

    # set additional options
    while ( my ( $opt, $arg ) = splice( @config, 0, 2 ) ) {
        ( $args{$opt} = $arg, next )
          if $opt =~ /^force$/;    # pseudo-option
        $CPAN::Config->{$opt} = $arg;
    }

    local $CPAN::Config->{prerequisites_policy} = 'follow';

    while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) {
        MY::preinstall( $pkg, $ver ) or next if defined &MY::preinstall;

        print "*** Installing $pkg...\n";

        my $obj     = CPAN::Shell->expand( Module => $pkg );
        my $success = 0;

        if ( $obj and defined( _version_check( $obj->cpan_version, $ver ) ) ) {
            my $pathname = $pkg;
            $pathname =~ s/::/\\W/;

            foreach my $inc ( grep { m/$pathname.pm/i } keys(%INC) ) {
                delete $INC{$inc};
            }

            my $rv = $args{force} ? CPAN::Shell->force( install => $pkg )
                                  : CPAN::Shell->install($pkg);
            $rv ||= eval {
                $CPAN::META->instance( 'CPAN::Distribution', $obj->cpan_file, )
                  ->{install}
                  if $CPAN::META;
            };

            if ( $rv eq 'YES' ) {
                print "*** $pkg successfully installed.\n";
                $success = 1;
            }
            else {
                print "*** $pkg installation failed.\n";
                $success = 0;
            }

            $installed += $success;
        }
        else {
            print << ".";
*** Could not find a version $ver or above for $pkg; skipping.
.
        }

        MY::postinstall( $pkg, $ver, $success ) if defined &MY::postinstall;
    }

    return $installed;
}

sub _has_cpanplus {
    return (
        $HasCPANPLUS = (
            $INC{'CPANPLUS/Config.pm'}
              or _load('CPANPLUS::Shell::Default')
        )
    );
}

# make guesses on whether we're under the CPAN installation directory
sub _under_cpan {
    require Cwd;
    require File::Spec;

    my $cwd  = File::Spec->canonpath( Cwd::cwd() );
    my $cpan = File::Spec->canonpath( $CPAN::Config->{cpan_home} );

    return ( index( $cwd, $cpan ) > -1 );
}
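
# Illustrative sketch: with cpan_home set to '/home/user/.cpan', a build
# running from '/home/user/.cpan/build/Foo-1.00' makes _under_cpan() true.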

sub _update_to {
    my $class = __PACKAGE__;
    my $ver   = shift;

    return
      if defined( _version_check( _load($class), $ver ) );  # no need to upgrade

    if (
        _prompt( "==> A newer version of $class ($ver) is required. Install?",
            'y' ) =~ /^[Nn]/
      )
    {
        die "*** Please install $class $ver manually.\n";
    }

    print << ".";
*** Trying to fetch it from CPAN...
.

    # install ourselves
    _load($class) and return $class->import(@_)
      if $class->install( [], $class, $ver );

    print << '.'; exit 1;

*** Cannot bootstrap myself. :-( Installation terminated.
.
}

# check if we're connected to some host, using inet_aton
sub _connected_to {
    my $site = shift;

    return (
        ( _load('Socket') and Socket::inet_aton($site) ) or _prompt(
            qq(
*** Your host cannot resolve the domain name '$site', which
    probably means your Internet connection is unavailable.
==> Should we try to install the required module(s) anyway?), 'n'
          ) =~ /^[Yy]/
    );
}

# check if a directory is writable; may create it on demand
sub _can_write {
    my $path = shift;
    mkdir( $path, 0755 ) unless -e $path;

    return 1 if -w $path;

    print << ".";
*** You are not allowed to write to the directory '$path';
    the installation may fail due to insufficient permissions.
.

    if (
        eval '$>' and lc(`sudo -V`) =~ /version/ and _prompt(
            qq(
==> Should we try to re-execute the autoinstall process with 'sudo'?),
            ((-t STDIN) ? 'y' : 'n')
        ) =~ /^[Yy]/
      )
    {

        # try to bootstrap ourselves from sudo
        print << ".";
*** Trying to re-execute the autoinstall process with 'sudo'...
.
        my $missing = join( ',', @Missing );
        my $config = join( ',',
            UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} )
          if $Config;

        return
          unless system( 'sudo', $^X, $0, "--config=$config",
            "--installdeps=$missing" );

        print << ".";
*** The 'sudo' command exited with error!  Resuming...
.
    }

    return _prompt(
        qq(
==> Should we try to install the required module(s) anyway?), 'n'
    ) =~ /^[Yy]/;
}

# load a module and return the version it reports
sub _load {
    my $mod  = pop;    # class/instance doesn't matter
    my $file = $mod;

    $file =~ s|::|/|g;
    $file .= '.pm';

    local $@;
    return eval { require $file; $mod->VERSION } || ( $@ ? undef: 0 );
}
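
# Illustrative sketch: _load('Data::Dumper') requires Data/Dumper.pm and
# returns its $VERSION; it returns undef when the module cannot be loaded at
# all, and 0 when it loads but declares no version.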

# Load CPAN.pm and its configuration
sub _load_cpan {
    return if $CPAN::VERSION;
    require CPAN;
    if ( $CPAN::HandleConfig::VERSION ) {
        # Newer versions of CPAN have a HandleConfig module
        CPAN::HandleConfig->load;
    } else {
    	# Older versions had the load method in Config directly
        CPAN::Config->load;
    }
}

# compare two versions, either use Sort::Versions or plain comparison
sub _version_check {
    my ( $cur, $min ) = @_;
    return unless defined $cur;

    $cur =~ s/\s+$//;

    # check for version numbers that are not in decimal format
    if ( ref($cur) or ref($min) or $cur =~ /v|\..*\./ or $min =~ /v|\..*\./ ) {
        if ( ( $version::VERSION or defined( _load('version') )) and
             version->can('new') 
            ) {

            # use version.pm if it is installed.
            return (
                ( version->new($cur) >= version->new($min) ) ? $cur : undef );
        }
        elsif ( $Sort::Versions::VERSION or defined( _load('Sort::Versions') ) )
        {

            # use Sort::Versions as the sorting algorithm for a.b.c versions
            return ( ( Sort::Versions::versioncmp( $cur, $min ) != -1 )
                ? $cur
                : undef );
        }

        warn "Cannot reliably compare non-decimal formatted versions.\n"
          . "Please install version.pm or Sort::Versions.\n";
    }

    # plain comparison
    local $^W = 0;    # shuts off 'not numeric' bugs
    return ( $cur >= $min ? $cur : undef );
}
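
# Illustrative sketch of the contract above: _version_check('1.23', '1.20')
# returns '1.23', while _version_check('1.10', '1.20') returns undef; dotted
# or v-string versions are delegated to version.pm or Sort::Versions if present.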

# nothing; this usage is deprecated.
sub main::PREREQ_PM { return {}; }

sub _make_args {
    my %args = @_;

    $args{PREREQ_PM} = { %{ $args{PREREQ_PM} || {} }, @Existing, @Missing }
      if $UnderCPAN or $TestOnly;

    if ( $args{EXE_FILES} and -e 'MANIFEST' ) {
        require ExtUtils::Manifest;
        my $manifest = ExtUtils::Manifest::maniread('MANIFEST');

        $args{EXE_FILES} =
          [ grep { exists $manifest->{$_} } @{ $args{EXE_FILES} } ];
    }

    $args{test}{TESTS} ||= 't/*.t';
    $args{test}{TESTS} = join( ' ',
        grep { !exists( $DisabledTests{$_} ) }
          map { glob($_) } split( /\s+/, $args{test}{TESTS} ) );

    my $missing = join( ',', @Missing );
    my $config =
      join( ',', UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} )
      if $Config;

    $PostambleActions = (
        $missing
        ? "\$(PERL) $0 --config=$config --installdeps=$missing"
        : "\$(NOECHO) \$(NOOP)"
    );
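
    # Illustrative (assumed values): with @Missing = ( 'Foo::Bar' => 0.1 ),
    # the generated "installdeps" action expands to something like
    #   $(PERL) Makefile.PL --config=... --installdeps=Foo::Bar,0.1
    # where $0 is normally the Makefile.PL being run.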

    return %args;
}

# a wrapper to ExtUtils::MakeMaker::WriteMakefile
sub Write {
    require Carp;
    Carp::croak "WriteMakefile: Need even number of args" if @_ % 2;

    if ($CheckOnly) {
        print << ".";
*** Makefile not written in check-only mode.
.
        return;
    }

    my %args = _make_args(@_);

    no strict 'refs';

    $PostambleUsed = 0;
    local *MY::postamble = \&postamble unless defined &MY::postamble;
    ExtUtils::MakeMaker::WriteMakefile(%args);

    print << "." unless $PostambleUsed;
*** WARNING: Makefile written with customized MY::postamble() without
    including contents from Module::AutoInstall::postamble() --
    auto installation features disabled.  Please contact the author.
.

    return 1;
}

sub postamble {
    $PostambleUsed = 1;

    return << ".";

config :: installdeps
\t\$(NOECHO) \$(NOOP)

checkdeps ::

 view all matches for this distribution


AI-MegaHAL

 view release on metacpan or  search on metacpan

lib/AI/MegaHAL.pm  view on Meta::CPAN

use strict;

use vars qw(@EXPORT @ISA $VERSION $AUTOLOAD);

@EXPORT = qw(megahal_setnoprompt
	     megahal_setnowrap
	     megahal_setnobanner
	     megahal_seterrorfile
	     megahal_setstatusfile
	     megahal_initialize
	     megahal_initial_greeting
	     megahal_command
	     megahal_do_reply
	     megahal_learn
	     megahal_output
	     megahal_input
	     megahal_cleanup);

@ISA = qw(Exporter DynaLoader);
$VERSION = '0.08';

sub AUTOLOAD {
    # This AUTOLOAD is used to 'autoload' constants from the constant()
    # XS function.  If a constant is not found then control is passed
    # to the AUTOLOAD in AutoLoader.

    my $constname;
    ($constname = $AUTOLOAD) =~ s/.*:://;
    croak "& not defined" if $constname eq 'constant';
    my $val = constant($constname, @_ ? $_[0] : 0);
    if ($! != 0) {
	if ($! =~ /Invalid/ || $!{EINVAL}) {
	    $AutoLoader::AUTOLOAD = $AUTOLOAD;
	    goto &AutoLoader::AUTOLOAD;
	}
	else {
	    croak "Your vendor has not defined AI::MegaHAL macro $constname";
	}
    }
    {
	no strict 'refs';
	# Fixed between 5.005_53 and 5.005_61
	if ($] >= 5.00561) {
	    *$AUTOLOAD = sub () { $val };
	}
	else {
	    *$AUTOLOAD = sub { $val };
	}
    }
    goto &$AUTOLOAD;
}

sub new {
    my ($class,%args) = @_;
    my $self;

    # Bless ourselves into the AI::MegaHAL class.
    $self = bless({ },$class);

    # Make sure that we can find a brain or a training file somewhere,
    # or else die with an error.
    my $path = $args{'Path'} || ".";
    if(-e "$path/megahal.brn" || -e "$path/megahal.trn") {
	chdir($path) || die("Error: chdir: $!\n");
    } else {
	die("Error: unable to locate megahal.brn or megahal.trn\n");
    }

    # Set some of the options that may have been passed to us.
    megahal_setnobanner() if(! $args{'Banner'});
    megahal_setnowrap()   if(! $args{'Wrap'});
    megahal_setnoprompt() if(! $args{'Prompt'});

    # This flag indicates whether or not we should automatically save
    # our brain when the object goes out of scope.
    $self->{'AutoSave'} = $args{'AutoSave'};

    # Initialize ourselves.
    $self->_initialize();

    return $self;
}

sub initial_greeting {
    my $self = shift;

    return megahal_initial_greeting();
}

sub do_reply {
    my ($self,$text) = @_;

    return megahal_do_reply($text,0);
}

sub learn {
    my ($self,$text) = @_;

    return megahal_learn($text,0);
}

sub _initialize {
    my $self = shift;

    megahal_initialize();
    return;
}

sub _cleanup {
    my $self = shift;

    megahal_cleanup();
    return;
}

sub DESTROY {
    my $self = shift;

    $self->_cleanup() if($self->{'AutoSave'});
    return;
}

bootstrap AI::MegaHAL $VERSION;
1;

lib/AI/MegaHAL.pm  view on Meta::CPAN

=head1 SYNOPSIS

use AI::MegaHAL;

my $megahal = AI::MegaHAL->new('Path'     => './',
                               'Banner'   => 0,
                               'Prompt'   => 0,
                               'Wrap'     => 0,
                               'AutoSave' => 0);

my $text = $megahal->initial_greeting();

$text = $megahal->do_reply($message);

lib/AI/MegaHAL.pm  view on Meta::CPAN

This package provides a Perl interface to the MegaHAL conversation simulator written by Jason Hutchens.

=head1 CONSTRUCTOR

$megahal = AI::MegaHAL->new('Path'     => './',
                            'Banner'   => 0,
                            'Prompt'   => 0,
                            'Wrap'     => 0,
                            'AutoSave' => 0);

Creates a new AI::MegaHAL object.  The object constructor can optionally receive the following named parameters:

=over 4

 view all matches for this distribution


AI-MicroStructure

 view release on metacpan or  search on metacpan

bin/from-folder.pl  view on Meta::CPAN

use Digest::MD5 qw(md5_hex);
use Data::Dumper;
use Data::Printer;
use Env qw(PWD);
my ($TOP,$storage) ;
       $TOP = $ARGV[0] unless(!@ARGV);
       $storage = $ARGV[1] unless(!@ARGV);
        $storage = "/tmp" unless($storage);
our $cache = {};
our $files={};
our $curSysDate = `date +"%F"`;
        $curSysDate=~ s/\n//g;

our %opts = (cache_file =>
              sprintf("%s/%s_.cache",
              $storage,$curSysDate));

GetOptions (\%opts, "cache_file=s");



sub translate
{
  return unless -f;
  (my $rel_name = $File::Find::name) =~ s{.*/}{}xs;
   my $name = md5_hex($rel_name);
  my $go = 0;

  foreach (@ARGV) {
      my $t = $_;

      if ( !-d $t && $rel_name !~ m/($t)/i ) {
          $go++;
          print $rel_name . "\n";
      }
  }

  if (/\.(html|htm)$/) {
      $files->{html}->{$name} = $rel_name;
  } elsif (/\.txt$/) {
      $files->{latex}->{$name} = $rel_name;
  } elsif (/\.json$/) {
      $files->{text}->{$name} = $rel_name;
  }
}


find(\&translate, "$TOP");
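
# Illustrative invocation (inferred from the @ARGV handling above; the paths
# are made up):
#   perl from-folder.pl /path/to/corpus /tmp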


p @{[keys %$files,reverse @ARGV,$storage]};

__DATA__
our $c = AI::MicroStructure::Context->new(@ARGV);
    $c->retrieveIndex($PWD."/t/docs"); #"/home/santex/data-hub/data-hub" structures=0 text=1 json=1



   my $style = {};
      $style->{explicit}  = 1;
ok($c->simpleMixedSearch($style,$_)) && ok($c->play($style,$_))   for
 qw(atom antimatter planet);



ok(print Dumper $c->intersect($style,$_)) for
 qw(atom antimatter planet);

ok(print Dumper $c->similar($style,$_)) for
 qw(atom antimatter planet);

#p @out;

1;

bin/from-folder.pl  view on Meta::CPAN

use File::Find;
use Data::Dumper;
use Storable qw(lock_store lock_retrieve);
use Getopt::Long;
our $curSysDate = `date +"%F"`;
    $curSysDate=~ s/\n//g;

our %opts = (cache_file =>
              sprintf("/tmp/%s.cache",
              $curSysDate));

GetOptions (\%opts, "cache_file=s");

our $cache = {};
our @target = split("\/",$opts{cache_file});
my $set = AI::MicroStructure::ObjectSet->new();

eval {
    local $^W = 0;  # because otherwise it doesn't pass errors
#`rm $opts{cache_file}`;
    $cache = lock_retrieve($opts{cache_file});

    $cache = {} unless $cache;

    warn "New cache!\n" unless defined $cache;
};


END{

  lock_store($cache,$opts{cache_file});

  print Dumper [$set->size,$set->members];


  }




find(\&translate, "$TOP/./");

sub translate {
  return unless -f;
  (my $rel_name = $File::Find::name) =~ s{.*/}{}xs;

  $set->insert(AI::MicroStructure::Object->new($rel_name));

}
#print Dumper join "-", soundex(("rock'n'roll", 'rock and roll', 'rocknroll'));

 view all matches for this distribution


AI-NNEasy

 view release on metacpan or  search on metacpan

lib/AI/NNEasy.pm  view on Meta::CPAN

## Original file:    ./lib/AI/NNEasy.hploo
## Generation date:  2005-01-16 22:07:24
##
## ** Do not change this file, use the original HPLOO source! **
#############################################################################

#############################################################################
## Name:        NNEasy.pm
## Purpose:     AI::NNEasy
## Author:      Graciliano M. P. 
## Modified by:

lib/AI/NNEasy.pm  view on Meta::CPAN

## RCS-ID:      
## Copyright:   (c) 2005 Graciliano M. P. 
## Licence:     This program is free software; you can redistribute it and/or
##              modify it under the same terms as Perl itself
#############################################################################


{ package AI::NNEasy ;

  
use strict qw(vars) ; no warnings ;

  
use vars qw(%CLASS_HPLOO @ISA $VERSION) ;

  
$VERSION = '0.06' ;

  
@ISA = qw(Class::HPLOO::Base UNIVERSAL) ;

  
my $CLASS = 'AI::NNEasy' ; sub __CLASS__ { 'AI::NNEasy' } ;

  
use Class::HPLOO::Base ;

  use AI::NNEasy::NN ;
  use Storable qw(freeze thaw) ;
  use Data::Dumper ;
  


  sub NNEasy { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $file = shift(@_) ;
    my @out_types = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $error_ok = shift(@_) ;
    my $in = shift(@_) ;
    my $out = shift(@_) ;
    my @layers = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $conf = shift(@_) ;
    
    $file ||= 'nneasy.nne' ;
  
    if ( $this->load($file) ) {
      return $this ;
    }
  
    my $in_sz  = ref $in  ? $in->{nodes}  : $in ;
    my $out_sz = ref $out ? $out->{nodes} : $out ;
  
    @layers = ($in_sz+$out_sz) if !@layers ;
    
    foreach my $layers_i ( @layers ) {
      $layers_i = $in_sz+$out_sz if $layers_i <= 0 ;
    }
    
    $conf ||= {} ;
    
    my $decay = $$conf{decay} || 0 ; 
    
    my $nn_in  = $this->_layer_conf( { decay=>$decay } , $in ) ;
    my $nn_out  = $this->_layer_conf( { decay=>$decay , activation_function=>'linear' } , $out ) ;
    
    foreach my $layers_i ( @layers ) {
      $layers_i = $this->_layer_conf( { decay=>$decay } , $layers_i ) ;
    }
    
    my $nn_conf = {random_connections=>0 , networktype=>'feedforward' , random_weights=>1 , learning_algorithm=>'backprop' , learning_rate=>0.1 , bias=>1} ;
    foreach my $Key ( keys %$nn_conf ) { $$nn_conf{$Key} = $$conf{$Key} if exists $$conf{$Key} ;}
    
    $this->{NN_ARGS} = [[ $nn_in , @layers , $nn_out ] , $nn_conf] ;

    $this->{NN} = AI::NNEasy::NN->new( @{$this->{NN_ARGS}} ) ;
    
    $this->{FILE} = $file ;
    
    @out_types = (0,1) if !@out_types ;
    
    @out_types = sort {$a <=> $b} @out_types ;
    
    $this->{OUT_TYPES} = \@out_types ;
    
    if ( $error_ok <= 0 ) {
      my ($min_dif , $last) ;
      my $i = -1 ;
      foreach my $out_types_i ( @out_types ) {
        ++$i ;
        if ($i > 0) {
          my $dif = $out_types_i - $last ;
          $min_dif = $dif if !defined $min_dif || $dif < $min_dif ;
        }
        $last = $out_types_i ;
      }
      $error_ok = $min_dif / 2 ;
      $error_ok -= $error_ok*0.1 ;
    }
    
    $this->{ERROR_OK} = $error_ok ;
    
    return $this ;
  }
  
  sub _layer_conf { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $def = shift(@_) ;
    my $conf = shift(@_) ;
    
    $def ||= {} ;
    $conf = { nodes=>$conf } if !ref($conf) ;
    
    foreach my $Key ( keys %$def ) { $$conf{$Key} = $$def{$Key} if !exists $$conf{$Key} ;}
  
    my $layer_conf  = {nodes=>1  , persistent_activation=>0 , decay=>0 , random_activation=>0 , threshold=>0 , activation_function=>'tanh' , random_weights=>1} ;
    foreach my $Key ( keys %$layer_conf ) { $$layer_conf{$Key} = $$conf{$Key} if exists $$conf{$Key} ;}

    return $layer_conf ;
  }
  
  sub reset_nn { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    
    $this->{NN} = AI::NNEasy::NN->new( @{ $this->{NN_ARGS} } ) ;
  }
  
  sub load { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $file = shift(@_) ;
    
    $file ||= $this->{FILE} ;
    if ( -s $file ) {
      open (my $fh, $file) ;
      my $dump = join '' , <$fh> ;
      close ($fh) ;
      
      my $restored = thaw($dump) ;
      
      if ($restored) {
        my $fl = $this->{FILE} ;
        %$this = %$restored ;
        $this->{FILE} = $fl if $fl ;
        return 1 ;
      }
    }
    return ;
  }
  
  sub save { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $file = shift(@_) ;
    
    $file ||= $this->{FILE} ;
        
    my $dump = freeze( {%$this} ) ;
    open (my $fh,">$this->{FILE}") ;
    print $fh $dump ;
    close ($fh) ;
  }
  
  sub learn { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $in = shift(@_) ;
    my $out = shift(@_) ;
    my $n = shift(@_) ;
    
    $n ||= 100 ;
    
    my $err ;
    for (1..$n) {
      $this->{NN}->run($in) ;
      $err = $this->{NN}->learn($out) ;
    }
    
    $err *= -1 if $err < 0 ;
    return $err ;
  }
  
  *_learn_set_get_output_error = \&_learn_set_get_output_error_c ;
  
  sub _learn_set_get_output_error_pl { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $set = shift(@_) ;
    my $error_ok = shift(@_) ;
    my $ins_ok = shift(@_) ;
    my $verbose = shift(@_) ;
    
    for (my $i = 0 ; $i < @$set ; $i+=2) {
      $this->{NN}->run($$set[$i]) ;
      $this->{NN}->learn($$set[$i+1]) ;
    }

    my ($err,$learn_ok,$print) ;
    for (my $i = 0 ; $i < @$set ; $i+=2) {
      $this->{NN}->run($$set[$i]) ;
      my $er = $this->{NN}->RMSErr($$set[$i+1]) ;
      $er *= -1 if $er < 0 ;
      ++$learn_ok if $er < $error_ok ;
      $err += $er ;
      $print .= join(' ',@{$$set[$i]}) ." => ". join(' ',@{$$set[$i+1]}) ." > $er\n" if $verbose ;
    }
    
    $err /= $ins_ok ;
    
    return ( $err , $learn_ok , $print ) ;
  }
  
  
  
  
    
  sub learn_set { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $ins_ok = shift(@_) ;
    my $limit = shift(@_) ;
    my $verbose = shift(@_) ;
    
    my $ins_sz = @set / 2 ;

    $ins_ok ||= $ins_sz ;
    
    my $err_static_limit = 15 ;
    my $err_static_limit_positive ;

    if ( ref($limit) eq 'ARRAY' ) {
      ($limit,$err_static_limit,$err_static_limit_positive) = @$limit ;
    }
    
    $limit ||= 30000 ;
    $err_static_limit_positive ||= $err_static_limit/2 ;
  
    my $error_ok = $this->{ERROR_OK} ;
    
    my $check_diff_count = 1000 ;
    
    my ($learn_ok,$counter,$err,$err_last,$err_count,$err_static, $reset_count1 , $reset_count2 ,$print) ;
    
    $err_static = 0 ;
    
    while ( ($learn_ok < $ins_ok) && ($counter < $limit) ) {
      ($err , $learn_ok , $print) = $this->_learn_set_get_output_error(\@set , $error_ok , $ins_ok , $verbose) ;
      
      ++$counter ;
      
      if ( !($counter % 100) || $learn_ok == $ins_ok ) {
        my $err_diff = $err_last - $err ;
        $err_diff *= -1 if $err_diff < 0 ;
        
        $err_count += $err_diff ;
        
        ++$err_static if $err_diff <= 0.00001 || $err > 1 ;
        
        print "err_static = $err_static\n" if $verbose && $err_static ;

        $err_last = $err ;
        
        my $reseted ;
        if ( $err_static >= $err_static_limit || ($err > 1 && $err_static >= $err_static_limit_positive) ) {
          $err_static = 0 ;
          $counter -= 2000 ;
          $reseted = 1 ;
          ++$reset_count1 ;
          
          if ( ( $reset_count1 + $reset_count2 ) > 2 ) {
            $reset_count1 = $reset_count2 = 0 ;
            print "** Reseting NN...\n" if $verbose ;
            $this->reset_nn ;
          }
          else {
            print "** Reseting weights due NULL diff...\n" if $verbose ;
            $this->{NN}->init ;
          }
        }
        
        if ( !($counter % $check_diff_count) ) {
          $err_count /= ($check_diff_count/100) ;
          
          print "ERR COUNT> $err_count\n" if $verbose ;
          
          if ( !$reseted && $err_count < 0.001 ) {
            $err_static = 0 ;
            $counter -= 1000 ;
            ++$reset_count2 ;
            
            if ( ($reset_count1 + $reset_count2) > 2 ) {
              $reset_count1 = $reset_count2 = 0 ;
              print "** Reseting NN...\n" if $verbose ;
              $this->reset_nn ;
            }
            else {
              print "** Reseting weights due LOW diff...\n" if $verbose ;
              $this->{NN}->init ;
            }
          }

          $err_count = 0 ;
        }
        
        if ( $verbose ) {
          print "\nepoch $counter : error_ok = $error_ok : error = $err : err_diff = $err_diff : err_static = $err_static : ok = $learn_ok\n" ;
          print $print ;
        }
      }

      print "epoch $counter : error = $err : ok = $learn_ok\n" if $verbose > 1 ;
      
    }
    
  }
  
  sub get_set_error { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $ins_ok = shift(@_) ;
    
    my $ins_sz = @set / 2 ;

    $ins_ok ||= $ins_sz ;
  
    my $err ;
    for (my $i = 0 ; $i < @set ; $i+=2) {
      $this->{NN}->run($set[$i]) ;
      my $er = $this->{NN}->RMSErr($set[$i+1]) ;
      $er *= -1 if $er < 0 ;
      $err += $er ;
    }
    
    $err /= $ins_ok ;
    return $err ;
  }
  
  sub run { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $in = shift(@_) ;
    
    $this->{NN}->run($in) ;
    my $out = $this->{NN}->output() ;
    return $out ;
  }
  
  sub run_get_winner { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    
    my $out = $this->run(@_) ;
    
    foreach my $out_i ( @$out ) {
      $out_i = $this->out_type_winner($out_i) ;
    }
    
    return $out ;
  }
  
  sub out_type_winner { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $val = shift(@_) ;
    
    my ($out_type , %err) ;
    
    foreach my $types_i ( @{ $this->{OUT_TYPES} } ) {
      my $er = $types_i - $val ;
      $er *= -1 if $er < 0 ;
      $err{$types_i} = $er ;
    }
    
    my $min_type_err = (sort { $err{$a} <=> $err{$b} } keys %err)[0] ;
    $out_type = $min_type_err ;

    return $out_type ;
  }
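
  # Illustrative sketch: with OUT_TYPES [0,1], out_type_winner(0.98324)
  # returns 1 and out_type_winner(0.078964) returns 0 -- the nearest
  # declared output type wins.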


my $INLINE_INSTALL ; BEGIN { use Config ; my @installs = ($Config{installarchlib} , $Config{installprivlib} , $Config{installsitelib}) ; foreach my $i ( @installs ) { $i =~ s/[\\\/]/\//gs ;} $INLINE_INSTALL = 1 if ( __FILE__ =~ /\.pm$/ && ( join(" ",...

use Inline C => <<'__INLINE_C_SRC__' , ( $INLINE_INSTALL ? (NAME => 'AI::NNEasy' , VERSION => '0.06') : () ) ;


#define OBJ_SV(self)		SvRV( self )
#define OBJ_HV(self)		(HV*) SvRV( self )
#define OBJ_AV(self)		(AV*) SvRV( self )

#define FETCH_ATTR(hv,k)	*hv_fetch(hv, k , strlen(k) , 0)
#define FETCH_ATTR_PV(hv,k)	SvPV( FETCH_ATTR(hv,k) , len)
#define FETCH_ATTR_NV(hv,k)	SvNV( FETCH_ATTR(hv,k) )
#define FETCH_ATTR_IV(hv,k)	SvIV( FETCH_ATTR(hv,k) )  
#define FETCH_ATTR_HV(hv,k)	(HV*) FETCH_ATTR(hv,k)
#define FETCH_ATTR_AV(hv,k)	(AV*) FETCH_ATTR(hv,k)
#define FETCH_ATTR_SV_REF(hv,k)	SvRV( FETCH_ATTR(hv,k) )
#define FETCH_ATTR_HV_REF(hv,k)	(HV*) SvRV( FETCH_ATTR(hv,k) )
#define FETCH_ATTR_AV_REF(hv,k)	(AV*) SvRV( FETCH_ATTR(hv,k) )

#define FETCH_ELEM(av,i)		*av_fetch(av,i,0)
#define FETCH_ELEM_HV_REF(av,i)	(HV*) SvRV( FETCH_ELEM(av,i) )
#define FETCH_ELEM_AV_REF(av,i)	(AV*) SvRV( FETCH_ELEM(av,i) )

SV* _av_join( AV* av ) {
    SV* ret = sv_2mortal(newSVpv("",0)) ;
    int i ;
    for (i = 0 ; i <= av_len(av) ; ++i) {
      SV* elem = *av_fetch(av, i ,0) ;
      if (i > 0) sv_catpv(ret , " ") ;
      sv_catsv(ret , elem) ;
    }
    return ret ;
}

void _learn_set_get_output_error_c( SV* self , SV* set , double error_ok , int ins_ok , bool verbose ) {
    dXSARGS;
    
    STRLEN len;
    int i ;
    HV* self_hv = OBJ_HV( self );
    AV* set_av = OBJ_AV( set ) ;
    SV* nn = FETCH_ATTR(self_hv , "NN") ;
    SV* print_verbose = verbose ? sv_2mortal(newSVpv("",0)) : NULL ;
    SV* ret ;
    double err = 0 ;
    double er = 0 ;
    int learn_ok = 0 ;
        
    for (i = 0 ; i <= av_len(set_av) ; i+=2) {
      SV* set_in = *av_fetch(set_av, i ,0) ;
      SV* set_out = *av_fetch(set_av, i+1 ,0) ;

      PUSHMARK(SP) ;
        XPUSHs( nn );
        XPUSHs( set_in );
      PUTBACK ;
      call_method("run", G_DISCARD) ;
      
      PUSHMARK(SP) ;
        XPUSHs( nn );
        XPUSHs( set_out );
      PUTBACK ;
      call_method("learn", G_SCALAR) ;
    }
    
    for (i = 0 ; i <= av_len(set_av) ; i+=2) {
      SV* set_in = *av_fetch(set_av, i ,0) ;
      SV* set_out = *av_fetch(set_av, i+1 ,0) ;

      PUSHMARK(SP) ;
        XPUSHs( nn );
        XPUSHs( set_in );
      PUTBACK ;
      call_method("run", G_DISCARD) ;
      
      PUSHMARK(SP) ;
        XPUSHs( nn );
        XPUSHs( set_out );
      PUTBACK ;
      call_method("RMSErr", G_SCALAR) ;
      
      SPAGAIN ;
      ret = POPs ;
      er = SvNV(ret) ;
      if (er < 0) er *= -1 ;
      if (er < error_ok) ++learn_ok ;
      err += er ;
      
      if ( verbose ) sv_catpvf(print_verbose , "%s => %s > %f\n" ,
                       SvPV( _av_join( OBJ_AV(set_in) ) , len) ,
                       SvPV( _av_join( OBJ_AV(set_out) ) , len) ,
                       er
                     ) ;

    }
    
    err /= ins_ok ;

    if (verbose) {
      EXTEND(SP , 3) ;
        ST(0) = sv_2mortal(newSVnv(err)) ;
        ST(1) = sv_2mortal(newSViv(learn_ok)) ;
        ST(2) = print_verbose ;
      XSRETURN(3) ;
    }
    else {
      EXTEND(SP , 2) ;
        ST(0) = sv_2mortal(newSVnv(err)) ;
        ST(1) = sv_2mortal(newSViv(learn_ok)) ;
      XSRETURN(2) ;
    }
}

__INLINE_C_SRC__


}


1;

__END__

=head1 NAME

AI::NNEasy - Define, learn and use easy Neural Networks of different types, using portable code in Perl and XS.

=head1 DESCRIPTION

The main purpose of this module is to create easy Neural Networks with Perl.

The module was designed to be extensible to multiple network types, learning algorithms and activation functions.
This architecture was first based on the module L<AI::NNFlex>; I then rewrote it to fix some
serialization bugs, optimized the code, and added some XS functions to speed up
the learning process. Finally I added an intuitive interface to create and use the NN,
and added a winner algorithm for the output.

I wrote this module because, after testing different NN modules in Perl, I couldn't find
one that is portable across Linux and Windows, easy to use, and, most importantly,
one that really works on a real problem.

With this module you don't need to learn much about NNs to be able to construct one: you just
define the structure of the NN, train it on your set of inputs, and use it.

=head1 USAGE

Here's an example of a NN to compute XOR:

  use AI::NNEasy ;
  
  ## Our maximal error for the output calculation.
  my $ERR_OK = 0.1 ;

  ## Create the NN:
  my $nn = AI::NNEasy->new(
  'xor.nne' , ## file to save the NN.
  [0,1] ,     ## Output types of the NN.
  $ERR_OK ,   ## Maximal error for output.
  2 ,         ## Number of inputs.
  1 ,         ## Number of outputs.
  [3] ,       ## Hidden layers. (this is setting 1 hidden layer with 3 nodes).
  ) ;
  
  
  ## Our set of inputs and outputs to learn:
  my @set = (
  [0,0] => [0],
  [0,1] => [1],
  [1,0] => [1],
  [1,1] => [0],
  );
  
  ## Calculate the actual error for the set:
  my $set_err = $nn->get_set_error(\@set) ;
  
  ## If the set error is bigger than the maximal error, let's learn this set:
  if ( $set_err > $ERR_OK ) {
    $nn->learn_set( \@set ) ;
    ## Save the NN:
    $nn->save ;
  }
  
  ## Use the NN:
  
  my $out = $nn->run_get_winner([0,0]) ;
  print "0 0 => @$out\n" ; ## 0 0 => 0
  
  $out = $nn->run_get_winner([0,1]) ;
  print "0 1 => @$out\n" ; ## 0 1 => 1
  
  $out = $nn->run_get_winner([1,0]) ;
  print "1 0 => @$out\n" ; ## 1 0 => 1
  
  $out = $nn->run_get_winner([1,1]) ;
  print "1 1 => @$out\n" ; ## 1 1 => 0
  
  ## or just iterate through the @set:
  for (my $i = 0 ; $i < @set ; $i+=2) {
    my $out = $nn->run_get_winner($set[$i]) ;
    print "@{$set[$i]}) => @$out\n" ;
  }

=head1 METHODS

=head2 new ( FILE , @OUTPUT_TYPES , ERROR_OK , IN_SIZE , OUT_SIZE , @HIDDEN_LAYERS , %CONF )

=over 4

=item FILE

The file path to save the NN. Default: 'nneasy.nne'.

=item @OUTPUT_TYPES

An array of outputs that the NN can have, so the NN can find the nearest number in this
list to give you the right output.

=item ERROR_OK

The maximal error of the calculated output.

If not defined, ERROR_OK will be calculated as the minimal difference between 2 types in
@OUTPUT_TYPES divided by 2:

  @OUTPUT_TYPES = [0 , 0.5 , 1] ;
  
  ERROR_OK = (1 - 0.5) / 2 = 0.25 ;

=item IN_SIZE

The input size (number of nodes in the input layer).

=item OUT_SIZE

The output size (number of nodes in the output layer).

=item @HIDDEN_LAYERS

A list of hidden layer sizes. By default we have 1 hidden layer, and
its size is calculated as I<(IN_SIZE + OUT_SIZE)>. So, for a NN of
2 inputs and 1 output the hidden layer has 3 nodes.

=item %CONF

Conf can be used to define special parameters of the NN:

Default:

 {networktype=>'feedforward' , random_weights=>1 , learning_algorithm=>'backprop' , learning_rate=>0.1 , bias=>1}
 
Options:

=over 4

=item networktype

The type of the NN. For now only accepts I<'feedforward'>.

=item random_weights

Maximum value for initial weight.

=item learning_algorithm

Algorithm to train the NN. Accepts I<'backprop'> and I<'reinforce'>.

=item learning_rate

Rate used in the learning_algorithm.

=item bias

If true, will create a BIAS node. Useful when you have NULL inputs, like [0,0].

=back

=back

Here's a complete example of use:

  my $nn = AI::NNEasy->new(
  'xor.nne' , ## file to save the NN.
  [0,1] ,     ## Output types of the NN.
  0.1 ,       ## Maximal error for output.
  2 ,         ## Number of inputs.
  1 ,         ## Number of outputs.
  [3] ,       ## Hidden layers. (this is setting 1 hidden layer with 3 nodes).
  {random_connections=>0 , networktype=>'feedforward' , random_weights=>1 , learning_algorithm=>'backprop' , learning_rate=>0.1 , bias=>1} ,
  ) ;

And a simpler example that will create a NN equal to the one above:

  my $nn = AI::NNEasy->new('xor.nne' , [0,1] , 0.1 , 2 , 1 ) ;

=head2 load

Load the NN if it was previously saved.

=head2 save

Save the NN to a file using L<Storable>.

=head2 learn (@IN , @OUT , N)

Learn the input.

=over 4

=item @IN

The values of one input.

=item @OUT

The values of the output for the input above.

=item N

Number of times that this input should be learned. Default: 100

Example:

  $nn->learn( [0,1] , [1] , 10 ) ;

=back

=head2 learn_set (@SET , OK_OUTPUTS , LIMIT , VERBOSE)

Learn a set of inputs until the outputs reach the accepted error.

=over 4

=item @SET

A list of inputs and outputs.

=item OK_OUTPUTS

Minimal number of outputs that should be OK when calculating the errors.

By default I<OK_OUTPUTS> is the number of different
inputs in the @SET.

=item LIMIT

Limit of iterations when learning. Default: 30000

=item VERBOSE

If TRUE, turns verbose mode ON when learning.

=back
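
For example, a minimal sketch reusing the XOR @set from USAGE (4 distinct
inputs, a limit of 10000 iterations, verbose ON):

  $nn->learn_set( \@set , 4 , 10000 , 1 ) ;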

=head2 get_set_error (@SET , OK_OUTPUTS)

Get the actual error of a set in the NN. If the returned error is bigger than
I<ERROR_OK> defined in I<new()>, you should learn or relearn the set.

=head2 run (@INPUT)

Run an input and return the output calculated by the NN, based on what the NN has already learned.

=head2 run_get_winner (@INPUT)

Same as I<run()>, but the output will be the nearest output value from the
I<@OUTPUT_TYPES> defined at I<new()>.

For example, an input I<[0,1]> that was learned with
the output I<[1]> will actually produce something like 0.98324 as output and
not 1, since the error should never be exactly 0. So, with I<run_get_winner()>
we take the output of I<run()>, let's say 0.98324, and find which output
type is nearest to this number, which in this case is 1. An output of [0] will be
returned by I<run()> as something like 0.078964, and I<run_get_winner()> returns 0.

=head1 Samples

Inside the release sources you can find the directory ./samples with some
examples of code using this module.

=head1 INLINE C

Some functions of this module have I<Inline> versions written in C.

I have made a C version only for the functions that are called heavily, like:

  AI::NNEasy::_learn_set_get_output_error

  AI::NNEasy::NN::tanh

  AI::NNEasy::NN::feedforward::run
  
  AI::NNEasy::NN::backprop::hiddenToOutput
  AI::NNEasy::NN::backprop::hiddenOrInputToHidden
  AI::NNEasy::NN::backprop::RMSErr

This gives us the speed that we need to learn the inputs fast, while at the same time
being able to create flexible NNs.

=head1 Class::HPLOO

I have used L<Class::HPLOO> to write the module quickly, especially the XS support.

L<Class::HPLOO> enables this kind of syntax for Perl classes:

  class Foo {
    
    sub bar($x , $y) {
      $this->add($x , $y) ;
    }
    
    sub[C] int add( int x , int y ) {
      int res = x + y ;
      return res ;
    }
    
  }

This made it possible to write the module in 2 days! ;-P

=head1 Basics of a Neural Network

I<- This is just a simple text for lay people,
to help them understand what a Neural Network is and how it works
without needing to read a lot of books -.>

A NN is based in nodes/neurons and layers, where we have the input layer, the hidden layers and the output layer.

For example, here we have a NN with 2 inputs, 1 hidden layer, and 2 outputs:

         Input  Hidden  Output
 input1  ---->n1\    /---->n4---> output1
                 \  /
                  n3
                 /  \
 input2  ---->n2/    \---->n5---> output2


Basically, when we have an input, let's say [0,1], it will activate I<n2>, which will
activate I<n3>, and I<n3> will activate I<n4> and I<n5>; but the link between I<n3> and I<n4> has a I<weight>, and
the link between I<n3> and I<n5> another I<weight>. The idea is to find the I<weights> between the
nodes that can give us an output near the real output. So, if the output of [0,1]
is [1,1], the nodes I<output1> and I<output2> should give us a number near 1,
let's say 0.98654. And if the output for [0,0] is [0,0], I<output1> and I<output2> should give us a number near 0,
let's say 0.078875.

What is hard in a NN is finding these I<weights>. By default L<AI::NNEasy> uses
I<backprop> as the learning algorithm. With I<backprop> it passes the inputs through
the Neural Network and adjusts the I<weights>, starting from random values, until we find
a set of I<weights> that gives us the right output.

The secret of a NN is the number of hidden layers and nodes/neurons in each layer.
Basically the best way to define the hidden layers is 1 layer of (INPUT_NODES+OUTPUT_NODES).
So, a NN with 2 input nodes and 1 output node should have 3 nodes in the hidden layer.
This definition exists because the number of inputs defines the maximal variability of
the inputs (2**N for boolean inputs), and the output defines whether the variability is reduced by some logic restriction, as
in the XOR example, where we have 2 inputs and 1 output, so the hidden layer has 3 nodes. And as we can see in the
logic we have 3 groups of inputs:

  0 0 => 0 # false
  0 1 => 1 # or
  1 0 => 1 # or
  1 1 => 0 # and

Actually this is not the real explanation, but it is the easiest way to understand that
you need to have a number of nodes/neurons in the hidden layer that can give the
right output for your problem.

Another important step of a NN is the learning phase, where we take a set of inputs
and pass them through the NN until we have the right output. This process basically
adjusts the node I<weights> until we have an output near the real output that we want.

Another important concept is that the inputs and outputs of the NN should be between 0 and 1.
So, you can define sets like:

  0 0      => 0
  0 0.5    => 0.5
  0.5 0.5  => 1
  1 0.5    => 0
  1 1      => 1

But what is really recommended is to always use boolean values, just 0 or 1, for inputs and outputs,
since the learning phase will be faster and works better for complex problems.

=head1 SEE ALSO

L<AI::NNFlex>, L<AI::NeuralNet::Simple>, L<Class::HPLOO>, L<Inline>.

=head1 AUTHOR

Graciliano M. P. <gmpassos@cpan.org>

I will appreciate any type of feedback (including your opinions and/or suggestions). ;-P

Thanks a lot to I<Charles Colbourn <charlesc at nnflex.g0n.net>>, the
author of L<AI::NNFlex>: first for writing it, since NNFlex was my starting point for
this NN work, and second for being in touch during the development of L<AI::NNEasy>.

=head1 COPYRIGHT

This program is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.

=cut

 view all matches for this distribution


AI-NNFlex

 view release on metacpan or  search on metacpan

examples/add.pl  view on Meta::CPAN





my $network = AI::NNFlex::Backprop->new(
                learningrate=>.00001,
		fahlmanconstant=>0,
		fixedweights=>1,
		momentum=>0.3,
		bias=>0);


$network->add_layer(    nodes=>2,
            activationfunction=>"linear");


$network->add_layer(    nodes=>2,
            activationfunction=>"linear");

$network->add_layer(    nodes=>1,
            activationfunction=>"linear");


$network->init();

# Taken from Mesh ex_add.pl

examples/add.pl  view on Meta::CPAN

]);

my $err = 10;
# Stop after 4096 epochs -- don't want to wait more than that
for ( my $i = 0; ($err > 0.0001) && ($i < 4096); $i++ ) {
    $err = $dataset->learn($network);
    print "Epoch = $i error = $err\n";
}

foreach (@{$dataset->run($network)})
{
    foreach (@$_){print $_}
    print "\n";    
}

print "this should be 4000 - ";
$network->run([2000,2000]);
foreach ( @{$network->output}){print $_."\n";}

 foreach my $a ( 1..10 ) {
     foreach my $b ( 1..10 ) {
     my($ans) = $a+$b;
     my($nnans) = @{$network->run([$a,$b])};
     print "[$a] [$b] ans=$ans but nnans=$nnans\n" unless $ans == $nnans;
     }
 }


 view all matches for this distribution


AI-NNVMCAPI

 view release on metacpan or  search on metacpan

lib/AI/NNVMCAPI.pm  view on Meta::CPAN


AI::NNVMCAPI - SWIG interface to the NNVM C API

=head1 SYNOPSIS

 use AI::NNVMCAPI;

=head1 DESCRIPTION

This module provides interface to nnvm
via its api.

 view all matches for this distribution


AI-NaiveBayes

 view release on metacpan or  search on metacpan

lib/AI/NaiveBayes.pm  view on Meta::CPAN

with Storage(format => 'Storable', io => 'File');

has model   => (is => 'ro', isa => 'HashRef[HashRef]', required => 1);

sub train {
    my $self = shift;
    my $learner = AI::NaiveBayes::Learner->new();
    for my $example ( @_ ){
        $learner->add_example( %$example );
    }
    return $learner->classifier;
}


sub classify {
    my ($self, $newattrs) = @_;
    $newattrs or die "Missing parameter for classify()";

    my $m = $self->model;

    # Note that we're using the log(prob) here.  That's why we add instead of multiply.

    my %scores = %{$m->{prior_probs}};
    my %features;
    while (my ($feature, $value) = each %$newattrs) {
        next unless exists $m->{attributes}{$feature};  # Ignore totally unseen features
        while (my ($label, $attributes) = each %{$m->{probs}}) {
            my $score = ($attributes->{$feature} || $m->{smoother}{$label})*$value;  # P($feature|$label)**$value
            $scores{$label} += $score;
            $features{$feature}{$label} = $score;
        }
    }

    rescale(\%scores);

    return AI::NaiveBayes::Classification->new( label_sums => \%scores, features => \%features );
}

sub rescale {
    my ($scores) = @_;

    # Scale everything back to a reasonable area in logspace (near zero), un-loggify, and normalize
    my $total = 0;
    my $max = max(values %$scores);
    foreach (values %$scores) {
        $_ = exp($_ - $max);
        $total += $_**2;
    }
    $total = sqrt($total);
    foreach (values %$scores) {
        $_ /= $total;
    }
}


__PACKAGE__->meta->make_immutable;

lib/AI/NaiveBayes.pm  view on Meta::CPAN


version 0.04

=head1 SYNOPSIS

    # AI::NaiveBayes objects are created by AI::NaiveBayes::Learner
    # but for quick start you can use the 'train' class method
    # that is a shortcut using default AI::NaiveBayes::Learner settings

    my $classifier = AI::NaiveBayes->train( 
        {
            attributes => {
                sheep => 1, very => 1,  valuable => 1, farming => 1
            },
            labels => ['farming']
        },
        {
            attributes => {
                vampires => 1, cannot => 1, see => 1, their => 1,
                images => 1, mirrors => 1
            },
            labels => ['vampire']
        },
    );

    # Classify a feature vector
    my $result = $classifier->classify({bar => 3, blurp => 2});
    
    # $result is now a AI::NaiveBayes::Classification object
    
    my $best_category = $result->best_category;

=head1 DESCRIPTION

This module implements the classic "Naive Bayes" machine learning
algorithm.  This is a low level class that accepts only pre-computed feature-vectors

lib/AI/NaiveBayes.pm  view on Meta::CPAN


=item classify( HASHREF )

Classifies a feature-vector of the form:

    { feature1 => weight1, feature2 => weight2, ... }

The result is a C<AI::NaiveBayes::Classification> object.
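
For example, a minimal sketch reusing the classifier trained in the SYNOPSIS
(the feature weights here are made up):

    my $result = $classifier->classify( { sheep => 2, farming => 1 } );
    print $result->best_category, "\n";   # expected to print 'farming'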

=item rescale

lib/AI/NaiveBayes.pm  view on Meta::CPAN

=head1 THEORY

Bayes' Theorem is a way of inverting a conditional probability. It
states:

             P(y|x) P(x)
    P(x|y) = -----------
                P(y)

The notation C<P(x|y)> means "the probability of C<x> given C<y>."  See also
L<"http://mathforum.org/dr.math/problems/battisfore.03.22.99.html">
for a simple but complete example of Bayes' Theorem.

In this case, we want to know the probability of a given category given a
certain string of words in a document, so we have:

                     P(words | cat) P(cat)
    P(cat | words) = ---------------------
                           P(words)

We have applied Bayes' Theorem because C<P(cat | words)> is a difficult
quantity to compute directly, but C<P(words | cat)> and C<P(cat)> are accessible
(see below).

The greater the expression above, the greater the probability that the given
document belongs to the given category.  So we want to find the maximum
value.  We write this as

                                  P(words | cat) P(cat)
    Best category =   ArgMax      ---------------------
                    cat in cats         P(words)

Since C<P(words)> doesn't change over the range of categories, we can get rid
of it.  That's good, because we didn't want to have to compute these values
anyway.  So our new formula is:

    Best category =   ArgMax      P(words | cat) P(cat)
                    cat in cats

Finally, we note that if C<w1, w2, ... wn> are the words in the document,
then this expression is equivalent to:

    Best category =   ArgMax      P(w1|cat)*P(w2|cat)*...*P(wn|cat)*P(cat)
                    cat in cats

That's the formula I use in my document categorization code.  The last
step is the only non-rigorous one in the derivation, and this is the
"naive" part of the Naive Bayes technique.  It assumes that the
probability of each word appearing in a document is unaffected by the

 view all matches for this distribution


AI-NaiveBayes1

 view release on metacpan or  search on metacpan

NaiveBayes1.pm  view on Meta::CPAN


# non-exported package globals go here
use vars qw();

sub new {
  my $package = shift;
  return bless {
                attributes => [ ],
		labels     => [ ],
		attvals    => {},
		real_stat  => {},
		numof_instances => 0,
		stat_labels => {},
		stat_attributes => {},
		smoothing => {},
		attribute_type => {},
	       }, $package;
}

sub set_real {
    my ($self, @attr) = @_;
    foreach my $a (@attr) { $self->{attribute_type}{$a} = 'real' }
}

sub import_from_YAML {
    my $package = shift;
    my $yaml = shift;
    my $self = YAML::Load($yaml);
    return bless $self, $package;
}

sub import_from_YAML_file {
    my $package = shift;
    my $yamlf = shift;
    my $self = YAML::LoadFile($yamlf);
    return bless $self, $package;
}

# assume that a trailing 'count' header denotes per-row case counts;
# after optionally removing the count column, the last header is the label
sub add_table {
    my $self = shift;
    my @atts = (); my $lbl=''; my $cnt = '';
    while (@_) {
	my $table = shift;
	if ($table =~ /^(.*)\n[ \t]*-+\n/) {
	    my $a = $1; $table = $';
	    $a =~ s/^\s+//; $a =~ s/\s+$//;
	    if ($a =~ /\s*\bcount\s*$/) {
		$a=$`; $cnt=1; } else { $cnt='' }
	    @atts = split(/\s+/, $a);
	    $lbl = pop @atts;
	}
	while ($table ne '') {
	    $table =~ /^(.*)\n?/ or die;
	    my $r=$1; $table = $';
	    $r =~ s/^\s+//; $r=~ s/\s+$//;
	    if ($r =~ /^-+$/) { next }
	    my @v = split(/\s+/, $r);
	    die "values (#=$#v): {@v}\natts (#=$#atts): @atts, lbl=$lbl,\n".
                 "count: $cnt\n" unless $#v-($cnt?2:1) == $#atts;
	    my %av=(); my @a = @atts;
	    while (@a) { $av{shift @a} = shift(@v) }
	    $self->add_instances(attributes=>\%av,
				 label=>"$lbl=$v[0]",
				 cases=>($cnt?$v[1]:1) );
	}
    }
} # end of add_table
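
# Illustrative sketch of the table format parsed above (column names are made
# up): the last header is the label, and an optional trailing "count" column
# gives the number of cases per row.
#
#   $nb->add_table(<<"EOT");
#   outlook humidity play count
#   ---------------------------
#   sunny   high     no      3
#   rainy   normal   yes     2
#   EOT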

# Simplified; not generally compatible.
# Assume that the last header is label.  The first row contains
# attribute names.
sub add_csv_file {
    my $self = shift; my $fn = shift; local *F;
    open(F,$fn) or die "Cannot open CSV file `$fn': $!";
    local $_ = <F>; my @atts = (); my $lbl=''; my $cnt = '';
    chomp; @atts = split(/\s*,\s*/, $_); $lbl = pop @atts;
    while (<F>) {
	chomp; my @v = split(/\s*,\s*/, $_);
	die "values (#=$#v): {@v}\natts (#=$#atts): @atts, lbl=$lbl,\n".
	    "count: $cnt\n" unless $#v-($cnt?2:1) == $#atts;
	my %av=(); my @a = @atts;
	while (@a) { $av{shift @a} = shift(@v) }
	$self->add_instances(attributes=>\%av,
			     label=>"$lbl=$v[0]",
			     cases=>($cnt?$v[1]:1) );
    }
    close(F);
} # end of add_csv_file

sub drop_attributes {
    my $self = shift;
    foreach my $a (@_) {
	my @tmp = grep { $a ne $_ } @{ $self->{attributes} };
	$self->{attributes} = \@tmp;
	delete($self->{attvals}{$a});
	delete($self->{stat_attributes}{$a});
	delete($self->{attribute_type}{$a});
	delete($self->{real_stat}{$a});
	delete($self->{smoothing}{$a});
    }
} # end of drop_attributes

sub add_instances {
  my ($self, %params) = @_;
  for ('attributes', 'label', 'cases') {
      die "Missing required '$_' parameter" unless exists $params{$_};
  }

  if (scalar(keys(%{ $self->{stat_attributes} })) == 0) {
      foreach my $a (keys(%{$params{attributes}})) {
	  $self->{stat_attributes}{$a} = {};
	  push @{ $self->{attributes} }, $a;
	  $self->{attvals}{$a} = [ ];
	  $self->{attribute_type}{$a} = 'nominal' unless defined($self->{attribute_type}{$a});
      }
  } else {
      foreach my $a (keys(%{$self->{stat_attributes}}))
      { die "attribute not given in instance: $a"
	    unless exists($params{attributes}{$a}) }
  }

  $self->{numof_instances} += $params{cases};

  push @{ $self->{labels} }, $params{label} unless
      exists $self->{stat_labels}->{$params{label}};

  $self->{stat_labels}{$params{label}} += $params{cases};

  foreach my $a (keys(%{$self->{stat_attributes}})) {
      if ( not exists($params{attributes}{$a}) )
      { die "attribute $a not given" }
      my $attval = $params{attributes}{$a};
      if (not exists($self->{stat_attributes}{$a}{$attval})) {
	  push @{ $self->{attvals}{$a} }, $attval;
	  $self->{stat_attributes}{$a}{$attval} = {};
      }
      $self->{stat_attributes}{$a}{$attval}{$params{label}} += $params{cases};
  }
}

sub add_instance {
    my ($self, %params) = @_; $params{cases} = 1;
    $self->add_instances(%params);
}

sub train {
    my $self = shift;
    my $m = $self->{model} = {};
    
    $m->{labelprob} = {};
    foreach my $label (keys(%{$self->{stat_labels}}))
    { $m->{labelprob}{$label} = $self->{stat_labels}{$label} /
                                $self->{numof_instances} } 

    $m->{condprob} = {};
    $m->{condprobe} = {};
    foreach my $att (keys(%{$self->{stat_attributes}})) {
        next if $self->{attribute_type}{$att} eq 'real';
	$m->{condprob}{$att} = {};
	$m->{condprobe}{$att} = {};
	foreach my $label (keys(%{$self->{stat_labels}})) {
	    my $total = 0; my @attvals = ();
	    foreach my $attval (keys(%{$self->{stat_attributes}{$att}})) {
		next unless
		    exists($self->{stat_attributes}{$att}{$attval}{$label}) and
		    $self->{stat_attributes}{$att}{$attval}{$label} > 0;
		push @attvals, $attval;
		$m->{condprob}{$att}{$attval} = {} unless
		    exists( $m->{condprob}{$att}{$attval} );
		$m->{condprob}{$att}{$attval}{$label} = 
		    $self->{stat_attributes}{$att}{$attval}{$label};
		$m->{condprobe}{$att}{$attval} = {} unless
		    exists( $m->{condprobe}{$att}{$attval} );
		$m->{condprobe}{$att}{$attval}{$label} = 
		    $self->{stat_attributes}{$att}{$attval}{$label};
		$total += $m->{condprob}{$att}{$attval}{$label};
	    }
	    if (exists($self->{smoothing}{$att}) and
		$self->{smoothing}{$att} =~ /^unseen count=/) {
		my $uc = $'; $uc = 0.5 if $uc <= 0;
		if(! exists($m->{condprob}{$att}{'*'}) ) {
		    $m->{condprob}{$att}{'*'} = {};
		    $m->{condprobe}{$att}{'*'} = {};
		}
		$m->{condprob}{$att}{'*'}{$label} = $uc;
		$total += $uc;
		if (grep {$_ eq '*'} @attvals) { die }
		push @attvals, '*';
	    }
	    foreach my $attval (@attvals) {
		$m->{condprobe}{$att}{$attval}{$label} =
		    "(= $m->{condprob}{$att}{$attval}{$label} / $total)";
		$m->{condprob}{$att}{$attval}{$label} /= $total;
	    }
	}
    }

    # For real-valued attributes, we use a Gaussian distribution;
    # first, let us collect statistics
    foreach my $att (keys(%{$self->{stat_attributes}})) {
        next unless $self->{attribute_type}{$att} eq 'real';
	print STDERR "Smoothing ignored for real attribute $att!\n" if
	    defined($self->{smoothing}{$att}) and $self->{smoothing}{$att};
        $m->{real_stat}->{$att} = {};
        foreach my $attval (keys %{$self->{stat_attributes}{$att}}){
            foreach my $label (keys %{$self->{stat_attributes}{$att}{$attval}}){
                $m->{real_stat}{$att}{$label}{sum}
                += $attval * $self->{stat_attributes}{$att}{$attval}{$label};

                $m->{real_stat}{$att}{$label}{count}
                += $self->{stat_attributes}{$att}{$attval}{$label};
            }
            foreach my $label (keys %{$self->{stat_attributes}{$att}{$attval}}){
		next if
                !defined($m->{real_stat}{$att}{$label}{count}) ||
		$m->{real_stat}{$att}{$label}{count} == 0;

                $m->{real_stat}{$att}{$label}{mean} =
                    $m->{real_stat}{$att}{$label}{sum} /
                        $m->{real_stat}{$att}{$label}{count};
            }
        }

        # calculate stddev
        foreach my $attval (keys %{$self->{stat_attributes}{$att}}) {
            foreach my $label (keys %{$self->{stat_attributes}{$att}{$attval}}){
                $m->{real_stat}{$att}{$label}{stddev} +=
		    ($attval - $m->{real_stat}{$att}{$label}{mean})**2 *
		    $self->{stat_attributes}{$att}{$attval}{$label};
            }
        }
	foreach my $label (keys %{$m->{real_stat}{$att}}) {
	    $m->{real_stat}{$att}{$label}{stddev} =
		sqrt($m->{real_stat}{$att}{$label}{stddev} /
		     ($m->{real_stat}{$att}{$label}{count}-1)
		     );
	}
    }				# foreach real attribute
}				# end of sub train

sub predict {
  my ($self, %params) = @_;
  my $newattrs = $params{attributes} or die "Missing 'attributes' parameter for predict()";
  my $m = $self->{model};  # For convenience
  
  my %scores;
  my @labels = @{ $self->{labels} };
  $scores{$_} = $m->{labelprob}{$_} foreach (@labels);
  foreach my $att (keys(%{ $newattrs })) {
      if (!defined($self->{attribute_type}{$att})) { die "Unknown attribute: `$att'" }
      next if $self->{attribute_type}{$att} eq 'real';
      die unless exists($self->{stat_attributes}{$att});
      my $attval = $newattrs->{$att};
      die "Unknown value `$attval' for attribute `$att'."
      unless exists($self->{stat_attributes}{$att}{$attval}) or
	  exists($self->{smoothing}{$att});
      foreach my $label (@labels) {
	  if (exists($m->{condprob}{$att}{$attval}) and
	      exists($m->{condprob}{$att}{$attval}{$label}) and
	      $m->{condprob}{$att}{$attval}{$label} > 0 ) {
	      $scores{$label} *=
		  $m->{condprob}{$att}{$attval}{$label};
	  } elsif (exists($self->{smoothing}{$att})) {
	      $scores{$label} *=
                  $m->{condprob}{$att}{'*'}{$label};
	  } else { $scores{$label} = 0 }

      }
  }

  foreach my $att (keys %{$newattrs}){
      next unless $self->{attribute_type}{$att} eq 'real';
      my $sum=0; my %nscores;
      foreach my $label (@labels) {
	  die unless exists $m->{real_stat}{$att}{$label}{mean};
	  $nscores{$label} =
              0.398942280401433 / $m->{real_stat}{$att}{$label}{stddev}*
              exp( -0.5 *
                  ( ( $newattrs->{$att} -
                      $m->{real_stat}{$att}{$label}{mean})
                    / $m->{real_stat}{$att}{$label}{stddev}
                  ) ** 2
		 );
	  $sum += $nscores{$label};
      }
      if ($sum==0) { print STDERR "Ignoring all Gaussian probabilities: all=0!\n" }
      else {
	  foreach my $label (@labels) { $scores{$label} *= $nscores{$label} }
      }
  }

  my $sumPx = 0.0;
  $sumPx += $scores{$_} foreach (keys(%scores));
  $scores{$_} /= $sumPx foreach (keys(%scores));
  return \%scores;
}
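
# A minimal sketch (the attribute values are hypothetical): pick the
# most probable label from the normalized scores returned by predict().
#   my $scores = $nb->predict(attributes=>{Html=>'Y',Caps=>'N',Free=>'Y'});
#   my ($best) = sort { $scores->{$b} <=> $scores->{$a} } keys %$scores;
#   print "best label: $best (P = $scores->{$best})\n";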

sub print_model {
    my $self = shift;
    my $withcounts = '';
    if ($#_>-1 && $_[0] eq 'with counts')
    { shift @_; $withcounts = 1; }
    my $m = $self->{model};
    my @labels = $self->labels;
    my $r;

    # prepare table category P(category)
    my @lines;
    push @lines, 'category ', '-';
    push @lines, "$_ " foreach @labels;
    @lines = _append_lines(@lines);
    @lines = map { $_.='| ' } @lines;
    $lines[1] = substr($lines[1],0,length($lines[1])-2).'+-';
    $lines[0] .= "P(category) ";
    foreach my $i (2..$#lines) {
	my $label = $labels[$i-2];
	$lines[$i] .= $m->{labelprob}{$label} .' ';
	if ($withcounts) {
	    $lines[$i] .= "(= $self->{stat_labels}{$label} / ".
		"$self->{numof_instances} ) ";
	}
    }
    @lines = _append_lines(@lines);

    $r .= join("\n", @lines) . "\n". $lines[1]. "\n\n";

    # prepare conditional tables
    my @attributes = sort $self->attributes;
    foreach my $att (@attributes) {
	@lines = ( "category ", '-' );
	my @lines1 = ( "$att ", '-' );
	my @lines2 = ( "P( $att | category ) ", '-' );
	my @attvals = sort keys(%{ $m->{condprob}{$att} });
	foreach my $label (@labels) {
	    if ( $self->{attribute_type}{$att} ne 'real' ) {
		foreach my $attval (@attvals) {
		    next unless exists($m->{condprob}{$att}{$attval}{$label});
		    push @lines, "$label ";
		    push @lines1, "$attval ";

		    my $line = $m->{condprob}{$att}{$attval}{$label};
		    if ($withcounts)
		    { $line.= ' '.$m->{condprobe}{$att}{$attval}{$label} }
		    $line .= ' ';
		    push @lines2, $line;
		}
	    } else {
		push @lines, "$label ";
		push @lines1, "real ";
		push @lines2, "Gaussian(mean=".
		    $m->{real_stat}{$att}{$label}{mean}.",stddev=".
		    $m->{real_stat}{$att}{$label}{stddev}.") ";
	    }
	    push @lines, '-'; push @lines1, '-'; push @lines2, '-';
	}
	@lines = _append_lines(@lines);
	foreach my $i (0 .. $#lines)
	{ $lines[$i] .= ($lines[$i]=~/-$/?'+-':'| ') . $lines1[$i] }
	@lines = _append_lines(@lines);
	foreach my $i (0 .. $#lines)
	{ $lines[$i] .= ($lines[$i]=~/-$/?'+-':'| ') . $lines2[$i] }
	@lines = _append_lines(@lines);

	$r .= join("\n", @lines). "\n\n";
    }

    return $r;
}

sub _append_lines {
    my @l = @_;
    my $m = 0;
    foreach (@l) { $m = length($_) if length($_) > $m }
    @l = map { $_ .= substr($_, -1) while length($_) < $m; $_ } @l;
    return @l;
}

sub labels {
  my $self = shift;
  return @{ $self->{labels} };
}

sub attributes {
  my $self = shift;
  return keys %{ $self->{stat_attributes} };
}

sub export_to_YAML {
    my $self = shift;
    require YAML;
    return YAML::Dump($self);
}

sub export_to_YAML_file {
    my $self = shift;
    my $file = shift;
    require YAML;
    YAML::DumpFile($file, $self);
}

1;
__END__

NaiveBayes1.pm  view on Meta::CPAN


AI::NaiveBayes1 - Naive Bayes Classification

=head1 SYNOPSIS

  use AI::NaiveBayes1;
  my $nb = AI::NaiveBayes1->new;
  $nb->add_table(
  "Html  Caps  Free  Spam  count
  -------------------------------
     Y     Y     Y     Y    42   
     Y     Y     Y     N    32   
     Y     Y     N     Y    17   
     Y     Y     N     N     7   
     Y     N     Y     Y    32   
     Y     N     Y     N    12   
     Y     N     N     Y    20   
     Y     N     N     N    16   
     N     Y     Y     Y    38   
     N     Y     Y     N    18   
     N     Y     N     Y    16   
     N     Y     N     N    16   
     N     N     Y     Y     2   
     N     N     Y     N     9   
     N     N     N     Y    11   
     N     N     N     N    91   
  -------------------------------
  ");
  $nb->train;
  print "Model:\n" . $nb->print_model;
  print "Model (with counts):\n" . $nb->print_model('with counts');

  $nb = AI::NaiveBayes1->new;
  $nb->add_instances(attributes=>{model=>'H',place=>'B'},
		     label=>'repairs=Y',cases=>30);
  $nb->add_instances(attributes=>{model=>'H',place=>'B'},
		     label=>'repairs=N',cases=>10);
  $nb->add_instances(attributes=>{model=>'H',place=>'N'},
		     label=>'repairs=Y',cases=>18);
  $nb->add_instances(attributes=>{model=>'H',place=>'N'},
		     label=>'repairs=N',cases=>16);
  $nb->add_instances(attributes=>{model=>'T',place=>'B'},
		     label=>'repairs=Y',cases=>22);
  $nb->add_instances(attributes=>{model=>'T',place=>'B'},
		     label=>'repairs=N',cases=>14);
  $nb->add_instances(attributes=>{model=>'T',place=>'N'},
		     label=>'repairs=Y',cases=> 6);
  $nb->add_instances(attributes=>{model=>'T',place=>'N'},
		     label=>'repairs=N',cases=>84);

  $nb->train;

  print "Model:\n" . $nb->print_model;
  
  # Find results for unseen instances
  my $result = $nb->predict
     (attributes => {model=>'T', place=>'N'});

  foreach my $k (keys(%{ $result })) {
      print "for label $k P = " . $result->{$k} . "\n";
  }

  # export the model into a string
  my $string = $nb->export_to_YAML();

  # create the same model from the string
  my $nb1 = AI::NaiveBayes1->import_from_YAML($string);

  # write the model to a file (shorter than model->string->file)
  $nb->export_to_YAML_file('t/tmp1');

  # read the model from a file (shorter than file->string->model)
  my $nb2 = AI::NaiveBayes1->import_from_YAML_file('t/tmp1');

See the EXAMPLES section below for more examples.

=head1 DESCRIPTION

NaiveBayes1.pm  view on Meta::CPAN


=item C<{smoothing}{$attribute}>

Attribute smoothing.  No smoothing is applied if the entry does not exist.  Implemented smoothing:

      - /^unseen count=/ followed by number, e.g., 0.5

=back

=head2 Attribute Smoothing

For an attribute A one can specify:

    $nb->{smoothing}{A} = 'unseen count=0.5';

to provide a count for unseen data.  The count is taken into
consideration in training and prediction, when any unseen attribute
values are observed.  Zero probabilities can be prevented in this way.
A count other than 0.5 can be provided, but if it is <=0 it will be
reset to 0.5.
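
For instance (a minimal sketch; C<Free> is a hypothetical attribute
name):

    my $nb = AI::NaiveBayes1->new;
    $nb->{smoothing}{Free} = 'unseen count=0.5';

During training this adds a pseudo-value C<*> for the attribute, and
predict() falls back on its probability whenever a value of C<Free>
has never been seen with a given label.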

NaiveBayes1.pm  view on Meta::CPAN

=head1 THEORY

Bayes' Theorem is a way of inverting a conditional probability. It
states:

                P(y|x) P(x)
      P(x|y) = -------------
                   P(y)

and so on...
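
In naive Bayes classification this is combined with the assumption
that the attributes are conditionally independent given the label (a
standard step, stated here for completeness):

                              P(C=c) * P(A1=a1|C=c) * ... * P(An=an|C=c)
  P(C=c|A1=a1,...,An=an)  =  ---------------------------------------------
                                        P(A1=a1,...,An=an)

The denominator is the same for every label, which is why predict()
simply multiplies the numerator factors and normalizes the scores at
the end.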

This is a pretty standard algorithm explained in many machine learning
textbooks (e.g., "Data Mining" by Witten and Frank).

NaiveBayes1.pm  view on Meta::CPAN

Gaussian (normal) distribution for each possible value of C=c.  Hence,
for each C=c we collect the mean value (m) and standard deviation (s)
for A during training.  During classification, P(A=a|C=c) is estimated
using Gaussian distribution, i.e., in the following way:

                    1               (a-m)^2
 P(A=a|C=c) = ------------ * exp( - ------- )
              sqrt(2*Pi)*s           2*s^2

this boils down to the following lines of code:

    $scores{$label} *=
    0.398942280401433 / $m->{real_stat}{$att}{$label}{stddev}*
      exp( -0.5 *
           ( ( $newattrs->{$att} -
               $m->{real_stat}{$att}{$label}{mean})
             / $m->{real_stat}{$att}{$label}{stddev}
           ) ** 2
	   );

i.e. (the constant 0.398942280401433 is 1/sqrt(2*Pi)),

  P(A=a|C=c) = 0.398942280401433 / s *
    exp( -0.5 * ( ( a-m ) / s ) ** 2 );
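
As a worked example (the numbers follow from the weather data in the
EXAMPLES section below), the temperature values labeled play=yes have
mean m=73 and standard deviation s=6.2, so:

  P(temperature=66|play=yes) = 0.398942280401433 / 6.2 *
      exp( -0.5 * ( (66-73)/6.2 ) ** 2 )  =  0.034 (approximately)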


=head1 EXAMPLES

Example with a real-valued attribute modeled by a Gaussian
distribution (from Witten I. and Frank E. book "Data Mining" (the WEKA
book), page 86):

 # @relation weather
 # 
 # @attribute outlook {sunny, overcast, rainy}
 # @attribute temperature real
 # @attribute humidity real
 # @attribute windy {TRUE, FALSE}
 # @attribute play {yes, no}
 # 
 # @data
 # sunny,85,85,FALSE,no
 # sunny,80,90,TRUE,no
 # overcast,83,86,FALSE,yes
 # rainy,70,96,FALSE,yes
 # rainy,68,80,FALSE,yes
 # rainy,65,70,TRUE,no
 # overcast,64,65,TRUE,yes
 # sunny,72,95,FALSE,no
 # sunny,69,70,FALSE,yes
 # rainy,75,80,FALSE,yes
 # sunny,75,70,TRUE,yes
 # overcast,72,90,TRUE,yes
 # overcast,81,75,FALSE,yes
 # rainy,71,91,TRUE,no
 
 $nb->set_real('temperature', 'humidity');
 
 $nb->add_instance(attributes=>{outlook=>'sunny',temperature=>85,humidity=>85,windy=>'FALSE'},label=>'play=no');
 $nb->add_instance(attributes=>{outlook=>'sunny',temperature=>80,humidity=>90,windy=>'TRUE'},label=>'play=no');
 $nb->add_instance(attributes=>{outlook=>'overcast',temperature=>83,humidity=>86,windy=>'FALSE'},label=>'play=yes');
 $nb->add_instance(attributes=>{outlook=>'rainy',temperature=>70,humidity=>96,windy=>'FALSE'},label=>'play=yes');
 $nb->add_instance(attributes=>{outlook=>'rainy',temperature=>68,humidity=>80,windy=>'FALSE'},label=>'play=yes');
 $nb->add_instance(attributes=>{outlook=>'rainy',temperature=>65,humidity=>70,windy=>'TRUE'},label=>'play=no');
 $nb->add_instance(attributes=>{outlook=>'overcast',temperature=>64,humidity=>65,windy=>'TRUE'},label=>'play=yes');
 $nb->add_instance(attributes=>{outlook=>'sunny',temperature=>72,humidity=>95,windy=>'FALSE'},label=>'play=no');
 $nb->add_instance(attributes=>{outlook=>'sunny',temperature=>69,humidity=>70,windy=>'FALSE'},label=>'play=yes');
 $nb->add_instance(attributes=>{outlook=>'rainy',temperature=>75,humidity=>80,windy=>'FALSE'},label=>'play=yes');
 $nb->add_instance(attributes=>{outlook=>'sunny',temperature=>75,humidity=>70,windy=>'TRUE'},label=>'play=yes');
 $nb->add_instance(attributes=>{outlook=>'overcast',temperature=>72,humidity=>90,windy=>'TRUE'},label=>'play=yes');
 $nb->add_instance(attributes=>{outlook=>'overcast',temperature=>81,humidity=>75,windy=>'FALSE'},label=>'play=yes');
 $nb->add_instance(attributes=>{outlook=>'rainy',temperature=>71,humidity=>91,windy=>'TRUE'},label=>'play=no');
 
 $nb->train;
 
 my $printedmodel =  "Model:\n" . $nb->print_model;
 my $p = $nb->predict(attributes=>{outlook=>'sunny',temperature=>66,humidity=>90,windy=>'TRUE'});

 YAML::DumpFile('file', $p);
 die unless (abs($p->{'play=no'}  - 0.792) < 0.001);
 die unless(abs($p->{'play=yes'} - 0.208) < 0.001);

=head1 HISTORY

L<Algorithm::NaiveBayes> by Ken Williams was not what I needed so I
wrote this one.  L<Algorithm::NaiveBayes> is oriented towards text

 view all matches for this distribution


AI-Nerl

 view release on metacpan or  search on metacpan

examples/digits/deep_digits.pl  view on Meta::CPAN

my $labels = rfits('t10k-labels-idx1-ubyte.fits');
my $y = identity(10)->range($labels->transpose)->sever;
say 't10k data loaded';

my $nerl = AI::Nerl->new(
   # type => image,dims=>[28,28],...
   scale_input => 1/256,
);

$nerl->init_network(l1 => 784, l3=>10, l2=>7);#method=batch,hidden=>12345,etc

my $prev_nerl = $nerl;
my $prev_cost = 10000;
my $passes=0;

for(1..3000){
   my @test = ($images(9000:9999)->sever,$y(9000:9999)->sever);
   my $n = int rand(8000);
   my $m = $n+499;
   my @train = ($images->slice("$n:$m")->copy, $y->slice("$n:$m")->copy);
   $nerl->train(@train,passes=>10);
   my ($cost, $nc) = $nerl->cost( @test );
   print "cost:$cost\n,num correct: $nc / 1000\n";
#   $nerl->network->show_neuron(1);
   $passes++;
   if ($cost < $prev_cost or $passes<10){
      $prev_cost = $cost;
      $prev_nerl = $nerl;
   } else { # cost got worse: start a fresh net using $prev_nerl as basis
      $passes=0;

      print "New layer!";
      $prev_cost = 1000;
      $nerl = AI::Nerl->new(
         basis => $prev_nerl,
         l2 => int(rand(12))+5,
      );
      $nerl->init_network();
      $prev_nerl = $nerl;
      #die $nerl->network->theta1->slice("1:2") . $nerl->network->theta2->slice("1:2");
   }


   #print "example output, images 0 to 4\n";
   #print "Labels: " . $y(0:4) . "\n";
   #print $nerl->run($images(0:4));
#   $nerl->network->show_neuron($_) for (0..4);

}

 view all matches for this distribution


AI-NeuralNet-BackProp

 view release on metacpan or  search on metacpan

BackProp.pm  view on Meta::CPAN

#!/usr/bin/perl	

# $Id: BackProp.pm,v 0.89 2000/08/12 01:05:27 josiah Exp $
#
# Copyright (c) 2000  Josiah Bryan  USA
#
# See AUTHOR section in pod text below for usage and distribution rights.   
# See UPDATES section in pod text below for info on what has changed in this release.
#

BEGIN {
	$AI::NeuralNet::BackProp::VERSION = "0.89";
}

#
# name:   AI::NeuralNet::BackProp
#
# author: Josiah Bryan 
# date:   Tuesday August 15 2000
# desc:   A simple back-propagation, feed-forward neural network with
#		  learning implemented via a generalization of the Delta rule and
#		  several principles of Hopfield networks. 
# online: http://www.josiah.countystart.com/modules/AI/cgi-bin/rec.pl
#

package AI::NeuralNet::BackProp::neuron;
	
	use strict;
	
	# Dummy constructor
    sub new {
    	bless {}, shift
	}	
	
	# Rounds floats to ints; returns 0 if the value cannot be rounded
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
      	my $n = eval { int(sprintf("%.0f",shift)) };
      	return defined $n ? $n : 0;
	}
    
	
	# Receives input from other neurons. They must
	# be registered as a synapse of this neuron to effectively
	# input.
	sub input {
		my $self 	 =	shift;
		my $sid		 =	shift;
		my $value	 =	shift;
		
		# We simply weight the value sent by the neuron. The neuron identifies itself to us
		# using the code we gave it when it registered itself with us. The code is in $sid, 
		# (synapse ID) and we use that to track the weight of the connection.
		# This line simply multiplies the value by its weight and gets the integer from it.
		$self->{SYNAPSES}->{LIST}->[$sid]->{VALUE}	=	intr($value	*	$self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT});
		$self->{SYNAPSES}->{LIST}->[$sid]->{FIRED}	=	1;                                 
		$self->{SYNAPSES}->{LIST}->[$sid]->{INPUT}	=	$value;
		
		# Debugger
		AI::NeuralNet::BackProp::out1("\nRecieved input of $value, weighted to $self->{SYNAPSES}->{LIST}->[$sid]->{VALUE}, synapse weight is $self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT} (sid is $sid for $self).\n");
		AI::NeuralNet::BackProp::out1((($self->input_complete())?"All synapses have fired":"Not all synapses have fired"));
		AI::NeuralNet::BackProp::out1(" for $self.\n");
		
		# Check and see if all synapses have fired that are connected to this one.
		# If they have, then generate the output value for this synapse.
		$self->output() if($self->input_complete());
	}
	
	# Loops thru and outputs to every neuron that this
	# neuron is registered as synapse of.
	sub output {
		my $self	=	shift;
		my $size	=	$self->{OUTPUTS}->{SIZE} || 0;
		my $value	=	$self->get_output();
		for (0..$size-1) {
			AI::NeuralNet::BackProp::out1("Outputing to $self->{OUTPUTS}->{LIST}->[$_]->{PKG}, index $_, a value of $value with ID $self->{OUTPUTS}->{LIST}->[$_]->{ID}.\n");
			$self->{OUTPUTS}->{LIST}->[$_]->{PKG}->input($self->{OUTPUTS}->{LIST}->[$_]->{ID},$value);
		}
	}
	
	# Used internally by output().
	sub get_output {
		my $self		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $value		=	0;
		my $state		= 	0;
		my (@map,@weight);
	
	    # We loop through all the synapses connected to this one and add the weighted
	    # values together, saving them in a debugging list.
		for (0..$size-1) {
			$value	+=	$self->{SYNAPSES}->{LIST}->[$_]->{VALUE};
			$self->{SYNAPSES}->{LIST}->[$_]->{FIRED} = 0;
			
			$map[$_]=$self->{SYNAPSES}->{LIST}->[$_]->{VALUE};
			$weight[$_]=$self->{SYNAPSES}->{LIST}->[$_]->{WEIGHT};
		}
		                                              
		# Debugger
		AI::NeuralNet::BackProp::join_cols(\@map,5) if(($AI::NeuralNet::BackProp::DEBUG eq 3) || ($AI::NeuralNet::BackProp::DEBUG eq 2));
		AI::NeuralNet::BackProp::out2("Weights: ".join(" ",@weight)."\n");
		
		# Simply average the values and get the integer of the average.
		$state	=	intr($value/$size);
		
		# Debugger
		AI::NeuralNet::BackProp::out1("From get_output, value is $value, so state is $state.\n");
		
		# Possible future expansion for self excitation. Not currently used.
		$self->{LAST_VALUE}	=	$value;
		
		# Just return the $state
		return $state;
	}
	
	# Used by input() to check if all registered synapses have fired.
	sub input_complete {
		my $self		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $retvalue	=	1;
		
		# Very simple loop. Doesn't need explaining.
		for (0..$size-1) {
			$retvalue = 0 if(!$self->{SYNAPSES}->{LIST}->[$_]->{FIRED});
		}
		return $retvalue;
	}
	
	# Used to recursively adjust the weights of synapse input channels
	# to give a desired value. Designed to be called via 
	# AI::NeuralNet::BackProp::NeuralNetwork::learn().
	sub weight	{                
		my $self		=	shift;
		my $ammount		=	shift;
		my $what		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $value;
		AI::NeuralNet::BackProp::out1("Weight: ammount is $ammount, what is $what with size at $size.\n");
		      
		# Now this sub is the main cog in the learning wheel. It is called recursively on 
		# each neuron that has been bad (given incorrect output.)
		for my $i (0..$size-1) {
			$value		=	$self->{SYNAPSES}->{LIST}->[$i]->{VALUE};

if(0) {
       
       		# Formula by Steve Purkis
       		# Converges very fast for low-value inputs. Has trouble converging on high-value
       		# inputs. Feel free to play and try to get to work for high values.
			my $delta	=	$ammount * ($what - $value) * $self->{SYNAPSES}->{LIST}->[$i]->{INPUT};
			$self->{SYNAPSES}->{LIST}->[$i]->{WEIGHT}  +=  $delta;
			$self->{SYNAPSES}->{LIST}->[$i]->{PKG}->weight($ammount,$what);
}
			
			# This formula in use by default is original by me (Josiah Bryan) as far as I know.
			
			# If it is equal, then don't adjust
			#
			### Disabled because this sometimes causes 
			### infinite loops when learning with range limits enabled
			#
			#next if($value eq $what);
			
			# Adjust increment by the weight of the synapse of 
			# this neuron & apply direction delta
			my $delta = 
					$ammount * 
						($value<$what?1:-1) * 
							$self->{SYNAPSES}->{LIST}->[$i]->{WEIGHT};
			
			#print "($value,$what) delta:$delta\n";
			
			# Recursively apply
			$self->{SYNAPSES}->{LIST}->[$i]->{WEIGHT}  +=  $delta;
			$self->{SYNAPSES}->{LIST}->[$i]->{PKG}->weight($ammount,$what);
			    
		}
	}
	
	# Registers some neuron as a synapse of this neuron.           
	# This is called exclusively by connect(), except for
	# in initialize_group() to connect the _map() package.
	sub register_synapse {
		my $self	=	shift;
		my $synapse	=	shift;
		my $sid		=	$self->{SYNAPSES}->{SIZE} || 0;
		$self->{SYNAPSES}->{LIST}->[$sid]->{PKG}		=	$synapse;
		$self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT}		=	1.00		if(!$self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT});
		$self->{SYNAPSES}->{LIST}->[$sid]->{FIRED}		=	0;       
		AI::NeuralNet::BackProp::out1("$self: Registering sid $sid with weight $self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT}, package $self->{SYNAPSES}->{LIST}->[$sid]->{PKG}.\n");
		$self->{SYNAPSES}->{SIZE} = ++$sid;
		return ($sid-1);
	}
	
	# Called via AI::NeuralNet::BackProp::NeuralNetwork::initialize_group() to 
	# form the neuron grids.
	# This just registers another synapse as one to output to from this one, and
	# then we ask that synapse to let us register as an input connection and we
	# save the sid that the output synapse returns.
	sub connect {
		my $self	=	shift;
		my $to		=	shift;
		my $oid		=	$self->{OUTPUTS}->{SIZE} || 0;
		AI::NeuralNet::BackProp::out1("Connecting $self to $to at $oid...\n");
		$self->{OUTPUTS}->{LIST}->[$oid]->{PKG}	=	$to;
 		$self->{OUTPUTS}->{LIST}->[$oid]->{ID}	=	$to->register_synapse($self);
		$self->{OUTPUTS}->{SIZE} = ++$oid;
		return $self->{OUTPUTS}->{LIST}->[$oid]->{ID};
	}
1;
			 
package AI::NeuralNet::BackProp;
	
	use Benchmark;          
	use strict;
	
	# Returns the number of elements in an array ref, undef on error
	sub _FETCHSIZE {
		my $a=$_[0];
		my ($b,$x);
		return undef if(substr($a,0,5) ne "ARRAY");
		foreach $b (@{$a}) { $x++ };
		return $x;
	}

	# Debugging subs
	$AI::NeuralNet::BackProp::DEBUG  = 0;
	sub whowasi { (caller(1))[3] . '()' }
	sub debug { shift; $AI::NeuralNet::BackProp::DEBUG = shift || 0; } 
	sub out1  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG eq 1) }
	sub out2  { print  shift() if (($AI::NeuralNet::BackProp::DEBUG eq 1) || ($AI::NeuralNet::BackProp::DEBUG eq 2)) }
	sub out3  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG) }
	sub out4  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG eq 4) }
	
	# Rounds a floating-point to an integer with int() and sprintf();
	# returns 0 if the value cannot be rounded
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
      	my $n = eval { int(sprintf("%.0f",shift)) };
      	return defined $n ? $n : 0;
	}
    
	# Used to format array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $a		=	shift;
		my $b		=	shift;
		my $x;
		foreach my $el (@{$map}) { 
			my $str = ((int($el))?$a:$b);
			$str=$el."\0" if(!$a);
			print $str;
			$x++;
			if($x>$break-1) {
				print "\n";
				$x=0;
			}
		}
		print "\n";
	}
	
	# Returns percentage difference between all elements of two
	# array refs of exact same length (in elements).
	# Now calculates actual difference in numerical value.
	sub pdiff {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $a1	=	shift;
		my $a2	=	shift;
		my $a1s	=	$#{$a1}; #AI::NeuralNet::BackProp::_FETCHSIZE($a1);
		my $a2s	=	$#{$a2}; #AI::NeuralNet::BackProp::_FETCHSIZE($a2);
		my ($a,$b,$diff,$t);
		$diff=0;
		#return undef if($a1s ne $a2s);	# must be same length
		for my $x (0..$a1s) {
			$a = $a1->[$x];
			$b = $a2->[$x];
			if($a!=$b) {
				if($a<$b){$t=$a;$a=$b;$b=$t;}
				$a=1 if(!$a);
				$diff+=(($a-$b)/$a)*100;
			}
		}
		$a1s = 1 if(!$a1s);
		return sprintf("%.10f",($diff/$a1s));
	}
	
	# Returns $fa as a percentage of $fb
	sub p {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my ($fa,$fb)=(shift,shift);
		sprintf("%.3f",((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100);
	}
	
	# This sub will take an array ref of a data set, which it expects in this format:
	#   my @data_set = (	[ ...inputs... ], [ ...outputs ... ],
	#				   				   ... rows ...
	#				   );
	#
	# This sub returns the percentage of 'forgetfulness' when the net learns all the
	# data in the set in order. Usage:
	#
	#	 learn_set(\@data,[ options ]);
	#
	# Options are options in hash form. They can be of any form that $net->learn takes.
	#
	# It returns a percentage string.
	#
	sub learn_set {
		my $self	=	shift if(substr($_[0],0,4) eq 'AI::'); 
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	int($#{$data}/2);	# index of the last input/output pair
		my $inc		=	$args{inc};
		my $max		=	$args{max};
	    my $error	=	$args{error};
	    my $p		=	(defined $args{flag})	?$args{flag}	   :1;
	    my $row		=	(defined $args{pattern})?$args{pattern}*2+1:1;
	    my ($fa,$fb);
		for my $x (0..$len) {
			print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG);
			my $str = $self->learn( $data->[$x*2],			# The list of data to input to the net
					  		  		$data->[$x*2+1], 		# The output desired
					    			inc=>$inc,				# The starting learning gradient
					    			max=>$max,				# The maximum num of loops allowed
					    			error=>$error);			# The maximum (%) error allowed
			print $str if($AI::NeuralNet::BackProp::DEBUG); 
		}
			
		
		my $res;
		$data->[$row] = $self->crunch($data->[$row]) if($data->[$row] == 0);
		
		if ($p) {
			$res=pdiff($data->[$row],$self->run($data->[$row-1]));
		} else {
			$res=$data->[$row]->[0]-$self->run($data->[$row-1])->[0];
		}
		return $res;
	}
	
	# This sub will take an array ref of a data set, which it expects in this format:
	#   my @data_set = (	[ ...inputs... ], [ ...outputs ... ],
	#				   				   ... rows ...
	#				   );
	#
	# This sub returns the percentage of 'forgetfulness' when the net learns all the
	# data in the set in RANDOM order. Usage:
	#
	#	 learn_set_rand(\@data,[ options ]);
	#
	# Options are options in hash form. They can be of any form that $net->learn takes.
	#
	# It returns a true value.
	#
	sub learn_set_rand {
		my $self	=	shift if(substr($_[0],0,4) eq 'AI::'); 
		my $data	=	shift;
		my %args	=	@_;
		my $patterns	=	int($#{$data}/2)+1;	# number of input/output pairs
		my $inc		=	$args{inc};
		my $max		=	$args{max};
	    my $error	=	$args{error};
	    my @learned;
	    my $left	=	$patterns;			# pairs not yet learned
		while($left>0) {
			my $x=int(rand($patterns));
			next if($learned[$x]);
			$learned[$x]=1; $left--;
			print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG); 
			my $str =  $self->learn($data->[$x*2],			# The list of data to input to the net
					  		  		$data->[$x*2+1], 		# The output desired
					    			inc=>$inc,				# The starting learning gradient
			 		    			max=>$max,				# The maximum num of loops allowed
					    			error=>$error);			# The maximum (%) error allowed
			print $str if($AI::NeuralNet::BackProp::DEBUG); 
		}
			
		
		return 1; 
	}
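
	# A usage sketch (the data values are made up): inputs and desired
	# outputs alternate in the flat data list, one pair per pattern.
	#   my @set = ( [0,0,1,1,1], [1,0,1,0,1],
	#               [1,1,1,0,0], [0,0,0,1,1] );
	#   $net->learn_set_rand(\@set, max=>1024, error=>5);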

	# Returns the index of the element in array REF passed with the highest comparative value
	sub high {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1	=	shift;
		
		my ($el,$len,$tmp);
		foreach $el (@{$ref1}) {
			$len++;
		}
		$tmp=0;
		for my $x (0..$len-1) {
			$tmp = $x if((@{$ref1})[$x] > (@{$ref1})[$tmp]);
		}
		return $tmp;
	}
	
	# Returns the index of the element in array REF passed with the lowest comparative value
	sub low {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1	=	shift;
		
		my ($el,$len,$tmp);
		foreach $el (@{$ref1}) {
			$len++;
		}
		$tmp=0;
		for my $x (0..$len-1) {
			$tmp = $x if((@{$ref1})[$x] < (@{$ref1})[$tmp]);
		}
		return $tmp;
	}  
	
	# Returns a pcx object
	sub load_pcx {
		my $self	=	shift;
		return AI::NeuralNet::BackProp::PCX->new($self,shift);
	}	
	
	# Crunch a string of words into a map
	sub crunch {
		my $self	=	shift;
		my (@map,$ic);
		my @ws 		=	split(/[\s\t]/,shift);
		for my $a (0..$#ws) {
			$ic=$self->crunched($ws[$a]);
			if(!defined $ic) {
				$self->{_CRUNCHED}->{LIST}->[$self->{_CRUNCHED}->{_LENGTH}++]=$ws[$a];
				$map[$a]=$self->{_CRUNCHED}->{_LENGTH};
			} else {
				$map[$a]=$ic;
            }
		}
		return \@map;
	}
	
	# Finds if a word has been crunched.
	# Returns undef on failure, word index for success.
	sub crunched {
		my $self	=	shift;
		for my $a (0..$self->{_CRUNCHED}->{_LENGTH}-1) {
			return $a+1 if($self->{_CRUNCHED}->{LIST}->[$a] eq $_[0]);
		}
		return undef;
	}
	
	# Alias for crunched(), above
	sub word { crunched(@_) }
	
	# Uncrunches a map (array ref) into an array of words (not an array ref) and returns array
	sub uncrunch {
		my $self	=	shift;
		my $map = shift;
		my ($c,$el,$x);
		foreach $el (@{$map}) {
			$c .= $self->{_CRUNCHED}->{LIST}->[$el-1].' ';
		}
		return $c;
	}
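
	# Round-trip sketch: crunch() maps words to integer indices and
	# uncrunch() maps an index list back to the words (each word comes
	# back with a trailing space appended).
	#   my $map = $net->crunch("I love neural networks!");
	#   print $net->uncrunch($map);   # prints "I love neural networks! "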
	
	# Sets/gets the randomness factor in the network. Setting a value of 0 disables random factors.
	sub random {
		my $self	=	shift;
		my $rand	=	shift;
		return $self->{random}	if(!(defined $rand));
		$self->{random}	=	$rand;
	}
	
	# Sets/gets column width for printing lists in debug modes 1,3, and 4.
	sub col_width {
		my $self	=	shift;
		my $width	=	shift;
		return $self->{col_width}	if(!$width);
		$self->{col_width}	=	$width;
	}
	
	# Sets/Removes value ranging
	sub range {
		my $self	=	shift;
		my $ref		=	shift;
		my $b		=	shift;
		if(substr($ref,0,5) ne "ARRAY") {
			if(($ref == 0) && (!defined $b)) {
				$ref	= $self->crunch($ref);
				#print "\$ref is a string, crunching to ",join(',',@{$ref}),"\n";
			} else {
    			my $a	= $ref;
    			$a		= $self->crunch($a)->[0] if($a == 0);
				$b		= $self->crunch($b)->[0] if($b == 0);
				$_[++$#_] = $a;
    			$_[++$#_] = $b;
    			$ref	= \@_;
				#print "Found ranged definition, joined to ",join(',',@{$ref}),"\n";
			}
		}
		my $rA		=	0;
		my $rB		=	$#{$ref};
		my $rS		=	0; #shift;
		if(!$rA && !$rB) {
			$self->{rA}=$self->{rB}=-1;
			return undef;
		}
		if($rB<$rA){my $t=$rA;$rA=$rB;$rB=$t};
		$self->{rA}=$rA;
		$self->{rB}=$rB;
		$self->{rS}=$rS if($rS);
		$self->{rRef} = $ref;
		return $ref;
	}
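
	# A usage sketch: after this call the outputs of run() are mapped
	# onto elements of the supplied list by _range() below, so results
	# snap to one of these values.
	#   $net->range([0,5,10,15]);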
	
	# Used internally to scale outputs to fit range
	sub _range {
		my $self	=	shift;  
		my $in		=	shift;
		my $rA		=	$self->{rA};
		my $rB		=	$self->{rB};
		my $rS		=	$self->{rS};
		my $r		=	$rB;#-$rA+1;
		return $in if(!$rA && !$rB);
		my $l		=	$self->{OUT}-1;
		my $out 	=	[];
		# Adjust for a maximum outside what we have seen so far
		for my $i (0..$l) {
			$rS=$in->[$i] if($in->[$i]>$rS);
		}
		#print "\$l:$l,\$rA:$rA,\$rB:$rB,\$rS:$rS,\$r:$r\n";
		# Loop through, convert values to percentage of maximum, then multiply
		# percentage by range and add to base of range to get final value
		for my $i (0..$l) {
			#print "\$i:$i,\$in:$in->[$i]\n";
			$rS=1 if(!$rS);
			my $t=intr((($rS-$in->[$i])/$rS)*$r+$rA);
			#print "t:$t,$self->{rRef}->[$t],i:$i\n";
			$out->[$i] = $self->{rRef}->[$t];
		}
		$self->{rS}=$rS;
		return $out;
	}
			
		
	# Initializes the base for a new neural network.
	# It is recommended that you call learn() before run()ing a pattern.
	# See documentation above for usage.
	sub new {
    	no strict;
    	my $type	=	shift;
		my $self	=	{};
		my $layers	=	shift;
		my $size	=	shift;
		my $out		=	shift || $size;
		my $flag	=	shift || 0;
		
		bless $self, $type;
		
		# If $layers is a string, then it will be numerically equal to 0, so try to load it
		# as a network file.
		if($layers == 0) {  
		    # We use a "1" flag as the second argument to indicate that we want load()
		    # to call the new constructor to make a network the same size as in the file
		    # and return a reference to the network, instead of just creating the network from
		    # a pre-existing reference
			return $self->load($layers,1);
		}
				
		
		#print "Creating $size neurons in each layer for $layers layer(s)...\n";
		
		AI::NeuralNet::BackProp::out2 "Creating $size neurons in each layer for $layers layer(s)...\n";
		
		# Error checking
		return undef if($out>$size);
		
		# When this is called, they tell us how many layers and neurons in each layer.
		# But really what we store is a long line of neurons that are only divided in theory
		# when connecting the outputs and inputs.
		my $div = $size;
		$size = $layers * $size;
		
		AI::NeuralNet::BackProp::out2 "Creating RUN and MAP systems for network...\n";
		#print "Creating RUN and MAP systems for network...\n";
		
		# Create a new runner and mapper for the network.
		$self->{RUN} = new AI::NeuralNet::BackProp::_run($self);
		$self->{MAP} = new AI::NeuralNet::BackProp::_map($self);
		
		$self->{SIZE}	=	$size;
		$self->{DIV}	=	$div;
		$self->{OUT}	=	$out;
		$self->{FLAG}	=	$flag;
		$self->{col_width}= 5;
		$self->{random} = 0.001;
		
		$self->initialize_group();
		
		return $self;
	}	

	# Save entire network state to disk.
	sub save {
		my $self	=	shift;
		my $file	=	shift;
		my $size	=	$self->{SIZE};
		my $div		=	$self->{DIV};
		my $out		=	$self->{OUT};
		my $flag	=	$self->{FLAG};

	    open(FILE,">$file");
	    
	    print FILE "size=$size\n";
	    print FILE "div=$div\n";
	    print FILE "out=$out\n";
	    print FILE "flag=$flag\n";
	    print FILE "rand=$self->{random}\n";
	    print FILE "cw=$self->{col_width}\n";
		print FILE "crunch=$self->{_CRUNCHED}->{_LENGTH}\n";
		print FILE "rA=$self->{rA}\n";
		print FILE "rB=$self->{rB}\n";
		print FILE "rS=$self->{rS}\n";
		print FILE "rRef=",(($self->{rRef})?join(',',@{$self->{rRef}}):''),"\n";
			
		for my $a (0..$self->{_CRUNCHED}->{_LENGTH}-1) {
			print FILE "c$a=$self->{_CRUNCHED}->{LIST}->[$a]\n";
		}
		
		my $w;
		for my $a (0..$self->{SIZE}-1) {
			$w="";
			for my $b (0..$self->{DIV}-1) {
				$w .= "$self->{NET}->[$a]->{SYNAPSES}->{LIST}->[$b]->{WEIGHT},";
			}
			chop($w);
			print FILE "n$a=$w\n";
		}
	
	    close(FILE);
	    
	    return $self;
	}
        
	# Load entire network state from disk.
	sub load {
		my $self	=	shift;
		my $file	=	shift;
		my $load_flag = shift || 0;
	    
	    return undef if(!(-f $file));
	    
	    open(FILE,"$file");
	    my @lines=<FILE>;
	    close(FILE);
	    
	    my %db;
	    for my $line (@lines) {
	    	chomp($line);
	    	my ($a,$b) = split /=/, $line;
	    	$db{$a}=$b;
	    }
	    
	    return undef if(!$db{"size"});
	    
	    if($load_flag) {
		    undef $self;
	
			# Create new network
		    $self = AI::NeuralNet::BackProp->new(intr($db{"size"}/$db{"div"}),
		    								     $db{"div"},
		    								     $db{"out"},
		    								     $db{"flag"});
		} else {
			$self->{DIV}	=	$db{"div"};
			$self->{SIZE}	=	$db{"size"};
			$self->{OUT}	=	$db{"out"};
			$self->{FLAG}	=	$db{"flag"};
		}
		
	    # Load variables
	    $self->{col_width}	= $db{"cw"};
	    $self->{random}		= $db{"rand"};
        $self->{rA}			= $db{"rA"};
		$self->{rB}			= $db{"rB"};
		$self->{rS}			= $db{"rS"};
		my @tmp				= split /\,/, $db{"rRef"};
		$self->{rRef}		= \@tmp;
		
	   	$self->{_CRUNCHED}->{_LENGTH}	=	$db{"crunch"};
		
		for my $a (0..$self->{_CRUNCHED}->{_LENGTH}-1) {
			$self->{_CRUNCHED}->{LIST}->[$a] = $db{"c$a"}; 
		}
		
		$self->initialize_group();
	    
		my ($w,@l);
		for my $a (0..$self->{SIZE}-1) {
			$w=$db{"n$a"};
			@l=split /\,/, $w;
			for my $b (0..$self->{DIV}-1) {
				$self->{NET}->[$a]->{SYNAPSES}->{LIST}->[$b]->{WEIGHT}=$l[$b];
			}
		}
	
	    return $self;
	}
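
	# Persistence sketch: save() writes a plain-text snapshot, and
	# passing a file name as the first argument to new() reloads it
	# (a string compares numerically equal to 0, which new() treats
	# as a request to load).
	#   $net->save('my.net');
	#   my $net2 = new AI::NeuralNet::BackProp('my.net');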

	# Dumps the complete weight matrix of the network to STDIO
	sub show {
		my $self	=	shift;
		for my $a (0..$self->{SIZE}-1) {
			print "Neuron $a: ";
			for my $b (0..$self->{DIV}-1) {
				print $self->{NET}->[$a]->{SYNAPSES}->{LIST}->[$b]->{WEIGHT},"\t";
			}
			print "\n";
		}
	}
	
	# Used internally by new() and learn().
	# This is the sub block that actually creats
	# the connections between the synapse chains and
	# also connects the run packages and the map packages
	# to the appropiate ends of the neuron grids.
	sub initialize_group() {
		my $self	=	shift;
		my $size	=	$self->{SIZE};
		my $div		=	$self->{DIV};
		my $out		=	$self->{OUT};
		my $flag	=	$self->{FLAG};
		my $x		=	0; 
		my $y		=	0;
		
		# Reset map and run synapse counters.
		$self->{RUN}->{REGISTRATION} = $self->{MAP}->{REGISTRATION} = 0;
		
		AI::NeuralNet::BackProp::out2 "There will be $size neurons in this network group, with a divison value of $div.\n";
		#print "There will be $size neurons in this network group, with a divison value of $div.\n";
		
		# Create initial neuron packages in one long array for the entire group
		for($y=0; $y<$size; $y++) {
			#print "Initalizing neuron $y...     \r";
			$self->{NET}->[$y]=new AI::NeuralNet::BackProp::neuron();
		}
		
		AI::NeuralNet::BackProp::out2 "Creating synapse grid...\n";
		
		my $z  = 0;    
		my $aa = 0;
		my ($n0,$n1,$n2);
		
		# Outer loop loops over every neuron in group, incrementing by the number
		# of neurons supposed to be in each layer
		
		for($y=0; $y<$size; $y+=$div) {
			if($y+$div>=$size) {
				last;
			}
			
			# Inner loop connects every neuron in this 'layer' to one input of every neuron in
			# the next 'layer'. Remember, layers only exist in terms of where the connections
			# are divided. For example, if a person requested 2 layers and 3 neurons per layer,
			# then there would be 6 neurons in the {NET}->[] list, and $div would be set to
			# 3. So we would loop over and every 3 neurons we would connect each of those 3 
			# neurons to one input of every neuron in the next set of 3 neurons. Of course, this
			# is an example. 3 and 2 are set by the new() constructor.
			
			# Flag values:
			# 0 - (default) - 
			# 	My feed-forward style: Each neuron in layer X is connected to one input of every
			#	neuron in layer Y. The best and most proven flag style.
			#
			#   ^   ^   ^               
			#	O\  O\ /O       Layer Y
			#   ^\\/^/\/^
			#	| //|\/\|
			#   |/ \|/ \|		
			#	O   O   O       Layer X
			#   ^   ^   ^
			#
			# 1	-
			#	In addition to flag 0, each neuron in layer X is connected to every input of 
			#	the neurons ahead of itself in layer X.
			# 2 - ("L-U Style") - 
			#	No, its not "Learning-Unit" style. It gets its name from this: In a 2 layer, 3
			#	neuron network, the connections form a L-U pair, or a W, however you want to look
			#	at it.   
			#
			#   ^   ^   ^
			#	|	|	|
			#	O-->O-->O
			#   ^   ^   ^
			#	|	|   |
			#   |   |   |
			#	O-->O-->O
			#   ^   ^   ^
			#	|	|	|
			#
			#	As you can see, each neuron is connected to the next one in its layer, as well
			#	as the neuron directly above itself.
			
			for ($z=0; $z<$div; $z++) {
				if((!$flag) || ($flag == 1)) {
					for ($aa=0; $aa<$div; $aa++) {      
						$self->{NET}->[$y+$z]->connect($self->{NET}->[$y+$div+$aa]);
					}
				}
				if($flag == 1) {
					for ($aa=$z+1; $aa<$div; $aa++) {      
						$self->{NET}->[$y+$z]->connect($self->{NET}->[$y+$aa]);
					}
				}
				if($flag == 2) {
					$self->{NET}->[$y+$z]->connect($self->{NET}->[$y+$div+$z]);
					$self->{NET}->[$y+$z]->connect($self->{NET}->[$y+$z+1]) if($z<$div-1);
				}
				AI::NeuralNet::BackProp::out1 "\n";
			}
			AI::NeuralNet::BackProp::out1 "\n";             
		}
		
		# These next two loops connect the _run and _map packages (the IO interface) to 
		# the start and end 'layers', respectively. These are how we insert data into
		# the network and how we get data from the network. The _run and _map packages 
		# are connected to the neurons so that the neurons think that the IO packages are
		# just another neuron, sending data on. But the IO packs. are special packages designed
		# with the same methods as neurons, just meant for specific IO purposes. You will
		# never need to call any of the IO packs. directly. Instead, they are called whenever
		# you use the run(), map(), or learn() methods of your network.
        
    	AI::NeuralNet::BackProp::out2 "\nMapping I (_run package) connections to network...\n";
		
	    for($y=0; $y<$div; $y++) {
			$self->{_tmp_synapse} = $y;
			$self->{NET}->[$y]->register_synapse($self->{RUN});
			#$self->{NET}->[$y]->connect($self->{RUN});
		}
		
		AI::NeuralNet::BackProp::out2 "Mapping O (_map package) connections to network...\n\n";
		
		for($y=$size-$div; $y<$size; $y++) {
			$self->{_tmp_synapse} = $y;
			$self->{NET}->[$y]->connect($self->{MAP});
		}
		
		# And the group is done! 
	}
	

	# When called with an array reference to a pattern, returns a reference
	# to an array associated with that pattern. See usage in documentation.
	sub run {
		my $self	 =	  shift;
		my $map		 =	  shift;
		my $t0 		 =	new Benchmark;
        $self->{RUN}->run($map);
		$self->{LAST_TIME}=timestr(timediff(new Benchmark, $t0));
        return $self->map();
	}
    
    # This automatically uncrunches a response after running it
	sub run_uc {
    	$_[0]->uncrunch(run(@_));
    }

	# Returns the benchmark time and number of loops run
	# for the last run() or learn()
	# operation performed.
	#
	sub benchmarked {
		my $self	=	shift;
		return $self->{LAST_TIME};
	}
	    
	# Used to retrieve map from last internal run operation.
	sub map {
		my $self	 =	  shift;
		$self->{MAP}->map();
	}
	
	# Forces network to learn pattern passed and give desired
	# results. See usage in POD.
	sub learn {
		my $self	=	shift;
		my $omap	=	shift;
		my $res		=	shift;
		my %args    =   @_;
		my $inc 	=	$args{inc} || 0.20;
		my $max     =   $args{max} || 1024;
		my $_mx		=	intr($max/10);
		my $_mi		=	0;
		my $error   = 	(defined $args{error} && $args{error}>-1) ? $args{error} : -1;
  		my $div		=	$self->{DIV};
		my $size	=	$self->{SIZE};
		my $out		=	$self->{OUT};
		my $divide  =	AI::NeuralNet::BackProp->intr($div/$out);
		my ($a,$b,$y,$flag,$map,$loop,$diff,$pattern,$value);
		my ($t0,$it0);
		no strict 'refs';
		
		# Take care of crunching strings passed
		$omap = $self->crunch($omap) if($omap == 0);
		$res  = $self->crunch($res)  if($res  == 0);
		
		# Fill in empty spaces at end of results matrix with a 0
		if($#{$res}<$out) {
			for my $x ($#{$res}+1..$out) {
				#$res->[$x] = 0;
			}
		}
		
		# Debug
		AI::NeuralNet::BackProp::out1 "Num output neurons: $out, Input neurons: $size, Division: $divide\n";
		
		# Start benchmark timer and initalize a few variables
		$t0 	=	new Benchmark;
        $flag 	=	0; 
		$loop	=	0;   
		my $ldiff	=	0;
		my $dinc	=	0.0001;
		my $cdiff	=	0;
		$diff		=	100;
		$error 		= 	($error>-1)?$error:-1;
		
		# $flag only goes high when all neurons in output map compare exactly with
		# desired result map or $max loops is reached
		#	
		while(!$flag && ($max ? $loop<$max : 1)) {
			$it0 	=	new Benchmark;
			
			# Run the map
			$self->{RUN}->run($omap);
			
			# Retrieve last mapping  and initialize a few variables.
			$map	=	$self->map();
			$y		=	$size-$div;
			$flag	=	1;
			
			# Compare the result map we just ran with the desired result map.
			$diff 	=	pdiff($map,$res);
			
			# This adjusts the increment multiplier to decrease as the loops increase
			if($_mi > $_mx) {
				$dinc *= 0.1;
				$_mi   = 0;
			} 
			
			$_mi++;
				
			
			# We decrement the increment to prevent infinite learning loops.
			# In old versions of this module, if you used too high of an initial input
			# $inc, then the network would keep jumping back and forth over your desired 
			# results because the increment was too high...it would try to push close to
			# the desired result, only to fly over the other edge too far, thereby trying
			# to come back, overshooting again. 
			# This simply adjusts the learning gradient proportionally to the amount of
			# convergence left as the difference decreases.
           	$inc   -= ($dinc*$diff);
			$inc   = 0.0000000001 if($inc < 0.0000000001);
			
			# This prevents it from seeming to get stuck in one location
			# by attempting to boost the values out of the hole they seem to be in.
			if($diff eq $ldiff) {
				$cdiff++;
				$inc += ($dinc*$diff)+($dinc*$cdiff*10);
			} else {
				$cdiff=0;
			}
			
			# Save last $diff
			$ldiff = $diff;
			
			# This catches a max error argument and handles it
			if(!($error>-1 ? $diff>$error : 1)) {
				$flag=1;
				last;
			}
			
			# Debugging
			AI::NeuralNet::BackProp::out4 "Difference: $diff\%\t Increment: $inc\tMax Error: $error\%\n";
			AI::NeuralNet::BackProp::out1 "\n\nMapping results from $map:\n";
			
			# This loop compares each element of the output map with the desired result map.
			# If they don't match exactly, we call weight() on the offending output neuron 
			# and tell it what it should be aiming for, and then the offending neuron will
			# try to adjust the weights of its synapses to get closer to the desired output.
			# See comments in the weight() method of AI::NeuralNet::BackProp for how this works.
			my $l=$self->{NET};
			for my $i (0..$out-1) {
				$a = $map->[$i];
				$b = $res->[$i];
				
				AI::NeuralNet::BackProp::out1 "\nmap[$i] is $a\n";
				AI::NeuralNet::BackProp::out1 "res[$i] is $b\n";
					
				for my $j (0..$divide-1) {
					if($a!=$b) {
						AI::NeuralNet::BackProp::out1 "Punishing $self->{NET}->[($i*$divide)+$j] at ",(($i*$divide)+$j)," ($i with $a) by $inc.\n";
						$l->[$y+($i*$divide)+$j]->weight($inc,$b) if($l->[$y+($i*$divide)+$j]);
						$flag	=	0;
					}
				}
			}
			
			# This counter is just used in the benchmarking operations.
			$loop++;
			
			AI::NeuralNet::BackProp::out1 "\n\n";
			
			# Benchmark this loop.
			AI::NeuralNet::BackProp::out4 "Learning itetration $loop complete, timed at".timestr(timediff(new Benchmark, $it0),'noc','5.3f')."\n";
		
			# Map the results from this loop.
			AI::NeuralNet::BackProp::out4 "Map: \n";
			AI::NeuralNet::BackProp::join_cols($map,$self->{col_width}) if ($AI::NeuralNet::BackProp::DEBUG);
			AI::NeuralNet::BackProp::out4 "Res: \n";
			AI::NeuralNet::BackProp::join_cols($res,$self->{col_width}) if ($AI::NeuralNet::BackProp::DEBUG);
		}
		
		# Compile benchmarking info for entire learn() process and return it, save it, and
		# display it.
		$self->{LAST_TIME}="$loop loops and ".timestr(timediff(new Benchmark, $t0));
        my $str = "Learning took $loop loops and ".timestr(timediff(new Benchmark, $t0),'noc','5.3f');
        AI::NeuralNet::BackProp::out2 $str;
		return $str;
	}		
		
1;

# Internal input class. Not to be used directly.
package AI::NeuralNet::BackProp::_run;
	
	use strict;
	
	# Dummy constructor.
	sub new {
		bless { PARENT => $_[1] }, $_[0]
	}
	
	# This is so we comply with the neuron interface.
	sub weight {}
	sub input  {}
	
	# Again, compliance with neuron interface.
	sub register_synapse {
		my $self	=	shift;		
		my $sid		=	$self->{REGISTRATION} || 0;
		$self->{REGISTRATION}	=	++$sid;
		$self->{RMAP}->{$sid-1}	= 	$self->{PARENT}->{_tmp_synapse};
		return $sid-1;
	}
	
	# Here is the real meat of this package.
	# run() does one thing: It fires values
	# into the first layer of the network.
	sub run {
		my $self	=	shift;
		my $map		=	shift;
		my $x		=	0;
		$map = $self->{PARENT}->crunch($map) if($map == 0);
		return undef if(substr($map,0,5) ne "ARRAY");
		foreach my $el (@{$map}) {
			# Catch ourself if we try to run more inputs than neurons
			return $x if($x>$self->{PARENT}->{DIV}-1);
			
			# Here we add a small amount of randomness to the network.
			# This is to keep the network from getting stuck on a 0 value internally.
			# (The randomness factor set via random() lives on the parent network.)
			$self->{PARENT}->{NET}->[$x]->input(0,$el+(rand()*$self->{PARENT}->{random}));
			$x++;
		};
		# In case we tried to run fewer inputs than neurons, run const 1 in extra neurons
		if($x<$self->{PARENT}->{DIV}) {
			for my $y ($x..$self->{PARENT}->{DIV}-1) {
				$self->{PARENT}->{NET}->[$y]->input(0,1);
			}
		}
		return $x;
	}
	
	
1;

# Internal output class. Not to be used directly.
package AI::NeuralNet::BackProp::_map;
	
	use strict;
	
	# Dummy constructor.
	sub new {
		bless { PARENT => $_[1] }, $_[0]
	}
	
	# Compliance with neuron interface
	sub weight {}
	
	# Compliance with neuron interface
	sub register_synapse {
		my $self	=	shift;		
		my $sid		=	$self->{REGISTRATION} || 0;
		$self->{REGISTRATION}	=	++$sid;
		$self->{RMAP}->{$sid-1} = 	$self->{PARENT}->{_tmp_synapse};
		return $sid-1;
	}
	
	# This acts just like a regular neuron by receiving
	# values from input synapses. Yet, unlike a regular
	# neuron, it doesn't weight the values, just stores
	# them to be retrieved by a call to map().
	sub input  {
		no strict 'refs';             
		my $self	=	shift;
		my $sid		=	shift;
		my $value	=	shift;
		my $size	=	$self->{PARENT}->{DIV};
		my $flag	=	1;
		$self->{OUTPUT}->[$sid]->{VALUE}	=	$self->{PARENT}->intr($value);
		$self->{OUTPUT}->[$sid]->{FIRED}	=	1;
		
		AI::NeuralNet::BackProp::out1 "Received value $self->{OUTPUT}->[$sid]->{VALUE} and sid $sid, self $self.\n";
	}
	
	# Here we simply collect the value of every neuron connected to this
	# one from the layer below us and return an array ref to the final map.
	sub map {
		my $self	=	shift;
		my $size	=	$self->{PARENT}->{DIV};
		my $out		=	$self->{PARENT}->{OUT};
		my $divide  =	AI::NeuralNet::BackProp->intr($size/$out);
		my @map = ();
		my $value;
		AI::NeuralNet::BackProp::out1 "Num output neurons: $out, Input neurons: $size, Division: $divide\n";
		for(0..$out-1) {
			$value=0;
			for my $a (0..$divide-1) {
				$value += $self->{OUTPUT}->[($_*$divide)+$a]->{VALUE};
				AI::NeuralNet::BackProp::out1 "\$a is $a, index is ".(($_*$divide)+$a).", value is $self->{OUTPUT}->[($_*$divide)+$a]->{VALUE}\n";
			}
			$map[$_]	=	AI::NeuralNet::BackProp->intr($value/$divide);
			AI::NeuralNet::BackProp::out1 "Map position $_ is $map[$_] in @{[\@map]} with self set to $self.\n";
			$self->{OUTPUT}->[$_]->{FIRED}	=	0;
		}
		my $ret=\@map;
		return $self->{PARENT}->_range($ret);
	}
1;
			      
# load_pcx() wrapper package
package AI::NeuralNet::BackProp::PCX;

	# Called by load_pcx in AI::NeuralNet::BackProp;
	sub new {
		my $type	=	shift;
		my $self	=	{ 
			parent  => $_[0],
			file    => $_[1]
		};
		my ($a,$b)=load_pcx($_[1]);
		$self->{image}=$a;
		$self->{palette}=$b;
		bless \%{$self}, $type;
	}

	# Returns a rectangular block defined by an array ref in the form of
	# 		[$x1,$y1,$x2,$y2]
	# Return value is an array ref
	sub get_block {
		my $self	=	shift;
		my $ref		=	shift;
		my ($x1,$y1,$x2,$y2)	=	@{$ref};
		my @block	=	();
		my $count	=	0;
		for my $x ($x1..$x2-1) {
			for my $y ($y1..$y2-1) {
				$block[$count++]	=	$self->get($x,$y);
			}
		}
		return \@block;
	}
			
	# Returns pixel at $x,$y
	sub get {
		my $self	=	shift;
		my ($x,$y)  =	(shift,shift);
		return $self->{image}->[$y*320+$x];
	}
	
	# Returns array of (r,g,b) value from palette index passed
	sub rgb {
		my $self	=	shift;
		my $color	=	shift;
		return ($self->{palette}->[$color]->{red},$self->{palette}->[$color]->{green},$self->{palette}->[$color]->{blue});
	}
		
	# Returns mean of (rgb) value of palette index passed
	sub avg {
		my $self	=	shift;
		my $color	=	shift;
		return $self->{parent}->intr(($self->{palette}->[$color]->{red}+$self->{palette}->[$color]->{green}+$self->{palette}->[$color]->{blue})/3);
	}
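
	# A usage sketch (the file name is hypothetical): load a 320x200
	# 8-bit PCX image through the network object and sample it.
	#   my $img   = $net->load_pcx('pic.pcx');
	#   my $block = $img->get_block([0,0,10,10]);  # 10x10 region, array ref
	#   my ($r,$g,$b) = $img->rgb($img->get(5,5)); # palette RGB of a pixel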
	
	# Loads and decompresses a PCX-format 320x200, 8-bit image file and returns 
	# two array references: the first is a 64000-element array, each element a palette
	# index, and the second is a 256-element array, each element a hash
	# ref with the keys 'red', 'green', and 'blue', each holding the respective color
	# component for that color index in the palette.
	sub load_pcx {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		
		# open the file
		open(FILE, "$_[0]");
		binmode(FILE);
		
		my $tmp;
		my @image;
		my @palette;
		my $data;
		
		# Read header
		read(FILE,$tmp,128);
		
		# load the data and decompress into buffer
		my $count=0;
		
		while($count<320*200) {
		     # get the first piece of data
		     read(FILE,$data,1);
	         $data=ord($data);
	         
		     # is this a rle?
		     if ($data>=192 && $data<=255) {
		        # how many bytes in run?
		        my $num_bytes = $data-192;
		
		        # get the actual $data for the run
		        read(FILE, $data, 1);
				$data=ord($data);
		        # replicate $data in buffer num_bytes times
		        while($num_bytes-->0) {
	            	$image[$count++] = $data;
		        } # end while
		     } else {
		        # actual $data, just copy it into buffer at next location
		        $image[$count++] = $data;
		     } # end else not rle
		}
		
		# move to end of file then back up 768 bytes i.e. to beginning of palette
		seek(FILE,-768,2);
		
		# load the palette (each component byte must be ord()ed, as the
		# image data is above, before the 2-bit shift)
		for my $index (0..255) {
		    # get the red component
		    read(FILE,$tmp,1);
		    $palette[$index]->{red}   = (ord($tmp)>>2);
		
		    # get the green component
		    read(FILE,$tmp,1);
			$palette[$index]->{green} = (ord($tmp)>>2);
		
		    # get the blue component
		    read(FILE,$tmp,1);
			$palette[$index]->{blue}  = (ord($tmp)>>2);
		
		}
		
		close(FILE);
		
		return (\@image,\@palette);
	}

1;


__END__





=head1 NAME

AI::NeuralNet::BackProp - A simple back-prop neural net that uses the Delta rule and Hebb's rule.

=head1 SYNOPSIS

	use AI::NeuralNet::BackProp;
	
	# Create a new network with 1 layer, 5 inputs, and 5 outputs.
	my $net = new AI::NeuralNet::BackProp(1,5,5);
	
	# Add a small amount of randomness to the network
	$net->random(0.001);

	# Demonstrate a simple learn() call
	my @inputs = ( 0,0,1,1,1 );
	my @outputs = ( 1,0,1,0,1 );
	
	print $net->learn(\@inputs, \@outputs),"\n";

	# Create a data set to learn
	my @set = (
		[ 2,2,3,4,1 ], [ 1,1,1,1,1 ],
		[ 1,1,1,1,1 ], [ 0,0,0,0,0 ],
		[ 1,1,1,0,0 ], [ 0,0,0,1,1 ]	
	);
	
	# Demo learn_set()
	my $f = $net->learn_set(\@set);
	print "Forgetfulness: $f unit\n";
	
	# Crunch a bunch of strings and return array refs
	my $phrase1 = $net->crunch("I love neural networks!");
	my $phrase2 = $net->crunch("Jay Leno is weird.");
	my $phrase3 = $net->crunch("The rain in spain...");
	my $phrase4 = $net->crunch("Tired of word crunching yet?");

	# Make a data set from the array refs
	my @phrases = (
		$phrase1, $phrase2,
		$phrase3, $phrase4
	);

	# Learn the data set	
	$net->learn_set(\@phrases);
	
	# Run a test phrase through the network
	my $test_phrase = $net->crunch("I love neural networking!");
	my $result = $net->run($test_phrase);
	
	# Get this, it prints "Jay Leno is  networking!" ...  LOL!
	print $net->uncrunch($result),"\n";



=head1 UPDATES

This is version 0.89. In this version I have included a new feature, output range limits, as
well as automatic crunching of run() and learn*() inputs. Included in the examples directory
are seven new practical-use example scripts. Also implemented in this version is a much cleaner
learning function for individual neurons which is more accurate than previous versions and is
based on the LMS rule. See range() for information on output range limits. I have also updated
the load() and save() methods so that they no longer depend on Storable. In this version
you also have the choice between three network topologies: two are less stable, and the third is
the default, which has been in use for the previous four versions.


=head1 DESCRIPTION

AI::NeuralNet::BackProp implements a neural network similar to a feed-forward,
back-propagation network; learning via a mix of a generalization
of the Delta rule and a dissection of Hebb's rule. The actual
neurons of the network are implemented via the AI::NeuralNet::BackProp::neuron package.
	
You construct a new network via the new constructor:
	
	my $net = new AI::NeuralNet::BackProp(2,3,1);
		

The new() constructor accepts two required arguments, $layers and $size, plus one
optional argument, $outputs (in this example, $layers is 2, $size is 3, and $outputs is 1).

$layers specifies the number of layers, including the input
and the output layer, to use in each neural grouping. A new
neural grouping is created for each pattern learned. Layers
is typically set to 2. Each layer has $size neurons in it.
Each neuron's output is connected to one input of every neuron
in the layer below it. 
	
This diagram illustrates a simple network, created with a call
to "new AI::NeuralNet::BackProp(2,2,2)" (2 layers, 2 neurons/layer, 2 outputs).
	                             	
     input
     /  \
    O    O
    |\  /|
    | \/ |
    | /\ |
    |/  \|
    O    O
     \  /
    mapper

In this diagram, each neuron is connected to one input of every
neuron in the layer below it, but there are no connections
between neurons in the same layer. Weights of a connection
are controlled by the neuron it feeds into, not by the connecting
neuron. (I.e. the connecting neuron has no idea how much weight
its output carries when it sends it; it just sends its output and the
weighting is taken care of by the receiving neuron.) This is the
method used to connect cells in every network built by this package.

Input is fed into the network via a call like this:

	use AI::NeuralNet::BackProp;
	my $net = new AI::NeuralNet::BackProp(2,2);
	
	my @map = (0,1);
	
	my $result = $net->run(\@map);
	

Now, this call would probably not give what you want, because
the network hasn't "learned" any patterns yet. But this
illustrates the call. Run now allows strings to be used as
input. See run() for more information.


Run returns a reference with $size elements (Remember $size? $size
is what you passed as the second argument to the network
constructor.) This array contains the results of the mapping. If
you ran the example exactly as shown above, $result would probably 
contain (1,1) as its elements. 

To make the network learn a new pattern, you simply call the learn
method with a sample input and the desired result, both array
references of $size length. Example:

	use AI::NeuralNet::BackProp;
	my $net = new AI::NeuralNet::BackProp(2,2);
	
	my @map = (0,1);
	my @res = (1,0);
	
	$net->learn(\@map,\@res);
	
	my $result = $net->run(\@map);

Now $result will contain (1,0), effectively flipping the input pattern
around. Obviously, the larger $size is, the longer it will take
to learn a pattern. Learn() returns a string in the form of

	Learning took X loops and X wallclock seconds (X.XXX usr + X.XXX sys = X.XXX CPU).

With the X's replaced by time or loop values for that learn() call. So,
to view the learning stats for every learn() call, you can just:
	
	print $net->learn(\@map,\@res);
	

If you call "$net->debug(4)" with $net being the
reference returned by the new() constructor, you will get benchmarking
information for the learn function, as well as plenty of other information output. 
See notes on debug() in the METHODS section, below. 

If you do call $net->debug(1), it is a good
idea to redirect the STDOUT of your script to a file, as a lot of information is output. I often
use this command line:

	$ perl some_script.pl > .out

Then I can simply go and use emacs or any other text editor and read the output at my leisure,
rather than having to wait or pipe it through a pager such as 'more' as it scrolls by on the screen.

=head2 METHODS

=over 4

=item new AI::NeuralNet::BackProp($layers, $size [, $outputs, $topology_flag])

Returns a newly created neural network from an C<AI::NeuralNet::BackProp>
object. The network will have C<$layers> layers in it,
and each layer will have C<$size> neurons in it.

There is an optional parameter of $outputs, which specifies the number
of output neurons to provide. If $outputs is not specified, $outputs
defaults to equal $size. $outputs may not exceed $size. If $outputs
exceeds $size, the new() constructor will return undef.

The optional parameter, $topology_flag, defaults to 0 when not used. There are
three valid topology flag values:

B<0> I<default>
My feed-forward style: Each neuron in layer X is connected to one input of every
neuron in layer Y. The best and most proven flag style.

	^   ^   ^               
	O\  O\ /O       Layer Y
	^\\/^/\/^
	| //|\/\|
	|/ \|/ \|		
	O   O   O       Layer X
	^   ^   ^


(Sorry about the bad art...I am no ASCII artist! :-)


B<1>
In addition to flag 0, each neuron in layer X is connected to every input of 
the neurons ahead of itself in layer X.

B<2> I<("L-U Style")>
No, it's not "Learning-Unit" style. It gets its name from this: in a 2-layer,
3-neuron network, the connections form an L-U pair, or a W, however you want to
look at it.


	^   ^   ^
	|   |   |
	O-->O-->O
	^   ^   ^
	|   |   |
	|   |   |
	O-->O-->O
	^   ^   ^
	|   |   |



As you can see, each neuron is connected to the next one in its layer, as well
as the neuron directly above itself.
	

Before you can really do anything useful with your new neural network
object, you need to teach it some patterns. See the learn() method, below.

=item $net->learn($input_map_ref, $desired_result_ref [, options ]);

This will 'teach' a network to associate a new input map with a desired result.
It will return a string containing benchmarking information. You can retrieve the
pattern index that the network stored the new input map in after learn() is complete
with the pattern() method, below.

UPDATED: You can now specify strings as inputs and outputs to learn, and they will be crunched
automatically. Example:

	$net->learn('corn', 'cob');
	# Before this update, you had to do this:
	# $net->learn($net->crunch('corn'), $net->crunch('cob'));

Note, the old method of calling crunch on the values still works just as well.	

UPDATED: You can now learn inputs with a 0 value. Beware though, it may not learn() a 0 value 
in the input map if you have randomness disabled. See NOTES on using a 0 value with randomness
disabled.

The first two arguments may be array refs (or now, strings), and they may be of different lengths.

Options should be written in hash form. There are three options:
	 
	 inc	=>	$learning_gradient
	 max	=>	$maximum_iterations
	 error	=>	$maximum_allowable_percentage_of_error
	 

$learning_gradient is an optional value used to adjust the weights of the internal
connections. If $learning_gradient is omitted, it defaults to 0.20.
 
$maximum_iterations is the maximum number of iterations the loop should run.
It defaults to 1024.  Set it to 0 if you never want the loop to quit before
the pattern is perfectly learned.

$maximum_allowable_percentage_of_error is the maximum allowable error to have. If 
this is set, then learn() will return when the percentage difference between the
actual results and desired results falls below $maximum_allowable_percentage_of_error.
If you do not include 'error', or $maximum_allowable_percentage_of_error is set to -1,
then learn() will not return until it gets an exact match for the desired result OR it
reaches $maximum_iterations.
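
For example, a call with all three options might look like this (the values
here are only illustrative):

	$net->learn(\@map, \@res,
		inc   => 0.15,	# learning gradient
		max   => 2048,	# iteration cap; 0 means never quit early
		error => 5	# return once % error falls below 5
	);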


=item $net->learn_set(\@set, [ options ]);

UPDATE: Inputs and outputs in the dataset can now be strings. See information on auto-crunching
in learn()

This takes the same options as learn() and allows you to specify a set to learn, rather
than individual patterns. A dataset is an array reference with at least two elements in the
array, each element being another array reference (or now, a scalar string). For each pattern to
learn, you must specify an input array ref, and an output array ref as the next element. Example:
	
	my @set = (
		# inputs        outputs
		[ 1,2,3,4 ],  [ 1,3,5,6 ],
		[ 0,2,5,6 ],  [ 0,2,1,2 ]
	);


See the paragraph on measuring forgetfulness, below. There are 
two learn_set()-specific option tags available:

	flag     =>  $flag
	pattern  =>  $row

If "flag" is set to some TRUE value, as in "flag => 1" in the hash of options, or if the option "flag"
is not set, then it will return a percentage represting the amount of forgetfullness. Otherwise,
learn_set() will return an integer specifying the amount of forgetfulness when all the patterns 
are learned. 

If "pattern" is set, then learn_set() will use that pattern in the data set to measure forgetfulness by.
If "pattern" is omitted, it defaults to the first pattern in the set. Example:

	my @set = (
		[ 0,1,0,1 ],  [ 0 ],
		[ 0,0,1,0 ],  [ 1 ],
		[ 1,1,0,1 ],  [ 2 ],  #  <---
		[ 0,1,1,0 ],  [ 3 ]
	);
	
If you wish to measure forgetfulness as indicated by the line with the arrow, then you would
pass 2 as the "pattern" option, as in "pattern => 2".
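
For example (assuming the @set above; a minimal sketch):

	my $f = $net->learn_set(\@set,
		flag    => 1,	# report forgetfulness as a percentage
		pattern => 2	# measure against the third pair
	);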

Now why the heck would anyone want to measure forgetfulness, you ask? Maybe you wonder how I 
even measure that. Well, it is not a vital value that you have to know. I just put in a 
"forgetfulness measure" one day because I thought it would be neat to know. 

How the module measures forgetfulness is this: First, it learns all the patterns in the set provided,
then it will run the very first pattern (or whatever pattern is specified by the "pattern" option)
in the set after it has finished learning. It will compare the run() output with the desired output
as specified in the dataset. In a perfect world, the two should match exactly. What we measure is
how much that they don't match, thus the amount of forgetfulness the network has.
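
In other words, the measurement boils down to something like this sketch
(not the module's actual code; it just uses the documented run() and
pdiff() methods, with $p standing in for the "pattern" option):

	my $got    = $net->run( $set[ 2 * $p ] );		# re-run the chosen inputs
	my $forget = $net->pdiff( $set[ 2 * $p + 1 ], $got );	# % difference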

NOTE: In version 0.77 percentages were disabled because of a bug. Percentages are now enabled.

Example (from examples/ex_dow.pl):

	# Data from 1989 (as far as I know..this is taken from example data on BrainMaker)
	my @data = ( 
		#	Mo  CPI  CPI-1 CPI-3 	Oil  Oil-1 Oil-3    Dow   Dow-1 Dow-3   Dow Ave (output)
		[	1, 	229, 220,  146, 	20.0, 21.9, 19.5, 	2645, 2652, 2597], 	[	2647  ],
		[	2, 	235, 226,  155, 	19.8, 20.0, 18.3, 	2633, 2645, 2585], 	[	2637  ],
		[	3, 	244, 235,  164, 	19.6, 19.8, 18.1, 	2627, 2633, 2579], 	[	2630  ],
		[	4, 	261, 244,  181, 	19.6, 19.6, 18.1, 	2611, 2627, 2563], 	[	2620  ],
		[	5, 	276, 261,  196, 	19.5, 19.6, 18.0, 	2630, 2611, 2582], 	[	2638  ],
		[	6, 	287, 276,  207, 	19.5, 19.5, 18.0, 	2637, 2630, 2589], 	[	2635  ],
		[	7, 	296, 287,  212, 	19.3, 19.5, 17.8, 	2640, 2637, 2592], 	[	2641  ] 		
	);
	
	# Learn the set
	my $f = $net->learn_set(\@data, 
					  inc		=>	0.1,	
					  max		=>	500,
					  pattern	=>	1
					 );
			
	# Print it 
	print "Forgetfullness: $f%";

    
This is a snippet from the example script examples/ex_dow.pl, which demonstrates DOW average
prediction for the next month. A simpler set definition would be as such:

	my @data = (
		[ 0,1 ], [ 1 ],
		[ 1,0 ], [ 0 ]
	);
	
	$net->learn_set(\@data);
	
Same effect as above, but not the same data (obviously).

=item $net->learn_set_rand(\@set, [ options ]);

UPDATE: Inputs and outputs in the dataset can now be strings. See information on auto-crunching
in learn()

This takes the same options as learn() and allows you to specify a set to learn, rather
than individual patterns. 

learn_set_rand() differs from learn_set() in that it learns the patterns in a random order,
each pattern once, rather than in the order that they are in the array. This returns a true
value (1) instead of a forgetfulness factor.

Example:

	my @data = (
		[ 0,1 ], [ 1 ],
		[ 1,0 ], [ 0 ]
	);
	
	$net->learn_set_rand(\@data);
	


=item $net->run($input_map_ref);

UPDATE: run() will now I<automatically> crunch() a string given as the input.

This method will apply the given array ref at the input layer of the neural network, and
it will return an array ref to the output of the network.

Example:
	
	my $inputs  = [ 1,1,0,1 ];
	my $outputs = $net->run($inputs);

With the new update you can do this:

	my $outputs = $net->run('cloudy, wind is 5 MPH NW');
	# Old method:
	# my $outputs = $net->run($net->crunch('cloudy, wind is 5 MPH NW'));

See also run_uc() below.


=item $net->run_uc($input_map_ref);

This method does the same thing as this code:
	
	$net->uncrunch($net->run($input_map_ref));

All that run_uc() does is automatically call uncrunch() on the output, regardless
of whether the input was crunch() -ed or not.
	


=item $net->range();

This allows you to limit the possible outputs to a specific set of values. There are several
ways you can specify the set of values to limit the output to. Each method is shown below.
When called without any arguments, it will disable output range limits. You will need to re-learn
any data previously learned after disabling ranging, as disabling ranging invalidates the current
weight matrix in the network.

range() automatically scales the network's outputs to fit inside the size of range you allow,
and it therefore keeps track of the maximum output it can expect to scale. Because of this, you
will need to learn() the whole data set again after calling range() on a network.

Subsequent calls to range() invalidate any previous calls to range().

NOTE: It is recommended that you call range() before you call learn(), or else you will get
unexpected results from any run() call after range() .


=item $net->range($bottom..$top);

This is a common form often used in a C<for my $x (0..20)> type of for() constructor. It works
exactly the same way: all numbers from $bottom to $top, inclusive, will be allowed as outputs
of the network, and no other values will be possible.
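
For instance:

	$net->range(0..5);	# allow only the integer outputs 0 through 5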


=item $net->range(\@values);

This allows you to specify a range of values as an array reference. As the ranges are stored
internally as a reference, this is probably the most natural way. Any value specified by an
element in @values will be allowed as an output; no other values will be allowed.


=item $net->range("string of values");

With this construct you can specify a string of values to be allowed as the outputs. This string
is simply crunch() -ed internally and saved as an array ref. This has the same effect
as calling:

	$net->range($net->crunch("string of values"));


=item $net->range("first string","second string");

This is the same as calling:

	$net->range($net->crunch("first string"),$net->crunch("second string"));

Or:	

	@range = ($net->crunch("first string"),
			  $net->crunch("second string"));
	$net->range(\@range);


=item $net->range($value1,$value2);

This is the same as calling:

	$net->range([$value1,$value2]);

Or:
	
	@range = ($value1,$value2);
	$net->range(\@range);

The two forms are equivalent.



=item $net->benchmarked();

UPDATE: benchmarked() now returns just the string from timestr() for the last run() or
learn() call. Exception: if the last call was a learn() loop, the string will be prefixed with "%d loops and ".

This returns a benchmark info string for the last learn() or the last run() call,
whichever occurred later. It is easily printed as a string,
as follows:

	print $net->benchmarked() . "\n";





=item $net->debug($level)

Toggles debugging off if called with $level = 0 or no arguments. There are four levels
of debugging.

Level 0 ($level = 0) : Default; no debugging information printed. All printing is
left to the calling script.

Level 1 ($level = 1) : This causes ALL debugging information for the network to be dumped
as the network runs. In this mode, it is a good idea to redirect your STDOUT to a file, especially
for large programs.

Level 2 ($level = 2) : A slightly-less verbose form of debugging, not as many internal 
data dumps.

Level 3 ($level = 3) : JUST prints weight mapping as weights change.

Level 4 ($level = 4) : JUST prints the benchmark info for EACH learn loop iteration, not just
learning as a whole. Also prints the percentage difference for each loop between current network
results and desired results, as well as the learning gradient ('increment').

Level 4 is useful for seeing if you need to give a smaller learning increment to learn() .
I used level 4 debugging quite often in creating the letters.pl example script and the small_1.pl
example script.

Toggles debugging off when called with no arguments.



=item $net->save($filename);

This will save the complete state of the network to disk, including all weights and any
words crunched with crunch() . Also saves any output ranges set with range() .

This has now been modified to use a simple flat-file text storage format, and it does not
depend on any external modules now.



=item $net->load($filename);

This will load from disk any network saved by save() and completely restore the internal
state to the point at which save() was called.




=item $net->join_cols($array_ref,$row_length_in_elements,$high_state_character,$low_state_character);

This is more of a utility function than any real necessary function of the package.
Instead of joining all the elements of the array together in one long string, like join() ,
it prints the elements of $array_ref to STDOUT, adding a newline (\n) after every $row_length_in_elements
elements have passed. Additionally, if you include a $high_state_character and a $low_state_character,
it will print the $high_state_character (can be more than one character) for every element that
has a true value, and the $low_state_character for every element that has a false value.
If you do not supply a $high_state_character, or the $high_state_character is a null or empty or
undefined string, join_cols() will just print the numerical value of each element separated
by a null character (\0).
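
For example, printing a 4-column bitmap (a minimal sketch):

	my @bitmap = (1,1,0,0, 0,1,1,0, 0,0,1,1);
	$net->join_cols(\@bitmap, 4, '#', '.');
	# prints:
	# ##..
	# .##.
	# ..##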



=item $net->pdiff($array_ref_A, $array_ref_B);

This function is used VERY heavily internally to calculate the difference in percent
between elements of the two array refs passed. It returns a %.10f (sprintf-format)
percent string.


=item $net->p($a,$b);

Returns a floating point number which represents $a as a percentage of $b.



=item $net->intr($float);

Rounds a floating-point number to an integer using sprintf() and int() . Provides
better rounding than just calling int() on the float. Also used very heavily internally.



=item $net->high($array_ref);

Returns the index of the element with the highest comparative value in the array ref passed.



=item $net->low($array_ref);

Returns the index of the element with the lowest comparative value in the array ref passed.



=item $net->show();

This will dump a simple listing of all the weights of all the connections of every neuron
in the network to STDOUT.




=item $net->crunch($string);

UPDATE: Now you can pass a string variable instead of using qw(). Strings will be split internally.
Do not use qw() to pass strings to crunch.

This splits a string passed with /[\s\t]/ into an array ref containing unique indexes
to the words. The words are stored in an internal array and preserved across load() and save()
calls. This is designed to be used to generate unique maps suitable for passing to learn() and
run() directly. It returns an array ref.

The words are not duplicated internally. For example:

	$net->crunch("How are you?");

Will probably return an array ref containing 1,2,3. A subsequent call of:

    $net->crunch("How is Jane?");

Will probably return an array ref containing 1,4,5. Notice, the first element stayed
the same. That is because it already stored the word "How". So, each word is stored
only once internally and the returned array ref reflects that.
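
A round trip might look like this (the exact indexes depend on what has
already been crunched):

	my $map = $net->crunch("How are you?");
	print $net->crunched("How"), "\n";	# likely prints 1
	print $net->uncrunch($map), "\n";	# prints "How are you?"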


=item $net->uncrunch($array_ref);

Uncrunches a map (array ref) into a scalar string of words separated by ' ' and returns the
string. This is meant to be used as a counterpart to the crunch() method, above, possibly to
uncrunch() the output of a run() call. Consider the below code (also in ./examples/ex_crunch.pl):
                           
	use AI::NeuralNet::BackProp;
	my $net = AI::NeuralNet::BackProp->new(2,3);
	
	for (0..3) {		# Note: The four learn() statements below could 
						# be replaced with learn_set() to do the same thing,
						# but use this form here for clarity.

		$net->learn($net->crunch("I love chips."),  $net->crunch("That's Junk Food!"));
		$net->learn($net->crunch("I love apples."), $net->crunch("Good, Healthy Food."));
		$net->learn($net->crunch("I love pop."),    $net->crunch("That's Junk Food!"));
		$net->learn($net->crunch("I love oranges."),$net->crunch("Good, Healthy Food."));
	}
	
	my $response = $net->run($net->crunch("I love corn."));
	
	print $net->uncrunch($response),"\n";


On my system, this responds with, "Good, Healthy Food." If you run the network with
"I love pop.", though, you will probably get "Food! apples. apples." (At least it returns
that on my system.) As you can see, the associations are not yet perfect, but it can make
for some interesting demos!



=item $net->crunched($word);

This will return undef if the word is not in the internal crunch list, or it will return the
index of the word if it exists in the crunch list. 


=item $net->col_width($width);

This is useful for formatting the debugging output of Level 4 if you are learning simple
bitmaps. This will set the debugger to automatically insert a line break after that many
elements in the map output when dumping the currently run map during a learn loop.

It will return the current width when called with a 0 or undef value.


=item $net->random($rand);

This will set the randomness factor of the network. Default is 0.001. When called
with no arguments, or an undef value, it will return current randomness value. When
called with a 0 value, it will disable randomness in the network. See NOTES on learning 
a 0 value in the input map with randomness disabled.



=item $net->load_pcx($filename);

Oh, here's a treat... this routine will load a PCX-format file (yeah, I know ... ancient
format ... but it is the only one I could find specs for to write it in Perl. If anyone can
get specs for any other formats, or could write a loader for them, I would be very grateful!).
Anyway, it loads a PCX-format file that is exactly 320x200 with 8 bits per pixel, in pure Perl.
It returns a blessed reference to an AI::NeuralNet::BackProp::PCX object, which supports the
following routines/members. See the example files ex_pcxl.pl and ex_pcx.pl in the ./examples/
directory.
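
For example (the file name here is hypothetical):

	my $pcx = $net->load_pcx('ship.pcx');
	my $pixel = $pcx->get(160, 100);	# palette index of the centre pixel
	print $pcx->avg($pixel), "\n";	# mean brightness of that colour (0..63 scale)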



=item $pcx->{image}

This is an array reference to the entire image. The array contains exactly 64000 elements; each
element contains a number corresponding to an index in the palette array, detailed below.



=item $pcx->{palette}

This is an array ref to an AoH (array of hashes). Each element has the following three keys:
	
	$pcx->{palette}->[0]->{red};
	$pcx->{palette}->[0]->{green};
	$pcx->{palette}->[0]->{blue};

Each is in the range of 0..63, corresponding to its named color component.
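
Since each component is a 6-bit value (0..63), you can scale it back up to
the usual 0..255 range by shifting left two bits; for example:

	my ($r, $g, $b) = $pcx->rgb($index);
	my $hex = sprintf "#%02x%02x%02x", map { $_ << 2 } $r, $g, $b;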



=item $pcx->get_block($array_ref);

Returns a rectangular block defined by an array ref in the form of:
	
	[$left,$top,$right,$bottom]

These must be in the range of 0..319 for $left and $right, and the range of 0..199 for
$top and $bottom. The block is returned as an array ref with horizontal lines in sequential order.
I.e. to get a pixel from [2,5] in the block, where the block width ($right-$left) was 20, the
element in the array ref containing the contents of coordinates [2,5] would be found at
[5*20+2] ($y*$width+$x).
    
	print( (@{$pcx->get_block([0,0,20,50])})[5*20+2] );

This would print the contents of the element at block coords [2,5].



=item $pcx->get($x,$y);

Returns the value of pixel at image coordinates $x,$y.
$x must be in the range of 0..319 and $y must be in the range of 0..199.



=item $pcx->rgb($index);

Returns a 3-element array (not an array ref) with each element corresponding to the red, green, or
blue color component, respectively.



=item $pcx->avg($index);	

Returns the mean value of the red, green, and blue values at the palette index in $index.
	


=head1 NOTES

=head2 Learning 0s With Randomness Disabled

You can now use 0 values in any input maps. This is a good improvement over versions 0.40
and 0.42, where no 0s were allowed because the learning would never complete
with a 0 in the input.

Yet with the allowance of 0s, it requires one of two factors to learn correctly. Either you
must enable randomness with $net->random(0.0001) (Any values work [other than 0], see random() ), 
or you must set an error-minimum with the 'error => 5' option (you can use some other error value 
as well). 

When randomness is enabled (that is, when you call random() with a value other than 0), it interjects
a bit of randomness into the output of every neuron in the network, except for the input and output
neurons. The randomness is interjected with rand()*$rand, where $rand is the value that was
passed to the random() call. This ensures the network never has a pure 0 internally. A pure 0
internally is bad because a weight multiplied by 0 stays 0, so the weights could never change that
value. Yet when a weight is multiplied by 0.00001, with enough weight it will eventually be able
to learn. With a 0 value instead of 0.00001 or whatever, it would never be able to add enough
weight to get anything other than a 0.
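
As a sketch of the idea (not the module's actual internals; $weighted_sum is
just a stand-in name):

	# conceptually, inside each hidden neuron:
	$output = $weighted_sum + rand() * $rand;	# $rand as passed to random()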

The second option to allow for 0s is to enable a maximum error with the 'error' option in
learn() , learn_set() , and learn_set_rand() . This allows the network to not worry about
learning an output perfectly. 

For accuracy reasons, it is recommended that you work with 0s using the random() method.

If anyone has any thoughts/arguments/suggestions for using 0s in the network, let me know
at jdb@wcoil.com. 




=head1 OTHER INCLUDED PACKAGES

=head2 AI::NeuralNet::BackProp::neuron

AI::NeuralNet::BackProp::neuron is the worker package for AI::NeuralNet::BackProp.
It implements the actual neurons of the neural network.
AI::NeuralNet::BackProp::neuron is not designed to be created directly, as
it is used internally by AI::NeuralNet::BackProp.

=head2 AI::NeuralNet::BackProp::_run

=head2 AI::NeuralNet::BackProp::_map

These two packages, _run and _map, are used to insert data into
the network and to get data out of the network. The _run and _map packages
are connected to the neurons so that the neurons think that the IO packages are
just another neuron, sending data on. But the IO packages are special packages designed
with the same methods as neurons, just meant for specific IO purposes. You will
never need to call any of the IO packages directly. Instead, they are called whenever
you use the run() or learn() methods of your network.
        



=head1 BUGS

This is an alpha release of C<AI::NeuralNet::BackProp>, and as such, I am sure
there are probably bugs in here which I just have not found yet. If you find bugs in this module, I would
appreciate it greatly if you could report them to me at F<E<lt>jdb@wcoil.comE<gt>>,
or, even better, try to patch them yourself and figure out why the bug is being buggy, and
send me the patched code, again at F<E<lt>jdb@wcoil.comE<gt>>. 



=head1 AUTHOR

Josiah Bryan F<E<lt>jdb@wcoil.comE<gt>>

Copyright (c) 2000 Josiah Bryan. All rights reserved. This program is free software; 
you can redistribute it and/or modify it under the same terms as Perl itself.

The C<AI::NeuralNet::BackProp> and related modules are free software. THEY COME WITHOUT WARRANTY OF ANY KIND.

                                                             




=head1 THANKS

Below is a list of people that have helped, made suggestions, patches, etc. No particular order.

		Tobias Bronx, tobiasb@odin.funcom.com
		Pat Trainor, ptrainor@title14.com
		Steve Purkis, spurkis@epn.nu
		Rodin Porrata, rodin@ursa.llnl.gov
		Daniel Macks dmacks@sas.upenn.edu

Tobias was a great help with the initial releases, and helped with learning options and a great
many helpful suggestions. Rodin gave me some great ideas for the new internals, as well
as disabling Storable. Steve is the author of AI::Perceptron, and gave some good suggestions for
weighting the neurons. Daniel was a great help with early beta testing of the module and related 
ideas. Pat has been a great help for running the module through the works. Pat is the author of 
the new Inter game, an in-depth strategy game. He is using a group of neural networks internally
which provides a good test bed for coming up with new ideas for the network. Thank you for all of
your help, everybody.


=head1 DOWNLOAD

You can always download the latest copy of AI::NeuralNet::BackProp
from http://www.josiah.countystart.com/modules/AI/cgi-bin/rec.pl


=head1 MAILING LIST

A mailing list has been set up for AI::NeuralNet::BackProp for discussion of AI and
neural net related topics as they pertain to AI::NeuralNet::BackProp. I will also 
announce in the group each time a new release of AI::NeuralNet::BackProp is available.

The list address is at: ai-neuralnet-backprop@egroups.com 

To subscribe, send a blank email to: ai-neuralnet-backprop-subscribe@egroups.com  


=cut

 view all matches for this distribution


AI-NeuralNet-FastSOM

 view release on metacpan or  search on metacpan

examples/eigenvector_initialization.pl  view on Meta::CPAN


my $epsilon = 0.001;
my $epochs  = 400;

{ # random initialisation
    my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
					   input_dim  => $dim);

    $nn->initialize;                                      # random
    my @mes = $nn->train ($epochs, @vs);
    warn "random:   length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}

{ # constant initialisation
    my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
					   input_dim  => $dim);

    $nn->initialize ($vs[-1]);
    my @mes = $nn->train ($epochs, @vs);
    warn "constant: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}

{ # eigenvector initialisation
    my $nn = new AI::NeuralNet::FastSOM::Rect (output_dim => "5x6",
					   input_dim  => $dim);
    my @training_vectors;                                                # find these training vectors 
    {                                                                    # and prime them with this eigenvector stuff;
	use PDL;
	my $A = pdl \@vs;

	while ($A->getdim(0) < $A->getdim(1)) {                          # make the beast square
	    $A = append ($A, zeroes (1));                                # by padding zeroes
	}
	my ($E, $e) = eigens_sym $A;
#	print $E;
#	print $e;

	my @es = list $e;                                                # eigenvalues
#	warn "es : ".Dumper \@es;
	my @es_desc = sort { $b <=> $a } @es;                            # eigenvalues sorted desc
#	warn "desc: ".Dumper \@es_desc;
	my @es_idx  = map { _find_num ($_, \@es) } @es_desc;             # eigenvalue indices sorted by eigenvalue (desc)
#	warn "idx: ".Dumper \@es_idx;

sub _find_num {
    my $v = shift;
    my $l = shift;
    for my $i (0..$#$l) {
	return $i if $v == $l->[$i];
    }
    return undef;
}

	for (@es_idx) {                                                  # from the highest values downwards, take the index
	    push @training_vectors, [ list $E->dice($_) ] ;              # get the corresponding vector
	}
    }

    $nn->initialize (@training_vectors[0..0]);                           # take only the biggest ones (the eigenvalues are big, actually)
#warn $nn->as_string;
    my @mes = $nn->train ($epochs, @vs);
    warn "eigen:    length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}

__END__

 view all matches for this distribution


AI-NeuralNet-Hopfield

 view release on metacpan or  search on metacpan

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

has 'matrix_rows'   => ( is => 'rw', isa => 'Int');

has 'matrix_cols'   => ( is => 'rw', isa => 'Int');

sub BUILD {
	my $self = shift;
	my $args = shift;
	my $matrix = Math::SparseMatrix->new($args->{row}, $args->{col});
	$self->matrix($matrix);	
	$self->matrix_rows($args->{row});
	$self->matrix_cols($args->{col});
}

sub train {
	my $self = shift;
	my @pattern = @_;

	if ( ($#pattern + 1) != $self->matrix_rows) {
		die "Can't train a pattern of size " . ($#pattern + 1) . " on a Hopfield network of size " . $self->matrix_rows;
	}
	}
	
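	# Hebbian storage: the pattern becomes a bipolar (+1/-1) row vector p;
	# the steps below compute W = W + (p' x p - I), where I is the identity
	# matrix, so no neuron is ever connected to itself.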
	my $m2 = &convert_array($self->matrix_rows, $self->matrix_cols, @pattern);

	my $m1 = &transpose($m2);

	my $m3 = &multiply($m1, $m2);

	my $identity = &identity($m3->{_rows});

	my $m4 = &subtract($m3, $identity);

	my $m5 = &add($self->matrix, $m4);
	
	$self->matrix($m5);	
}

sub evaluate {
	my $self = shift;
	my @pattern = @_;

	my @output = ();

	my $input_matrix = &convert_array($self->matrix_rows, $self->matrix_cols, @pattern);

	for (my $col = 1; $col <= ($#pattern + 1); $col++) {
		
		my $column_matrix = &get_col($self, $col);
		
		my $transposed_column_matrix = &transpose($column_matrix);
		
		my $dot_product = &dot_product($input_matrix, $transposed_column_matrix);
		
		#say $dot_product;

		if ($dot_product > 0) {
			$output[$col - 1] = "true";
		} else {
			$output[$col - 1] = "false";
		}
	}
	return @output;
}

sub convert_array {
	my $rows    = shift;
	my $cols 	= shift;
	my @pattern = @_;
	my $result  = Math::SparseMatrix->new(1, $cols);

	for (my $i = 0; $i < ($#pattern + 1); $i++) {
		if ($pattern[$i] =~ m/true/ig) {
			$result->set(1, ($i +1 ), 1);
		} else {
			$result->set(1, ($i + 1), -1);
		}
	}
	return $result;
}

sub transpose {
	my $matrix  = shift;
	my $rows = $matrix->{_rows};
	my $cols = $matrix->{_cols};

	my $inverse = Math::SparseMatrix->new($cols, $rows);
		
	for (my $r = 1; $r <= $rows; $r++) {
		for (my $c = 1; $c <= $cols; $c++) {
			my $value = $matrix->get($r, $c);
			$inverse->set($c, $r, $value);
		}
	}
	return $inverse;
}

sub multiply {
	my $matrix_a  = shift;
	my $matrix_b  = shift;

	my $a_rows = $matrix_a->{_rows};
	my $a_cols = $matrix_a->{_cols};

	my $b_rows = $matrix_b->{_rows};
	my $b_cols = $matrix_b->{_cols};

	my $result = Math::SparseMatrix->new($a_rows, $b_cols);

	if ($matrix_a->{_cols} != $matrix_b->{_rows}) {
		die "To use ordinary matrix multiplication the number of columns on the first matrix must mat the number of rows on the second";
	}

	for (my $result_row = 1; $result_row <= $a_rows; $result_row++) {
		for(my $result_col = 1; $result_col <= $b_cols; $result_col++) {
			my $value = 0;
			for (my $i = 1; $i <= $a_cols; $i++) {
				$value += ($matrix_a->get($result_row, $i)) * ($matrix_b->get($i, $result_col));
			}
			$result->set($result_row, $result_col, $value);
		}
	}
	return $result;
}

sub identity {
	my $size = shift;

	if ($size < 1) {
		die "Identity matrix must be at least of size 1.";
	}
	
	my $result = Math::SparseMatrix->new ($size, $size);

	for (my $i = 1; $i <= $size; $i++) {
		$result->set($i, $i, 1);
	}
	return $result;
}

sub subtract {
	my $matrix_a = shift;
	my $matrix_b = shift;

    my $a_rows = $matrix_a->{_rows};
    my $a_cols = $matrix_a->{_cols};

	my $b_rows = $matrix_b->{_rows};
	my $b_cols = $matrix_b->{_cols};

	if ($a_rows != $b_rows) {
		die "To subtract the matrixes they must have the same number of rows and columns.";
	}

	if ($a_cols != $b_cols) {
		die "To subtract the matrixes they must have the same number of rows and columns.  Matrix a has ";
	}

	my $result = Math::SparseMatrix->new($a_rows, $a_cols);

	for (my $result_row = 1; $result_row <= $a_rows; $result_row++) {
		for (my $result_col = 1; $result_col <= $a_cols; $result_col++) {
			my $value = ( $matrix_a->get($result_row, $result_col) ) - ( $matrix_b->get($result_row, $result_col));
			
			if ($value == 0) {
				# Math::SparseMatrix cannot store an explicit 0, so a computed
				# 0 is stored as 2; dot_product() maps it back to 0 later
				$value += 2;
			}
			$result->set($result_row, $result_col, $value);
		}
	}
	return $result;
}

sub add {
	#weight matrix.
    my $matrix_a = shift;
	#identity matrix.
    my $matrix_b = shift;

	my $a_rows = $matrix_a->{_rows};
	my $a_cols = $matrix_a->{_cols};

	my $b_rows = $matrix_b->{_rows};
	my $b_cols = $matrix_b->{_cols};
	
	if ($a_rows != $b_rows) {
		die "To add the matrixes they must have the same number of rows and columns.";
	}

	if ($a_cols != $b_cols) {
		 die "To add the matrixes they must have the same number of rows and columns.";
	}

	my $result = Math::SparseMatrix->new($a_rows, $a_cols);

	for (my $result_row = 1; $result_row <= $a_rows; $result_row++) {
		for (my $result_col = 1; $result_col <= $a_cols; $result_col++) {
			my $value = $matrix_b->get($result_row, $result_col);			
			$result->set($result_row, $result_col, $matrix_a->get($result_row, $result_col) + $value  )
		}
	}
	return $result;
}

sub dot_product {
	my $matrix_a = shift;
	my $matrix_b = shift;
	
	my $a_rows = $matrix_a->{_rows};
	my $a_cols = $matrix_a->{_cols};
	
	my $b_rows = $matrix_b->{_rows};
	my $b_cols = $matrix_b->{_cols};

	my @array_a = &packed_array($matrix_a);
	my @array_b = &packed_array($matrix_b);

	# undo the sparse-matrix storage hack: values stored as 2 really mean 0
	for (my $n = 0; $n <= $#array_b; $n++) {
		if ($array_b[$n] == 2) {
			$array_b[$n] = 0;
		}
	}
	
	if ($#array_a != $#array_b) {
		die "To take the dot product, both matrixes must be of the same length.";
	}

	my $result = 0;
	my $length = $#array_a + 1;

	for (my $i = 0; $i < $length; $i++) {
		$result += $array_a[$i] * $array_b[$i];
	}
	return $result;
}

sub packed_array {
	my $matrix = shift;
	my @result = ();

	for (my $r = 1; $r <= $matrix->{_rows}; $r++) {
		for (my $c = 1; $c <= $matrix->{_cols}; $c++) {
			push(@result, $matrix->get($r, $c)); 
		}
	}
	return @result;
}

sub get_col {
	my $self = shift;
	my $col  = shift;

	my $matrix = $self->matrix();
	
	my $matrix_rows = $self->matrix_rows();

	if ($col > $matrix_rows) {
		die "Can't get column";
	}

	my $new_matrix = Math::SparseMatrix->new($matrix_rows, 1);

	for (my $row = 1; $row <= $matrix_rows; $row++) {
		my $value = $matrix->get($row, $col);
		$new_matrix->set($row, 1, $value);
	}
	return $new_matrix;
}

sub print_matrix {
    my $matrix  = shift;
    my $rs = $matrix->{_rows};
    my $cs = $matrix->{_cols};

	for (my $i = 1; $i <= $rs; $i++) {
		for (my $j = 1; $j <= $cs; $j++) {
			say "[$i,$j]" . $matrix->get($i, $j);
		}
	}
}

=head1 SYNOPSIS

This is a version of a Hopfield Network implemented in Perl. Hopfield networks are sometimes called associative networks since 

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN


=head2 New

In order to build new classifiers, you have to pass the constructor the number of rows and columns (neurons) for the matrix construction.

	my $hop = AI::NeuralNet::Hopfield->new(row => 4, col => 4);

=cut

=head2 Train

The training method configures the network memory.

	my @input_1 = qw(true true false false);
	$hop->train(@input_1);

=cut

=head2 Evaluation

The evaluation method compares the new input with the information stored in the matrix memory.
The output is a new array with the boolean evaluation of each neuron.

	my @input_2 = qw(true true true false);
	my @result = $hop->evaluate(@input_2);

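Putting it all together, a complete round trip might look like this (a
minimal sketch; the recalled values depend on what has been stored):

	use AI::NeuralNet::Hopfield;

	my $hop = AI::NeuralNet::Hopfield->new(row => 4, col => 4);
	$hop->train(qw(true true false false));
	my @result = $hop->evaluate(qw(true true true false));
	print "@result\n";
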
=cut


=head1 AUTHOR

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN


=head1 SUPPORT

You can find documentation for this module with the perldoc command.

    perldoc AI::NeuralNet::Hopfield


You can also look for information at:

=over 4

 view all matches for this distribution


AI-NeuralNet-Kohonen-Demo-RGB

 view release on metacpan or  search on metacpan

RGB.pm  view on Meta::CPAN

package AI::NeuralNet::Kohonen::Demo::RGB;

use vars qw/$VERSION/;
$VERSION = 0.123;	# 13 March 2003; using smoothing

=head1 NAME

AI::NeuralNet::Kohonen::Demo::RGB - Colour-based demo

=head1 SYNOPSIS

	use AI::NeuralNet::Kohonen::Demo::RGB;
	$_ = AI::NeuralNet::Kohonen::Demo::RGB->new(
		display_scale => 20,
		display	=> 'hex',
		map_dim	=> 39,
		epochs  => 9,
		table   => "R G B"
	              ."1 0 0"
	              ."0 1 0"
	              ."0 0 1",
	);
	$_->train;
	exit;


=head1 DESCRIPTION

A sub-class of C<AI::NeuralNet::Kohonen>
that implements extra methods to make use of Tk
in a very slow demonstration of how a SOM can collapse
a three-dimensional space (RGB colour values) into a
two-dimensional space (the display). See L<SYNOPSIS>.

The only things added are two new fields to supply to the
constructor - set C<display> to C<hex> for display as
a unified distance matrix, rather than plain grid; set
C<display_scale> for the size of the display.

=cut

use strict;
use warnings;
use Carp qw/cluck carp confess croak/;

use base "AI::NeuralNet::Kohonen";

use Tk;
use Tk::Canvas;
use Tk::Label;
use Tk qw/DoOneEvent DONT_WAIT/;

#
# Used only by &tk_train
#
sub tk_show { my $self=shift;
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			my $colour = sprintf("#%02x%02x%02x",
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[0])),
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[1])),
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[2])),
			);
			if ($self->{display} and $self->{display} eq 'hex'){
				my $xo = ($y % 2) * ($self->{display_scale}/2);
				my $yo = 0;

				$self->{c}->create(
					polygon	=> [
						$xo + ((1+$x)*$self->{display_scale} ),
						$yo + ((1+$y)*$self->{display_scale} ),

						# polygon only:
						$xo + ((1+$x)*($self->{display_scale})+($self->{display_scale}/2) ),
						$yo + ((1+$y)*($self->{display_scale})-($self->{display_scale}/2) ),
						#

						$xo + ((1+$x)*($self->{display_scale})+$self->{display_scale} ),
						$yo + ((1+$y)*$self->{display_scale} ),

						$xo + ((1+$x)*($self->{display_scale})+$self->{display_scale} ),
						$yo + ((1+$y)*($self->{display_scale})+($self->{display_scale}/2) ),

						# Polygon only:
						$xo + ((1+$x)*($self->{display_scale})+($self->{display_scale}/2) ),
						$yo + ((1+$y)*($self->{display_scale})+($self->{display_scale}) ),
						#

						$xo + ((1+$x)*$self->{display_scale} ),
						$yo + ((1+$y)*($self->{display_scale})+($self->{display_scale}/2) ),

					],
					-outline	=> "black",
					-fill 		=> $colour,
				);
			}
			else {
				$self->{c}->create(
					rectangle	=> [
						(1+$x)*$self->{display_scale} ,
						(1+$y)*$self->{display_scale} ,
						(1+$x)*($self->{display_scale})+$self->{display_scale} ,
						(1+$y)*($self->{display_scale})+$self->{display_scale}
					],
					-outline	=> "black",
					-fill 		=> $colour,
				);
			}
		}
	}
	return 1;
}


=head1 METHOD train

Over-rides the base class to provide TK displays of the map

=cut

sub train { my ($self,$epochs) = (shift,shift);
	my $label_txt;

	$epochs = $self->{epochs} unless defined $epochs;
	$self->{display_scale} = 10 if not defined 	$self->{display_scale};

	$self->{mw} = MainWindow->new(
		-width	=> 200+($self->{map_dim_x} * $self->{display_scale}),
		-height	=> 200+($self->{map_dim_y} * $self->{display_scale}),
	);
    my $quit_flag = 0;
    my $quit_code = sub {$quit_flag = 1};
    $self->{mw}->protocol('WM_DELETE_WINDOW' => $quit_code);

	$self->{c} = $self->{mw}->Canvas(
		-width	=> 50+($self->{map_dim_x} * $self->{display_scale}),
		-height	=> 50+($self->{map_dim_y} * $self->{display_scale}),
		-relief	=> 'ridge',
		-border => 5,
	);
	$self->{c}->pack(-side=>'top');

	my $l = $self->{mw}->Label(-text => ' ',-textvariable=>\$label_txt);
	$l->pack(-side=>'left');

	# Replaces Tk's MainLoop
    for (1..$epochs) {	# honour the $epochs argument
		if ($quit_flag) {
			$self->{mw}->destroy;
			return;
		}
		$self->{t}++;				# Measure epoch
		my $target = $self->_select_target;
		my $bmu = $self->find_bmu($target);

		$self->_adjust_neighbours_of($bmu,$target);
		$self->_decay_learning_rate;

		$self->tk_show;
		$label_txt = sprintf("Epoch: %04d",$self->{t})."  "
		. "Learning: $self->{l}  "
		. sprintf("BMU: %02d,%02d",$bmu->[1],$bmu->[2])."  "
		. "Target: [".join(",",@$target)."]  "
		;
		$self->{c}->update;
		$l->update;
        DoOneEvent(DONT_WAIT);		# be kind and process XEvents if they arise
	}
	$label_txt = "Did $self->{t} epochs: now smoothed by "
		.($self->{smoothing}? $self->{smoothing} : "default amount");
	$self->smooth;	# $_ was not reliably the object here
#	MainLoop;

	return 1;
}



1;

__END__

=head1 SEE ALSO

See
L<AI::NeuralNet::Kohonen>;
L<AI::NeuralNet::Kohonen::Node>;

=head1 AUTHOR AND COPYRIGHT

This implementation Copyright (C) Lee Goddard, 2003.
All Rights Reserved.

Available under the same terms as Perl itself.
















 view all matches for this distribution


AI-NeuralNet-Kohonen-Visual

 view release on metacpan or  search on metacpan

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN

package AI::NeuralNet::Kohonen::Visual;

use vars qw/$VERSION/;
$VERSION = 0.3; # 05 May 2006 pod and packaging

=head1 NAME

AI::NeuralNet::Kohonen::Visual - Tk-based Visualisation

=head1 SYNOPSIS

See the test file in this distribution, or:

	package YourClass;
	use base "AI::NeuralNet::Kohonen::Visual";

	sub get_colour_for { my ($self,$x,$y) = (shift,shift,shift);
		# From here you return a TK colour name.
		# Get it as you please; for example, values of a 3D map:
		return sprintf("#%02x%02x%02x",
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[0])),
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[1])),
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[2])),
		);
	}

	exit;
	1;

And then:

	use YourClass;
	my $net = AI::NeuralNet::Kohonen::Visual->new(
		display          => 'hex',
		map_dim          => 39,
		epochs           => 19,
		neighbour_factor => 2,
		targeting        => 1,
		table            => "3
			1 0 0 red
			0 1 0 yellow
			0 0 1 blue
			0 1 1 cyan
			1 1 0 yellow
			1 .5 0 orange
			1 .5 1 pink",
	);
	$net->train;
	$net->plot_map;
	$net->main_loop;

	exit;


=head1 DESCRIPTION

Provides Tk-based visualisation routines for C<AI::NeuralNet::Kohonen>.
Replaces the earlier C<AI::NeuralNet::Kohonen::Demo::RGB>.

This is a sub-class of C<AI::NeuralNet::Kohonen>
that implements extra methods to make use of Tk.

This module is itself intended to be sub-classed by you,
where you provide a version of the method C<get_colour_for>:
see L<METHOD get_colour_for> and L<SYNOPSIS> for details.


=head1 CONSTRUCTOR (new)

The following parameter fields are added to the base module's fields:

=over 4

=item display

Set to C<hex> for display as a unified distance matrix, rather than
as the default plain grid;

=item display_scale

Set with a factor to effect the size of the display.

=item show_bmu

Show the current BMU during training.

=item show_training

Display updates during training.

=item label_bmu

=item label_all

Displays labels...

=item MainLoop

Calls TK's C<MainLoop> at the end of training.

=item missing_colour

When selecting a colour using L<METHOD get_colour_for>,
every node weight holding the value of C<missing_mask>
will be given the value of this parameter. If this parameter
is not defined, the default is 0.

=back

=cut

use strict;
use warnings;
use Carp qw/cluck carp confess croak/;

use base "AI::NeuralNet::Kohonen";

use Tk;
use Tk::Canvas;
use Tk::Label;
use Tk qw/DoOneEvent DONT_WAIT/;



=head1 METHOD train

Over-rides the base class to provide TK displays of the map.

=cut

sub train { my ($self,$epochs) = (shift,shift);
	$epochs = $self->{epochs} unless defined $epochs;
	$self->{display_scale} = 10 if not defined 	$self->{display_scale};

	&{$self->{train_start}} if exists $self->{train_start};

	$self->prepare_display if not defined $self->{_mw} or ref $self->{_mw} ne 'MainWindow';

	# Replaces Tk's MainLoop
    for (1..$epochs) {	# honour the $epochs argument
		if ($self->{_quit_flag}) {
			$self->{_mw}->destroy;
			$self->{_mw} = undef;
			return;
		}
		$self->{t}++;				# Measure epoch
		&{$self->{epoch_start}} if exists $self->{epoch_start};

		for (0..$#{$self->{input}}){
			my $target = $self->_select_target;
			my $bmu = $self->find_bmu($target);

			$self->_adjust_neighbours_of($bmu,$target);

			if (exists $self->{show_training}){
				if ($self->{show_bmu}){
					$self->plot_map(bmu_x=>$bmu->[1],bmu_y=>$bmu->[2]);
				} else {
					$self->plot_map;
				}
				$self->{_label_txt} = sprintf("Epoch: %04d",$self->{t})."  "
				. "Learning: $self->{l}  "
				. sprintf("BMU: %02d,%02d",$bmu->[1],$bmu->[2])."  "
				.( exists $target->{class}? "Target: [$target->{class}]  " : "")
				;
				$self->{_canvas}->update;
				$self->{_label}->update;
				DoOneEvent(DONT_WAIT);		# be kind and process XEvents if they arise
			}
		}

		$self->_decay_learning_rate;
 		&{$self->{epoch_end}} if exists $self->{epoch_end};
	}

	$self->{_label_txt} = "Did $self->{t} epochs: ";
	$self->{_label_txt} .= "now smoothed." if $self->{smoothing};
	$self->smooth if $self->{smoothing};	# align with the documented {smoothing} field
	$self->plot_map if $self->{MainLoop};
	&{$self->{train_end}} if exists $self->{train_end};
	MainLoop if $self->{MainLoop};

	return 1;
}

=head1 METHOD get_colour_for

This method is intended to be sub-classed.

Currently it only operates on the first three elements
of a weight vector, turning them into RGB values.

It returns a Tk colour for the node at position C<x>,C<y> in the
C<map> parameter.

Accepts: C<x> and C<y> co-ordinates in the map.

=cut

sub get_colour_for { my ($self,$x,$y) = (shift,shift,shift);
	my $_0 = $self->{map}->[$x]->[$y]->{weight}->[0];
	$_0 = $self->{missing_colour} || 0 if $_0 eq $self->{missing_mask};
	my $_1 = $self->{map}->[$x]->[$y]->{weight}->[1];
	$_1 = $self->{missing_colour} || 0 if $_1 eq $self->{missing_mask};
	my $_2 = $self->{map}->[$x]->[$y]->{weight}->[2];
	$_2 = $self->{missing_colour} || 0 if $_2 eq $self->{missing_mask};
	return sprintf("#%02x%02x%02x",
		(int (255 * $_0)),
		(int (255 * $_1)),
		(int (255 * $_2)),
	);
}


=head1 METHOD prepare_display

Deprecated: see L<METHOD create_empty_map>.

=cut

sub prepare_display {
	return $_[0]->create_empty_map;
}

=head1 METHOD create_empty_map

Sets up a TK C<MainWindow> and C<Canvas> to
act as an empty map.

=cut

sub create_empty_map { my $self = shift;
	my ($w,$h);
	if ($self->{display} and $self->{display} eq 'hex'){
		$w = ($self->{map_dim_x}+1) * ($self->{display_scale}+2);
		$h = ($self->{map_dim_y}+1) * ($self->{display_scale}+2);
	} else {
		$w = ($self->{map_dim_x}+1) * ($self->{display_scale});
		$h = ($self->{map_dim_y}+1) * ($self->{display_scale});
	}
	$self->{_mw} = MainWindow->new(
		-width	=> $w + 20,
		-height	=> $h + 20,
	);
	$self->{_mw}->fontCreate(qw/TAG -family verdana -size 8 -weight bold/);
	$self->{_mw}->resizable( 0, 0);
    $self->{_quit_flag} = 0;
    $self->{_mw}->protocol('WM_DELETE_WINDOW' => sub {$self->{_quit_flag}=1});
	$self->{_canvas} = $self->{_mw}->Canvas(
		-width	=> $w,
		-height	=> $h,
		-relief	=> 'raised',
		-border => 2,
	);
	$self->{_canvas}->pack(-side=>'top');
	$self->{_label} = $self->{_mw}->Button(
		-command      => sub { $self->{_mw}->destroy;$self->{_mw} = undef; },
		-relief       => 'groove',
		-text         => ' ',
		-wraplength   => $w,
		-textvariable => \$self->{_label_txt}
	);
	$self->{_label}->pack(-side=>'top');
	return 1;
}


=head1 METHOD plot_map

Plots the map on the existing canvas. Arguments are supplied
in a hash with the following keys as options:

The values of C<bmu_x> and C<bmu_y> represent the I<x> and I<y>
co-ordinates of the unit to highlight, using the value in the
C<hicol> field as the highlight colour. If no C<hicol> is provided,
it defaults to red.

When called, this method also sets the object field flag C<plotted>:
currently, this prevents C<main_loop> from calling this routine.

See also L<METHOD get_colour_for>.

=cut

sub plot_map { my ($self,$args) = (shift,{@_});
	$self->{plotted} = 1;
	# MW may have been destroyed
	$self->prepare_display if not defined $self->{_mw};
	my $yo = 5+($self->{display_scale}/2);
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			my $colour;
			if ($args->{bmu_x} and $args->{bmu_x}==$x and $args->{bmu_y}==$y){
				$colour = $args->{hicol} || 'red';
			} else {
				$colour = $self->get_colour_for ($x,$y);
			}
			if ($self->{display} and $self->{display} eq 'hex'){
				my $xo = 5+($y % 2) * ($self->{display_scale}/2);

				$self->{_canvas}->create(
					polygon	=> [
						$xo + (($x)*$self->{display_scale} ),
						$yo + (($y)*$self->{display_scale} ),

						# polygon only:
						$xo + (($x)*($self->{display_scale})+($self->{display_scale}/2) ),
						$yo + (($y)*($self->{display_scale})-($self->{display_scale}/2) ),
						#

						$xo + (($x)*($self->{display_scale})+$self->{display_scale} ),
						$yo + (($y)*$self->{display_scale} ),

						$xo + (($x)*($self->{display_scale})+$self->{display_scale} ),
						$yo + (($y)*($self->{display_scale})+($self->{display_scale}/2) ),

						# Polygon only:
						$xo + (($x)*($self->{display_scale})+($self->{display_scale}/2) ),
						$yo + (($y)*($self->{display_scale})+($self->{display_scale}) ),
						#

						$xo + (($x)*$self->{display_scale} ),
						$yo + (($y)*($self->{display_scale})+($self->{display_scale}/2) ),

					],
					-outline	=> "black",
					-fill 		=> $colour,
				);
			}
			else {
				$self->{_canvas}->create(
					rectangle	=> [
						$x*$self->{display_scale} +1,
						$y*$self->{display_scale} +1,
						$x*($self->{display_scale})+$self->{display_scale} +1,
						$y*($self->{display_scale})+$self->{display_scale} +1
					],
					-outline	=> "black",
					-fill 		=> $colour,
				);
			}

			# Label
			if ($self->{label_all}){
				my $txt;
				unless ( $txt = $self->{map}->[$x]->[$y]->{class}){
					$txt = "";
				}
				$self->label_map($x,$y,"+$txt");
			}

		}
	}
	if ($self->{label_bmu}){
		my $txt;
		unless ( $txt = $self->{map}->[$args->{bmu_x}]->[$args->{bmu_y}]->{class}){
			$txt = "";
		}
		$self->label_map(
			$args->{bmu_x}, $args->{bmu_y}, "+$txt"
		);
	}

	$self->{_canvas}->update;
	$self->{_label}->update;

	return 1;
}

=head1 METHOD label_map

Puts a text label on the map for the node at the I<x,y> co-ordinates
supplied in the first two parameters, using the text supplied in the
third.

Very naive: no attempt is made to check that the text will appear on the map.

=cut

sub label_map { my ($self,$x,$y,$t) = (shift,shift,shift,shift);
	$self->{_canvas}->createText(
		$x*$self->{display_scale}+($self->{display_scale}),
		$y*$self->{display_scale}+($self->{display_scale}),
		-text	=> $t,
		-anchor => 'w',
		-fill 	=> 'white',
		-font	=> 'TAG',
	);
}


=head1 METHOD main_loop

Calls TK's C<MainLoop> to keep a window open until the user closes it.

=cut

sub main_loop { my $self = shift;
	$self->plot_map unless $self->{plotted};
	MainLoop;
}



1;

__END__

=head1 SEE ALSO

See
L<AI::NeuralNet::Kohonen>;
L<AI::NeuralNet::Kohonen::Node>;

=head1 AUTHOR AND COPYRIGHT

This implementation Copyright (C) Lee Goddard, 2003.
All Rights Reserved.

Available under the same terms as Perl itself.
















 view all matches for this distribution


AI-NeuralNet-Kohonen

 view release on metacpan or  search on metacpan

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

package AI::NeuralNet::Kohonen;

use vars qw/$VERSION/;
$VERSION = 0.142;	# 08 August 2006 test lost input file

=head1 NAME

AI::NeuralNet::Kohonen - Kohonen's Self-organising Maps

=cut

use strict;
use warnings;
use Carp qw/croak cluck confess/;

use AI::NeuralNet::Kohonen::Node;
use AI::NeuralNet::Kohonen::Input;

=head1 SYNOPSIS

	$_ = AI::NeuralNet::Kohonen->new(
		map_dim_x => 39,
		map_dim_y => 19,
		epochs    => 100,
		table     =>
	"3
	1 0 0 red
	0 1 0 yellow
	0 0 1 blue
	0 1 1 cyan
	1 1 0 yellow
	1 .5 0 orange
	1 .5 1 pink"
	);

	$_->train;
	$_->save_file('mydata.txt');
	exit;

=head1 DESCRIPTION

An illustrative implementation of Kohonen's Self-organising Feature Maps (SOMs)
in Perl. It's not fast - it's illustrative. In fact, it's slow: but it is illustrative....

Have a look at L<AI::NeuralNet::Kohonen::Demo::RGB> for an example of
visualisation of the map.

I'll maybe add some more text here later.

=head1 DEPENDENCIES

	AI::NeuralNet::Kohonen::Node
	AI::NeuralNet::Kohonen::Input

=head1 EXPORTS

None

=head1 CONSTRUCTOR new

Instantiates object fields:

=over 4

=item input_file

A I<SOM_PAK> training file to load. This does not prevent
other input methods (C<input>, C<table>) being processed, but
it does override any specifications (C<weight_dim>) which may
have been explicitly handed to the constructor.

See also L</FILE FORMAT> and L</METHOD load_input>.

=item input

A reference to an array of training vectors, within which each vector
is represented by an array:

	[ [v1a, v1b, v1c], [v2a,v2b,v2c], ..., [vNa,vNb,vNc] ]

See also C<table>.

=item table

The contents of a file of the format that could be supplied to
the C<input_file> field.

=item input_names

A name for each dimension of the input vectors.

=item map_dim_x

=item map_dim_y

The dimensions of the feature map to create - defaults to a toy-sized 19
(note: this is Perl indexing, starting at zero, so 19 gives 20 nodes along an axis).

=item epochs

Number of epochs to run for (see L<METHOD train>).
Minimum number is C<1>.

=item learning_rate

The initial learning rate.

=item train_start

Reference to code to call at the beginning of training.

=item epoch_start

Reference to code to call at the beginning of every epoch
(such as a colour calibration routine).

=item epoch_end

Reference to code to call at the end of every epoch
(such as a display routine).

=item train_end

Reference to code to call at the end of training.

=item targeting

If undefined, random targets are chosen; otherwise
they're iterated over. Just for experimental purposes.

=item smoothing

The amount of smoothing to apply by default when C<smooth>
is applied (see L</METHOD smooth>).

=item neighbour_factor

When working out the size of the neighbourhood of influence,
the average of the map's dimensions is divided by this variable
before the exponential function is applied: the default value is 2.5,
but you may wish to use 2 or 4.

=item missing_mask

Used to signify data is missing in an input vector. Defaults
to C<x>.

=back
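
For example, a minimal constructor call loading a I<SOM_PAK> file and
attaching a callback might look like this (the file path and callback
body are illustrative only):

	my $net = AI::NeuralNet::Kohonen->new(
		input_file => 'training.dat',
		epochs     => 50,
		epoch_end  => sub { print "Finished an epoch\n" },
	);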

Private fields:

=over 4

=item time_constant

The number of iterations (epochs) to be completed, over the log of the map radius.

=item t

The current epoch, or moment in time.

=item l

The current learning rate.

=item map_dim_a

Average of the map dimensions.

=back

=cut

sub new {
	my $class					= shift;
	my %args					= @_;
	my $self 					= bless \%args,$class;

	$self->{missing_mask}		= 'x' unless defined $self->{missing_mask};
	$self->_process_table if defined $self->{table};	# Creates {input}
	$self->load_input($self->{input_file}) if defined $self->{input_file};	# Creates {input}
	if (not defined $self->{input}){
		cluck "No {input} supplied!";
		return undef;
	}

	$self->{map_dim_x}			= 19 unless defined $self->{map_dim_x};
	$self->{map_dim_y}			= 19 unless defined $self->{map_dim_y};
	# Legacy from...yesterday
	if ($self->{map_dim}){
		$self->{map_dim_x} 		= $self->{map_dim_y} = $self->{map_dim}
	}
	if (not defined $self->{map_dim_x} or $self->{map_dim_x}==0
	 or not defined $self->{map_dim_y} or $self->{map_dim_y}==0){
		 confess "No map dimensions in the input!";
	 }
	if ($self->{map_dim_x}>$self->{map_dim_y}){
		$self->{map_dim_a} 		= $self->{map_dim_y} + (($self->{map_dim_x}-$self->{map_dim_y})/2)
	} else {
		$self->{map_dim_a} 		= $self->{map_dim_x} + (($self->{map_dim_y}-$self->{map_dim_x})/2)
	}
	$self->{neighbour_factor}	= 2.5 unless $self->{neighbour_factor};
	$self->{epochs}				= 99 unless defined $self->{epochs};
	$self->{epochs}				= 1 if $self->{epochs}<1;
	$self->{time_constant}		= $self->{epochs} / log($self->{map_dim_a}) unless $self->{time_constant};	# to base 10?
	$self->{learning_rate}		= 0.5 unless $self->{learning_rate};
	$self->{l}					= $self->{learning_rate};
	if (not $self->{weight_dim}){
		cluck "{weight_dim} not set";
		return undef;
	}
	$self->randomise_map;
	return $self;
}




=head1 METHOD randomise_map

Populates the C<map> with nodes that contain random real numbers.

See L<AI::NeuralNet::Kohonen::Node/CONSTRUCTOR new>.

=cut

sub randomise_map { my $self=shift;
	confess "{weight_dim} not set" unless $self->{weight_dim};
	confess "{map_dim_x} not set" unless $self->{map_dim_x};
	confess "{map_dim_y} not set" unless $self->{map_dim_y};
	for my $x (0..$self->{map_dim_x}){
		$self->{map}->[$x] = [];
		for my $y (0..$self->{map_dim_y}){
			$self->{map}->[$x]->[$y] = new AI::NeuralNet::Kohonen::Node(
				dim => $self->{weight_dim},
				missing_mask => $self->{missing_mask},
			);
		}
	}
}


=head1 METHOD clear_map

As L<METHOD randomise_map> but sets all C<map> nodes to
either the value supplied as the only parameter, or C<undef>.

=cut

sub clear_map { my $self=shift;
	confess "{weight_dim} not set" unless $self->{weight_dim};
	confess "{map_dim_x} not set" unless $self->{map_dim_x};
	confess "{map_dim_y} not set" unless $self->{map_dim_y};
	my $val = shift || $self->{missing_mask};
	my $w = [];
	foreach (0..$self->{weight_dim}){
		push @$w, $val;
	}
	for my $x (0..$self->{map_dim_x}){
		$self->{map}->[$x] = [];
		for my $y (0..$self->{map_dim_y}){
			$self->{map}->[$x]->[$y] = new AI::NeuralNet::Kohonen::Node(
				weight		 => $w,
				dim 		 => $self->{weight_dim},
				missing_mask => $self->{missing_mask},
			);
		}
	}
}




=head1 METHOD train

Optionally accepts a parameter that is the number of epochs
for which to train: the default is the value in the C<epochs> field.

An epoch is composed of a number of generations, equal to the
total number of input vectors.

For every generation, iterates:

=over 4

=item 1

selects a target from the input array (see L</PRIVATE METHOD _select_target>);

=item 2

finds the best-matching unit (see L</METHOD find_bmu>);

=item 3

adjusts the neighbours of the BMU (see L</PRIVATE METHOD _adjust_neighbours_of>);

=back

At the end of every generation, the learning rate is decayed
(see L</PRIVATE METHOD _decay_learning_rate>).

See C<CONSTRUCTOR new> for details of applicable callbacks.

Returns a true value.

=cut

sub train { my ($self,$epochs) = (shift,shift);
	$epochs = $self->{epochs} unless defined $epochs;
	&{$self->{train_start}} if exists $self->{train_start};
	for my $epoch (1..$epochs){
		$self->{t} = $epoch;
		&{$self->{epoch_start}} if exists $self->{epoch_start};
		for (0..$#{$self->{input}}){
			my $target = $self->_select_target;
			my $bmu = $self->find_bmu($target);
			$self->_adjust_neighbours_of($bmu,$target);
		}
		$self->_decay_learning_rate;
		&{$self->{epoch_end}} if exists $self->{epoch_end};
	}
	&{$self->{train_end}} if $self->{train_end};
	return 1;
}


=head1 METHOD find_bmu

For a specific target, finds the Best Matching Unit in the map
and returns its distance and x/y index.

Accepts: a reference to an array that is the target.

Returns: a reference to an array that is the BMU (and should
perhaps be abstracted as an object in its own right), indexed as follows:

=over 4

=item 0

euclidean distance from the supplied target

=item 1, 2

I<x> and I<y> co-ordinate in the map

=back

See L</METHOD get_weight_at>,
and L<AI::NeuralNet::Kohonen::Node/distance_from>,

=cut


sub find_bmu { my ($self,$target) = (shift,shift);
	my $closest = [];	# [value, x,y] value and co-ords of closest match
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			my $distance = $self->{map}->[$x]->[$y]->distance_from( $target );
			$closest = [$distance,0,0] if $x==0 and $y==0;
			$closest = [$distance,$x,$y] if $distance < $closest->[0];
		}
	}
	return $closest;
}

=head1 METHOD get_weight_at

Returns a reference to the weight array at the supplied I<x>,I<y>
co-ordinates.

Accepts: I<x>,I<y> co-ordinates, each a scalar.

Returns: reference to an array that is the weight of the node, or
C<undef> on failure.

=cut

sub get_weight_at { my ($self,$x,$y) = (shift,shift,shift);
	return undef if $x<0 or $y<0 or $x>$self->{map_dim_x} or $y>$self->{map_dim_y};
	return $self->{map}->[$x]->[$y]->{weight};
}



=head1 METHOD get_results

Finds and returns the results for all input vectors in the supplied
reference to an array of arrays,
placing the values in the C<results> field (array reference)
and returning them either as a list or as that array reference,
depending on the calling context.

If no array reference of input vectors is supplied, will use
the values in the C<input> field.

Individual results are in the array format as described in
L<METHOD find_bmu>.

See L<METHOD find_bmu>, and L</METHOD get_weight_at>.
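
For example (C<$net> is assumed to have been trained already):

	foreach my $r ($net->get_results){
		my ($distance, $x, $y, $class) = @$r;
		print "$class maps to ($x,$y) at distance $distance\n";
	}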

=cut

sub get_results { my ($self,$targets)=(shift,shift);
	$self->{results} = [];
	if (not defined $targets){
		$targets = $self->{input};
	} elsif (not $targets eq $self->{input}){
		foreach (@$targets){
			next if ref $_ eq 'AI::NeuralNet::Kohonen::Input';
			$_ = new AI::NeuralNet::Kohonen::Input(values=>$_);
		}
	}
	foreach my $target (@{ $targets}){
		$_ = $self->find_bmu($target);
		push @$_, $target->{class}||"?";
		push @{$self->{results}}, $_;
	}
	# Make it a scalar if it's a scalar
#	if ($#{$self->{results}} == 0){
#		$self->{results} = @{$self->{results}}[0];
#	}
	return wantarray? @{$self->{results}} : $self->{results};
}


=head1 METHOD map_results

Clears the C<map> and fills it with the results.

The sole parameter is passed to L<METHOD clear_map>.
L<METHOD get_results> is then called, and the results
returned fed into the object field C<map>.

This may change, as it seems misleading to re-use that field.

=cut

sub map_results { my $self=shift;

}


=head1 METHOD dump

Print the current weight values to the screen.

=cut

sub dump { my $self=shift;
	print "    ";
	for my $x (0..$self->{map_dim_x}){
		printf ("  %02d ",$x);
	}
	print"\n","-"x107,"\n";
	for my $x (0..$self->{map_dim_x}){
		for my $w (0..$self->{weight_dim}){
			printf ("%02d | ",$x);
			for my $y (0..$self->{map_dim_y}){
				printf("%.2f ", $self->{map}->[$x]->[$y]->{weight}->[$w]);
			}
			print "\n";
		}
		print "\n";
	}
}

=head1 METHOD smooth

Perform gaussian smoothing upon the map.

Accepts: the length of the side of the square gaussian mask to apply.
If not supplied, uses the value in the field C<smoothing>; if that is
empty, uses the square root of the average of the map dimensions
(C<map_dim_a>).

Returns: a true value.
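
For example, to smooth with a 5-node-wide mask:

	$net->smooth(5);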

=cut

sub smooth { my ($self,$smooth) = (shift,shift);
	$smooth = $self->{smoothing} if not $smooth and defined $self->{smoothing};
	# Fall back to the square root of the average map dimension (see POD above)
	$smooth = int( sqrt $self->{map_dim_a} ) unless $smooth;
	return unless $smooth;
	my $mask = _make_gaussian_mask($smooth);

	# For every weight at every point
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			for my $w (0..$self->{weight_dim}){
				# Apply the mask
				for my $mx (0..$smooth){
					for my $my (0..$smooth){
						$self->{map}->[$x]->[$y]->{weight}->[$w] *= $mask->[$mx]->[$my];
					}
				}
			}
		}
	}
	return 1;
}



=head1 METHOD load_input

Loads a SOM_PAK-format file of input vectors.

This method is automatically accessed if the constructor is supplied
with an C<input_file> field.

Requires: a path to a file.

Returns C<undef> on failure.

See L</FILE FORMAT>.

=cut

sub load_input { my ($self,$path) = (shift,shift);
	local *IN;
	if (not open IN,$path){
		warn "Could not open file <$path>: $!";
		return undef;
	}
	@_ = <IN>;
	close IN;
	$self->_process_input_text(\@_);
	return 1;
}


=head1 METHOD save_file

Saves the map file in I<SOM_PAK> format (see L<METHOD load_input>)
at the path specified in the first argument.

Returns C<undef> on failure, a true value on success.
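
For example (the path is illustrative):

	$net->save_file('mymap.txt') or warn "Could not save map";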

=cut

sub save_file { my ($self,$path) = (shift,shift);
	local *OUT;
	if (not open OUT,">$path"){
		warn "Could not open file for writing <$path>: $!";
		return undef;
	}
	#- Dimensionality of the vectors (integer, compulsory).
	print OUT $self->{weight_dim}+1," ";	# Perl indexing
	#- Topology type, either hexa or rect (string, optional, case-sensitive).
	if (not defined $self->{display}){
		print OUT "rect ";
	} else { # $self->{display} eq 'hex'
		print OUT "hexa ";
	}
	#- Map dimension in x-direction (integer, optional).
	print OUT $self->{map_dim_x}." ";
	#- Map dimension in y-direction (integer, optional).
	print OUT $self->{map_dim_y}." ";
	#- Neighborhood type, either bubble or gaussian (string, optional, case-sensitive).
	print OUT "gaussian ";
	# End of header
	print OUT "\n";

	# Format input data
	foreach (@{$self->{input}}){
		print OUT join("\t",@{$_->{values}});
		if ($_->{class}){
			print OUT " $_->{class} " ;
		}
		print OUT "\n";
	}
	# EOF
	print OUT chr 26;
	close OUT;
	return 1;
}


#
# Process ASCII from table field or input file
# Accepts: ASCII as array or array ref
#
sub _process_input_text { my ($self) = (shift);
	if (not defined $_[1]){
		if (ref $_[0] eq 'ARRAY'){
			@_ = @{$_[0]};
		} else {
			@_ = split/[\n\r\f]+/,$_[0];
		}
	}
	chomp @_;
	my @specs = split/\s+/,(shift @_);
	#- Dimensionality of the vectors (integer, compulsory).
	$self->{weight_dim} = shift @specs;
	$self->{weight_dim}--; # Perl indexing
	#- Topology type, either hexa or rect (string, optional, case-sensitive).
	my $display		    = shift @specs;
	if (not defined $display and exists $self->{display}){
		# Intentionally blank
	} elsif (not defined $display){
		$self->{display} = undef;
	} elsif ($display eq 'hexa'){
		$self->{display} = 'hex'
	} elsif ($display eq 'rect'){
		$self->{display} = undef;
	}
	#- Map dimension in x-direction (integer, optional).
	$_				      = shift @specs;
	$self->{map_dim_x}    = $_ if defined $_;
	#- Map dimension in y-direction (integer, optional).
	$_				      = shift @specs;
	$self->{map_dim_y}    = $_ if defined $_;
	#- Neighborhood type, either bubble or gaussian (string, optional, case-sensitive).
	# not implemented

	# Format input data
	foreach (@_){
		$self->_add_input_from_str($_);
	}
	return 1;
}


=head1 PRIVATE METHOD _select_target

Returns a random target from the training set in the C<input> field,
unless the C<targeting> field is defined, in which case the targets
are iterated over.

=cut

sub _select_target { my $self=shift;
	if (not $self->{targeting}){
		return $self->{input}->[
			(int rand(scalar @{$self->{input}}))
		];
	}
	else {
		$self->{tar}++;
		if ($self->{tar}>$#{ $self->{input} }){
			$self->{tar} = 0;
		}
		return $self->{input}->[$self->{tar}];
	}
}


=head1 PRIVATE METHOD _adjust_neighbours_of

Accepts: a reference to an array containing
the distance of the BMU from the target, as well
as the x and y co-ordinates of the BMU in the map;
a reference to the target, which is an
C<AI::NeuralNet::Kohonen::Input> object.

Returns: true.

=head2 FINDING THE NEIGHBOURS OF THE BMU

	                        (      t   )
	sigma(t) = sigma(0) exp ( - ------ )
	                        (   lambda )

Where C<sigma> is the width of the map at any stage
in time (C<t>), and C<lambda> is a time constant.

Lambda is our field C<time_constant>.

The map radius is naturally just half the map width.
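
For example, with the default 19x19 map (C<map_dim_a> = 19) and the
default C<neighbour_factor> of 2.5, the radius of influence starts at
roughly C<int(19/2.5) = 7> nodes and shrinks exponentially towards zero
as C<t> approaches the number of epochs.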

=head2 ADJUSTING THE NEIGHBOURS OF THE BMU

	W(t+1) = W(t) + THETA(t) L(t)( V(t)-W(t) )

Where C<L> is the learning rate, C<V> the target vector,
and C<W> the weight. THETA(t) represents the influence
of distance from the BMU upon a node's learning, and
is calculated by the C<Node> class - see
L<AI::NeuralNet::Kohonen::Node/distance_effect>.

=cut

sub _adjust_neighbours_of { my ($self,$bmu,$target) = (shift,shift,shift);
	my $neighbour_radius = int (
		($self->{map_dim_a}/$self->{neighbour_factor}) * exp(- $self->{t} / $self->{time_constant})
	);

	# Distance from co-ord vector (0,0) as integer
	# Basically map_width * y  +  x
	my $centre = ($self->{map_dim_a}*$bmu->[2])+$bmu->[1];
	# Set the class of the BMU
	$self->{map}->[ $bmu->[1] ]->[ $bmu->[2] ]->{class} = $target->{class};

	for my $x ($bmu->[1]-$neighbour_radius .. $bmu->[1]+$neighbour_radius){
		next if $x<0 or $x>$self->{map_dim_x};		# Ignore those not mappable
		for my $y ($bmu->[2]-$neighbour_radius .. $bmu->[2]+$neighbour_radius){
			next if $y<0 or $y>$self->{map_dim_y};	# Ignore those not mappable
			# Skip node if it is out of the circle of influence
			next if (
				(($bmu->[1] - $x) * ($bmu->[1] - $x)) + (($bmu->[2] - $y) * ($bmu->[2] - $y))
			) > ($neighbour_radius * $neighbour_radius);

			# Adjust the weight
			for my $w (0..$self->{weight_dim}){
				next if $target->{values}->[$w] eq $self->{map}->[$x]->[$y]->{missing_mask};
				my $weight = \$self->{map}->[$x]->[$y]->{weight}->[$w];
				$$weight = $$weight + (
					$self->{map}->[$x]->[$y]->distance_effect($bmu->[0], $neighbour_radius)
					* ( $self->{l} * ($target->{values}->[$w] - $$weight) )
				);
			}
		}
	}
}


=head1 PRIVATE METHOD _decay_learning_rate

Performs a gaussian decay upon the learning rate (our C<l> field).

	              (       t   )
	L(t) = L  exp ( -  ------ )
	        0     (    lambda )

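For example, with the default C<learning_rate> of 0.5, the rate will have
decayed to C<0.5 * exp(-1)>, roughly 0.18, by the time C<t> reaches
C<time_constant>.
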
=cut

sub _decay_learning_rate { my $self=shift;
	$self->{l} =  (
		$self->{learning_rate} * exp(- $self->{t} / $self->{time_constant})
	);
}


=head1 PRIVATE FUNCTION _make_gaussian_mask

Accepts: size of mask.

Returns: reference to a 2d array that is the mask.

=cut

sub _make_gaussian_mask { my ($smooth) = (shift);
	my $f = 4; # Cut-off threshold
	my $g_mask_2d = [];
	for my $x (0..$smooth){
		$g_mask_2d->[$x] = [];
		for my $y (0..$smooth){
			$g_mask_2d->[$x]->[$y] =
				_gauss_weight( $x-($smooth/2), $smooth/$f)
			  * _gauss_weight( $y-($smooth/2), $smooth/$f );
		}
	}
	return $g_mask_2d;
}

=head1 PRIVATE FUNCTION _gauss_weight

Accepts two parameters: the first, C<r>, gives the distance from the mask centre;
the second, C<sigma>, specifies the width of the mask.

Returns the gaussian weight.
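
In the notation used elsewhere in this documentation:

	              (       r**2     )
	w(r) =   exp  ( - ------------ )
	              (   2 * sigma**2 )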

See also L<_decay_learning_rate>.

=cut

sub _gauss_weight { my ($r, $sigma) = (shift,shift);
	return exp( -($r**2) / (2 * $sigma**2) );
}


=head1 PUBLIC METHOD quantise_error

Returns the quantisation error for either the supplied points,
or those in the C<input> field.
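
For example (the supplied vector is illustrative):

	my $qe  = $net->quantise_error;                 # uses the {input} field
	my $qe2 = $net->quantise_error([ [1, 0, 0] ]);  # or supplied vectors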

=cut


sub quantise_error { my ($self,$targets) = (shift,shift);
	my $qerror=0;
	if (not defined $targets){
		$targets = $self->{input};
	} else {
		foreach (@$targets){
			if (not ref $_ or ref $_ ne 'ARRAY'){
				croak "Supplied target parameter should be an array of arrays!"
			}
			$_ = new AI::NeuralNet::Kohonen::Input(values=>$_);
		}
	}

	# Receives an array of ONE element,
	# should be an array of an array of elements
	my @bmu = $self->get_results($targets);

	# Check input and output dims are the same
	if ($#{$self->{map}->[0]->[1]->{weight}} != $targets->[0]->{dim}){
		confess "target input and map dimensions differ";
	}

	for my $i (0..$#bmu){
		foreach my $w (0..$self->{weight_dim}){
			# Sum absolute differences so positive and negative errors cannot cancel
			$qerror += abs( $targets->[$i]->{values}->[$w]
			- $self->{map}->[$bmu[$i]->[1]]->[$bmu[$i]->[2]]->{weight}->[$w] );
		}
	}
	$qerror /= scalar @$targets;
	return $qerror;
}


=head1 PRIVATE METHOD _add_input_from_str

Adds to the C<input> field an input vector in SOM_PAK-format
whitespace-delimited ASCII.

Returns C<undef> on failure to add an item (perhaps because
the data passed was a comment, or the C<weight_dim> flag was
not set); a true value on success.

=cut

sub _add_input_from_str { my ($self) = (shift);
	$_ = shift;
	s/#.*$//g;
	return undef if /^$/ or not defined $self->{weight_dim};
	my @i = split /\s+/,$_;
	return undef if $#i < $self->{weight_dim}; # catch bad lines
	# 'x' in files signifies unknown: we prefer undef?
#	@i[0..$self->{weight_dim}] = map{
#		$_ eq 'x'? undef:$_
#	} @i[0..$self->{weight_dim}];
	my %args = (
		dim 	=> $self->{weight_dim},
		values	=> [ @i[0..$self->{weight_dim}] ],
	);
	$args{class}   = $i[$self->{weight_dim}+1] if $i[$self->{weight_dim}+1];
	$args{enhance} = $i[$self->{weight_dim}+2] if $i[$self->{weight_dim}+2];
	$args{fixed}   = $i[$self->{weight_dim}+3] if $i[$self->{weight_dim}+3];
	push @{$self->{input}}, new AI::NeuralNet::Kohonen::Input(%args);

	return 1;
}


#
# Processes the 'table' parameter to the constructor
#
sub _process_table { my $self = shift;
	$_ = $self->_process_input_text( $self->{table} );
	undef $self->{table};
	return $_;
}


1;

__END__

=head1 FILE FORMAT

This module has begun to attempt the I<SOM_PAK> format:
I<SOM_PAK> file format version 3.1 (April 7, 1995),
Helsinki University of Technology, Espoo:

=over 4

The input data is stored in ASCII-form as a list of entries, one line
...for each vectorial sample.

The first line of the file is reserved for status knowledge of the
entries; in the present version it is used to define the following
items (these items MUST occur in the indicated order):

   - Dimensionality of the vectors (integer, compulsory).
   - Topology type, either hexa or rect (string, optional, case-sensitive).
   - Map dimension in x-direction (integer, optional).
   - Map dimension in y-direction (integer, optional).
   - Neighborhood type, either bubble or gaussian (string, optional, case-sensitive).

...

Subsequent lines consist of n floating-point numbers followed by an
optional class label (that can be any string) and two optional
qualifiers (see below) that determine the usage of the corresponding
data entry in training programs.  The data files can also contain an
arbitrary number of comment lines that begin with '#', and are
ignored. (One '#' for each comment line is needed.)

If some components of some data vectors are missing (due to data
collection failures or any other reason) those components should be
marked with 'x'...[in processing, these] are ignored.

...

Each data line may have two optional qualifiers that determine the
usage of the data entry during training. The qualifiers are of the
form codeword=value, where spaces are not allowed between the parts of
the qualifier. The optional qualifiers are the following:

=over 4

=item -

Enhancement factor: e.g. weight=3.  The training rate for the
corresponding input pattern vector is multiplied by this
parameter so that the reference vectors are updated as if this
input vector were repeated 3 times during training (i.e., as if
the same vector had been stored 2 extra times in the data file).

=item -

Fixed-point qualifier: e.g. fixed=2,5.  The map unit defined by
the fixed-point coordinates (x = 2; y = 5) is selected instead of
the best-matching unit for training. (See below for the definition
of coordinates over the map.) If several inputs are forced to
known locations, a wanted orientation results in the map.

=back

=back
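
As an illustration only, a small file in this format might read:

	3 hexa 19 19 gaussian
	1 0 0 red
	0 0 1 blue weight=3
	1 x 0 orange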

Not (yet) implemented in the file format:

=over 4

=item *

hexa/rect is only visual, and only in the ::Demo::RGB package at the moment

=item *

I<neighbourhood type> is always gaussian.

=item *

C<x> for missing data.

=item *

the two optional qualifiers

=back

=cut

=head1 SEE ALSO

See L<AI::NeuralNet::Kohonen::Node/distance_from>;
L<AI::NeuralNet::Kohonen::Demo::RGB>.

L<The documentation for C<SOM_PAK>|ftp://cochlea.hut.fi/pub/som_pak>,
which has lots of advice on map building that may or may not be applicable yet.

A very nice explanation of Kohonen's algorithm:
L<AI-Junkie SOM tutorial part 1|http://www.fup.btinternet.co.uk/aijunkie/som1.html>

=head1 AUTHOR AND COPYRIGHT

This implementation Copyright (C) Lee Goddard, 2003-2006.
All Rights Reserved.

Available under the same terms as Perl itself.



AI-NeuralNet-Mesh


Mesh.pm

#!/usr/bin/perl	

# Copyright (c) 2000  Josiah Bryan  USA
#
# See AUTHOR section in pod text below for usage and distribution rights.   
#

BEGIN {
	 $AI::NeuralNet::Mesh::VERSION = "0.44";
	 $AI::NeuralNet::Mesh::ID = 
'$Id: AI::NeuralNet::Mesh.pm, v'.$AI::NeuralNet::Mesh::VERSION.' 2000/15/09 03:29:08 josiah Exp $';
}

package AI::NeuralNet::Mesh;
                                 
    require Exporter;
	@ISA         = qw(Exporter);
	@EXPORT      = qw(range intr pdiff);
	%EXPORT_TAGS = ( 
		'default'    => [ qw ( range intr pdiff )],
		'all'        => [ qw ( p low high ramp and_gate or_gate range intr pdiff ) ],
		'p'          => [ qw ( p low high intr pdiff ) ],
		'acts'       => [ qw ( ramp and_gate or_gate range ) ],
	);
    @EXPORT_OK   = ( @{ $EXPORT_TAGS{'all'} }, qw( p low high ramp and_gate or_gate ) );
    
    use strict;
    use Benchmark; 

   	# See POD for usage of this variable.
	$AI::NeuralNet::Mesh::Connector = '_c';
	
	# Debugging subs
	$AI::NeuralNet::Mesh::DEBUG  = 0;
	sub whowasi { (caller(1))[3] . '()' }
	sub debug { shift; $AI::NeuralNet::Mesh::DEBUG = shift || 0; } 
	sub d { shift if(substr($_[0],0,4) eq 'AI::'); my ($a,$b,$c)=(shift,shift,$AI::NeuralNet::Mesh::DEBUG); print $a if($c == $b); return $c }
	sub verbose {debug @_};
	sub verbosity {debug @_};
	sub v {debug @_};
	
	
	# Return version of ::ID string passed or current version of this
	# module if no string is passed. Used in load() to detect file versions.
	sub version {
		shift if(substr($_[0],0,4) eq 'AI::');
		substr((split(/\s/,(shift || $AI::NeuralNet::Mesh::ID)))[2],1);
	}                                  
	
	# Rounds a floating-point to an integer with int() and sprintf()
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
    	# try/catch are not core Perl; guard the conversion with a plain eval
      	my $n = eval { int(sprintf("%.0f",shift)) };
      	return defined $n ? $n : 0;
	}
    
	# Package constructor
	sub new {
		no strict 'refs';
		my $type	=	shift;
		my $self	=	{};
		my $layers	=	shift;
		my $nodes	=	shift;
		my $outputs	=	shift || $nodes;
		my $inputs	=	shift || $nodes;
        
		bless $self, $type;
		                       
		# If $layers is a string, then it will be numerically equal to 0, so 
		# try to load it as a network file.
		if($layers == 0) {  
		    # We use a "1" flag as the second argument to indicate that we 
		    # want load() to call the new constructor to make a network the
		    # same size as in the file and return a reference to the network,
		    # instead of just creating the network from a pre-existing reference
			return $self->load($layers,1);
		}
		
		# Looks like we got ourselves a layer specs array
		if(ref($layers) eq "ARRAY") { 
			if(ref($layers->[0]) eq "HASH") {
				$self->{total_nodes}	=	0;
				$self->{inputs}			=	$layers->[0]->{nodes};
				$self->{nodes}			=	$layers->[0]->{nodes};
				$self->{outputs}		=	$layers->[$#{$layers}]->{nodes};
				$self->{total_layers}	=	$#{$layers};
				for (0..$#{$layers}){$self->{layers}->[$_] = $layers->[$_]->{nodes}}
				for (0..$self->{total_layers}){$self->{total_nodes}+=$self->{layers}->[$_]}	
			} else {
				$self->{inputs}			= $layers->[0];
			    $self->{nodes}			= $layers->[0];
				$self->{outputs}		= $layers->[$#{$layers}];
				$self->{layers} 		= $layers;
				$self->{total_layers}	= $#{$self->{layers}};
				$self->{total_nodes}	= 0;
				for (0..$self->{total_layers}) {
					$self->{total_nodes}+=$self->{layers}->[$_];
				}
			}
		} else {
			$self->{total_nodes}	= $layers * $nodes + $outputs;
			$self->{total_layers}	= $layers;
			$self->{nodes}			= $nodes;
			$self->{inputs}			= $inputs;
			$self->{outputs}		= $outputs;
		}	
		
		# Initialize misc. variables
		$self->{col_width}		=	5;
		$self->{random}			=	0;
		$self->{const}			=	0.0001;
		$self->{connector}		=	$AI::NeuralNet::Mesh::Connector;
		
		# Build mesh
		$self->_init();	
		
		# Initialize activation, thresholds, etc., if provided
		if(ref($layers->[0]) eq "HASH") {
			for (0..$self->{total_layers}) {
				$self->activation($_,$layers->[$_]->{activation});
				$self->threshold($_,$layers->[$_]->{threshold});
				$self->mean($_,$layers->[$_]->{mean});
			}
		}
				
		# Done!
		return $self;
	}	
    

    # Internal usage
    # Connects one range of nodes to another range
    sub _c {
    	my $self	=	shift;
    	my $r1a		=	shift;
    	my $r1b		=	shift;
    	my $r2a		=	shift;
    	my $r2b		=	shift;
    	my $m1		=	shift || $self->{mesh};
    	my $m2		=	shift || $m1;
		for my $y ($r1a..$r1b-1) {
			for my $z ($r2a..$r2b-1) {
				$m1->[$y]->add_output_node($m2->[$z]);
			}
		}
	}
    
    # Internal usage
    # Creates the mesh of neurons
    sub _init {
    	my $self		=	shift;
    	my $nodes		=	$self->{nodes};
    	my $outputs		=	$self->{outputs} || $nodes;
    	my $inputs		=	$self->{inputs}  || $nodes;
    	my $layers		=	$self->{total_layers};
        my $tmp 		=	$self->{total_nodes} || ($layers * $nodes + $outputs);
    	my $layer_specs	=	$self->{layers};
    	my $connector	=	$self->{connector};
        my ($x,$y,$z);
        no strict 'refs';
        
        # Just to be safe.
        $self->{total_nodes} = $tmp;
        
        # If they didn't give layer specifications, then we derive our own specs.
        if(!(defined $self->{layers})) {
        	$layer_specs = [split(',',"$nodes," x $layers)];
        	$layer_specs->[$#{$layer_specs}+1]=$outputs;
        	$self->{layers}	= $layer_specs;
        }
        
        # First create the individual nodes
		for my $x (0..$tmp-1) {         
			$self->{mesh}->[$x] = AI::NeuralNet::Mesh::node->new($self);
        }              
        
        # Get an instance of an output (data collector) node
		$self->{output} = AI::NeuralNet::Mesh::output->new($self);
		
		# Connect the output layer to the data collector
        for $x (0..$outputs-1) {                    
			$self->{mesh}->[$tmp-$outputs+$x]->add_output_node($self->{output});
		}
		
		# Now we use the _c() method to connect the layers together.
        $y=0;
        my $c = $connector.'($self,$y,$y+$z,$y+$z,$y+$z+$layer_specs->[$x+1])';
        for $x (0..$layers-1) {
        	$z = $layer_specs->[$x];                         
        	d("layer $x size: $z (y:$y)\n,",1);
        	eval $c;
        	$y+=$z;
		}		
		
		# Get an instance of our cap node.
		$self->{input}->{cap} = AI::NeuralNet::Mesh::cap->new(); 

		# Add a cap to the bottom of the mesh to stop it from trying
		# to recursively adjust_weight() where there are no more nodes.
		for my $x (0..$inputs-1) {
			$self->{input}->{IDs}->[$x] = 
				$self->{mesh}->[$x]->add_input_node($self->{input}->{cap});
		}
	}
    
    # See POD for usage
    sub extend {
    	my $self	=	shift;
    	my $layers	=	shift;
    
    	# Looks like we got ourselves a layer specs array
		if(ref($layers) eq "ARRAY") { 
			if($self->{total_layers}!=$#{$layers}) {
				$self->{error} = "extend(): Cannot add new layers. Create a new network to add layers.\n";
				return undef;
			}
			if(ref($layers->[0]) eq "HASH") {
				$self->{total_nodes}	=	0;
				$self->{inputs}			=	$layers->[0]->{nodes};
				$self->{nodes}			=	$layers->[0]->{nodes};
				$self->{outputs}		=	$layers->[$#{$layers}]->{nodes};
				for (0..$#{$layers}){
					$self->extend_layer($_,$layers->[$_]);
					$self->{layers}->[$_] =$layers->[$_]->{nodes};
				}
				for (0..$self->{total_layers}){$self->{total_nodes}+=$self->{layers}->[$_]}	
			} else {
				$self->{inputs}			= $layers->[0];
			    $self->{nodes}			= $layers->[0];
				$self->{outputs}		= $layers->[$#{$layers}];
				$self->{total_nodes}	= 0;
				for (0..$self->{total_layers}){$self->extend_layer($_,$layers->[$_])}
				$self->{layers} 		= $layers;
				for (0..$self->{total_layers}){$self->{total_nodes}+= $self->{layers}->[$_]}
			}
		} else {
			$self->{error} = "extend(): Invalid argument type.\n";
			return undef;
		}
		return 1;
	}
    
    # See POD for usage
    sub extend_layer {
    	my $self	=	shift;
    	my $layer	=	shift || 0;
    	my $specs	=	shift;
    	if(!$specs) {
    		$self->{error} = "extend_layer(): You must provide specs to extend layer $layer with.\n";
    		return undef;
    	}
    	if(ref($specs) eq "HASH") {
    		$self->activation($layer,$specs->{activation}) if($specs->{activation});
    		$self->threshold($layer,$specs->{threshold})   if($specs->{threshold});
    		$self->mean($layer,$specs->{mean})             if($specs->{mean});
    		return $self->add_nodes($layer,$specs->{nodes});
    	} else { 
    		return $self->add_nodes($layer,$specs);
    	}
    	return 1;
    }
    
    # Pseudo-internal usage
    sub add_nodes {
    	no strict 'refs';
		my $self	=	shift;
    	my $layer	=	shift;
    	my $nodes	=	shift;
    	my $n		=	0;
		my $more	=	$nodes - $self->{layers}->[$layer] - 1;
        d("Checking on extending layer $layer to $nodes nodes (check:$self->{layers}->[$layer]).\n",9);
        return 1 if ($nodes == $self->{layers}->[$layer]);
        if ($self->{layers}->[$layer]>$nodes) {
        	$self->{error} = "add_nodes(): I cannot remove nodes from the network with this version of my module. You must create a new network to remove nodes.\n";
        	return undef;
        }
        d("Extending layer $layer by $more.\n",9);
        for (0..$more){$self->{mesh}->[$#{$self->{mesh}}+1]=AI::NeuralNet::Mesh::node->new($self)}
        for(0..$layer-2){$n+=$self->{layers}->[$_]}
		$self->_c($n,$n+$self->{layers}->[$layer-1],$#{$self->{mesh}}-$more+1,$#{$self->{mesh}});
		$self->_c($#{$self->{mesh}}-$more+1,$#{$self->{mesh}},$n+$self->{layers}->[$layer],$n+$self->{layers}->[$layer]+$self->{layers}->[$layer+1]);
    }
        
        
    # See POD for usage
    sub run {
    	my $self	=	shift;
    	my $inputs	=	shift;
    	my $const	=	$self->{const};
    	#my $start	=	new Benchmark;
    	$inputs		=	$self->crunch($inputs) if($inputs == 0);
    	no strict 'refs';
    	for my $x (0..$#{$inputs}) {
    		last if($x>$self->{inputs});
    		d("inputing $inputs->[$x] at index $x with ID $self->{input}->{IDs}->[$x].\n",1);
    		$self->{mesh}->[$x]->input($inputs->[$x]+$const,$self->{input}->{IDs}->[$x]);
    	}
    	if($#{$inputs}<$self->{inputs}-1) {
	    	for my $x ($#{$inputs}+1..$self->{inputs}-1) {
	 	    	d("inputing 1 at index $x with ID $self->{input}->{IDs}->[$x].\n",1);
	    		$self->{mesh}->[$x]->input(1,$self->{input}->{IDs}->[$x]);
	    	}
	    }
    	#$self->{benchmark} = timestr(timediff(new Benchmark, $start));
    	return $self->{output}->get_outputs();
    }    
    
    # See POD for usage
    sub run_uc {
    	$_[0]->uncrunch(run(@_));
    }

	# See POD for usage
	sub learn {
    	my $self	=	shift;					
    	my $inputs	=	shift;					# input set
    	my $outputs	=	shift;					# target outputs
    	my %args	=	@_;						# get args into hash
    	my $inc		=	$args{inc} || 0.002;	# learning gradient
    	my $max     =   $args{max} || 1024;     # max iterations
    	my $degrade =   $args{degrade} || 0;    # enable gradient degrading
		my $error   = 	(defined $args{error} && $args{error}>-1) ? $args{error} : -1;
  		my $dinc	=	0.0002;					# amount to adjust gradient by
		my $diff	=	100;					# error margin between results
		my $start	=	new Benchmark;			
		$inputs		=	$self->crunch($inputs)  if($inputs == 0); 
		$outputs	=	$self->crunch($outputs) if($outputs == 0);
		my ($flag,$ldiff,$cdiff,$_mi,$y); 
		my $loop = 0;	# initialise the loop counter to avoid undef warnings
		while(!$flag && ($max ? $loop<$max : 1)) {
    		my $b	=	new Benchmark;
    		my $got	=	$self->run($inputs);
    		$diff 	=	pdiff($got,$outputs);
		    $flag	=	1;
    		    		
		    if(($error>-1 ? $diff<$error : 0) || !$diff) {
				$flag=1;
				last;
			}
			
			if($degrade) {
				$inc   -= ($dinc*$diff);
				
				if($diff eq $ldiff) {
					$cdiff++;
					$inc += ($dinc*$diff)+($dinc*$cdiff*10);
				} else {
					$cdiff=0;
				}
				$ldiff = $diff;
			}
				
    		for my $x (0..$self->{outputs}-1) { 
    			my $a	=	$got->[$x];
    			my $b	=	$outputs->[$x];
    			d("got: $a, wanted: $b\n",2);
    			if ($a != 	$b) {
    				$flag	=	0;
    				$y 		=	$self->{total_nodes}-$self->{outputs}+$x;
    				$self->{mesh}->[$y]->adjust_weight(($a<$b?1:-1)*$inc,$b);
   				}
   			}
   			
   			$loop++;
   			
   			d("===============================Loop: [$loop]===================================\n",4);
   			d("Current Error: $diff\tCurrent Increment: $inc\n",4);
   			d("Benchmark: ".timestr(timediff(new Benchmark,$b))."\n",4);
   			d("============================Results, [$loop]===================================\n",4);
   			d("Actual: ",4);	
   			join_cols($got,($self->{col_width})?$self->{col_width}:5) if(d()==4);
   			d("Target: ",4);	
   			join_cols($outputs,($self->{col_width})?$self->{col_width}:5) if(d()==4);
   			d("\n",4);
   			d('.',12);
   			d('['.join(',',@{$got})."-".join(',',@{$outputs}).']',13);
   		}  
   		my $str = "Learning took $loop loops and ".timestr(timediff(new Benchmark,$start))."\n";
   		d($str,3); $self->{benchmark} = "$loop loops and ".timestr(timediff(new Benchmark,$start))."\n";
   		return $str;
   	}


	# See POD for usage
	sub learn_set {
		my $self	=	shift;
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2;
		my $inc		=	$args{inc};
		my $max		=	$args{max};
	    my $error	=	$args{error};
	    my $degrade	=	$args{degrade};
	    my $p		=	(defined $args{flag}) ?$args{flag} :1;
	    my $row		=	(defined $args{row})  ?$args{row}+1:1;
	    my $leave	=	(defined $args{leave})?$args{leave}:0;
		for my $x (0..$len-$leave) {
			d("Learning set $x...\n",4);
			my $str = $self->learn( $data->[$x*2],
					  		  		$data->[$x*2+1],
					    			inc=>$inc,
					    			max=>$max,
					    			error=>$error,
					    			degrade=>$degrade);
		}
			
		if ($p) {
			return pdiff($data->[$row],$self->run($data->[$row-1]));
		} else {
			return $data->[$row]->[0]-$self->run($data->[$row-1])->[0];
		}
	}
	
	# See POD for usage
	sub run_set {
		my $self	=	shift;
		my $data	=	shift;
		my $len		=	$#{$data}/2;
		my (@results,$res);
		for my $x (0..$len) {
			$res = $self->run($data->[$x*2]);
			for(0..$#{$res}){$results[$x]->[$_]=$res->[$_]}
			d("Running set $x [$res->[0]]...\r",4);
		}
		return \@results;
	}
	
	#
	# Loads a CSV-like dataset from disk
	#
	# Usage:
	#	my $set = $net->load_set($file, $column, $separator);
	#
	# Returns a data set of the same format as required by the
	# learn_set() method. $file is the disk file to load the set from.
	# $column is an optional variable specifying the column in the 
	# data set to use as the class attribute. $column defaults to 0.
	# $separator is an optional variable specifying the separator
	# character between values. $separator defaults to ',' (a single comma). 
	# NOTE: This does not handle quoted fields, or any other record
	# separator other than "\n".
	#
	sub load_set {
		my $self	=	shift;
		my $file	=	shift;
		my $attr	=	shift || 0;
		my $sep		=	shift || ',';
		my $data	=	[];
		open(FILE,	$file);
		my @lines	=	<FILE>;
		close(FILE);
		for my $x (0..$#lines) {
			chomp($lines[$x]);
			my @tmp	= split /$sep/, $lines[$x];
			my $c=0;
			for(0..$#tmp){ 
				$tmp[$_]=$self->crunch($tmp[$_])->[0] if($tmp[$_]=~/[a-zA-Z]/);
				# Copy every column except the class attribute into the input vector
				if($_!=$attr){$data->[$x*2]->[$c]=$tmp[$_];$c++}
			};             
			d("Loaded line $x, [@tmp]                            \r",4);
			$data->[$x*2+1]=[$tmp[$attr]];
		}
		return $data;
	}
	
	# See POD for usage
	sub get_outs {
		my $self	=	shift;
		my $data	=	shift;
		my $len		=	$#{$data}/2;
		my $outs	=	[];
		for my $x (0..$len) {
			$outs->[$x] = $data->[$x*2+1];
		}
		return $outs;
	}
	
	# Save entire network state to disk.
	sub save {
		my $self	=	shift;
		my $file	=	shift;
		no strict 'refs';
		
		open(FILE,">$file");
	    
	    print FILE "header=$AI::NeuralNet::Mesh::ID\n";
	   	
		print FILE "total_layers=$self->{total_layers}\n";
		print FILE "total_nodes=$self->{total_nodes}\n";
	    print FILE "nodes=$self->{nodes}\n";
	    print FILE "inputs=$self->{inputs}\n";
	    print FILE "outputs=$self->{outputs}\n";
	    print FILE "layers=",(($self->{layers})?join(',',@{$self->{layers}}):''),"\n";
	    
	    print FILE "rand=$self->{random}\n";
	    print FILE "const=$self->{const}\n";
	    print FILE "cw=$self->{col_width}\n";
		print FILE "crunch=$self->{_crunched}->{_length}\n";
		print FILE "rA=$self->{rA}\n";
		print FILE "rB=$self->{rB}\n";
		print FILE "rS=$self->{rS}\n";
		print FILE "rRef=",(($self->{rRef})?join(',',@{$self->{rRef}}):''),"\n";
			
		for my $a (0..$self->{_crunched}->{_length}-1) {
			print FILE "c$a=$self->{_crunched}->{list}->[$a]\n";
		}
	
		my $n = 0;
		for my $x (0..$self->{total_layers}) {
			for my $y (0..$self->{layers}->[$x]-1) {
			    my $w='';
				for my $z (0..$self->{layers}->[$x-1]-1) {
					$w.="$self->{mesh}->[$n]->{_inputs}->[$z]->{weight},";
				}
				print FILE "n$n=$w$self->{mesh}->[$n]->{activation},$self->{mesh}->[$n]->{threshold},$self->{mesh}->[$n]->{mean}\n";
				$n++;
			}
		}
		
	    close(FILE);
	    
	    if(!(-f $file)) {
	    	$self->{error} = "Error writing to \"$file\".";
	    	return undef;
	    }
	    
	    return $self;
	}
        
	# Load entire network state from disk.
	sub load {
		my $self		=	shift;
		my $file		=	shift;  
		my $load_flag   =	shift;
		
	    my @lines;
	    
	    if(-f $file) {
		    open(FILE,"$file");
		    @lines=<FILE>;
	    	close(FILE);
	    } else {
	    	@lines=split /\n/, $file;
	    }
	    
	    my %db;
	    for my $line (@lines) {
	    	chomp($line);
	    	my ($a,$b) = split /=/, $line;
	    	$db{$a}=$b;
	    }
	    
	    if(!$db{"header"}) {
	    	$self->{error} = "Invalid format.";
	    	return undef;
	    }
	    
	    return $self->load_old($file) if($self->version($db{"header"})<0.21);
	    
	    if($load_flag) {
		    undef $self;
	        $self = AI::NeuralNet::Mesh->new([split(',',$db{layers})]);
		} else {
			$self->{inputs}			= $db{inputs};
		    $self->{nodes}			= $db{nodes};
			$self->{outputs}		= $db{outputs};
			$self->{layers} 		= [split(',',$db{layers})];
			$self->{total_layers}	= $db{total_layers};
			$self->{total_nodes}	= $db{total_nodes};
		}
		
	    # Load variables
	    $self->{random}		= $db{"rand"};
	    $self->{const}		= $db{"const"};
        $self->{col_width}	= $db{"cw"};
	    $self->{rA}			= $db{"rA"};
		$self->{rB}			= $db{"rB"};
		$self->{rS}			= $db{"rS"};
		$self->{rRef}		= [split /\,/, $db{"rRef"}];
		
	   	$self->{_crunched}->{_length}	=	$db{"crunch"};
		
		for my $a (0..$self->{_crunched}->{_length}-1) {
			$self->{_crunched}->{list}->[$a] = $db{"c$a"}; 
		}
		
		$self->_init();
	    
		my $n = 0;
		for my $x (0..$self->{total_layers}) {
			for my $y (0..$self->{layers}->[$x]-1) {
			    my @l = split /\,/, $db{"n$n"};
				for my $z (0..$self->{layers}->[$x-1]-1) {
					$self->{mesh}->[$n]->{_inputs}->[$z]->{weight} = $l[$z];
				}
				my $z = $self->{layers}->[$x-1];
				$self->{mesh}->[$n]->{activation} = $l[$z];
				$self->{mesh}->[$n]->{threshold}  = $l[$z+1];
				$self->{mesh}->[$n]->{mean}       = $l[$z+2];
				$n++;
			}
		}
		
		return $self;
	}
	
	# Load entire network state from disk.
	sub load_old {
		my $self		=	shift;
		my $file		=	shift;  
		my $load_flag   =	shift;
		
	    if(!(-f $file)) {
	    	$self->{error} = "File \"$file\" does not exist.";
	    	return undef;
	    }
	    
	    open(FILE,"$file");
	    my @lines=<FILE>;
	    close(FILE);
	    
	    my %db;
	    for my $line (@lines) {
	    	chomp($line);
	    	my ($a,$b) = split /=/, $line;
	    	$db{$a}=$b;
	    }
	    
	    if(!$db{"header"}) {
	    	$self->{error} = "Invalid format.";
	    	return undef;
	    }
	    
	    if($load_flag) {
		    undef $self;
	
			# Create new network
			$self = AI::NeuralNet::Mesh->new($db{"layers"},
		    			 				 	 $db{"nodes"},
		    						      	 $db{"outputs"});
		} else {
			$self->{total_layers}	=	$db{"layers"};
			$self->{nodes}			=	$db{"nodes"};
			$self->{outputs}		=	$db{"outputs"};
			$self->{inputs}			=	$db{"nodes"};
			#$self->{total_nodes}	=	$db{"total"};
		}
		
	    # Load variables
	    $self->{random}		= $db{"rand"};
	    $self->{const}		= $db{"const"};
        $self->{col_width}	= $db{"cw"};
	    $self->{rA}			= $db{"rA"};
		$self->{rB}			= $db{"rB"};
		$self->{rS}			= $db{"rS"};
		$self->{rRef}		= [split /\,/, $db{"rRef"}];
		
	   	$self->{_crunched}->{_length}	=	$db{"crunch"};
		
		for my $a (0..$self->{_crunched}->{_length}-1) {
			$self->{_crunched}->{list}->[$a] = $db{"c$a"}; 
		}
		
	
		$self->_init();
	    
	    my $nodes	=	$self->{nodes};
	   	my $outputs	=	$self->{outputs};
	   	my $tmp		=	$self->{total_nodes};
	   	my $div 	=	intr($nodes/$outputs);

		# Load input and hidden
		for my $a (0..$tmp-1) {
			my @l = split /\,/, $db{"n$a"};
			for my $b (0..$nodes-1) {
				$self->{mesh}->[$a]->{_inputs}->[$b]->{weight} = $l[$b];
			}                  
		}
	     
		# Load output layer
		for my $x (0..$outputs-1) {
			my @l = split /\,/, $db{"n".($tmp+$x)};
			for my $y (0..$div-1) {
				$self->{mesh}->[$tmp+$x]->{_inputs}->[$y]->{weight} = $l[$y];
		 	}
		} 
		
		return $self;
	}

	# Dumps the complete weight matrix of the network to STDOUT
	sub show {
		my $self	=	shift;
		my $n 		=	0;    
		no strict 'refs';
		for my $x (0..$self->{total_layers}) {
			for my $y (0..$self->{layers}->[$x]-1) {
				for my $z (0..$self->{layers}->[$x-1]-1) {
					print "$self->{mesh}->[$n]->{_inputs}->[$z]->{weight},";
				}
				$n++;
			}
			print "\n";
		}
	}
	  
	# Set the activation type of a specific layer.
	# usage: $net->activation($layer,$type);
	# $type can be: "linear", "sigmoid", "sigmoid_2".
	# You can use "sigmoid_1" as a synonym to "sigmoid". 
	# Type can also be a CODE ref, ( ref($type) eq "CODE" ).
	# If $type is a CODE ref, then the function is called in this form:
	# 	$output	= &$type($sum_of_inputs,$self);
	# The code ref then has access to all the data in that node (thru the
	# blessed refrence $self) and is expected to return the value to be used
	# as the output for that node. The sum of all the inputs to that node
	# is already summed and passed as the first argument.
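	# For example (illustrative only), a clipped linear activation for
	# layer 1 could be installed with:
	#	$net->activation(1, sub {
	#		my ($sum, $node) = @_;
	#		return $sum > 1 ? 1 : $sum < 0 ? 0 : $sum;
	#	});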
	sub activation {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $value	=	shift || 'linear';
		my $n 		=	0;    
		no strict 'refs';
		for(0..$layer-1){$n+=$self->{layers}->[$_]}
		for($n..$n+$self->{layers}->[$layer]-1) {
			$self->{mesh}->[$_]->{activation} = $value; 
		}
	}
	
	# Applies an activation type to a specific node
	sub node_activation {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $node	=	shift || 0;
		my $value	=	shift || 'linear';
		my $n 		=	0;    
		no strict 'refs';
		for(0..$layer-1){$n+=$self->{layers}->[$_]}
		$self->{mesh}->[$n+$node]->{activation} = $value; 
	}
	
	# Set the activation threshold for a specific layer.
	# Only applicable if that layer uses "sigmoid" or "sigmoid_2"
	# usage: $net->threshold($layer,$threshold);
	sub threshold {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $value	=	shift || 0.5; 
		my $n		=	0;
		no strict 'refs';
		for(0..$layer-1){$n+=$self->{layers}->[$_]}
		for($n..$n+$self->{layers}->[$layer]-1) {
			$self->{mesh}->[$_]->{threshold} = $value;
		}
	}
	
	# Applies a threshold to a specific node     
	sub node_threshold {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $node	=	shift || 0;
		my $value	=	shift || 0.5; 
		my $n		=	0;
		no strict 'refs';
		for(0..$layer-1){$n+=$self->{layers}->[$_]}
		$self->{mesh}->[$n+$node]->{threshold} = $value;
	}
	
	# Set mean (avg.) flag for a layer.
	# usage: $net->mean($layer,$flag);
	# If $flag is true, it enables finding the mean for that layer;
	# if $flag is false, it disables it.
	sub mean {
		my $self	=	shift;
		my $layer	=	shift || 0;
		my $value	=	shift || 0;
		my $n		=	0;
		no strict 'refs';
		for(0..$layer-1){$n+=$self->{layers}->[$_]}
		for($n..$n+$self->{layers}->[$layer]-1) {
			$self->{mesh}->[$_]->{mean} = $value;
		}
	}
	
	  
	# Returns a pcx object
	sub load_pcx {
		my $self	=	shift;
		my $file	=	shift;
		eval('use PCX::Loader');
		if($@) {
			$self->{error}="Cannot load PCX::Loader module: $@";
			return undef;
		}
		return PCX::Loader->new($self,$file);
	}	
	
	# Crunch a string of words into a map
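	# For example (word indices are illustrative): $net->crunch("I love Perl")
	# might return [1,2,3], and $net->uncrunch([1,2,3]) returns "I love Perl ".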
	sub crunch {
		my $self	=	shift;
		my @ws 		=	split(/[\s\t]/,shift);
		my (@map,$ic);
		for my $a (0..$#ws) {
			$ic=$self->crunched($ws[$a]);
			if(!defined $ic) {
				$self->{_crunched}->{list}->[$self->{_crunched}->{_length}++]=$ws[$a];
				$map[$a]=$self->{_crunched}->{_length};
			} else {
				$map[$a]=$ic;
            }
		}
		return \@map;
	}
	
	# Finds if a word has been crunched.
	# Returns undef on failure, word index for success.
	sub crunched {
		my $self	=	shift;
		for my $a (0..$self->{_crunched}->{_length}-1) {
			return $a+1 if($self->{_crunched}->{list}->[$a] eq $_[0]);
		}
		$self->{error} = "Word \"$_[0]\" not found.";
		return undef;
	}
	
	# Alias for crunched(), above
	sub word { crunched(@_) }
	
	# Uncrunches a map (array ref) into an array of words (not an array ref) 
	# and returns array
	sub uncrunch {
		my $self	=	shift;
		my $map = shift;
		my ($c,$el,$x);
		foreach $el (@{$map}) {
			$c .= $self->{_crunched}->{list}->[$el-1].' ';
		}
		return $c;
	}
	
	# Sets/gets the randomness factor in the network. Setting a value of 0 
	# disables random factors.
	sub random {
		my $self	=	shift;
		my $rand	=	shift;
		return $self->{random}	if(!(defined $rand));
		$self->{random}	=	$rand;
	}
	
	# Sets/gets column width for printing lists in debug modes 1,3, and 4.
	sub col_width {
		my $self	=	shift;
		my $width	=	shift;
		return $self->{col_width}	if(!$width);
		$self->{col_width}	=	$width;
	} 

	# Sets/gets the run const. factor in the network. Setting a value of 0 
	# disables the run const. factor. 
	sub const {
		my $self	=	shift;
		my $const	=	shift;
		return $self->{const}	if(!(defined $const));
		$self->{const}	=	$const;
	}
	
	# Return benchmark time from last learn() operation.
	sub benchmark {
		shift->{benchmark};
	}
	
	# Same as benchmark()
	sub benchmarked {
		benchmark(shift);
	}
	
	# Return the last error in the mesh, or undef if no error.
	sub error {
		my $self = shift;
		return undef if !$self->{error};
		chomp($self->{error});
		return $self->{error}."\n";
	}
	
	# Used to format array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $a		=	shift;
		my $b		=	shift;
		my $x;
		foreach my $el (@{$map}) { 
			my $str = ((int($el))?$a:$b);
			$str=$el."\0" if(!$a);
			print $str;	$x++;
			if($x>$break-1) { print "\n"; $x=0;	}
		}
		print "\n";
	}
	
	# Returns percentage difference between all elements of two
	# array refs of exact same length (in elements).
	# Now calculates actual difference in numerical value.
	sub pdiff {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $a1	=	shift;
		my $a2	=	shift;
		my $a1s	=	$#{$a1};
		my $a2s	=	$#{$a2};
		my ($a,$b,$diff,$t);
		$diff=0;
		for my $x (0..$a1s) {
			$a = $a1->[$x]; $b = $a2->[$x];
			if($a!=$b) {
				if($a<$b){$t=$a;$a=$b;$b=$t;}
				$a=1 if(!$a); $diff+=(($a-$b)/$a)*100;
			}
		}
		$a1s = 1 if(!$a1s);
		return sprintf("%.10f",($diff/$a1s));
	}
	
	# Returns $fa as a percentage of $fb
	sub p {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my ($fa,$fb)=(shift,shift); 
		sprintf("%.3f",$fa/$fb*100); #((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100
	}
	
	# Returns the index of the element in array REF passed with the highest 
	# comparative value
	sub high {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1 = shift; my ($el,$len,$tmp); $tmp=0;
		foreach $el (@{$ref1}) { $len++ }
		for my $x (0..$len-1) { $tmp = $x if($ref1->[$x] > $ref1->[$tmp]) }
		return $tmp;
	}
	
	# Returns the index of the element in array REF passed with the lowest 
	# comparative value
	sub low {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $ref1 = shift; my ($el,$len,$tmp); $tmp=0;
		foreach $el (@{$ref1}) { $len++ }
		for my $x (0..$len-1) { $tmp = $x if($ref1->[$x] < $ref1->[$tmp]) }
		return $tmp;
	}  
	
	# Following is a collection of a few nifty custom activation functions.
	# range() is exported by default, the rest you can get with:
	#	use AI::NeuralNet::Mesh ':acts'
	# The ':all' tag also gets these into your namespace.
	 
	#
	# range() returns a closure limiting the output 
	# of that node to a specified set of values.
	# Good for output layers.
	#
	# usage example:
	#	$net->activation(4,range(0..5));
	# or:
	#	..
	#	{ 
	#		nodes		=>	1,
	#		activation	=>	range 5..2
	#	}
	#	..
	# You can also pass an array containing the range
	# values (not array ref), or you can pass a comma-
	# seperated list of values as parameters:
	#
	#	$net->activation(4,range(@numbers));
	#	$net->activation(4,range(6,15,26,106,28,3));
	#
	# Note: when using the range() activation, train the
	# net TWICE on the data set, because the first time
	# the range() function searches for the top value in
	# the inputs, and therefore, results could fluctuate.
	# The second learning cycle guarantees more accuracy.
	#	
	sub range {
		my @r=@_;
		sub{$_[1]->{t}=$_[0]if($_[0]>$_[1]->{t});$r[intr($_[0]/$_[1]->{t}*$#r)]}
	}
	
	#
	# ramp() performs smooth ramp activation between 0 and 1 if $r is 1, 
	# or between -1 and 1 if $r is 2. $r defaults to 1, as you can see.	
	#
	# Note: when using the ramp() activation, train the
	# net at least TWICE on the data set, because the first 
	# time the ramp() function searches for the top value in
	# the inputs, and therefore, results could fluctuate.
	# The second learning cycle guarantees more accuracy.
	#
	sub ramp {
		my $r=shift||1;my $t=($r<2)?0:-1;
		# Scale by the running maximum, offset by $t for the -1..1 range
		sub{$_[1]->{t}=$_[0]if($_[0]>$_[1]->{t});$_[0]/$_[1]->{t}*$r+$t}
	}
	}

	# Self-explanatory, pretty much. $threshold is used to decide if an input 
	# is true or false (1 or 0). If an input is below $threshold, it is false.
	sub and_gate {
		my $threshold = shift || 0.5;
		sub {
			my $sum  = shift;
			my $self = shift;
			# Any input below $threshold makes the AND false
			for my $x (0..$self->{_inputs_size}-1) { return $self->{_parent}->{const} if $self->{_inputs}->[$x]->{value} < $threshold }
			return $sum/$self->{_inputs_size};
		}
	}
	
	# Self-explanatory, $threshold is used same as above.
	sub or_gate {
		my $threshold = shift || 0.5;
		sub {
			my $sum  = shift;
			my $self = shift;
			# Any input at or above $threshold makes the OR true
			for my $x (0..$self->{_inputs_size}-1) { return $sum/$self->{_inputs_size} if $self->{_inputs}->[$x]->{value} >= $threshold }
			return $self->{_parent}->{const};
		}
	}
	
1;

package AI::NeuralNet::Mesh::node;
	
	use strict;

	# Node constructor
	sub new {
		my $type		=	shift;
		my $self		={ 
			_parent		=>	shift,
			_inputs		=>	[],
			_outputs	=>	[]
		};
		bless $self, $type;
	}

	# Receive inputs from other nodes, and also send
	# outputs on.	
	sub input {
		my $self	=	shift;
		my $input	=	shift;
		my $from_id	=	shift;
		
		$self->{_inputs}->[$from_id]->{value} = $input * $self->{_inputs}->[$from_id]->{weight};
		$self->{_inputs}->[$from_id]->{input} = $input;
		$self->{_inputs}->[$from_id]->{fired} = 1;
		
		$self->{_parent}->d("got input $input from id $from_id, weighted to $self->{_inputs}->[$from_id]->{value}.\n",1);
		
		my $flag	=	1;
		for my $x (0..$self->{_inputs_size}-1) { $flag = 0 if(!$self->{_inputs}->[$x]->{fired}) }
		if ($flag) {
			$self->{_parent}->d("all inputs fired for $self.\n",1);
			my $output	=	0;   
			
			# Sum
			for my $i (@{$self->{_inputs}}) {                        
				$output += $i->{value};
			}
		
			# Handle activations, thresholds, and means
			$output	   /=  $self->{_inputs_size} if($self->{mean});	# mean() sets {mean} on each node
			#$output    += (rand()*$self->{_parent}->{random});
			$output		= ($output>=$self->{threshold})?1:0 if(($self->{activation} eq "sigmoid") || ($self->{activation} eq "sigmoid_1"));
			if($self->{activation} eq "sigmoid_2") {
				$output =  1 if($output >$self->{threshold});
				$output = -1 if($output <$self->{threshold});
				$output =  0 if($output==$self->{threshold});
			}
			
			# Handle CODE refs
			$output = &{$self->{activation}}($output,$self) if(ref($self->{activation}) eq "CODE");
			
			# Send output
			for my $o (@{$self->{_outputs}}) { $o->{node}->input($output,$o->{from_id}) }
		} else {
			$self->{_parent}->d("all inputs have NOT fired for $self.\n",1);
		}
	}

	sub add_input_node {
		my $self	=	shift;
		my $node	=	shift;
		my $i		=	$self->{_inputs_size} || 0;
		$self->{_inputs}->[$i]->{node}	 = $node;
		$self->{_inputs}->[$i]->{value}	 = 0;
		$self->{_inputs}->[$i]->{weight} = 1; #rand()*1;
		$self->{_inputs}->[$i]->{fired}	 = 0;
		$self->{_inputs_size} = ++$i;
		return $i-1;
	}
	
	sub add_output_node {
		my $self	=	shift;
		my $node	=	shift;
		my $i		=	$self->{_outputs_size} || 0;
		$self->{_outputs}->[$i]->{node}		= $node;
		$self->{_outputs}->[$i]->{from_id}	= $node->add_input_node($self);
		$self->{_outputs_size} = ++$i;
		return $i-1;
	}     
	
	sub adjust_weight {
		my $self	=	shift;
		my $inc		=	shift;
		for my $i (@{$self->{_inputs}}) {
			$i->{weight} += $inc * $i->{weight};
			$i->{node}->adjust_weight($inc) if($i->{node});
		}
	}

1;	
	
# Internal usage, prevents recursion on empty nodes.
package AI::NeuralNet::Mesh::cap;
	sub new     { bless {}, shift }
	sub input           {}
	sub adjust_weight   {}
	sub add_output_node {}
	sub add_input_node  {}
1;

# Internal usage, collects data from output layer.
package AI::NeuralNet::Mesh::output;
	
	use strict;
	
	sub new {
		my $type		=	shift;
		my $self		={ 
			_parent		=>	shift,
			_inputs		=>	[],
		};
		bless $self, $type;
	}
	
	sub add_input_node {
		my $self	=	shift;
		return (++$self->{_inputs_size})-1;
	}
	
	sub input {
		my $self	=	shift;
		my $input	=	shift;
		my $from_id	=	shift;
		$self->{_parent}->d("GOT INPUT [$input] FROM [$from_id]\n",1);
		$self->{_inputs}->[$from_id] = $self->{_parent}->intr($input);
	}
	
	sub get_outputs {
		my $self	=	shift;
		return $self->{_inputs};
	}

1;
                                       
__END__

=head1 NAME

AI::NeuralNet::Mesh - An optimized, accurate neural network Mesh.

=head1 SYNOPSIS
    
	use AI::NeuralNet::Mesh;

    # Create a mesh with 2 layers, 2 nodes/layer, and one output node.
	my $net = new AI::NeuralNet::Mesh(2,2,1);
	
	# Teach the network the AND function
	$net->learn([0,0],[0]);
	$net->learn([0,1],[0]);
	$net->learn([1,0],[0]);
	$net->learn([1,1],[1]);
	
	# Present it with two test cases
	my $result_bit_1 = $net->run([0,1])->[0];
	my $result_bit_2 = $net->run([1,1])->[0];
	
	# Display the results
	print "AND test with inputs (0,1): $result_bit_1\n";
	print "AND test with inputs (1,1): $result_bit_2\n";
	

=head1 VERSION & UPDATES

This is version B<0.44>, an update release for version 0.43.

This fixed the usage conflict with perl 5.3.3.

With this version I have gone through and tuned up many areas
of this module, including the descent algorithm in learn(),
as well as the four custom activation functions and several export 
tag sets. With this release, I have also included a few new and
more practical example scripts (see ex_wine.pl for details).

This version also fixes a big bug that I never knew about 
until writing some demos for this version - that is, when trying to use 
more than one output node, the mesh would freeze in learning. But, that 
is fixed now, and you can have as many outputs as you want (how does 3 
inputs and 50 outputs sound? :-)


=head1 DESCRIPTION

AI::NeuralNet::Mesh is an optimized, accurate neural network Mesh.
It was designed with accuracy and speed in mind. 

This network model is very flexible. It will allow for classic binary
operation or any range of integer or floating-point inputs you care
to provide. With this you can change activation types on a per-node or
per-layer basis (you can even include your own anonymous subs as 
activation types). You can add sigmoid transfer functions and control
the threshold. You can learn data sets in batch, and load CSV data
set files. You can do almost anything you need to with this module.
This code is designed to be flexible. Any new ideas for this module?
See AUTHOR, below, for contact info.

This module is designed to also be a customizable, extensible 
neural network simulation toolkit. Through a combination of setting
the $Connection variable and using custom activation functions, as
well as basic package inheritance, you can simulate many different
types of neural network structures with very little new code written
by you.

In this module I have included a more accurate form of "learning" for the
mesh. This form performs descent toward a local error minimum (0) on a 
directional delta, rather than the desired value for that node. This allows
for better, and more accurate results with larger datasets. This module also
uses a simpler recursion technique which, surprisingly, is more accurate than
the original technique that I've used in other ANNs.

=head1 EXPORTS

This module exports three functions by default:

	range
	intr
	pdiff
	
See range() intr() and pdiff() for description of their respective functions.

Also provided are several export tag sets for usage in the form of:

	use AI::NeuralNet::Mesh ':tag';
	
Tag sets are:

	:default 
	    - These functions are always exported.
		- Exports:
		range()
		intr()
		pdiff()
	
	:all
		- Exports:
		p()
		high()
		low()
		range()
		ramp()
		and_gate()
		or_gate()
	
	:p
		- Exports:
		p()
		high()
		low()
	
	:acts
		- Exports:
		ramp()
		and_gate()
		or_gate()

See the respective methods/functions for information about
each one's usage.


=head1 METHODS

=item AI::NeuralNet::Mesh->new();

There are four ways to construct a new network with new(). Each is detailed below.

P.S. Don't worry, the old C<new($layers, $nodes [, $outputs])> still works like always!

=item AI::NeuralNet::Mesh->new($layers, $nodes [, $outputs]);

Returns a newly created neural network from an C<AI::NeuralNet::Mesh>
object. The network will have C<$layers> number of layers in it
and it will have C<$nodes> number of nodes per layer.

There is an optional parameter of $outputs, which specifies the number
of output neurons to provide. If $outputs is not specified, $outputs
defaults to equal $nodes. 


=item AI::NeuralNet::Mesh->new($file);

This will automatically create a new network from the file C<$file>. It will
return undef if the file was of an incorrect format or non-existent. Otherwise,
it will return a blessed reference to a network completely restored from C<$file>.

=item AI::NeuralNet::Mesh->new(\@layer_sizes);

This constructor will make a network with the number of layers corresponding to the length
in elements of the array ref passed. Each element in the array ref passed is expected
to contain an integer specifying the number of nodes (neurons) in that layer. The first
layer ($layer_sizes[0]) is to be the input layer, and the last layer in @layer_sizes is to be
the output layer.

Example:

	my $net = AI::NeuralNet::Mesh->new([2,3,1]);
	

Creates a network with 2 input nodes, 3 hidden nodes, and 1 output node.


=item AI::NeuralNet::Mesh->new(\@array_of_hashes);

Another dandy constructor...this is my favorite. It allows you to tailor the number of layers,
the size of the layers, the activation type (you can even add anonymous inline subs with this one),
and even the threshold, all with one array ref-ed constructor.

Example:

	my $net = AI::NeuralNet::Mesh->new([
		{
		    nodes        => 2,
		    activation   => 'linear'
		},
		{
		    nodes        => 3,
		    activation   => sub {
		        my $sum  =  shift;
		        return $sum + rand()*1;
		    }
		},
		{
		    nodes        => 1,
		    activation   => 'sigmoid',
		    threshold    => 0.75
		}
	]);
	
	
Interesting, eh? What you are basically passing is this:

	my @info = ( 
		{ },
		{ },
		{ },
		...
	);

You are passing an array ref, each element of which is a hash reference. Each
hash reference, or more precisely, each element in the array reference you are passing
to the constructor, represents a layer in the network. Like the constructor above,
the first element is the input layer, and the last is the output layer. The rest are
hidden layers.

Each hash reference is expected to have AT LEAST the "nodes" key set to the number
of nodes (neurons) in that layer. The other two keys are optional. If "activation" is left
out, it defaults to "linear". If "threshold" is left out, it defaults to 0.50.

The "activation" key can be one of four values:

	linear                    ( simply use sum of inputs as output )
	sigmoid    [ sigmoid_1 ]  ( only positive sigmoid )
	sigmoid_2                 ( positive / 0 /negative sigmoid )
	\&code_ref;

"sigmoid_1" is an alias for "sigmoid". 

The code ref option allows you to have a custom activation function for that layer.
The code ref is called with this syntax:

	$output = &$code_ref($sum_of_inputs, $self);
	
The code ref is expected to return a value to be used as the output of the node.
The code ref also has access to all the data of that node through the second argument,
a blessed hash reference to that node.

See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions
other than the ones listed above.

Three of the activation syntaxes are shown in the first constructor above, the "linear",
"sigmoid" and code ref types.

You can also set the activation and threshold values after network creation with the
activation() and threshold() methods. 
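
For instance, a minimal sketch of doing just that (the layer index and values
here are illustrative, not recommendations):

	# re-apply an activation and threshold to layer 1 after creation
	$net->activation(1, 'sigmoid');
	$net->threshold(1, 0.75);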

	



=item $net->learn($input_map_ref, $desired_result_ref [, options ]);

NOTE: learn_set() now has increment-degrading turned OFF by default. See note
on the degrade flag, below.

This will 'teach' a network to associate a new input map with a desired 
result. It will return a string containing benchmarking information. 

You can also specify strings as inputs and outputs to learn, and they will be 
crunched automatically. Example:

	$net->learn('corn', 'cob');
	
	
Note, the old method of calling crunch on the values still works just as well.	

The first two arguments may be array refs (or now, strings), and they may be 
of different lengths.

Options should be written in hash form. There are four options:
	 
	 inc      =>    $learning_gradient
	 max      =>    $maximum_iterations
	 error    =>    $maximum_allowable_percentage_of_error
	 degrade  =>    $degrade_increment_flag
	 

$learning_gradient is an optional value used to adjust the weights of the internal
connections. If $learning_gradient is omitted, it defaults to 0.002.
 
$maximum_iterations is the maximum number of iterations the loop should do.
It defaults to 1024.  Set it to 0 if you never want the loop to quit before
the pattern is perfectly learned.

$maximum_allowable_percentage_of_error is the maximum allowable error to have. If 
this is set, then learn() will return when the percentage difference between the
actual results and desired results falls below $maximum_allowable_percentage_of_error.
If you do not include 'error', or $maximum_allowable_percentage_of_error is set to -1,
then learn() will not return until it gets an exact match for the desired result OR it
reaches $maximum_iterations.

$degrade_increment_flag is a simple flag used to allow/disallow increment degrading
during learning based on a product of the error difference with several other factors.
$degrade_increment_flag is off by default. Setting $degrade_increment_flag to a true
value turns increment degrading on. 

In previous module releases $degrade_increment_flag was not used, as increment degrading
was always on. In this release I have looked at several other network types as well
as several texts and decided that it would be better to not use increment degrading. The
option is still there for those that feel the inclination to use it. I have found some cases
that do need the degrade flag in order to learn at a faster speed. See test.pl for an example. If
the degrade flag weren't in test.pl, it would take a very long time to learn.
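
For example, a usage sketch with explicit options (the values here are
illustrative, not recommendations):

	# teach one pattern of a two-input net with custom options
	$net->learn([0,1], [1],
	            inc   => 0.05,   # learning gradient
	            max   => 2000,   # iteration cap
	            error => 5);     # return once error falls below 5%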



=item $net->learn_set(\@set, [ options ]);

This takes the same options as learn() (learn_set() uses learn() internally) 
and allows you to specify a set to learn, rather than individual patterns. 
A dataset is an array reference with at least two elements in the array, 
each element being another array reference (or now, a scalar string). For 
each pattern to learn, you must specify an input array ref, and an output 
array ref as the next element. Example:
	
	my @set = (
		# inputs        outputs
		[ 1,2,3,4 ],  [ 1,3,5,6 ],
		[ 0,2,5,6 ],  [ 0,2,1,2 ]
	);


Inputs and outputs in the dataset can also be strings.

See the paragraph on measuring forgetfulness, below. There are 
two learn_set()-specific option tags available:

	flag     =>  $flag
	pattern  =>  $row

If "flag" is set to some TRUE value, as in "flag => 1" in the hash of options, or if the option "flag"
is not set, then it will return a percentage represting the amount of forgetfullness. Otherwise,
learn_set() will return an integer specifying the amount of forgetfulness when all the patterns 
are learned. 

If "pattern" is set, then learn_set() will use that pattern in the data set to measure forgetfulness by.
If "pattern" is omitted, it defaults to the first pattern in the set. Example:

	my @set = (
		[ 0,1,0,1 ],  [ 0 ],
		[ 0,0,1,0 ],  [ 1 ],
		[ 1,1,0,1 ],  [ 2 ],  #  <---
		[ 0,1,1,0 ],  [ 3 ]
	);
	
If you wish to measure forgetfulness as indicated by the line with the arrow, then you would
pass 2 as the "pattern" option, as in "pattern => 2".

Now why the heck would anyone want to measure forgetfulness, you ask? Maybe you wonder how I 
even measure that. Well, it is not a vital value that you have to know. I just put in a 
"forgetfulness measure" one day because I thought it would be neat to know. 

How the module measures forgetfulness is this: First, it learns all the patterns 
in the set provided, then it will run the very first pattern (or whatever pattern
is specified by the "pattern" option) in the set after it has finished learning. It 
will compare the run() output with the desired output as specified in the dataset. 
In a perfect world, the two should match exactly. What we measure is how much that 
they don't match, thus the amount of forgetfulness the network has.

Example (from examples/ex_dow.pl):

	# Data from 1989 (as far as I know..this is taken from example data on BrainMaker)
	my @data = ( 
		#	Mo  CPI  CPI-1 CPI-3 	Oil  Oil-1 Oil-3    Dow   Dow-1 Dow-3   Dow Ave (output)
		[	1, 	229, 220,  146, 	20.0, 21.9, 19.5, 	2645, 2652, 2597], 	[	2647  ],
		[	2, 	235, 226,  155, 	19.8, 20.0, 18.3, 	2633, 2645, 2585], 	[	2637  ],
		[	3, 	244, 235,  164, 	19.6, 19.8, 18.1, 	2627, 2633, 2579], 	[	2630  ],
		[	4, 	261, 244,  181, 	19.6, 19.6, 18.1, 	2611, 2627, 2563], 	[	2620  ],
		[	5, 	276, 261,  196, 	19.5, 19.6, 18.0, 	2630, 2611, 2582], 	[	2638  ],
		[	6, 	287, 276,  207, 	19.5, 19.5, 18.0, 	2637, 2630, 2589], 	[	2635  ],
		[	7, 	296, 287,  212, 	19.3, 19.5, 17.8, 	2640, 2637, 2592], 	[	2641  ] 		
	);
	
	# Learn the set
	my $f = $net->learn_set(\@data, 
					  inc	=>	0.1,	
					  max	=>	500,
					 );
			
	# Print it 
	print "Forgetfullness: $f%";

    
This is a snippet from the example script examples/ex_dow.pl, which demonstrates DOW average
prediction for the next month. A simpler set definition would be as such:

	my @data = (
		[ 0,1 ], [ 1 ],
		[ 1,0 ], [ 0 ]
	);
	
	$net->learn_set(\@data);
	
Same effect as above, but not the same data (obviously).


=item $net->run($input_map_ref);

This method will apply the given array ref at the input layer of the neural network, and
it will return an array ref to the output of the network. run() will now automatically crunch() 
a string given as an input (See the crunch() method for info on crunching).

Example Usage:
	
	my $inputs  = [ 1,1,0,1 ];
	my $outputs = $net->run($inputs);

You can also do this with a string:
                                                                                  
	my $outputs = $net->run('cloudy - wind is 5 MPH NW');
	

See also run_uc() and run_set() below.


=item $net->run_uc($input_map_ref);

This method does the same thing as this code:
	
	$net->uncrunch($net->run($input_map_ref));

All that run_uc() does is that it automatically calls uncrunch() on the output, regardless
of whether the input was crunch() -ed or not.
	

=item $net->run_set($set);
                                                                                    
This takes an array ref of the same structure as the learn_set() method, above. It returns
an array ref. Each element in the returned array ref represents the output for the corresponding
element in the dataset passed. Uses run() internally.


=item $net->get_outs($set);

Simple utility function which takes an array ref of the same structure as the learn_set() method,
above. It returns an array ref of the same type as run_set() wherein each element contains an
output value. The output values are the target values specified in the $set passed. Each element
in the returned array ref represents the output value for the corresponding row in the dataset
passed. (A row is two elements of the dataset together, see learn_set() for dataset structure.)
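
A sketch combining run_set(), get_outs() and pdiff() to check how well a set
was learned, assuming @set has the learn_set() structure shown above:

	# compare what the net produces against the targets stored in the set
	my $got    = $net->run_set(\@set);
	my $wanted = $net->get_outs(\@set);
	for my $i (0..$#{$got}) {
		print "row $i differs by ", $net->pdiff($got->[$i], $wanted->[$i]), "\n";
	}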

=item $net->load_set($file,$column,$separator);

Loads a CSV-like dataset from disk

Returns a data set of the same structure as required by the
learn_set() method. $file is the disk file to load set from.
$column is an optional variable specifying the column in the 
data set to use as the class attribute. $column defaults to 0.
$separator is an optional variable specifying the separator
character between values. $separator defaults to ',' (a single comma). 
NOTE: This does not handle quoted fields, or any other record
separator other than "\n".

The returned array ref is suitable for passing directly to
learn_set() or get_outs().
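
For example, a minimal sketch (the file name here is hypothetical):

	# load a CSV file, using column 0 as the class attribute, then learn it
	my $set = $net->load_set('data.csv', 0);
	$net->learn_set($set);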
	

=item $net->range();

See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions.


=item $net->benchmark();

=item $net->benchmarked();

This returns a benchmark info string for the last learn() call.
It is easily printed as a string, as follows:

	print "Last learn() took ",$net->benchmark(),"\n";



=item $net->verbose($level);

=item $net->verbosity($level);

=item $net->v($level);

=item $net->debug($level)

Note: verbose(), verbosity(), and v() are all functional aliases for debug().

Toggles debugging off if called with $level = 0 or no arguments. There are several levels
of debugging. 

NOTE: Debugging verbosity has been toned down somewhat from AI::NeuralNet::BackProp,
but level 4 still prints the same amount of information as you were used to. The other
levels, however, are mostly for advanced use. There is not much explanation for the other
levels, but they are included for those of you that feel daring (or just plain bored.)

Level 0 ($level = 0) : Default, no debugging information printed. All printing is 
left to calling script.

Level 1 ($level = 1) : Displays the activity between nodes, prints what values were
received and what they were weighted to.

Level 2 ($level = 2) : Just prints info from the learn() loop, in the form of "got: X, wanted Y"
type of information. This is about the third most useful debugging level, after level 12 and
level 4.

Level 3 ($level = 3) : I don't think I included any level 3 debugs in this version.

Level 4 ($level = 4) : This level is the one I use most. It is only used during learning. It
displays the current error (difference between actual outputs and the target outputs you
asked for), as well as the current loop number and the benchmark time for the last learn cycle.
Also printed are the actual outputs and the target outputs below the benchmark times.

Level 12 ($level = 12) : Level 12 prints a dot (period) [.] after each learning loop is
complete. This is useful for letting the user know that stuff is happening, but without
having to display any of the internal variables. I use this in the ex_aln.pl demo,
as well as the ex_agents.pl demo.




=item $net->save($filename);

This will save the complete state of the network to disk, including all weights and any
words crunched with crunch() . Also saves the layer size and activations of the network.

NOTE: The only activation type NOT saved is the CODE ref type, which must be set again
after loading.

This uses a simple flat-file text storage format, and therefore the network files should
be fairly portable.

This method will return undef if there was a problem with writing the file. If there is an
error, it will set the internal error message, which you can retrieve with the error() method,
below.

If there were no errors, it will return a reference to $net.


=item $net->load($filename);

This will load from disk any network saved by save() and completely restore the internal
state to the point at which save() was called.

If the file is of an invalid file type, then load() will
return undef. Use the error() method, below, to print the error message.

If there were no errors, it will return a reference to $net.

UPDATE: $filename can now be a newline-separated set of mesh data. This enables you
to do $net->load(join("\n",<DATA>)) and other fun things. I added this mainly
for a demo I'm writing but am not quite done with yet. So, Cheers!
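
A hedged save/load round-trip sketch ('mesh.net' is an illustrative file name):

	# save the trained network, then restore it into a second object
	$net->save('mesh.net') or die $net->error();
	my $net2 = AI::NeuralNet::Mesh->new('mesh.net')
		or die "could not restore network from mesh.net";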



=item $net->activation($layer,$type);

This sets the activation type for layer C<$layer>.

C<$type> can be one of four values:

	linear                    ( simply use sum of inputs as output )
	sigmoid    [ sigmoid_1 ]  ( only positive sigmoid )
	sigmoid_2                 ( positive / 0 /negative sigmoid )
	\&code_ref;

"sigmoid_1" is an alias for "sigmoid". 

The code ref option allows you to have a custom activation function for that layer.
The code ref is called with this syntax:

	$output = &$code_ref($sum_of_inputs, $self);
	
The code ref is expected to return a value to be used as the output of the node.
The code ref also has access to all the data of that node through the second argument,
a blessed hash reference to that node.

See CUSTOM ACTIVATION FUNCTIONS for information on several included activation functions
other than the ones listed above.

The activation type for each layer is preserved across load/save calls. 

EXCEPTION: Due to the constraints of Perl, I cannot load/save the actual subs that the code
ref option points to. Therefore, you must re-apply any code ref activation types after a 
load() call.

=item $net->node_activation($layer,$node,$type);

This sets the activation function for a specific node in a layer. The same notes apply
here as to the activation() method above.


=item $net->threshold($layer,$value);

This sets the activation threshold for a specific layer. The threshold is only used
when activation is set to "sigmoid", "sigmoid_1", or "sigmoid_2". 


=item $net->node_threshold($layer,$node,$value);

This sets the activation threshold for a specific node in a layer. The threshold is only used
when activation is set to "sigmoid", "sigmoid_1", or "sigmoid_2".  
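
A short sketch of both per-node methods (the indices and values are illustrative):

	# give node 0 of layer 1 its own activation and threshold
	$net->node_activation(1, 0, 'sigmoid_2');
	$net->node_threshold(1, 0, 0.6);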

=item $net->join_cols($array_ref,$row_length_in_elements,$high_state_character,$low_state_character);

This is more of a utility function than any real necessary function of the package.
Instead of joining all the elements of the array together in one long string, like join() ,
it prints the elements of $array_ref to STDOUT, adding a newline (\n) after every $row_length_in_elements
number of elements has passed. Additionally, if you include a $high_state_character and a $low_state_character,
it will print the $high_state_character (can be more than one character) for every element that
has a true value, and the $low_state_character for every element that has a false value. 
If you do not supply a $high_state_character, or the $high_state_character is a null or empty or 
undefined string, join_cols() will just print the numerical value of each element separated
by a null character (\0). join_cols() defaults to the latter behaviour.
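
For instance, a sketch printing a small bitmap (the map and characters are illustrative):

	# print a 16-element map as 4 columns, '#' for true and '.' for false
	$net->join_cols([1,0,0,1, 0,1,1,0, 0,1,1,0, 1,0,0,1], 4, '#', '.');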



=item $net->extend(\@array_of_hashes);

This allows you to re-apply any activations and thresholds with the same array ref which
you created a network with. This is useful for re-applying code ref activations after a load()
call without having to type the code ref twice.

You can also specify the extension in a simple array ref like this:

	$net->extend([2,3,1]);
	
Which will simply add more nodes if needed to set the number of nodes in each layer to their 
respective elements. This works just like the respective new() constructor, above.

NOTE: Your net will probably require re-training after adding nodes.


=item $net->extend_layer($layer,\%hash);

With this you can modify only one layer, with its specifications in a hash reference. This hash
reference uses the same keys as for the last new() constructor form, above. 

You can also specify just the number of nodes for the layer in this form:

	$net->extend_layer(0,5);

Which will set the number of nodes in layer 0 to 5 nodes. This is the same as calling:
	
	$net->add_nodes(0,5);

See add_nodes() below.

NOTE: Your net will probably require re-training after adding nodes.


=item $net->add_nodes($layer,$total_nodes);

This method was created mainly to service the extend*() group of functions, but it 
can also be called independently. This will add nodes as needed to layer C<$layer> to 
make the nodes in layer equal to $total_nodes. 

NOTE: Your net will probably require re-training after adding nodes.



=item $net->p($a,$b);

Returns a floating point number which represents $a as a percentage of $b.



=item $net->intr($float);

Rounds a floating-point number to an integer using sprintf() and int(). Provides
better rounding than just calling int() on the float. Also used very heavily internally.



=item $net->high($array_ref);

Returns the index of the element in array REF passed with the highest comparative value.



=item $net->low($array_ref);

Returns the index of the element in array REF passed with the lowest comparative value.
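
A common idiom combining run() with high(), sketched under the assumption that
each output node encodes one class and $inputs is an input map as above:

	# index of the strongest output node, e.g. as a predicted class label
	my $winner = $net->high($net->run($inputs));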



=item $net->pdiff($array_ref_A, $array_ref_B);

This function is used VERY heavily internally to calculate the difference in percent
between elements of the two array refs passed. It returns a %.20f (sprintf-format) 
percent string.




=item $net->show();

This will dump a simple listing of all the weights of all the connections of every neuron
in the network to STDOUT.




=item $net->crunch($string);

This splits a string passed with /[\s\t]/ into an array ref containing unique indexes
to the words. The words are stored in an internal array and preserved across load() and save()
calls. This is designed to be used to generate unique maps suitable for passing to learn() and 
run() directly. It returns an array ref.

The words are not duplicated internally. For example:

	$net->crunch("How are you?");

Will probably return an array ref containing 1,2,3. A subsequent call of:

    $net->crunch("How is Jane?");

Will probably return an array ref containing 1,4,5. Notice, the first element stayed
the same. That is because it already stored the word "How". So, each word is stored
only once internally and the returned array ref reflects that.


=item $net->uncrunch($array_ref);

Uncrunches a map (array ref) into a scalar string of words separated by ' ' and returns the 
string. This is meant to be used as a counterpart to the crunch() method, above, possibly to 
uncrunch() the output of a run() call. Consider the below code (also in ./examples/ex1.pl):
                           
	use AI::NeuralNet::Mesh;
	my $net = AI::NeuralNet::Mesh->new(2,3);
	
	for (0..3) {
		$net->learn_set([
			$net->crunch("I love chips."),  $net->crunch("That's Junk Food!"),
			$net->crunch("I love apples."), $net->crunch("Good, Healthy Food."),
			$net->crunch("I love pop."),    $net->crunch("That's Junk Food!"),
			$net->crunch("I love oranges."),$net->crunch("Good, Healthy Food.")
		]);
	}
	
	print $net->run_uc("I love corn."),"\n";


On my system, this responds with, "Good, Healthy Food." If you try run_uc() with
"I love pop.", though, you will probably get "Food! apples. apples." (At least it returns
that on my system.) As you can see, the associations are not yet perfect, but it can make
for some interesting demos!



=item $net->crunched($word);

This will return undef if the word is not in the internal crunch list, or it will return the
index of the word if it exists in the crunch list. 

If the word is not in the list, it will set the internal error value with a text message
that you can retrieve with the error() method, below.

=item $net->word($word);

A function alias for crunched().


=item $net->col_width($width);

This is useful for formatting the debugging output of Level 4 if you are learning simple 
bitmaps. This will set the debugger to automatically insert a line break after that many
elements in the map output when dumping the currently run map during a learn loop.

It will return the current width when called with a 0 or undef value.

The column width is preserved across load() and save() calls.


=item $net->random($rand);

This will set the randomness factor for the network. Default is 0. When called 
with no arguments, or an undef value, it will return current randomness value. When
called with a 0 value, it will disable randomness in the network. The randomness factor
is preserved across load() and save() calls. 


=item $net->const($const);

This sets the run const. for the network. The run const. is a value that is added
to every input line when a set of inputs are run() or learn() -ed, to prevent the
network from hanging on a 0 value. When called with no arguments, it returns the current
const. value. It defaults to 0.0001 on a newly-created network. The run const. value
is preserved across load() and save() calls.


=item $net->error();

Returns the last error message which occurred in the mesh, or undef if no errors have
occurred.


=item $net->load_pcx($filename);

NOTE: To use this function, you must have PCX::Loader installed. If you do not have
PCX::Loader installed, it will return undef and store an error for you to retrieve with 
the error() method, below.

This is a treat... this routine will load a PCX-format file (yah, I know ... ancient 
format ... but it is the only one I could find specs for to write it in Perl. If 
anyone can get specs for any other formats, or could write a loader for them, I 
would be very grateful!) Anyway, it loads a PCX-format file that is exactly 320x200 with 8 bits 
per pixel, in pure Perl. It returns a blessed reference to a PCX::Loader object, which 
supports the following routines/members. See example files ex_pcx.pl and ex_pcxl.pl in 
the ./examples/ directory.

See C<perldoc PCX::Loader> for information on the methods of the object returned.

You can download PCX::Loader from 
	http://www.josiah.countystart.com/modules/get.pl?pcx-loader:mpod


=head1 CUSTOM ACTIVATION FUNCTIONS 

Included in this package are four custom activation functions meant to be used
as a guide to create your own, as well as to be useful to you in normal use of the
module. Only one of these functions is exported by default into your namespace:
the range() function. These are not meant to be used as methods, but as functions.
These functions return code refs to a Perl closure which does the actual work when
the time comes.


=item range(0..X);

=item range(@range);

=item range(A,B,C);

range() returns a closure limiting the output 
of that node to a specified set of values.
Good for use in output layers.

Usage example:

	$net->activation(4,range(0..5));

or (in the new() hash constructor form):

	..
	{ 
		nodes		=>	1,
		activation	=>	range(0..5)
	}
	..

You can also pass an array containing the range
values (not an array ref), or you can pass a comma-
separated list of values as parameters:

	$net->activation(4,range(@numbers));
	$net->activation(4,range(6,15,26,106,28,3));

Note: when using a range() activator, train the
net TWICE on the data set, because the first time
the range() function searches for the top value in
the inputs, and therefore, results could fluctuate.
The second learning cycle guarantees more accuracy.

The actual code that implements the range closure is
a bit convoluted, so I will expand on it here as a simple
tutorial for custom activation functions.

	= line 1 = 	sub {
	= line 2 =		my @values = ( 6..10 );
	= line 3 =		my $sum   = shift;
	= line 4 =		my $self  = shift;
	= line 5 =		$self->{top_value}=$sum if($sum>$self->{top_value});
	= line 6 =		my $index = intr($sum/$self->{top_value}*$#values);
	= line 7 =		return $values[$index];
	= line 8 =	}

Now, the actual function fits in one line of code, but I expanded it a bit
here. Line 2 creates our array of allowed output values. Lines 3 and 4 grab
our parameters off the stack, which give us access to the
internals of this node. Line 5 checks to see if the sum output of this
node is higher than any previously encountered, and, if so, stores it
in $self->{top_value} (the same top-value tracking is used
in the ramp() activator). Line 6 computes the index into the allowed
values array by first scaling the $sum to be between 0 and 1 and then
expanding it to fit smoothly inside the number of elements in the array. Then
we simply round to an integer and pluck that index from the array and
use it as the output value for that node. 

See? It's not that hard! Using custom activation functions, you could do
just about anything with the node that you want to, since you have
access to the node just as if you were a blessed member of that node's object.


=item ramp($r);

ramp() performs smooth ramp activation between 0 and 1 if $r is 1, 
or between -1 and 1 if $r is 2. $r defaults to 1.	

You can get this into your namespace with the ':acts' export 
tag as so:
	
	use AI::NeuralNet::Mesh ':acts';

Note: when using a ramp() activator, train the
net at least TWICE on the data set, because the first 
time the ramp() function searches for the top value in
the inputs, and therefore, results could fluctuate.
The second learning cycle guarantees more accuracy.

No code to show here, as it is almost exactly the same as range().
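
That said, here is a hypothetical sketch of such a ramp closure, under the
assumption that it tracks the top value the same way range() does. This is
not the module's actual implementation, just an illustration:

	# hypothetical my_ramp: scale the node's sum by the largest sum seen so far
	sub my_ramp {
		my $r = shift || 1;                         # 1 => output in 0..1, 2 => -1..1
		sub {
			my $sum  = shift;
			my $self = shift;
			$self->{top_value} = $sum if $sum > ($self->{top_value} || 0);
			return 0 unless $self->{top_value};     # avoid division by zero
			my $out = $sum / $self->{top_value};    # scaled to 0..1
			return $r == 2 ? $out * 2 - 1 : $out;   # expand to -1..1 if $r is 2
		}
	}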


=item and_gate($threshold);

Self-explanatory, pretty much. This turns the node into a basic AND gate.
$threshold is used to decide if an input is true or false (1 or 0). If 
an input is below $threshold, it is false. $threshold defaults to 0.5.

You can get this into your namespace with the ':acts' export 
tag as so:
	
	use AI::NeuralNet::Mesh ':acts';

Let's look at the code real quick, as it shows how to get at the individual
input connections:

	= line 1 =	sub {
	= line 2 =		my $sum  = shift;
	= line 3 =		my $self = shift;
	= line 4 =		my $threshold = 0.50;
	= line 5 =		for my $x (0..$self->{_inputs_size}-1) { 
	= line 6 =			return 0.000001 if($self->{_inputs}->[$x]->{value}<$threshold)
	= line 7 =		}
	= line 8 =		return $sum/$self->{_inputs_size};
	= line 9 =	}

Lines 2 and 3 pull in our sum and self reference. Line 5 opens a loop to go over
all the input lines into this node. Line 6 looks at each input line's value 
and compares it to the threshold. If the value of that line is below threshold, then
we return 0.000001 to signify a 0 value. (We don't return a 0 value so that the network
doesn't get hung trying to multiply a 0 by a huge weight during training [it would just
keep getting a 0 as the product, and it would never learn].) Line 8 returns the mean 
value of all the inputs if all inputs were above threshold. 

Very simple, eh? :)
	
=item or_gate($threshold)

Self-explanatory. Turns the node into a basic OR gate; $threshold is used the same as above.

You can get this into your namespace with the ':acts' export 
tag as so:
	
	use AI::NeuralNet::Mesh ':acts';
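
Putting the gate activators to use, a sketch (the layer sizes and threshold
are illustrative):

	# build a tiny net whose single output node acts as an AND gate
	use AI::NeuralNet::Mesh ':acts';
	my $net = AI::NeuralNet::Mesh->new([
		{ nodes => 2 },
		{ nodes => 1, activation => and_gate(0.5) }
	]);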


=head1 VARIABLES

=item $AI::NeuralNet::Mesh::Connector

Using this variable is a step up from average use of this module. This variable 
should hold the fully qualified name of the function used to make the actual connections
between the nodes in the network. This contains '_c' by default, but if you use
this variable, be sure to add the fully qualified name of the method. For example,
in the ALN example, I use a connector in the main package called tree() instead of
the default connector. Before I call the new() constructor, I use this line of code:

	$AI::NeuralNet::Mesh::Connector = 'main::tree'
	
The tree() function is called as a blessed method when it is used internally, providing
access to the blessed reference in the first argument. See notes on CUSTOM NETWORK CONNECTORS,
below, for more information on creating your own custom connector.


=item $AI::NeuralNet::Mesh::DEBUG

This variable controls the verbosity level. It will not hurt anything to set this 
directly, yet most people find it easier to set it using the debug() method, or 
any of its aliases.


=head1 CUSTOM NETWORK CONNECTORS

Creating custom network connectors is a step up from average use of this module. 
However, it can be very useful in creating other styles of neural networks, other
than the default fully-connected feed-forward network. 

You create a custom connector by setting the variable $AI::NeuralNet::Mesh::Connector
to the fully qualified name of the function used to make the actual connections
between the nodes in the network. This variable contains '_c' by default, but if you use
this variable, be sure to add the fully qualified name of the method. For example,
in the ALN example, I use a connector in the main package called tree() instead of
the default connector. Before I call the new() constructor, I use this line of code:

	$AI::NeuralNet::Mesh::Connector = 'main::tree'
	
The tree() function is called as a blessed method when it is used internally, providing
access to the blessed reference in the first argument. 

Example connector:

	sub connect_three {
    	my $self	=	shift;
    	my $r1a		=	shift;
    	my $r1b		=	shift;
    	my $r2a		=	shift;
    	my $r2b		=	shift;
    	my $mesh	=	$self->{mesh};
    	     
	    for my $y (0..($r1b-$r1a)-1) {
			$mesh->[$y+$r1a]->add_output_node($mesh->[$y+$r2a-1]) if($y>0);
			$mesh->[$y+$r1a]->add_output_node($mesh->[$y+$r2a]) if($y<($r2b-$r2a));
			$mesh->[$y+$r1a]->add_output_node($mesh->[$y+$r2a+1]) if($y<($r2b-$r2a));
		}
	}
	
This is a very simple example. It feeds the outputs of every node in the first layer
to the node directly above it, as well as to the nodes on either side of the node directly
above it, checking the range edges, of course.

The network is stored internally as one long array of node objects. The goal here
is to connect one range of nodes in that array to another range of nodes. The calling
function has already calculated the indices into the array, and it passes them to you
as the four arguments after the $self reference. The first two arguments we will call
$r1a and $r1b. These define the start and end indices of the first range, or "layer." Likewise,
the next two arguments, $r2a and $r2b, define the start and end indices of the second
layer. We also grab a reference to the mesh array so we don't have to type the $self
reference over and over.

The loop that follows the arguments in the above example is very simple. It opens
a for() loop over the range of numbers, calculating the size instead of just going
$r1a..$r1b because we use the loop index with the next layer up as well.

$y + $r1a gives the index into the mesh array of the current node to connect the output FROM.
We need to connect this node's output lines to the next layer's input nodes. We do this
with a simple method of the outputting node (the node at $y+$r1a), called add_output_node().

add_output_node() takes one simple argument: a blessed reference to a node that it is supposed
to output its final value TO. We get this blessed reference with more simple addition.

$y + $r2a gives us the node directly above the first node (supposedly...I'll get to the "supposedly"
part in a minute.) By adding or subtracting from this number we get the neighbor nodes.
In the above example you can see we check the $y index to see that we haven't come close to
any of the edges of the range.

Using $y+$r2a we get the index of the node to pass to add_output_node() on the first node at
$y+B<$r1a>. 

And that's all there is to it!

For the fun of it, we'll take a quick look at the default connector.
Below is the actual default connector code, albeit a bit cleaned up, as well as
line numbers added.

	= line 1  =	sub _c {
	= line 2  =    	my $self	=	shift;
	= line 3  =    	my $r1a		=	shift;
	= line 4  =    	my $r1b		=	shift;
	= line 5  =    	my $r2a		=	shift;
	= line 6  =    	my $r2b		=	shift;
	= line 7  =    	my $mesh	=	$self->{mesh};
	= line 8  =		for my $y ($r1a..$r1b-1) {
	= line 9  =			for my $z ($r2a..$r2b-1) {
	= line 10 =				$mesh->[$y]->add_output_node($mesh->[$z]);
	= line 11 =			}
	= line 12 =		}
	= line 13 =	}
    
It's that easy! The simplest connector (well, almost, anyway). It just connects each
node in the first layer, defined by ($r1a..$r1b), to every node in the second layer, as
defined by ($r2a..$r2b).

Those of you that are still reading: if you do come up with any new connection functions,
PLEASE SEND THEM TO ME. I would love to see what others are doing, as well as get new
network ideas. I will probably include any connectors you send over in future releases (with
proper credit and permission, of course).

Anyways, happy coding!


=head1 WHAT CAN IT DO?

Rodin Porrata asked on the ai-neuralnet-backprop mailing list,
"What can they [Neural Networks] do?". In regards to that question,
consider the following:

Neural Nets are formed by simulated neurons connected together much the same
way the brain's neurons are. Neural networks are able to associate and
generalize without rules.  They have solved problems in pattern recognition,
robotics, speech processing, financial predicting and signal processing, to
name a few.

One of the first impressive neural networks was NetTalk, which read in ASCII
text and correctly pronounced the words (producing phonemes which drove a
speech chip), even those it had never seen before.  Designed by Johns Hopkins
biophysicist Terry Sejnowski and Charles Rosenberg of Princeton in 1986,
this application made the Backpropagation training algorithm famous.  Using
the same paradigm, a neural network has been trained to classify sonar
returns from an undersea mine and a rock.  This classifier, designed by
Sejnowski and R. Paul Gorman, performed better than a nearest-neighbor
classifier.

The kinds of problems best solved by neural networks are those that people
are good at such as association, evaluation and pattern recognition.
Problems that are difficult to compute and do not require perfect answers,
just very good answers, are also best done with neural networks.  A quick,
very good response is often more desirable than a more accurate answer which
takes longer to compute. For this reason, tasks such as
loan analysis and financial forecasting make good applications.  New network
designers are working on weather forecasts by neural networks (Myself
included).  Currently, doctors are developing medical neural networks as an
aid in diagnosis.  Attorneys and insurance companies are also working on
neural networks to help estimate the value of claims.

Neural networks are poor at precise calculations and serial processing. They
are also unable to predict or recognize anything that does not inherently
contain some sort of pattern.  For example, they cannot predict the lottery,
since this is a random process.  It is unlikely that a neural network could
be built which has the capacity to think as well as a person does for two
reasons.  Neural networks are terrible at deduction, or logical thinking and
the human brain is just too complex to completely simulate.  Also, some
problems are too difficult for present technology.  Real vision, for
example, is a long way off.

In short, Neural Networks are poor at precise calculations, but good at
association, evaluation, and pattern recognition.


=head1 EXAMPLES

Included are several example files in the "examples" directory of the
distribution ZIP file. Each of the examples includes a short explanation 
at the top of the file. Each of these is meant to demonstrate simple, yet 
practical (for the most part :-) uses of this module.
	


=head1 OTHER INCLUDED PACKAGES

These packages are not designed to be called directly; they are for internal use. They are
listed here simply for your reference.

=item AI::NeuralNet::Mesh::node

This is the worker package of the mesh. It implements all the individual nodes of the mesh.
It might be good to look at the source for this package (in the Mesh.pm file) if you
plan to do a lot of extensive work with custom node activation types.

=item AI::NeuralNet::Mesh::cap

This is applied to the input layer of the mesh to prevent the mesh from trying to recursively
adjust weights out through the inputs.

=item AI::NeuralNet::Mesh::output

This is simply a data collector package clamped onto the output layer to record the data 
as it comes out of the mesh. 


=head1 BUGS

This is a beta release of C<AI::NeuralNet::Mesh>, and that holding true, I am sure 
there are probably bugs in here which I just have not found yet. If you find bugs in this module, I would 
appreciate it greatly if you could report them to me at F<E<lt>jdb@wcoil.comE<gt>>,
or, even better, try to patch them yourself and figure out why the bug is being buggy, and
send me the patched code, again at F<E<lt>jdb@wcoil.comE<gt>>. 



=head1 AUTHOR

Josiah Bryan F<E<lt>jdb@wcoil.comE<gt>>

Copyright (c) 2000 Josiah Bryan. All rights reserved. This program is free software; 
you can redistribute it and/or modify it under the same terms as Perl itself.

The C<AI::NeuralNet::Mesh> and related modules are free software. THEY COME WITHOUT WARRANTY OF ANY KIND.

$Id: AI::NeuralNet::Mesh.pm, v0.44 2000/15/09 03:29:08 josiah Exp $


=head1 THANKS

Below is a list of the people that have contributed in some way to this module (in no particular order):

	Rodin Porrata, rodin@ursa.llnl.gov
	Randal L. Schwartz, merlyn@stonehenge.com
	Michiel de Roo, michiel@geo.uu.nl
	
Thanks to Randal and Michiel for spotting some documentation and makefile bugs in the last release.
Thanks to Rodin for continual suggestions and questions about the module and more.

=head1 DOWNLOAD

You can always download the latest copy of AI::NeuralNet::Mesh
from http://www.josiah.countystart.com/modules/get.pl?mesh:pod


=head1 MAILING LIST

A mailing list has been set up for AI::NeuralNet::Mesh and AI::NeuralNet::BackProp. 
The list is for discussion of AI and neural net related topics as they pertain to 
AI::NeuralNet::BackProp and AI::NeuralNet::Mesh. I will also announce in the group
each time a new release of AI::NeuralNet::Mesh is available.

The list address is at:
	 ai-neuralnet-backprop@egroups.com 
	 
To subscribe, send a blank email:
	ai-neuralnet-backprop-subscribe@egroups.com  


=cut















AI-NeuralNet-SOM

 view release on metacpan or  search on metacpan

examples/eigenvector_initialization.pl  view on Meta::CPAN


my $epsilon = 0.001;
my $epochs  = 400;

{ # random initialisation
    my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
					   input_dim  => $dim);

    $nn->initialize;                                      # random
    my @mes = $nn->train ($epochs, @vs);
    warn "random:   length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}

{ # constant initialisation
    my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
					   input_dim  => $dim);

    $nn->initialize ($vs[-1]);
    my @mes = $nn->train ($epochs, @vs);
    warn "constant: length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}

{ # eigenvector initialisation
    my $nn = new AI::NeuralNet::SOM::Rect (output_dim => "5x6",
					   input_dim  => $dim);
    my @training_vectors;                                                # find these training vectors 
    {                                                                    # and prime them with this eigenvector stuff;
	use PDL;
	my $A = pdl \@vs;

	while ($A->getdim(0) < $A->getdim(1)) {                          # make the beast quadratic
	    $A = append ($A, zeroes (1));                                # by padding zeroes
	}
	my ($E, $e) = eigens_sym $A;
#	print $E;
#	print $e;

	my @es = list $e;                                                # eigenvalues
#	warn "es : ".Dumper \@es;
	my @es_desc = sort { $b <=> $a } @es;                            # eigenvalues sorted desc
#	warn "desc: ".Dumper \@es_desc;
	my @es_idx  = map { _find_num ($_, \@es) } @es_desc;             # eigenvalue indices sorted by eigenvalue (desc)
#	warn "idx: ".Dumper \@es_idx;

# linear search: return the index of the first element of @$l that equals $v,
# or undef if $v is not found
sub _find_num {
    my $v = shift;
    my $l = shift;
    for my $i (0..$#$l) {
	return $i if $v == $l->[$i];
    }
    return undef;
}

	for (@es_idx) {                                                  # from the highest values downwards, take the index
	    push @training_vectors, [ list $E->dice($_) ] ;              # get the corresponding vector
	}
    }

    $nn->initialize (@training_vectors[0..0]);                           # take only the biggest ones (the eigenvalues are big, actually)
#warn $nn->as_string;
    my @mes = $nn->train ($epochs, @vs);
    warn "eigen:    length until error is < $epsilon ". scalar (grep { $_ >= $epsilon } @mes);
}

__END__


