Result:
Found more than 680 distributions; search limited to the first 2001 files matching the query (run in 0.586 s).


AI-ExpertSystem-Advanced

inc/Module/Install.pm

		# Normalise multipart versions
		$s =~ s/(\.)(\d{1,3})/sprintf("$1%03d",$2)/eg;
	}
	$s =~ s/^(\d+)\.?//;
	my $l = $1 || 0;
	my @v = map {
		$_ . '0' x (3 - length $_)
	} $s =~ /(\d{1,3})\D?/g;
	$l = $l . '.' . join '', @v if @v;
	return $l + 0;
}
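
The excerpt above is the tail of Module::Install's version-normalisation helper. Reconstructed from the commonly bundled inc/Module/Install.pm (from memory, so treat it as a sketch), with a sample call:

	sub _version {
		my $s = shift || 0;
		my $d =()= $s =~ /(\.)/g;    # count the dots
		if ( $d >= 2 ) {
			# Normalise multipart versions
			$s =~ s/(\.)(\d{1,3})/sprintf("$1%03d",$2)/eg;
		}
		$s =~ s/^(\d+)\.?//;
		my $l = $1 || 0;
		my @v = map {
			$_ . '0' x (3 - length $_)
		} $s =~ /(\d{1,3})\D?/g;
		$l = $l . '.' . join '', @v if @v;
		return $l + 0;
	}

	print _version('1.2.3');   # prints 1.002003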

AI-ExpertSystem-Simple

lib/AI/ExpertSystem/Simple.pm

sub _explain_this {
	my ($self, $rule, $depth, @processed_rules) = @_;

	$self->_add_to_log( "${depth}Explaining rule '$rule'" );

	my %dont_do_these = map{ $_ => 1 } @processed_rules;

	my @check_these_rules = ();

	my %conditions = $self->{_rules}->{$rule}->conditions();
	foreach my $name (sort keys %conditions) {

AI-FANN-Evolving

lib/AI/FANN/Evolving.pm

	my %list_properties = __PACKAGE__->_list_properties;
	for my $prop ( keys %list_properties ) {
		my $handler = $list_properties{$prop};		
		my @values = $self->$prop;
		if ( ref $handler ) {
			$self->$prop( map { $handler->($_,$mu) } @values );
		}
		else {
			$self->$prop( map { _mutate_enum($handler,$_,$mu) } @values );
		}		
	}	
	
	# mutate the layer properties
	$log->debug("mutating layer properties");

lib/AI/FANN/Evolving.pm

	}
}

=item activation_function

Getter/setter for the function that maps inputs to outputs. default is 
FANN_SIGMOID_SYMMETRIC

=back

=cut

AI-FANN

lib/AI/FANN.pm


    @_ == 1 or croak "Usage: AI::FANN::get_neurons(self)";

    my $self = shift;
    if (wantarray) {
        map { $self->layer_num_neurons($_) } (0 .. $self->num_layers - 1);
    }
    else {
        $self->total_neurons;
    }
}
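
Because of the C<wantarray> branch, get_neurons() is context sensitive. A hypothetical call site (assuming a constructed AI::FANN network in $ann):

	my @per_layer = $ann->get_neurons;   # list context: neuron count per layer
	my $total     = $ann->get_neurons;   # scalar context: total neuron count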

lib/AI/FANN.pm

  Pure Data and Mathematica bindings are available. A reference manual
  accompanies the library with examples and recommendations on how to
  use the library. A graphical user interface is also available for
  the library.

The AI::FANN object-oriented interface provides an almost direct
mapping to the C library API. Some differences have been introduced
to make it more perlish:

=over 4

AI-FuzzyEngine

lib/AI/FuzzyEngine.pm


    # TODO: Rapid return if @_ == 1 (isa piddle)
    # TODO: join "-", ndims -> quick check for equal dimensions.

    # All elements must get piddles
    my @pdls  = map { PDL::Core::topdl($_) } @vals;

    # Get size of wrapping piddle (using a trick)
    # applying valid expansion rules for element wise operations
    my $zeros = PDL->pdl(0);
    #        v-- does not work due to threading mechanisms :-((

lib/AI/FuzzyEngine.pm

        eval { $zeros = $zeros + $p->zeros(); 1
            } or croak q{Can't expand piddles to same size};
    }

    # Now, cat 'em by expanding them on the fly
    my $vals = PDL::cat( map {$_ + $zeros} @pdls );
    return $vals;
};

1;

lib/AI/FuzzyEngine.pm

    my $zeros = PDL->pdl(0);
    # Note: $zeros += $_->zeros() for @pdls does not work here
    $zeros = $zeros + $_->zeros() for @pdls;

    # Expand all piddles
    @pdls = map {$_ + $zeros} @pdls;

Defuzzification uses some heavy non-threading code,
so there might be a performance penalty for big piddles. 

=head2 Todos

AI-FuzzyInference

FuzzyInference/Set.pm

sub complement {
    my ($self, $name) = @_;

    my @coords = $self->coords($name);
    my $i = 0;
    return map {++$i % 2 ? $_ : 1 - $_} @coords;
}
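
# Example (data ours): coords alternate as flat (x, y) pairs; complement
# keeps each x and maps each membership y to 1 - y, so
#   (0, 0.25, 5, 1)  becomes  (0, 0.75, 5, 0)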

sub coords {
    my ($self,
	$name,

FuzzyInference/Set.pm

	$name,
	$scale,
	) = @_;

    my $i = 0;
    my @c = map { $_ * ++$i % 2 ? 1 : $scale } $self->coords($name);

    return @c;
}

sub clip {   # min implication

FuzzyInference/Set.pm

	$name,
	$val,
	) = @_;

    my $i = 0;
    my @c = map {
	++$i % 2 ? $_ : $_ > $val ? $val : $_
	}$self->coords($name);

    return @c;
}
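
# Example (data ours): clip() caps each membership y at $val while leaving
# the x values alone; with $val = 0.5,
#   (0, 0.2, 5, 0.9)  becomes  (0, 0.2, 5, 0.5)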

AI-Gene-Sequence

AI/Gene/Sequence.pm

  %EXPORT_TAGS = ();
  @EXPORT_OK   = qw();
}
our @EXPORT_OK;

my ($probs,$mut_keys) = _normalise( { map {$_ => 1} 
				      qw(insert remove overwrite 
					 duplicate minor major 
					 switch shuffle reverse) } );

##

AI/Gene/Sequence.pm

  if ($sum <= 0) {
    die "Cannot randomly mutate with bad probability distribution";
  }
  else {
    my $cum;
    @{$h2}{ @{$muts} } = map {$cum +=$_; $cum / $sum} @{$hr}{ @{$muts} };
    return ($h2, $muts);
  }
}

##
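
The hash slice above turns raw mutation weights into a cumulative distribution, so a single rand() draw can pick a mutation. A self-contained sketch of the same idea (names and weights ours):

	my %weight = ( insert => 1, remove => 1, major => 2 );
	my @muts   = sort keys %weight;
	my $sum    = 0;
	$sum += $_ for values %weight;
	my ( $cum, %cdf );
	@cdf{@muts} = map { $cum += $_; $cum / $sum } @weight{@muts};
	# %cdf is ( insert => 0.25, major => 0.75, remove => 1 )
	my $r = rand;
	my ($pick) = grep { $r <= $cdf{$_} } @muts;   # first bucket covering $r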

AI-Genetic-Pro

lib/AI/Genetic/Pro.pm

#=======================================================================
sub new {
	my ( $class, %args ) = ( shift, @_ );
	
	#-------------------------------------------------------------------
	my %opts = map { if(ref $_){$_}else{ /^-?(.*)$/o; $1 }} @_;
	my $self = bless \%opts, $class;
	
	#-------------------------------------------------------------------
	$AI::Genetic::Pro::Array::Type::Native = 1 if $self->native;
	

lib/AI/Genetic/Pro.pm

		_selector	=> undef,
		_strategist	=> undef,
		_mutator	=> undef,
	};
	
	$clone->{ chromosomes } = [ map { ${ tied( @$_ ) } } @{ $self->chromosomes } ] 
		if $self->_package;
	
	foreach my $key(keys %$self){
		next if exists $clone->{$key};
		$clone->{$key} = $self->{$key};

lib/AI/Genetic/Pro.pm

#=======================================================================
sub slurp {
	my ( $self, $dump ) = @_;

	if( my $typ = $self->_package ){ 
		@{ $dump->{ chromosomes } } = map {
			my $arr = $typ->make_with_packed( $_ );
			bless $arr, q[AI::Genetic::Pro::Chromosome];
		} @{ $dump->{ chromosomes } };
	}
    

lib/AI/Genetic/Pro.pm

		return \@chr;
		
	}elsif($self->type eq q/rangevector/){
		my $fix_range = $self->_fix_range;
		my $c = -1;
		#my @array = map { $c++; warn "WARN: $c | ",scalar @$chromosome,"\n" if not defined $fix_range->[$c]; $_ ? $_ - $fix_range->[$c] : undef } @$chromosome;
		my @array = map { $c++; $_ ? $_ - $fix_range->[$c] : undef } @$chromosome;
		
		return @array if wantarray;
		return \@array;
	}else{
		my $cnt = 0;
		my @array = map { $self->_translations->[$cnt++]->[$_] } @$chromosome;
		return @array if wantarray;
		return \@array;
	}
}
#=======================================================================

lib/AI/Genetic/Pro.pm

	return join(q/___/, @$array);
}
#=======================================================================
sub as_string {	
	return join(q//, @{$_[1]}) if $_[0]->type eq q/bitvector/;
	return 	join(q/___/, map { defined $_ ? $_ : q/ / } $_[0]->as_array($_[1]));
}
#=======================================================================
sub as_value { 
	my ($self, $chromosome) = @_;
	croak(q/You MUST call 'as_value' as method of 'AI::Genetic::Pro' object./)

lib/AI/Genetic/Pro.pm

	my ( $self ) = @_;
	
	my @res;
	
	if( $self->_package ){
		@res = map { 
			[
				${ tied( @{ $self->chromosomes->[ $_ ] } ) },
				$self->_fitness->{ $_ },
			]
		} 0 .. $self->population - 1
	}else{
		@res = map { 
			[
				$self->chromosomes->[ $_ ],
				$self->_fitness->{ $_ },
			]
		} 0 .. $self->population - 1

lib/AI/Genetic/Pro.pm

			$self->generation($self->generation + 1);
			# update history -----------------------------------------------
			$self->_save_history;
			#---------------------------------------------------------------
			# preservation of N unique chromosomes
			@preserved = map { clone($_) } @{ $self->getFittest_as_arrayref($self->preserve - 1, 1) };
			# selection ----------------------------------------------------
			$self->_select_parents();
			# crossover ----------------------------------------------------
			$self->_crossover();
			# mutation -----------------------------------------------------

AI-Genetic

Genetic.pm

		 randomSinglePoint => \&AI::Genetic::Defaults::randomSinglePoint,
		 randomTwoPoint    => \&AI::Genetic::Defaults::randomTwoPoint,
		 randomUniform     => \&AI::Genetic::Defaults::randomUniform,
		);

# this hash maps the genome types to the
# classes they're defined in.

my %_genome2class = (
		     bitvector   => 'AI::Genetic::IndBitVector',
		     rangevector => 'AI::Genetic::IndRangeVector',

AI-LibNeural

MANIFEST

MANIFEST.SKIP
Makefile.PL
README
t/00.AILibNeural.t
t/01.AILibNeuralChild.t
typemap

AI-Logic-AnswerSet

lib/AI/Logic/AnswerSet.pm

		return 0;
	}
	return 0;
}

sub mapAS {	#Mapping of the Answer Sets in an array of hashes

	my $countAS = 0;

	my @answerSets = @{$_[0]};

lib/AI/Logic/AnswerSet.pm

  my @stdoutput = AI::Logic::AnswerSet::singleExec("3-colorability.txt");

  # parse the output
  my @res = AI::Logic::AnswerSet::getAS(@stdoutput);

  # map the results
  my @mappedAS = AI::Logic::AnswerSet::mapAS(\@res);

  # get a predicate from the results
  my @col = AI::Logic::AnswerSet::getPred(\@mappedAS,1,"col");

  # get a term of a predicate
  my @term = AI::Logic::AnswerSet::getProjection(\@mappedAS,1,"col",2);


=head1 DESCRIPTION

This extension allows you to interact with DLV, an Artificial Intelligence system

lib/AI/Logic/AnswerSet.pm

Parses the output of a DLV execution and gather the answer sets.

	my @out = AI::Logic::AnswerSet::singleExec("3col.txt","nodes.txt","edges.txt","-nofacts");
	my @result = AI::Logic::AnswerSet::getAS(@out);

=head3 mapAS

Parses the new output in order to save and organize the results into a hashmap.

	my @out = AI::Logic::AnswerSet::singleExec("3col.txt","nodes.txt","edges.txt","-nofacts");
	my @result = AI::Logic::AnswerSet::getAS(@out);
	my @mappedAS = AI::Logic::AnswerSet::mapAS(@result);

The user can set some constraints on the data to be saved in the hashmap, such as predicates, or answer sets, or both.

	my @mappedAS = AI::Logic::AnswerSet::mapAS(@result,@predicates,@answerSets);

For instance, consider the 3-colorability problem: imagine having the edges
in the hashmap and printing the edges contained in the third answer set
returned by DLV. The following print statement illustrates how
the hashmap works:

	print "Edges: @{$mappedAS[2]{edge}}\n";

In this case, we are printing the array containing the predicate "edge".

=head3 getPred

Easily manage the hashmap and get the desired predicate (see the print example
described in the method above):

	my @edges = AI::Logic::AnswerSet::getPred(\@mappedAS,3,"edge");

=head3 getProjection

Returns the projection of the n-th term of a specified predicate.
Suppose that we have the predicate "person" C<person(Name,Surename);> and
that we just want the surenames of all the instances of "person":

	my @surenames = AI::Logic::AnswerSet::getProjection(\@mappedAS,3,"person",2);

The parameters are, respectively: hashmap, number of the answer set, name of the predicate,
position of the term.

=head3 statistics

This method returns an array of hashes with some stats of every predicate of every answer set,

lib/AI/Logic/AnswerSet.pm


	my @res = AI::Logic::AnswerSet::getAS(@output);
	my @predicates = ("node","edge");
	my @stats = AI::Logic::AnswerSet::statistics(\@res,\@predicates);

In this case the data structure returned is the same as the one returned by C<mapAS()>.
Hence, for each answer set (each element of the array of hashes), the hashmap will appear 
like this:

	{
		node => 6
		edge => 9

AI-ML

t/00-report-prereqs.t

        }

        if ( @reports ) {
            push @full_reports, "=== $title ===\n\n";

            my $ml = _max( map { length $_->[0] } @reports );
            my $wl = _max( map { length $_->[1] } @reports );
            my $hl = _max( map { length $_->[2] } @reports );

            if ($type eq 'modules') {
                splice @reports, 1, 0, ["-" x $ml, "", "-" x $hl];
                push @full_reports, map { sprintf("    %*s %*s\n", -$ml, $_->[0], $hl, $_->[2]) } @reports;
            }
            else {
                splice @reports, 1, 0, ["-" x $ml, "-" x $wl, "-" x $hl];
                push @full_reports, map { sprintf("    %*s %*s %*s\n", -$ml, $_->[0], $wl, $_->[1], $hl, $_->[2]) } @reports;
            }

            push @full_reports, "\n";
        }
    }

AI-MXNet-Gluon-Contrib

lib/AI/MXNet/Gluon/Contrib/NN/BasicLayers.pm

has 'axis' => (is => 'rw', isa => 'Int', default => -1);
method python_constructor_arguments() { ['axis'] }

method forward(GluonInput $x)
{
    return AI::MXNet::NDArray->concat((map { $_->($x) } $self->_children->values), dim=>$self->axis);
}

__PACKAGE__->register('AI::MXNet::Gluon::NN');

package AI::MXNet::Gluon::NN::HybridConcurrent;

lib/AI/MXNet/Gluon/Contrib/NN/BasicLayers.pm

has 'axis' => (is => 'rw', isa => 'Int', default => -1);
method python_constructor_arguments() { ['axis'] }

method hybrid_forward(GluonClass $F, GluonInput $x)
{
    return $F->concat((map { $_->($x) } $self->_children->values), dim=>$self->axis);
}

__PACKAGE__->register('AI::MXNet::Gluon::NN');

package AI::MXNet::Gluon::NN::Identity;

AI-MXNet-Gluon-ModelZoo

examples/image_classification.pl

## get a pretrained model (download parameters file if necessary)
my $net = get_model($model, pretrained => 1);

## ImageNet classes
my $fname = download('http://data.mxnet.io/models/imagenet/synset.txt');
my @text_labels = map { chomp; s/^\S+\s+//; $_ } IO::File->new($fname)->getlines;

## get the image from the disk or net
if($image =~ /^https/)
{
    eval { require IO::Socket::SSL; };

AI-MXNet

examples/char_lstm.pl

my $file = "data/input.txt";
open(F, $file) or die "can't open $file: $!";
my $fdata;
{ local($/) = undef; $fdata = <F>; close(F) };
my %vocabulary; my $i = 0;
$fdata = pdl(map{ exists $vocabulary{$_} ? $vocabulary{$_} : ($vocabulary{$_} = $i++) } split(//, $fdata));
my $data_iter = AI::MXNet::RNN::IO::ASCIIIterator->new(
    batch_size => $batch_size,
    data       => $fdata,
    seq_size   => $seq_size
);

examples/char_lstm.pl

my $net   = mx->sym->SoftmaxOutput(data => $pred, label => $label, name => 'softmax');

my $contexts;
if(defined $gpus)
{
    $contexts = [map { mx->gpu($_) } split(/,/, $gpus)];
}
else
{
    $contexts = mx->cpu(0);
}

AI-MXNetCAPI

MANIFEST

META.json
META.yml
t/AI-MXNetCAPI.t
lib/AI/MXNetCAPI.pm
mxnet.i
mxnet_typemaps.i

AI-MaxEntropy

inc/Module/AutoInstall.pm

use vars qw{$VERSION};
BEGIN {
	$VERSION = '1.03';
}

# special map on pre-defined feature sets
my %FeatureMap = (
    ''      => 'Core Features',    # XXX: deprecated
    '-core' => 'Core Features',
);

inc/Module/AutoInstall.pm


    my $maxlen = length(
        (
            sort   { length($b) <=> length($a) }
              grep { /^[^\-]/ }
              map  {
                ref($_)
                  ? ( ( ref($_) eq 'HASH' ) ? keys(%$_) : @{$_} )
                  : ''
              }
              map { +{@args}->{$_} }
              grep { /^[^\-]/ or /^-core$/i } keys %{ +{@args} }
        )[0]
    );

    while ( my ( $feature, $modules ) = splice( @args, 0, 2 ) ) {

inc/Module/AutoInstall.pm

            if (
                defined( my $cur = _version_check( _load($mod), $arg ||= 0 ) ) )
            {
                print "loaded. ($cur" . ( $arg ? " >= $arg" : '' ) . ")\n";
                push @Existing, $mod => $arg;
                $DisabledTests{$_} = 1 for map { glob($_) } @skiptests;
            }
            else {
                print "missing." . ( $arg ? " (would need $arg)" : '' ) . "\n";
                push @required, $mod => $arg;
            }

inc/Module/AutoInstall.pm

                ) =~ /^[Yy]/
            )
          )
        {
            push( @Missing, @required );
            $DisabledTests{$_} = 1 for map { glob($_) } @skiptests;
        }

        elsif ( !$SkipInstall
            and $default
            and $mandatory
            and
            _prompt( qq{==> The module(s) are mandatory! Really skip?}, 'n', )
            =~ /^[Nn]/ )
        {
            push( @Missing, @required );
            $DisabledTests{$_} = 1 for map { glob($_) } @skiptests;
        }

        else {
            $DisabledTests{$_} = 1 for map { glob($_) } @tests;
        }
    }

    $UnderCPAN = _check_lock();    # check for $UnderCPAN

inc/Module/AutoInstall.pm


sub install {
    my $class = shift;

    my $i;    # used below to strip leading '-' from config keys
    my @config = ( map { s/^-// if ++$i; $_ } @{ +shift } );

    my ( @modules, @installed );
    while ( my ( $pkg, $ver ) = splice( @_, 0, 2 ) ) {

        # grep out those already installed

inc/Module/AutoInstall.pm

    }

    $args{test}{TESTS} ||= 't/*.t';
    $args{test}{TESTS} = join( ' ',
        grep { !exists( $DisabledTests{$_} ) }
          map { glob($_) } split( /\s+/, $args{test}{TESTS} ) );

    my $missing = join( ',', @Missing );
    my $config =
      join( ',', UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} )
      if $Config;

AI-MicroStructure

bin/remote.pl

    # compute all categories
    my @categories = ( [ $data->{names}, '' ] );
    while ( my ( $h, $k ) = @{ shift @categories or []} ) {
        if ( ref $h eq 'HASH' ) {
            push @categories,
                map { [ $h->{$_}, ( $k ? "$k$sep$_" : $_ ) ] } keys %$h;
        }
        else {    # leaf
            my @items = split /\s+/, $h;
            while ($k) {
                push @{ ${"$class\::KnowHow"}{$k} }, @items;

bin/remote.pl

    # compute the base Knowledge for this category
    no strict 'refs';
    my %seen;
    $self->{base} = [
        grep { !$seen{$_}++ }
            map { @{ ${"$class\::KnowHow"}{$_} } }
            $self->{category} eq ':all'
        ? ( keys %{"$class\::KnowHow"} )
        : ( $self->{category} )
    ];
    return;

bin/remote.pl

    if ( ref $src eq 'ARRAY' ) {
        return @$src;
    }
    elsif ( ref $src eq 'HASH' ) {
        return
            map { ref $_ ? @$_ : $_ } $_[1] ? $src->{ $_[1] } : values %$src;
    }
    return $src;
}

sub has_remoteKnowledge { return defined $_[0]->source(); }

bin/remote.pl

        female => 'http://en.wikipedia.org/wiki/Knowledge_of_female_porn_stars',
        male   => 'http://en.wikipedia.org/wiki/Knowledge_of_male_porn_stars'
    },
    extract => sub {
        return
            map { AI::MicroStructure::RemoteKnowledge::tr_accent($_) }
            map { AI::MicroStructure::RemoteKnowledge::tr_utf8_basic($_) }
            grep { ! /^Knowledge_|_Groups$/ }
            map { s/[-\s']/_/g; s/[."]//g; $_ }
            $_[0]
            =~ m{^<li>(?:<[^>]*>)?(.*?)(?:(?: ?[-,(<]| aka | see ).*)?</li>}mig
    },
);

AI-NNFlex

lib/AI/NNFlex.pm


	my $filename = $config{'filename'};

	open (IFILE,$filename) or return "Error: unable to open $filename because $!";

	# we have to build a map of nodeids to objects
	my %nodeMap;
	foreach my $layer (@{$network->{'layers'}})
	{
		foreach my $node (@{$layer->{'nodes'}})
		{
			$nodeMap{$node->{'nodeid'}} = $node;
		}
	}

	# Add the bias node into the map
	if ($network->{'bias'})
	{
		$nodeMap{'bias'} = $network->{'biasnode'};
	}

AI-NaiveBayes

lib/AI/NaiveBayes/Learner.pm

            my $limit = min($self->features_kept, 0+@features);
            if ($limit < 1) {
                $limit = int($limit * keys(%old));
            }
            my @top = @features[0..$limit-1];
            my %kept = map { $_ => $old{$_} } @top;
            $model->{probs}{$label} = \%kept;
        }
    }
    my $classifier_class = $self->classifier_class;
    return $classifier_class->new( model => $model );

AI-NaiveBayes1

NaiveBayes1.pm

    # prepare table category P(category)
    my @lines;
    push @lines, 'category ', '-';
    push @lines, "$_ " foreach @labels;
    @lines = _append_lines(@lines);
    @lines = map { $_.='| ' } @lines;
    $lines[1] = substr($lines[1],0,length($lines[1])-2).'+-';
    $lines[0] .= "P(category) ";
    foreach my $i (2..$#lines) {
	my $label = $labels[$i-2];
	$lines[$i] .= $m->{labelprob}{$label} .' ';

NaiveBayes1.pm


sub _append_lines {
    my @l = @_;
    my $m = 0;
    foreach (@l) { $m = length($_) if length($_) > $m }
    @l = map 
    { while (length($_) < $m) { $_.=substr($_,length($_)-1) }; $_ }
    @l;
    return @l;
}

AI-Nerl

examples/digits/idx_to_fits.pl


die 'filename' unless $ARGV[0];
my $img_filename = $ARGV[0];
my $img_data = read_file( $img_filename, binmode => ':raw' ) ;

my @header = map {ord}  split ('', substr ($img_data, 0, 4, ''));
my $numdims = $header[3];
my @dims = map {ord} split ('',substr($img_data, 0, 4*$numdims, ''));

#'IDX' format described here: http://yann.lecun.com/exdb/mnist/
for (0..$numdims-1){
   $dims[$_] = 256*$dims[4*$_+2] + $dims[4*$_+3];
}
@dims=@dims[0..$numdims-1];
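# An equivalent sketch for the dimension parsing above (assumption: IDX
# dimension sizes are 32-bit big-endian integers, per the spec):
#   my @dims = unpack 'N*', substr($img_data, 0, 4*$numdims, '');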
#die join ' ',@dims;
#my @img_data = map{ord}split('',$img_data);
my $img_pdl = pdl(unpack('C*',$img_data));

use PDL::Graphics2D;

if(!defined($dims[1])){

AI-NeuralNet-BackProp

BackProp.pm

	sub get_output {
		my $self		=	shift;
		my $size		=	$self->{SYNAPSES}->{SIZE} || 0;
		my $value		=	0;
		my $state		= 	0;
		my (@map,@weight);
	
	    # We loop through all the synapses connected to this one and add the weighted
	    # values together, saving them in a debugging list.
		for (0..$size-1) {
			$value	+=	$self->{SYNAPSES}->{LIST}->[$_]->{VALUE};
			$self->{SYNAPSES}->{LIST}->[$_]->{FIRED} = 0;
			
			$map[$_]=$self->{SYNAPSES}->{LIST}->[$_]->{VALUE};
			$weight[$_]=$self->{SYNAPSES}->{LIST}->[$_]->{WEIGHT};
		}
		                                              
		# Debugger
		AI::NeuralNet::BackProp::join_cols(\@map,5) if(($AI::NeuralNet::BackProp::DEBUG eq 3) || ($AI::NeuralNet::BackProp::DEBUG eq 2));
		AI::NeuralNet::BackProp::out2("Weights: ".join(" ",@weight)."\n");
		
		# Simply average the values and get the integer of the average.
		$state	=	intr($value/$size);
		

BackProp.pm

		}
	}
	
	# Registers some neuron as a synapse of this neuron.
	# This is called exclusively by connect(), except
	# in initialize_group() to connect the _map() package.
	sub register_synapse {
		my $self	=	shift;
		my $synapse	=	shift;
		my $sid		=	$self->{SYNAPSES}->{SIZE} || 0;
		$self->{SYNAPSES}->{LIST}->[$sid]->{PKG}		=	$synapse;

BackProp.pm

	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $a		=	shift;
		my $b		=	shift;
		my $x;
		foreach my $el (@{$map}) { 
			my $str = ((int($el))?$a:$b);
			$str=$el."\0" if(!$a);
			print $str;
			$x++;
			if($x>$break-1) {

BackProp.pm

	sub load_pcx {
		my $self	=	shift;
		return AI::NeuralNet::BackProp::PCX->new($self,shift);
	}	
	
	# Crunch a string of words into a map
	sub crunch {
		my $self	=	shift;
		my (@map,$ic);
		my @ws 		=	split(/[\s\t]/,shift);
		for my $a (0..$#ws) {
			$ic=$self->crunched($ws[$a]);
			if(!defined $ic) {
				$self->{_CRUNCHED}->{LIST}->[$self->{_CRUNCHED}->{_LENGTH}++]=$ws[$a];
				$map[$a]=$self->{_CRUNCHED}->{_LENGTH};
			} else {
				$map[$a]=$ic;
            }
		}
		return \@map;
	}
	
	# Finds if a word has been crunched.
	# Returns undef on failure, word index for success.
	sub crunched {

BackProp.pm

	}
	
	# Alias for crunched(), above
	sub word { crunched(@_) }
	
	# Uncrunches a map (array ref) into a string of words and returns the string
	sub uncrunch {
		my $self	=	shift;
		my $map = shift;
		my ($c,$el,$x);
		foreach $el (@{$map}) {
			$c .= $self->{_CRUNCHED}->{LIST}->[$el-1].' ';
		}
		return $c;
	}
	

BackProp.pm

		my $size = $layers * $size;
		
		AI::NeuralNet::BackProp::out2 "Creating RUN and MAP systems for network...\n";
		#print "Creating RUN and MAP systems for network...\n";
		
		# Create a new runner and mapper for the network.
		$self->{RUN} = new AI::NeuralNet::BackProp::_run($self);
		$self->{MAP} = new AI::NeuralNet::BackProp::_map($self);
		
		$self->{SIZE}	=	$size;
		$self->{DIV}	=	$div;
		$self->{OUT}	=	$out;
		$self->{FLAG}	=	$flag;

BackProp.pm

	}
	
	# Used internally by new() and learn().
	# This is the sub block that actually creates
	# the connections between the synapse chains and
	# also connects the run packages and the map packages
	# to the appropriate ends of the neuron grids.
	sub initialize_group() {
		my $self	=	shift;
		my $size	=	$self->{SIZE};
		my $div		=	$self->{DIV};
		my $out		=	$self->{OUT};
		my $flag	=	$self->{FLAG};
		my $x		=	0; 
		my $y		=	0;
		
		# Reset map and run synapse counters.
		$self->{RUN}->{REGISTRATION} = $self->{MAP}->{REGISTRATION} = 0;
		
		AI::NeuralNet::BackProp::out2 "There will be $size neurons in this network group, with a division value of $div.\n";
		#print "There will be $size neurons in this network group, with a division value of $div.\n";
		

BackProp.pm  view on Meta::CPAN

				AI::NeuralNet::BackProp::out1 "\n";
			}
			AI::NeuralNet::BackProp::out1 "\n";             
		}
		
		# These next two loops connect the _run and _map packages (the IO interface) to 
		# the start and end 'layers', respectively. These are how we insert data into
		# the network and how we get data from the network. The _run and _map packages 
		# are connected to the neurons so that the neurons think that the IO packages are
		# just another neuron, sending data on. But the IO packages are special packages designed
		# with the same methods as neurons, just meant for specific IO purposes. You will
		# never need to call any of the IO packages directly. Instead, they are called whenever
		# you use the run(), map(), or learn() methods of your network.
        
    	AI::NeuralNet::BackProp::out2 "\nMapping I (_run package) connections to network...\n";
		
	    for($y=0; $y<$div; $y++) {
			$self->{_tmp_synapse} = $y;
			$self->{NET}->[$y]->register_synapse($self->{RUN});
			#$self->{NET}->[$y]->connect($self->{RUN});
		}
		
		AI::NeuralNet::BackProp::out2 "Mapping O (_map package) connections to network...\n\n";
		
		for($y=$size-$div; $y<$size; $y++) {
			$self->{_tmp_synapse} = $y;
			$self->{NET}->[$y]->connect($self->{MAP});
		}

BackProp.pm


	# When called with an array reference to a pattern, returns a reference
	# to an array associated with that pattern. See usage in documentation.
	sub run {
		my $self	 =	  shift;
		my $map		 =	  shift;
		my $t0 		 =	new Benchmark;
        $self->{RUN}->run($map);
		$self->{LAST_TIME}=timestr(timediff(new Benchmark, $t0));
        return $self->map();
	}
    
    # This automatically uncrunches a response after running it
	sub run_uc {
    	$_[0]->uncrunch(run(@_));

BackProp.pm

	sub benchmarked {
		my $self	=	shift;
		return $self->{LAST_TIME};
	}
	    
	# Used to retrieve map from last internal run operation.
	sub map {
		my $self	 =	  shift;
		$self->{MAP}->map();
	}
	
	# Forces network to learn pattern passed and give desired
	# results. See usage in POD.
	sub learn {
		my $self	=	shift;
		my $omap	=	shift;
		my $res		=	shift;
		my %args    =   @_;
		my $inc 	=	$args{inc} || 0.20;
		my $max     =   $args{max} || 1024;
		my $_mx		=	intr($max/10);

BackProp.pm

		my $error   = 	($args{error}>-1 && defined $args{error}) ? $args{error} : -1;
  		my $div		=	$self->{DIV};
		my $size	=	$self->{SIZE};
		my $out		=	$self->{OUT};
		my $divide  =	AI::NeuralNet::BackProp->intr($div/$out);
		my ($a,$b,$y,$flag,$map,$loop,$diff,$pattern,$value);
		my ($t0,$it0);
		no strict 'refs';
		
		# Take care of crunching strings passed
		$omap = $self->crunch($omap) if($omap == 0);
		$res  = $self->crunch($res)  if($res  == 0);
		
		# Fill in empty spaces at end of results matrix with a 0
		if($#{$res}<$out) {
			for my $x ($#{$res}+1..$out) {

BackProp.pm

		my $dinc	=	0.0001;
		my $cdiff	=	0;
		$diff		=	100;
		$error 		= 	($error>-1)?$error:-1;
		
		# $flag only goes high when all neurons in output map compare exactly with
		# desired result map or $max loops is reached
		#	
		while(!$flag && ($max ? $loop<$max : 1)) {
			$it0 	=	new Benchmark;
			
			# Run the map
			$self->{RUN}->run($omap);
			
			# Retrieve last mapping  and initialize a few variables.
			$map	=	$self->map();
			$y		=	$size-$div;
			$flag	=	1;
			
			# Compare the result map we just ran with the desired result map.
			$diff 	=	pdiff($map,$res);
			
			# This adjusts the increment multiplier to decrease as the loops increase
			if($_mi > $_mx) {
				$dinc *= 0.1;
				$_mi   = 0;

BackProp.pm

				last;
			}
			
			# Debugging
			AI::NeuralNet::BackProp::out4 "Difference: $diff\%\t Increment: $inc\tMax Error: $error\%\n";
			AI::NeuralNet::BackProp::out1 "\n\nMapping results from $map:\n";
			
			# This loop compares each element of the output map with the desired result map.
			# If they don't match exactly, we call weight() on the offending output neuron 
			# and tell it what it should be aiming for, and then the offending neuron will
			# try to adjust the weights of its synapses to get closer to the desired output.
			# See comments in the weight() method of AI::NeuralNet::BackProp for how this works.
			my $l=$self->{NET};
			for my $i (0..$out-1) {
				$a = $map->[$i];
				$b = $res->[$i];
				
				AI::NeuralNet::BackProp::out1 "\nmap[$i] is $a\n";
				AI::NeuralNet::BackProp::out1 "res[$i] is $b\n";
					
				for my $j (0..$divide-1) {
					if($a!=$b) {
						AI::NeuralNet::BackProp::out1 "Punishing $self->{NET}->[($i*$divide)+$j] at ",(($i*$divide)+$j)," ($i with $a) by $inc.\n";

BackProp.pm

			# Benchmark this loop.
			AI::NeuralNet::BackProp::out4 "Learning iteration $loop complete, timed at ".timestr(timediff(new Benchmark, $it0),'noc','5.3f')."\n";
		
			# Map the results from this loop.
			AI::NeuralNet::BackProp::out4 "Map: \n";
			AI::NeuralNet::BackProp::join_cols($map,$self->{col_width}) if ($AI::NeuralNet::BackProp::DEBUG);
			AI::NeuralNet::BackProp::out4 "Res: \n";
			AI::NeuralNet::BackProp::join_cols($res,$self->{col_width}) if ($AI::NeuralNet::BackProp::DEBUG);
		}
		
		# Compile benchmarking info for entire learn() process and return it, save it, and

BackProp.pm

	# Here is the real meat of this package.
	# run() does one thing: It fires values
	# into the first layer of the network.
	sub run {
		my $self	=	shift;
		my $map		=	shift;
		my $x		=	0;
		$map = $self->{PARENT}->crunch($map) if($map == 0);
		return undef if(substr($map,0,5) ne "ARRAY");
		foreach my $el (@{$map}) {
			# Catch ourself if we try to run more inputs than neurons
			return $x if($x>$self->{PARENT}->{DIV}-1);
			
			# Here we add a small amount of randomness to the network.
			# This is to keep the network from getting stuck on a 0 value internally.

BackProp.pm

	
	
1;

# Internal output class. Not to be used directly.
package AI::NeuralNet::BackProp::_map;
	
	use strict;
	
	# Dummy constructor.
	sub new {

BackProp.pm

	}
	
	# This acts just like a regular neuron by receiving
	# values from input synapses. Yet, unlike a regular
	# neuron, it doesn't weight the values, just stores
	# them to be retrieved by a call to map().
	sub input  {
		no strict 'refs';             
		my $self	=	shift;
		my $sid		=	shift;
		my $value	=	shift;

BackProp.pm

		
		AI::NeuralNet::BackProp::out1 "Received value $self->{OUTPUT}->[$sid]->{VALUE} and sid $sid, self $self.\n";
	}
	
	# Here we simply collect the value of every neuron connected to this
	# one from the layer below us and return an array ref to the final map.
	sub map {
		my $self	=	shift;
		my $size	=	$self->{PARENT}->{DIV};
		my $out		=	$self->{PARENT}->{OUT};
		my $divide  =	AI::NeuralNet::BackProp->intr($size/$out);
		my @map = ();
		my $value;
		AI::NeuralNet::BackProp::out1 "Num output neurons: $out, Input neurons: $size, Division: $divide\n";
		for(0..$out-1) {
			$value=0;
			for my $a (0..$divide-1) {
				$value += $self->{OUTPUT}->[($_*$divide)+$a]->{VALUE};
				AI::NeuralNet::BackProp::out1 "\$a is $a, index is ".(($_*$divide)+$a).", value is $self->{OUTPUT}->[($_*$divide)+$a]->{VALUE}\n";
			}
			$map[$_]	=	AI::NeuralNet::BackProp->intr($value/$divide);
			AI::NeuralNet::BackProp::out1 "Map position $_ is $map[$_] in @{[\@map]} with self set to $self.\n";
			$self->{OUTPUT}->[$_]->{FIRED}	=	0;
		}
		my $ret=\@map;
		return $self->{PARENT}->_range($ret);
	}
1;
			      
# load_pcx() wrapper package

BackProp.pm

    | \/ |
    | /\ |
    |/  \|
    O    O
     \  /
    mapper

In this diagram, each neuron is connected to one input of every
neuron in the layer below it, but there are no connections
between neurons in the same layer. Weights of the connection
are controlled by the neuron it is connected to, not the connecting

BackProp.pm

Input is fed into the network via a call like this:

	use AI::NeuralNet::BackProp;
	my $net = new AI::NeuralNet::BackProp(2,2);
	
	my @map = (0,1);
	
	my $result = $net->run(\@map);
	

Now, this call would probably not give what you want, because
the network hasn't "learned" any patterns yet. But this
illustrates the call. run() now also accepts strings as
input. See run() for more information.


run() returns a reference with $size elements (Remember $size? $size
is what you passed as the second argument to the network
constructor.) This array contains the results of the mapping. If
you ran the example exactly as shown above, $result would probably 
contain (1,1) as its elements. 

To make the network learn a new pattern, you simply call the learn
method with a sample input and the desired result, both array
references of $size length. Example:

	use AI::NeuralNet::BackProp;
	my $net = new AI::NeuralNet::BackProp(2,2);
	
	my @map = (0,1);
	my @res = (1,0);
	
	$net->learn(\@map,\@res);
	
	my $result = $net->run(\@map);

Now $result will contain (1,0), effectively flipping the input pattern
around. Obviously, the larger $size is, the longer it will take
to learn a pattern. learn() returns a string in the form of

	Learning took X loops and X wallclock seconds (X.XXX usr + X.XXX sys = X.XXX CPU).

With the X's replaced by time or loop values for that loop call. So,
to view the learning stats for every learn call, you can just:
	
	print $net->learn(\@map,\@res);
	

If you call "$net->debug(4)" with $net being the 
reference returned by the new() constructor, you will get benchmarking
information for the learn function, as well as plenty of other information output. 

BackProp.pm

	

Before you can really do anything useful with your new neural network
object, you need to teach it some patterns. See the learn() method, below.

=item $net->learn($input_map_ref, $desired_result_ref [, options ]);

This will 'teach' a network to associate a new input map with a desired result.
It will return a string containing benchmarking information. You can retrieve the
pattern index that the network stored the new input map in after learn() is complete
with the pattern() method, below.

UPDATED: You can now specify strings as inputs and outputs to learn, and they will be crunched
automatically. Example:

BackProp.pm

	# $net->learn($net->crunch('corn'), $net->crunch('cob'));

Note, the old method of calling crunch on the values still works just as well.	

UPDATED: You can now learn inputs with a 0 value. Beware though, it may not learn() a 0 value 
in the input map if you have randomness disabled. See NOTES on using a 0 value with randomness
disabled.

The first two arguments may be array refs (or now, strings), and they may be of different lengths.

Options should be written in hash form. There are three options:

BackProp.pm

	
	$net->learn_set_rand(\@data);
	


=item $net->run($input_map_ref);

UPDATE: run() will now I<automatically> crunch() a string given as the input.

This method will apply the given array ref at the input layer of the neural network, and
it will return an array ref to the output of the network.

BackProp.pm

	# my $outputs = $net->run($net->crunch('cloudy, wind is 5 MPH NW'));

See also run_uc() below.


=item $net->run_uc($input_map_ref);

This method does the same thing as this code:
	
	$net->uncrunch($net->run($input_map_ref));

All that run_uc() does is that it automatically calls uncrunch() on the output, regardless
of whether the input was crunch() -ed or not.
	

BackProp.pm

for large programs.

Level 2 ($level = 2) : A slightly-less verbose form of debugging, not as many internal 
data dumps.

Level 3 ($level = 3) : JUST prints weight mapping as weights change.

Level 4 ($level = 4) : JUST prints the benchmark info for EACH learn loop iteration, not just
learning as a whole. Also prints the percentage difference for each loop between current network
results and desired results, as well as the learning gradient ('increment').

BackProp.pm

UPDATE: Now you can use a variable instead of using qw(). Strings will be split internally.
Do not use qw() to pass strings to crunch.

This splits a string passed with /[\s\t]/ into an array ref containing unique indexes
to the words. The words are stored in an internal array and preserved across load() and save()
calls. This is designed to be used to generate unique maps suitable for passing to learn() and
run() directly. It returns an array ref.

The words are not duplicated internally. For example:

	$net->crunch("How are you?");

BackProp.pm

only once internally and the returned array ref reflects that.
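
For instance, on a fresh network (a sketch; indices are 1-based, and a repeated word reuses its index):

	my $map = $net->crunch("cat sat on the cat");   # [1, 2, 3, 4, 1]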


=item $net->uncrunch($array_ref);

Uncrunches a map (array ref) into a scalar string of words separated by ' ' and returns the
string. This is meant to be used as a counterpart to the crunch() method, above, possibly to
uncrunch() the output of a run() call. Consider the below code (also in ./examples/ex_crunch.pl):
                           
	use AI::NeuralNet::BackProp;
	my $net = AI::NeuralNet::BackProp->new(2,3);

BackProp.pm



=item $net->col_width($width);

This is useful for formatting the debugging output of Level 4 if you are learning simple
bitmaps. This will set the debugger to automatically insert a line break after that many
elements in the map output when dumping the currently run map during a learn loop.

It will return the current width when called with a 0 or undef value.


=item $net->random($rand);

This will set the randomness factor of the network. Default is 0.001. When called
with no arguments, or an undef value, it will return the current randomness value. When
called with a 0 value, it will disable randomness in the network. See NOTES on learning 
a 0 value in the input map with randomness disabled.



=item $net->load_pcx($filename);

BackProp.pm


=head1 NOTES

=item Learning 0s With Randomness Disabled

You can now use 0 values in any input maps. This is a good improvement over versions 0.40
and 0.42, where no 0s were allowed because learning would never finish
completely with a 0 in the input.

Yet with the allowance of 0s, it requires one of two factors to learn correctly. Either you
must enable randomness with $net->random(0.0001) (Any values work [other than 0], see random() ), 

BackProp.pm

AI::NeuralNet::BackProp::neuron is not designed to be created directly, as
it is used internally by AI::NeuralNet::BackProp.

=item AI::NeuralNet::BackProp::_run

=item AI::NeuralNet::BackProp::_map

These two packages, _run and _map, are used to insert data into
the network and to get data out of the network. The _run and _map packages
are connected to the neurons so that the neurons think that the IO packages are
just another neuron, sending data on. But the IO packages are special packages designed
with the same methods as neurons, just meant for specific IO purposes. You will
never need to call any of the IO packages directly. Instead, they are called whenever
you use the run() or learn() methods of your network.

AI-NeuralNet-FastSOM

examples/eigenvector_initialization.pl


	my @es = list $e;                                                # eigenvalues
#	warn "es : ".Dumper \@es;
	my @es_desc = sort { $b <=> $a } @es;                            # eigenvalues sorted desc
#	warn "desc: ".Dumper \@es_desc;
	my @es_idx  = map { _find_num ($_, \@es) } @es_desc;             # eigenvalue indices sorted by eigenvalue (desc)
#	warn "idx: ".Dumper \@es_idx;

sub _find_num {
    my $v = shift;
    my $l = shift;

AI-NeuralNet-Kohonen-Demo-RGB

RGB.pm


	use AI::NeuralNet::Kohonen::Demo::RGB;
	$_ = AI::NeuralNet::Kohonen::Demo::RGB->new(
		display_scale => 20,
		display	=> 'hex',
		map_dim	=> 39,
		epochs  => 9,
		table   => "R G B\n"
	              ."1 0 0\n"
	              ."0 1 0\n"
	              ."0 0 1",

RGB.pm


#
# Used only by &tk_train
#
sub tk_show { my $self=shift;
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			my $colour = sprintf("#%02x%02x%02x",
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[0])),
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[1])),
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[2])),
			);
			if ($self->{display} and $self->{display} eq 'hex'){
				my $xo = ($y % 2) * ($self->{display_scale}/2);
				my $yo = 0;

RGB.pm

}


=head1 METHOD train

Overrides the base class to provide TK displays of the map

=cut

sub train { my ($self,$epochs) = (shift,shift);
	my $label_txt;

	$epochs = $self->{epochs} unless defined $epochs;
	$self->{display_scale} = 10 if not defined 	$self->{display_scale};

	$self->{mw} = MainWindow->new(
		-width	=> 200+($self->{map_dim_x} * $self->{display_scale}),
		-height	=> 200+($self->{map_dim_y} * $self->{display_scale}),
	);
    my $quit_flag = 0;
    my $quit_code = sub {$quit_flag = 1};
    $self->{mw}->protocol('WM_DELETE_WINDOW' => $quit_code);

	$self->{c} = $self->{mw}->Canvas(
		-width	=> 50+($self->{map_dim_x} * $self->{display_scale}),
		-height	=> 50+($self->{map_dim_y} * $self->{display_scale}),
		-relief	=> 'ridge',
		-border => 5,
	);
	$self->{c}->pack(-side=>'top');

AI-NeuralNet-Kohonen-Visual

lib/AI/NeuralNet/Kohonen/Visual.pm

	package YourClass;
	use base "AI::NeuralNet::Kohonen::Visual";

	sub get_colour_for { my ($self,$x,$y) = (shift,shift,shift);
		# From here you return a TK colour name.
		# Get it as you please; for example, values of a 3D map:
		return sprintf("#%02x%02x%02x",
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[0])),
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[1])),
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[2])),
		);
	}

	exit;
	1;

lib/AI/NeuralNet/Kohonen/Visual.pm

And then:

	use YourClass;
	my $net = AI::NeuralNet::Kohonen::Visual->new(
		display          => 'hex',
		map_dim          => 39,
		epochs           => 19,
		neighbour_factor => 2,
		targeting        => 1,
		table            => "3
			1 0 0 red

lib/AI/NeuralNet/Kohonen/Visual.pm

			1 1 0 yellow
			1 .5 0 orange
			1 .5 1 pink",
	);
	$net->train;
	$net->plot_map;
	$net->main_loop;

	exit;


lib/AI/NeuralNet/Kohonen/Visual.pm




=head1 METHOD train

Overrides the base class to provide TK displays of the map.

=cut

sub train { my ($self,$epochs) = (shift,shift);
	$epochs = $self->{epochs} unless defined $epochs;

lib/AI/NeuralNet/Kohonen/Visual.pm


			$self->_adjust_neighbours_of($bmu,$target);

			if (exists $self->{show_training}){
				if ($self->{show_bmu}){
					$self->plot_map(bmu_x=>$bmu->[1],bmu_y=>$bmu->[2]);
				} else {
					$self->plot_map;
				}
				$self->{_label_txt} = sprintf("Epoch: %04d",$self->{t})."  "
				. "Learning: $self->{l}  "
				. sprintf("BMU: %02d,%02d",$bmu->[1],$bmu->[2])."  "
				.( exists $target->{class}? "Target: [$target->{class}]  " : "")

lib/AI/NeuralNet/Kohonen/Visual.pm

	}

	$self->{_label_txt} = "Did $self->{t} epochs: ";
	$self->{_label_txt} .= "now smoothed." if $self->{smoothing};
	$self->smooth if $self->{smooth};
	$self->plot_map if $self->{MainLoop};
	&{$self->{train_end}} if exists $self->{train_end};
	MainLoop if $self->{MainLoop};

	return 1;
}

lib/AI/NeuralNet/Kohonen/Visual.pm


Currently it only operates on the first three elements
of a weight vector, turning them into RGB values.

It returns a TK colour for a node at position C<x>,C<y> in the
C<map> parameter.

Accepts: C<x> and C<y> co-ordinates in the map.

=cut

sub get_colour_for { my ($self,$x,$y) = (shift,shift,shift);
	my $_0 = $self->{map}->[$x]->[$y]->{weight}->[0];
	$_0 = $self->{missing_colour} || 0 if $_0 eq $self->{missing_mask};
	my $_1 = $self->{map}->[$x]->[$y]->{weight}->[1];
	$_1 = $self->{missing_colour} || 0 if $_1 eq $self->{missing_mask};
	my $_2 = $self->{map}->[$x]->[$y]->{weight}->[2];
	$_2 = $self->{missing_colour} || 0 if $_2 eq $self->{missing_mask};
	return sprintf("#%02x%02x%02x",
		(int (255 * $_0)),
		(int (255 * $_1)),
		(int (255 * $_2)),

lib/AI/NeuralNet/Kohonen/Visual.pm

}


=head1 METHOD prepare_display

Deprecated: see L<METHOD create_empty_map>.

=cut

sub prepare_display {
	return $_[0]->create_empty_map;
}

=head1 METHOD create_empty_map

Sets up a TK C<MainWindow> and C<Canvas> to
act as an empty map.

=cut

sub create_empty_map { my $self = shift;
	my ($w,$h);
	if ($self->{display} and $self->{display} eq 'hex'){
		$w = ($self->{map_dim_x}+1) * ($self->{display_scale}+2);
		$h = ($self->{map_dim_y}+1) * ($self->{display_scale}+2);
	} else {
		$w = ($self->{map_dim_x}+1) * ($self->{display_scale});
		$h = ($self->{map_dim_y}+1) * ($self->{display_scale});
	}
	$self->{_mw} = MainWindow->new(
		-width	=> $w + 20,
		-height	=> $h + 20,
	);

lib/AI/NeuralNet/Kohonen/Visual.pm

	$self->{_label}->pack(-side=>'top');
	return 1;
}


=head1 METHOD plot_map

Plots the map on the existing canvas. Arguments are supplied
in a hash with the following keys as options:

The values of C<bmu_x> and C<bmu_y> represent the I<x> and I<y>
co-ordinates of the unit to highlight, using the value in
C<hicol> as the highlight colour. If no C<hicol> is provided,

lib/AI/NeuralNet/Kohonen/Visual.pm


See also L<METHOD get_colour_for>.

=cut

sub plot_map { my ($self,$args) = (shift,{@_});
	$self->{plotted} = 1;
	# MW may have been destroyed
	$self->prepare_display if not defined $self->{_mw};
	my $yo = 5+($self->{display_scale}/2);
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			my $colour;
			if ($args->{bmu_x} and $args->{bmu_x}==$x and $args->{bmu_y}==$y){
				$colour = $args->{hicol} || 'red';
			} else {
				$colour = $self->get_colour_for ($x,$y);

lib/AI/NeuralNet/Kohonen/Visual.pm

			}

			# Label
			if ($self->{label_all}){
				my $txt;
				unless ( $txt = $self->{map}->[$x]->[$y]->{class}){
					$txt = "";
				}
				$self->label_map($x,$y,"+$txt");
			}

		}
	}
	if ($self->{label_bmu}){
		my $txt;
		unless ( $txt = $self->{map}->[$args->{bmu_x}]->[$args->{bmu_y}]->{class}){
			$txt = "";
		}
		$self->label_map(
			$args->{bmu_x}, $args->{bmu_y}, "+$txt"
		);
	}

	$self->{_canvas}->update;
	$self->{_label}->update;

	return 1;
}

=head1 METHOD label_map

Put a text label on the map for the node at the I<x,y> co-ordinates
supplied in the first two parameters, using the text supplied in the
third.

Very naive: no attempt is made to check that the text will appear on the map.

=cut

sub label_map { my ($self,$x,$y,$t) = (shift,shift,shift,shift);
	$self->{_canvas}->createText(
		$x*$self->{display_scale}+($self->{display_scale}),
		$y*$self->{display_scale}+($self->{display_scale}),
		-text	=> $t,
		-anchor => 'w',

lib/AI/NeuralNet/Kohonen/Visual.pm

Calls TK's C<MainLoop> to keep a window open until the user closes it.

=cut

sub main_loop { my $self = shift;
	$self->plot_map unless $self->{plotted};
	MainLoop;
}



AI-NeuralNet-Kohonen

lib/AI/NeuralNet/Kohonen.pm

use AI::NeuralNet::Kohonen::Input;

=head1 SYNOPSIS

	$_ = AI::NeuralNet::Kohonen->new(
		map_dim_x => 39,
		map_dim_y => 19,
		epochs    => 100,
		table     =>
	"3
	1 0 0 red
	0 1 0 yellow

lib/AI/NeuralNet/Kohonen.pm


An illustrative implementation of Kohonen's Self-organising Feature Maps (SOMs)
in Perl. It's not fast - it's illustrative. In fact, it's slow: but it is illustrative....

Have a look at L<AI::NeuralNet::Kohonen::Demo::RGB> for an example of
visualisation of the map.

I'll maybe add some more text here later.

=head1 DEPENDENCIES

lib/AI/NeuralNet/Kohonen.pm


=item input_names

A name for each dimension of the input vectors.

=item map_dim_x

=item map_dim_y

The dimensions of the feature map to create - defaults to a toy 19.
(note: this is Perl indexing, starting at zero).

=item epochs

Number of epochs to run for (see L<METHOD train>).

lib/AI/NeuralNet/Kohonen.pm

is applied (see L</METHOD smooth>).

=item neighbour_factor

When working out the size of the neighbourhood of influence,
the average of the map's dimensions is divided by this variable
before the exponential function is applied: the default value is 2.5,
but you may wish to use 2 or 4.

=item missing_mask

lib/AI/NeuralNet/Kohonen.pm


=over 4

=item time_constant

The number of iterations (epochs) to be completed, over the log of the map radius.

=item t

The current epoch, or moment in time.

=item l

The current learning rate.

=item map_dim_a

Average of the map dimensions.

=back

=cut

lib/AI/NeuralNet/Kohonen.pm

	if (not defined $self->{input}){
		cluck "No {input} supplied!";
		return undef;
	}

	$self->{map_dim_x}			= 19 unless defined $self->{map_dim_x};
	$self->{map_dim_y}			= 19 unless defined $self->{map_dim_y};
	# Legacy from...yesterday
	if ($self->{map_dim}){
		$self->{map_dim_x} 		= $self->{map_dim_y} = $self->{map_dim}
	}
	if (not defined $self->{map_dim_x} or $self->{map_dim_x}==0
	 or not defined $self->{map_dim_y} or $self->{map_dim_y}==0){
		 confess "No map dimensions in the input!";
	 }
	if ($self->{map_dim_x}>$self->{map_dim_y}){
		$self->{map_dim_a} 		= $self->{map_dim_y} + (($self->{map_dim_x}-$self->{map_dim_y})/2)
	} else {
		$self->{map_dim_a} 		= $self->{map_dim_x} + (($self->{map_dim_y}-$self->{map_dim_x})/2)
	}
	$self->{neighbour_factor}	= 2.5 unless $self->{neighbour_factor};
	$self->{epochs}				= 99 unless defined $self->{epochs};
	$self->{epochs}				= 1 if $self->{epochs}<1;
	$self->{time_constant}		= $self->{epochs} / log($self->{map_dim_a}) unless $self->{time_constant};	# to base 10?
	$self->{learning_rate}		= 0.5 unless $self->{learning_rate};
	$self->{l}					= $self->{learning_rate};
	if (not $self->{weight_dim}){
		cluck "{weight_dim} not set";
		return undef;
	}
	$self->randomise_map;
	return $self;
}




=head1 METHOD randomise_map

Populates the C<map> with nodes that contain random real numbers.

See L<AI::NeuralNet::Kohonen::Node/CONSTRUCTOR new>.

=cut

sub randomise_map { my $self=shift;
	confess "{weight_dim} not set" unless $self->{weight_dim};
	confess "{map_dim_x} not set" unless $self->{map_dim_x};
	confess "{map_dim_y} not set" unless $self->{map_dim_y};
	for my $x (0..$self->{map_dim_x}){
		$self->{map}->[$x] = [];
		for my $y (0..$self->{map_dim_y}){
			$self->{map}->[$x]->[$y] = new AI::NeuralNet::Kohonen::Node(
				dim => $self->{weight_dim},
				missing_mask => $self->{missing_mask},
			);
		}
	}
}


=head1 METHOD clear_map

As L<METHOD randomise_map> but sets all C<map> nodes to
either the value supplied as the only parameter, or C<undef>.

=cut

sub clear_map { my $self=shift;
	confess "{weight_dim} not set" unless $self->{weight_dim};
	confess "{map_dim_x} not set" unless $self->{map_dim_x};
	confess "{map_dim_y} not set" unless $self->{map_dim_y};
	my $val = shift || $self->{missing_mask};
	my $w = [];
	foreach (0..$self->{weight_dim}){
		push @$w, $val;
	}
	for my $x (0..$self->{map_dim_x}){
		$self->{map}->[$x] = [];
		for my $y (0..$self->{map_dim_y}){
			$self->{map}->[$x]->[$y] = new AI::NeuralNet::Kohonen::Node(
				weight		 => $w,
				dim 		 => $self->{weight_dim},
				missing_mask => $self->{missing_mask},
			);
		}

lib/AI/NeuralNet/Kohonen.pm

}


=head1 METHOD find_bmu

For a specific target, finds the Best Matching Unit in the map
and returns its x/y index.

Accepts: a reference to an array that is the target.

Returns: a reference to an array that is the BMU (and should

lib/AI/NeuralNet/Kohonen.pm


euclidean distance from the supplied target

=item 1, 2

I<x> and I<y> co-ordinate in the map

=back
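
A hypothetical call site, unpacking the returned triplet:

	my ($distance, $bmu_x, $bmu_y) = @{ $net->find_bmu($target) };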

See L</METHOD get_weight_at>,
and L<AI::NeuralNet::Kohonen::Node/distance_from>,

lib/AI/NeuralNet/Kohonen.pm

=cut


sub find_bmu { my ($self,$target) = (shift,shift);
	my $closest = [];	# [value, x,y] value and co-ords of closest match
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			my $distance = $self->{map}->[$x]->[$y]->distance_from( $target );
			$closest = [$distance,0,0] if $x==0 and $y==0;
			$closest = [$distance,$x,$y] if $distance < $closest->[0];
		}
	}
	return $closest;

lib/AI/NeuralNet/Kohonen.pm

C<undef> on failure.

=cut

sub get_weight_at { my ($self,$x,$y) = (shift,shift,shift);
	return undef if $x<0 or $y<0 or $x>$self->{map_dim_x} or $y>$self->{map_dim_y};
	return $self->{map}->[$x]->[$y]->{weight};
}
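
A hypothetical call site for the bounds-checked lookup above:

	my $weight = $net->get_weight_at(3, 4);   # weight arrayref, or undef if out of range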



=head1 METHOD get_results

lib/AI/NeuralNet/Kohonen.pm

#	}
	return wantarray? @{$self->{results}} : $self->{results};
}


=head1 METHOD map_results

Clears the C<map> and fills it with the results.

The sole parameter is passed to L<METHOD clear_map>.
L<METHOD get_results> is then called, and the results
returned fed into the object field C<map>.

This may change, as it seems misleading to re-use that field.

=cut

sub map_results { my $self=shift;

}
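
# map_results() is currently a stub. A sketch of the behaviour its
# description implies -- assuming each result entry is of the form
# [distance, x, y, class], as find_bmu() and _adjust_neighbours_of()
# suggest (an assumption, not the module's confirmed layout):
#
#	$self->clear_map(shift);
#	foreach my $r (@{ $self->get_results }){
#		$self->{map}->[ $r->[1] ]->[ $r->[2] ]->{class} = $r->[3];
#	}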


=head1 METHOD dump

Prints the current map weights to C<STDOUT>, one block of rows
per weight dimension.

=cut

sub dump { my $self=shift;
	print "    ";
	for my $y (0..$self->{map_dim_y}){	# column headings are the y co-ordinates
		printf ("  %02d ",$y);
	}
	print"\n","-"x107,"\n";
	for my $x (0..$self->{map_dim_x}){
		for my $w (0..$self->{weight_dim}){
			printf ("%02d | ",$x);
			for my $y (0..$self->{map_dim_y}){
				printf("%.2f ", $self->{map}->[$x]->[$y]->{weight}->[$w]);
			}
			print "\n";
		}
		print "\n";
	}
}

=head1 METHOD smooth

Perform gaussian smoothing upon the map.

Accepts: the length of the side of the square gaussian mask to apply.
If not supplied, uses the value in the field C<smoothing>; if that is
empty, uses the square root of the average of the map dimensions
(C<map_dim_a>).

Returns: a true value.

=cut

sub smooth { my ($self,$smooth) = (shift,shift);
	$smooth = $self->{smoothing} if not $smooth and defined $self->{smoothing};
	# Fall back to the square root of the average map dimension
	$smooth = int( sqrt $self->{map_dim_a} ) unless $smooth;
	return unless $smooth;
	my $mask = _make_gaussian_mask($smooth);

	# For every weight at every point
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			for my $w (0..$self->{weight_dim}){
				# Apply the mask (NB: as written this scales the
				# node's own weight by each mask entry in turn,
				# rather than convolving neighbouring nodes)
				for my $mx (0..$smooth){
					for my $my (0..$smooth){
						$self->{map}->[$x]->[$y]->{weight}->[$w] *= $mask->[$mx]->[$my];
					}
				}
			}
		}
	}

	return 1;
}
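
# The private helper _make_gaussian_mask() is referenced above but its
# body is elided in this excerpt. A self-contained sketch of one plausible
# implementation (an assumption, not the module's actual code): an
# (N+1)x(N+1) grid of exp( -d^2 / (2*sigma^2) ) values centred on the mask.
sub _make_gaussian_mask_sketch {
	my $size  = shift;
	my $c     = $size / 2;			# centre of the mask
	my $sigma = ($size / 4) || 1;	# arbitrary spread; widens with the mask
	my $mask  = [];
	for my $x (0..$size){
		for my $y (0..$size){
			my $d2 = ($x - $c)**2 + ($y - $c)**2;
			$mask->[$x]->[$y] = exp( - $d2 / (2 * $sigma**2) );
		}
	}
	return $mask;
}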


=head1 METHOD save_file

Saves the map in I<SOM_PAK> format (see L<METHOD load_input>)
to the path specified in the first argument.

Returns C<undef> on failure, a true value on success.

=cut

# (sub opening reconstructed -- the original is elided in this excerpt)
sub save_file { my ($self,$path) = (shift,shift);
	unless (open OUT, ">$path"){
		warn "Could not open <$path> for writing: $!";
		return undef;
	}
	#- Dimensionality of the vectors (integer, compulsory).
	print OUT ($self->{weight_dim}+1)." ";	# {weight_dim} is a Perl max-index
	#- Topology type, either hexa or rect (string, optional, case-sensitive).
	if (not defined $self->{display}){
		print OUT "rect ";
	} else { # $self->{display} eq 'hex'
		print OUT "hexa ";
	}
	#- Map dimension in x-direction (integer, optional).
	print OUT $self->{map_dim_x}." ";
	#- Map dimension in y-direction (integer, optional).
	print OUT $self->{map_dim_y}." ";
	#- Neighborhood type, either bubble or gaussian (string, optional, case-sensitive).
	print OUT "gaussian ";
	# End of header
	print OUT "\n";


	} elsif ($display eq 'rect'){
		$self->{display} = undef;
	}
	#- Map dimension in x-direction (integer, optional).
	$_				      = shift @specs;
	$self->{map_dim_x}    = $_ if defined $_;
	#- Map dimension in y-direction (integer, optional).
	$_				      = shift @specs;
	$self->{map_dim_y}    = $_ if defined $_;
	#- Neighborhood type, either bubble or gaussian (string, optional, case-sensitive).
	# not implemented

	# Format input data
	foreach (@_){



=head1 PRIVATE METHOD _adjust_neighbours_of

Accepts: a reference to an array containing
the distance of the BMU from the target, as well
as the x and y co-ordinates of the BMU in the map;
a reference to the target, which is an
C<AI::NeuralNet::Kohonen::Input> object.

Returns: true.

=head2 FINDING THE NEIGHBOURS OF THE BMU

The radius of the BMU's neighbourhood shrinks exponentially
over time:

	                        (      t   )
	sigma(t) = sigma(0) exp ( - ------ )
	                        (   lambda )

Where C<sigma> is the width of the map at any stage
in time (C<t>), and C<lambda> is a time constant.

Lambda is our field C<time_constant>.

The map radius is naturally just half the map width.
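
In code, this is how C<_adjust_neighbours_of> (below) computes
the neighbourhood radius for the current epoch C<t>, with
C<sigma(0)> being C<map_dim_a / neighbour_factor>:

	my $neighbour_radius = int (
		($self->{map_dim_a}/$self->{neighbour_factor})
		* exp(- $self->{t} / $self->{time_constant})
	);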

=head2 ADJUSTING THE NEIGHBOURS OF THE BMU

	W(t+1) = W(t) + THETA(t) L(t)( V(t)-W(t) )

Where C<W> is a node's weight vector, C<V> is the target's value
vector, C<L(t)> is the current learning rate, and C<THETA(t)> is
the distance-based falloff provided by
L<AI::NeuralNet::Kohonen::Node/distance_effect>.

=cut

sub _adjust_neighbours_of { my ($self,$bmu,$target) = (shift,shift,shift);
	my $neighbour_radius = int (
		($self->{map_dim_a}/$self->{neighbour_factor}) * exp(- $self->{t} / $self->{time_constant})
	);

	# Distance from co-ord vector (0,0) as integer
	# Basically map_width * y  +  x
	my $centre = ($self->{map_dim_a}*$bmu->[2])+$bmu->[1];
	# Set the class of the BMU
	$self->{map}->[ $bmu->[1] ]->[ $bmu->[2] ]->{class} = $target->{class};

	for my $x ($bmu->[1]-$neighbour_radius .. $bmu->[1]+$neighbour_radius){
		next if $x<0 or $x>$self->{map_dim_x};		# Ignore those not mappable
		for my $y ($bmu->[2]-$neighbour_radius .. $bmu->[2]+$neighbour_radius){
			next if $y<0 or $y>$self->{map_dim_y};	# Ignore those not mappable
			# Skip node if it is out of the circle of influence
			next if (
				(($bmu->[1] - $x) * ($bmu->[1] - $x)) + (($bmu->[2] - $y) * ($bmu->[2] - $y))
			) > ($neighbour_radius * $neighbour_radius);

			# Adjust the weight
			for my $w (0..$self->{weight_dim}){
				next if $target->{values}->[$w] eq $self->{map}->[$x]->[$y]->{missing_mask};
				my $weight = \$self->{map}->[$x]->[$y]->{weight}->[$w];
				$$weight = $$weight + (
					$self->{map}->[$x]->[$y]->distance_effect($bmu->[0], $neighbour_radius)
					* ( $self->{l} * ($target->{values}->[$w] - $$weight) )
				);
			}
		}
	}

	return 1;
}

# (sub opening reconstructed -- the original is elided in this excerpt)
sub quantise_error { my ($self,$targets) = (shift,shift);
	my $qerror = 0;
	# Receives an array of ONE element;
	# it should be an array of an array of elements
	my @bmu = $self->get_results($targets);

	# Check input and output dims are the same
	if ($#{$self->{map}->[0]->[1]->{weight}} != $targets->[0]->{dim}){
		confess "target input and map dimensions differ";
	}

	for my $i (0..$#bmu){
		foreach my $w (0..$self->{weight_dim}){
			$qerror += $targets->[$i]->{values}->[$w]
			- $self->{map}->[$bmu[$i]->[1]]->[$bmu[$i]->[2]]->{weight}->[$w];
		}
	}
	$qerror /= scalar @$targets;
	return $qerror;
}
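
# Worked example of the signed accumulation above: a target of (1, 0)
# against a BMU weight of (0.75, 0.25) contributes
# (1 - 0.75) + (0 - 0.25) = 0, so signed differences can cancel out.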


	s/#.*$//g;
	return undef if /^$/ or not defined $self->{weight_dim};
	my @i = split /\s+/,$_;
	return undef if $#i < $self->{weight_dim}; # catch bad lines
	# 'x' in files signifies unknown: we prefer undef?
#	@i[0..$self->{weight_dim}] = map{
#		$_ eq 'x'? undef:$_
#	} @i[0..$self->{weight_dim}];
	my %args = (
		dim 	=> $self->{weight_dim},
		values	=> [ @i[0..$self->{weight_dim}] ],


input vector were repeated 3 times during training (i.e., as if
the same vector had been stored 2 extra times in the data file).

=item -

Fixed-point qualifier: e.g. fixed=2,5.  The map unit defined by
the fixed-point coordinates (x = 2; y = 5) is selected instead of
the best-matching unit for training. (See below for the definition
of coordinates over the map.) If several inputs are forced to
known locations, the map takes on the desired orientation.

=back

=back



See L<AI::NeuralNet::Kohonen::Node/distance_from>;
L<AI::NeuralNet::Kohonen::Demo::RGB>.

L<The documentation for C<SOM_PAK>|ftp://cochlea.hut.fi/pub/som_pak>,
which has lots of advice on map building that may or may not be applicable yet.

A very nice explanation of Kohonen's algorithm:
L<AI-Junkie SOM tutorial part 1|http://www.fup.btinternet.co.uk/aijunkie/som1.html>

=head1 AUTHOR AND COPYRIGHT



AI-NeuralNet-Mesh


Mesh.pm  view on Meta::CPAN

			return undef;
		}
		return PCX::Loader->new($self,$file);
	}	
	
	# Crunch a string of words into a map
	sub crunch {
		my $self	=	shift;
		my @ws 		=	split(/[\s\t]/,shift);
		my (@map,$ic);
		for my $a (0..$#ws) {
			$ic=$self->crunched($ws[$a]);
			if(!defined $ic) {
				$self->{_crunched}->{list}->[$self->{_crunched}->{_length}++]=$ws[$a];
				$map[$a]=$self->{_crunched}->{_length};
			} else {
				$map[$a]=$ic;
            }
		}
		return \@map;
	}
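	
	# NB: the indices handed out above are 1-based -- each new word is
	# stored at the old {_length}, which is incremented before being
	# recorded in @map -- and uncrunch() below compensates with [$el-1].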
	
	# Finds if a word has been crunched.
	# Returns undef on failure, word index for success.
	sub crunched {

		# (body reconstructed from crunch()/uncrunch() -- elided in this excerpt)
		my $self = shift;
		my $word = shift;
		for my $a (0..$self->{_crunched}->{_length}-1) {
			# indices handed out by crunch() are 1-based
			return $a+1 if $self->{_crunched}->{list}->[$a] eq $word;
		}
		return undef;
	}
	
	# Alias for crunched(), above
	sub word { crunched(@_) }
	
	# Uncrunches a map (array ref) into an array of words (not an array ref) 
	# and returns array
	sub uncrunch {
		my $self	=	shift;
		my $map = shift;
		my ($c,$el) = ('');	# $c collects the words
		foreach $el (@{$map}) {
			$c .= $self->{_crunched}->{list}->[$el-1].' ';
		}
		return $c;
	}
	


	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;
		my $a		=	shift;
		my $b		=	shift;
		my $x;
		foreach my $el (@{$map}) { 
			my $str = ((int($el))?$a:$b);
			$str=$el."\0" if(!$a);
			print $str;	$x++;
			if($x>$break-1) { print "\n"; $x=0;	}
		}

	}


	



=item $net->learn($input_map_ref, $desired_result_ref [, options ]);

NOTE: learn_set() now has increment-degrading turned OFF by default. See note
on the degrade flag, below.

This will 'teach' a network to associate a new input map with a desired
result. It will return a string containing benchmarking information.

You can also specify strings as inputs and outputs to learn, and they will be
crunched automatically. Example:


	$net->learn_set(\@data);
	
Same effect as above, but not the same data (obviously).


=item $net->run($input_map_ref);

This method will apply the given array ref at the input layer of the neural network, and
it will return an array ref to the output of the network. run() will now automatically crunch()
a string given as an input (see the crunch() method for info on crunching).


	

See also run_uc() and run_set() below.
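
A usage sketch (the two-input network and the sample data are
assumptions, not part of the module):

	my $out = $net->run([ 1, 0 ]);	# array ref in, array ref out
	print "got: @$out\n";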


=item $net->run_uc($input_map_ref);

This method does the same thing as this code:
	
	$net->uncrunch($net->run($input_map_ref));

All run_uc() does is automatically call uncrunch() on the output, regardless
of whether the input was crunch()-ed or not.
	



=item $net->crunch($string);

This splits the string passed to it on /[\s\t]/ into an array ref containing unique indexes
to the words. The words are stored in an internal array and preserved across load() and save()
calls. This is designed to be used to generate unique maps suitable for passing to learn() and
run() directly. It returns an array ref.

The words are not duplicated internally. For example:

	$net->crunch("How are you?");

Each word in the string is stored
only once internally, and the returned array ref reflects that.
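
A sketch of the resulting indices (the exact values are an assumption
based on the crunch() code, which assigns indices in order of first
appearance, starting at 1):

	my $map  = $net->crunch("How are you?");	# e.g. [1, 2, 3]
	my $map2 = $net->crunch("you? are you?");	# re-used: [3, 2, 3]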


=item $net->uncrunch($array_ref);

Uncrunches a map (array ref) into a scalar string of words separated by ' ' and returns the
string. This is meant to be used as a counterpart to the crunch() method, above, possibly to
uncrunch() the output of a run() call. Consider the below code (also in ./examples/ex1.pl):
                           
	use AI::NeuralNet::Mesh;
	my $net = AI::NeuralNet::Mesh->new(2,3);
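
A minimal sketch of the crunch/run/uncrunch round trip (the phrases
are hypothetical, not the actual F<ex1.pl> contents):

	$net->learn('I love chips.', 'That is junk food!');
	print $net->uncrunch( $net->run('I love chips.') ), "\n";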




=item $net->col_width($width);

This is useful for formatting the debugging output of Level 4 if you are learning simple
bitmaps. This will set the debugger to automatically insert a line break after that many
elements in the map output when dumping the currently-run map during a learn loop.

It will return the current width when called with a 0 or undef value.

The column width is preserved across load() and save() calls.



AI-NeuralNet-SOM


examples/eigenvector_initialization.pl  view on Meta::CPAN


	my @es = list $e;                                                # eigenvalues
#	warn "es : ".Dumper \@es;
	my @es_desc = sort { $b <=> $a } @es;                            # eigenvalues sorted desc
#	warn "desc: ".Dumper \@es_desc;
	my @es_idx  = map { _find_num ($_, \@es) } @es_desc;             # eigenvalue indices sorted by eigenvalue (desc)
#	warn "idx: ".Dumper \@es_idx;

sub _find_num {
    my $v = shift;
    my $l = shift;
    # return the index of the first entry in @$l that equals $v
    # (body reconstructed -- elided in this excerpt)
    for my $i (0..$#$l) {
        return $i if $l->[$i] == $v;
    }
    return undef;
}

