Result:
found more than 333 distributions - search limited to the first 2001 files matching your query (run in 0.852)


AI-ExpertSystem-Advanced


inc/Module/Install.pm  view on Meta::CPAN

# The load order for Module::Install is a bit magic.
# It goes something like this...
#
# IF ( host has Module::Install installed, creating author mode ) {
#     1. Makefile.PL calls "use inc::Module::Install"
#     2. $INC{inc/Module/Install.pm} set to installed version of inc::Module::Install
#     3. The installed version of inc::Module::Install loads
#     4. inc::Module::Install calls "require Module::Install"
#     5. The ./inc/ version of Module::Install loads
# } ELSE {
#     1. Makefile.PL calls "use inc::Module::Install"
#     2. $INC{inc/Module/Install.pm} set to ./inc/ version of Module::Install
#     3. The ./inc/ version of Module::Install loads
# }

use 5.005;
use strict 'vars';
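
A minimal Makefile.PL that exercises this load order might look like the
following sketch (the distribution name and prerequisite are placeholders):

    use inc::Module::Install;

    name     'My-Module';
    all_from 'lib/My/Module.pm';
    requires 'Some::Prereq' => '0.01';

    WriteAll;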

inc/Module/Install.pm  view on Meta::CPAN





# Whether or not inc::Module::Install is actually loaded, the
# $INC{inc/Module/Install.pm} entry will still get set as long as
# the caller loaded this module in the documented manner.
# If not set, the caller may NOT have loaded the bundled version, and thus
# they may not have a MI version that works with the Makefile.PL. This would
# result in false errors or unexpected behaviour. And we don't want that.
my $file = join( '/', 'inc', split /::/, __PACKAGE__ ) . '.pm';
unless ( $INC{$file} ) { die <<"END_DIE" }



AI-ExpertSystem-Simple


lib/AI/ExpertSystem/Simple.pm  view on Meta::CPAN

	$self->{_number_of_questions} = 0;

	return bless $self, $class;
}

sub reset {
	my ($self) = @_;

	die "Simple->reset() takes no arguments" if scalar(@_) != 1;

	foreach my $name (keys %{$self->{_rules}}) {
		$self->{_rules}->{$name}->reset();
	}

	foreach my $name (keys %{$self->{_knowledge}}) {
		$self->{_knowledge}->{$name}->reset();
	}

	$self->{_ask_about} = undef;
	$self->{_told_about} = undef;
	$self->{_log} = [];

lib/AI/ExpertSystem/Simple.pm  view on Meta::CPAN


	if(!defined($self->{_knowledge}->{$attribute})) {
		$self->{_number_of_attributes}++;
		$self->{_knowledge}->{$attribute} = AI::ExpertSystem::Simple::Knowledge->new($attribute);
	}
	$self->{_knowledge}->{$attribute}->set_question($text, @responses);

	eval { $t->purge(); }
}

sub process {

lib/AI/ExpertSystem/Simple.pm  view on Meta::CPAN


	die "Simple->process() takes no arguments" if scalar(@_) != 1;

	my $n = $self->{_goal}->name();

	if($self->{_knowledge}->{$n}->is_value_set()) {
		return 'finished';
	}

	if($self->{_ask_about}) {
		my %answers = ();

		$answers{$self->{_ask_about}}->{value} = $self->{_told_about};
		$answers{$self->{_ask_about}}->{setter} = '';

		$self->{_ask_about} = undef;
		$self->{_told_about} = undef;

		while(%answers) {
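			# Propagate facts breadth-first: each pass applies the facts set
			# in the previous round to every active rule, and rules that
			# complete contribute their actions as the next round of answers.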

lib/AI/ExpertSystem/Simple.pm  view on Meta::CPAN

			%answers = ();

			foreach my $answer (keys(%old_answers)) {
				my $n = $answer;
				my $v = $old_answers{$answer}->{value};
				my $s = $old_answers{$answer}->{setter};

				$self->_add_to_log( "Setting '$n' to '$v'" );

				$self->{_knowledge}->{$n}->set_value($v,$s);

				foreach my $key (keys(%{$self->{_rules}})) {
					if($self->{_rules}->{$key}->state() eq 'active') {
						my $state = $self->{_rules}->{$key}->given($n, $v);
						if($state eq 'completed') {
							$self->_add_to_log( "Rule '$key' has completed" );
							my %y = $self->{_rules}->{$key}->actions();
							foreach my $k (keys(%y)) {
								$self->_add_to_log( "Rule '$key' is setting '$k' to '$y{$k}'" );
								$answers{$k}->{value} = $y{$k};
								$answers{$k}->{setter} = $key;
							}
						} elsif($state eq 'invalid') {
							$self->_add_to_log( "Rule '$key' is now inactive" );
						}
					}

lib/AI/ExpertSystem/Simple.pm  view on Meta::CPAN

	my ($self) = @_;

	die "Simple->explain() takes no arguments" if scalar(@_) != 1;

	my $name  = $self->{_goal}->name();
	my $rule  = $self->{_knowledge}->{$name}->get_setter();
	my $value = $self->{_knowledge}->{$name}->get_value();

	my $x = "The goal '$name' was set to '$value' by " . ($rule ? "rule '$rule'" : 'asking a question' );
	$self->_add_to_log( $x );

	my @processed_rules;
	push( @processed_rules, $rule ) if $rule;

lib/AI/ExpertSystem/Simple.pm  view on Meta::CPAN

	my @check_these_rules = ();

	my %conditions = $self->{_rules}->{$rule}->conditions();
	foreach my $name (sort keys %conditions) {
		my $value = $conditions{$name};
		my $setter = $self->{_knowledge}->{$name}->get_setter();

		my $x = "$depth Condition '$name' was set to '$value' by " . ($setter ? "rule '$setter'" : 'asking a question' );
		$self->_add_to_log( $x );

		if($setter) {
			unless($dont_do_these{$setter}) {
				$dont_do_these{$setter} = 1;
				push( @check_these_rules, $setter );
			}
		}
	}

	my %actions = $self->{_rules}->{$rule}->actions();
	foreach my $name (sort keys %actions) {
		my $value = $actions{$name};

		my $x = "$depth Action set '$name' to '$value'";
		$self->_add_to_log( $x );
	}

	@processed_rules = keys %dont_do_these;

lib/AI/ExpertSystem/Simple.pm  view on Meta::CPAN


=head2 Public methods

=over 4

=item reset( )

Resets the system to its initial state so that a new consultation can be run

=item load( FILENAME )

This method takes the FILENAME of an XML knowledgebase and attempts to parse it to set up the data structures 
required for a consultation.

=item process( )

Once the knowledgebase is loaded the consultation is run by repeatedly calling this method.
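
A typical consultation loop might look like this (a sketch; the
knowledgebase filename is a placeholder and the first valid response is
chosen blindly):

	my $simple = AI::ExpertSystem::Simple->new();
	$simple->load('knowledge.xml');

	while ((my $state = $simple->process()) ne 'finished') {
		if ($state eq 'question') {
			my ($question, @responses) = $simple->get_question();
			# ... present $question and @responses to the user ...
			$simple->answer($responses[0]);
		}
	}
	print $simple->get_answer(), "\n";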

lib/AI/ExpertSystem/Simple.pm  view on Meta::CPAN

If the process( ) method has returned "question" then this method will return the question to ask the user 
and a list of valid responses.

=item answer( VALUE )

The user has been presented with the question from the get_question( ) method along with a set of 
valid responses; the user's selection is passed back to the system via this method.

=item get_answer( )

If the process( ) method has returned "finished" then the answer to the user's query will be 

lib/AI/ExpertSystem/Simple.pm  view on Meta::CPAN


A private method to add a message to the log.

=item _explain_this

A private method to explain how a single attribute was set.

=back

=head1 ENVIRONMENT

lib/AI/ExpertSystem/Simple.pm  view on Meta::CPAN

=item Simple->new() takes no arguments

When the constructor is initialised it requires no arguments. This message is given if 
some arguments were supplied.

=item Simple->reset() takes no arguments

When the method is called it requires no arguments. This message is given if 
some arguments were supplied.

=item Simple->load() takes 1 argument



AI-FANN-Evolving


lib/AI/FANN/Evolving.pm  view on Meta::CPAN

	)
}

=item defaults

Getter/setter to influence default ANN configuration
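
For example (a sketch; the keys and the constant name are taken from this
module's documentation, and the values are illustrative):

    AI::FANN::Evolving->defaults(
        'error'               => 0.001,
        'epochs'              => 10000,
        'activation_function' => 'FANN_SIGMOID_SYMMETRIC',
    );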

=cut

sub defaults {
	my $self = shift;
	my %args = @_;
	for my $key ( keys %args ) {
		$log->info("setting $key to $args{$key}");
		if ( $key eq 'activation_function' ) {
			$args{$key} = $constant{$args{$key}};
		}
		$default{$key} = $args{$key};
	}

lib/AI/FANN/Evolving.pm  view on Meta::CPAN

sub train {
	my ( $self, $data ) = @_;
	if ( $self->train_type eq 'cascade' ) {
		$log->debug("cascade training");
	
		# set learning curve
		$self->cascade_activation_functions( $self->activation_function );
		
		# train
		$self->{'ann'}->cascadetrain_on_data(
			$data,

lib/AI/FANN/Evolving.pm  view on Meta::CPAN

		);
	}
	else {
		$log->debug("normal training");
	
		# set learning curves
		$self->hidden_activation_function( $self->activation_function );
		$self->output_activation_function( $self->activation_function );
		
		# train
		$self->{'ann'}->train_on_data(

lib/AI/FANN/Evolving.pm  view on Meta::CPAN


=cut

=item error

Getter/setter for the error rate. Default is 0.0001

=cut

sub error {
	my $self = shift;
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting error threshold to $value");
		return $self->{'error'} = $value;
	}
	else {
		$log->debug("getting error threshold");
		return $self->{'error'};
	}
}

=item epochs

Getter/setter for the number of training epochs. Default is 500000.

=cut

sub epochs {
	my $self = shift;
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting training epochs to $value");
		return $self->{'epochs'} = $value;
	}
	else {
		$log->debug("getting training epochs");
		return $self->{'epochs'};
	}
}

=item epoch_printfreq

Getter/setter for the number of epochs after which progress is printed. Default is 1000.

=cut

sub epoch_printfreq {
	my $self = shift;
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting epoch printfreq to $value");
		return $self->{'epoch_printfreq'} = $value;
	}
	else {
		$log->debug("getting epoch printfreq");
		return $self->{'epoch_printfreq'}
	}
}

=item neurons

Getter/setter for the number of neurons. Default is 15

=cut

sub neurons {
	my $self = shift;
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting neurons to $value");
		return $self->{'neurons'} = $value;
	}
	else {
		$log->debug("getting neurons");
		return $self->{'neurons'};
	}
}

=item neuron_printfreq

Getter/setter for the number of cascading neurons after which progress is printed.
Default is 10.

=cut

sub neuron_printfreq {
	my $self = shift;
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting neuron printfreq to $value");
		return $self->{'neuron_printfreq'} = $value;
	}
	else {	
		$log->debug("getting neuron printfreq");
		return $self->{'neuron_printfreq'};
	}
}

=item train_type

Getter/setter for the training type: 'cascade' or 'ordinary'. Default is 'ordinary'.

=cut

sub train_type {
	my $self = shift;
	if ( @_ ) {
		my $value = lc shift;
		$log->debug("setting train type to $value"); 
		return $self->{'train_type'} = $value;
	}
	else {
		$log->debug("getting train type");
		return $self->{'train_type'};
	}
}

=item activation_function

Getter/setter for the function that maps inputs to outputs. Default is
FANN_SIGMOID_SYMMETRIC.

=back

=cut

sub activation_function {
	my $self = shift;
	if ( @_ ) {
		my $value = shift;
		$log->debug("setting activation function to $value");
		return $self->{'activation_function'} = $value;
	}
	else {
		$log->debug("getting activation function");
		return $self->{'activation_function'};



AI-FANN


lib/AI/FANN.pm  view on Meta::CPAN

  Fast Artificial Neural Network Library is a free open source neural
  network library, which implements multilayer artificial neural
  networks in C with support for both fully connected and sparsely
  connected networks. Cross-platform execution in both fixed and
  floating point are supported. It includes a framework for easy
  handling of training data sets. It is easy to use, versatile, well
  documented, and fast. PHP, C++, .NET, Python, Delphi, Octave, Ruby,
  Pure Data and Mathematica bindings are available. A reference manual
  accompanies the library with examples and recommendations on how to
  use the library. A graphical user interface is also available for
  the library.

lib/AI/FANN.pm  view on Meta::CPAN


  $train_data->shuffle;

=item *

Pairs of C get/set functions are wrapped in Perl with dual accessor
methods named as the attribute (and without any C<set_>/C<get_>
prefix). For instance:

  $ann->bit_fail_limit($limit); # sets the bit_fail_limit

  $bfl = $ann->bit_fail_limit;  # gets the bit_fail_limit


Pairs of get/set functions requiring additional indexing arguments are
also wrapped inside dual accessors:

  # sets:
  $ann->neuron_activation_function($layer_ix, $neuron_ix, $actfunc);

  # gets:
  $af = $ann->neuron_activation_function($layer_ix, $neuron_ix);

Important: note that on the Perl version, the optional value argument
is moved to the last position (on the C version of the C<set_> method
it is usually the second argument).

=item *

Some functions have been renamed to make the naming more consistent

lib/AI/FANN.pm  view on Meta::CPAN

  -----------------------------------------------------------
  fann_create_from_file               => new_from_file
  fann_create_standard                => new_standard
  fann_get_num_input                  => num_inputs
  fann_get_activation_function        => neuron_activation_function
  fann_set_activation_function        => ^^^
  fann_set_activation_function_layer  => layer_activation_function
  fann_set_activation_function_hidden => hidden_activation_function
  fann_set_activation_function_output => output_activation_function

=item *

Boolean methods return true on success and undef on failure.

lib/AI/FANN.pm  view on Meta::CPAN


C<$input> and C<$desired_output> are arrays.

It returns an array with the values of the output layer.

=item $ann->reset_MSE

-

=item $ann->train_on_file($filename, $max_epochs, $epochs_between_reports, $desired_error)

lib/AI/FANN.pm  view on Meta::CPAN


returns a list of the activation functions used for cascade training.

=item $ann->cascade_activation_functions(@activation_functions)

sets the list of activation functions to use for cascade training.

=item $ann->cascade_activation_steepnesses()

returns a list of the activation steepnesses used for cascade training.

=item $ann->cascade_activation_steepnesses(@activation_steepnesses)

sets the list of activation steepnesses to use for cascade training.

=item $ann->training_algorithm

=item $ann->training_algorithm($training_algorithm)

lib/AI/FANN.pm  view on Meta::CPAN


=item AI::FANN::TrainData->new_empty($num_data, $num_inputs, $num_outputs)

returns a new AI::FANN::TrainData object of the sizes indicated by the
arguments. The initial values of the data contained inside the object
are random and should be set before using the train data object for
training an ANN.
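
For example, an XOR-style training set might be filled in like this (a
sketch; sizes and values are illustrative, and inputs/outputs are assumed
to be passed as array references as elsewhere in this API):

    my $train = AI::FANN::TrainData->new_empty(4, 2, 1);
    $train->data(0, [0, 0], [0]);
    $train->data(1, [0, 1], [1]);
    $train->data(2, [1, 0], [1]);
    $train->data(3, [1, 1], [0]);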

=item $train->data($index)

returns two arrays with the values of the input and output layer

lib/AI/FANN.pm  view on Meta::CPAN


=item $train->data($index, $input, $output)

C<$input> and C<$output> are two arrays.

The input and output layers at the index C<$index> are set to the
values on these arrays.

=item $train->shuffle

-

lib/AI/FANN.pm  view on Meta::CPAN


=item $train->scale($new_min, $new_max)

-

=item $train->subset($pos, $length)

-

=item $train->num_inputs



AI-Fuzzy


Fuzzy.pm  view on Meta::CPAN

=head1 DESCRIPTION

AI::Fuzzy really consists of three modules - AI::Fuzzy::Axis, AI::Fuzzy::Label, and
AI::Fuzzy::Set.  

A fuzzy set is simply a mathematical set to which members can
I<partially> belong. For example, a particular shade of gray may
partially belong to the set of dark colors, whereas black would have
full membership, and lemon yellow would have almost no membership.
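
In code, such a set could be written with this distribution's constructor
(a sketch; the element names and degrees are illustrative):

    my $dark_colors = AI::Fuzzy::Set->new(
        black        => 1,
        gray         => 0.6,
        lemon_yellow => 0.05,
    );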

A fuzzy axis holds fuzzy labels and can be used to classify values
by examining the degree to which they belong to several labels, and 
selecting the most appropriate.  For example, it can decide whether 

Fuzzy.pm  view on Meta::CPAN

    @shortest_first = B<members> $fs_tall_people;

    $fs = B<new> AI::Fuzzy::Set( x1 => .3, x2 => .5, x3 => .8, x4 => 0, x5 => 1);

    B<complement>, B<union>, B<intersection>
    These are the fuzzy set versions of the typical set operations.

    B<equal>
    Returns true if the sets contain the same elements and the
    membership degrees of those elements are all equal.

   B<as_string>
   Returns the set as a string of tuples:
	$b = new AI::Fuzzy::Set( x5 => .3, x6 => .5, x7 => .8, x8 => 0, x9 => 1);
	print "b is: " . $b->as_string . "\n"; 
    prints:
	b is: x8/0, x5/0.3, x6/0.5, x7/0.8, x9/1



AI-FuzzyEngine


lib/AI/FuzzyEngine.pm  view on Meta::CPAN

    push @{$self->{_variables}}, $var;
    Scalar::Util::weaken $self->{_variables}->[-1];
    return $var;
}

sub reset {
    my ($self) = @_;
    $_->reset() for $self->variables(); 
    return $self;
}

sub _class_of_variable { 'AI::FuzzyEngine::Variable' }

lib/AI/FuzzyEngine.pm  view on Meta::CPAN

    my $t = $fe->true();                    # 1.0
    # Always false:
    my $f = $fe->false();                   # 0.0

    # These functions are constitutive for the operations
    # on the fuzzy sets of the fuzzy variables:

    # VARIABLES (AI::FuzzyEngine::Variable)

    # input variables need definition of membership functions of their sets
    my $flow = $fe->new_variable( 0 => 2000,
                        small => [0, 1,  500, 1, 1000, 0                  ],
                        med   => [       400, 0, 1000, 1, 1500, 0         ],
                        huge  => [               1000, 0, 1500, 1, 2000, 1],
                   );
    my $cap  = $fe->new_variable( 0 => 1800,
                        avg   => [0, 1, 1500, 1, 1700, 0         ],
                        high  => [      1500, 0, 1700, 1, 1800, 1],
                   );
    # internal variables need sets, but no membership functions
    my $saturation = $fe->new_variable( # from => to may be omitted
                        low   => [],
                        crit  => [],
                        over  => [],
                   );
    # But output variables need membership functions for their sets:
    my $green = $fe->new_variable( -5 => 5,
                        decrease => [-5, 1, -2, 1, 0, 0            ],
                        ok       => [       -2, 0,  0, 1, 2, 0      ],
                        increase => [              0, 0, 2, 1, 5, 1],
                   );

    # Reset FuzzyEngine (resets all variables)
    $fe->reset();

    # Reset a fuzzy variable directly
    $flow->reset;

    # Membership functions can be changed via the set's variable.
    # This might be useful during parameter identification algorithms
    # Changing a function resets the respective variable.
    $flow->change_set( med => [500, 0, 1000, 1, 1500, 0] );

    # Fuzzification of input variables
    $flow->fuzzify( 600 );
    $cap->fuzzify( 1000 );

    # Membership degrees of the respective sets are now available:
    my $flow_is_small = $flow->small(); # 0.8
    my $flow_is_med   = $flow->med();   # 0.2
    my $flow_is_huge  = $flow->huge();  # 0.0

    # RULES and their application

lib/AI/FuzzyEngine.pm  view on Meta::CPAN

    # b) deduce output variable(s) (here: from internal variable $saturation)
    $green->decrease( $saturation->low()  );
    $green->ok(       $saturation->crit() );
    $green->increase( $saturation->over() );

    # All sets provide their respective membership degrees: 
    my $saturation_is_over = $saturation->over(); # This is no defuzzification!
    my $green_is_ok        = $green->ok();

    # Defuzzification ( is a matter of the fuzzy variable )
    my $delta_green = $green->defuzzify(); # -5 ... 5

lib/AI/FuzzyEngine.pm  view on Meta::CPAN


    # Apply some rules
    $problem->yes( $severity->high,  $threshold->low );
    $problem->no( $fe->not( $problem->yes )  );

    # Fuzzy results are represented by the membership degrees of sets 
    print 'Problem yes: ', $problem->yes,  "\n";
    # [
    #  [         0 0.33333333 0.33333333 0.33333333]
    #  [         0        0.5          1          1]
    # ]

lib/AI/FuzzyEngine.pm  view on Meta::CPAN


This module is yet another implementation of a fuzzy inference system.
The aim was to  be able to code rules (no string parsing),
but avoid operator overloading,
and make it possible to split calculation into multiple steps.
All intermediate results (memberships of sets of variables)
should be available.

Beginning with v0.2.0 it is PDL aware,
meaning that it can handle piddles (PDL objects)
when running the fuzzy operations.

lib/AI/FuzzyEngine.pm  view on Meta::CPAN

and he provides good information and links to learn more about Fuzzy Logics.

=head2 Fuzzy stuff

The L<AI::FuzzyEngine> object defines and provides
the elementary operations for fuzzy sets.
All membership degrees of sets are values from 0 to 1.
Up to now there is no choice with regard to how to operate on sets:

=over 2

=item C<< $fe->or( ... ) >> (Disjunction)

lib/AI/FuzzyEngine.pm  view on Meta::CPAN


=item C<< $fe->and( ... ) >> (Conjunction)

is I<Minimum> of membership degrees

=item C<< $fe->not( $var->$set ) >> (Negation)

is I<1-degree> of membership degree

=item Aggregation of rules (Disjunction)

lib/AI/FuzzyEngine.pm  view on Meta::CPAN


=over 2

=item Implication

I<Clip> membership function of a set according to membership degree,
before the implicated memberships of all sets of a variable are taken for defuzzification:

=item Defuzzification

I<Centroid> of aggregated (and clipped) membership functions

lib/AI/FuzzyEngine.pm  view on Meta::CPAN

they will be configured as arguments to C<new>. 

Once built, the engine can create fuzzy variables by C<new_variable>:

    my $var = $fe->new_variable( $from => $to,
                        $name_of_set1 => [$x11, $y11, $x12, $y12, ... ],
                        $name_of_set2 => [$x21, $y21, $x22, $y22, ... ],
                        ...
                   );

Result is an L<AI::FuzzyEngine::Variable>.
The name_of_set strings are taken to assign corresponding methods
for the respective fuzzy variables.
They must be valid function identifiers.
The same name_of_set can be used for different variables without conflict.
Take care:
There is no check for conflicts with predefined class methods. 

Fuzzy variables provide a method to fuzzify input values:

    $var->fuzzify( $val );

according to the defined sets and their membership functions.

The memberships of the sets of C<$var> are accessible
by the respective functions:

    my $membership_degree = $var->$name_of_set();

Membership degrees can be assigned directly (within rules for example):

    $var->$name_of_set( $membership_degree );

If multiple membership_degrees are given, they are "anded":

    $var->$name_of_set( $degree1, $degree2, ... ); # "and"

By this, simple rules can be coded directly:

    $var_3->zzz( $var_1->xxx, $var_2->yyy, ... ); # "and"

This implements the fuzzy implication

    if $var_1->xxx and $var_2->yyy and ... then $var_3->zzz

The membership degrees of a variable's sets can be reset to undef:

    $var->reset(); # resets a variable
    $fe->reset();  # resets all variables

The fuzzy engine C<$fe> has all variables registered
that have been created by its C<new_variable> method.

A variable can be defuzzified:

    my $out_value = $var->defuzzify();

Membership functions can be replaced via a set's variable:

    $var->change_set( $name_of_set => [$x11n, $y11n, $x12n, $y12n, ... ] );

The variable will be reset when replacing a membership function
of any of its sets.
Interdependencies with other variables are not checked
(the results of earlier rules may no longer be valid,
so some recalculation may be needed).

Sometimes internal variables are used that need neither fuzzification
nor defuzzification.
They can be created by a simplified call to C<new_variable>:

    my $var_int = $fe->new_variable( $name_of_set1 => [],
                                     $name_of_set2 => [],
                                     ...
                       );

Hence, they cannot use the methods C<fuzzify> or C<defuzzify>.

lib/AI/FuzzyEngine.pm  view on Meta::CPAN

    my $disjunction = $fe->or(  $var1->xxx, $var2->yyy, ... );
    my $negated     = $fe->not( $var1->zzz );

There is no magic.

A sequence of rules for the same set can be implemented as follows: 

    $var_3->zzz( $var_1->xxx, $var_2->yyy, ... );
    $var_3->zzz( $var_4->aaa, $var_5->bbb, ... );

The subsequent application of C<< $var_3->zzz(...) >>
corresponds to "or" operations (aggregation of rules).

Only a reset can reset C<$var_3>. 

=head2 PDL awareness

Membership degrees of sets might be either scalars or piddles now.

    $var_a->memb_fun_a(        5  ); # degree of memb_fun_a is a scalar
    $var_a->memb_fun_b( pdl(7, 8) ); # degree of memb_fun_b is a piddle

Empty piddles are not allowed, behaviour with bad values is not tested.

lib/AI/FuzzyEngine.pm  view on Meta::CPAN

Fuzzification (hence calculating degrees) accepts piddles:

    $var_b->fuzzify( pdl([1, 2], [3, 4]) );

Defuzzification returns a piddle if any of the membership
degrees of the variable's sets is a piddle:

    my $val = $var_a->defuzzify(); # $var_a returns a 1dim piddle with two elements

So do the fuzzy operations as provided by the fuzzy engine C<$fe> itself.



AI-FuzzyInference


FuzzyInference.pm  view on Meta::CPAN

}

# sub implication() - public method.
#
# one optional argument: has to match one of the keys of the %_implication hash.
# used to query/set the implication method.

sub implication {
    my ($self,
	$new,
	) = @_;

FuzzyInference.pm  view on Meta::CPAN

}

# sub aggregation() - public method.
#
# one optional argument: has to match one of the keys of the %_aggregation hash.
# used to query/set the aggregation method.

sub aggregation {
    my ($self,
	$new,
	) = @_;

FuzzyInference.pm  view on Meta::CPAN

}

# sub defuzzification() - public method.
#
# one optional argument: has to match one of the keys of the %_defuzzification hash.
# used to query/set the defuzzification method.

sub defuzzification {
    my ($self,
	$new,
	) = @_;

FuzzyInference.pm  view on Meta::CPAN

# sub operation() - public method.
#
# two arguments: first one mandatory and specifies the logic operation
# in question. Second one is optional and has to match one of the keys
# of the %{$_operations{$first_arg}} hash.
# Used to query/set the logic operations method.

sub operation {
    my ($self,
	$op,
	$new,

FuzzyInference.pm  view on Meta::CPAN

#
# 4 arguments or more : First is a name of a new input variable.
# Second and third are the min and max values of that variable.
# These define the universe of discourse for that variable.
# Additional arguments constitute a hash. The keys of the hash
# are term set names defined for the given variable. The values
# are the coordinates of the vertices of the term sets.
#
# ex. $obj->inVar('height',
#                 5, 8,   # xmin, xmax (in feet, say)
#                 'tall' => [0, 0,
#                            5, 1,

FuzzyInference.pm  view on Meta::CPAN

sub inVar {
    my ($self,
	$var,
	$xmin,
	$xmax,
	@sets,
	) = @_;

    $self->{INVARS}{$var} = [$xmin, $xmax];

    while (@sets) {
	my $s = shift @sets;
	my $c = shift @sets;

	$self->{SET}->add("$var:$s", $xmin, $xmax, @$c);
    }
}

FuzzyInference.pm  view on Meta::CPAN

#
# 4 arguments or more : First is a name of a new output variable.
# Second and third are the min and max values of that variable.
# These define the universe of discourse for that variable.
# Additional arguments constitute a hash. The keys of the hash
# are term set names defined for the given variable. The values
# are the coordinates of the vertices of the term sets.

sub outVar {
    my ($self,
	$var,
	$xmin,
	$xmax,
	@sets,
	) = @_;

    $self->{OUTVARS}{$var} = [$xmin, $xmax];

    while (@sets) {
	my $s = shift @sets;
	my $c = shift @sets;

	$self->{SET}->add("$var:$s", $xmin, $xmax, @$c);
    }
}

FuzzyInference.pm  view on Meta::CPAN


    return undef unless exists $self->{RESULTS}{$var};
    return $self->{RESULTS}{$var};
}

# sub reset() - public method
# 
# cleans the data structures used.

sub reset {
    my $self = shift;

    my @list   =  $self->{SET}->listMatching(q|:implicated$|);
    push @list => $self->{SET}->listMatching(q|:aggregated$|);

FuzzyInference.pm  view on Meta::CPAN

sub compute {
    my ($self,
	%vars,
	) = @_;

    $self->reset();

    # First thing we do is to fuzzify the inputs.
    $self->_fuzzify(%vars);

    # Now, apply the rules to see which ones fire.

FuzzyInference.pm  view on Meta::CPAN

}

# sub _defuzzify() - private method.
#
# no arguments. This method applies the defuzzification technique
# to get a crisp value out of the aggregated set of each output
# var.

sub _defuzzify {
    my $self = shift;

FuzzyInference.pm  view on Meta::CPAN

}

# sub _aggregate() - private method.
#
# no arguments. This method applies the aggregation technique to get
# one fuzzy set out of the implicated sets of each output var.

sub _aggregate {
    my $self = shift;

    my $_aggregation = $self->{AGGREGATION};

    # iterate through all output vars.
    for my $var (keys %{$self->{OUTVARS}}) {

	# get implicated sets.
	my @list = $self->{SET}->listMatching("\Q$var\E:.*:implicated\$");

	next unless @list;

	my $i = 0;

FuzzyInference.pm  view on Meta::CPAN

	    my @c = $self->{SET}->$_aggregation($current, $new);
	    $self->{SET}->add($name, @{$self->{OUTVARS}{$var}}, @c);
	    $current = $name;
	}

	# rename the final aggregated set.
	my @c = $self->{SET}->coords($current);
	$self->{SET}->add("$var:aggregated", @{$self->{OUTVARS}{$var}}, @c);

	# delete the temporary sets.
	for my $j (0 .. $i - 1) {
	    $self->{SET}->delete("temp$j");
	}
    }
}

FuzzyInference.pm  view on Meta::CPAN

# sub _fuzzify() - private method.
#
# one argument: a hash. The keys are input variables. The
# values are the crisp values of the input variables (same arguments
# as compute()). It finds the degree of membership of each input
# variable in each of its term sets.

sub _fuzzify {
    my ($self, %vars) = @_;

    my %terms;

FuzzyInference.pm  view on Meta::CPAN

	    food    => 7);

=head1 DESCRIPTION

This module implements a fuzzy inference system. Very briefly, an FIS
is a system defined by a set of input and output variables, and a set
of fuzzy rules relating the input variables to the output variables.
Given crisp values for the input variables, the FIS uses the fuzzy rules
to compute a crisp value for each of the output variables.

The operation of an FIS is split into 4 distinct parts: I<fuzzification>,

FuzzyInference.pm  view on Meta::CPAN


=head2 Fuzzification

In this step, the crisp values of the input variables are used to
compute a degree of membership of each of the input variables in each
of its term sets. This produces a set of fuzzy sets.

=head2 Inference

In this step, all the defined rules are examined. Each rule has two parts:
the I<precedent> and the I<consequent>. The degree of support for each
rule is computed by applying fuzzy operators (I<and>, I<or>) to combine
all parts of its precedent, and generate a single crisp value. This value
indicates the "strength of firing" of the rule, and is used to reshape
(I<implicate>) the consequent part of the rule, generating modified
fuzzy sets.

=head2 Aggregation

Here, all implicated fuzzy sets of the fired rules are combined using
fuzzy operators to generate a single fuzzy set for each of the
output variables.

=head2 Defuzzification

Finally, a defuzzification operator is applied to the aggregated fuzzy
set to generate a single crisp value for each of the output variables.

For a more detailed explanation of fuzzy inference, you can check out
the tutorial by Jerry Mendel at
S<http://sipi.usc.edu/~mendel/publications/FLS_Engr_Tutorial_Errata.pdf>.

FuzzyInference.pm  view on Meta::CPAN

This is the constructor. It takes no arguments, and returns an
initialized AI::FuzzyInference object.

=item operation()

This method is used to set/query the fuzzy operations. It takes at least
one argument, and at most 2. The first argument specifies the logic
operation in question, and can be either C<&> for logical I<AND>,
C<|> for logical I<OR>, or C<!> for logical I<NOT>. The second
argument is used to set what method to use for the given operator.
The following values are possible:

=item &

=over 8

FuzzyInference.pm  view on Meta::CPAN

The method returns the name of the method to be used for the given
operation.

=item implication()

This method is used to set/query the implication method used to alter
the shape of the implicated output fuzzy sets. It takes one optional
argument which specifies the name of the implication method used.
This can be one of the following:

=over 8

=item clip

This causes the output fuzzy set to be clipped at its support value.
This is the default.

=item scale

This scales the output fuzzy set by multiplying it by its support value.
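
Concretely, if a rule fires with degree of support C<s> and the consequent
term set has membership function C<m(x)>, clipping replaces C<m(x)> with
C<min(s, m(x))>, while scaling replaces it with C<s * m(x)>.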

=back

=item aggregation()

This method is used to set/query the aggregation method used to combine
the output fuzzy sets. It takes one optional
argument which specifies the name of the aggregation method used.
This can be one of the following:

=over 8

=item max

The fuzzy sets are combined by taking at each point the maximum value of
all the fuzzy sets at that point.
This is the default.

=back

=item defuzzification()

This method is used to set/query the defuzzification method used to
extract a single crisp value from the aggregated fuzzy set.
It takes one optional
argument which specifies the name of the defuzzification method used.
This can be one of the following:

=over 8

=item centroid

The centroid (aka I<center of mass> and I<center of gravity>) of the
aggregated fuzzy set is computed and returned.
This is the default.
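
In terms of the aggregated membership function C<m(x)>, the returned crisp
value is C<sum(x * m(x)) / sum(m(x))>, taken over the universe of discourse
of the output variable.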

=back

=item inVar()

This method defines an input variable, along with its universe of
discourse, and its term sets. Here's an example:

      $obj->inVar('height',
                  5, 8,   # xmin, xmax (in feet, say)
                  'tall' => [5,   0,
                             5.5, 1,

FuzzyInference.pm  view on Meta::CPAN

                             7, 1]
		  );

This example defines an input variable called I<height>. The minimum
possible value for height is 5, and the maximum is 8. It also defines
3 term sets associated with height: I<tall>, I<medium> and I<short>.
The shape of each of these triangular term sets is completely
specified by the supplied anonymous array of coordinates.

=item outVar()

This method defines an output variable, along with its universe of
discourse, and its term sets. The arguments are identical to those for
the C<inVar()> method.

=item addRule()

This method is used to add the fuzzy rules. Its arguments are hash-value
pairs; the keys are the antecedents and the values are the consequents. The
Each antecedent has to be a combination of 1 or more strings. The
strings have to be separated by C<&> or C<|> indicating the fuzzy
I<AND> and I<OR> operations respectively. Each consequent must be a
single string. Each string has the form: C<var = term_set>. Spaces
are completely optional. Example:

    $obj->addRule('height=short & weight=big' => 'diet = necessary',
		  'height=tall & weight=tiny' => 'diet = are_you_kidding_me');

The first rule basically says I<If the height is short, and the weight is
big, then diet is necessary>.

=item compute()

This method takes as input a set of hash-value pairs; the keys are names
of input variables, and the values are the values of the variables. It
runs those values through the FIS, generating corresponding values for
the output variables. It always returns a true value. To get the actual
values of the output variables, look at the C<value()> method below.
Example:

FuzzyInference.pm  view on Meta::CPAN

    $obj->compute(x => 5,
		  y => 24);

Note that any subsequent call to C<compute()> will implicitly clear out
the old computed values before recomputing the new ones. This is done
through a call to the C<reset()> method below.

=item value()

This method returns the value of the supplied output variable. It only
works for output variables (defined using the C<outVar()> method),
and only returns useful results after a call to C<compute()> has been
made.

=item reset()

This method resets all the data structures used to compute crisp values
of the output variables. It is implicitly called by the C<compute()>
method above.

=back
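
Putting the methods together, a minimal end-to-end sketch (variable names,
term sets and coordinates are illustrative):

    my $fis = AI::FuzzyInference->new;

    $fis->inVar('height', 5, 8,
                short => [5, 1, 5.5, 1, 6.5, 0],
                tall  => [6, 0, 6.5, 1, 8,   1]);

    $fis->outVar('diet', 0, 10,
                 no  => [0, 1, 5, 1, 6,  0],
                 yes => [5, 0, 6, 1, 10, 1]);

    $fis->addRule('height = short' => 'diet = yes',
                  'height = tall'  => 'diet = no');

    $fis->compute(height => 5.2);
    my $crisp = $fis->value('diet');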



AI-Gene-Sequence


demo/Regexgene.pm  view on Meta::CPAN

If called as an object method (C<< $obj->new >>), this decreases the depth
count by one from the invoking object.  It also adds tokens to the regular
expression and uses the B<valid_gene> method to ensure we stay sane 
from the start.

As can be seen, we use array offsets above $self->[1] to store information
which is specific to our implementation.

=cut

sub new {



AI-Genetic-Pro


lib/AI/Genetic/Pro.pm  view on Meta::CPAN

		if $self->type eq q/combination/ and ($self->strategy->[0] ne q/OX/ and $self->strategy->[0] ne q/PMX/);
	croak(q/Strategy cannot be "/,$self->strategy->[0],q/" if "variable length" feature is active!/ )
		if ($self->strategy->[0] eq 'PMX' or $self->strategy->[0] eq 'OX') and $self->variable_length;
	
	#-------------------------------------------------------------------
	$self->_set_strict if $self->strict;

	#-------------------------------------------------------------------
	return $self unless $self->mce;

	#-------------------------------------------------------------------

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

#=======================================================================
sub _Cache { $_Cache; }
#=======================================================================
# INIT #################################################################
#=======================================================================
sub _set_strict {
	my ($self) = @_;
	
	# fitness
	my $fitness = $self->fitness();
	my $replacement = sub {

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	$self->_history( [  [ ], [ ], [ ] ] );
	$self->_init_cache if $self->cache;
	#-------------------------------------------------------------------
	
	if($self->type eq q/listvector/){
		croak(q/You have to pass array reference if "type" is set to "listvector"/) unless ref $data eq 'ARRAY';
		$self->_translations( $self->_check_data_ref($data) );
	}elsif($self->type eq q/bitvector/){
		croak(q/You have to pass integer if "type" is set to "bitvector"/) if $data !~ /^\d+$/o;
		$self->_translations( [ [ 0, 1 ] ] );
		$self->_translations->[$_] = $self->_translations->[0] for 1..$data-1;
	}elsif($self->type eq q/combination/){
		croak(q/You have to pass array reference if "type" is set to "combination"/) unless ref $data eq 'ARRAY';
		$self->_translations( [ clone($data) ] );
		$self->_translations->[$_] = $self->_translations->[0] for 1..$#$data;
	}elsif($self->type eq q/rangevector/){
		croak(q/You have to pass array reference if "type" is set to "rangevector"/) unless ref $data eq 'ARRAY';
		$self->_translations( $self->_find_fix_range( $self->_check_data_ref($data) ));
	}else{
		croak(q/You have to specify first "type" of vector!/);
	}
	

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	my $graph = GD()->new(($params{-width} || 640), ($params{-height} || 480));

	my $data = $self->getHistory;

	if(defined $params{-font}){
    	$graph->set_title_font  ($params{-font}, 12);
    	$graph->set_x_label_font($params{-font}, 10);
    	$graph->set_y_label_font($params{-font}, 10);
    	$graph->set_legend_font ($params{-font},  8);
	}
	
    $graph->set_legend(
    	$params{legend1} || q/Max value/,
    	$params{legend2} || q/Mean value/,
    	$params{legend3} || q/Min value/,
    );

    $graph->set(
        x_label_skip        => int(($data->[0]->[-1]*4)/100),
        x_labels_vertical   => 1,
        x_label_position    => .5,
        y_label_position    => .5,
        y_long_ticks        => 1,   # horizontal lines

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


I<Beta> distribution.  The density of the beta is:

    X^($aa - 1) * (1 - X)^($bb - 1) / B($aa , $bb) for 0 < X < 1.

C<$aa> and C<$bb> are set by default to the number of parents.

B<Argument restrictions:> Both $aa and $bb must not be less than 1.0E-37.

=item C<-selection =E<gt> [ 'RouletteDistribution', 'binomial' ]>

Binomial distribution. No additional parameters are needed.

=item C<-selection =E<gt> [ 'RouletteDistribution', 'chi_square', $df ]>

Chi-square distribution with C<$df> degrees of freedom. C<$df> by default is set to the size of the population.

=item C<-selection =E<gt> [ 'RouletteDistribution', 'exponential', $av ]>

Exponential distribution, where C<$av> is the average. C<$av> by default is set to the size of the population.

=item C<-selection =E<gt> [ 'RouletteDistribution', 'poisson', $mu ]>

Poisson distribution, where C<$mu> is the mean. C<$mu> by default is set to the size of the population.

=back

=item B<Distribution>

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


I<Beta> distribution.  The density of the beta is:

    X^($aa - 1) * (1 - X)^($bb - 1) / B($aa , $bb) for 0 < X < 1.

C<$aa> and C<$bb> are set by default to the number of parents.

B<Argument restrictions:> Both $aa and $bb must not be less than 1.0E-37.

=item C<-selection =E<gt> [ 'Distribution', 'binomial' ]>

Binomial distribution. No additional parameters are needed.

=item C<-selection =E<gt> [ 'Distribution', 'chi_square', $df ]>

Chi-square distribution with C<$df> degrees of freedom. C<$df> by default is set to the size of the population.

=item C<-selection =E<gt> [ 'Distribution', 'exponential', $av ]>

Exponential distribution, where C<$av> is the average. C<$av> by default is set to the size of the population.

=item C<-selection =E<gt> [ 'Distribution', 'poisson', $mu ]>

Poisson distribution, where C<$mu> is the mean. C<$mu> by default is set to the size of the population.

=back

=back

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


I<Beta> distribution.  The density of the beta is:

    X^($aa - 1) * (1 - X)^($bb - 1) / B($aa , $bb) for 0 < X < 1.

C<$aa> and C<$bb> are set by default to the number of parents.

B<Argument restrictions:> Both $aa and $bb must not be less than 1.0E-37.

=item C<-strategy =E<gt> [ 'Distribution', 'binomial' ]>

Binomial distribution. No additional parameters are needed.

=item C<-strategy =E<gt> [ 'Distribution', 'chi_square', $df ]>

Chi-squared distribution with C<$df> degrees of freedom. C<$df> by default is set to the number of parents.

=item C<-strategy =E<gt> [ 'Distribution', 'exponential', $av ]>

Exponential distribution, where C<$av> is the average. C<$av> by default is set to the number of parents.

=item C<-strategy =E<gt> [ 'Distribution', 'poisson', $mu ]>

Poisson distribution, where C<$mu> is the mean. C<$mu> by default is set to the number of parents.

=back

=item PMX

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


Alias for C<indType>.

=item I<$ga>-E<gt>B<crossProb>()

This method is used to query and set the crossover rate.

=item I<$ga>-E<gt>B<crossover>()

Alias for C<crossProb>.

=item I<$ga>-E<gt>B<mutProb>()

This method is used to query and set the mutation rate.

=item I<$ga>-E<gt>B<mutation>()

Alias for C<mutProb>.

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


=item I<$ga>-E<gt>B<as_array>($chromosome)

In list context return an array representing the specified chromosome. 
In scalar context return a reference to an array representing the specified 
chromosome. If I<variable_length> is turned on and is set to level 2, an array 
can have some C<undef> values. To get only C<not undef> values use 
C<as_array_def_only> instead of C<as_array>.

=item I<$ga>-E<gt>B<as_array_def_only>($chromosome)

In list context return an array representing the specified chromosome. 
In scalar context return a reference to an array representing the specified 
chromosome. If I<variable_length> is turned off, this function is just an
alias for C<as_array>. If I<variable_length> is turned on and is set to 
level 2, this function will return only C<not undef> values from chromosome. 
See example below:

    # -variable_length => 2, -type => 'bitvector'
	

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	
	$string = $ga->as_string($chromosome);
	# $string looks something like this:
	# element0___element1___element2___element3...

Attention! If I<variable_length> is turned on and is set to level 2, it is 
possible to get C<undef> values on the left side of the vector. In the returned
string C<undef> values will be replaced with B<spaces>. If you don't want
to see any I<spaces>, use C<as_string_def_only> instead of C<as_string>.

=item I<$ga>-E<gt>B<as_string_def_only>($chromosome)

Return a string representation of specified chromosome. If I<variable_length> 
is turned off, this function is just an alias for C<as_string>. If I<variable_length> 
is turned on and is set to level 2, this function will return a string without
C<undef> values. See example below:

	# -variable_length => 2, -type => 'bitvector'
	
	my $string = $ga->as_string($chromosome);

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

=back

=head1 REPORTING BUGS

When reporting bugs/problems please include as much information as possible.
It may be difficult for me to reproduce the problem as almost every setup
is different.

A small script which yields the problem will probably be of help. 

=head1 THANKS



AI-Genetic


Genetic.pm  view on Meta::CPAN

##################

# sub new():
# This is the constructor. It creates a new AI::Genetic
# object. Options are:
# -population: set the population size
# -crossover:  set the crossover probability
# -mutation:   set the mutation probability
# -fitness:    set the fitness function
# -type:       set the genome type. See docs.
# -terminate:  set termination sub.

sub new {
  my ($class, %args) = @_;

  my $self = bless {

Genetic.pm  view on Meta::CPAN

  }

  $self->{PEOPLE};
}

# useful little methods to set/query parameters.
sub size       { $_[0]{POPSIZE}    = $_[1] if defined $_[1]; $_[0]{POPSIZE}   }
sub crossProb  { $_[0]{CROSSRATE}  = $_[1] if defined $_[1]; $_[0]{CROSSRATE} }
sub mutProb    { $_[0]{MUTPROB}    = $_[1] if defined $_[1]; $_[0]{MUTPROB}   }
sub indType    { $_[0]{INDIVIDUAL} }
sub generation { $_[0]{GENERATION} }
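
A construction sketch based on the option list documented above (the
fitness and terminate callbacks are placeholders):

    my $ga = AI::Genetic->new(
        -population => 500,
        -crossover  => 0.9,
        -mutation   => 0.01,
        -fitness    => \&my_fitness,
        -type       => 'bitvector',
        -terminate  => \&my_terminate,
    );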

Genetic.pm  view on Meta::CPAN


I will not go into the details of GAs here, but here are the
bare basics. Plenty of information can be found on the web.

In a GA, a population of individuals compete for survival. Each
individual is designated by a set of genes that define its
behaviour. Individuals that perform better (as defined by the
fitness function) have a higher chance of mating with other
individuals. When two individuals mate, they swap some of
their genes, resulting in an individual that has properties
from both of its "parents". Every now and then, a mutation

Genetic.pm  view on Meta::CPAN

B<IMPORTANT>: the actual array reference used by the AI::Genetic object
is returned, so any changes to it will be reflected in I<$ga>.

=item I<$ga>-E<gt>B<size>(?I<newSize>?)

This method is used to query and set the population size.

=item I<$ga>-E<gt>B<crossProb>(?I<newProb>?)

This method is used to query and set the crossover rate.

=item I<$ga>-E<gt>B<mutProb>(?I<newProb>?)

This method is used to query and set the mutation rate.

=item I<$ga>-E<gt>B<indType>()

This method returns the type of individual: I<bitvector>, I<listvector>,
or I<rangevector>.



AI-Image


lib/AI/Image.pm  view on Meta::CPAN


The size for the generated image (default: '512x512').

=item debug

Used for testing.  If set to any true value, the image method
will return details of the error encountered instead of C<undef>.

=back

=head2 image



AI-LibNeural


README  view on Meta::CPAN

INSTALLATION

> perl Makefile.PL
or 
> perl Makefile.PL PREFIX=/some/non/system/place
(then setting PERL5LIB as appropriate based on ^ is a good idea)
ignore the warnings about CC and LD being unknown parameters, then
> make all test install

Provided that everything goes well this should build the module, test it out,
and install it. If you encounter problems try e-mailing the author with as



AI-Logic-AnswerSet


lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN


	return @arr;
}

sub statistics {	# Returns an array of hashes in which the statistics of every predicate of every answer set are stored.
			# If a comparison condition (on the number of predicates) is specified, it returns
			# the answer sets that satisfy that condition.

	my @as = @{$_[0]};
	my @pred = @{$_[1]};
	my @num = @{$_[2]};
	my @operators = @{$_[3]};

	my @sets;
	my @ans;
	
	my $countAS = 0;
	my @stat;

	my $countPred;

	foreach my $elem (@as) {

		if($elem =~ /(\w+).*\n/) {
			push @{$sets[$countAS]}, $elem;
			if(_existsPred($1,\@pred)) {
				$stat[$countAS]{$1} += 1;
				$countAS += 1;
			}
		}

		elsif($elem =~ /(\w+).*/) {
			push @{$sets[$countAS]}, $elem;
			if(_existsPred($1,\@pred)) {
				$stat[$countAS]{$1} += 1;
			}
		}
	}

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

					last;
				}
			}

			if($countPred == $size) {
				push @ans , $sets[$j];
			}
			$countPred = 0;
		}
		return @ans;

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN


In this case the first output is selected.

=head3 getASFromFile

Parses the output of a DLV execution saved in a file and gathers the answer sets.

	AI::Logic::AnswerSet::executeFromFileAndSave("outprog.txt","dlvprog.txt","");
	my @result = AI::Logic::AnswerSet::getASFromFile("outprog.txt");

=head3 getAS

Parses the output of a DLV execution and gathers the answer sets.

	my @out = AI::Logic::AnswerSet::singleExec("3col.txt","nodes.txt","edges.txt","-nofacts");
	my @result = AI::Logic::AnswerSet::getAS(@out);

=head3 mapAS

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN


	my @out = AI::Logic::AnswerSet::singleExec("3col.txt","nodes.txt","edges.txt","-nofacts");
	my @result = AI::Logic::AnswerSet::getAS(@out);
	my @mappedAS = AI::Logic::AnswerSet::mapAS(@result);

The user can set some constraints on the data to be saved in the hashmap, such as predicates, or answer sets, or both.

	my @mappedAS = AI::Logic::AnswerSet::mapAS(@result,@predicates,@answerSets);

For instance, think about the 3-colorability problem: imagine
having the edges in the hashmap, and printing the edges contained in the third answer set
returned by DLV; this is an example of the print instruction, useful to understand how
the hashmap works:

	print "Edges: @{$mappedAS[2]{edge}}\n";

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

Suppose that we have the predicate "person" C<person(Name,Surname);> and
that we just want the surnames of all the instances of "person":

	my @surnames = AI::Logic::AnswerSet::getProjection(\@mappedAS,3,"person",2);

The parameters are, respectively: hashmap, number of the answer set, name of the predicate,
position of the term.

=head3 statistics

This method returns an array of hashes with some stats of every predicate of every answer set,
namely the number of occurrences of the specified predicates in each answer set.
If a condition (on the number of predicates) is specified, only the answer sets that satisfy
the condition are returned.

	my @res = AI::Logic::AnswerSet::getAS(@output);
	my @predicates = ("node","edge");
	my @stats = AI::Logic::AnswerSet::statistics(\@res,\@predicates);

In this case the data structure returned is the same as the one returned by C<mapAS()>.
Hence, for each answer set (each element of the array of hashes), the hashmap will appear 
like this:

	{
		node => 6
		edge => 9
	}

This means that for a particular answer set we have 6 nodes and 9 edges.
In addition, this method can be used with some constraints:

	my @res = AI::Logic::AnswerSet::getAS(@output);
	my @predicates = ("node","edge");
	my @numbers = (4,15);
	my @operators = (">","<");
	my @stats = AI::Logic::AnswerSet::statistics(\@res,\@predicates,\@numbers,\@operators);

Now the functions returns the answer sets that satisfy the condition, i.e., an answer set
is returned only if the number of occurrences of the predicate "node" is higher than 4, and the number of occurrences of the predicate "edge" less than 15.

=head3 getFacts

Get the logic program facts from a file or a string.



AI-ML


lib/AI/ML/LinearRegression.pm  view on Meta::CPAN


                $grads =  ($x->T x (($x x $thetas)-$y)) / $m;

                $reg_thetas = ($lambda / $m) * $thetas;
                # do not regularize theta 0
                $reg_thetas->set_element(0,0,0);
                $thetas = $thetas - $alpha * ( $grads + $reg_thetas );
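                # The update above implements batch gradient descent with L2
                # regularization:
                #   thetas := thetas - alpha * ( X'(X*thetas - y)/m + (lambda/m)*thetas )
                # with theta_0 excluded from the penalty (zeroed out above).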
            }
        }
        else{
            for my $i (1..$iters)



AI-MXNet-Gluon-ModelZoo


examples/image_classification.pl  view on Meta::CPAN

use Getopt::Long qw(HelpMessage);

GetOptions(
    ## my Pembroke Welsh Corgi Kyuubi, enjoying the solar eclipse of August 21, 2017
    'image=s' => \(my $image = 'http://apache-mxnet.s3-accelerate.dualstack.amazonaws.com/'.
                               'gluon/dataset/kyuubi.jpg'),
    'model=s' => \(my $model = 'resnet152_v2'),
    'help'    => sub { HelpMessage(0) },
) or HelpMessage(1);

## get a pretrained model (download parameters file if necessary)
my $net = get_model($model, pretrained => 1);

## ImageNet classes
my $fname = download('http://data.mxnet.io/models/imagenet/synset.txt');
my @text_labels = map { chomp; s/^\S+\s+//; $_ } IO::File->new($fname)->getlines;

## get the image from the disk or net
if($image =~ /^https/)
{



AI-MXNet


examples/calculator.pl  view on Meta::CPAN

    my $data = PDL->random(2, $n);
    ## creates a pdl with $n rows and one column holding the labels;
    ## each label is the sum, product, etc. of the two random values
    ## in the corresponding row of the data pdl
    my $label = $func->($data->slice('0,:'), $data->slice('1,:'));
    # partition into train/eval sets
    my $edge = int($n / 8);
    my $validation_data = $data->slice(":,0:@{[ $edge - 1 ]}");
    my $validation_label = $label->slice(":,0:@{[ $edge - 1 ]}");
    my $train_data = $data->slice(":,$edge:");
    my $train_label = $label->slice(":,$edge:");
    # build iterators around the sets
    return(mx->io->NDArrayIter(
        batch_size => $batch_size,
        data => $train_data,
        label => $train_label,
    ), mx->io->NDArrayIter(



AI-MXNetCAPI


mxnet.i  view on Meta::CPAN

 *  \return error info
 */
const char *MXGetLastError();

//-------------------------------------
// Part 0: Global State setups
//-------------------------------------
/*!
 * \brief Seed the global random number generators in mxnet.
 * \param seed the random number seed.
 * \return 0 when success, -1 when failure happens.

mxnet.i  view on Meta::CPAN

 * \return 0 when success, -1 when failure happens
 */
int MXNDArrayDetach(NDArrayHandle handle, NDArrayHandle *out);

/*!
 * \brief set the flag for gradient array state.
 * \param handle NDArray handle
 * \param state the new state.
 * \return 0 when success, -1 when failure happens
 */
int MXNDArraySetGradState(NDArrayHandle handle, int state);

/*!
 * \brief get the flag for gradient array state.
 * \param handle NDArray handle
 * \param out the current state, returned by reference.
 * \return 0 when success, -1 when failure happens
 */
int MXNDArrayGetGradState(NDArrayHandle handle, int *out);

mxnet.i  view on Meta::CPAN

                                 NDArrayHandle **out_array,
                                 int num_params,
                                 const char **keys,
                                 const char **vals);
/*!
 * \brief set whether to record operators for autograd
 * \param is_training 1 when training, 0 when testing
 * \param out returns the previous status before this set.
 * \return 0 when success, -1 when failure happens
 */
int MXAutogradSetIsTraining(int is_training, int* out);
/*!
 * \brief mark NDArrays as variables to compute gradient for autograd

mxnet.i  view on Meta::CPAN

/*!
 * \brief Set string attribute from symbol.
 *  NOTE: Setting attribute to a symbol can affect the semantics(mutable/immutable) of symbolic graph.
 *
 *  Safe recommendation: use immutable graph
 *  - Only allow set attributes during creation of new symbol as optional parameter
 *
 *  Mutable graph (be careful about the semantics):
 *  - Allow set attr at any point.
 *  - Mutating an attribute of some common node of two graphs can cause confusion from user.
 *
 * \param symbol the source symbol
 * \param key The key of the symbol.
 * \param value The value to be saved.

mxnet.i  view on Meta::CPAN

                         ExecutorHandle shared_exec_handle,
                         ExecutorHandle* out
);

/*!
 * \brief set a callback to notify the completion of an operation
 */
int MXExecutorSetMonitorCallback(ExecutorHandle handle,
                                           ExecutorMonitorCallback callback,
                                           void* callback_handle);
//--------------------------------------------

mxnet.i  view on Meta::CPAN

 * \return 0 when success, -1 when failure happens
 */
int MXDataIterNext(DataIterHandle handle,
                             int *out);
/*!
 * \brief Call iterator.Reset
 * \param handle the handle to iterator
 * \return 0 when success, -1 when failure happens
 */
int MXDataIterBeforeFirst(DataIterHandle handle);

 view all matches for this distribution


AI-MaxEntropy

 view release on metacpan or  search on metacpan

inc/Module/AutoInstall.pm  view on Meta::CPAN

use vars qw{$VERSION};
BEGIN {
	$VERSION = '1.03';
}

# special map on pre-defined feature sets
my %FeatureMap = (
    ''      => 'Core Features',    # XXX: deprecated
    '-core' => 'Core Features',
);

inc/Module/AutoInstall.pm  view on Meta::CPAN

            my $option = lc($1);

            # check for a newer version of myself
            _update_to( $modules, @_ ) and return if $option eq 'version';

            # sets CPAN configuration options
            $Config = $modules if $option eq 'config';

            # promote every feature to core status
            $core_all = ( $modules =~ /^all$/i ) and next
              if $option eq 'core';

inc/Module/AutoInstall.pm  view on Meta::CPAN

    my $conf = $cp->configure_object;

    return unless $conf->can('conf') # 0.05x+ with "sudo" support
               or _can_write($conf->_get_build('base'));  # 0.04x

    # if we're root, set UNINST=1 to avoid trouble unless user asked for it.
    my $makeflags = $conf->get_conf('makeflags') || '';
    if ( UNIVERSAL::isa( $makeflags, 'HASH' ) ) {
        # 0.03+ uses a hashref here
        $makeflags->{UNINST} = 1 unless exists $makeflags->{UNINST};

inc/Module/AutoInstall.pm  view on Meta::CPAN

        # 0.02 and below uses a scalar
        $makeflags = join( ' ', split( ' ', $makeflags ), 'UNINST=1' )
          if ( $makeflags !~ /\bUNINST\b/ and eval qq{ $> eq '0' } );

    }
    $conf->set_conf( makeflags => $makeflags );
    $conf->set_conf( prereqs   => 1 );

    

    while ( my ( $key, $val ) = splice( @config, 0, 2 ) ) {
        $conf->set_conf( $key, $val );
    }

    my $modtree = $cp->module_tree;
    while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) {
        print "*** Installing $pkg...\n";

inc/Module/AutoInstall.pm  view on Meta::CPAN

        # no "sudo" support, probe for writableness
        return unless _can_write( MM->catfile( $CPAN::Config->{cpan_home}, 'sources' ) )
                  and _can_write( $Config::Config{sitelib} );
    }

    # if we're root, set UNINST=1 to avoid trouble unless user asked for it.
    my $makeflags = $CPAN::Config->{make_install_arg} || '';
    $CPAN::Config->{make_install_arg} =
      join( ' ', split( ' ', $makeflags ), 'UNINST=1' )
      if ( $makeflags !~ /\bUNINST\b/ and eval qq{ $> eq '0' } );

    # don't show start-up info
    $CPAN::Config->{inhibit_startup_message} = 1;

    # set additional options
    while ( my ( $opt, $arg ) = splice( @config, 0, 2 ) ) {
        ( $args{$opt} = $arg, next )
          if $opt =~ /^force$/;    # pseudo-option
        $CPAN::Config->{$opt} = $arg;
    }

 view all matches for this distribution


AI-MegaHAL

 view release on metacpan or  search on metacpan

lib/AI/MegaHAL.pm  view on Meta::CPAN


use strict;

use vars qw(@EXPORT @ISA $VERSION $AUTOLOAD);

@EXPORT = qw(megahal_setnoprompt
	     megahal_setnowrap
	     megahal_setnobanner
	     megahal_seterrorfile
	     megahal_setstatusfile
	     megahal_initialize
	     megahal_initial_greeting
	     megahal_command
	     megahal_do_reply
	     megahal_learn

lib/AI/MegaHAL.pm  view on Meta::CPAN

    } else {
	die("Error: unable to locate megahal.brn or megahal.trn\n");
    }

    # Set some of the options that may have been passed to us.
    megahal_setnobanner() if(! $args{'Banner'});
    megahal_setnowrap()   if(! $args{'Wrap'});
    megahal_setnoprompt() if(! $args{'Prompt'});

    # This flag indicates whether or not we should automatically save
    # our brain when the object goes out of scope.
    $self->{'AutoSave'} = $args{'AutoSave'};
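
A minimal usage sketch based on the constructor options visible above (the initial_greeting() and do_reply() calls follow the module's OO interface, but treat the exact details as assumptions):

    use AI::MegaHAL;

    my $megahal = AI::MegaHAL->new(
        AutoSave => 1,   # save the brain when the object goes out of scope
        Banner   => 0,
        Prompt   => 0,
        Wrap     => 0,
    );

    print $megahal->initial_greeting(), "\n";
    print $megahal->do_reply('Hello!'), "\n";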

 view all matches for this distribution


AI-MicroStructure

 view release on metacpan or  search on metacpan

bin/from-folder.pl  view on Meta::CPAN


GetOptions (\%opts, "cache_file=s");

our $cache = {};
our @target = split("\/",$opts{cache_file});
my $set = AI::MicroStructure::ObjectSet->new();

eval {
    local $^W = 0;  # because otherwise it doesn't pass errors
#`rm $opts{cache_file}`;
    $cache = lock_retrieve($opts{cache_file});

bin/from-folder.pl  view on Meta::CPAN


END{

  lock_store($cache,$opts{cache_file});

  print Dumper [$set->size,$set->members];


  }


bin/from-folder.pl  view on Meta::CPAN


sub translate {
  return unless -f;
  (my $rel_name = $File::Find::name) =~ s{.*/}{}xs;

  $set->insert(AI::MicroStructure::Object->new($rel_name));

}
#print Dumper join "-", soundex(("rock'n'roll", 'rock and roll', 'rocknroll'));
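
The find() call that drives translate() falls outside this excerpt; a plausible invocation (the directory argument is an assumption) would be:

    use File::Find;

    # call translate() for every entry under the target directory;
    # translate() itself skips anything that is not a plain file
    find(\&translate, $ARGV[0] || '.');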

 view all matches for this distribution


AI-NNEasy

 view release on metacpan or  search on metacpan

lib/AI/NNEasy.pm  view on Meta::CPAN

    foreach my $Key ( keys %$layer_conf ) { $$layer_conf{$Key} = $$conf{$Key} if exists $$conf{$Key} ;}

    return $layer_conf ;
  }
  
  sub reset_nn { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    
    $this->{NN} = AI::NNEasy::NN->new( @{ $this->{NN_ARGS} } ) ;
  }

lib/AI/NNEasy.pm  view on Meta::CPAN

    
    $err *= -1 if $err < 0 ;
    return $err ;
  }
  
  *_learn_set_get_output_error = \&_learn_set_get_output_error_c ;
  
  sub _learn_set_get_output_error_pl { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my $set = shift(@_) ;
    my $error_ok = shift(@_) ;
    my $ins_ok = shift(@_) ;
    my $verbose = shift(@_) ;
    
    for (my $i = 0 ; $i < @$set ; $i+=2) {
      $this->{NN}->run($$set[$i]) ;
      $this->{NN}->learn($$set[$i+1]) ;
    }

    my ($err,$learn_ok,$print) ;
    for (my $i = 0 ; $i < @$set ; $i+=2) {
      $this->{NN}->run($$set[$i]) ;
      my $er = $this->{NN}->RMSErr($$set[$i+1]) ;
      $er *= -1 if $er < 0 ;
      ++$learn_ok if $er < $error_ok ;
      $err += $er ;
      $print .= join(' ',@{$$set[$i]}) ." => ". join(' ',@{$$set[$i+1]}) ." > $er\n" if $verbose ;
    }
    
    $err /= $ins_ok ;
    
    return ( $err , $learn_ok , $print ) ;

lib/AI/NNEasy.pm  view on Meta::CPAN

  
  
  
  
    
  sub learn_set { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $ins_ok = shift(@_) ;
    my $limit = shift(@_) ;
    my $verbose = shift(@_) ;
    
    my $ins_sz = @set / 2 ;

    $ins_ok ||= $ins_sz ;
    
    my $err_static_limit = 15 ;
    my $err_static_limit_positive ;

lib/AI/NNEasy.pm  view on Meta::CPAN

  
    my $error_ok = $this->{ERROR_OK} ;
    
    my $check_diff_count = 1000 ;
    
    my ($learn_ok,$counter,$err,$err_last,$err_count,$err_static, $reset_count1 , $reset_count2 ,$print) ;
    
    $err_static = 0 ;
    
    while ( ($learn_ok < $ins_ok) && ($counter < $limit) ) {
      ($err , $learn_ok , $print) = $this->_learn_set_get_output_error(\@set , $error_ok , $ins_ok , $verbose) ;
      
      ++$counter ;
      
      if ( !($counter % 100) || $learn_ok == $ins_ok ) {
        my $err_diff = $err_last - $err ;

lib/AI/NNEasy.pm  view on Meta::CPAN

        
        print "err_static = $err_static\n" if $verbose && $err_static ;

        $err_last = $err ;
        
        my $reseted ;
        if ( $err_static >= $err_static_limit || ($err > 1 && $err_static >= $err_static_limit_positive) ) {
          $err_static = 0 ;
          $counter -= 2000 ;
          $reseted = 1 ;
          ++$reset_count1 ;
          
          if ( ( $reset_count1 + $reset_count2 ) > 2 ) {
            $reset_count1 = $reset_count2 = 0 ;
            print "** Reseting NN...\n" if $verbose ;
            $this->reset_nn ;
          }
          else {
            print "** Reseting weights due NULL diff...\n" if $verbose ;
            $this->{NN}->init ;
          }
        }
        
        if ( !($counter % $check_diff_count) ) {
          $err_count /= ($check_diff_count/100) ;
          
          print "ERR COUNT> $err_count\n" if $verbose ;
          
          if ( !$reseted && $err_count < 0.001 ) {
            $err_static = 0 ;
            $counter -= 1000 ;
            ++$reset_count2 ;
            
            if ( ($reset_count1 + $reset_count2) > 2 ) {
              $reset_count1 = $reset_count2 = 0 ;
              print "** Reseting NN...\n" if $verbose ;
              $this->reset_nn ;
            }
            else {
              print "** Reseting weights due LOW diff...\n" if $verbose ;
              $this->{NN}->init ;
            }
          }

          $err_count = 0 ;

lib/AI/NNEasy.pm  view on Meta::CPAN

      
    }
    
  }
  
  sub get_set_error { 
    my $this = ref($_[0]) ? shift : undef ;
    my $CLASS = ref($this) || __PACKAGE__ ;
    my @set = ref($_[0]) eq 'ARRAY' ? @{ shift(@_) } : ( ref($_[0]) eq 'HASH' ? %{ shift(@_) } : shift(@_) ) ;
    my $ins_ok = shift(@_) ;
    
    my $ins_sz = @set / 2 ;

    $ins_ok ||= $ins_sz ;
  
    my $err ;
    for (my $i = 0 ; $i < @set ; $i+=2) {
      $this->{NN}->run($set[$i]) ;
      my $er = $this->{NN}->RMSErr($set[$i+1]) ;
      $er *= -1 if $er < 0 ;
      $err += $er ;
    }
    
    $err /= $ins_ok ;

lib/AI/NNEasy.pm  view on Meta::CPAN

      sv_catsv(ret , elem) ;
    }
    return ret ;
}

void _learn_set_get_output_error_c( SV* self , SV* set , double error_ok , int ins_ok , bool verbose ) {
    dXSARGS;
    
    STRLEN len;
    int i ;
    HV* self_hv = OBJ_HV( self );
    AV* set_av = OBJ_AV( set ) ;
    SV* nn = FETCH_ATTR(self_hv , "NN") ;
    SV* print_verbose = verbose ? sv_2mortal(newSVpv("",0)) : NULL ;
    SV* ret ;
    double err = 0 ;
    double er = 0 ;
    int learn_ok = 0 ;
        
    for (i = 0 ; i <= av_len(set_av) ; i+=2) {
      SV* set_in = *av_fetch(set_av, i ,0) ;
      SV* set_out = *av_fetch(set_av, i+1 ,0) ;

      PUSHMARK(SP) ;
        XPUSHs( nn );
        XPUSHs( set_in );
      PUTBACK ;
      call_method("run", G_DISCARD) ;
      
      PUSHMARK(SP) ;
        XPUSHs( nn );
        XPUSHs( set_out );
      PUTBACK ;
      call_method("learn", G_SCALAR) ;
    }
    
    for (i = 0 ; i <= av_len(set_av) ; i+=2) {
      SV* set_in = *av_fetch(set_av, i ,0) ;
      SV* set_out = *av_fetch(set_av, i+1 ,0) ;

      PUSHMARK(SP) ;
        XPUSHs( nn );
        XPUSHs( set_in );
      PUTBACK ;
      call_method("run", G_DISCARD) ;
      
      PUSHMARK(SP) ;
        XPUSHs( nn );
        XPUSHs( set_out );
      PUTBACK ;
      call_method("RMSErr", G_SCALAR) ;
      
      SPAGAIN ;
      ret = POPs ;

lib/AI/NNEasy.pm  view on Meta::CPAN

      if (er < 0) er *= -1 ;
      if (er < error_ok) ++learn_ok ;
      err += er ;
      
      if ( verbose ) sv_catpvf(print_verbose , "%s => %s > %f\n" ,
                       SvPV( _av_join( OBJ_AV(set_in) ) , len) ,
                       SvPV( _av_join( OBJ_AV(set_out) ) , len) ,
                       er
                     ) ;

    }
    

lib/AI/NNEasy.pm  view on Meta::CPAN

I wrote this module because, after testing different NN modules on Perl, I couldn't
find one that was portable across Linux and Windows, easy to use and, most
importantly, one that really works on a real problem.

With this module you don't need to know much about NNs to be able to construct one: you just
define the structure of the NN, have it learn your set of inputs, and use it.

=head1 USAGE

Here's an example of a NN to compute XOR:

lib/AI/NNEasy.pm  view on Meta::CPAN

  'xor.nne' , ## file to save the NN.
  [0,1] ,     ## Output types of the NN.
  $ERR_OK ,   ## Maximal error for output.
  2 ,         ## Number of inputs.
  1 ,         ## Number of outputs.
  [3] ,       ## Hidden layers. (this is setting 1 hidden layer with 3 nodes).
  ) ;
  
  
  ## Our set of inputs and outputs to learn:
  my @set = (
  [0,0] => [0],
  [0,1] => [1],
  [1,0] => [1],
  [1,1] => [0],
  );
  
  ## Calculate the actual error for the set:
  my $set_err = $nn->get_set_error(\@set) ;
  
  ## If the set error is bigger than the maximal error, let's learn this set:
  if ( $set_err > $ERR_OK ) {
    $nn->learn_set( \@set ) ;
    ## Save the NN:
    $nn->save ;
  }
  
  ## Use the NN:

lib/AI/NNEasy.pm  view on Meta::CPAN

  print "1 0 => @$out\n" ; ## 1 0 => 1
  
  my $out = $nn->run_get_winner([1,1]) ;
  print "1 1 => @$out\n" ; ## 1 1 => 0
  
  ## or just iterate through the @set:
  for (my $i = 0 ; $i < @set ; $i+=2) {
    my $out = $nn->run_get_winner($set[$i]) ;
    print "@{$set[$i]}) => @$out\n" ;
  }

=head1 METHODS

=head2 new ( FILE , @OUTPUT_TYPES , ERROR_OK , IN_SIZE , OUT_SIZE , @HIDDEN_LAYERS , %CONF )

lib/AI/NNEasy.pm  view on Meta::CPAN

  'xor.nne' , ## file to save the NN.
  [0,1] ,     ## Output types of the NN.
  0.1 ,       ## Maximal error for output.
  2 ,         ## Number of inputs.
  1 ,         ## Number of outputs.
  [3] ,       ## Hidden layers. (this is setting 1 hidden layer with 3 nodes).
  {random_connections=>0 , networktype=>'feedforward' , random_weights=>1 , learning_algorithm=>'backprop' , learning_rate=>0.1 , bias=>1} ,
  ) ;

And a simple example that will create a NN equal to the one above:

lib/AI/NNEasy.pm  view on Meta::CPAN


  $nn->learn( [0,1] , [1] , 10 ) ;

=back

=head2 learn_set (@SET , OK_OUTPUTS , LIMIT , VERBOSE)

Learn a set of inputs until it gets the right error for the outputs.

=over 4

=item @SET

lib/AI/NNEasy.pm  view on Meta::CPAN


If TRUE, turns verbose mode ON when learning.

=back

=head2 get_set_error (@SET , OK_OUTPUTS)

Get the actual error of a set in the NN. If the returned error is bigger than
I<ERROR_OK> defined in I<new()>, you should learn or relearn the set.

=head2 run (@INPUT)

Run an input and return the output calculated by the NN based on what the NN has already learned.

lib/AI/NNEasy.pm  view on Meta::CPAN


Some functions of this module have I<Inline> versions written in C.

I have made a C version only for the functions that are heavily called, like:

  AI::NNEasy::_learn_set_get_output_error

  AI::NNEasy::NN::tanh

  AI::NNEasy::NN::feedforward::run
  

lib/AI/NNEasy.pm  view on Meta::CPAN

let's say 0.078875.

What is hard in a NN is to find these I<weights>. By default L<AI::NNEasy> uses
I<backprop> as the learning algorithm. With I<backprop> it passes the inputs through
the Neural Network and adjusts the I<weights> using random numbers until we find
a set of I<weights> that gives us the right output.

The secret of a NN is the number of hidden layers and nodes/neurons for each layer.
Basically the best way to define the hidden layers is 1 layer of (INPUT_NODES+OUTPUT_NODES).
So, a network with 2 input nodes and 1 output node should have 3 nodes in its hidden layer.
This definition exists because the number of inputs defines the maximal variability of

lib/AI/NNEasy.pm  view on Meta::CPAN


Actually this is not the real explanation, but it is the easiest way to understand that
you need to have a number of nodes/neurons in the hidden layer that can give the
right output for your problem.

Another important step of a NN is the learning phase, where we take a set of inputs
and pass them through the NN until we have the right output. This process basically
will adjust the nodes' I<weights> until we have an output near the real output that we want.

Another important concept is that the inputs and outputs in the NN should be from 0 to 1.
So, you can define sets like:

  0 0      => 0
  0 0.5    => 0.5
  0.5 0.5  => 1
  1 0.5    => 0
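
As a hypothetical helper for that 0-to-1 scaling (plain min/max normalization; this is not part of AI::NNEasy itself):

  sub normalize {
    my ($min, $max, @values) = @_ ;
    return map { ($_ - $min) / ($max - $min) } @values ;  ## assumes $max > $min
  }

  my @inputs = normalize(0, 10, 0, 5, 10, 7.5) ;  ## gives 0, 0.5, 1, 0.75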

 view all matches for this distribution


AI-NNFlex

 view release on metacpan or  search on metacpan

examples/add.pl  view on Meta::CPAN

use AI::NNFlex::Backprop;
use AI::NNFlex::Dataset;


# train the network to do addition. Adapted from code posted to perlmonks
# by tlpriest on 13/05/05

examples/add.pl  view on Meta::CPAN



$network->init();

# Taken from Mesh ex_add.pl
my $dataset = AI::NNFlex::Dataset->new([
[ 1,   1   ], [ 2    ],
[ 1,   2   ], [ 3    ],
[ 2,   2   ], [ 4    ],
[ 20,  20  ], [ 40   ],
[ 10,  10  ], [ 20   ],

examples/add.pl  view on Meta::CPAN

]);

my $err = 10;
# Stop after 4096 epochs -- don't want to wait more than that
for ( my $i = 0; ($err > 0.0001) && ($i < 4096); $i++ ) {
    $err = $dataset->learn($network);
    print "Epoch = $i error = $err\n";
}

foreach (@{$dataset->run($network)})
{
    foreach (@$_){print $_}
    print "\n";    
}
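
The construction of $network happens before this excerpt; a plausible setup, modeled on other AI::NNFlex examples (the parameter values here are assumptions), followed by the $network->init() call visible above, would be:

    use AI::NNFlex::Backprop;

    my $network = AI::NNFlex::Backprop->new(
        learningrate => 0.2,
        bias         => 1,
        momentum     => 0.6);

    $network->add_layer(nodes => 2, activationfunction => 'linear');  # input
    $network->add_layer(nodes => 2, activationfunction => 'tanh');    # hidden
    $network->add_layer(nodes => 1, activationfunction => 'linear');  # output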

 view all matches for this distribution


AI-NNVMCAPI

 view release on metacpan or  search on metacpan

nnvm.i  view on Meta::CPAN

typedef NNSymbol *SymbolHandle;
/*! \brief handle to Graph */
typedef NNGraph *GraphHandle;
/*!
 * \brief Set the last error message needed by C API
 * \param msg The error message to set.
 */
void NNAPISetLastError(const char* msg);
/*!
 * \brief return str message of the last error
 *  all function in this file will return 0 when success

nnvm.i  view on Meta::CPAN

/*!
 * \brief Set string attribute from symbol.
 *  NOTE: Setting an attribute on a symbol can affect the semantics (mutable/immutable) of the symbolic graph.
 *
 *  Safe recommendation: use an immutable graph
 *  - Only allow setting attributes during creation of a new symbol as an optional parameter
 *
 *  Mutable graph (be careful about the semantics):
 *  - Allow setting attrs at any point.
 *  - Mutating an attribute of a node shared by two graphs can cause confusion for users.
 *
 * \param symbol the source symbol
 * \param num_param Number of parameters to set.
 * \param keys The keys of the attribute
 * \param values The value to be set
 * \return 0 when success, -1 when failure happens
 */
int NNSymbolSetAttrs(SymbolHandle in,
                              nn_uint num_param,
                              const char** keys,

 view all matches for this distribution


AI-NaiveBayes

 view release on metacpan or  search on metacpan

lib/AI/NaiveBayes.pm  view on Meta::CPAN


=head1 SYNOPSIS

    # AI::NaiveBayes objects are created by AI::NaiveBayes::Learner
    # but for a quick start you can use the 'train' class method
    # that is a shortcut using default AI::NaiveBayes::Learner settings

    my $classifier = AI::NaiveBayes->train( 
        {
            attributes => {
                sheep => 1, very => 1,  valuable => 1, farming => 1

lib/AI/NaiveBayes.pm  view on Meta::CPAN

classifier from training data.

=item train( LIST of HASHREFS )

Shortcut for creating a trained classifier using L<AI::NaiveBayes::Learner> default
settings. 
Arguments are passed to the C<add_example> method of the L<AI::NaiveBayes::Learner>
object one by one.

=item classify( HASHREF )
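
Classifies a feature vector given as a hash reference of C<< feature => weight >> pairs and returns an L<AI::NaiveBayes::Classification> result object. A minimal sketch, reusing attribute names from the synopsis above (treat the details as assumptions):

    my $result = $classifier->classify( { sheep => 1, farming => 1 } );
    my $best   = $result->best_category;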

 view all matches for this distribution


AI-NaiveBayes1

 view release on metacpan or  search on metacpan

NaiveBayes1.pm  view on Meta::CPAN

		smoothing => {},
		attribute_type => {},
	       }, $package;
}

sub set_real {
    my ($self, @attr) = @_;
    foreach my $a (@attr) { $self->{attribute_type}{$a} = 'real' }
}

sub import_from_YAML {

NaiveBayes1.pm  view on Meta::CPAN


to provide a count for unseen data.  The count is taken into
consideration in training and prediction, when any unseen attribute
values are observed.  Zero probabilities can be prevented in this way.
A count other than 0.5 can be provided, but if it is <=0 it will be
set to 0.5.  The method is similar to add-one smoothing.  A special
attribute value '*' is used for all unseen data. 

=head1 METHODS

=head2 Constructor Methods

NaiveBayes1.pm  view on Meta::CPAN


=item drop_attributes(@attributes)

Delete attributes after adding instances.

=item set_real(list_of_attributes)

Declares a list of attributes to be real-valued.  During training,
their conditional probabilities will be modeled with Gaussian (normal)
distributions. 

NaiveBayes1.pm  view on Meta::CPAN

 # sunny,75,70,TRUE,yes
 # overcast,72,90,TRUE,yes
 # overcast,81,75,FALSE,yes
 # rainy,71,91,TRUE,no
 
 $nb->set_real('temperature', 'humidity');
 
 $nb->add_instance(attributes=>{outlook=>'sunny',temperature=>85,humidity=>85,windy=>'FALSE'},label=>'play=no');
 $nb->add_instance(attributes=>{outlook=>'sunny',temperature=>80,humidity=>90,windy=>'TRUE'},label=>'play=no');
 $nb->add_instance(attributes=>{outlook=>'overcast',temperature=>83,humidity=>86,windy=>'FALSE'},label=>'play=yes');
 $nb->add_instance(attributes=>{outlook=>'rainy',temperature=>70,humidity=>96,windy=>'FALSE'},label=>'play=yes');
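
Completing the picture, training and prediction follow the module's synopsis (the attribute values below are assumptions):

 $nb->train;

 my $ph = $nb->predict(attributes=>{outlook=>'sunny',temperature=>75,humidity=>70,windy=>'TRUE'});
 print "play=yes: $ph->{'play=yes'}\n";
 print "play=no:  $ph->{'play=no'}\n";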

 view all matches for this distribution


AI-Nerl

 view release on metacpan or  search on metacpan

lib/AI/Nerl.pm  view on Meta::CPAN


our $VERSION = .03;

#A Nerl is a mechanism to build neural networks?
#Give it training,test, and cv data?
#it settles on a learning rate and stuff?
#or maybe it's also a language for guided training?
#or maybe a visual gui thing?
#Not exactly sure. Maybe I'm tinkering with forces better left alone.
#That's a great excuse for failing horribly.

 view all matches for this distribution


AI-NeuralNet-BackProp

 view release on metacpan or  search on metacpan

BackProp.pm  view on Meta::CPAN

		shift if(substr($_[0],0,4) eq 'AI::'); 
		my ($fa,$fb)=(shift,shift);
		sprintf("%.3f",((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100);
	}
	
	# This sub will take an array ref of a data set, which it expects in this format:
	#   my @data_set = (	[ ...inputs... ], [ ...outputs ... ],
	#				   				   ... rows ...
	#				   );
	#
	# This sub returns the percentage of 'forgetfulness' when the net learns all the
	# data in the set in order. Usage:
	#
	#	 learn_set(\@data,[ options ]);
	#
	# Options are options in hash form. They can be of any form that $net->learn takes.
	#
	# It returns a percentage string.
	#
	sub learn_set {
		my $self	=	shift if(substr($_[0],0,4) eq 'AI::'); 
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2-1;
		my $inc		=	$args{inc};

BackProp.pm  view on Meta::CPAN

			$res=$data->[$row]->[0]-$self->run($data->[$row-1])->[0];
		}
		return $res;
	}
	
	# This sub will take an array ref of a data set, which it expects in this format:
	#   my @data_set = (	[ ...inputs... ], [ ...outputs ... ],
	#				   				   ... rows ...
	#				   );
	#
	# This sub returns the percentage of 'forgetfulness' when the net learns all the
	# data in the set in RANDOM order. Usage:
	#
	#	 learn_set_rand(\@data,[ options ]);
	#
	# Options are options in hash form. They can be of any form that $net->learn takes.
	#
	# It returns a true value.
	#
	sub learn_set_rand {
		my $self	=	shift if(substr($_[0],0,4) eq 'AI::'); 
		my $data	=	shift;
		my %args	=	@_;
		my $len		=	$#{$data}/2-1;
		my $inc		=	$args{inc};

BackProp.pm  view on Meta::CPAN

		my $out		=	$self->{OUT};
		my $flag	=	$self->{FLAG};
		my $x		=	0; 
		my $y		=	0;
		
		# Reset map and run synapse counters.
		$self->{RUN}->{REGISTRATION} = $self->{MAP}->{REGISTRATION} = 0;
		
		AI::NeuralNet::BackProp::out2 "There will be $size neurons in this network group, with a division value of $div.\n";
		#print "There will be $size neurons in this network group, with a division value of $div.\n";
		

BackProp.pm  view on Meta::CPAN

			}
			
			# Inner loop connects every neuron in this 'layer' to one input of every neuron in
			# the next 'layer'. Remember, layers only exist in terms of where the connections
			# are divided. For example, if a person requested 2 layers and 3 neurons per layer,
			# then there would be 6 neurons in the {NET}->[] list, and $div would be set to
			# 3. So we would loop over the list, and every 3 neurons we would connect each of those 3
			# neurons to one input of every neuron in the next set of 3 neurons. Of course, this
			# is an example. 3 and 2 are set by the new() constructor.
			
			# Flag values:
			# 0 - (default) - 
			# 	My feed-foward style: Each neuron in layer X is connected to one input of every
			#	neuron in layer Y. The best and most proven flag style.

BackProp.pm  view on Meta::CPAN

			for my $a (0..$divide-1) {
				$value += $self->{OUTPUT}->[($_*$divide)+$a]->{VALUE};
				AI::NeuralNet::BackProp::out1 "\$a is $a, index is ".(($_*$divide)+$a).", value is $self->{OUTPUT}->[($_*$divide)+$a]->{VALUE}\n";
			}
			$map[$_]	=	AI::NeuralNet::BackProp->intr($value/$divide);
			AI::NeuralNet::BackProp::out1 "Map position $_ is $map[$_] in @{[\@map]} with self set to $self.\n";
			$self->{OUTPUT}->[$_]->{FIRED}	=	0;
		}
		my $ret=\@map;
		return $self->{PARENT}->_range($ret);
	}

BackProp.pm  view on Meta::CPAN

	my @inputs = ( 0,0,1,1,1 );
	my @outputs = ( 1,0,1,0,1 );
	
	print $net->learn(\@inputs, \@outputs),"\n";

	# Create a data set to learn
	my @set = (
		[ 2,2,3,4,1 ], [ 1,1,1,1,1 ],
		[ 1,1,1,1,1 ], [ 0,0,0,0,0 ],
		[ 1,1,1,0,0 ], [ 0,0,0,1,1 ]	
	);
	
	# Demo learn_set()
	my $f = $net->learn_set(\@set);
	print "Forgetfulness: $f unit\n";
	
	# Crunch a bunch of strings and return array refs
	my $phrase1 = $net->crunch("I love neural networks!");
	my $phrase2 = $net->crunch("Jay Lenno is wierd.");
	my $phrase3 = $net->crunch("The rain in spain...");
	my $phrase4 = $net->crunch("Tired of word crunching yet?");

	# Make a data set from the array refs
	my @phrases = (
		$phrase1, $phrase2,
		$phrase3, $phrase4
	);

	# Learn the data set	
	$net->learn_set(\@phrases);
	
	# Run a test phrase through the network
	my $test_phrase = $net->crunch("I love neural networking!");
	my $result = $net->run($test_phrase);
	

BackProp.pm  view on Meta::CPAN

and $outputs is optional (in this example, $layers is 2, $size is 3, and $outputs is 1).

$layers specifies the number of layers, including the input
and the output layer, to use in each neural grouping. A new
neural grouping is created for each pattern learned. Layers
is typically set to 2. Each layer has $size neurons in it.
Each neuron's output is connected to one input of every neuron
in the layer below it. 
	
This diagram illustrates a simple network, created with a call
to "new AI::NeuralNet::BackProp(2,2,2)" (2 layers, 2 neurons/layer, 2 outputs).

BackProp.pm  view on Meta::CPAN

$maximum_iterations is the maximum numbers of iteration the loop should do.
It defaults to 1024.  Set it to 0 if you never want the loop to quit before
the pattern is perfectly learned.

$maximum_allowable_percentage_of_error is the maximum allowable error to have. If 
this is set, then learn() will return when the perecentage difference between the
actual results and desired results falls below $maximum_allowable_percentage_of_error.
If you do not include 'error', or $maximum_allowable_percentage_of_error is set to -1,
then learn() will not return until it gets an exact match for the desired result OR it
reaches $maximum_iterations.


=item $net->learn_set(\@set, [ options ]);

UPDATE: Inputs and outputs in the dataset can now be strings. See information on auto-crunching
in learn()

This takes the same options as learn() and allows you to specify a set to learn, rather
than individual patterns. A dataset is an array reference with at least two elements in the
array, each element being another array reference (or now, a scalar string). For each pattern to
learn, you must specify an input array ref, and an output array ref as the next element. Example:
	
	my @set = (
		# inputs        outputs
		[ 1,2,3,4 ],  [ 1,3,5,6 ],
		[ 0,2,5,6 ],  [ 0,2,1,2 ]
	);


See the paragraph on measuring forgetfulness, below. There are 
two learn_set()-specific option tags available:

	flag     =>  $flag
	pattern  =>  $row

If "flag" is set to some TRUE value, as in "flag => 1" in the hash of options, or if the option "flag"
is not set, then it will return a percentage represting the amount of forgetfullness. Otherwise,
learn_set() will return an integer specifying the amount of forgetfulness when all the patterns 
are learned. 

If "pattern" is set, then learn_set() will use that pattern in the data set to measure forgetfulness by.
If "pattern" is omitted, it defaults to the first pattern in the set. Example:

	my @set = (
		[ 0,1,0,1 ],  [ 0 ],
		[ 0,0,1,0 ],  [ 1 ],
		[ 1,1,0,1 ],  [ 2 ],  #  <---
		[ 0,1,1,0 ],  [ 3 ]
	);
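
Continuing the example above, measuring forgetfulness against the marked pattern would look like this (whether the index counts input/output pairs or raw array elements is not spelled out in this excerpt, so the value 2 is an assumption):

	my $f = $net->learn_set(\@set, pattern => 2);
	print "Forgetfulness: $f%\n";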

BackProp.pm  view on Meta::CPAN


Now why the heck would anyone want to measure forgetfulness, you ask? Maybe you wonder how I 
even measure that. Well, it is not a vital value that you have to know. I just put in a 
"forgetfulness measure" one day because I thought it would be neat to know. 

How the module measures forgetfulness is this: First, it learns all the patterns in the set provided,
then it will run the very first pattern (or whatever pattern is specified by the "pattern" option)
in the set after it has finished learning. It will compare the run() output with the desired output
as specified in the dataset. In a perfect world, the two should match exactly. What we measure is
how much they don't match, thus the amount of forgetfulness the network has.

NOTE: In version 0.77 percentages were disabled because of a bug. Percentages are now enabled.

Example (from examples/ex_dow.pl):

BackProp.pm  view on Meta::CPAN

		[	5, 	276, 261,  196, 	19.5, 19.6, 18.0, 	2630, 2611, 2582], 	[	2638  ],
		[	6, 	287, 276,  207, 	19.5, 19.5, 18.0, 	2637, 2630, 2589], 	[	2635  ],
		[	7, 	296, 287,  212, 	19.3, 19.5, 17.8, 	2640, 2637, 2592], 	[	2641  ] 		
	);
	
	# Learn the set
	my $f = $net->learn_set(\@data, 
					  inc	=>	0.1,	
					  max	=>	500,
					  p		=>	1
					 );
			
	# Print it 
	print "Forgetfullness: $f%";

    
This is a snippet from the example script examples/ex_dow.pl, which demonstrates DOW average
prediction for the next month. A simpler set definition would be as such:

	my @data = (
		[ 0,1 ], [ 1 ],
		[ 1,0 ], [ 0 ]
	);
	
	$net->learn_set(\@data);
	
Same effect as above, but not the same data (obviously).

=item $net->learn_set_rand(\@set, [ options ]);

UPDATE: Inputs and outputs in the dataset can now be strings. See information on auto-crunching
in learn()

This takes the same options as learn() and allows you to specify a set to learn, rather
than individual patterns. 

learn_set_rand() differs from learn_set() in that it learns the patterns in a random order,
each pattern once, rather than in the order that they are in the array. This returns a true
value (1) instead of a forgetfulness factor.

Example:

	my @data = (
		[ 0,1 ], [ 1 ],
		[ 1,0 ], [ 0 ]
	);
	
	$net->learn_set_rand(\@data);
	


=item $net->run($input_map_ref);

BackProp.pm  view on Meta::CPAN

	


=item $net->range();

This allows you to limit the possible outputs to a specific set of values. There are several 
ways you can specify the set of values to limit the output to. Each method is shown below. 
When called without any arguments, it will disable output range limits. You will need to re-learn
any data previously learned after disabling ranging, as disabling range invalidates the current
weight matrix in the network.

range() automatically scales the network's outputs to fit inside the size of range you allow, and, therefore,
it keeps track of the maximum output it can expect to scale. Therefore, you will need to learn() 
the whole data set again after calling range() on a network.

Subsequent calls to range() invalidate any previous calls to range()

NOTE: It is recommended that you call range() before you call learn(), or else you will get unexpected
results from any run() call after range().

BackProp.pm  view on Meta::CPAN



=item $net->save($filename);

This will save the complete state of the network to disk, including all weights and any
words crunched with crunch(). Also saves any output ranges set with range().

This has now been modified to use a simple flat-file text storage format, and it does not
depend on any external modules now.


BackProp.pm  view on Meta::CPAN

                           
	use AI::NeuralNet::BackProp;
	my $net = AI::NeuralNet::BackProp->new(2,3);
	
	for (0..3) {		# Note: The four learn() statements below could 
						# be replaced with learn_set() to do the same thing,
						# but use this form here for clarity.

		$net->learn($net->crunch("I love chips."),  $net->crunch("That's Junk Food!"));
		$net->learn($net->crunch("I love apples."), $net->crunch("Good, Healthy Food."));
		$net->learn($net->crunch("I love pop."),    $net->crunch("That's Junk Food!"));

BackProp.pm  view on Meta::CPAN



=item $net->col_width($width);

This is useful for formatting the debugging output of Level 4 if you are learning simple
bitmaps. This will set the debugger to automatically insert a line break after that many
elements in the map output when dumping the currently run map during a learn loop.

It will return the current width when called with a 0 or undef value.


=item $net->random($rand);

This will set the randomness factor for the network. Default is 0.001. When called 
with no arguments, or an undef value, it will return the current randomness value. When
called with a 0 value, it will disable randomness in the network. See NOTES on learning 
a 0 value in the input map with randomness disabled.


BackProp.pm  view on Meta::CPAN

and 0.42, where no 0s were allowed because the learning would never finish learning completely
with a 0 in the input.

Yet with the allowance of 0s, it requires one of two factors to learn correctly. Either you
must enable randomness with $net->random(0.0001) (Any values work [other than 0], see random() ), 
or you must set an error-minimum with the 'error => 5' option (you can use some other error value 
as well). 

When randomness is enabled (that is, when you call random() with a value other than 0), it interjects
a bit of randomness into the output of every neuron in the network, except for the input and output
neurons. The randomness is interjected with rand()*$rand, where $rand is the value that was

BackProp.pm  view on Meta::CPAN

product stays a 0. Yet when a weight is multiplied by 0.00001, eventually with enough weight, it will
be able to learn. With a 0 value instead of 0.00001 or whatever, then it would never be able
to add enough weight to get anything other than a 0. 

The second option to allow for 0s is to enable a maximum error with the 'error' option in
learn() , learn_set() , and learn_set_rand() . This allows the network to not worry about
learning an output perfectly. 

For accuracy reasons, it is recommended that you work with 0s using the random() method.

If anyone has any thoughts/arguments/suggestions for using 0s in the network, let me know

BackProp.pm  view on Meta::CPAN

from http://www.josiah.countystart.com/modules/AI/cgi-bin/rec.pl


=head1 MAILING LIST

A mailing list has been set up for AI::NeuralNet::BackProp for discussion of AI and
neural-net-related topics as they pertain to AI::NeuralNet::BackProp. I will also
announce in the group each time a new release of AI::NeuralNet::BackProp is available.

The list address is at: ai-neuralnet-backprop@egroups.com 

 view all matches for this distribution


AI-NeuralNet-FastSOM

 view release on metacpan or  search on metacpan

FastSOM.h  view on Meta::CPAN

 * 
 * Each of Rect, Map, Array, and Vector contains a member 'ref' which is
 * an SV* pointing to an RV. The RV can be returned directly to perl-land
 * after being blessed into its respective class.
 * 
 * The RV references an SV containing an IV. The IV is set to the base
 * address of its component structure. This is so the class code can know
 * which instance of the class is being referred to on callback.
 * 
 * The SV has its initial reference count set to one,
 * representing its parent's ownership. If a parent dies or a perl-land
 * reference is taken of any component, its reference count should
 * be adjusted accordingly.
 * 
 * When the count reaches zero perl will call the classes DESTROY method,

 view all matches for this distribution


AI-NeuralNet-Hopfield

 view release on metacpan or  search on metacpan

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

	my @pattern = @_;
	my $result  = Math::SparseMatrix->new(1, $cols);

	for (my $i = 0; $i < ($#pattern + 1); $i++) {
		if ($pattern[$i] =~ m/true/ig) {
			$result->set(1, ($i +1 ), 1);
		} else {
			$result->set(1, ($i + 1), -1);
		}
	}
	return $result;
}

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

	my $inverse = Math::SparseMatrix->new($cols, $rows);
		
	for (my $r = 1; $r <= $rows; $r++) {
		for (my $c = 1; $c <= $cols; $c++) {
			my $value = $matrix->get($r, $c);
			$inverse->set($c, $r, $value);
		}
	}
	return $inverse;
}

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		for(my $result_col = 1; $result_col <= $b_cols; $result_col++) {
			my $value = 0;
			for (my $i = 1; $i <= $a_cols; $i++) {
				$value += ($matrix_a->get($result_row, $i)) * ($matrix_b->get($i, $result_col));
			}
			$result->set($result_row, $result_col, $value);
		}
	}
	return $result;
}

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

	}
	
	my $result = Math::SparseMatrix->new ($size, $size);

	for (my $i = 1; $i <= $size; $i++) {
		$result->set($i, $i, 1);
	}
	return $result;
}

sub subtract() {

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

			my $value = ( $matrix_a->get($result_row, $result_col) ) - ( $matrix_b->get($result_row, $result_col));
			
			if ($value == 0) {
				$value += 2;
			}			
			$result->set($result_row, $result_col, $value);
		}
	}
	return $result;
}

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

	my $result = Math::SparseMatrix->new($a_rows, $a_cols);

	for (my $result_row = 1; $result_row <= $a_rows; $result_row++) {
		for (my $result_col = 1; $result_col <= $a_cols; $result_col++) {
			my $value = $matrix_b->get($result_row, $result_col);			
			$result->set($result_row, $result_col, $matrix_a->get($result_row, $result_col) + $value  )
		}
	}
	return $result;
}

lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN


	my $new_matrix = Math::SparseMatrix->new($matrix_rows, 1);

	for (my $row = 1; $row <= $matrix_rows; $row++) {
		my $value = $matrix->get($row, $col);
		$new_matrix->set($row, 1, $value);
	}
	return $new_matrix;
}
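
For reference, the only Math::SparseMatrix calls these helpers rely on are new(), set() and get(), all 1-indexed; a minimal sketch:

	use Math::SparseMatrix;

	my $m = Math::SparseMatrix->new(2, 2);   # 2x2, implicitly all zeros
	$m->set(1, 1, 1);                        # rows and columns start at 1
	$m->set(2, 2, -1);
	print $m->get(1, 1), " ", $m->get(1, 2), "\n";   # prints "1 0"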

sub print_matrix() {

 view all matches for this distribution


AI-NeuralNet-Kohonen

 view release on metacpan or  search on metacpan

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

	$self->{epochs}				= 1 if $self->{epochs}<1;
	$self->{time_constant}		= $self->{epochs} / log($self->{map_dim_a}) unless $self->{time_constant};	# to base 10?
	$self->{learning_rate}		= 0.5 unless $self->{learning_rate};
	$self->{l}					= $self->{learning_rate};
	if (not $self->{weight_dim}){
		cluck "{weight_dim} not set";
		return undef;
	}
	$self->randomise_map;
	return $self;
}

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

See L<AI::NeuralNet::Kohonen::Node/CONSTRUCTOR new>.

=cut

sub randomise_map { my $self=shift;
	confess "{weight_dim} not set" unless $self->{weight_dim};
	confess "{map_dim_x} not set" unless $self->{map_dim_x};
	confess "{map_dim_y} not set" unless $self->{map_dim_y};
	for my $x (0..$self->{map_dim_x}){
		$self->{map}->[$x] = [];
		for my $y (0..$self->{map_dim_y}){
			$self->{map}->[$x]->[$y] = new AI::NeuralNet::Kohonen::Node(
				dim => $self->{weight_dim},

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

}


=head1 METHOD clear_map

As L<METHOD randomise_map>, but sets all C<map> nodes to
either the value supplied as the only parameter or, when no
true value is supplied, the C<missing_mask> value.
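
For example, setting every node's weights to a constant:

	$net->clear_map(1);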

=cut

sub clear_map { my $self=shift;
	confess "{weight_dim} not set" unless $self->{weight_dim};
	confess "{map_dim_x} not set" unless $self->{map_dim_x};
	confess "{map_dim_y} not set" unless $self->{map_dim_y};
	my $val = shift || $self->{missing_mask};
	my $w = [];
	foreach (0..$self->{weight_dim}){
		push @$w, $val;
	}

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

}


=head1 PRIVATE METHOD _select_target

Return a random target from the training set in the C<input> field,
unless the C<targeting> field is defined, in which case the targets are
iterated over.

=cut

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

Adds to the C<input> field an input vector in SOM_PAK-format
whitespace-delimited ASCII.

Returns C<undef> on failure to add an item (perhaps because
the data passed was a comment, or the C<weight_dim> flag was
not set); a true value on success.
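
For illustration, a SOM_PAK-style vector line is just whitespace-separated components, one per weight dimension, optionally followed by a label (the values here are hypothetical):

	$net->_add_input_from_str('13.57 12.61 0.29 class_a');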

=cut

sub _add_input_from_str { my ($self) = (shift);
	$_ = shift;

 view all matches for this distribution

