AI-PSO


MANIFEST

README
t/PSO.t
lib/AI/PSO.pm
extradoc/ReactivePower-PSO-wks.pdf
examples/NeuralNet/pso_ann.pl
examples/NeuralNet/pso.dat
examples/NeuralNet/Makefile
examples/NeuralNet/NeuralNet.h
examples/NeuralNet/NeuralNet.cpp
examples/NeuralNet/main.cpp
META.yml                                 Module meta-data (added by MakeMaker)

MPL-1.1.txt

     1.2. "Contributor Version" means the combination of the Original
     Code, prior Modifications used by a Contributor, and the Modifications
     made by that particular Contributor.

     1.3. "Covered Code" means the Original Code or Modifications or the
     combination of the Original Code and Modifications, in each case
     including portions thereof.

     1.4. "Electronic Distribution Mechanism" means a mechanism generally
     accepted in the software development community for the electronic
     transfer of data.

     1.5. "Executable" means Covered Code in any form other than Source
     Code.

     1.6. "Initial Developer" means the individual or entity identified
     as the Initial Developer in the Source Code notice required by Exhibit
     A.

     1.7. "Larger Work" means a work which combines Covered Code or
     portions thereof with code not governed by the terms of this License.

examples/NeuralNet/main.cpp


    if(argc > 1) {
        annConfigFile = string(argv[1]);
    }
    if(argc > 2) {
        annDataFile = string(argv[2]);
    }

    int numInputs, numHidden;
    ifstream ifs;
    ifs.open(annConfigFile.data());
    if(!ifs.is_open()) {
        cerr << "Error opening neural network configuration file" << endl;
        return 1;   // bail out rather than reading from an unopened stream
    }
    ifs >> numInputs >> numHidden;

    string xferFunc;
    ifs >> xferFunc;

    double *dataForNet = new double[numInputs];

    ifstream ids;
    ids.open(annDataFile.data());
    if(!ids.is_open()) {
        cerr << "Error opening neural network data file" << endl;
        delete [] dataForNet;   // free the input buffer before bailing out
        return 1;
    }
    for(int i = 0; i < numInputs; i++) {
        ids >> dataForNet[i];
    }
    ids.close();


    NeuralNet *m_ann = new NeuralNet(numInputs, numHidden, xferFunc.c_str());

    double weight;
    for(int c = 0; c < numHidden; c++) {
        for(int j = 0; j < numInputs; j++) {
            ifs >> weight;
            m_ann->setHiddenWeight(c, j, weight);
        }
    }
    for(int k = 0; k < numHidden; k++) {
        ifs >> weight;
        m_ann->setOutputWeight(k, weight);
    }
    
    for(int d = 0; d < numInputs; d++) {
        m_ann->setInput(d, dataForNet[d]);
    }

    delete [] dataForNet;

    ifs.close();
    if(ifs.is_open()) {
        cerr << "Error closing neural network configuration file" << endl;
    }

    cout << m_ann->value() << endl;

    delete m_ann;
}

examples/NeuralNet/pso_ann.pl

	print ANN "$inputs $hidden\n";
	print ANN "$func\n";
	foreach my $weight (@weights) {
		print ANN "$weight ";
	}
	print ANN "\n";
	close(ANN);
}
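
Given the prints in writeAnnConfig() above, the configuration file consumed by main.cpp has this shape (values are illustrative only): the input and hidden-node counts on line one, the transfer function on line two, and the space-separated weights (numHidden x numInputs hidden weights followed by numHidden output weights) on line three.

	3 2
	sigmoid
	0.12 -0.87 0.45 0.33 -0.92 0.61 0.24 0.78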

sub runANN($$) {
	my ($configFile, $dataFile) = @_;
	my $networkValue = `ann_compute $configFile $dataFile`;
	chomp($networkValue);
	return $networkValue;
}
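
The two helpers above are typically tied together by the fitness callback handed to pso_register_fitness_function(): write the candidate weights out, run the network, and score its output. A minimal sketch, where $annConfigFile, $annDataFile, $numInputs, $numHidden, $xferFunc, and $expectedValue are assumed to be defined earlier in the script (names here are illustrative, not necessarily those used in the full example):

sub fitness {
	my (@weights) = @_;
	# Hypothetical wiring: emit the candidate weights, evaluate the net,
	# and map the absolute error onto (0, 1] so that zero error scores 1.
	writeAnnConfig($annConfigFile, $numInputs, $numHidden, $xferFunc, @weights);
	my $netValue = runANN($annConfigFile, $annDataFile);
	return 1 / (1 + abs($expectedValue - $netValue));
}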

lib/AI/PSO.pm


#----------  END  EXPORTED SUBROUTINES ----------



#--------- BEGIN INTERNAL SUBROUTINES -----------

#
# init
#   - initializes global variables
#   - initializes particle data structures
#
sub init() {
	if($psoRandomRange =~ m/null/) {
		$useModifiedAlgorithm = 1;
	} else {
		$useModifiedAlgorithm = 0;
	}
	&initialize_particles();
}

#
# initialize_particles
#    - sets up internal data structures
#    - initializes particle positions and velocities with an element of randomness
#
sub initialize_particles() {
    for(my $p = 0; $p < $numParticles; $p++) {
        $particles[$p]           = {};  # each particle is a hash of arrays with the array sizes being the dimensionality of the problem space
        $particles[$p]{nextPos}  = [];  # nextPos is the array of positions to move to on the next positional update
        $particles[$p]{bestPos}  = [];  # bestPos is the position that has yielded the best fitness for this particle (it is updated whenever a better fitness is found)
        $particles[$p]{currPos}  = [];  # currPos is the current position of this particle in the problem space
        $particles[$p]{velocity} = [];  # velocity ... come on ...

lib/AI/PSO.pm

    }
}
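
The loop body elided above fills those arrays in. A sketch of the idea, using the hash fields declared above ($dimensions, $deltaMin, and $deltaMax stand in for the module's configuration and are not necessarily its actual variable names):

# Sketch only: seed each dimension with a random position and velocity.
for(my $d = 0; $d < $dimensions; $d++) {
	my $pos = $deltaMin + rand($deltaMax - $deltaMin);
	$particles[$p]{currPos}[$d]  = $pos;   # start here...
	$particles[$p]{bestPos}[$d]  = $pos;   # ...which is also the best position seen so far
	$particles[$p]{nextPos}[$d]  = $pos;
	$particles[$p]{velocity}[$d] = rand(1.0);
}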



#
# initialize_neighbors
# NOTE: I made this a separate subroutine so that different topologies of neighbors can be created and used instead of this.
# NOTE: This subroutine is currently not used because we access neighbors by index into the particle array rather than storing references to them
# 
#  - adds a neighbor array to the particle hash data structure
#  - sets the neighbor based on the default neighbor hash function
#
sub initialize_neighbors() {
    for(my $p = 0; $p < $numParticles; $p++) {
        for(my $n = 0; $n < $numNeighbors; $n++) {
            $particles[$p]{neighbor}[$n] = $particles[&get_index_of_neighbor($p, $n)];
        }
    }
}
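
get_index_of_neighbor() referenced above maps a (particle, neighbor) pair to an index into @particles. A plausible sketch of such a neighbor hash function (a ring topology that wraps modulo the swarm size; the module's actual formula may differ):

sub get_index_of_neighbor_sketch {
	my ($p, $n) = @_;
	# Neighbor $n of particle $p is the ($n+1)-th particle along the ring,
	# so a particle never counts itself among its own neighbors.
	return ($p + $n + 1) % $numParticles;
}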

lib/AI/PSO.pm

  Sets the particle swarm configuration parameters to use for the search.

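  For instance (a sketch, not the full parameter list; the keys shown
  here mirror variables visible in the module internals above):

    pso_set_params({
        numParticles   => 8,      # swarm size
        numNeighbors   => 3,      # neighborhood size per particle
        psoRandomRange => 4.0,    # 'null' selects the modified algorithm
    });
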
=item pso_register_fitness_function()

  Sets the user-defined fitness function to call.  The fitness function 
  should return a value between 0 and 1.  Users may want to look into 
  the sigmoid function [1 / (1+e^(-x))] and its variants to implement 
  this.  Also, you may want to take a look at either t/PSO.t for the 
  simple test or examples/NeuralNet/pso_ann.pl for an example of 
  how to train a simple 3-layer feed-forward neural network.  (Note 
  that a real training application would have a real dataset with many 
  input-output pairs...pso_ann.pl is a _very_ simple example.  Also note 
  that the neural network example requires g++.  Type 'make run' in the 
  examples/NeuralNet directory to run the example.  Lastly, the 
  neural network C++ code is in a very different coding style.  I did 
  indeed write this, but it was many years ago when I was striving to 
  make my code nicely formatted and good looking :)).
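
  For instance, a common trick is to squash a non-negative error 
  measure through a scaled sigmoid so that zero error maps to a 
  fitness of 1 (a sketch; compute_error() is a placeholder for your 
  own measure):

    sub fitness {
        my (@solution) = @_;
        my $error = compute_error(@solution);  # hypothetical, >= 0
        return 2.0 / (1.0 + exp($error));      # 1 at zero error, -> 0 as error grows
    }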

=item pso_optimize()

  Runs the particle swarm optimization algorithm.  This consists of 


