view release on metacpan or search on metacpan
examples/game_ai.pl view on Meta::CPAN
# printf column layout shared by the header line and display_result().
my $format = "%8s %5s %3s %7s %6s\n";
# Actions the network can choose between; the winning output index of the
# network selects one of these labels.
my @actions = qw/attack run wander hide/;
printf $format, qw/Health Knife Gun Enemies Action/;
# Show the network's decision for a set of canned scenarios.
# Arguments: ($net, health, knife, gun, enemies) where health is
# 0=Poor, 1=Average, 2=Good and knife/gun are 0=No, 1=Yes.
display_result($net,2,1,1,1);
display_result($net,2,0,0,2);
display_result($net,2,0,1,2);
display_result($net,2,0,1,3);
display_result($net,1,1,0,0);
display_result($net,1,0,1,2);
display_result($net,0,1,0,3);
while (1) {
print "Type 'quit' to exit\n";
my $health = prompt("Am I in poor, average, or good health? ", qr/^(?i:[pag])/);
my $knife = prompt("Do I have a knife? ", qr/^(?i:[yn])/);
examples/game_ai.pl view on Meta::CPAN
}
# Ask the user a question until the reply matches the $domain pattern.
# Any reply beginning with 'q' (case-insensitive, e.g. "quit") exits the
# whole program immediately.  Returns the first valid (chomped) reply.
sub prompt
{
    my ($message, $domain) = @_;
    my $reply;
    while (1) {
        print $message;
        chomp($reply = <STDIN>);
        exit if substr(lc $reply, 0, 1) eq 'q';
        last if $reply =~ /$domain/;
    }
    return $reply;
}
# Print one table row showing the inputs fed to the network and the action
# it picked.  @data is (health, knife, gun, enemies); the network's winner()
# index selects the label from the file-level @actions array.
sub display_result
{
    my ($net, @data) = @_;
    my $result = $net->winner(\@data);
    my @health_labels = qw/Poor Average Good/;
    my @yes_no       = qw/No Yes/;
    printf $format,
        $health_labels[ $data[0] ],
        $yes_no[ $data[1] ],        # knife
        $yes_no[ $data[2] ],        # gun
        $data[3],                   # number of enemies
        $actions[$result];
}
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Ollama/Client.pm view on Meta::CPAN
package AI::Ollama::Client 0.05;
use 5.020;
use Moo 2;
use experimental 'signatures';
use MIME::Base64 'encode_base64';
extends 'AI::Ollama::Client::Impl';
=head1 NAME
lib/AI/Ollama/Client.pm view on Meta::CPAN
use AI::Ollama::Client;
my $client = AI::Ollama::Client->new(
server => 'https://example.com/',
);
my $res = $client->someMethod()->get;
say $res;
=head1 METHODS
=head2 C<< checkBlob >>
my $res = $client->checkBlob()->get;
Check to see if a blob exists on the Ollama server which is useful when creating models.
=cut
# Wrap the generated checkBlob call so the returned future resolves to a
# plain boolean: 1 for any 2xx HTTP status, 0 otherwise.
around 'checkBlob' => sub ( $super, $self, %options ) {
    $super->( $self, %options )->then( sub( $res ) {
        my $exists = $res->code =~ /^2\d\d$/ ? 1 : 0;
        return Future->done( $exists );
    });
};
=head2 C<< createBlob >>
my $res = $client->createBlob()->get;
Create a blob from a file. Returns the server file path.
=cut
=head2 C<< generateChatCompletion >>
my $res = $client->generateChatCompletion()->get;
Generate the next message in a chat with a provided model.
Returns a L<< AI::Ollama::GenerateChatCompletionResponse >>.
=cut
=head2 C<< copyModel >>
my $res = $client->copyModel()->get;
Creates a model with another name from an existing model.
=cut
=head2 C<< createModel >>
my $res = $client->createModel()->get;
Create a model from a Modelfile.
Returns a L<< AI::Ollama::CreateModelResponse >>.
=cut
=head2 C<< deleteModel >>
my $res = $client->deleteModel()->get;
Delete a model and its data.
=cut
=head2 C<< generateEmbedding >>
my $res = $client->generateEmbedding()->get;
Generate embeddings from a model.
Returns a L<< AI::Ollama::GenerateEmbeddingResponse >>.
=cut
=head2 C<< generateCompletion >>
use Future::Utils 'repeat';
my $responses = $client->generateCompletion();
repeat {
my ($res) = $responses->shift;
if( $res ) {
my $str = $res->get;
say $str;
}
Future::Mojo->done( defined $res );
} until => sub($done) { $done->get };
Generate a response for a given prompt with a provided model.
Returns a L<< AI::Ollama::GenerateCompletionResponse >>.
=cut
lib/AI/Ollama/Client.pm view on Meta::CPAN
return $super->($self, %options);
};
=head2 C<< pullModel >>
my $res = $client->pullModel(
name => 'llama',
)->get;
Download a model from the ollama library.
lib/AI/Ollama/Client.pm view on Meta::CPAN
=cut
=head2 C<< pushModel >>
my $res = $client->pushModel()->get;
Upload a model to a model library.
Returns a L<< AI::Ollama::PushModelResponse >>.
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/PBDD.pm view on Meta::CPAN
not
makeSet
exists
forall
relProd
restrict
constrain
// variables replacement
createPair
deletePair
replace
lib/AI/PBDD.pm view on Meta::CPAN
kill();
=head1 DESCRIPTION
Binary Decision Diagrams (BDDs) are used for efficient computation of many common problems. This is done by giving a compact representation and a set of efficient operations on boolean functions f: {0,1}^n --> {0,1}.
It turns out that this representation is good enough to solve a huge amount of problems in Artificial Intelligence and other areas of computing such as hardware verification.
This is a Perl interface to the popular BDD package BuDDy. The interface is largely inspired by JBDD, a Java common interface for the two BDD packages BuDDy and CUDD written by Arash Vahidi, which can be found at L<http://javaddlib.sourceforge.net/jb...
PBDD allows you to combine the power of Perl with an efficient BDD package written in highly optimized C.
lib/AI/PBDD.pm view on Meta::CPAN
=over 4
=item B<$bdd = and($bdd1,$bdd2)>
BDD AND operation. The returned result is already referenced.
=item B<$bdd = or($bdd1,$bdd2)>
BDD OR operation. The returned result is already referenced.
=item B<$bdd1new = andTo($bdd1,$bdd2)>
BDD cumulative AND operation. The returned result is already referenced, while $bdd1 is de-referenced.
=item B<$bdd1new = orTo($bdd1,$bdd2)>
BDD cumulative OR operation. The returned result is already referenced, while $bdd1 is de-referenced.
=item B<$bdd = nand($bdd1,$bdd2)>
BDD NAND operation. The returned result is already referenced.
=item B<$bdd = nor($bdd1,$bdd2)>
BDD NOR operation. The returned result is already referenced.
=item B<$bdd = xor($bdd1,$bdd2)>
BDD XOR operation. The returned result is already referenced.
=item B<$bdd = ite($bdd_if,$bdd_then,$bdd_else)>
BDD ITE (If-Then-Else) operation, i.e. C<($bdd_if AND $bdd_then) OR (NOT $bdd_if AND $bdd_else)>. The returned result is already referenced.
=item B<$bdd = imp($bdd1,$bdd2)>
BDD IMPlication operation. The returned result is already referenced.
=item B<$bdd = biimp($bdd1,$bdd2)>
BDD BIIMPlication operation. The returned result is already referenced.
=item B<$bdd = not($bdd1)>
BDD NOT operation. The returned result is already referenced.
=item B<$cube = makeSet($vars,$size)>
=item B<$cube = makeSet($vars,$size,$offset)>
Create a cube (all-true minterm, e.g. C<$v1 AND $v2 AND $v3> where each C<$vi> is a BDD variable) of C<$size> variables from the array referenced by C<$vars>, starting at position 0 (or C<$offset>).
=item B<$bdd = exists($bdd1,$cube)>
BDD existential quantification. Parameter C<$cube> is an all-true minterm. The returned result is already referenced.
=item B<$bdd = forall($bdd1,$cube)>
BDD universal quantification. Parameter C<$cube> is an all-true minterm. The returned result is already referenced.
=item B<$bdd = relProd($bdd_left,$bdd_right,$cube)>
BDD relation-product (quantification and product computation in one pass): C<EXISTS $cube: $bdd_left AND $bdd_right>. The returned result is already referenced.
=item B<$bdd = restrict($bdd1,$minterm)>
Restrict a set of variables to constant values. The returned result is already referenced.
=item B<$bdd = constrain($bdd1,$bdd2)>
Compute the generalized co-factor of C<$bdd1> w.r.t. C<$bdd2>. The returned result is already referenced.
=back
=head2 VARIABLES REPLACEMENT
lib/AI/PBDD.pm view on Meta::CPAN
Free the memory occupied by C<$pair>.
=item B<$bdd = replace($bdd1,$pair)>
Replace the variables in a BDD according to the given pair. The returned result is already referenced.
=item B<showPair($pair)>
Print a pair.
lib/AI/PBDD.pm view on Meta::CPAN
Print the BDD minterms to STDOUT.
=item B<print($bdd)>
Print the BDD in the native BuDDy representation to STDOUT.
=back
=head2 DEBUGGING
lib/AI/PBDD.pm view on Meta::CPAN
No reordering.
=item BDD_REORDER_WIN2
Reordering using a sliding window of size 2. This algorithm swaps two adjacent variable blocks and if this results in more nodes then the two blocks are swapped back again. Otherwise the result is kept in the variable order. This is then repeated for...
=item BDD_REORDER_WIN3
Same as above but with a window size of 3.
lib/AI/PBDD.pm view on Meta::CPAN
Gianluca Torta
mailto:torta@di.unito.it
=head1 COPYRIGHT
Copyright (c) 2011 by Gianluca Torta. All rights reserved.
=head1 LICENSE
This package is free software; you can use, modify and redistribute
it under the same terms as Perl itself, i.e., at your option, under
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/PSO.pm view on Meta::CPAN
#---------- BEGIN GLOBAL PARAMETERS ------------
#-#-# search parameters #-#-#
my $numParticles = 'null'; # This is the number of particles that actually search the problem hyperspace
my $numNeighbors = 'null'; # This is the number of neighboring particles that each particle shares information with
# which must obviously be less than the number of particles and greater than 0.
# TODO: write code to preconstruct different topologies. Such as fully connected, ring, star etc.
# Currently, neighbors are chosen by a simple hash function.
# It would be fun (no theoretical benefit that I know of) to play with different topologies.
my $maxIterations = 'null'; # This is the maximum number of optimization iterations before exiting if the fitness goal is never reached.
lib/AI/PSO.pm view on Meta::CPAN
return &swarm();
}
#
# pso_get_solution_array
# - returns the array of parameters corresponding to the best solution so far
# pso_get_solution_array
# - returns the array of parameters corresponding to the best solution so far.
# NOTE(review): the empty prototype "()" was removed; Perl prototypes do not
# validate arguments and alter call-site parsing (see perlsub).  Dropping it
# is backward compatible for all existing callers.
sub pso_get_solution_array {
    return @solution;
}
lib/AI/PSO.pm view on Meta::CPAN
#--------- BEGIN INTERNAL SUBROUTINES -----------
#
# init
# - initializes global variables
# - initializes particle data structures
#
sub init() {
if($psoRandomRange =~ m/null/) {
$useModifiedAlgorithm = 1;
} else {
lib/AI/PSO.pm view on Meta::CPAN
&initialize_particles();
}
#
# initialize_particles
# - sets up internal data structures
# - initializes particle positions and velocities with an element of randomness
#
sub initialize_particles() {
for(my $p = 0; $p < $numParticles; $p++) {
$particles[$p] = {}; # each particle is a hash of arrays with the array sizes being the dimensionality of the problem space
lib/AI/PSO.pm view on Meta::CPAN
}
}
#
# at this point we have exceeded the maximum number of iterations, so let's at least print out the best result so far
#
print STDERR "MAX ITERATIONS REACHED WITHOUT MEETING EXIT CRITERION...printing best solution\n";
my $bestFit = -1;
my $bestPartIndex = -1;
for(my $p = 0; $p < $numParticles; $p++) {
lib/AI/PSO.pm view on Meta::CPAN
return $particleNeighborIndex;
}
#
# clamp_velocity
# - restricts the change in velocity to be within a certain range (prevents large jumps in problem hyperspace)
#
sub clamp_velocity($) {
my ($dx) = @_;
if($dx < $deltaMin) {
$dx = $deltaMin;
lib/AI/PSO.pm view on Meta::CPAN
use AI::PSO;
my %params = (
numParticles => 4, # total number of particles involved in search
numNeighbors => 3, # number of particles with which each particle will share its progress
maxIterations => 1000, # maximum number of iterations before exiting with no solution found
dimensions => 4, # number of parameters you want to optimize
deltaMin => -4.0, # minimum change in velocity during PSO update
deltaMax => 4.0, # maximum change in velocity during PSO update
meWeight => 2.0, # 'individuality' weighting constant (higher means more individuality)
lib/AI/PSO.pm view on Meta::CPAN
exitFitness => 0.9, # minimum fitness to achieve before exiting
verbose => 0, # 0 prints solution
# 1 prints (Y|N):particle:fitness at each iteration
# 2 dumps each particle (+1)
psoRandomRange => 4.0, # setting this enables the original PSO algorithm and
# also subsequently ignores the me*/them* parameters
);
sub custom_fitness_function(@input) {
# this is a callback function.
lib/AI/PSO.pm view on Meta::CPAN
PSO is a cooperative approach to optimization rather than an
evolutionary approach which kills off unsuccessful members of the
search team. In the swarm framework each particle, is a relatively
unintelligent search agent. It is in the collective sharing of
knowledge that solutions are found. Each particle simply shares its
information with its neighboring particles. So, if one particle is
not doing too well (has a low fitness), then it looks to its neighbors
for help and tries to be more like them while still maintaining a
sense of individuality.
lib/AI/PSO.pm view on Meta::CPAN
weights involved in the positional updates so that each particle is
truly independent and can take its own search path while still
incorporating good information from other particles. In this
particular Perl module, the user is able to choose from two
implementations of the algorithm. One is the original implementation
from I<Swarm Intelligence> which requires the definition of a
'random range' to which the two stochastic weights are required to
sum. The other implementation allows the user to define the weighting
of how much a particle follows its own path versus following its
peers. In both cases there is an element of randomness.
Solution convergence is quite fast once one particle becomes close to
a local maxima. Having more particles active means there is more of
a chance that you will not be stuck in a local maxima. Often times
different neighborhoods (when not configured in a global neighborhood
fashion) will converge to different maxima. It is quite interesting
to watch graphically. If the fitness function is expensive to
compute, then it is often useful to start out with a small number of
particles first and get a feel for how the algorithm converges.
The algorithm implemented in this module is taken from the book
I<Swarm Intelligence> by Russell Eberhart and James Kennedy.
I highly suggest you read the book if you are interested in this
sort of thing.
=head1 EXPORTED FUNCTIONS
lib/AI/PSO.pm view on Meta::CPAN
this. Also, you may want to take a look at either t/PSO.t for the
simple test or examples/NeuralNetwork/pso_ann.pl for an example on
how to train a simple 3-layer feed forward neural network. (Note
that a real training application would have a real dataset with many
input-output pairs...pso_ann.pl is a _very_ simple example. Also note
that the neural network example requires g++. Type 'make run' in the
examples/NeuralNetwork directory to run the example. Lastly, the
neural network c++ code is in a very different coding style. I did
indeed write this, but it was many years ago when I was striving to
make my code nicely formatted and good looking :)).
view all matches for this distribution
view release on metacpan or search on metacpan
example/PSOTest-MultiCore.pl view on Meta::CPAN
-fitFunc => \&calcFit,
-dimensions => 10,
-iterations => 10,
-numParticles => 1000,
# only for many-core version # the best if == $#cores of your system
# selecting best value if undefined
-workers => 4,
);
view all matches for this distribution
view release on metacpan or search on metacpan
example/PSOTest-MultiCore.pl view on Meta::CPAN
-fitFunc => \&calcFit,
-dimensions => 10,
-iterations => 10,
-numParticles => 1000,
# only for many-core version # the best if == $#cores of your system
# selecting best value if undefined
-workers => 4,
);
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/ParticleSwarmOptimization.pm view on Meta::CPAN
technique for locating the best location on the surface (where 'best' is the
minimum of some fitness function). For a Wikipedia discussion of PSO see
http://en.wikipedia.org/wiki/Particle_swarm_optimization.
This pure Perl module is an implementation of the Particle Swarm Optimization
technique for finding minima of hyper surfaces. It presents an object oriented
interface that facilitates easy configuration of the optimization parameters and
(in principle) allows the creation of derived classes to reimplement all aspects
of the optimization engine (a future version will describe the replaceable
engine components).
lib/AI/ParticleSwarmOptimization.pm view on Meta::CPAN
my ($context, @values) = @_;
...
return $fitness;
}
In addition to any user provided parameters the list of values representing the
current particle position in the hyperspace is passed in. There is one value per
hyperspace dimension.
=item I<-inertia>: positive or zero number, optional
lib/AI/ParticleSwarmOptimization.pm view on Meta::CPAN
prints particle details when its fit becomes better than its previous best.
=item * kLogStall
prints particle details when its velocity reaches 0 or falls below the stall
threshold.
=item * kLogIter
Shows the current iteration number.
lib/AI/ParticleSwarmOptimization.pm view on Meta::CPAN
=head1 BUGS
Please report any bugs or feature requests to C<bug-AI-ParticleSwarmOptimization
at rt.cpan.org>, or through the web interface at
L<http://rt.cpan.org/NoAuth/ReportBug.html?Queue=AI-ParticleSwarmOptimization>.
I will be notified, and then you'll automatically be notified of progress on
your bug as I make changes.
=head1 SUPPORT
This module is supported by the author through CPAN. The following links may be
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Pathfinding/AStar/Rectangle.pm view on Meta::CPAN
# Functional-style constructor shim: create_map(\%args) behaves exactly like
# __PACKAGE__->new(\%args).  `goto &new` replaces this frame so new() sees
# the rewritten @_ and caller() as if it were invoked directly.
sub create_map($){
    @_ = (__PACKAGE__, @_);
    goto &new;
}
1 for ($a, $b); #suppress warnings
sub set_passability_string{
my $self = shift;
my $passability = shift;
die "Bad passabilitity param for set_passability_string" unless $self->width * $self->height == length( $passability );
lib/AI/Pathfinding/AStar/Rectangle.pm view on Meta::CPAN
}
}
my $path = $map->astar( $from_x, $from_y, $to_x, $to_y);
print $path, "\n"; # print path in presentation of "12346789" like keys at keyboard
=head1 DESCRIPTION
AI::Pathfinding::AStar::Rectangle provides an abstraction for a rectangular map with the A* algorithm
lib/AI/Pathfinding/AStar/Rectangle.pm view on Meta::CPAN
=over 4
=item new { "width" => map_width, "height" => map_heigth }
Create AI::Pathfinding::AStar::Rectangle object. Object represent map with given height and width.
=item set_passability x, y, value # value: 1 - can pass through point, 0 - can't
Set passability for point(x,y)
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Pathfinding/AStar.pm view on Meta::CPAN
my $curr_node = $open->extract_top();
$curr_node->{inopen} = 0;
my $G = $curr_node->{g};
#get surrounding squares
my $surr_nodes = $map->getSurrounding($curr_node->{id}, $target);
foreach my $node (@$surr_nodes) {
my ($surr_id, $surr_cost, $surr_h) = @$node;
#skip the node if it's in the CLOSED list
lib/AI/Pathfinding/AStar.pm view on Meta::CPAN
}
print "Completed Path: ", join(', ', @{$state->{path}}), "\n";
=head1 DESCRIPTION
This module implements the A* pathfinding algorithm. It acts as a base class from which a custom map object can be derived. It requires from the map object a subroutine named C<getSurrounding> (described below) and provides to the object two routin...
AI::Pathfinding::AStar requires that the map object define a routine named C<getSurrounding> which accepts the starting and target node ids for which you are calculating the path. In return it should provide an array reference containing the followi...
=over
=item * Node ID
lib/AI/Pathfinding/AStar.pm view on Meta::CPAN
=back
Basically you should return an array reference like this: C<[ [$node1, $cost1, $h1], [$node2, $cost2, $h2], [...], ...];> For more information on heuristics and the best ways to calculate them, visit the links listed in the I<SEE ALSO> section below...
As mentioned earlier, AI::Pathfinding::AStar provides two routines named C<findPath> and C<findPathIncr>. C<findPath> requires as input the starting and target node identifiers. It is unimportant what format you choose for your node IDs. As long a...
=head1 PREREQUISITES
This module requires Heap (specifically Heap::Binomial and Heap::Elem) to function.
=head1 SEE ALSO
L<http://www.policyalmanac.org/games/aStarTutorial.htm>, L<http://xenon.stanford.edu/~amitp/gameprog.html>
lib/AI/Pathfinding/AStar.pm view on Meta::CPAN
Aaron Dalton - aaron@daltons.ca
This is my very first CPAN contribution and I am B<not> a professional programmer. Any feedback you may have, even regarding issues of style, would be greatly appreciated. I hope it is of some use.
=head1 COPYRIGHT AND LICENSE
Copyright (c) 2004 Aaron Dalton. All rights reserved.
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself.
=cut
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
{
my $self = shift;
my $optimize_for = $self->_optimize_for();
my %resolve = (
len => "_get_iter_state_params_len",
minmax_len => "_get_iter_state_params_minmax_len",
speed => "_get_iter_state_params_speed",
);
return $resolve{$optimize_for};
}
sub _get_iter_state_params
{
my $self = shift;
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
scan_idx => $selected_scan_idx,
}
);
}
sub calc_flares_meta_scan
{
my $self = shift;
$self->chosen_scans( [] );
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
$self->_total_iters(0);
$self->_status("iterating");
my $iters_quota = 0;
my $flares_num_iters = PDL::Core::pdl( [ (0) x $self->_get_num_scans() ] );
my $ones_constant =
PDL::Core::pdl( [ map { [1] } ( 1 .. $self->_get_num_scans() ) ] );
my $next_num_iters_for_each_scan_x_scan =
( ( $ones_constant x $flares_num_iters ) );
my $num_moves = $self->_scans_data->slice(":,:,1");
# The number of moves for dimension 0,1,2 above.
my $num_moves_repeat = $num_moves->clump( 1 .. 2 )->xchg( 0, 1 )
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
{
$iters_quota += $q_more;
# Next number of iterations for each scan x scan combination.
my $next_num_iters = (
( $ones_constant x $flares_num_iters ) + (
PDL::MatrixOps::identity( $self->_get_num_scans() ) *
$iters_quota
)
);
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
$last_avg = $min_avg;
push @{ $self->chosen_scans() },
$self->_calc_chosen_scan( $selected_scan_idx, $iters_quota );
$flares_num_iters->set( $selected_scan_idx,
$flares_num_iters->at($selected_scan_idx) + $iters_quota );
$self->_selected_scans()->[$selected_scan_idx]->mark_as_used();
$iters_quota = 0;
my $num_solved = $solved_moves_counts->at($selected_scan_idx);
my $flares_num_iters_repeat =
$flares_num_iters->dummy( 0, $self->_num_boards() );
# A boolean tensor:
# Dimension 0 - board.
# Dimension 1 - scans.
my $solved_with_which_iter =
( $flares_num_iters_repeat >= $iters->clump( 1 .. 2 ) ) &
( $iters->clump( 1 .. 2 ) >= 0 );
my $total_num_iters = (
( $solved_with_which_iter * $flares_num_iters_repeat )->sum() + (
$solved_with_which_iter->not()->andover() *
$flares_num_iters->sum()
)->sum()
);
print "Finished ", $loop_iter_num++,
" ; #Solved = $num_solved ; Iters = $total_num_iters ; Avg = $min_avg\n";
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
quotas => [400, 300, 200],
selected_scans =>
[
AI::Pathfinding::OptimizeMultiple::Scan->new(
id => 'first_search',
cmd_line => "--preset first_search",
),
AI::Pathfinding::OptimizeMultiple::Scan->new(
id => 'second_search',
cmd_line => "--preset second_search",
),
AI::Pathfinding::OptimizeMultiple::Scan->new(
id => 'third_search',
cmd_line => "--preset third_search",
),
],
}
);
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
=item * L<http://www.shlomifish.org/lecture/Perl/Lightning/Opt-Multi-Task-in-PDL/>
=back
Given statistics on the performance of several game AI searches (or scans)
across a representative number of initial cases, find a scan
that solves most deals with close-to-optimal performance, by using switch
tasking.
=head1 SUBROUTINES/METHODS
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
=head2 $calc_meta_scan->calc_meta_scan()
Calculates the meta-scan after initialisation. See here for the details
of the algorithm:
L<http://www.shlomifish.org/lecture/Freecell-Solver/The-Next-Pres/slides/multi-tasking/best-meta-scan/>
=head2 $self->calc_flares_meta_scan()
This function calculates the flares meta-scan: i.e: assuming that all atomic
scans are run one after the other and the shortest solutions of all
successful scans are being picked.
=head2 $calc_meta_scan->calc_board_iters($board_idx)
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
=item * "out_of_quotas"
=back
=head2 my $sim_results_obj = $calc_meta_scan->simulate_board($board_idx, $args)
Simulates the board No $board_idx through the scan. Returns a
L<AI::Pathfinding::OptimizeMultiple::SimulationResults> object.
$args is an optional hash reference. It may contain a value with the key of
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
=item * L<Freecell Solver|http://fc-solve.shlomifish.org/>
For which this code was first written and used.
=item * L<Alternative Implementation in C#/.NET|https://bitbucket.org/shlomif/fc-solve/src/cc5b428ed9bad0132d7a7bc1a14fc6d3650edf45/fc-solve/presets/soft-threads/meta-moves/auto-gen/optimize-seq?at=master>
An Alternative implementation in C#/.NET, which was written because the
performance of the Perl/PDL code was too slow.
=item * L<PDL> - Perl Data Language
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
=head1 SUPPORT
=head2 Websites
The following websites have more information about this module, and may be of help to you. As always,
in addition to those websites please use your favorite search engine to discover more resources.
=over 4
=item *
lib/AI/Pathfinding/OptimizeMultiple.pm view on Meta::CPAN
=item *
CPAN Testers Matrix
The CPAN Testers Matrix is a website that provides a visual overview of the test results for a distribution on various Perls/platforms.
L<http://matrix.cpantesters.org/?dist=AI-Pathfinding-OptimizeMultiple>
=item *
CPAN Testers Dependencies
The CPAN Testers Dependencies is a website that shows a chart of the test results of all dependencies for a distribution.
L<http://deps.cpantesters.org/?module=AI::Pathfinding::OptimizeMultiple>
=back
=head2 Bugs / Feature Requests
Please report any bugs or feature requests by email to C<bug-ai-pathfinding-optimizemultiple at rt.cpan.org>, or through
the web interface at L<https://rt.cpan.org/Public/Bug/Report.html?Queue=AI-Pathfinding-OptimizeMultiple>. You will be automatically notified of any
progress on the request by the system.
=head2 Source Code
The code is open to the world, and available for you to hack on. Please feel free to browse it and play
with it, or whatever. If you want to contribute patches, please send me a diff or prod me to pull
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Pathfinding/SMAstar.pm view on Meta::CPAN
\&AI::Pathfinding::SMAstar::Path::get_descendants_iterator_smastar,
\&AI::Pathfinding::SMAstar::Path::fcost,
\&AI::Pathfinding::SMAstar::Path::backup_fvals,
$log_function,
$str_function,
\&AI::Pathfinding::SMAstar::Path::progress,
$self->{_show_prog_func},
$max_states_in_queue,
$max_cost,
);
}
lib/AI/Pathfinding/SMAstar.pm view on Meta::CPAN
my ($priority_queue,
$goal_p,
$successors_func,
$eval_func,
$backup_func,
$log_function, # debug string func; represent state object as a string.
$str_function,
$prog_function,
$show_prog_func,
$max_states_in_queue,
$max_cost,
lib/AI/Pathfinding/SMAstar.pm view on Meta::CPAN
_state_num_successors_func => \&FrontierObj::get_num_successors,
# must return *one* successor at a time
_state_successors_iterator => \&FrontierObj::get_successors_iterator,
# can be any suitable string representation
_state_get_data_func => \&FrontierObj::string_representation,
# gets called once per iteration, useful for showing algorithm progress
_show_prog_func => \&FrontierObj::progress_callback,
);
# You can start the search from multiple start-states.
# Add the initial states to the smastar object before starting the search.
foreach my $frontierObj (@start_states){
lib/AI/Pathfinding/SMAstar.pm view on Meta::CPAN
# contains only the start state.
#
# $frontierGoalPath->state() contains the goal FrontierObj itself.
#
my $frontierGoalPath = $smastar->start_search(
\&log_function, # returns a string used for logging progress
\&str_function, # returns a string used to *uniquely* identify a node
$max_states_in_queue, # indicate the maximum states allowed in memory
$MAX_COST, # indicate the maximum cost allowed in search
);
In the example above, a hypothetical object, C<FrontierObj>, is used to
represent a state, or I<node> in your search space. To use SMA* search to
find a shortest path from a starting node to a goal in your search space, you must
define what a I<node> is, in your search space (or I<point>, or I<state>).
A common example used for informed search methods, and one that is used in Russell's
original paper, is optimal puzzle solving, such as solving an 8 or 15-tile puzzle
lib/AI/Pathfinding/SMAstar.pm view on Meta::CPAN
There is an example provided in the /t directory of this module's distribution,
where SMA* is applied to the problem of finding the shortest palindrome that
contains a minimum number of letters specified, over a given list of words.
Once you have a definition and representation of a node in your search space, SMA*
search requires the following functions to work:
=over
lib/AI/Pathfinding/SMAstar.pm view on Meta::CPAN
=item *
B<State get-data function> (C<_state_get_data_func> above)
This function returns a string representation of this node.
=item *
B<State show-progress function> (C<_show_prog_func> above)
This is a callback function for displaying the progress of the search.
It can be an empty callback if you do not need this output.
=item *
B<log string function> (C<log_function> above)
This is an arbitrary string used for logging. It also gets passed to
the show-progress function above.
=item *
B<str_function> (C<str_function> above)
This function returns a *unique* string representation of this node.
Uniqueness is required for SMA* to work properly.
=item *
lib/AI/Pathfinding/SMAstar.pm view on Meta::CPAN
=head1 DESCRIPTION
=head2 Overview
Simplified Memory-bounded A* search (or SMA* search) addresses some of the
limitations of conventional A* search, by bounding the amount of space required
to perform a shortest-path search. This module is an implementation of
SMA*, which was first introduced by Stuart Russell in 1992. SMA* is a simpler,
more efficient variation of the original MA* search introduced by P. Chakrabarti
et al. in 1989 (see references below).
lib/AI/Pathfinding/SMAstar.pm view on Meta::CPAN
Like A* search, SMA* search is an optimal and complete algorithm for finding
a least-cost path. Unlike A*, SMA* will not run out of memory, I<unless the size
of the shortest path exceeds the amount of space in available memory>.
SMA* addresses the possibility of running out of memory
by pruning the portion of the search-space that is being examined. It relies on
the I<pathmax>, or I<monotonicity> constraint on I<f(n)> to remove the shallowest
of the highest-cost nodes from the search queue when there is no memory left to
expand new nodes. It records the best costs of the pruned nodes within their
antecedent nodes to ensure that crucial information about the search space is
not lost. To facilitate this mechanism, the search queue is best maintained
as a search-tree of search-trees ordered by cost and depth, respectively.
=head4 Nothing is for free
The pruning of the search queue allows SMA* search to utilize all available
memory for search without any danger of overflow. It can, however, make
lib/AI/Pathfinding/SMAstar.pm view on Meta::CPAN
=head2 start_search()
my $frontierGoalObj = $smastar->start_search(
\&log_function, # returns a string used for logging progress
\&str_function, # returns a string used to *uniquely* identify a node
$max_states_in_queue, # indicate the maximum states allowed in memory
$MAX_COST, # indicate the maximum cost allowed in search
);
Initiates a memory-bounded search. When calling this function, pass a handle to
a function for recording current status( C<log_function> above- this can be
an empty subroutine if you don't care), a function that returns a *unique* string
representing a node in the search-space (this *cannot* be an empty subroutine), a
maximum number of expanded states to store in the queue, and a maximum cost
value (beyond which the search will cease).
=head2 state_eval_func()
lib/AI/Pathfinding/SMAstar.pm view on Meta::CPAN
next successor of this node.
=head2 state_get_data_func()
$smastar->state_get_data_func(\&FrontierObj::string_representation);
Set/get the handle to the function that returns a string
representation of this node.
=head2 show_prog_func()
$smatar->show_prog_func(\&FrontierObj::progress_callback);
Sets/gets the callback function for displaying the progress of the search.
It can be an empty callback (sub{}) if you do not need this output.
=head2 DEPENDENCIES
lib/AI/Pathfinding/SMAstar.pm view on Meta::CPAN
[1] Russell, Stuart. (1992) I<"Efficient Memory-bounded Search Methods.">
Proceedings of the 10th European conference on Artificial intelligence, pp. 1-5
[2] Chakrabarti, P. P., Ghose, S., Acharya, A., and de Sarkar, S. C. (1989)
I<"Heuristic search in restricted memory."> Artificial Intelligence Journal,
41, pp. 197-221.
=head1 AUTHOR
Matthias Beebe, E<lt>matthiasbeebe@gmail.comE<gt>
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
# create a new nerve / neuron / perceptron
$nerve = AI::Perceptron::Simple->new( {
initial_value => $size_of_each_dendrite,
learning_rate => 0.3, # optional
threshold => 0.85, # optional
attribs => \@dendrites,
} );
# train
$nerve->tame( ... );
$nerve->exercise( ... );
$nerve->train( $training_data_csv, $expected_column_name, $save_nerve_to );
# or
$nerve->train(
$training_data_csv, $expected_column_name, $save_nerve_to,
$show_progress, $identifier); # these two parameters must go together
# validate
$nerve->take_lab_test( ... );
$nerve->take_mock_exam( ... );
# fill results to original file
$nerve->validate( {
stimuli_validate => $validation_data_csv,
predicted_column_index => 4,
} );
# or
# fill results to a new file
$nerve->validate( {
stimuli_validate => $validation_data_csv,
predicted_column_index => 4,
results_write_to => $new_csv
} );
# test - see "validate" method, same usage
$nerve->take_real_exam( ... );
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
# saving and loading data of perceptron locally
# NOTE: nerve data is automatically saved after each training process
use AI::Perceptron::Simple ":local_data";
my $nerve_file = "apples.nerve";
preserve( ... );
save_perceptron( $nerve, $nerve_file );
# load data of perceptron for use in actual program
my $apple_nerve = revive( ... );
my $apple_nerve = load_perceptron( $nerve_file );
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
# for portability of nerve data
use AI::Perceptron::Simple ":portable_data";
my $yaml_nerve_file = "pearls.yaml";
preserve_as_yaml ( ... );
save_perceptron_yaml ( $nerve, $yaml_nerve_file );
# load nerve data on the other computer
my $pearl_nerve = revive_from_yaml ( ... );
my $pearl_nerve = load_perceptron_yaml ( $yaml_nerve_file );
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=cut
use Exporter qw( import );
our @EXPORT_OK = qw(
shuffle_data shuffle_stimuli
preserve save_perceptron revive load_perceptron
preserve_as_yaml save_perceptron_yaml revive_from_yaml load_perceptron_yaml
);
our %EXPORT_TAGS = (
process_data => [ qw( shuffle_data shuffle_stimuli ) ],
local_data => [ qw( preserve save_perceptron revive load_perceptron ) ],
portable_data => [ qw( preserve_as_yaml save_perceptron_yaml revive_from_yaml load_perceptron_yaml ) ],
);
=head1 DESCRIPTION
This module provides methods to build, train, validate and test a perceptron. It can also save the data of the perceptron for future use for any actual AI programs.
This module is also aimed to help newbies grasp hold of the concept of perceptron, training, validation and testing as much as possible. Hence, all the methods and subroutines in this module are decoupled as much as possible so that the actual script...
The implementation here is super basic as it only takes in input of the dendrites and calculate the output. If the output is higher than the threshold, the final result (category) will
be 1 aka perceptron is activated. If not, then the result will be 0 (not activated).
Depending on how you view or categorize the final result, the perceptron will fine tune itself (aka train) based on the learning rate until the desired result is met. Everything from
here on is all mathematics and numbers which only makes sense to the computer and not humans anymore.
Whenever the perceptron fine tunes itself, it will increase/decrease all the dendrites that is significant (attributes labelled 1) for each input. This means that even when the
perceptron successfully fine tunes itself to suit all the data in your file for the first round, the perceptron might still get some of the things wrong for the next round of training.
Therefore, the perceptron should be trained for as many rounds as possible. The more "confusion" the perceptron is able to correctly handle, the more "mature" the perceptron is.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
The learning rate of the perceptron for the fine-tuning process.
This value is usually between 0 and 1. However, it all depends on your combination of numbers for the other options.
=item threshold => $decimal
Optional. The default is C<0.5>
This is the passing rate to determine the neuron output (C<0> or C<1>).
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
my $data_ref = shift;
my %data = %{ $data_ref };
# check keys
$data{ learning_rate } = LEARNING_RATE if not exists $data{ learning_rate };
$data{ threshold } = THRESHOLD if not exists $data{ threshold };
#####
# don't pack this key checking process into a subroutine for now
# this is also used in &_real_validate_or_test
my @missing_keys;
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
}
croak "Missing keys: @missing_keys" if @missing_keys;
#####
# continue to process the rest of the data
my %attributes;
for ( @{ $data{ attribs } } ) {
$attributes{ $_ } = $data{ initial_value };
}
my %processed_data = (
learning_rate => $data{ learning_rate },
threshold => $data{ threshold },
attributes_hash_ref => \%attributes,
);
bless \%processed_data, $class;
}
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
} else {
$self->{learning_rate}
}
}
=head2 threshold ( $value )
=head2 threshold
If C<$value> is given, sets the threshold / passing rate to C<$value>. If not, then it returns the passing rate.
=cut
# Accessor/mutator for the passing rate of the nerve.
# With an argument, stores it as the new threshold and returns it;
# without one, simply returns the current threshold.
sub threshold {
    my ( $self, @new_value ) = @_;
    return $self->{threshold} = $new_value[0] if @new_value;
    return $self->{threshold};
}
=head1 TRAINING RELATED SUBROUTINES/METHODS
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=item old sum
The original sum of all C<weightage * input> or C<dendrite_size * binary_input>
=item threshold
The threshold of the nerve
=item new sum
The new sum of all C<weightage * input> after fine-tuning the nerve
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
my $output = _calculate_output( $self, $row );
#print "Sum = ", $output, "\n";
# $expected_output_header to be checked together over here
# if output >= threshold
# then category/result aka output is considered 1
# else output considered 0
# output expected/actual tuning
# 0 0 -
# 1 0 down
# 0 1 up
# 1 1 -
if ( ($output >= $self->threshold) and ( $row->{$expected_output_header} eq 0 ) ) {
_tune( $self, $row, TUNE_DOWN );
if ( $display_stats ) {
print $row->{$identifier}, "\n";
print " -> TUNED DOWN";
print " Old sum = ", $output;
print " Threshold = ", $self->threshold;
print " New Sum = ", _calculate_output( $self, $row ), "\n";
}
} elsif ( ($output < $self->threshold) and ( $row->{$expected_output_header} eq 1 ) ) {
_tune( $self, $row, TUNE_UP );
if ( $display_stats ) {
print $row->{$identifier}, "\n";
print " -> TUNED UP";
print " Old sum = ", $output;
print " Threshold = ", $self->threshold;
print " New Sum = ", _calculate_output( $self, $row ), "\n";
}
} elsif ( ($output < $self->threshold) and ( $row->{$expected_output_header} eq 0 ) ) {
if ( $display_stats ) {
print $row->{$identifier}, "\n";
print " -> NO TUNING NEEDED";
print " Sum = ", _calculate_output( $self, $row );
print " Threshold = ", $self->threshold, "\n";
}
next ROW;
} elsif ( ($output >= $self->threshold) and ( $row->{$expected_output_header} eq 1 ) ) {
if ( $display_stats ) {
print $row->{$identifier}, "\n";
print " -> NO TUNING NEEDED";
print " Sum = ", _calculate_output( $self, $row );
print " Threshold = ", $self->threshold, "\n";
}
next ROW;
} #else { print "Something's not right\n'" }
}
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
Calculates and returns the C<sum(weightage*input)> for each individual row of data. Actually, it just adds up all the existing weights, since the C<input> is always 1 for now :)
C<%stimuli_hash> is the actual data to be used for training. It might contain useless columns.
This will get all the available dendrites using the C<get_attributes> method and then use all the keys ie. headers to access the corresponding values.
This subroutine should be called in the procedural way for now.
=cut
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=head2 validate ( \%options )
This method validates the perceptron against another set of data after it has undergone the training process.
This method calculates the output of each row of data and writes the result into the predicted column. The data being written into the new file or the original file will maintain its sequence.
Please take note that this method will load all the data of the validation stimuli, so please split your stimuli into multiple files if possible and call this method a few more times.
For C<%options>, the followings are needed unless mentioned:
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=item predicted_column_index => $column_number
This is the index of the column that contains the predicted output values. C<$index> starts from C<0>.
This column will be filled with binary numbers and the full new data will be saved to the file specified in the C<results_write_to> key.
=item results_write_to => $new_csv_file
Optional.
The default behaviour will write the predicted output back into C<stimuli_validate> ie the original data. The sequence of the data will be maintained.
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
my $stimuli_validate = $data_hash_ref->{ stimuli_validate };
my $predicted_index = $data_hash_ref->{ predicted_column_index };
# actual processing starts here
my $output_file = defined $data_hash_ref->{ results_write_to }
? $data_hash_ref->{ results_write_to }
: $stimuli_validate;
# open for writing results
my $aoa = csv (in => $stimuli_validate, encoding => ":encoding(utf-8)");
my $attrib_array_ref = shift @$aoa; # 'remove' the header, it's annoying :)
$aoa = _fill_predicted_values( $self, $stimuli_validate, $predicted_index, $aoa );
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
# individual row
my $row = 0;
while ( my $data = $csv->getline_hr($data_fh) ) {
if ( _calculate_output( $self, $data ) >= $self->threshold ) {
# write 1 into aoa
$aoa->[ $row ][ $predicted_index ] = 1;
} else {
#write 0 into aoa
$aoa->[ $row ][ $predicted_index ] = 0;
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=head1 RESULTS RELATED SUBROUTINES/METHODS
This part is related to generating the confusion matrix.
=head2 get_exam_results ( ... )
The parameters and usage are the same as C<get_confusion_matrix>. See the next method.
=head2 get_confusion_matrix ( \%options )
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=back
=cut
# Alias for get_confusion_matrix(): forwards the options hashref
# unchanged and returns whatever the confusion-matrix builder returns.
sub get_exam_results {
    my ( $self, $exam_info ) = @_;
    return $self->get_confusion_matrix( $exam_info );
}
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
my $denominator = 2;
$c_matrix->{ balanced_accuracy } = $numerator / $denominator; # numerator already in %
}
=head2 display_exam_results ( ... )
The parameters are the same as C<display_confusion_matrix>. See the next method.
=head2 display_confusion_matrix ( \%confusion_matrix, \%labels )
Display the confusion matrix. If C<%confusion_matrix> has C<more_stats> elements, it will display them if they exists. The default elements ie C<accuracy> and C<sensitivity> must be present, while the rest can be absent.
C<%confusion_matrix> is the same confusion matrix returned by the C<get_confusion_matrix> method.
For C<%labels>, since C<0>'s and C<1>'s won't make much sense as the output labels in most cases, therefore, the following keys must be specified:
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
For the C<%labels>, there is no need to enter "actual X", "predicted X" etc. It will be prefixed with C<A: > for actual and C<P: > for the predicted values by default.
=cut
# Alias for display_confusion_matrix(): forwards the matrix and
# label hashrefs unchanged.
sub display_exam_results {
    my ( $self, $matrix_ref, $label_ref ) = @_;
    return $self->display_confusion_matrix( $matrix_ref, $label_ref );
}
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=head2 &_build_matrix ( $c_matrix, $labels )
Builds the matrix using C<Text::Matrix> module.
C<$c_matrix> and C<$labels> are the same as the ones passed to C<display_exam_results> and C<display_confusion_matrix>.
Returns a list C<( $matrix, $c_matrix )> which can directly be passed to C<_print_extended_matrix>.
=cut
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
B<The subroutines are to be called in the procedural way>. No checking is done currently.
See C<PERCEPTRON DATA> and C<KNOWN ISSUES> sections for more details on the subroutines in this section.
=head2 preserve ( ... )
The parameters and usage are the same as C<save_perceptron>. See the next subroutine.
=head2 save_perceptron ( $nerve, $nerve_file )
Saves the C<AI::Perceptron::Simple> object into a C<Storable> file. There shouldn't be a need to call this method manually since after every training
process this will be called automatically.
=cut
# Alias for save_perceptron(): passes every argument straight through
# so the two names stay interchangeable.
sub preserve {
    my @args = @_;
    return save_perceptron( @args );
}
sub save_perceptron {
my $self = shift;
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
These subroutines can be imported using the C<:portable_data> tag.
The file type currently supported is YAML. Please be careful with the data as you won't want the nerve data accidentally modified.
=head2 preserve_as_yaml ( ... )
The parameters and usage are the same as C<save_perceptron_yaml>. See the next subroutine.
=head2 save_perceptron_yaml ( $nerve, $yaml_nerve_file )
Saves the C<AI::Perceptron::Simple> object into a C<YAML> file.
=cut
# Alias for save_perceptron_yaml(): passes every argument straight
# through so the two names stay interchangeable.
sub preserve_as_yaml {
    my @args = @_;
    return save_perceptron_yaml( @args );
}
sub save_perceptron_yaml {
my $self = shift;
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
If you really need to send the nerve data to different computers with different versions of C<Storable> module, see the docs of the following subroutines:
=over 4
=item * C<&preserve_as_yaml> or C<&save_perceptron_yaml> for storing data.
=item * C<&revive_from_yaml> or C<&load_perceptron_yaml> for retrieving the data.
=back
lib/AI/Perceptron/Simple.pm view on Meta::CPAN
=head1 BUGS
Please report any bugs or feature requests to C<bug-ai-perceptron-simple at rt.cpan.org>, or through
the web interface at L<https://rt.cpan.org/NoAuth/ReportBug.html?Queue=AI-Perceptron-Simple>. I will be notified, and then you'll
automatically be notified of progress on your bug as I make changes.
=head1 SUPPORT
You can find documentation for this module with the perldoc command.
view all matches for this distribution
view release on metacpan or search on metacpan
examples/and.pl view on Meta::CPAN
use Data::Dumper;
use AI::Perceptron;
print( "Example: training a perceptron to recognize an 'AND' function.\n",
"usage: $0 [<threshold> <weight1> <weight2>]\n" );
my $p = AI::Perceptron->new
->num_inputs( 2 )
->learning_rate( 0.1 );
if (@ARGV) {
$p->threshold( shift(@ARGV) )
->weights([ shift(@ARGV), shift(@ARGV) ]);
}
my @training_exs = (
[-1 => -1, -1],
examples/and.pl view on Meta::CPAN
print "\nAfter Training\n";
dump_perceptron( $p );
# Print the perceptron's threshold and weights, then replay every
# training example through compute_output() so the caller can compare
# each target against the perceptron's current output.
# Relies on the file-global @training_exs ([target, in1, in2] tuples).
sub dump_perceptron {
    my ($perceptron) = @_;
    my $weight_list = join(', ', @{ $perceptron->weights });
    print "\tThreshold: " . $perceptron->threshold . " Weights: " . $weight_list . "\n";
    for my $example (@training_exs) {
        my $target = $example->[0];
        my @ins    = @{$example}[1..2];
        print "\tInputs = {" . join(',', @ins) . "}, target=$target, output=" . $perceptron->compute_output(@ins) . "\n";
    }
}
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/PredictionClient/Alien/TensorFlowServingProtos.pm view on Meta::CPAN
use Module::Build;
use AI::PredictionClient::Alien::TensorFlowServingProtos;
my $builder = Module::Build->new(
...
configure_requires => {
'AI::PredictionClient::Alien::TensorFlowServingProtos' => '0',
...
},
extra_compiler_flags => AI::PredictionClient::Alien::TensorFlowServingProtos->cflags,
extra_linker_flags => AI::PredictionClient::Alien::TensorFlowServingProtos->libs,
lib/AI/PredictionClient/Alien/TensorFlowServingProtos.pm view on Meta::CPAN
This distribution builds a C++ library for use by other Perl XS modules to
communicate with Google TensorFlow Serving model servers. It is primarily intended to be used
with the cpan AI::PredictionClient module.
This module builds a library 'tensorflow_serving_protos_so' that provides the protos for the
Predict, Classify, Regress and MultiInference prediction services.
The built library is installed in a private share location within this module
for use by other modules.
=cut
view all matches for this distribution
view release on metacpan or search on metacpan
bin/Inception.pl view on Meta::CPAN
option host => (
is => 'ro',
required => 0,
format => 's',
default => $default_host,
doc => "IP address of the server [Default: $default_host]"
);
option port => (
is => 'ro',
required => 0,
format => 's',
bin/Inception.pl view on Meta::CPAN
printf("Sending image %s to server at host:%s port:%s\n",
$self->image_file, $self->host, $self->port);
if ($client->call_inception($image_ref)) {
my $results_ref = $client->inception_results;
my $classifications_ref = $results_ref->{'classes'};
my $scores_ref = $results_ref->{'scores'};
my $comments = 'Clasification Results for ' . $self->image_file;
my $results_text
= form
'.===========================================================================.',
'| Class | Score |',
'|-----------------------------------------------------------+---------------|',
'| {[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[} |{]].[[[[[[[[} |',
$classifications_ref, $scores_ref,
'|===========================================================================|',
'| {[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[} |',
$comments,
"'==========================================================================='";
print $results_text;
} else {
printf("Failed. Status: %s, Status Code: %s, Status Message: %s \n",
$client->status, $client->status_code, $client->status_message);
return 1;
view all matches for this distribution
view release on metacpan or search on metacpan
examples/append.pl view on Meta::CPAN
append([W|X], Y, [W|Z]) :- append(X, Y, Z).
END_PROLOG
print "Appending two lists 'append([a],[b,c,d],Z).'\n";
$prolog->query('append([a],[b,c,d],Z).');
while (my $result = $prolog->results) {
print Dumper($result),"\n";
}
print "\nWhich lists appends to a known list to form another known list?\n'append(X,[b,c,d],[a,b,c,d]).'\n";
$prolog->query('append(X,[b,c,d],[a,b,c,d]).');
while (my $result = $prolog->results) {
print Dumper($result),"\n";
}
print "\nWhich lists can be appended to form a given list?\n'append(X, Y, [foo, bar, 7, baz]).'\n";
my $list = $prolog->list(qw/foo bar 7 baz/);
$prolog->query("append(X,Y,[$list]).");
while (my $result = $prolog->results) {
print Dumper($result),"\n";
}
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/SimulatedAnnealing.pm view on Meta::CPAN
# The anneal() function takes a reference to an array of number
# specifications (which are references to hashes containing "LowerBound",
# "UpperBound", and "Precision" fields), a reference to a cost function
# (which takes a list of numbers matching the specifications and returns a
# number representing a cost to be minimized), and a positive integer
# specifying the number of randomization cycles to perform at each
# temperature during the annealing process.
#
# The function returns a reference to an array containing the
# optimized list of numbers.
lib/AI/SimulatedAnnealing.pm view on Meta::CPAN
# The use_brute_force() function takes a reference to an array of number
# specifications (which are references to hashes containing "LowerBound",
# "UpperBound", and "Precision" fields) and a reference to a cost function
# (which takes a list of numbers matching the specifications and returns a
# number representing a cost to be minimized). The method tests every
# possible combination of numbers matching the specifications and returns a
# reference to an array containing the optimal numbers, where "optimal"
# means producing the lowest cost.
sub use_brute_force {
my $number_specs = validate_number_specs($_[0]);
lib/AI/SimulatedAnnealing.pm view on Meta::CPAN
$cursors[$dex] = 0;
} # end if
} # next $dex
} until ($finished);
# Return the result:
return \@optimized_list;
} # end sub
# The validate_number_specs() function takes a reference to an array of
# number specifications (which are references to hashes with "LowerBound",
lib/AI/SimulatedAnnealing.pm view on Meta::CPAN
die "ERROR: In a number specification, the lower and upper "
. "bounds must be numbers such that the upper bound is "
. "greater than the lower bound, the upper bound is not "
. "greater than 10 to the power of (4 - p) where p is the "
. "precision, and the lower bound is not less than -1 times "
. "the result of taking 10 to the power of (4 - p).\n";
} # end unless
# Round the bounds inward as necessary:
my $integral_lower_bound = ceil( $lower_bound * (10 ** $precision));
my $integral_upper_bound = floor($upper_bound * (10 ** $precision));
lib/AI/SimulatedAnnealing.pm view on Meta::CPAN
precision, where the precision is an integer in the range 0 to 4 that
specifies the number of decimal places to which all instances of the
number will be rounded. The upper bound must be greater than the
lower bound but not greater than 10 to the power of (4 - p), where "p"
is the precision. The lower bound must be not less than -1 times the
result of taking 10 to the power of (4 - p).
A bound that has a higher degree of precision than that specified for
the number to which the bound applies is rounded inward (that is,
downward for an upper bound and upward for a lower bound) to the
nearest instance of the specified precision.
The attributes of a number (bounds and precision) are encapsulated
within a number specification, which is a reference to a hash
containing "LowerBound", "UpperBound", and "Precision" fields.
The anneal() function takes a reference to an array of number
specifications, a cost function, and a positive integer specifying
the number of randomization cycles per temperature to perform. The
anneal() function returns a reference to an array having the same
length as the array of number specifications. The returned list
represents the optimal list of numbers matching the specified
attributes, where "optimal" means producing the lowest cost.
The cost function must take a reference to an array of numbers that
match the number specifications. The function must return a single
number representing a cost to be minimized.
In order to work efficiently with the varying precisions, the anneal()
function converts each bound to an integer by multiplying it by 10 to
the power of the precision; then the function performs the temperature
reductions and randomization cycles (which include tests performed via
calls to the cost function) on integers in the resulting ranges. When
passing an integer to the cost function or when storing the integer in
a collection of numbers to be returned by the function, anneal() first
converts the integer back to the appropriate decimal number by
dividing the integer by 10 to the power of the precision.
The initial temperature is the size of the largest range after the
bounds have been converted to integers. During each temperature
reduction, the anneal() function multiplies the temperature by 0.95
and then rounds the result down to the nearest integer (if the result
isn't already an integer). When the temperature reaches zero,
annealing is immediately terminated.
NOTE: Annealing can sometimes complete before the temperature
reaches zero if, after a particular temperature reduction, a
lib/AI/SimulatedAnnealing.pm view on Meta::CPAN
When there is a tie between two possible positions for the subrange
within the total range, a "coin flip" decides.
=head1 PREREQUISITES
This module requires Perl 5, version 5.10.1 or later.
=head1 METHODS
=over
lib/AI/SimulatedAnnealing.pm view on Meta::CPAN
The anneal() function takes a reference to an array of number specifications
(which are references to hashes containing "LowerBound", "UpperBound", and
"Precision" fields), a code reference pointing to a cost function (which
takes a list of numbers matching the specifications and returns a number
representing a cost to be minimized), and a positive integer specifying the
number of randomization cycles to perform at each temperature.
The function returns a reference to an array containing the optimized list
of numbers.
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/TensorFlow/Libtensorflow/ApiDefMap.pm view on Meta::CPAN
# Bind the C constructor as the class method New().  The small wrapper
# drops the invocant (class name) from the argument list before calling
# the attached C entry point, since the C signature only takes the
# op-list buffer and a status object.
# NOTE(review): presumably the FFI mangler maps 'NewApiDefMap' to the
# TF_-prefixed C symbol -- confirm against the $ffi setup above.
$ffi->attach( [ 'NewApiDefMap' => 'New' ] => [
arg 'TF_Buffer' => 'op_list_buffer',
arg 'TF_Status' => 'status',
] => 'TF_ApiDefMap' => sub {
my ($xs, $class, @rest) = @_;
$xs->(@rest);
});
# Bind the C destructor as DESTROY so the underlying map is freed
# automatically when the Perl object is garbage-collected.
$ffi->attach( ['DeleteApiDefMap' => 'DESTROY'] => [
arg 'TF_ApiDefMap' => 'apimap'
] => 'void');
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AI/Termites.pm view on Meta::CPAN
$termites->iterate for 0..10000;
=head1 DESCRIPTION
This module simulates a termites world based on the ideas described on
the book "Adventures in Modeling" by Vanessa Stevens Colella, Eric
Klopfer and Mitchel Resnick
(L<http://education.mit.edu/starlogo/adventures/>).
In this version, termites can move in a n-dimensional boxed space, and
are not limited to integer coordinates.
Also, the way they decide when to pick or leave wood are customizable,
lib/AI/Termites.pm view on Meta::CPAN
=over 4
=item LoginquitasPostulo
This termites subspecie measures the distance to the nearest piece of
wood.
=item NemusNidor
This termite smells the wood.
view all matches for this distribution
view release on metacpan or search on metacpan
examples/basic.pl view on Meta::CPAN
my $train_data = DMatrix->From(file => 'agaricus.txt.train');
my $test_data = DMatrix->From(file => 'agaricus.txt.test');
# With XGBoost we can solve this problem using 'gbtree' booster
# and as loss function a logistic regression 'binary:logistic'
# (Gradient Boosting Regression Tree)
# XGBoost Tree Booster has a lot of parameters that we can tune
# (https://github.com/dmlc/xgboost/blob/master/doc/parameter.md)
my $booster = train(data => $train_data, number_of_rounds => 10, params => {
objective => 'binary:logistic',
eta => 1.0,
max_depth => 2,
silent => 1
});
# For binary classification, predictions are probability confidence scores in [0, 1]
# indicating that the label is positive (1 in the first column of agaricus.txt.test)
my $predictions = $booster->predict(data => $test_data);
say join "\n", @$predictions[0 .. 10];
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AIIA/GMT.pm view on Meta::CPAN
=head1 SYNOPSIS
use YAML;
use AIIA::GMT;
$result = &text2entity('less than 3000 words');
print Dump $result;
=head1 DESCRIPTION
AIIA::GMT is a XML-RPC client of a web-service server, AIIA gene mention tagger, which provides the service to recognize named entities in the biomedical articles.
view all matches for this distribution
view release on metacpan or search on metacpan
print <<EOF;
Content-Type: text/plain
AIS::client module was not able to open DirDB [${SessionPrefix}_sessions]
eval result:
$@
AIS::client version $VERSION
my $Response = miniget $host, $port,
"${aissri}query?$OTUkey", $agent;
# carp "got $Response";
(my $AISXML) =
$Response =~ m#<aisresponse>(.+)</aisresponse>#si
or die "no <aisresponse> element from ${aissri}query?$OTUkey\n in BEGINRESPONSE\n$Response\nENDRESPONSE";
$Sessions{$Coo}->{AISXML} = $AISXML;
# parse AISXML...
my %aisvar;
foreach (qw{
identity
};
goto HAVE_ID;
}else{
# redirect us to AIS server PRESENT function
redirect "${aissri}present?http$ssl_ext://$ENV{SERVER_NAME}$ENV{SCRIPT_NAME}$ENV{PATH_INFO}?OTU_KEY=";
exit;
};
1;
__END__
=head1 NAME
AIS::client - get an authenticated e-mail address for users of your web service
=head1 SYNOPSIS
BEGIN{umask(0077 & umask())}; # if your web server gives you a 0177 umask
use AIS::client;
=head1 DESCRIPTION
The goal of AIS::client is to provide a very easy way to require an
authenticated identity for a perl web application. The user's e-mail
address appears in a global variable C<$AIS_IDENTITY> and a persistent
session stash is available in C<%AIS_STASH>.
=head1 USE-LINE CONFIGURATION OPTIONS
=item aissri
=item freq
By default, AIS::client will examine the session directory for stale
sessions approximately once every 2000 invocations. Adjust this
with the C<freq> parameter. C<0> will suppress housekeeping entirely.
=item maxage
Minimum time in seconds since C<$AIS_STASH{last_access}> that will
trigger session deletion at housekeeping time. Defaults to C<72*60*60>.
=head1 ENDING SESSIONS
AIS::client recognizes a reserved QUERY_STRING of C<LOGOUT> which will
end a session, delete all session data, and offer the user a link to
the logout function of the specified AIS server so they can log out
of that too if they want.
=head1 HISTORY
view all matches for this distribution
view release on metacpan or search on metacpan
print "ok 1\n";
######################### End of black magic.
# Insert your test code below (better if it prints "ok 13"
# (correspondingly "not ok 13") depending on the success of chunk 13
# of the test code):
view all matches for this distribution
view release on metacpan or search on metacpan
lib/AIX/LVM.pm view on Meta::CPAN
sub init
{
my $self = shift;
my ($result, %lslv, %lspv, %lsvg, @lslv, @lsvg, @lspv);
my ($lsvg, $lsvg_error) = $self->_exec_open3("lsvg -o");
croak "Error found during execution of lsvg -o: $lsvg_error\n" if $lsvg_error;
@lsvg = $self->_splitter($lsvg, qr'\n+');
foreach my $lvg (@lsvg) {
$self->{$lvg}= $self->_get_lv_pv_props($lvg); #Hierarchy is lsvg -> lslv and lspv
lib/AIX/LVM.pm view on Meta::CPAN
# This subroutine is used to execute the commands using open3 to capture Error stream.
sub _exec_open3
{
my $self = shift;
my ($result, $error);
my $writer_h = new IO::Handle;
my $reader_h = new IO::Handle;
my $error_h = new IO::Handle;
my $pid = open3($writer_h, $reader_h, $error_h, @_) or croak "Not able to open3: $! \n";
$reader_h->autoflush();
lib/AIX/LVM.pm view on Meta::CPAN
$selector->add($reader_h, $error_h); ## Add the handlers to select call ##
while( my @ready = $selector->can_read ){
foreach my $fh ( @ready ){
if( fileno($fh) == fileno($reader_h) ){
my $ret = $reader_h->sysread($_, 1024);
$result .= $_;
$selector->remove($fh) unless $ret;
}
if( fileno($fh) == fileno($error_h) ){
my $ret = $error_h->sysread($_, 1024);
$error .= $_;
lib/AIX/LVM.pm view on Meta::CPAN
$reader_h->autoflush();
$error_h->autoflush();
waitpid $pid, 0;
my $rc = $? >> 8;
carp "Error in executing the command\n" if ($rc);
return $result, $error;
}
# Splitter based on pattern
sub _splitter
lib/AIX/LVM.pm view on Meta::CPAN
use AIX::LVM;
my $lvm = AIX::LVM->new;
my @volume_group = $lvm->get_logical_volume_group(); #List all the Volume groups present.
my @pvs = $lvm->get_physical_volumes(); #List all the Physical volumes present.
my @lvs = $lvm->get_logical_volumes(); #List all the Physical volumes present.
#%vg_props consist of all the volume group properties in key=>value format.
my %vg_props = $lvm->get_volume_group_properties("rootvg");
#%lv_props consist of all the properties for logical volume "x" under volume group "rootvg";
lib/AIX/LVM.pm view on Meta::CPAN
=over 4
=item get_logical_volume_group();
Returns an array of volume groups present.
=item get_physical_volumes();
Returns an array of Physical volumes present.
=item get_logical_volumes();
Returns an array of Logical volumes present.
=item get_volume_group_properties("rootvg")
Returns a hash of properties for volume group "rootvg"
=item get_logical_volume_properties("rootvg","hd5")
Returns a hash of properties for logical volume "hd5" present under volume group "rootvg"
=item get_physical_volume_properties("rootvg","hdisk0")
Returns a hash of properties for physical volume "hdisk0" present under volume group "rootvg"
=item get_LV_logical_command("rootvg","hd5")
Returns output as scalar for command equivalent of lslv -l hd5
view all matches for this distribution
view release on metacpan or search on metacpan
example1.pl view on Meta::CPAN
#!/usr/bin/env perl
#
#
# Dump the result of all AIX::Perfstat functions
#
#
use strict;
use warnings;
view all matches for this distribution
view release on metacpan or search on metacpan
#--------------------------------------------------------
# Simple functions to populate the hash
#--------------------------------------------------------
# Look up a prtconf field: return the value portion of the LAST line in
# the file-global @pconf_array matching the pattern $param (lines look
# like "Name: value"), or undef when no line matches.
sub prtconf_param {
    my ($param) = @_;

    # $param is used as a regex on purpose; callers pass anchored
    # patterns such as '^Memory Size:'.
    my @matches = grep { /$param/ } @pconf_array;
    return undef unless @matches;

    # Use a lexical match instead of the original "($_ = pop @result)",
    # which clobbered the global $_ (a side effect visible to callers).
    # Also check the match: the original returned a stale $1 from some
    # earlier regex when the matched line contained no colon; now such
    # a line yields undef.
    my ($value) = $matches[-1] =~ /:\s*(.*)/;
    return $value;
}
sub get_total_ram {
my $hash = shift @_;
my $memory = prtconf_param( '^Memory Size:' );
view all matches for this distribution
view release on metacpan or search on metacpan
lib/ALBD.pm view on Meta::CPAN
# This is a description heared more towards understanding or modifying
# the code, rather than using the program.
#
# LiteratureBasedDiscovery.pm - provides functionality to perform LBD
#
# Matrix Representation:
# LBD is performed using Matrix and Vector operations. The major components
# are an explicit knowledge matrix, which is squared to find the implicit
# knowledge matrix.
#
# The explicit knowledge is read from UMLS::Association N11 matrix. This
# matrix contains the co-occurrence counts for all CUI pairs. The
# UMLS::Association database is completely independent from
# implementation, so any dataset, window size, or anything else may be used.
# Data is read in as a sparse matrix using the Discovery::tableToSparseMatrix
# function. This returns the primary data structures and variables used
# throughtout LBD.
#
# Matrix representation:
# This module uses a matrix representation for LBD. All operations are
# performed either as matrix or vector operations. The core data structure
# are the co-occurrence matrices explicitMatrix and implicitMatrix. These
# matrices have dimensions vocabulary size by vocabulary size. Each row
# corresponds to the all co-occurrences for a single CUI. Each column of that
# row corresponding to a co-occurrence with a single CUI. Since the matrices
# tend to be sparse, they are stored as hashes of hashes, where the the first
# key is for a row, and the second key is for a column. The keys of each hash
# are the indeces within the matrix. The hash values are the number of
# co-ocurrences for that CUI pair (e.g. ${${$explicit{C0000000}}{C1111111} = 10
# means that CUI C0000000 and C1111111 co-occurred 10 times).
#
# Now with an understanding of the data strucutres, below is a breif
# description of each:
#
# startingMatrix <- A matrix containing the explicit matrix rows for all of the
# start terms. This makes it easy to have multiple start terms
# and using this matrix as opposed to the entire explicit
lib/ALBD.pm view on Meta::CPAN
my $DEBUG = 0;
my $N11_TABLE = 'N_11';
my %lbdOptions = ();
#rankingProcedure <-- the procedure to use for ranking
#rankingMeasure <-- the association measure to use for ranking
#implicitOutputFile <--- the output file of results
#explicitInputFile <-- file to load explicit matrix from
#implicitInputFile <-- load implicit from file rather than calculating
#references to other packages
my $umls_interface;
lib/ALBD.pm view on Meta::CPAN
#####################################################
####################################################
# performs LBD
# input: none
# output: none, but a results file is written to disk
sub performLBD {
my $self = shift;
my $start; #used to record run times
#implicit matrix ranking requires a different set of procedures
if ($lbdOptions{'rankingProcedure'} eq 'implicitMatrix') {
$self->performLBD_implicitMatrixRanking();
return;
}
if (exists $lbdOptions{'targetCuis'}) {
lib/ALBD.pm view on Meta::CPAN
$start = time();
my $startingMatrixRef =
Discovery::getRows($startCuisRef, $explicitMatrixRef);
print "Got Starting Matrix in ".(time() - $start)."\n";
#if using average minimum weight, grab the a->b scores
my %abPairsWithScores = ();
if ($lbdOptions{'rankingProcedure'} eq 'averageMinimumWeight'
|| $lbdOptions{'rankingProcedure'} eq 'ltc_amw') {
#apply semantic type filter to columns only
if ((scalar keys %{$linkingAcceptTypesRef}) > 0) {
lib/ALBD.pm view on Meta::CPAN
$explicitMatrixRef, $linkingAcceptTypesRef, $umls_interface);
}
#initialize the abPairs to frequency of co-occurrence
foreach my $row (keys %{$startingMatrixRef}) {
foreach my $col (keys %{${$startingMatrixRef}{$row}}) {
$abPairsWithScores{"$row,$col"} = ${${$startingMatrixRef}{$row}}{$col};
}
}
Rank::getBatchAssociationScores(\%abPairsWithScores, $explicitMatrixRef, $lbdOptions{'rankingMeasure'}, $umls_association);
}
#Apply Semantic Type Filter to the explicit matrix
if ((scalar keys %{$linkingAcceptTypesRef}) > 0) {
$start = time();
lib/ALBD.pm view on Meta::CPAN
print "Semantic Type Filter in ".(time() - $start)."\n";
}
#Score Implicit Connections
$start = time();
my $scoresRef;
if ($lbdOptions{'rankingProcedure'} eq 'allPairs') {
$scoresRef = Rank::scoreImplicit_fromAllPairs($startingMatrixRef, $explicitMatrixRef, $implicitMatrixRef, $lbdOptions{'rankingMeasure'}, $umls_association);
} elsif ($lbdOptions{'rankingProcedure'} eq 'averageMinimumWeight') {
$scoresRef = Rank::scoreImplicit_averageMinimumWeight($startingMatrixRef, $explicitMatrixRef, $implicitMatrixRef, $lbdOptions{'rankingMeasure'}, $umls_association, \%abPairsWithScores);
} elsif ($lbdOptions{'rankingProcedure'} eq 'linkingTermCount') {
$scoresRef = Rank::scoreImplicit_linkingTermCount($startingMatrixRef, $explicitMatrixRef, $implicitMatrixRef);
} elsif ($lbdOptions{'rankingProcedure'} eq 'frequency') {
$scoresRef = Rank::scoreImplicit_frequency($startingMatrixRef, $explicitMatrixRef, $implicitMatrixRef);
} elsif ($lbdOptions{'rankingProcedure'} eq 'ltcAssociation') {
$scoresRef = Rank::scoreImplicit_ltcAssociation($startingMatrixRef, $explicitMatrixRef, $implicitMatrixRef, $lbdOptions{'rankingMeasure'}, $umls_association);
} elsif ($lbdOptions{'rankingProcedure'} eq 'ltc_amw') {
$scoresRef = Rank::scoreImplicit_LTC_AMW($startingMatrixRef, $explicitMatrixRef, $implicitMatrixRef, $lbdOptions{'rankingMeasure'}, $umls_association, \%abPairsWithScores);
} else {
die ("Error: Invalid Ranking Procedure\n");
}
print "Scored in: ".(time()-$start)."\n";
#Rank Implicit Connections
$start = time();
my $ranksRef = Rank::rankDescending($scoresRef);
print "Ranked in: ".(time()-$start)."\n";
#Output The Results
open OUT, ">$lbdOptions{implicitOutputFile}"
or die "unable to open implicit ouput file: "
."$lbdOptions{implicitOutputFile}\n";
my $outputString = $self->_rankedTermsToString($scoresRef, $ranksRef);
my $paramsString = $self->_parametersToString();
print OUT $paramsString;
print OUT $outputString;
close OUT;
lib/ALBD.pm view on Meta::CPAN
#----------------------------------------------------------------------------
# performs LBD, closed discovery
# input: none
# output: none, but a results file is written to disk
sub performLBD_closedDiscovery {
my $self = shift;
my $start; #used to record run times
print "Closed Discovery\n";
lib/ALBD.pm view on Meta::CPAN
}
print " num in common = ".(scalar keys %inCommon)."\n";
#Score and Rank
#Score the linking terms in common
my $scoresRef = \%inCommon;
#TODO score is just summed frequency right now
#Rank Implicit Connections
$start = time();
my $ranksRef = Rank::rankDescending($scoresRef);
print "Ranked in: ".(time()-$start)."\n";
#Output The Results
open OUT, ">$lbdOptions{implicitOutputFile}"
or die "unable to open implicit ouput file: "
."$lbdOptions{implicitOutputFile}\n";
my $outputString = $self->_rankedTermsToString($scoresRef, $ranksRef);
my $paramsString = $self->_parametersToString();
print OUT $paramsString;
print OUT $outputString;
print OUT "\n\n---------------------------------------\n\n";
lib/ALBD.pm view on Meta::CPAN
#Done
print "DONE!\n\n";
}
#NOTE, this is experimental code for using the implicit matrix as input
# to association measures and then rank. This provides a nice method of
# association for implicit terms, but there are implementation problems
# primarily memory constraints or time constraints now, because this
# requires the entire implicit matrix be computed. This can be done, but
# access to it is then slow. Would require a major redo of the code
#
=comment
# performs LBD, but using implicit matrix ranking schemes.
# Since the order of operations for those methods is slightly different
# a new method has been created.
# input: none
# output: none, but a results file is written to disk
sub performLBD_implicitMatrixRanking {
my $self = shift;
my $start; #used to record run times
print $self->_parametersToString();
print "In Implicit Ranking\n";
lib/ALBD.pm view on Meta::CPAN
print "linkingAcceptTypes = ".(join(',', keys %{$linkingAcceptTypesRef}))."\n";
print "targetAcceptTypes = ".(join(',', keys %{$targetAcceptTypesRef}))."\n";
#Score Implicit Connections
$start = time();
my $scoresRef;
$scoresRef = Rank::scoreImplicit_fromImplicitMatrix($startCuisRef, $lbdOptions{'implicitInputFile'}, $lbdOptions{rankingMeasue}, $umls_association);
print "Scored in: ".(time()-$start)."\n";
#Rank Implicit Connections
$start = time();
my $ranksRef = Rank::rankDescending($scoresRef);
print "Ranked in: ".(time()-$start)."\n";
#Output The Results
open OUT, ">$lbdOptions{implicitOutputFile}"
or die "unable to open implicit ouput file: "
."$lbdOptions{implicitOutputFile}\n";
my $outputString = $self->_rankedTermsToString($scoresRef, $ranksRef);
my $paramsString = $self->_parametersToString();
print OUT $paramsString;
print OUT $outputString;
close OUT;
lib/ALBD.pm view on Meta::CPAN
##################################################
################ Time Slicing ####################
##################################################
#NOTE: This function isn't really tested, and is really slow right now
# Generates precision and recall values by varying the threshold
# of the A->B ranking measure.
# input: none
# output: none, but precision and recall values are printed to STDOUT
sub timeSlicing_generatePrecisionAndRecall_explicit {
my $NUM_SAMPLES = 100; #TODO, read fomr file number of samples to average over for timeslicing
lib/ALBD.pm view on Meta::CPAN
#create the starting matrix
my $startingMatrixRef
= TimeSlicing::generateStartingMatrix($explicitMatrixRef, \%lbdOptions, $startAcceptTypesRef, $NUM_SAMPLES, $umls_interface);
#get association scores for the starting matrix
my $assocScoresRef = TimeSlicing::getAssociationScores(
$startingMatrixRef, $lbdOptions{'rankingMeasure'}, $umls_association);
my ($min, $max) = TimeSlicing::getMinMax($assocScoresRef);
my $range = $max-$min;
#load the post cutoff matrix for the necessary rows
my $postCutoffMatrixRef
= TimeSlicing::loadPostCutOffMatrix($startingMatrixRef, $explicitMatrixRef, $lbdOptions{'postCutoffFileName'});
lib/ALBD.pm view on Meta::CPAN
if ((scalar keys %{$targetAcceptTypesRef}) > 0) {
Filters::semanticTypeFilter_columns(
$postCutoffMatrixRef, $targetAcceptTypesRef, $umls_interface);
}
#apply a threshold at $numIntervals% intervals to generate an 11 point
# interpolated precision/recall curve for linking term ranking/thresholding
#stats for collecting info about predicted vs. true
my $predictedAverage = 0;
my $trueAverage = 0;
my $trueMin = 99999;
my $trueMax = -999999;
my $predictedMin = 999999;
my $predictedMax = 999999;
my $predictedTotal = 0;
my $trueTotal = 0;
my $allPairsCount = scalar keys %{$assocScoresRef};
for (my $i = $numIntervals; $i >= 0; $i--) {
#determine the number of samples to threshold
my $numSamples = $i*($allPairsCount/$numIntervals);
print "i, numSamples/allPairsCount = $i, $numSamples/$allPairsCount\n";
#grab samples at just 10 to estimate the final point (this is what
# makes it an 11 point curve)
if ($numSamples == 0) {
$numSamples = 10;
}
#apply a threshold (number of samples)
my $thresholdedStartingMatrixRef = TimeSlicing::grabKHighestRankedSamples($numSamples, $assocScoresRef, $startingMatrixRef);
#generate implicit knowledge
my $implicitMatrixRef = Discovery::findImplicit($explicitMatrixRef, $thresholdedStartingMatrixRef);
#Remove Known Connections
$implicitMatrixRef
= Discovery::removeExplicit($startingMatrixRef, $implicitMatrixRef);
lib/ALBD.pm view on Meta::CPAN
print "predicted - total, min, max, average = $predictedTotal, $predictedMin, $predictedMax, $predictedAverage\n";
print "true - total, min, max, average = $trueTotal, $trueMin, $trueMax, $trueAverage\n";
}
# generates precision and recall values by varying the threshold
# of the A->C ranking measure. Also generates precision at k, and
# mean average precision
# input: none
# output: none, but precision, recall, precision at k, and map values
# output to STDOUT
lib/ALBD.pm view on Meta::CPAN
#-------
#-------
# AB Scoring (if needed)
#-------
#if using average minimum weight, grab the a->b scores, #TODO this is sloppy here, but it has to be here...how to make it fit better?
my %abPairsWithScores = ();
if ($lbdOptions{'rankingProcedure'} eq 'averageMinimumWeight'
|| $lbdOptions{'rankingProcedure'} eq 'ltc_amw') {
print "getting AB scores\n";
#apply semantic type filter to columns only
if ((scalar keys %{$linkingAcceptTypesRef}) > 0) {
Filters::semanticTypeFilter_columns(
$explicitMatrixRef, $linkingAcceptTypesRef, $umls_interface);
}
#initialize the abPairs to the frequency of co-occurrence
foreach my $row (keys %{$startingMatrixRef}) {
foreach my $col (keys %{${$startingMatrixRef}{$row}}) {
$abPairsWithScores{"$row,$col"} = ${${$startingMatrixRef}{$row}}{$col};
}
}
Rank::getBatchAssociationScores(
\%abPairsWithScores, $explicitMatrixRef, $lbdOptions{'rankingMeasure'}, $umls_association);
}
#--------
#------------
# Matrix Filtering/Thresholding
#------------
#load or threshold the matrix
if (exists $lbdOptions{'thresholdedMatrix'}) {
print "loading thresholded matrix\n";
$explicitMatrixRef = (); #clear (for memory)
$explicitMatrixRef = Discovery::fileToSparseMatrix($lbdOptions{'thresholdedMatrix'});
}
#else {#TODO apply a threshold}
#NOTE, we must threshold the entire matrix because that is how we are calculating association scores
#Apply Semantic Type Filter to the explicit matrix
print "applying semantic filter to explicit matrix\n";
if ((scalar keys %{$linkingAcceptTypesRef}) > 0) {
Filters::semanticTypeFilter_rowsAndColumns(
lib/ALBD.pm view on Meta::CPAN
}
}
#-------------------------------------------
#At this point, the explicitMatrixRef has been filtered and thresholded
#The predictions matrix Ref has been generated from the filtered and
# thresholded explicitMatrixRef, only rows of starting terms remain, filtered, and
# had explicit removed
#Association scores are generated using the explicitMatrixRef
#--------------
# Get the ranks of all predictions
#--------------
#get the scores and ranks separately for each row
# thereby generating scores and ranks for each starting
# term individually
my %rowRanks = ();
my ($n1pRef, $np1Ref, $npp);
print "getting row ranks\n";
foreach my $rowKey (keys %{$predictionsMatrixRef}) {
lib/ALBD.pm view on Meta::CPAN
$startingRow{$rowKey} = ${$startingMatrixRef}{$rowKey};
my %implicitRow = ();
$implicitRow{$rowKey} = ${$predictionsMatrixRef}{$rowKey};
#Score Implicit Connections
my $scoresRef;
if ($lbdOptions{'rankingProcedure'} eq 'allPairs') {
#get stats just a single time
if (!defined $n1pRef || !defined $np1Ref || !defined $npp) {
($n1pRef, $np1Ref, $npp) = Rank::getAllStats($explicitMatrixRef);
}
$scoresRef = Rank::scoreImplicit_fromAllPairs(\%startingRow, $explicitMatrixRef, \%implicitRow, $lbdOptions{'rankingMeasure'}, $umls_association, $n1pRef, $np1Ref, $npp);
} elsif ($lbdOptions{'rankingProcedure'} eq 'averageMinimumWeight') {
#get stats just a single time
if (!defined $n1pRef || !defined $np1Ref || !defined $npp) {
($n1pRef, $np1Ref, $npp) = Rank::getAllStats($explicitMatrixRef);
}
$scoresRef = Rank::scoreImplicit_averageMinimumWeight(\%startingRow, $explicitMatrixRef, \%implicitRow, $lbdOptions{'rankingMeasure'}, $umls_association, \%abPairsWithScores, $n1pRef, $np1Ref, $npp);
} elsif ($lbdOptions{'rankingProcedure'} eq 'linkingTermCount') {
$scoresRef = Rank::scoreImplicit_linkingTermCount(\%startingRow, $explicitMatrixRef, \%implicitRow);
} elsif ($lbdOptions{'rankingProcedure'} eq 'frequency') {
$scoresRef = Rank::scoreImplicit_frequency(\%startingRow, $explicitMatrixRef, \%implicitRow);
} elsif ($lbdOptions{'rankingProcedure'} eq 'ltcAssociation') {
$scoresRef = Rank::scoreImplicit_ltcAssociation(\%startingRow, $explicitMatrixRef, \%implicitRow, $lbdOptions{'rankingMeasure'}, $umls_association);
} elsif ($lbdOptions{'rankingProcedure'} eq 'ltc_amw') {
#get stats just a single time
if (!defined $n1pRef || !defined $np1Ref || !defined $npp) {
($n1pRef, $np1Ref, $npp) = Rank::getAllStats($explicitMatrixRef);
}
$scoresRef = Rank::scoreImplicit_LTC_AMW(\%startingRow, $explicitMatrixRef, \%implicitRow, $lbdOptions{'rankingMeasure'}, $umls_association, \%abPairsWithScores, $n1pRef, $np1Ref, $npp);
} else {
die ("Error: Invalid Ranking Procedure\n");
}
#Rank Implicit Connections
my $ranksRef = Rank::rankDescending($scoresRef);
#save the row ranks
$rowRanks{$rowKey} = $ranksRef;
}
#output the results at 10 intervals
TimeSlicing::outputTimeSlicingResults($goldMatrixRef, \%rowRanks, 10);
}
lib/ALBD.pm view on Meta::CPAN
##############################################################################
# function to produce output
##############################################################################
# outputs the implicit terms to string
# input: $scoresRef <- a reference to a hash of scores (hash{CUI}=score)
# $ranksRef <- a reference to an array of CUIs ranked by their score
# $printTo <- optional, outputs the $printTo top ranked terms. If not
# specified, all terms are output
# output: a line-separated string containing ranked terms, scores, and their
# preferred terms
sub _rankedTermsToString {
my $self = shift;
my $scoresRef = shift;
my $ranksRef = shift;
my $printTo = shift;
#set printTo
if (!$printTo) {
lib/ALBD.pm view on Meta::CPAN
# Emit one tab-separated line per ranked CUI: rank, score, CUI, preferred term.
for (my $i = 0; $i < $printTo; $i++) {
#add the rank (1-based, so the top result is rank 1)
$index = $i+1;
$string .= "$index\t";
#add the score
# NOTE(review): the second sprintf argument is a quoted string with a
# trailing "\t" embedded; "%.5f" forces it through numeric conversion,
# which discards the tab but emits an "isn't numeric" warning. The
# likely intent was:  $string .= sprintf "%.5f\t", ${$scoresRef}{${$ranksRef}[$i]};
# — confirm before changing, since output bytes are identical.
$string .= sprintf "%.5f\t", "${$scoresRef}{${$ranksRef}[$i]}\t";
#add the CUI
$string .= "${$ranksRef}[$i]\t";
#add the name (preferred term looked up through the UMLS interface)
my $name = $umls_interface->getPreferredTerm(${$ranksRef}[$i]);
#if no preferred name, get anything
lib/ALBD.pm view on Meta::CPAN
print "npp = $npp\n";
print "n1p = $n1p\n";
print "np1 = $np1\n";
#Test other rank methods
my $scoresRef = Rank::scoreImplicit_fromAllPairs($startingMatrixRef, $explicitMatrixRef, $implicitMatrixRef, $lbdOptions{rankingMethod}, $umls_association);
my $ranksRef = Rank::rankDescending($scoresRef);
print "Scores: \n";
foreach my $cui (keys %{$scoresRef}) {
print " scores{$cui} = ${$scoresRef}{$cui}\n";
}
print "Ranks = ".join(',', @{$ranksRef})."\n";
}
sub _printMatrix {
view all matches for this distribution
view release on metacpan or search on metacpan
lib/ALPM/Conf.pm view on Meta::CPAN
# Look up a database entry by repository name, creating it if absent.
# input:  $dbs  <- arrayref of database hashrefs, each keyed by 'name'
#         $name <- repository name to find or create
# output: the matching database hashref, or a freshly appended one
# NOTE(review): the tail of this sub lies outside this excerpt;
# presumably it ends by returning $new — confirm in the full source.
sub _getdb
{
my($dbs, $name) = @_;
# The order databases are added must be preserved as must the order of URLs.
for my $db (@$dbs){
# Return the existing entry on a name match so ordering is untouched.
return $db if($db->{'name'} eq $name);
}
# No match: create a new entry and append it, preserving insertion order.
my $new = { 'name' => $name };
push @$dbs, $new;
lib/ALPM/Conf.pm view on Meta::CPAN
# (fragment of an enclosing sub; both edges are cut in this excerpt)
# Apply each parsed config option to the libalpm handle.
while(my ($opt, $val) = each %$opts){
# The SetOption type in typemap croaks on error, no need to check.
_setopt($alpm, $opt, $val);
}
# True iff this libalpm build advertises signature support in its caps.
my $usesl = grep { /signatures/ } $alpm->caps;
for my $db (@$dbs){
# Unpack the repo name, signature level, and mirror list for this db.
my($r, $sl, $mirs) = @{$db}{'name', 'siglvl', 'mirrors'};
# Skip repos that ended up with no mirrors configured.
next if(!@$mirs);
# presumably expands $arch/$repo placeholders inside each mirror URL
# — TODO confirm against _expurls' definition.
_expurls($mirs, $opts->{'arch'}, $r);
view all matches for this distribution
view release on metacpan or search on metacpan
examples/amfclient.pl view on Meta::CPAN
# (script excerpt) Issue an AMF remote call and print the response as JSON.
# $client/$service/$method/$endpoint are set up earlier in the script,
# outside this excerpt.
#$client->setHTTPProxy('http://127.0.0.1:8888');
#$client->addHeader( 'serviceBrowser', 'true' );
# Persist cookies to disk so session state survives between runs.
$client->setHTTPCookieJar( HTTP::Cookies->new(file => "/tmp/lwpcookies.txt", autosave => 1, ignore_discard => 1 ) );
my $params = [ "italy" ];
# Invoke the remote "Service.method" with the parameter list.
my ($response) = $client->call( $service.'.'.$method, $params );
# Build a JSON encoder able to serialize blessed response objects
# (allow_blessed/convert_blessed) with human-readable output.
# NOTE(review): ascii(1) is set after utf8(1); with the JSON module the
# later ascii flag takes precedence for escaping — confirm intended.
my $json = JSON->new;
$json->ascii(1);
$json->utf8(1);
$json->pretty(1);
$json->allow_blessed(1);
$json->convert_blessed(1);
my $json_data = $json->encode( $response->getData );
# On success print the payload; otherwise die with the full context.
if ( $response->is_success ) {
print $json_data;
} else {
die "Can not send remote request for $service.$method method with params on $endpoint using AMF".$client->getEncoding()." encoding:\n".$json_data."\n";
};
view all matches for this distribution
view release on metacpan or search on metacpan
#!/usr/bin/perl -w
# Copyright (c) 2003 by Vsevolod (Simon) Ilyushchenko. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# The code is based on the -PHP project (http://amfphp.sourceforge.net/)
#This is a server-side script that responds to a Macromedia Flash client
#talking in ActionScript. See the FLAP project site (http://www.simonf.com/amfperl)
#for more information.
use strict;
view all matches for this distribution