Result:
found more than 317 distributions - search limited to the first 2001 files matching your query (run in 1.283)


AI-General


test.pl  view on Meta::CPAN

# Before `make install' is performed this script should be runnable with
# `make test'. After `make install' it should work as `perl test.pl'

#########################

# change 'tests => 1' to 'tests => last_test_to_print';

use Test;
BEGIN { plan tests => 1 };
use AI::General;
ok(1); # If we made it this far, we're ok.



AI-Genetic-Pro


lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	
	
    my $gd = $graph->plot( [ [ 0..$#{$data->[0]} ], @$data ] ) or croak($@);
    open(my $fh, '>', $params{-filename}) or croak($@);
    binmode $fh;
    print $fh $gd->png;
    close $fh;
    
    return 1;
}
#=======================================================================

lib/AI/Genetic/Pro.pm  view on Meta::CPAN

	
    # evolve 10 generations
    $ga->evolve(10);
    
    # best score
    print "SCORE: ", $ga->as_value($ga->getFittest), ".\n";
    
    # save evolution path as a chart
    $ga->chart(-filename => 'evolution.png');
     
    # save state of GA

lib/AI/Genetic/Pro.pm  view on Meta::CPAN


Y label (default: I<Value>).

=item -format

Format of values, like C<sprintf> (default: I<'%.2f'>).

=item -legend1

Description of min line (default: I<Min value>).
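
Putting the documented options together, a hedged sketch of a chart() call; the
option values below are illustrative only, and the full option list is truncated
in this excerpt:

    $ga->chart(
        -filename => 'evolution.png',   # output file, as in the synopsis above
        -format   => '%.1f',            # sprintf-style value format
        -legend1  => 'Minimum score',   # description of the min line
    );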



AI-Genetic


Genetic.pm  view on Meta::CPAN


    last if $self->{TERM}->($self);

#    my @f = $self->getFittest(10);
#    for my $f (@f) {
#      print STDERR "    Fitness = ", $f->score, "..\n";
#      print STDERR "    Genes are: @{$f->genes}.\n";
#    }
  }
}

# sub sortIndividuals():

Genetic.pm  view on Meta::CPAN

	-terminate  => \&terminateFunc,
       );

     $ga->init(10);
     $ga->evolve('rouletteTwoPoint', 100);
     print "Best score = ", $ga->getFittest->score, ".\n";

     sub fitnessFunc {
         my $genes = shift;

         my $fitness;



AI-Image


lib/AI/Image.pm  view on Meta::CPAN

        'key'   => 'sk-......',
    );
    
    my $image_url = $ai->image("A photorealistic image of a cat wearing a top hat and monocle.");

    print $image_url;

=head1 DESCRIPTION

This module provides a simple interface to generate images using OpenAI's DALL-E API.
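
A minimal end-to-end sketch based on the synopsis above; the constructor name
(new) is an assumption, since it is cut off in this excerpt, and the key and the
prompt are placeholders:

    use AI::Image;

    # create a client with your OpenAI API key
    my $ai = AI::Image->new(
        'key'   => 'sk-......',
    );

    # ask for an image and print the URL returned by the API
    my $image_url = $ai->image("A watercolor painting of a lighthouse at dawn.");
    print $image_url;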



AI-Logic-AnswerSet


lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

our $VERSION = '0.02';

sub executeFromFileAndSave {		#Executes DLV with a file as input and saves the output in another file

	open DLVW, ">>", "$_[1]";
	print DLVW $_[2];
	close DLVW;

	open(SAVESTDOUT, ">&STDOUT") or die "Can't save STDOUT: $!\n";
	open(STDOUT, ">$_[0]") or die "Can't open STDOUT to $_[0]", "$!\n";

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

		}
		
	}

	else {
		print "INPUT ERROR\n";
	}

	return @returned_value;

}

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

	my $code = $_[1];
	my @isAFile = stat($program);

	if(@isAFile) {
		open PROGRAM, ">>", $program;
		print PROGRAM "$code\n";
		close PROGRAM;
	}

	else {
		$program = \($_[0]);

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

	my $comparison = 0;
	if(@num and @operators) {
		$comparison = 1;
	}
	elsif(@num and !@operators) {
		print "Error: comparison element missing";
		return @ans;
	}
	
	

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN


	my $file = $_[0];
	my $code = $_[1];

	open FILE, ">", $file;
	print FILE "$code\n";
	close FILE;

}

sub addFacts {

lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

	my $filename = $_[3];
	
	open FILE, $append, $filename;

	foreach my $f (@facts) {
		print FILE "$name($f).\n";
	}
	close FILE;
}


lib/AI/Logic/AnswerSet.pm  view on Meta::CPAN

The user can set some constraints on the data to be saved in the hashmap, such as predicates, or answer sets, or both.

	my @mappedAS = AI::Logic::AnswerSet::mapAS(@result,@predicates,@answerSets);

For instance, consider the 3-colorability problem: imagine having the edges in the
hashmap and wanting to print the edges contained in the third answer set returned
by DLV. The following print statement is an example, useful for understanding how
the hashmap works:

	print "Edges: @{$mappedAS[2]{edge}}\n";

In this case, we are printing the array containing the predicate "edge".
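
More generally, each element of @mappedAS is a hash reference keyed by predicate
name, whose values are array references of that predicate's instances, so the whole
mapping can be walked like this (a sketch inferred from the access pattern above):

	# iterate over every answer set and every predicate kept in the mapping
	for my $i (0 .. $#mappedAS) {
		for my $pred (keys %{ $mappedAS[$i] }) {
			print "Answer set $i, $pred: @{ $mappedAS[$i]{$pred} }\n";
		}
	}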

=head3 getPred

Easily manage the hashmap and get the desired predicate (see the print example
described in the method above):

	my @edges = AI::Logic::AnswerSet::getPred(\@mappedAS,3,"edge");

=head3 getProjection



AI-ML


inc/MyBuilder.pm  view on Meta::CPAN


    open my $i_fh, "<", $file   or die "$!";
    open my $o_fh, ">", $output or die "$!";
    while (<$i_fh>) {
        s/REAL/float/g;
        print {$o_fh} $_;
    }
    close $o_fh;
    close $i_fh;
}



AI-MXNet-Gluon-ModelZoo


examples/image_classification.pl  view on Meta::CPAN

my $rgb_std = nd->array([0.229, 0.224, 0.225])->reshape([1,3,1,1]);
$image = ($image->astype('float32') / 255 - $rgb_mean) / $rgb_std;

# Now we can recognize the object in the image.
# We perform an additional softmax on the output to obtain probability scores.
# And then print the top-5 recognized objects.
my $prob = $net->($image)->softmax;
for my $idx (@{ $prob->topk(k=>5)->at(0) })
{
    my $i = $idx->asscalar;
    printf(
        "With prob = %.5f, it contains %s\n",
        $prob->at(0)->at($i)->asscalar, $text_labels[$i]
    );
}



AI-MXNet


examples/calculator.pl  view on Meta::CPAN

    my $func = $args{func};
    my $batch_size = $args{batch_size}//128;
    my($train_iter, $eval_iter) = samples($batch_size, $func);
    my $sym = nn_fc();

    ## call as ./calculator.pl 1 to just print model and exit
    if($ARGV[0]) {
        my @dsz = @{$train_iter->data->[0][1]->shape};
        my @lsz = @{$train_iter->label->[0][1]->shape};
        my $shape = {
            data          => [ $batch_size, splice @dsz,  1 ],
            softmax_label => [ $batch_size, splice @lsz, 1 ],
        };
        print mx->viz->plot_network($sym, shape => $shape)->graph->as_png;
        exit;
    }

    my $model = mx->mod->Module(
        symbol => $sym,

examples/calculator.pl  view on Meta::CPAN


    # wrap a helper around making predictions
    my ($arg_params) = $model->get_params;
    for my $k (sort keys %$arg_params)
    {
	print "$k -> ". $arg_params->{$k}->aspdl."\n";
    }
    return sub {
        my($n, $m) = @_;
        return $model->predict(mx->io->NDArrayIter(
            batch_size => 1,

examples/calculator.pl  view on Meta::CPAN

    my($n, $m) = @_;
    return $n / $m;
}, batch_size => 10, epoch => 80);


print "12345 + 54321 ≈ ", $add->(12345, 54321), "\n";
print "188 - 88 ≈ ", $sub->(188, 88), "\n";
print "250 * 2 ≈ ", $mul->(250, 2), "\n";
print "250 / 2 ≈ ", $div->(250, 2), "\n";



AI-MXNetCAPI


mxnet.i  view on Meta::CPAN

 * \return 0 when success, -1 when failure happens.
 */
int MXRandomSeed(int seed);
/*!
 * \brief Notify the engine about a shutdown.
 *  This can help the engine print fewer messages to the display.
 *
 *  Users do not have to call this function.
 * \return 0 when success, -1 when failure happens.
 */
int MXNotifyShutdown();

mxnet.i  view on Meta::CPAN

 */
int MXSymbolCopy(SymbolHandle symbol, SymbolHandle *out);
/*!
 * \brief Print the content of symbol, used for debug.
 * \param symbol the symbol
 * \param out_str pointer to hold the output string of the printing.
 * \return 0 when success, -1 when failure happens
 */
int MXSymbolPrint(SymbolHandle symbol, const char **out);
/*!
 * \brief Get string name from symbol

mxnet.i  view on Meta::CPAN

 */
int MXExecutorFree(ExecutorHandle handle);
/*!
 * \brief Print the content of execution plan, used for debug.
 * \param handle the executor.
 * \param out_str pointer to hold the output string of the printing.
 * \return 0 when success, -1 when failure happens
 */
int MXExecutorPrint(ExecutorHandle handle, const char **out);
/*!
 * \brief Executor forward method



AI-MaxEntropy


inc/Module/AutoInstall.pm  view on Meta::CPAN

    goto &ExtUtils::MakeMaker::prompt unless $AcceptDefault;

    my ( $prompt, $default ) = @_;
    my $y = ( $default =~ /^[Yy]/ );

    print $prompt, ' [', ( $y ? 'Y' : 'y' ), '/', ( $y ? 'n' : 'N' ), '] ';
    print "$default\n";
    return $default;
}

# the workhorse
sub import {
    my $class = shift;
    my @args  = @_ or return;
    my $core_all;

    print "*** $class version " . $class->VERSION . "\n";
    print "*** Checking for Perl dependencies...\n";

    my $cwd = Cwd::cwd();

    $Config = [];

inc/Module/AutoInstall.pm  view on Meta::CPAN

              if $option eq 'core';

            next unless $option eq 'core';
        }

        print "[" . ( $FeatureMap{ lc($feature) } || $feature ) . "]\n";

        $modules = [ %{$modules} ] if UNIVERSAL::isa( $modules, 'HASH' );

        unshift @$modules, -default => &{ shift(@$modules) }
          if ( ref( $modules->[0] ) eq 'CODE' );    # XXX: bugward compatibility

inc/Module/AutoInstall.pm  view on Meta::CPAN

                @skiptests = @{$arg} if ( $option eq 'skiptests' );

                next;
            }

            printf( "- %-${maxlen}s ...", $mod );

            if ( $arg and $arg =~ /^\D/ ) {
                unshift @$modules, $arg;
                $arg = 0;
            }

            # XXX: check for conflicts and uninstalls(!) them.
            if (
                defined( my $cur = _version_check( _load($mod), $arg ||= 0 ) ) )
            {
                print "loaded. ($cur" . ( $arg ? " >= $arg" : '' ) . ")\n";
                push @Existing, $mod => $arg;
                $DisabledTests{$_} = 1 for map { glob($_) } @skiptests;
            }
            else {
                print "missing." . ( $arg ? " (would need $arg)" : '' ) . "\n";
                push @required, $mod => $arg;
            }
        }

        next unless @required;

inc/Module/AutoInstall.pm  view on Meta::CPAN


    $UnderCPAN = _check_lock();    # check for $UnderCPAN

    if ( @Missing and not( $CheckOnly or $UnderCPAN ) ) {
        require Config;
        print
"*** Dependencies will be installed the next time you type '$Config::Config{make}'.\n";

        # make an educated guess of whether we'll need root permission.
        print "    (You may need to do that as the 'root' user.)\n"
          if eval '$>';
    }
    print "*** $class configuration finished.\n";

    chdir $cwd;

    # import to main::
    no strict 'refs';

inc/Module/AutoInstall.pm  view on Meta::CPAN

# if we are, then we simply let it taking care of our dependencies
sub _check_lock {
    return unless @Missing;

    if ($ENV{PERL5_CPANPLUS_IS_RUNNING}) {
        print <<'END_MESSAGE';

*** Since we're running under CPANPLUS, I'll just let it take care
    of the dependency's installation later.
END_MESSAGE
        return 1;

inc/Module/AutoInstall.pm  view on Meta::CPAN


    if (
            ( $^O eq 'MSWin32' ? _under_cpan() : <LOCK> == getppid() )
        and ( $CPAN::Config->{prerequisites_policy} || '' ) ne 'ignore'
    ) {
        print <<'END_MESSAGE';

*** Since we're running under CPAN, I'll just let it take care
    of the dependency's installation later.
END_MESSAGE
        return 1;

inc/Module/AutoInstall.pm  view on Meta::CPAN

    }

    return @installed unless @modules;  # nothing to do
    return @installed if _check_lock(); # defer to the CPAN shell

    print "*** Installing dependencies...\n";

    return unless _connected_to('cpan.org');

    my %args = @config;
    my %failed;

inc/Module/AutoInstall.pm  view on Meta::CPAN

        _install_cpanplus( \@modules, \@config );
    } else {
        _install_cpan( \@modules, \@config );
    }

    print "*** $class installation finished.\n";

    # see if we have successfully installed them
    while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) {
        if ( defined( _version_check( _load($pkg), $ver ) ) ) {
            push @installed, $pkg;
        }
        elsif ( $args{do_once} and open( FAILED, '>> .#autoinstall.failed' ) ) {
            print FAILED "$pkg\n";
        }
    }

    close FAILED if $args{do_once};

inc/Module/AutoInstall.pm  view on Meta::CPAN

        $conf->set_conf( $key, $val );
    }

    my $modtree = $cp->module_tree;
    while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) {
        print "*** Installing $pkg...\n";

        MY::preinstall( $pkg, $ver ) or next if defined &MY::preinstall;

        my $success;
        my $obj = $modtree->{$pkg};

inc/Module/AutoInstall.pm  view on Meta::CPAN

            }

            my $rv = $cp->install( modules => [ $obj->{module} ] );

            if ( $rv and ( $rv->{ $obj->{module} } or $rv->{ok} ) ) {
                print "*** $pkg successfully installed.\n";
                $success = 1;
            } else {
                print "*** $pkg installation cancelled.\n";
                $success = 0;
            }

            $installed += $success;
        } else {
            print << ".";
*** Could not find a version $ver or above for $pkg; skipping.
.
        }

        MY::postinstall( $pkg, $ver, $success ) if defined &MY::postinstall;

inc/Module/AutoInstall.pm  view on Meta::CPAN

    local $CPAN::Config->{prerequisites_policy} = 'follow';

    while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) {
        MY::preinstall( $pkg, $ver ) or next if defined &MY::preinstall;

        print "*** Installing $pkg...\n";

        my $obj     = CPAN::Shell->expand( Module => $pkg );
        my $success = 0;

        if ( $obj and defined( _version_check( $obj->cpan_version, $ver ) ) ) {

inc/Module/AutoInstall.pm  view on Meta::CPAN

                  ->{install}
                  if $CPAN::META;
            };

            if ( $rv eq 'YES' ) {
                print "*** $pkg successfully installed.\n";
                $success = 1;
            }
            else {
                print "*** $pkg installation failed.\n";
                $success = 0;
            }

            $installed += $success;
        }
        else {
            print << ".";
*** Could not find a version $ver or above for $pkg; skipping.
.
        }

        MY::postinstall( $pkg, $ver, $success ) if defined &MY::postinstall;

inc/Module/AutoInstall.pm  view on Meta::CPAN

      )
    {
        die "*** Please install $class $ver manually.\n";
    }

    print << ".";
*** Trying to fetch it from CPAN...
.

    # install ourselves
    _load($class) and return $class->import(@_)
      if $class->install( [], $class, $ver );

    print << '.'; exit 1;

*** Cannot bootstrap myself. :-( Installation terminated.
.
}

inc/Module/AutoInstall.pm  view on Meta::CPAN

    my $path = shift;
    mkdir( $path, 0755 ) unless -e $path;

    return 1 if -w $path;

    print << ".";
*** You are not allowed to write to the directory '$path';
    the installation may fail due to insufficient permissions.
.

    if (

inc/Module/AutoInstall.pm  view on Meta::CPAN

        ) =~ /^[Yy]/
      )
    {

        # try to bootstrap ourselves from sudo
        print << ".";
*** Trying to re-execute the autoinstall process with 'sudo'...
.
        my $missing = join( ',', @Missing );
        my $config = join( ',',
            UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} )

inc/Module/AutoInstall.pm  view on Meta::CPAN


        return
          unless system( 'sudo', $^X, $0, "--config=$config",
            "--installdeps=$missing" );

        print << ".";
*** The 'sudo' command exited with error!  Resuming...
.
    }

    return _prompt(

inc/Module/AutoInstall.pm  view on Meta::CPAN

sub Write {
    require Carp;
    Carp::croak "WriteMakefile: Need even number of args" if @_ % 2;

    if ($CheckOnly) {
        print << ".";
*** Makefile not written in check-only mode.
.
        return;
    }

inc/Module/AutoInstall.pm  view on Meta::CPAN


    $PostambleUsed = 0;
    local *MY::postamble = \&postamble unless defined &MY::postamble;
    ExtUtils::MakeMaker::WriteMakefile(%args);

    print << "." unless $PostambleUsed;
*** WARNING: Makefile written with customized MY::postamble() without
    including contents from Module::AutoInstall::postamble() --
    auto installation features disabled.  Please contact the author.
.



AI-MegaHAL


libmegahal.c  view on Meta::CPAN

static DICTIONARY *make_keywords(MODEL *, DICTIONARY *);
static char *make_output(DICTIONARY *);
static MODEL *new_model(int);
static TREE *new_node(void);
static SWAP *new_swap(void);
static bool print_header(FILE *);
static bool progress(char *, int, int);
static DICTIONARY *reply(MODEL *, DICTIONARY *);
static void save_dictionary(FILE *, DICTIONARY *);
static void save_tree(FILE *, TREE *);
static void save_word(FILE *, STRING);

libmegahal.c  view on Meta::CPAN

#endif
#ifdef __mac_os
    gSpeechExists = initialize_speech();
#endif
    if(!nobanner)
	fprintf(stdout,
		"+------------------------------------------------------------------------+\n"
		"|                                                                        |\n"
		"|  #    #  ######   ####     ##    #    #    ##    #                     |\n"
		"|  ##  ##  #       #    #   #  #   #    #   #  #   #               ###   |\n"
		"|  # ## #  #####   #       #    #  ######  #    #  #              #   #  |\n"

libmegahal.c  view on Meta::CPAN

}

/*
   megahal_output --

   This function pretty prints output.

   Wrapper function to have things in the right namespace.

*/

libmegahal.c  view on Meta::CPAN

    case SAVE:
	save_model("megahal.brn", model);
	break;
    case DELAY:
	typing_delay=!typing_delay;
	printf("MegaHAL typing is now %s.\n", typing_delay?"on":"off");
	return 1;
    case SPEECH:
	speech=!speech;
	printf("MegaHAL speech is now %s.\n", speech?"on":"off");
	return 1;
    case HELP:
	help();
	return 1;
    case VOICELIST:

libmegahal.c  view on Meta::CPAN

    }

    /*
     *		Display the prompt to the user.
     */
    fprintf(stdout, prompt);
    fflush(stdout);

    /*
     *		Loop forever, reading characters and putting them into the input
     *		string.

libmegahal.c  view on Meta::CPAN

	 *		prompt again, and set the character to the space character, as
	 *		we don't permit linefeeds to appear in the input.
	 */
	if((char)(c)=='\n') {
	    if(finish==TRUE) break;
	    fprintf(stdout, prompt);
	    fflush(stdout);
	    finish=TRUE;
	    c=32;
	} else {
	    finish=FALSE;

libmegahal.c  view on Meta::CPAN

    errorfp = fopen(filename, "a");
    if(errorfp==NULL) {
	errorfp=stderr;
	return(FALSE);
    }
    return(print_header(errorfp));
}

/*---------------------------------------------------------------------------*/

/*

libmegahal.c  view on Meta::CPAN

 */
void error(char *title, char *fmt, ...)
{
    va_list argp;

    fprintf(errorfp, "%s: ", title);
    va_start(argp, fmt);
    vfprintf(errorfp, fmt, argp);
    va_end(argp);
    fprintf(errorfp, ".\n");
    fflush(errorfp);

    //    fprintf(stderr, "MegaHAL died for some reason; check the error log.\n");

    exit(1);
}

/*---------------------------------------------------------------------------*/

bool warn(char *title, char *fmt, ...)
{
    va_list argp;

    fprintf(errorfp, "%s: ", title);
    va_start(argp, fmt);
    vfprintf(errorfp, fmt, argp);
    va_end(argp);
    fprintf(errorfp, ".\n");
    fflush(errorfp);

    //    fprintf(stderr, "MegaHAL emitted a warning; check the error log.\n");

    return(TRUE);
}

/*---------------------------------------------------------------------------*/

libmegahal.c  view on Meta::CPAN

    statusfp=fopen(filename, "a");
    if(statusfp==NULL) {
	statusfp=stdout;
	return(FALSE);
    }
    return(print_header(statusfp));
}

/*---------------------------------------------------------------------------*/

/*

libmegahal.c  view on Meta::CPAN

bool status(char *fmt, ...)
{
    va_list argp;

    va_start(argp, fmt);
    vfprintf(statusfp, fmt, argp);
    va_end(argp);
    fflush(statusfp);

    return(TRUE);
}

libmegahal.c  view on Meta::CPAN

/*
 *		Function:	Print_Header
 *
 *		Purpose:		Display a copyright message and timestamp.
 */
bool print_header(FILE *file)
{
    time_t clock;
    char timestamp[1024];
    struct tm *local;

    clock=time(NULL);
    local=localtime(&clock);
    strftime(timestamp, 1024, "Start at: [%Y/%m/%d %H:%M:%S]\n", local);

    fprintf(file, "MegaHALv8\n");
    fprintf(file, "Copyright (C) 1998 Jason Hutchens\n");
    fprintf(file, timestamp);
    fflush(file);

    return(TRUE);
}

libmegahal.c  view on Meta::CPAN


    if(filename==NULL) return;

    file=fopen(filename, "r");
    if(file==NULL) {
	printf("Unable to find the personality %s\n", filename);
	return;
    }

    fseek(file, 0, 2);
    length=ftell(file);

libmegahal.c  view on Meta::CPAN

	return;
    }

    for(i=0; i<dictionary->size; ++i) {
	for(j=0; j<dictionary->entry[i].length; ++j)
	    fprintf(file, "%c", dictionary->entry[i].word[j]);
	fprintf(file, "\n");
    }

    fclose(file);
}

libmegahal.c  view on Meta::CPAN

    if(filename==NULL) error("save_model","Unable to allocate filename");

    show_dictionary(model->dictionary);
    if(filename==NULL) return;

    sprintf(filename, "%s%smegahal.brn", directory, SEP);
    file=fopen(filename, "wb");
    if(file==NULL) {
	warn("save_model", "Unable to open file `%s'", filename);
	return;
    }

libmegahal.c  view on Meta::CPAN


    /*
     *		Don't simulate typing if the feature is turned off
     */
    if(typing_delay==FALSE)	{
	fprintf(stdout, string);
	return;
    }

    /*
     *		Display the entire string, one character at a time

libmegahal.c  view on Meta::CPAN

{
    /*
     *		Standard keyboard delay
     */
    usleep(D_KEY+rnd(V_KEY)-rnd(V_KEY));
    fprintf(stdout, "%c", c);
    fflush(stdout);

    /*
     *		A random thinking delay
     */

libmegahal.c  view on Meta::CPAN

			    gSpeechChannel = nil;
			}
			err = NewSpeechChannel(&voiceSpec, &gSpeechChannel);
			if (!err) {
			    p2cstr((StringPtr)buffer);
			    printf("Now using %s voice\n", buffer);
			    c2pstr(buffer);
			    err = SpeakText(gSpeechChannel, &buffer[1], buffer[0]);
			}
		    }
		}

libmegahal.c  view on Meta::CPAN


		p2cstr(info.name);
		for (temp= info.name; *temp; temp++)
		    if (*temp == ' ')
			*temp = '_';
		printf("%s\n",info.name);
	    }
	}
    }
#endif
}

libmegahal.c  view on Meta::CPAN

{
    static int last=0;
    static bool first=FALSE;

    /*
     *    We have already hit 100%, and a newline has been printed, so nothing
     *    needs to be done.
     */
    if((done*100/total==100)&&(first==FALSE)) return(TRUE);

    /*
     *    Nothing has changed since the last time this function was called,
     *    so do nothing, unless it's the first time!
     */
    if(done*100/total==last) {
	if((done==0)&&(first==FALSE)) {
	  //	    fprintf(stderr, "%s: %3d%%", message, done*100/total);
	    first=TRUE;
	}
	return(TRUE);
    }

    /*
     *    Erase what we printed last time, and print the new percentage.
     */
    last=done*100/total;

    //if(done>0) fprintf(stderr, "%c%c%c%c", 8, 8, 8, 8);
    //fprintf(stderr, "%3d%%", done*100/total);

    /*
     *    We have hit 100%, so reset static variables and print a newline.
     */
    if(last==100) {
	first=FALSE;
	last=0;
	//fprintf(stderr, "\n");
    }

    return(TRUE);
}

libmegahal.c  view on Meta::CPAN

void help(void)
{
    int j;

    for(j=0; j<COMMAND_SIZE; ++j) {
	printf("#%-7s: %s\n", command[j].word.word, command[j].helpstring);
    }
}

/*---------------------------------------------------------------------------*/

libmegahal.c  view on Meta::CPAN

    /*
     *		Check to see if the brain exists
     */

    if(strcmp(directory, DEFAULT)!=0) {
	sprintf(filename, "%s%smegahal.brn", directory, SEP);
	file=fopen(filename, "r");
	if(file==NULL) {
	    sprintf(filename, "%s%smegahal.trn", directory, SEP);
	    file=fopen(filename, "r");
	    if(file==NULL) {
		fprintf(stdout, "Unable to change MegaHAL personality to \"%s\".\n"
			"Reverting to MegaHAL personality \"%s\".\n", directory, last);
		free(directory);
		directory=strdup(last);
		return;
	    }
	}
	fclose(file);
	fprintf(stdout, "Changing to MegaHAL personality \"%s\".\n", directory);
    }

    /*
     *		Free the current personality
     */

libmegahal.c  view on Meta::CPAN


    /*
     *		Train the model on a text if one exists
     */

    sprintf(filename, "%s%smegahal.brn", directory, SEP);
    if(load_model(filename, *model)==FALSE) {
	sprintf(filename, "%s%smegahal.trn", directory, SEP);
	train(*model, filename);
    }

    /*
     *		Read a dictionary containing banned keywords, auxiliary keywords,
     *		greeting keywords and swap keywords
     */
    sprintf(filename, "%s%smegahal.ban", directory, SEP);
    ban=initialize_list(filename);
    sprintf(filename, "%s%smegahal.aux", directory, SEP);
    aux=initialize_list(filename);
    sprintf(filename, "%s%smegahal.grt", directory, SEP);
    grt=initialize_list(filename);
    sprintf(filename, "%s%smegahal.swp", directory, SEP);
    swp=initialize_swap(filename);
}

/*---------------------------------------------------------------------------*/



AI-MicroStructure


bin/from-folder.pl  view on Meta::CPAN

our $files={};
our $curSysDate = `date +"%F"`;
        $curSysDate=~ s/\n//g;

our %opts = (cache_file =>
              sprintf("%s/%s_.cache",
              $storage,$curSysDate));

GetOptions (\%opts, "cache_file=s");


bin/from-folder.pl  view on Meta::CPAN

     foreach(@ARGV){
    my $t = $_;

    if( ! -d $t &&  $rel_name !~ m/($t)/i){
        $go ++;
        print $rel_name."\n";
       }
    }
        

        if (/\.(html|htm)$/) {

bin/from-folder.pl  view on Meta::CPAN

ok($c->simpleMixedSearch($style,$_)) && ok($c->play($style,$_))   for
 qw(atom antimatter planet);



ok(print Dumper $c->intersect($style,$_)) for
 qw(atom antimatter planet);

ok(print Dumper $c->similar($style,$_)) for
 qw(atom antimatter planet);

#p @out;

1;

bin/from-folder.pl  view on Meta::CPAN

use Getopt::Long;
our $curSysDate = `date +"%F"`;
    $curSysDate=~ s/\n//g;

our %opts = (cache_file =>
              sprintf("/tmp/%s.cache",
              $curSysDate));

GetOptions (\%opts, "cache_file=s");

our $cache = {};

bin/from-folder.pl  view on Meta::CPAN


END{

  lock_store($cache,$opts{cache_file});

  print Dumper [$set->size,$set->members];


  }


bin/from-folder.pl  view on Meta::CPAN

  (my $rel_name = $File::Find::name) =~ s{.*/}{}xs;

  $set->insert(AI::MicroStructure::Object->new($rel_name));

}
#print Dumper join "-", soundex(("rock'n'roll", 'rock and roll', 'rocknroll'));



AI-NNEasy


lib/AI/NNEasy.pm  view on Meta::CPAN

    
    $file ||= $this->{FILE} ;
        
    my $dump = freeze( {%$this} ) ;
    open (my $fh,">$this->{FILE}") ;
    print $fh $dump ;
    close ($fh) ;
  }
  
  sub learn { 
    my $this = ref($_[0]) ? shift : undef ;

lib/AI/NNEasy.pm  view on Meta::CPAN

    for (my $i = 0 ; $i < @$set ; $i+=2) {
      $this->{NN}->run($$set[$i]) ;
      $this->{NN}->learn($$set[$i+1]) ;
    }

    my ($err,$learn_ok,$print) ;
    for (my $i = 0 ; $i < @$set ; $i+=2) {
      $this->{NN}->run($$set[$i]) ;
      my $er = $this->{NN}->RMSErr($$set[$i+1]) ;
      $er *= -1 if $er < 0 ;
      ++$learn_ok if $er < $error_ok ;
      $err += $er ;
      $print .= join(' ',@{$$set[$i]}) ." => ". join(' ',@{$$set[$i+1]}) ." > $er\n" if $verbose ;
    }
    
    $err /= $ins_ok ;
    
    return ( $err , $learn_ok , $print ) ;
  }
  
  
  
  

lib/AI/NNEasy.pm  view on Meta::CPAN

  
    my $error_ok = $this->{ERROR_OK} ;
    
    my $check_diff_count = 1000 ;
    
    my ($learn_ok,$counter,$err,$err_last,$err_count,$err_static, $reset_count1 , $reset_count2 ,$print) ;
    
    $err_static = 0 ;
    
    while ( ($learn_ok < $ins_ok) && ($counter < $limit) ) {
      ($err , $learn_ok , $print) = $this->_learn_set_get_output_error(\@set , $error_ok , $ins_ok , $verbose) ;
      
      ++$counter ;
      
      if ( !($counter % 100) || $learn_ok == $ins_ok ) {
        my $err_diff = $err_last - $err ;

lib/AI/NNEasy.pm  view on Meta::CPAN

        
        $err_count += $err_diff ;
        
        ++$err_static if $err_diff <= 0.00001 || $err > 1 ;
        
        print "err_static = $err_static\n" if $verbose && $err_static ;

        $err_last = $err ;
        
        my $reseted ;
        if ( $err_static >= $err_static_limit || ($err > 1 && $err_static >= $err_static_limit_positive) ) {

lib/AI/NNEasy.pm  view on Meta::CPAN

          $reseted = 1 ;
          ++$reset_count1 ;
          
          if ( ( $reset_count1 + $reset_count2 ) > 2 ) {
            $reset_count1 = $reset_count2 = 0 ;
            print "** Reseting NN...\n" if $verbose ;
            $this->reset_nn ;
          }
          else {
            print "** Reseting weights due NULL diff...\n" if $verbose ;
            $this->{NN}->init ;
          }
        }
        
        if ( !($counter % $check_diff_count) ) {
          $err_count /= ($check_diff_count/100) ;
          
          print "ERR COUNT> $err_count\n" if $verbose ;
          
          if ( !$reseted && $err_count < 0.001 ) {
            $err_static = 0 ;
            $counter -= 1000 ;
            ++$reset_count2 ;
            
            if ( ($reset_count1 + $reset_count2) > 2 ) {
              $reset_count1 = $reset_count2 = 0 ;
              print "** Reseting NN...\n" if $verbose ;
              $this->reset_nn ;
            }
            else {
              print "** Reseting weights due LOW diff...\n" if $verbose ;
              $this->{NN}->init ;
            }
          }

          $err_count = 0 ;
        }
        
        if ( $verbose ) {
          print "\nepoch $counter : error_ok = $error_ok : error = $err : err_diff = $err_diff : err_static = $err_static : ok = $learn_ok\n" ;
          print $print ;
        }
      }

      print "epoch $counter : error = $err : ok = $learn_ok\n" if $verbose > 1 ;
      
    }
    
  }
  

lib/AI/NNEasy.pm  view on Meta::CPAN

    STRLEN len;
    int i ;
    HV* self_hv = OBJ_HV( self );
    AV* set_av = OBJ_AV( set ) ;
    SV* nn = FETCH_ATTR(self_hv , "NN") ;
    SV* print_verbose = verbose ? sv_2mortal(newSVpv("",0)) : NULL ;
    SV* ret ;
    double err = 0 ;
    double er = 0 ;
    int learn_ok = 0 ;
        

lib/AI/NNEasy.pm  view on Meta::CPAN

      er = SvNV(ret) ;
      if (er < 0) er *= -1 ;
      if (er < error_ok) ++learn_ok ;
      err += er ;
      
      if ( verbose ) sv_catpvf(print_verbose , "%s => %s > %f\n" ,
                       SvPV( _av_join( OBJ_AV(set_in) ) , len) ,
                       SvPV( _av_join( OBJ_AV(set_out) ) , len) ,
                       er
                     ) ;

lib/AI/NNEasy.pm  view on Meta::CPAN


    if (verbose) {
      EXTEND(SP , 3) ;
        ST(0) = sv_2mortal(newSVnv(err)) ;
        ST(1) = sv_2mortal(newSViv(learn_ok)) ;
        ST(2) = print_verbose ;
      XSRETURN(3) ;
    }
    else {
      EXTEND(SP , 2) ;
        ST(0) = sv_2mortal(newSVnv(err)) ;

lib/AI/NNEasy.pm  view on Meta::CPAN

  }
  
  ## Use the NN:
  
  my $out = $nn->run_get_winner([0,0]) ;
  print "0 0 => @$out\n" ; ## 0 0 => 0
  
  my $out = $nn->run_get_winner([0,1]) ;
  print "0 1 => @$out\n" ; ## 0 1 => 1
  
  my $out = $nn->run_get_winner([1,0]) ;
  print "1 0 => @$out\n" ; ## 1 0 => 1
  
  my $out = $nn->run_get_winner([1,1]) ;
  print "1 1 => @$out\n" ; ## 1 1 => 0
  
  ## or just iterate through the @set:
  for (my $i = 0 ; $i < @set ; $i+=2) {
    my $out = $nn->run_get_winner($set[$i]) ;
    print "@{$set[$i]}) => @$out\n" ;
  }

=head1 METHODS

=head2 new ( FILE , @OUTPUT_TYPES , ERROR_OK , IN_SIZE , OUT_SIZE , @HIDDEN_LAYERS , %CONF )
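
The body of this entry is truncated in this excerpt; purely as a hedged illustration,
a constructor call matching the argument order above (with values chosen for the XOR
set in the synopsis) might look like:

  my $nn = AI::NNEasy->new(
    'xor.nne',  ## FILE to save/load the network
    [0, 1],     ## @OUTPUT_TYPES
    0.1,        ## ERROR_OK
    2,          ## IN_SIZE  (two inputs per sample)
    1,          ## OUT_SIZE (one output per sample)
  );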



AI-NNFlex


examples/add.pl  view on Meta::CPAN


my $err = 10;
# Stop after 4096 epochs -- don't want to wait more than that
for ( my $i = 0; ($err > 0.0001) && ($i < 4096); $i++ ) {
    $err = $dataset->learn($network);
    print "Epoch = $i error = $err\n";
}

foreach (@{$dataset->run($network)})
{
    foreach (@$_){print $_}
    print "\n";    
}

print "this should be 4000 - ";
$network->run([2000,2000]);
foreach ( @{$network->output}){print $_."\n";}

 foreach my $a ( 1..10 ) {
     foreach my $b ( 1..10 ) {
     my($ans) = $a+$b;
     my($nnans) = @{$network->run([$a,$b])};
     print "[$a] [$b] ans=$ans but nnans=$nnans\n" unless $ans == $nnans;
     }
 }




AI-NNVMCAPI


nnvm.i  view on Meta::CPAN

 */
int NNSymbolCopy(SymbolHandle in, SymbolHandle *out);
/*!
 * \brief Print the content of symbol, used for debug.
 * \param symbol the symbol
 * \param out_str pointer to hold the output string of the printing.
 * \return 0 when success, -1 when failure happens
 */
int NNSymbolPrint(SymbolHandle in, const char **out);
/*!
 * \brief Get string attribute from symbol



AI-NaiveBayes


LICENSE  view on Meta::CPAN

    that you may choose to grant warranty protection to some or all
    third parties, at your option).

    c) If the modified program normally reads commands interactively when
    run, you must cause it, when started running for such interactive use
    in the simplest and most usual way, to print or display an
    announcement including an appropriate copyright notice and a notice
    that there is no warranty (or else, saying that you provide a
    warranty) and that users may redistribute the program under these
    conditions, and telling the user how to view a copy of this General
    Public License.



AI-NaiveBayes1


NaiveBayes1.pm  view on Meta::CPAN


    # For real-valued attributes, we use Gaussian distribution
    # let us collect statistics
    foreach my $att (keys(%{$self->{stat_attributes}})) {
        next unless $self->{attribute_type}{$att} eq 'real';
	print STDERR "Smoothing ignored for real attribute $att!\n" if
	    defined($self->{smoothing}{att}) and $self->{smoothing}{att};
        $m->{real_stat}->{$att} = {};
        foreach my $attval (keys %{$self->{stat_attributes}{$att}}){
            foreach my $label (keys %{$self->{stat_attributes}{$att}{$attval}}){
                $m->{real_stat}{$att}{$label}{sum}

NaiveBayes1.pm  view on Meta::CPAN

                    / $m->{real_stat}{$att}{$label}{stddev}
                  ) ** 2
		 );
	  $sum += $nscores{$label};
      }
      if ($sum==0) { print STDERR "Ignoring all Gaussian probabilities: all=0!\n" }
      else {
	  foreach my $label (@labels) { $scores{$label} *= $nscores{$label} }
      }
  }

NaiveBayes1.pm  view on Meta::CPAN

  $sumPx += $scores{$_} foreach (keys(%scores));
  $scores{$_} /= $sumPx foreach (keys(%scores));
  return \%scores;
}

sub print_model {
    my $self = shift;
    my $withcounts = '';
    if ($#_>-1 && $_[0] eq 'with counts')
    { shift @_; $withcounts = 1; }
    my $m = $self->{model};

NaiveBayes1.pm  view on Meta::CPAN

     N     N     N     Y    11   
     N     N     N     N    91   
  -------------------------------
  ");
  $nb->train;
  print "Model:\n" . $nb->print_model;
  print "Model (with counts):\n" . $nb->print_model('with counts');

  $nb = AI::NaiveBayes1->new;
  $nb->add_instances(attributes=>{model=>'H',place=>'B'},
		     label=>'repairs=Y',cases=>30);
  $nb->add_instances(attributes=>{model=>'H',place=>'B'},

NaiveBayes1.pm  view on Meta::CPAN

  $nb->add_instances(attributes=>{model=>'T',place=>'N'},
		     label=>'repairs=N',cases=>84);

  $nb->train;

  print "Model:\n" . $nb->print_model;
  
  # Find results for unseen instances
  my $result = $nb->predict
     (attributes => {model=>'T', place=>'N'});

  foreach my $k (keys(%{ $result })) {
      print "for label $k P = " . $result->{$k} . "\n";
  }

  # export the model into a string
  my $string = $nb->export_to_YAML();

NaiveBayes1.pm  view on Meta::CPAN

=item C<export_to_YAML_file( $file_name )>

Writes a C<YAML> string representation of an C<AI::NaiveBayes1>
object to a file.  Requires YAML module.

=item C<print_model( OPTIONAL 'with counts' )>

Returns a string, human-friendly representation of the model.
The model is supposed to be trained before calling this method.
One argument 'with counts' can be supplied, in which case explanatory
expressions with counts are printed as well.

=item train()

Calculates the probabilities that will be necessary for categorization
using the C<predict()> method.
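
Combining the items above, a short hedged sketch (the file name is illustrative, and
export_to_YAML_file() requires the YAML module as noted):

  $nb->train;
  print "Model:\n" . $nb->print_model('with counts');
  $nb->export_to_YAML_file('model.yml');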

NaiveBayes1.pm  view on Meta::CPAN

 $nb->add_instance(attributes=>{outlook=>'overcast',temperature=>81,humidity=>75,windy=>'FALSE'},label=>'play=yes');
 $nb->add_instance(attributes=>{outlook=>'rainy',temperature=>71,humidity=>91,windy=>'TRUE'},label=>'play=no');
 
 $nb->train;
 
 my $printedmodel =  "Model:\n" . $nb->print_model;
 my $p = $nb->predict(attributes=>{outlook=>'sunny',temperature=>66,humidity=>90,windy=>'TRUE'});

 YAML::DumpFile('file', $p);
 die unless (abs($p->{'play=no'}  - 0.792) < 0.001);
 die unless(abs($p->{'play=yes'} - 0.208) < 0.001);



AI-Nerl


examples/digits/deep_digits.pl  view on Meta::CPAN

   my $n = int rand(8000);
   my $m = $n+499;
   my @train = ($images->slice("$n:$m")->copy, $y->slice("$n:$m")->copy);
   $nerl->train(@train,passes=>10);
   my ($cost, $nc) = $nerl->cost( @test );
   print "cost:$cost\n,num correct: $nc / 1000\n";
#   $nerl->network->show_neuron(1);
   $passes++;
   if ($cost < $prev_cost or $passes<10){
      $prev_cost = $cost;
      $prev_nerl = $nerl;
   } else { # use $nerl as basis for $nerl
      $passes=0;

      print "New layer!";
      $prev_cost = 1000;
      $nerl = AI::Nerl->new(
         basis => $prev_nerl,
         l2 => int(rand(12))+5,
      );

examples/digits/deep_digits.pl  view on Meta::CPAN

      $prev_nerl = $nerl;
      #die $nerl->network->theta1->slice("1:2") . $nerl->network->theta2->slice("1:2");
   }


   #print "example output, images 0 to 4\n";
   #print "Labels: " . $y(0:4) . "\n";
   #print $nerl->run($images(0:4));
#   $nerl->network->show_neuron($_) for (0..4);

}



AI-NeuralNet-BackProp


BackProp.pm  view on Meta::CPAN

	}	
	
	# Rounds floats to ints
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
      	try   { return int(sprintf("%.0f",shift)) }
      	catch { return 0 }
	}
    
	
	# Receives input from other neurons. They must

BackProp.pm  view on Meta::CPAN

			my $delta = 
					$ammount * 
						($value<$what?1:-1) * 
							$self->{SYNAPSES}->{LIST}->[$i]->{WEIGHT};
			
			#print "($value,$what) delta:$delta\n";
			
			# Recursively apply
			$self->{SYNAPSES}->{LIST}->[$i]->{WEIGHT}  +=  $delta;
			$self->{SYNAPSES}->{LIST}->[$i]->{PKG}->weight($ammount,$what);
			    

BackProp.pm  view on Meta::CPAN


	# Debugging subs
	$AI::NeuralNet::BackProp::DEBUG  = 0;
	sub whowasi { (caller(1))[3] . '()' }
	sub debug { shift; $AI::NeuralNet::BackProp::DEBUG = shift || 0; } 
	sub out1  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG eq 1) }
	sub out2  { print  shift() if (($AI::NeuralNet::BackProp::DEBUG eq 1) || ($AI::NeuralNet::BackProp::DEBUG eq 2)) }
	sub out3  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG) }
	sub out4  { print  shift() if ($AI::NeuralNet::BackProp::DEBUG eq 4) }
	
	# Rounds a floating-point to an integer with int() and sprintf()
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
      	try   { return int(sprintf("%.0f",shift)) }
      	catch { return 0 }
	}
    
	# Used to format array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;

BackProp.pm  view on Meta::CPAN

		my $b		=	shift;
		my $x;
		foreach my $el (@{$map}) { 
			my $str = ((int($el))?$a:$b);
			$str=$el."\0" if(!$a);
			print $str;
			$x++;
			if($x>$break-1) {
				print "\n";
				$x=0;
			}
		}
		print "\n";
	}
	
	# Returns percentage difference between all elements of two
	# array refs of exact same length (in elements).
	# Now calculates actual difference in numerical value.

BackProp.pm  view on Meta::CPAN

				$a=1 if(!$a);
				$diff+=(($a-$b)/$a)*100;
			}
		}
		$a1s = 1 if(!$a1s);
		return sprintf("%.10f",($diff/$a1s));
	}
	
	# Returns $fa as a percentage of $fb
	sub p {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my ($fa,$fb)=(shift,shift);
		sprintf("%.3f",((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100);
	}
	
	# This sub will take an array ref of a data set, which it expects in this format:
	#   my @data_set = (	[ ...inputs... ], [ ...outputs ... ],
	#				   				   ... rows ...

BackProp.pm  view on Meta::CPAN

	    my $error	=	$args{error};
	    my $p		=	(defined $args{flag})	?$args{flag}	   :1;
	    my $row		=	(defined $args{pattern})?$args{pattern}*2+1:1;
	    my ($fa,$fb);
		for my $x (0..$len) {
			print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG);
			my $str = $self->learn( $data->[$x*2],			# The list of data to input to the net
					  		  		$data->[$x*2+1], 		# The output desired
					    			inc=>$inc,				# The starting learning gradient
					    			max=>$max,				# The maximum num of loops allowed
					    			error=>$error);			# The maximum (%) error allowed
			print $str if($AI::NeuralNet::BackProp::DEBUG); 
		}
			
		
		my $res;
		$data->[$row] = $self->crunch($data->[$row]) if($data->[$row] == 0);

BackProp.pm  view on Meta::CPAN

		while(1) {
			_GET_X:
			my $x=$self->intr(rand()*$len);
			goto _GET_X if($learned[$x]);
			$learned[$x]=1;
			print "\nLearning index $x...\n" if($AI::NeuralNet::BackProp::DEBUG); 
			my $str =  $self->learn($data->[$x*2],			# The list of data to input to the net
					  		  		$data->[$x*2+1], 		# The output desired
					    			inc=>$inc,				# The starting learning gradient
			 		    			max=>$max,				# The maximum num of loops allowed
					    			error=>$error);			# The maximum (%) error allowed
			print $str if($AI::NeuralNet::BackProp::DEBUG); 
		}
			
		
		return 1; 
	}

BackProp.pm  view on Meta::CPAN

		my $rand	=	shift;
		return $self->{random}	if(!(defined $rand));
		$self->{random}	=	$rand;
	}
	
	# Sets/gets column width for printing lists in debug modes 1,3, and 4.
	sub col_width {
		my $self	=	shift;
		my $width	=	shift;
		return $self->{col_width}	if(!$width);
		$self->{col_width}	=	$width;

BackProp.pm  view on Meta::CPAN

		my $ref		=	shift;
		my $b		=	shift;
		if(substr($ref,0,5) ne "ARRAY") {
			if(($ref == 0) && (!defined $b)) {
				$ref	= $self->crunch($ref);
				#print "\$ref is a string, crunching to ",join(',',@{$ref}),"\n";
			} else {
    			my $a	= $ref;
    			$a		= $self->crunch($a)->[0] if($a == 0);
				$b		= $self->crunch($b)->[0] if($b == 0);
				$_[++$#_] = $a;
    			$_[++$#_] = $b;
    			$ref	= \@_;
				#print "Found ranged definition, joined to ",join(',',@{$ref}),"\n";
			}
		}
		my $rA		=	0;
		my $rB		=	$#{$ref};
		my $rS		=	0; #shift;

BackProp.pm  view on Meta::CPAN

		my $out 	=	[];
		# Adjust for a maximum outside what we have seen so far
		for my $i (0..$l) {
			$rS=$in->[$i] if($in->[$i]>$rS);
		}
		#print "\$l:$l,\$rA:$rA,\$rB:$rB,\$rS:$rS,\$r:$r\n";
		# Loop through, convert values to percentage of maximum, then multiply
		# percentage by range and add to base of range to get the final value
		for my $i (0..$l) {
			#print "\$i:$i,\$in:$in->[$i]\n";
			$rS=1 if(!$rS);
			my $t=intr((($rS-$in->[$i])/$rS)*$r+$rA);
			#print "t:$t,$self->{rRef}->[$t],i:$i\n";
			$out->[$i] = $self->{rRef}->[$t];
		}
		$self->{rS}=$rS;
		return $out;
	}

BackProp.pm  view on Meta::CPAN

		    # pre-existing reference
			return $self->load($layers,1);
		}
				
		
		#print "Creating $size neurons in each layer for $layers layer(s)...\n";
		
		AI::NeuralNet::BackProp::out2 "Creating $size neurons in each layer for $layers layer(s)...\n";
		
		# Error checking
		return undef if($out>$size);

BackProp.pm  view on Meta::CPAN

		# when connecting the outputs and inputs.
		my $div = $size;
		my $size = $layers * $size;
		
		AI::NeuralNet::BackProp::out2 "Creating RUN and MAP systems for network...\n";
		#print "Creating RUN and MAP systems for network...\n";
		
		# Create a new runner and mapper for the network.
		$self->{RUN} = new AI::NeuralNet::BackProp::_run($self);
		$self->{MAP} = new AI::NeuralNet::BackProp::_map($self);
		

BackProp.pm  view on Meta::CPAN

		my $out		=	$self->{OUT};
		my $flag	=	$self->{FLAG};

	    open(FILE,">$file");
	    
	    print FILE "size=$size\n";
	    print FILE "div=$div\n";
	    print FILE "out=$out\n";
	    print FILE "flag=$flag\n";
	    print FILE "rand=$self->{random}\n";
	    print FILE "cw=$self->{col_width}\n";
		print FILE "crunch=$self->{_CRUNCHED}->{_LENGTH}\n";
		print FILE "rA=$self->{rA}\n";
		print FILE "rB=$self->{rB}\n";
		print FILE "rS=$self->{rS}\n";
		print FILE "rRef=",(($self->{rRef})?join(',',@{$self->{rRef}}):''),"\n";
			
		for my $a (0..$self->{_CRUNCHED}->{_LENGTH}-1) {
			print FILE "c$a=$self->{_CRUNCHED}->{LIST}->[$a]\n";
		}
		
		my $w;
		for my $a (0..$self->{SIZE}-1) {
			$w="";
			for my $b (0..$self->{DIV}-1) {
				$w .= "$self->{NET}->[$a]->{SYNAPSES}->{LIST}->[$b]->{WEIGHT},";
			}
			chop($w);
			print FILE "n$a=$w\n";
		}
	
	    close(FILE);
	    
	    return $self;

BackProp.pm  view on Meta::CPAN


	# Dumps the complete weight matrix of the network to STDIO
	sub show {
		my $self	=	shift;
		for my $a (0..$self->{SIZE}-1) {
			print "Neuron $a: ";
			for my $b (0..$self->{DIV}-1) {
				print $self->{NET}->[$a]->{SYNAPSES}->{LIST}->[$b]->{WEIGHT},"\t";
			}
			print "\n";
		}
	}
	
	# Used internally by new() and learn().
	# This is the sub block that actually creats

BackProp.pm  view on Meta::CPAN

		
		# Reset map and run synapse counters.
		$self->{RUN}->{REGISTRATION} = $self->{MAP}->{REGISTRATION} = 0;
		
		AI::NeuralNet::BackProp::out2 "There will be $size neurons in this network group, with a divison value of $div.\n";
		#print "There will be $size neurons in this network group, with a divison value of $div.\n";
		
		# Create initial neuron packages in one long array for the entire group
		for($y=0; $y<$size; $y++) {
			#print "Initalizing neuron $y...     \r";
			$self->{NET}->[$y]=new AI::NeuralNet::BackProp::neuron();
		}
		
		AI::NeuralNet::BackProp::out2 "Creating synapse grid...\n";
		

BackProp.pm  view on Meta::CPAN


	# Demonstrate a simple learn() call
	my @inputs = ( 0,0,1,1,1 );
	my @outputs = ( 1,0,1,0,1 );
	
	print $net->learn(\@inputs, \@outputs),"\n";

	# Create a data set to learn
	my @set = (
		[ 2,2,3,4,1 ], [ 1,1,1,1,1 ],
		[ 1,1,1,1,1 ], [ 0,0,0,0,0 ],
		[ 1,1,1,0,0 ], [ 0,0,0,1,1 ]	
	);
	
	# Demo learn_set()
	my $f = $net->learn_set(\@set);
	print "Forgetfulness: $f unit\n";
	
	# Crunch a bunch of strings and return array refs
	my $phrase1 = $net->crunch("I love neural networks!");
	my $phrase2 = $net->crunch("Jay Lenno is wierd.");
	my $phrase3 = $net->crunch("The rain in spain...");

BackProp.pm  view on Meta::CPAN

	
	# Run a test phrase through the network
	my $test_phrase = $net->crunch("I love neural networking!");
	my $result = $net->run($test_phrase);
	
	# Get this, it prints "Jay Leno is  networking!" ...  LOL!
	print $net->uncrunch($result),"\n";



=head1 UPDATES

BackProp.pm  view on Meta::CPAN

	Learning took X loops and X wallclock seconds (X.XXX usr + X.XXX sys = X.XXX CPU).

With the X's replaced by time or loop values for that loop call. So,
to view the learning stats for every learn call, you can just:
	
	print $net->learn(\@map,\@res);
	

If you call "$net->debug(4)" with $net being the 
refrence returned by the new() constructor, you will get benchmarking 
information for the learn function, as well as plenty of other information output. 

BackProp.pm  view on Meta::CPAN

					  max	=>	500,
					  p		=>	1
					 );
			
	# Print it 
	print "Forgetfullness: $f%";

    
This is a snippet from the example script examples/ex_dow.pl, which demonstrates DOW average
prediction for the next month. A simpler set definition would be as such:

BackProp.pm  view on Meta::CPAN


UPDATE: benchmarked() now returns just the string from timestr() for the last run() or
loop() call. Exception: if the last call was a loop, the string will be prefixed with "%d loops and ".

This returns a benchmark info string for the last learn() or the last run() call,
whichever occurred later. It is easily printed as a string, as follows:

	print $net->benchmarked() . "\n";





=item $net->debug($level)

Toggles debugging off if called with $level = 0 or no arguments. There are four levels
of debugging.

Level 0 ($level = 0) : Default, no debugging information printed. All printing is
left to the calling script.

Level 1 ($level = 1) : This causes ALL debugging information for the network to be dumped
as the network runs. In this mode, it is a good idea to pipe your STDIO to a file, especially
for large programs.

Level 2 ($level = 2) : A slightly-less verbose form of debugging, not as many internal 
data dumps.

Level 3 ($level = 3) : JUST prints weight mapping as weights change.

Level 4 ($level = 4) : JUST prints the benchmark info for EACH learn loop iteration, not just
learning as a whole. Also prints the percentage difference for each loop between current network
results and desired results, as well as the learning gradient ('increment').

Level 4 is useful for seeing if you need to give a smaller learning increment to learn().
I used level 4 debugging quite often in creating the letters.pl example script and the small_1.pl
example script.
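
For example, to watch per-loop benchmarks and error percentages while training,
a sketch using only the calls documented in this POD:

	$net->debug(4);
	print $net->learn(\@map,\@res);
	$net->debug(0);			# back to quiet mode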

BackProp.pm  view on Meta::CPAN


=item $net->join_cols($array_ref,$row_length_in_elements,$high_state_character,$low_state_character);

This is more of a utility function than a necessary function of the package.
Instead of joining all the elements of the array together in one long string, like join(),
it prints the elements of $array_ref to STDOUT, adding a newline (\n) after every $row_length_in_elements
elements. Additionally, if you include a $high_state_character and a $low_state_character,
it will print the $high_state_character (can be more than one character) for every element that
has a true value, and the $low_state_character for every element that has a false value.
If you do not supply a $high_state_character, or the $high_state_character is a null, empty, or
undefined string, join_cols() will just print the numerical value of each element separated
by a null character (\0). join_cols() defaults to the latter behaviour.
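
For example, a sketch that prints a 20-element map as four rows of five elements,
using '#' for true elements and '.' for false ones:

	$net->join_cols(\@map,5,'#','.');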



=item $net->pdiff($array_ref_A, $array_ref_B);

This function is used VERY heavily internally to calculate the difference in percent
between elements of the two array refs passed. It returns a %.10f (sprintf-format)
percent string.
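
For example:

	print $net->pdiff([0,5,10],[0,4,12]),"\n";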


=item $net->p($a,$b);

BackProp.pm  view on Meta::CPAN




=item $net->intr($float);

Rounds a floating-point number to an integer using sprintf() and int(). Provides
better rounding than just calling int() on the float. Also used very heavily internally.
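
For example:

	print $net->intr(0.9999999);	# prints 1
	print $net->intr(2.7);		# prints 3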



=item $net->high($array_ref);

BackProp.pm  view on Meta::CPAN

		$net->learn($net->crunch("I love oranges."),$net->crunch("Good, Healthy Food."));
	}
	
	my $response = $net->run($net->crunch("I love corn."));
	
	print $net->uncrunch($response),"\n";


On my system, this responds with, "Good, Healthy Food." If you try to run crunch() with
"I love pop.", though, you will probably get "Food! apples. apples." (At least it returns
that on my system.) As you can see, the associations are not yet perfect, but it can make

BackProp.pm  view on Meta::CPAN

These must be in the range of 0..319 for $left and $right, and the range of 0..199 for
$top and $bottom. The block is returned as an array ref with horizontal lines in sequential order.
I.e. to get a pixel at [2,5] in the block, when the block width ($right-$left) is 20, the element in
the array ref containing the contents of coordinates [2,5] would be found at [5*20+2] ($y*$width+$x).
    
	print +(@{$pcx->get_block(0,0,20,50)})[5*20+2];

This would print the contents of the element at block coords [2,5].



=item $pcx->get($x,$y);



AI-NeuralNet-FastSOM

 view release on metacpan or  search on metacpan

examples/eigenvector_initialization.pl  view on Meta::CPAN


	while ($A->getdim(0) < $A->getdim(1)) {                          # make the beast quadratic
	    $A = append ($A, zeroes (1));                                # by padding zeroes
	}
	my ($E, $e) = eigens_sym $A;
#	print $E;
#	print $e;

	my @es = list $e;                                                # eigenvalues
#	warn "es : ".Dumper \@es;
	my @es_desc = sort { $b <=> $a } @es;                            # eigenvalues sorted desc
#	warn "desc: ".Dumper \@es_desc;



AI-NeuralNet-Hopfield


lib/AI/NeuralNet/Hopfield.pm  view on Meta::CPAN

		$new_matrix->set($row, 1, $value);
	}
	return $new_matrix;
}

sub print_matrix() {
    my $matrix  = shift;
    my $rs = $matrix->{_rows};
    my $cs = $matrix->{_cols};

	for (my $i = 1; $i <= $rs; $i++) {



AI-NeuralNet-Kohonen-Demo-RGB


RGB.pm  view on Meta::CPAN

# Used only by &tk_train
#
sub tk_show { my $self=shift;
	for my $x (0..$self->{map_dim_x}){
		for my $y (0..$self->{map_dim_y}){
			my $colour = sprintf("#%02x%02x%02x",
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[0])),
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[1])),
				(int (255 * $self->{map}->[$x]->[$y]->{weight}->[2])),
			);
			if ($self->{display} and $self->{display} eq 'hex'){

RGB.pm  view on Meta::CPAN


		$self->_adjust_neighbours_of($bmu,$target);
		$self->_decay_learning_rate;

		$self->tk_show;
		$label_txt = sprintf("Epoch: %04d",$self->{t})."  "
		. "Learning: $self->{l}  "
		. sprintf("BMU: %02d,%02d",$bmu->[1],$bmu->[2])."  "
		. "Target: [".join(",",@$target)."]  "
		;
		$self->{c}->update;
		$l->update;
        DoOneEvent(DONT_WAIT);		# be kind and process XEvents if they arise



AI-NeuralNet-Kohonen-Visual


lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN

	use base "AI::NeuralNet::Kohonen::Visual";

	sub get_colour_for { my ($self,$x,$y) = (shift,shift,shift);
		# From here you return a TK colour name.
		# Get it as you please; for example, values of a 3D map:
		return sprintf("#%02x%02x%02x",
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[0])),
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[1])),
			(int (255 * $self->{map}->[$x]->[$y]->{weight}->[2])),
		);
	}

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN

				if ($self->{show_bmu}){
					$self->plot_map(bmu_x=>$bmu->[1],bmu_y=>$bmu->[2]);
				} else {
					$self->plot_map;
				}
				$self->{_label_txt} = sprintf("Epoch: %04d",$self->{t})."  "
				. "Learning: $self->{l}  "
				. sprintf("BMU: %02d,%02d",$bmu->[1],$bmu->[2])."  "
				.( exists $target->{class}? "Target: [$target->{class}]  " : "")
				;
				$self->{_canvas}->update;
				$self->{_label}->update;
				DoOneEvent(DONT_WAIT);		# be kind and process XEvents if they arise

lib/AI/NeuralNet/Kohonen/Visual.pm  view on Meta::CPAN

	$_0 = $self->{missing_colour} || 0 if $_0 eq $self->{missing_mask};
	my $_1 = $self->{map}->[$x]->[$y]->{weight}->[1];
	$_1 = $self->{missing_colour} || 0 if $_1 eq $self->{missing_mask};
	my $_2 = $self->{map}->[$x]->[$y]->{weight}->[2];
	$_2 = $self->{missing_colour} || 0 if $_2 eq $self->{missing_mask};
	return sprintf("#%02x%02x%02x",
		(int (255 * $_0)),
		(int (255 * $_1)),
		(int (255 * $_2)),
	);
}

 view all matches for this distribution


AI-NeuralNet-Kohonen

 view release on metacpan or  search on metacpan

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

Print the current weight values to the screen.

=cut

sub dump { my $self=shift;
	print "    ";
	for my $x (0..$self->{map_dim_x}){
		printf ("  %02d ",$x);
	}
	print"\n","-"x107,"\n";
	for my $x (0..$self->{map_dim_x}){
		for my $w (0..$self->{weight_dim}){
			printf ("%02d | ",$x);
			for my $y (0..$self->{map_dim_y}){
				printf("%.2f ", $self->{map}->[$x]->[$y]->{weight}->[$w]);
			}
			print "\n";
		}
		print "\n";
	}
}

=head1 METHOD smooth

lib/AI/NeuralNet/Kohonen.pm  view on Meta::CPAN

	if (not open OUT,">$path"){
		warn "Could not open file for writing <$path>: $!";
		return undef;
	}
	#- Dimensionality of the vectors (integer, compulsory).
	print OUT ($self->{weight_dim}+1)," ";	# Perl indexing
	#- Topology type, either hexa or rect (string, optional, case-sensitive).
	if (not defined $self->{display}){
		print OUT "rect ";
	} else { # $self->{display} eq 'hex'
		print OUT "hexa ";
	}
	#- Map dimension in x-direction (integer, optional).
	print OUT $self->{map_dim_x}." ";
	#- Map dimension in y-direction (integer, optional).
	print OUT $self->{map_dim_y}." ";
	#- Neighborhood type, either bubble or gaussian (string, optional, case-sen- sitive).
	print OUT "gaussian ";
	# End of header
	print OUT "\n";

	# Format input data
	foreach (@{$self->{input}}){
		print OUT join("\t",@{$_->{values}});
		if ($_->{class}){
			print OUT " $_->{class} " ;
		}
		print OUT "\n";
	}
	# EOF
	print OUT chr 26;
	close OUT;
	return 1;
}
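
# Illustrative output of the code above (a sketch, not taken from the module):
# assuming 3-component weight vectors (weight_dim of 2), a 19x12 map and a
# hexagonal display, the file would start roughly like this:
#
#   3 hexa 19 12 gaussian
#   0.10	0.20	0.30 some_class
#
# Each data line holds one tab-separated input vector, followed by its
# optional class label.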


 view all matches for this distribution


AI-NeuralNet-Mesh

 view release on metacpan or  search on metacpan

Mesh.pm  view on Meta::CPAN

	
	# Debugging subs
	$AI::NeuralNet::Mesh::DEBUG  = 0;
	sub whowasi { (caller(1))[3] . '()' }
	sub debug { shift; $AI::NeuralNet::Mesh::DEBUG = shift || 0; } 
	sub d { shift if(substr($_[0],0,4) eq 'AI::'); my ($a,$b,$c)=(shift,shift,$AI::NeuralNet::Mesh::DEBUG); print $a if($c == $b); return $c }
	sub verbose {debug @_};
	sub verbosity {debug @_};
	sub v {debug @_};
	
	

Mesh.pm  view on Meta::CPAN

	sub version {
		shift if(substr($_[0],0,4) eq 'AI::');
		substr((split(/\s/,(shift || $AI::NeuralNet::Mesh::ID)))[2],1);
	}                                  
	
	# Rounds a floating-point to an integer with int() and sprintf()
	sub intr  {
    	shift if(substr($_[0],0,4) eq 'AI::');
      	try   { return int(sprintf("%.0f",shift)) }
      	catch { return 0 }
	}
    
	# Package constructor
	sub new {

Mesh.pm  view on Meta::CPAN

		my $file	=	shift;
		no strict 'refs';
		
		open(FILE,">$file");
	    
	    print FILE "header=$AI::NeuralNet::Mesh::ID\n";
	   	
		print FILE "total_layers=$self->{total_layers}\n";
		print FILE "total_nodes=$self->{total_nodes}\n";
	    print FILE "nodes=$self->{nodes}\n";
	    print FILE "inputs=$self->{inputs}\n";
	    print FILE "outputs=$self->{outputs}\n";
	    print FILE "layers=",(($self->{layers})?join(',',@{$self->{layers}}):''),"\n";
	    
	    print FILE "rand=$self->{random}\n";
	    print FILE "const=$self->{const}\n";
	    print FILE "cw=$self->{col_width}\n";
		print FILE "crunch=$self->{_crunched}->{_length}\n";
		print FILE "rA=$self->{rA}\n";
		print FILE "rB=$self->{rB}\n";
		print FILE "rS=$self->{rS}\n";
		print FILE "rRef=",(($self->{rRef})?join(',',@{$self->{rRef}}):''),"\n";
			
		for my $a (0..$self->{_crunched}->{_length}-1) {
			print FILE "c$a=$self->{_crunched}->{list}->[$a]\n";
		}
	
		my $n = 0;
		for my $x (0..$self->{total_layers}) {
			for my $y (0..$self->{layers}->[$x]-1) {
			    my $w='';
				for my $z (0..$self->{layers}->[$x-1]-1) {
					$w.="$self->{mesh}->[$n]->{_inputs}->[$z]->{weight},";
				}
				print FILE "n$n=$w$self->{mesh}->[$n]->{activation},$self->{mesh}->[$n]->{threshold},$self->{mesh}->[$n]->{mean}\n";
				$n++;
			}
		}
		
	    close(FILE);

Mesh.pm  view on Meta::CPAN

		my $n 		=	0;    
		no strict 'refs';
		for my $x (0..$self->{total_layers}) {
			for my $y (0..$self->{layers}->[$x]-1) {
				for my $z (0..$self->{layers}->[$x-1]-1) {
					print "$self->{mesh}->[$n]->{_inputs}->[$z]->{weight},";
				}
				$n++;
			}
			print "\n";
		}
	}
	  
	# Set the activation type of a specific layer.
	# usage: $net->activation($layer,$type);

Mesh.pm  view on Meta::CPAN

		my $rand	=	shift;
		return $self->{random}	if(!(defined $rand));
		$self->{random}	=	$rand;
	}
	
	# Sets/gets column width for printing lists in debug modes 1,3, and 4.
	sub col_width {
		my $self	=	shift;
		my $width	=	shift;
		return $self->{col_width}	if(!$width);
		$self->{col_width}	=	$width;

Mesh.pm  view on Meta::CPAN

	
	# Used to format array ref into columns
	# Usage: 
	#	join_cols(\@array,$row_length_in_elements,$high_state_character,$low_state_character);
	# Can also be called as method of your neural net.
	# If $high_state_character is null, prints actual numerical values of each element.
	sub join_cols {
		no strict 'refs';
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my $map		=	shift;
		my $break   =	shift;

Mesh.pm  view on Meta::CPAN

		my $b		=	shift;
		my $x;
		foreach my $el (@{$map}) { 
			my $str = ((int($el))?$a:$b);
			$str=$el."\0" if(!$a);
			print $str;	$x++;
			if($x>$break-1) { print "\n"; $x=0;	}
		}
		print "\n";
	}
	
	# Returns percentage difference between all elements of two
	# array refs of exact same length (in elements).
	# Now calculates actual difference in numerical value.

Mesh.pm  view on Meta::CPAN

				if($a<$b){$t=$a;$a=$b;$b=$t;}
				$a=1 if(!$a); $diff+=(($a-$b)/$a)*100;
			}
		}
		$a1s = 1 if(!$a1s);
		return sprintf("%.10f",($diff/$a1s));
	}
	
	# Returns $fa as a percentage of $fb
	sub p {
		shift if(substr($_[0],0,4) eq 'AI::'); 
		my ($fa,$fb)=(shift,shift); 
		sprintf("%.3f",$fa/$fb*100); #((($fb-$fa)*((($fb-$fa)<0)?-1:1))/$fa)*100
	}
	
	# Returns the index of the element in array REF passed with the highest 
	# comparative value
	sub high {

Mesh.pm  view on Meta::CPAN

	# Present it with two test cases
	my $result_bit_1 = $net->run([0,1])->[0];
	my $result_bit_2 = $net->run([1,1])->[0];
	
	# Display the results
	print "AND test with inputs (0,1): $result_bit_1\n";
	print "AND test with inputs (1,1): $result_bit_2\n";
	

=head1 VERSION & UPDATES

This is version B<0.44>, an update release for version 0.43.

Mesh.pm  view on Meta::CPAN

					  inc	=>	0.1,	
					  max	=>	500,
					 );
			
	# Print it 
	print "Forgetfullness: $f%";

    
This is a snippet from the example script examples/finance.pl, which demonstrates DOW average
prediction for the next month. A simpler set definition would be as follows:

Mesh.pm  view on Meta::CPAN

=item $net->benchmark();

=item $net->benchmarked();

This returns a benchmark info string for the last learn() call.
It is easily printed as a string, as follows:

	print "Last learn() took ",$net->benchmark(),"\n";



=item $net->verbose($level);

Mesh.pm  view on Meta::CPAN


Toggles debugging off if called with $level = 0 or no arguments. There are several levels
of debugging. 

NOTE: Debugging verbosity has been toned down somewhat from AI::NeuralNet::BackProp,
but level 4 still prints the same amount of information as you were used to. The other
levels, however, are mostly for advanced use. There is not much explanation for the other
levels, but they are included for those of you who feel daring (or just plain bored).

Level 0 ($level = 0) : Default, no debugging information printed. All printing is 
left to the calling script.

Level 1 ($level = 1) : Displays the activity between nodes, prints what values were
received and what they were weighted to.

Level 2 ($level = 2) : Just prints info from the learn() loop, in the form of "got: X, wanted Y"
type of information. This is about the third most useful debugging level, after level 12 and
level 4.

Level 3 ($level = 3) : I don't think I included any level 3 debugs in this version.

Level 4 ($level = 4) : This level is the one I use most. It is only used during learning. It
displays the current error (difference between actual outputs and the target outputs you
asked for), as well as the current loop number and the benchmark time for the last learn cycle.
Also printed are the actual outputs and the target outputs below the benchmark times.

Level 12 ($level = 12) : Level 12 prints a dot (period) [.] after each learning loop is
complete. This is useful for letting the user know that stuff is happening, but without
having to display any of the internal variables. I use this in the ex_aln.pl demo,
as well as the ex_agents.pl demo.

Toggles debugging off when called with no arguments. 
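
For example (a minimal sketch; the level values are the ones described above):

	# Show current error, loop counts and benchmark times while learning
	$net->debug(4);

	# Or just print a dot after each completed learning loop
	$net->verbose(12);

	# Turn all debugging output back off
	$net->debug(0);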

Mesh.pm  view on Meta::CPAN


This will load from disk any network saved by save() and completely restore the internal
state to the point at which save() was called.

If the file is of an invalid file type, then load() will
return undef. Use the error() method, below, to print the error message.

If there were no errors, it will return a reference to $net.
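
For example, a defensive call might look like this (a minimal sketch; it assumes error() returns the last error message):

	$net->load("mesh.net")
		or die "Could not load network: " . $net->error();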

UPDATE: $filename can now be a newline-separated set of mesh data. This enables you
to do $net->load(join("\n",<DATA>)) and other fun things. I added this mainly

Mesh.pm  view on Meta::CPAN


=item $net->join_cols($array_ref,$row_length_in_elements,$high_state_character,$low_state_character);

This is more of a utility function than a strictly necessary part of the package.
Instead of joining all the elements of the array together in one long string, like join(),
it prints the elements of $array_ref to STDOUT, adding a newline (\n) after every $row_length_in_elements
elements. Additionally, if you include a $high_state_character and a $low_state_character,
it will print the $high_state_character (which can be more than one character) for every element that
has a true value, and the $low_state_character for every element that has a false value.
If you do not supply a $high_state_character, or the $high_state_character is a null, empty, or
undefined string, join_cols() will just print the numerical value of each element separated
by a null character (\0). join_cols() defaults to the latter behaviour.
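
For example (a minimal sketch showing both display modes described above):

	# Print true values as '#' and false values as '.', four per row
	$net->join_cols([1,0,1,1, 0,0,1,0], 4, '#', '.');

	# Without the state characters, the raw values are printed instead
	$net->join_cols([1,0,1,1, 0,0,1,0], 4);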



=item $net->extend(\@array_of_hashes);

Mesh.pm  view on Meta::CPAN




=item $net->intr($float);

Rounds a floating-point number to an integer using sprintf() and int(). Provides
better rounding than just calling int() on the float. Also used very heavily internally.
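
For example (a minimal sketch):

	print $net->intr(2.7), "\n";   # 3
	print $net->intr(2.2), "\n";   # 2
	print int(2.7), "\n";          # 2 -- plain int() just truncates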



=item $net->high($array_ref);

Mesh.pm  view on Meta::CPAN



=item $net->pdiff($array_ref_A, $array_ref_B);

This function is used VERY heavily internally to calculate the difference in percent
between elements of the two array refs passed. It returns a %.10f (sprintf-format)
percent string.
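
For example (a minimal sketch; both refs must contain the same number of elements):

	my $diff = $net->pdiff([1, 2, 4], [1, 2, 2]);
	print "difference: $diff%\n";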




Mesh.pm  view on Meta::CPAN

			$net->crunch("I love pop."),    $net->crunch("That's Junk Food!")),
			$net->crunch("I love oranges."),$net->crunch("Good, Healthy Food."))
		]);
	}
	
	print $net->run_uc("I love corn."),"\n";


On my system, this responds with "Good, Healthy Food." If you try to run crunch() with
"I love pop.", though, you will probably get "Food! apples. apples." (At least it returns
that on my system.) As you can see, the associations are not yet perfect, but it can make

 view all matches for this distribution


AI-NeuralNet-SOM

 view release on metacpan or  search on metacpan

examples/eigenvector_initialization.pl  view on Meta::CPAN


	while ($A->getdim(0) < $A->getdim(1)) {                          # make the beast quadratic
	    $A = append ($A, zeroes (1));                                # by padding zeroes
	}
	my ($E, $e) = eigens_sym $A;
#	print $E;
#	print $e;

	my @es = list $e;                                                # eigenvalues
#	warn "es : ".Dumper \@es;
	my @es_desc = sort { $b <=> $a } @es;                            # eigenvalues sorted desc
#	warn "desc: ".Dumper \@es_desc;

 view all matches for this distribution


AI-NeuralNet-Simple

 view release on metacpan or  search on metacpan

examples/game_ai.pl  view on Meta::CPAN



my $format = "%8s %5s %3s %7s %6s\n";
my @actions = qw/attack run wander hide/;

printf $format, qw/Health Knife Gun Enemies Action/;
display_result($net,2,1,1,1);
display_result($net,2,0,0,2);
display_result($net,2,0,1,2);
display_result($net,2,0,1,3);
display_result($net,1,1,0,0);
display_result($net,1,0,1,2);
display_result($net,0,1,0,3);

while (1) {
    print "Type 'quit' to exit\n";
    my $health  = prompt("Am I in poor, average, or good health? ", qr/^(?i:[pag])/);
    my $knife   = prompt("Do I have a knife? ", qr/^(?i:[yn])/);
    my $gun     = prompt("Do I have a gun? ", qr/^(?i:[yn])/);
    my $enemies = prompt("How many enemies can I see? ", qr/^\d+$/);
    

examples/game_ai.pl  view on Meta::CPAN

    $health =~ tr/pag/012/;
    foreach ($knife,$gun) {
        $_ = substr $_, 0, 1;
        tr/yn/10/;
    }
    printf "I think I will %s!\n\n", $actions[$net->winner([
        $health, 
        $knife, 
        $gun, 
        $enemies])];
}

examples/game_ai.pl  view on Meta::CPAN

{
    my ($message,$domain) = @_;
    my $valid_response = 0;
    my $response;
    do {
        print $message;
        chomp($response = <STDIN>);
        exit if substr(lc $response, 0, 1) eq 'q';
        $valid_response = $response =~ /$domain/;
    } until $valid_response;
    return $response;

examples/game_ai.pl  view on Meta::CPAN

    my ($net,@data) = @_;
    my $result      = $net->winner(\@data);
    my @health      = qw/Poor Average Good/;
    my @knife       = qw/No Yes/;
    my @gun         = qw/No Yes/;
    printf $format, 
        $health[$_[1]], 
        $knife[$_[2]], 
        $gun[$_[3]], 
        $_[4],             # number of enemies
        $actions[$result];

 view all matches for this distribution


AI-Ollama-Client

 view release on metacpan or  search on metacpan

lib/AI/Ollama/Client/Impl.pm  view on Meta::CPAN

        } elsif( $resp->code == 404 ) {
            # Blob was not found
            $res->done($resp);
        } else {
            # An unknown/unhandled response, likely an error
            $res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
        }
    })->retain;

    # Start our transaction
    $self->emit(request => $tx);

lib/AI/Ollama/Client/Impl.pm  view on Meta::CPAN

        if( $resp->code == 201 ) {
            # Blob was successfully created
            $res->done($resp);
        } else {
            # An unknown/unhandled response, likely an error
            $res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
        }
    })->retain;

    # Start our transaction
    $self->emit(request => $tx);

lib/AI/Ollama/Client/Impl.pm  view on Meta::CPAN

                        $queue->finish();
                    }
                });
            } else {
                # Unknown/unhandled content type
                $res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
            }
        } else {
            # An unknown/unhandled response, likely an error
            $res->fail( sprintf( "unknown_unhandled code %d", $resp->code ), $resp);
        }
    });

    my $_tx;
    $tx->res->once( progress => sub($msg, @) {

lib/AI/Ollama/Client/Impl.pm  view on Meta::CPAN

        if( $resp->code == 200 ) {
            # Successful operation.
            $res->done($resp);
        } else {
            # An unknown/unhandled response, likely an error
            $res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
        }
    })->retain;

    # Start our transaction
    $self->emit(request => $tx);

lib/AI/Ollama/Client/Impl.pm  view on Meta::CPAN

                        $queue->finish();
                    }
                });
            } else {
                # Unknown/unhandled content type
                $res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
            }
        } else {
            # An unknown/unhandled response, likely an error
            $res->fail( sprintf( "unknown_unhandled code %d", $resp->code ), $resp);
        }
    });

    my $_tx;
    $tx->res->once( progress => sub($msg, @) {

lib/AI/Ollama/Client/Impl.pm  view on Meta::CPAN

        if( $resp->code == 200 ) {
            # Successful operation.
            $res->done($resp);
        } else {
            # An unknown/unhandled response, likely an error
            $res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
        }
    })->retain;

    # Start our transaction
    $self->emit(request => $tx);

lib/AI/Ollama/Client/Impl.pm  view on Meta::CPAN

                    AI::Ollama::GenerateEmbeddingResponse->new($payload),

                );
            } else {
                # Unknown/unhandled content type
                $res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
            }
        } else {
            # An unknown/unhandled response, likely an error
            $res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
        }
    })->retain;

    # Start our transaction
    $self->emit(request => $tx);

lib/AI/Ollama/Client/Impl.pm  view on Meta::CPAN

                        $queue->finish();
                    }
                });
            } else {
                # Unknown/unhandled content type
                $res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
            }
        } else {
            # An unknown/unhandled response, likely an error
            $res->fail( sprintf( "unknown_unhandled code %d", $resp->code ), $resp);
        }
    });

    my $_tx;
    $tx->res->once( progress => sub($msg, @) {

lib/AI/Ollama/Client/Impl.pm  view on Meta::CPAN

                        $queue->finish();
                    }
                });
            } else {
                # Unknown/unhandled content type
                $res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
            }
        } else {
            # An unknown/unhandled response, likely an error
            $res->fail( sprintf( "unknown_unhandled code %d", $resp->code ), $resp);
        }
    });

    my $_tx;
    $tx->res->once( progress => sub($msg, @) {

lib/AI/Ollama/Client/Impl.pm  view on Meta::CPAN

                    AI::Ollama::PushModelResponse->new($payload),

                );
            } else {
                # Unknown/unhandled content type
                $res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
            }
        } else {
            # An unknown/unhandled response, likely an error
            $res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
        }
    })->retain;

    # Start our transaction
    $self->emit(request => $tx);

lib/AI/Ollama/Client/Impl.pm  view on Meta::CPAN

                    AI::Ollama::ModelInfo->new($payload),

                );
            } else {
                # Unknown/unhandled content type
                $res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
            }
        } else {
            # An unknown/unhandled response, likely an error
            $res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
        }
    })->retain;

    # Start our transaction
    $self->emit(request => $tx);

lib/AI/Ollama/Client/Impl.pm  view on Meta::CPAN

                    AI::Ollama::ModelsResponse->new($payload),

                );
            } else {
                # Unknown/unhandled content type
                $res->fail( sprintf("unknown_unhandled content type '%s'", $resp->content_type), $resp );
            }
        } else {
            # An unknown/unhandled response, likely an error
            $res->fail( sprintf( "unknown_unhandled code %d: %s", $resp->code, $resp->body ), $resp);
        }
    })->retain;

    # Start our transaction
    $self->emit(request => $tx);

 view all matches for this distribution


AI-PBDD

 view release on metacpan or  search on metacpan

lib/AI/PBDD.pm  view on Meta::CPAN

// BDD analysis
		 support
		 nodeCount
		 satOne
		 satCount
// printing
		 printDot
		 printSet
		 print
// debugging
		 printStats                 
		 checkPackage
		 debugPackage
		 debugBDD
// low-level access
		 internal_index

lib/AI/PBDD.pm  view on Meta::CPAN

  } else {
    return satCount__II($bdd, $vars_ignored);
  }
}

sub printDot {
  my ($bdd, $filename) = @_;

  if (!defined($filename)) {
      printDot__I($bdd);
  } else {
      printDot__II($bdd, $filename);
  }
}

sub makeSet {
  my ($vars, $size, $offset) = @_;

lib/AI/PBDD.pm  view on Meta::CPAN


Perl wrapper for the BuDDy C library

=head1 SYNOPSIS

  use AI::PBDD qw(init createBDD and printDot kill);

  init(100, 100000);

  my $var1 = createBDD();
  my $var2 = createBDD();

  my $bdd = and($var1, $var2);

  printDot($bdd);

  kill();

=head1 DESCRIPTION

lib/AI/PBDD.pm  view on Meta::CPAN


=head2 PRINTING

=over 4

=item B<printDot($bdd)>

=item B<printDot($bdd,$filename)>

Print the BDD as a Graphviz DOT model to STDOUT (or the given C<$filename>); see the short example after this list.

=item B<printSet($bdd)>

Print the BDD minterms to STDOUT.

=item B<print($bdd)>

Print the BDD in the native BuDDy representation to STDOUT.

=back
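
For example, to write a model to a file and render it with Graphviz (a minimal sketch; the shell step assumes the Graphviz C<dot> tool is installed):

	printDot($bdd, "model.dot");
	# then, from the shell:
	#   dot -Tpng model.dot -o model.png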

=head2 DEBUGGING

=over 4

=item B<printStats>

Print package statistics to STDOUT.

=item B<$ok = checkPackage>

 view all matches for this distribution


AI-PSO

 view release on metacpan or  search on metacpan

examples/NeuralNet/pso_ann.pl  view on Meta::CPAN


sub test_fitness_function(@) {
    my (@arr) = (@_);
	&writeAnnConfig($annConfig, $numInputs, $numHidden, $xferFunc, @arr);
	my $netValue = &runANN($annConfig, $annInputs);
	print "network value = $netValue\n";

	# The closer the network value gets to our desired value,
	# the closer we want the fitness to be to 1.
	#
	# This is a special case of the sigmoid, and looks an awful lot

examples/NeuralNet/pso_ann.pl  view on Meta::CPAN


sub writeAnnConfig() {
	my ($configFile, $inputs, $hidden, $func, @weights) = (@_);

	open(ANN, ">$configFile");
	print ANN "$inputs $hidden\n";
	print ANN "$func\n";
	foreach my $weight (@weights) {
		print ANN "$weight ";
	}
	print ANN "\n";
	close(ANN);
}
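
# Illustrative contents of the config file written above (a sketch, assuming
# 3 inputs, 2 hidden nodes, a "sigmoid" transfer function and four weights):
#
#   3 2
#   sigmoid
#   0.12 -0.40 0.77 0.05
#
# Weights are printed space-separated on a single line.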

sub runANN($$) {
	my ($configFile, $dataFile) = @_;

 view all matches for this distribution


( run in 1.283 second using v1.01-cache-2.11-cpan-de7293f3b23 )