lib/A1z/HTML5/Template.pm view on Meta::CPAN
$h->open_file("C:/Inetpub/wwwroot/MyApp/menu.txt", "menu", "Menu");
This is the heart of the App.
=head2 OUTPUT FORMAT OPTIONS:
table, accordion, menu, as is; where "as is" is the default
$h->open_file( file => "abs/path/to/file", output_format => "table", output_header => "Heading" );
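For illustration, a short sketch that renders the same file with each of the formats listed above; the bare C<new()> call and the idea that omitting C<output_format> falls back to the "as is" default are assumptions, not taken from this excerpt:
  use A1z::HTML5::Template;
  my $h = A1z::HTML5::Template->new();
  for my $format ( 'table', 'accordion', 'menu' ) {
      $h->open_file(
          file          => "C:/Inetpub/wwwroot/MyApp/menu.txt",
          output_format => $format,
          output_header => "Menu",
      );
  }
  # no output_format given: assumed to fall back to the "as is" default
  $h->open_file( file => "C:/Inetpub/wwwroot/MyApp/menu.txt" );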
lib/ACH/Generator.pm view on Meta::CPAN
}
}
}
# Open the output file for writing; die with a useful message on failure
open(OUTPUT, ">", $file)
    or die "Error: Couldn't open file $file: $!\n";
# Print data out to ACH file
print OUTPUT "$data";
# Close the ACH file
close (OUTPUT);
}
=head2 CAVEATS
lib/ACH/Parser.pm view on Meta::CPAN
# Get the file name
my $self = shift;
my $file = shift or _croak "Need an ACH file";
# Open the input file for reading; die with a useful message on failure
open(INPUT, "<", $file)
    or die "Error: Couldn't open file $file: $!\n";
# Get the file contents
my @data = <INPUT>;
my $dataline = $data[0];
my $pos = 0;
# Loop Through all entries
while ($pos < length($dataline)) {
lib/ACH/Parser.pm view on Meta::CPAN
# Save data to list
push @{ $self->{_achData} }, \@dataArray;
}
# Close the Input file
close (INPUT);
}
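The loop above walks each ACH record field by field; a minimal sketch of that fixed-width technique, where the field widths in C<@widths> are hypothetical placeholders rather than the real ACH layout used by ACH::Parser:
  my @widths = ( 1, 2, 10, 6 );     # hypothetical field lengths
  my ( $pos, @dataArray ) = ( 0 );
  for my $len (@widths) {
      last if $pos >= length $dataline;
      push @dataArray, substr( $dataline, $pos, $len );
      $pos += $len;
  }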
=head2 CAVEATS
lib/ACL/Regex.pm view on Meta::CPAN
containing the return code, matched regex, and any comment associated
with the regex.
=back
=head2 INPUT FILES
=head3 ACL REGEX FILE
An example of an input ACL file can be found in the I<t> folder of this project, but it simply
consists of rows that look like:
AES_init_ctx_iv(&ctx, key, key + 16);
AES_CTR_xcrypt_buffer(&ctx, output, text_size + padding_len);
RETVAL = newSVpv(output, text_size + padding_len);
free(output);
OUTPUT:
RETVAL
SV *
AES128_CTR_decrypt(SV *sv_cipher_text, SV *sv_secret)
CODE:
AES_init_ctx_iv(&ctx, key, key + 16);
AES_CTR_xcrypt_buffer(&ctx, cipher_text, text_size);
uint8_t padding_len = cipher_text[text_size -1];
RETVAL = newSVpv(cipher_text, text_size - padding_len);
OUTPUT:
RETVAL
pod/afsmonitor.pod view on Meta::CPAN
=item B<output>
Names the file to which the B<afsmonitor> function writes all of the
statistics that it collects. By default, no output file is created.
See the L<"writing to an output file"|/"WRITING TO AN OUTPUT FILE">
section below for more information on this file.
=item B<detailed>
Formats the information in the output file named by the B<output> argument in a
pod/afsmonitor.pod view on Meta::CPAN
associated with the threshold, the key B<handler>'s value should be the name
of the handler function and any arguments to be passed to the handler.
=back
=head1 OUTPUT
The return values are references to two arrays, one for file
servers and one for cache managers. Each entry in each array
is a reference to a hash containing information about one of
the hosts specified either by the fshosts or cmhosts options,
pod/afsmonitor.pod view on Meta::CPAN
For examples of accessing the information in the returned data structures
and printing it in a readable format, refer to the B<afsmonitor> script
in the B<examples> directory.
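As a rough sketch of what such access looks like (the C<AFS::Monitor> import, the host names, and the key-listing loop below are assumptions; only the C<fshosts>/C<cmhosts>/C<output> option names come from this document):
  use AFS::Monitor qw(afsmonitor);
  my ($fs, $cm) = afsmonitor(
      fshosts => [ 'fs1.example.org', 'fs2.example.org' ],
      cmhosts => [ 'client1.example.org' ],
      output  => '/tmp/afsmon.out',          # optional statistics file
  );
  # each entry is a hash reference describing one monitored host
  for my $host ( @$fs, @$cm ) {
      print join( ', ', sort keys %$host ), "\n";
  }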
=head1 WRITING TO AN OUTPUT FILE
Include the B<output> argument to name the file into which the
B<afsmonitor> function writes all of the statistics it collects.
The output file is in ASCII format and records the same
RETVAL = (code == 0);
#else
not_here("AFS::CM::setcrypt");
#endif
}
OUTPUT:
RETVAL
void
fs_whichcell(dir,follow=1)
char * dir
}
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
fs_flushvolume(path,follow=1)
char * path
vi.out = (caddr_t) space;
code = pioctl(path, VIOC_FLUSHVOLUME, &vi, follow);
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
fs_flush(path,follow=1)
char * path
code = pioctl(path, VIOCFLUSH, &vi, follow);
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
fs_flushcb(path,follow=1)
char * path
vi.out = (caddr_t) space;
code = pioctl(path, VIOCFLUSHCB, &vi, follow);
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
fs_setquota(path,newquota,follow=1)
char * path
code = pioctl(path, VIOCSETVOLSTAT, &vi, follow);
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
void
fs_getquota(path,follow=1)
char * path
code = symlink(buffer, mountp);
}
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
fs_checkvolumes()
CODE:
vi.out_size = 0;
code = pioctl(NULL, VIOCCKBACK, &vi, 0);
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
fs_checkconn()
CODE:
vi.out = (caddr_t) & status;
code = pioctl(NULL, VIOCCKCONN, &vi, 0);
SETCODE(code);
RETVAL = (status == 0);
}
OUTPUT:
RETVAL
int32
fs_getcacheparms()
PPCODE:
vi.out = 0;
code = pioctl(NULL, VIOCSETCACHESIZE, &vi, 0);
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
fs_unlog()
CODE:
vi.out_size = 0;
code = pioctl(NULL, VIOCUNLOG, &vi, 0);
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
fs_getfid(path,follow=1)
char * path
code = errno;
else
code = 0;
SETCODE(code);
}
OUTPUT:
RETVAL
int32
fs_cm_access(path,perm="read",follow=1)
char * path
code = pioctl(path, VIOCACCESS, &vi, follow);
}
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
fs_ascii2rights(perm)
char * perm
if (code != 0)
rights = -1;
RETVAL = rights;
}
OUTPUT:
RETVAL
void
fs_rights2ascii(perm)
int32 perm
code = pioctl(dir, VIOCSETAL, &vi, follow);
}
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
MODULE = AFS PACKAGE = AFS::KTC_PRINCIPAL PREFIX = ktcp_
{
safefree(p);
# SETCODE(0); this spoils the ERROR code
RETVAL = 1;
}
OUTPUT:
RETVAL
void
ktcp_name(p,name=0)
AFS::KTC_PRINCIPAL p
{
if (t && t != &the_null_token) safefree(t);
# SETCODE(0); this spoils the ERROR code
RETVAL = 1;
}
OUTPUT:
RETVAL
int32
ktct_startTime(t)
AFS::KTC_TOKEN t
{
safefree(k);
# SETCODE(0); this spoils the ERROR code
RETVAL = 1;
}
OUTPUT:
RETVAL
void
ktck_string(k)
AFS::KTC_EKEY k
SETCODE(code);
/* printf("DEBUG-24 \n"); */
RETVAL = (code == 0);
/* printf("DEBUG-25 \n"); */
}
OUTPUT:
RETVAL
void
vos_status(cstruct, aserver)
AFS::VOS cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
int32
vos_create(cstruct, server, partition, name, maxquota=Nullsv, vid=Nullsv, rovid=Nullsv)
AFS::VOS cstruct
RETVAL = (int32)volid;
done:
;
}
OUTPUT:
RETVAL
int32
vos_backup(cstruct, name)
AFS::VOS cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
int32
vos_remove(cstruct, name, servername=NULL, parti=NULL)
AFS::VOS cstruct
RETVAL = volid;
done:
;
}
OUTPUT:
RETVAL
int32
vos_rename(cstruct,oldname,newname)
AFS::VOS cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
int32
vos__setfields(cstruct, name, mquota=Nullsv, clearuse=Nullsv)
AFS::VOS cstruct
;
#else
not_here("AFS::VOS::setfields");
#endif
}
OUTPUT:
RETVAL
int32
vos_restore(cstruct,server,partition,name,file=NULL,id=NULL,inter=Nullsv,overwrite=NULL,offline=Nullsv,readonly=Nullsv)
AFS::VOS cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
int32
vos_dump(cstruct, id, time=NULL, file=NULL, server=NULL, partition=NULL, clone=Nullsv, omit=Nullsv)
AFS::VOS cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
void
vos_partinfo(cstruct, server, partname=NULL)
AFS::VOS cstruct
RETVAL = volid;
done:
;
}
OUTPUT:
RETVAL
int32
vos_zap(cstruct, servername, parti, id, force=Nullsv, backup=Nullsv)
AFS::VOS cstruct
RETVAL = volid;
done:
;
}
OUTPUT:
RETVAL
int32
vos_offline(cstruct, servername, parti, id, busy=Nullsv, sleep=Nullsv)
AFS::VOS cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
int32
vos_online(cstruct, servername, parti, id)
AFS::VOS cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
void
vos__backupsys(cstruct, seenprefix=Nullsv, servername=NULL, partition=NULL, exclude=Nullsv, seenxprefix=Nullsv, noaction=Nullsv)
AFS::VOS cstruct
SETCODE(code);
/* printf("DEBUG-24 \n"); */
RETVAL = (code == 0);
/* printf("DEBUG-25 \n"); */
}
OUTPUT:
RETVAL
int32
vldb_addsite(cstruct, server, partition, id, roid=NULL, valid=0)
AFS::VLDB cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
int32
vldb_changeloc(cstruct, id, server, partition)
AFS::VLDB cstruct
;
#else
not_here("AFS::VLDB::changeloc");
#endif
}
OUTPUT:
RETVAL
void
vldb__listvldb(cstruct, name=NULL, servername=NULL, parti=NULL, lock=0)
AFS::VLDB cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
int32
vldb_unlock(cstruct, id)
AFS::VLDB cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
int32
vldb_unlockvldb(cstruct, server=NULL, partition=NULL)
AFS::VLDB cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
int32
vldb__syncvldb(cstruct, server=NULL, partition=NULL, volname=NULL)
AFS::VLDB cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
int32
vldb__changeaddr(cstruct, oldip, newip, remove=0)
AFS::VLDB cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
int32
vldb_remsite(cstruct,server,partition,name)
AFS::VLDB cstruct
}
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
int32
vldb_syncserv(cstruct, servername, parti=NULL)
AFS::VLDB cstruct
RETVAL = 1;
done:
;
}
OUTPUT:
RETVAL
MODULE = AFS PACKAGE = AFS::BOS PREFIX = bos_
{
rx_DestroyConnection(self);
/* printf("bos DEBUG rx_Destroy\n"); */
RETVAL = 1;
}
OUTPUT:
RETVAL
void
bos__status(self, lng=0, object=NULL)
AFS::BOS self
}
SETCODE(code);
RETVAL = (code == 0);
}
}
OUTPUT:
RETVAL
int32
bos_exec(self, cmd)
AFS::BOS self
BSETCODE(code, buffer);
}
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
bos_addhost(self, object, clone=Nullsv)
AFS::BOS self
RETVAL = (code == 0);
done:
;
}
OUTPUT:
RETVAL
int32
bos_removehost(self, object)
AFS::BOS self
} /* for loop */
}
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
bos_prune(self, all=0, bak=0, old=0, core=0)
AFS::BOS self
}
SETCODE(code);
RETVAL = (code == 0);
}
}
OUTPUT:
RETVAL
int32
bos_adduser(self, object)
AFS::BOS self
} /* for loop */
}
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
bos_removeuser(self, object)
AFS::BOS self
}
} /* for loop */
}
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
bos_addkey(self, kvno, string=NULL)
RETVAL = (code == 0);
}
if (tcell)
free(tcell);
}
OUTPUT:
RETVAL
int32
bos_removekey(self, object)
AFS::BOS self
}
}
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
bos__create(self, name, type, object, notifier=NULL)
AFS::BOS self
SETCODE(code);
done:
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
bos__restart(self, bosserver=0, all=0, object=NULL)
AFS::BOS self
}
done:
;
}
OUTPUT:
RETVAL
int32
bos_setrestart(self, time, general=Nullsv, newbinary=Nullsv)
AFS::BOS self
SETCODE(code);
done:
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
void
bos_getrestart(self)
AFS::BOS self
/* printf("DEBUG-bos-delete-10 \n"); */
/* if (name) */
/* Safefree(name); */
/* printf("DEBUG-bos-delete-11 \n"); */
}
OUTPUT:
RETVAL
void
bos_getlog(self, file)
AFS::BOS self
SETCODE(code);
done:
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
bos__startup(self, object=NULL)
AFS::BOS self
SETCODE(code);
done:
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
bos__stop(self, object=NULL, wait=0)
AFS::BOS self
SETCODE(code);
done:
/* printf("DEBUG-XS-bos-stop-7 \n"); */
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
bos__shutdown(self, object=NULL, wait=0)
AFS::BOS self
}
SETCODE(code);
done:
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
int32
bos_setcellname(self, name)
AFS::BOS self
char buffer[256];
sprintf(buffer, "AFS::BOS: failed to set cell (%s)\n", em(code));
BSETCODE(code, buffer);
}
}
OUTPUT:
RETVAL
void
bos_listkeys(self, showkey=0)
AFS::BOS self
#else
RETVAL = 0;
not_here("AFS::BOS::getrestricted");
#endif
}
OUTPUT:
RETVAL
int32
bos_setrestricted(self, mode)
AFS::BOS self
#else
RETVAL = 0;
not_here("AFS::BOS::setrestricted");
#endif
}
OUTPUT:
RETVAL
int32
bos_salvage(self, partition=NULL, volume=NULL, all=0, outName=NULL, showlog=0, parallel=NULL, tmpDir=NULL, orphans=NULL, localauth=0, tmpname=NULL, debug=0, nowrite=0, force=0, oktozap=0, rootfiles=0, salvagedirs=0, blockreads=0, ListResidencies=0, S...
AFS::BOS self
SETCODE(code);
done:
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
MODULE = AFS PACKAGE = AFS::PTS PREFIX = pts_
/* printf("pts DEBUG ubik_ClientDestroy\n"); */
code = ubik_ClientDestroy(server);
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
void
pts_id(server,object,anon=1)
AFS::PTS server
if (lids.idlist_val)
free(lids.idlist_val);
}
if (lnames.namelist_val)
safefree(lnames.namelist_val);
PUTBACK;
return;
}
}
else if (SvTYPE(SvRV(object)) == SVt_PVHV) {
int32 code = 0, id;
if (lids.idlist_val)
free(lids.idlist_val);
}
if (lnames.namelist_val)
safefree(lnames.namelist_val);
PUTBACK;
return;
}
}
void
if (lnames.namelist_val)
free(lnames.namelist_val);
}
if (lids.idlist_val)
safefree(lids.idlist_val);
PUTBACK;
return;
}
}
else if (SvTYPE(SvRV(object)) == SVt_PVHV) {
int32 code = 0;
if (lnames.namelist_val)
free(lnames.namelist_val);
}
if (lids.idlist_val)
safefree(lids.idlist_val);
PUTBACK;
return;
}
}
void
int32 code;
code = ubik_ClientDestroy(server);
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
void
kas_KAM_GetEntry(server,user,inst)
AFS::KAS server
if (code != 0)
flags = 0;
RETVAL = flags;
}
OUTPUT:
RETVAL
void
afs_ptsaccess2ascii(flags)
int32 flags
code = setpag();
SETCODE(code);
RETVAL = (code == 0);
}
OUTPUT:
RETVAL
void
afs_expandcell(cell)
char * cell
flag = (int) SvIV(ST(0));
convert_numeric_names = (flag != 0);
}
RETVAL = convert_numeric_names;
}
OUTPUT:
RETVAL
int32
afs_raise_exception(...)
CODE:
flag = (int) SvIV(ST(0));
raise_exception = (flag != 0);
}
RETVAL = raise_exception;
}
OUTPUT:
RETVAL
void
afs_configdir(...)
PPCODE:
_Inline/build/AI/ANN/Neuron_6185/INLINE.h view on Meta::CPAN
#define Inline_Stack_Vars dXSARGS
#define Inline_Stack_Items items
#define Inline_Stack_Item(x) ST(x)
#define Inline_Stack_Reset sp = mark
#define Inline_Stack_Push(x) XPUSHs(x)
#define Inline_Stack_Done PUTBACK
#define Inline_Stack_Return(x) XSRETURN(x)
#define Inline_Stack_Void XSRETURN(0)
#define INLINE_STACK_VARS Inline_Stack_Vars
#define INLINE_STACK_ITEMS Inline_Stack_Items
t/AI-Calibrate-KL.t view on Meta::CPAN
0.425 > SCORE >= 0.359 prob = 0.385
0.359 > SCORE >= 0.000 prob = 0.000
";
my $output = '';
open TOOUTPUT, '>', \$output or die "Can't open TOOUTPUT: $!";
my $stdout = select(TOOUTPUT);
print_mapping($calibrated_got);
close(TOOUTPUT);
select $stdout;
is(trim($output), trim($expected_mapping), "printed mapping");
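Beyond printing it, the calibrated mapping can be applied to fresh scores; a hedged sketch, assuming AI::Calibrate's documented C<score_prob> helper and import list (neither is exercised in this test excerpt):
  use AI::Calibrate qw(calibrate score_prob print_mapping);
  # $calibrated_got is the threshold/probability mapping built by calibrate()
  my $score = 0.40;
  my $prob  = score_prob( $calibrated_got, $score );
  print "score $score => estimated probability $prob\n";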
Instance/Instance.xs view on Meta::CPAN
instance->values[i] = (int) SvIV( *av_fetch(values, i, 0) );
}
RETVAL = instance;
}
OUTPUT:
RETVAL
char *
name (instance)
Instance* instance
CODE:
{
RETVAL = instance->name;
}
OUTPUT:
RETVAL
void
set_result (instance, result)
Instance* instance
Instance/Instance.xs view on Meta::CPAN
}
else {
RETVAL = instance->values[attribute];
}
}
OUTPUT:
RETVAL
int
result_int (instance)
Instance * instance
CODE:
{
RETVAL = instance->result;
}
OUTPUT:
RETVAL
void
DESTROY (instance)
Instance * instance
Instance/Instance.xs view on Meta::CPAN
$totals{ $v }++;
$tallies{ $v }{ $_->result_int }++;
}
*/
}
OUTPUT:
RETVAL
SAVEFREEPV(layers);
for (i = 0; i < num_layers; i++) {
layers[i] = SvIV(ST(i+1));
}
RETVAL = fann_create_standard_array(num_layers, layers);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)RETVAL);
struct fann *
SAVEFREEPV(layers);
for (i = 0; i < num_layers; i++) {
layers[i] = SvIV(ST(i+2));
}
RETVAL = fann_create_sparse_array(connection_rate, num_layers, layers);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)RETVAL);
struct fann *
SAVEFREEPV(layers);
for (i = 0; i < num_layers; i++) {
layers[i] = SvIV(ST(i+1));
}
RETVAL = fann_create_shortcut_array(num_layers, layers);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)RETVAL);
struct fann *
fann_new_from_file(klass, filename)
SV *klass;
char *filename;
CODE:
RETVAL = fann_create_from_file(filename);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)RETVAL);
void
fann_save(self, filename)
struct fann *self;
char * filename;
CODE:
RETVAL = !fann_save(self, filename);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)self);
fta_output
fann_train_data_new_from_file(klass, filename)
SV *klass;
const char *filename;
CODE:
RETVAL = fann_train_data_create_from_file(filename);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)RETVAL);
struct fann_train_data *
unsigned int num_data;
unsigned int num_input;
unsigned int num_output;
CODE:
RETVAL = fann_train_data_create(num_data, num_input, num_output);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)RETVAL);
void
Perl_croak(aTHX_ "input array is empty");
num_output = av_len(output) + 1;
if (!num_output)
Perl_croak(aTHX_ "output array is empty");
RETVAL = fann_train_data_create(num_data, num_input, num_output);
OUTPUT:
RETVAL
CLEANUP:
_check_error(aTHX_ (struct fann_error *)RETVAL);
/* we do that at cleanup to ensure that the just created object is
* freed if we croak */
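At the Perl level the XS constructors above surface roughly as follows; a hedged sketch in which the class names AI::FANN and AI::FANN::TrainData and the exact method spellings are assumptions inferred from the XS function names, not shown in this excerpt:
  use AI::FANN;
  # load a previously saved network (wraps fann_create_from_file above)
  my $ann = AI::FANN->new_from_file('xor.net');
  # load training data (wraps fann_train_data_create_from_file above)
  my $data = AI::FANN::TrainData->new_from_file('xor.data');
  # ... train against $data, then persist the network (wraps fann_save above)
  $ann->save('xor-trained.net');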
LibNeural.pm view on Meta::CPAN
# This allows declaration use AI::LibNeural ':all';
our %EXPORT_TAGS = ( 'all' => [ qw(
ALL
HIDDEN
INPUT
OUTPUT
) ] );
our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );
our @EXPORT = qw(
LibNeural.pm view on Meta::CPAN
=item $nn = AI::LibNeural->new(FILENAME)
Creates a new AI::LibNeural object from the supplied memory file.
=item $nn = AI::LibNeural->new(INPUTS,HIDDENS,OUTPUTS)
Creates a new AI::LibNeural object with INPUTS input nodes, HIDDENS hidden
nodes, and OUTPUTS output nodes.
=item $nn->train([I1,I2,...],[O1,O2,...],MINERR,TRAINRATE)
Completes a training cycle for the given inputs I1-IN, with the expected
results of O1-OM, where N is the number of inputs and M is the number of
LibNeural.pm view on Meta::CPAN
suitably trained.
=item NUM = $nn->get_layersize(WHICH)
Retrieves the number of nodes at the specified layer, WHICH. WHICH should be
one of ALL, INPUT, HIDDEN, OUTPUT. Useful mainly when a network has been loaded
from a file.
=item status = $nn->load(FILENAME)
=item status = $nn->save(FILENAME)
LibNeural.pm view on Meta::CPAN
=item ALL
The total number of nodes on all three layers
=item INPUT
The number of nodes on the input layer
=item HIDDEN
The number of nodes on the hidden layer
=item OUTPUT
The number of nodes on the output layer
=back
lib/AI/Logic/AnswerSet.pm view on Meta::CPAN
my @args = ("./dlv", "$_[1]");
system(@args) == 0
or die "system @args failed: $?";
open(STDOUT,">&SAVESTDOUT"); #close file and restore STDOUT
close OUTPUT;
}
sub executeAndSave { #Executes DLV and saves the output of the program written by the user in a file
lib/AI/Logic/AnswerSet.pm view on Meta::CPAN
my @args = ("./dlv --");
system(@args) == 0 or die "system @args failed: $?";
open(STDOUT,">&SAVESTDOUT"); #close file and restore STDOUT
close OUTPUT;
}
lib/AI/Logic/AnswerSet.pm view on Meta::CPAN
}
}
else {
print "INPUT ERROR\n";
}
return @returned_value;
}
lib/AI/Logic/AnswerSet.pm view on Meta::CPAN
my @facts;
if(@isAFile) {
open INPUT, "<", "$input";
my @rows = <INPUT>;
foreach my $row (@rows) {
if($row =~ /^(\w+)(\(((\w|\d|\.)+,?)*\))?\./) {
push @facts, $row;
}
}
close INPUT;
}
else {
my @str = split /\. /,$input;
foreach my $elem (@str) {
XS/ML.xs.inc view on Meta::CPAN
CODE:
m = matrix_sigmoid((Matrix*)a);
RETVAL = newSVuv((unsigned long)m);
OUTPUT:
RETVAL
SV *
_tanh(a)
unsigned long a;
XS/ML.xs.inc view on Meta::CPAN
CODE:
m = matrix_tanh((Matrix*)a);
RETVAL = newSVuv((unsigned long)m);
OUTPUT:
RETVAL
SV *
_d_tanh(a)
unsigned long a;
XS/ML.xs.inc view on Meta::CPAN
CODE:
m = matrix_d_tanh((Matrix*)a);
RETVAL = newSVuv((unsigned long)m);
OUTPUT:
RETVAL
SV *
_relu(a)
unsigned long a;
XS/ML.xs.inc view on Meta::CPAN
CODE:
m = matrix_ReLU((Matrix*)a);
RETVAL = newSVuv((unsigned long)m);
OUTPUT:
RETVAL
SV *
_d_relu(a)
unsigned long a;
XS/ML.xs.inc view on Meta::CPAN
CODE:
m = matrix_d_ReLU((Matrix*)a);
RETVAL = newSVuv((unsigned long)m);
OUTPUT:
RETVAL
SV *
_lrelu(a, v)
unsigned long a;
XS/ML.xs.inc view on Meta::CPAN
CODE:
m = matrix_LReLU((Matrix*)a, v);
RETVAL = newSVuv((unsigned long)m);
OUTPUT:
RETVAL
SV *
_d_lrelu(a, v)
unsigned long a;
XS/ML.xs.inc view on Meta::CPAN
CODE:
m = matrix_d_LReLU((Matrix*)a, v);
RETVAL = newSVuv((unsigned long)m);
OUTPUT:
RETVAL
SV *
_predict_binary_classification(a, v)
unsigned long a;
XS/ML.xs.inc view on Meta::CPAN
CODE:
m = predict_binary_classification((Matrix*)a, v);
RETVAL = newSVuv((unsigned long)m);
OUTPUT:
RETVAL
SV *
_softmax(a)
XS/ML.xs.inc view on Meta::CPAN
CODE:
m = matrix_softmax((Matrix*)a);
RETVAL = newSVuv((unsigned long)m);
OUTPUT:
RETVAL
SV *
_d_softmax(a)
unsigned long a;
XS/ML.xs.inc view on Meta::CPAN
CODE:
m = matrix_d_softmax((Matrix*)a);
RETVAL = newSVuv((unsigned long)m);
OUTPUT:
RETVAL
SV *
_d_sigmoid(a)
unsigned long a;
XS/ML.xs.inc view on Meta::CPAN
CODE:
m = matrix_d_sigmoid((Matrix*)a);
RETVAL = newSVuv((unsigned long)m);
OUTPUT:
RETVAL
SV *
_sigmoid_cost(x, y, w)
unsigned long x;
XS/ML.xs.inc view on Meta::CPAN
unsigned long w;
CODE:
RETVAL = newSVnv(sigmoid_cost((Matrix*)x, (Matrix*)y, (Matrix*)w));
OUTPUT:
RETVAL
SV *
_mini_batch(m, start, s, axis)
XS/ML.xs.inc view on Meta::CPAN
CODE:
r = mini_batch((Matrix*)m, start, s, axis);
RETVAL = newSVnv((unsigned long)r);
OUTPUT:
RETVAL
SV *
_accuracy(y, yatt)
unsigned long y;
unsigned long yatt;
CODE:
RETVAL = newSVnv(accuracy((Matrix*)y, (Matrix*)yatt));
OUTPUT:
RETVAL
SV *
_precision(y, yatt)
unsigned long y;
unsigned long yatt;
CODE:
RETVAL = newSVnv(precision((Matrix*)y, (Matrix*)yatt));
OUTPUT:
RETVAL
SV *
_recall(y, yatt)
unsigned long y;
unsigned long yatt;
CODE:
RETVAL = newSVnv(recall((Matrix*)y, (Matrix*)yatt));
OUTPUT:
RETVAL
SV *
_f1(y, yatt)
unsigned long y;
unsigned long yatt;
CODE:
RETVAL = newSVnv(f1((Matrix*)y, (Matrix*)yatt));
OUTPUT:
RETVAL
dSP;
PUSHMARK(SP);
XPUSHs(sv_2mortal(newSViv(index)));
XPUSHs(SWIG_NewPointerObj(SWIG_as_voidptr(recv), SWIGTYPE_p_MXNDArray, 0));
XPUSHs(SWIG_NewPointerObj(SWIG_as_voidptr(local), SWIGTYPE_p_MXNDArray, 0));
PUTBACK;
call_sv((SV*)callback, G_DISCARD);
}
}
static void KVStoreServer_callback(int head, const char *body, void* callback)
{
dSP;
PUSHMARK(SP);
XPUSHs(sv_2mortal(newSViv(head)));
XPUSHs(sv_2mortal(newSVpv(body, 0)));
PUTBACK;
call_sv((SV*)callback, G_DISCARD);
}
}
static void ExecutorMonitor_callback(const char* name, NDArrayHandle handle, void* callback)
{
dSP;
PUSHMARK(SP);
XPUSHs(sv_2mortal(newSVpv(name, 0)));
XPUSHs(SWIG_NewPointerObj(SWIG_as_voidptr(handle), SWIGTYPE_p_MXNDArray, 0));
PUTBACK;
call_sv((SV*)callback, G_DISCARD);
}
}
%}
AI-MaxEntropy.xs view on Meta::CPAN
XPUSHs(sv_2mortal(newSViv(k)));
XPUSHs(sv_2mortal(newRV_noinc((SV*)av_lambda)));
XPUSHs(sv_2mortal(newRV_noinc((SV*)av_d_lambda)));
XPUSHs(sv_2mortal(newSVnv(lambda_norm)));
XPUSHs(sv_2mortal(newSVnv(d_lambda_norm)));
PUTBACK;
call_sv(progress_cb, G_ARRAY);
SPAGAIN;
sv_r = POPs;
r = SvIV(sv_r);
PUTBACK;
FREETMPS;
LEAVE;
while (SvREFCNT(sv_r) > 0) { SvREFCNT_dec(sv_r); }
if (r != 0) break;
}
AI-MaxEntropy.xs view on Meta::CPAN
av_extend(av_lambda, f_num - 1);
for (i = 0; i < f_num; i++)
av_store(av_lambda, i, newSVnv(lambda[i]));
RETVAL = newRV_noinc((SV*)av_lambda);
TRACE("leave");
OUTPUT:
RETVAL
CLEANUP:
free(p_f);
free(p1_f);
free(lambda);
lib/AI/NNEasy.pm view on Meta::CPAN
SV* set_out = *av_fetch(set_av, i+1 ,0) ;
PUSHMARK(SP) ;
XPUSHs( nn );
XPUSHs( set_in );
PUTBACK ;
call_method("run", G_DISCARD) ;
PUSHMARK(SP) ;
XPUSHs( nn );
XPUSHs( set_out );
PUTBACK ;
call_method("learn", G_SCALAR) ;
}
for (i = 0 ; i <= av_len(set_av) ; i+=2) {
SV* set_in = *av_fetch(set_av, i ,0) ;
SV* set_out = *av_fetch(set_av, i+1 ,0) ;
PUSHMARK(SP) ;
XPUSHs( nn );
XPUSHs( set_in );
PUTBACK ;
call_method("run", G_DISCARD) ;
PUSHMARK(SP) ;
XPUSHs( nn );
XPUSHs( set_out );
PUTBACK ;
call_method("RMSErr", G_SCALAR) ;
SPAGAIN ;
ret = POPs ;
er = SvNV(ret) ;
lib/AI/NNEasy.pm view on Meta::CPAN
print "@{$set[$i]}) => @$out\n" ;
}
=head1 METHODS
=head2 new ( FILE , @OUTPUT_TYPES , ERROR_OK , IN_SIZE , OUT_SIZE , @HIDDEN_LAYERS , %CONF )
=over 4
=item FILE
The file path to save the NN. Default: 'nneasy.nne'.
=item @OUTPUT_TYPES
An array of outputs that the NN can have, so the NN can find the nearest number in this
list to give you the right output.
=item ERROR_OK
The maximal error of the calculated output.
If not defined, ERROR_OK will be calculated as the minimal difference between 2 types in
@OUTPUT_TYPES divided by 2:
@OUTPUT_TYPES = [0 , 0.5 , 1] ;
ERROR_OK = (1 - 0.5) / 2 = 0.25 ;
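A small sketch of that default calculation, purely to illustrate the rule above (this is not code taken from AI::NNEasy):
  my @OUTPUT_TYPES = ( 0, 0.5, 1 );
  my @sorted = sort { $a <=> $b } @OUTPUT_TYPES;
  my $min_gap;
  for my $i ( 1 .. $#sorted ) {
      my $gap = $sorted[$i] - $sorted[ $i - 1 ];
      $min_gap = $gap if !defined $min_gap || $gap < $min_gap;
  }
  my $ERROR_OK = $min_gap / 2;    # 0.25 for the list above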
=item IN_SIZE
lib/AI/NNEasy.pm view on Meta::CPAN
$nn->learn( [0,1] , [1] , 10 ) ;
=back
=head2 learn_set (@SET , OK_OUTPUTS , LIMIT , VERBOSE)
Learn a set of inputs until get the right error for the outputs.
=over 4
=item @SET
A list of inputs and outputs.
=item OK_OUTPUTS
Minimal number of outputs that should be OK when calculating the errors.
By default I<OK_OUTPUTS> should have the same size as the number of different
inputs in the @SET.
=item LIMIT
Limit of iterations when learning. Default: 30000
lib/AI/NNEasy.pm view on Meta::CPAN
If TRUE turn verbose method ON when learning.
=back
=head2 get_set_error (@SET , OK_OUTPUTS)
Get the actual error of a set in the NN. If the returned error is bigger than
I<ERROR_OK> defined on I<new()> you should learn or relearn the set.
=head2 run (@INPUT)
Run an input and return the output calculated by the NN based on what the NN has already learned.
=head2 run_get_winner (@INPUT)
Same as I<run()>, but the output returned will be the nearest output value from the
I<@OUTPUT_TYPES> defined at I<new()>.
For example, an input I<[0,1]> that has learned
the output I<[1]> will actually return something like 0.98324 as output and
not 1, since the error should never be exactly 0. So, with I<run_get_winner()>
we take the output of I<run()>, let's say that is 0.98324, and find what output
lib/AI/NNEasy.pm view on Meta::CPAN
I<backprop> as the learning algorithm. With I<backprop> it passes the inputs through
the Neural Network and adjusts the I<weights> using random numbers until it finds
a set of I<weights> that gives us the right output.
The secret of a NN is the number of hidden layers and nodes/neurons for each layer.
Basically the best way to define the hidden layers is 1 layer of (INPUT_NODES+OUTPUT_NODES) nodes.
So, a network with 2 input nodes and 1 output node should have 3 nodes in the hidden layer.
This definition exists because the number of inputs defines the maximal variability of
the inputs (N**2 for boolean inputs), and the output defines whether the variability is reduced by some logic restriction, as
in the XOR example, where we have 2 inputs and 1 output, so the hidden layer has 3 nodes. And as we can see in the
logic we have 3 groups of inputs:
lib/AI/NNFlex/Backprop.pm view on Meta::CPAN
use AI::NNFlex::Dataset;
my $dataset = AI::NNFlex::Dataset->new([
[INPUTARRAY],[TARGETOUTPUT],
[INPUTARRAY],[TARGETOUTPUT]]);
my $sqrError = 10;
while ($sqrError >0.01)
lib/AI/NNFlex/Backprop.pm view on Meta::CPAN
The Fahlman constant modifies the slope of the error curve. 0.1 is the standard value for everything, and speeds the network up immensely. If no Fahlman constant is set, the network will default to 0.1
=head2 AI::NNFlex::Dataset
new ( [[INPUT VALUES],[OUTPUT VALUES],
[INPUT VALUES],[OUTPUT VALUES],..])
=head2 INPUT VALUES
These should be comma separated values. They can be applied to the network with ::run or ::learn
=head2 OUTPUT VALUES
These are the intended or target output values. Comma separated. These will be used by ::learn
=head1 METHODS
BackProp.pm view on Meta::CPAN
# using the code we gave it when it registered itself with us. The code is in $sid,
# (synapse ID) and we use that to track the weight of the connection.
# This line simply multiplies the value by its weight and gets the integer from it.
$self->{SYNAPSES}->{LIST}->[$sid]->{VALUE} = intr($value * $self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT});
$self->{SYNAPSES}->{LIST}->[$sid]->{FIRED} = 1;
$self->{SYNAPSES}->{LIST}->[$sid]->{INPUT} = $value;
# Debugger
AI::NeuralNet::BackProp::out1("\nRecieved input of $value, weighted to $self->{SYNAPSES}->{LIST}->[$sid]->{VALUE}, synapse weight is $self->{SYNAPSES}->{LIST}->[$sid]->{WEIGHT} (sid is $sid for $self).\n");
AI::NeuralNet::BackProp::out1((($self->input_complete())?"All synapses have fired":"Not all synapses have fired"));
AI::NeuralNet::BackProp::out1(" for $self.\n");
BackProp.pm view on Meta::CPAN
# Loops thru and outputs to every neuron that this
# neuron is registered as synapse of.
sub output {
my $self = shift;
my $size = $self->{OUTPUTS}->{SIZE} || 0;
my $value = $self->get_output();
for (0..$size-1) {
AI::NeuralNet::BackProp::out1("Outputing to $self->{OUTPUTS}->{LIST}->[$_]->{PKG}, index $_, a value of $value with ID $self->{OUTPUTS}->{LIST}->[$_]->{ID}.\n");
$self->{OUTPUTS}->{LIST}->[$_]->{PKG}->input($self->{OUTPUTS}->{LIST}->[$_]->{ID},$value);
}
}
# Used internally by output().
sub get_output {
BackProp.pm view on Meta::CPAN
if(0) {
# Formula by Steve Purkis
# Converges very fast for low-value inputs. Has trouble converging on high-value
# inputs. Feel free to play and try to get it to work for high values.
my $delta = $ammount * ($what - $value) * $self->{SYNAPSES}->{LIST}->[$i]->{INPUT};
$self->{SYNAPSES}->{LIST}->[$i]->{WEIGHT} += $delta;
$self->{SYNAPSES}->{LIST}->[$i]->{PKG}->weight($ammount,$what);
}
# This formula in use by default is original by me (Josiah Bryan) as far as I know.
BackProp.pm view on Meta::CPAN
# then we ask that synapse to let us register as an input connection and we
# save the sid that the output synapse returns.
sub connect {
my $self = shift;
my $to = shift;
my $oid = $self->{OUTPUTS}->{SIZE} || 0;
AI::NeuralNet::BackProp::out1("Connecting $self to $to at $oid...\n");
$self->{OUTPUTS}->{LIST}->[$oid]->{PKG} = $to;
$self->{OUTPUTS}->{LIST}->[$oid]->{ID} = $to->register_synapse($self);
$self->{OUTPUTS}->{SIZE} = ++$oid;
return $self->{OUTPUTS}->{LIST}->[$oid]->{ID};
}
1;
package AI::NeuralNet::BackProp;
BackProp.pm view on Meta::CPAN
my $self = shift;
my $sid = shift;
my $value = shift;
my $size = $self->{PARENT}->{DIV};
my $flag = 1;
$self->{OUTPUT}->[$sid]->{VALUE} = $self->{PARENT}->intr($value);
$self->{OUTPUT}->[$sid]->{FIRED} = 1;
AI::NeuralNet::BackProp::out1 "Received value $self->{OUTPUT}->[$sid]->{VALUE} and sid $sid, self $self.\n";
}
# Here we simply collect the value of every neuron connected to this
# one from the layer below us and return an array ref to the final map..
sub map {
BackProp.pm view on Meta::CPAN
my $value;
AI::NeuralNet::BackProp::out1 "Num output neurons: $out, Input neurons: $size, Division: $divide\n";
for(0..$out-1) {
$value=0;
for my $a (0..$divide-1) {
$value += $self->{OUTPUT}->[($_*$divide)+$a]->{VALUE};
AI::NeuralNet::BackProp::out1 "\$a is $a, index is ".(($_*$divide)+$a).", value is $self->{OUTPUT}->[($_*$divide)+$a]->{VALUE}\n";
}
$map[$_] = AI::NeuralNet::BackProp->intr($value/$divide);
AI::NeuralNet::BackProp::out1 "Map position $_ is $map[$_] in @{[\@map]} with self set to $self.\n";
$self->{OUTPUT}->[$_]->{FIRED} = 0;
}
my $ret=\@map;
return $self->{PARENT}->_range($ret);
}
1;
- added auto-README generation to Makefile.PL
- update copyright notice
0.15 Wed Jul 11 00:13:02 2012
- tidy up build a bit
- fixed warnings from CODE blocks using RETVAL without OUTPUT
blocks in newer perls
- yet another typemap workaround. this time we have a 5.6.2 with
a new ParseXS and an old xsubpp. i wont even mention the problem
i found in old Test::More finding this. i hope it never becomes
an issue. (Note: since this is an almost 3 year old issue and
sub input {
my $self = shift;
my $input = shift;
my $from_id = shift;
$self->{_parent}->d("GOT INPUT [$input] FROM [$from_id]\n",1);
$self->{_inputs}->[$from_id] = $self->{_parent}->intr($input);
}
sub get_outputs {
my $self = shift;
{
CHECK_BDD (bdd);
RETVAL = (bdd < varcount * 2) ? (bdd / 2 - 1) : bddlevel2var[LEVEL (bdd)];
}
OUTPUT:
RETVAL
void printSet(bdd)
int bdd;
PPCODE:
CHECK_BDD (ret);
RETVAL = ret;
}
OUTPUT:
RETVAL
int getZero()
CODE:
{
CHECK_BDD (ret);
RETVAL=ret;
}
OUTPUT:
RETVAL
int createBDD()
CODE:
// bddnodes[ret].refcou = 1; // why does BuDDy sets the initial
// refcount to MAXREF (0x3FF) ?
RETVAL=ret;
}
OUTPUT:
RETVAL
int getVarCount()
CODE:
{
RETVAL = varcount;
}
OUTPUT:
RETVAL
int getBDD(index)
int index
CODE:
fprintf (stderr, "[JBUDDY.getBDD] requested bad BDD: %d\n", index);
RETVAL=bdd_false();
}
RETVAL=bdd_ithvar (index);
}
OUTPUT:
RETVAL
int ref(bdd)
int bdd
CODE:
{
bdd_addref (bdd);
RETVAL=bdd;
}
OUTPUT:
RETVAL
void localDeref(bdd)
int bdd
CODE:
bdd = bdd_apply (l, r, bddop_and);
bdd_addref (bdd);
RETVAL=bdd;
}
OUTPUT:
RETVAL
int or(l, r)
int l
int r
bdd = bdd_apply (l, r, bddop_or);
bdd_addref (bdd);
RETVAL=bdd;
}
OUTPUT:
RETVAL
int andTo(l, r)
int l
int r
ret = bdd_apply (l, r, bddop_and);
bdd_addref (ret);
bdd_delref (l);
RETVAL=ret;
}
OUTPUT:
RETVAL
int orTo(l, r)
int l
int r
ret = bdd_apply (l, r, bddop_or);
bdd_addref (ret);
bdd_delref (l);
RETVAL=ret;
}
OUTPUT:
RETVAL
int nand(l, r)
int l
int r
bdd = bdd_apply (l, r, bddop_nand);
bdd_addref (bdd);
RETVAL=bdd;
}
OUTPUT:
RETVAL
int nor(l, r)
int l
int r
bdd = bdd_apply (l, r, bddop_nor);
bdd_addref (bdd);
RETVAL=bdd;
}
OUTPUT:
RETVAL
int xor(l, r)
int l
int r
bdd = bdd_apply (l, r, bddop_xor);
bdd_addref (bdd);
RETVAL=bdd;
}
OUTPUT:
RETVAL
int ite(if_, then_, else_)
int if_
int then_
bdd = bdd_ite (if_, then_, else_);
bdd_addref (bdd);
RETVAL=bdd;
}
OUTPUT:
RETVAL
int imp(l, r)
int l
int r
bdd = bdd_apply (l, r, bddop_imp);
bdd_addref (bdd);
RETVAL=bdd;
}
OUTPUT:
RETVAL
int biimp(l, r)
int l
int r
bdd = bdd_apply (l, r, bddop_biimp);
bdd_addref (bdd);
RETVAL=bdd;
}
OUTPUT:
RETVAL
int not(bdd)
int bdd
CODE:
tmp = bdd_not (bdd);
bdd_addref (tmp);
RETVAL = tmp;
}
OUTPUT:
RETVAL
int exists(bdd, cube)
int bdd
int cube
tmp = bdd_exist (bdd, cube);
bdd_addref (tmp);
RETVAL=tmp;
}
OUTPUT:
RETVAL
int forall(bdd, cube)
int bdd
int cube
tmp = bdd_forall (bdd, cube);
bdd_addref (tmp);
RETVAL=tmp;
}
OUTPUT:
RETVAL
int relProd(l, r, cube)
int l
int r
bdd = bdd_appex (l, r, bddop_and, cube);
bdd_addref (bdd);
RETVAL=bdd;
}
OUTPUT:
RETVAL
int restrict(r, var)
int r
int var
bdd = bdd_restrict (r, var);
bdd_addref (bdd);
RETVAL=bdd;
}
OUTPUT:
RETVAL
int constrain(f, c)
int f
int c
bdd = bdd_constrain (f, c);
bdd_addref (bdd);
RETVAL=bdd;
}
OUTPUT:
RETVAL
long createPairI(old, new_,size)
AV *old
AV *new_
free(newarr);
free(oldarr);
RETVAL=pair;
}
OUTPUT:
RETVAL
void deletePair(pair)
long pair
PPCODE:
tmp = bdd_replace (bdd, (bddPair *) pair);
bdd_addref (tmp);
RETVAL=tmp;
}
OUTPUT:
RETVAL
void showPair(pair)
int pair
PPCODE:
tmp = bdd_support (bdd);
bdd_addref (tmp);
RETVAL=tmp;
}
OUTPUT:
RETVAL
int nodeCount(bdd)
int bdd
CODE:
{
CHECK_BDD (bdd);
RETVAL=bdd_nodecount (bdd);
}
OUTPUT:
RETVAL
int satOne(bdd)
int bdd
CODE:
tmp = bdd_satone (bdd);
bdd_addref (tmp);
RETVAL=tmp;
}
OUTPUT:
RETVAL
double satCount__I(bdd)
int bdd
CODE:
sat /= div;
RETVAL=sat;
}
OUTPUT:
RETVAL
double satCount__II(bdd, vars_ignored)
int bdd
int vars_ignored
// 2 + varnum + 2 * vars_ignored);
//
RETVAL=(double) bdd_satcount (bdd) / pow(2, 2 + varnum + vars_ignored);
}
OUTPUT:
RETVAL
void gc()
PPCODE:
{
int checkPackage()
CODE:
{
RETVAL=(checkBuddy () ? 1 : 0);
}
OUTPUT:
RETVAL
void debugPackage()
PPCODE:
{
CHECK_BDD (bdd);
RETVAL=(bddnodes[bdd].refcou);
}
OUTPUT:
RETVAL
int internal_isconst(bdd)
int bdd
CODE:
CHECK_BDD (bdd);
RETVAL=(bdd == bddfalse) || (bdd == bddtrue);
}
OUTPUT:
RETVAL
int internal_constvalue(bdd)
int bdd
CODE:
if (bdd == bddfalse)
RETVAL=0;
else
RETVAL=1;
}
OUTPUT:
RETVAL
int internal_iscomplemented(bdd)
int bdd
CODE:
{
CHECK_BDD (bdd);
RETVAL=0; // no CE in BuDDy
}
OUTPUT:
RETVAL
int internal_then(bdd)
int bdd
CODE:
{
CHECK_BDD (bdd);
RETVAL=bdd_high (bdd);
}
OUTPUT:
RETVAL
int internal_else(bdd)
int bdd
CODE:
{
CHECK_BDD (bdd);
RETVAL=bdd_low (bdd);
}
OUTPUT:
RETVAL
void verbose(verb_)
int verb_
PPCODE:
varsarr[i] = SvNV(*elem);
}
RETVAL = makeSet(varsarr, size, 0);
}
OUTPUT:
RETVAL
int makeSetII(vars, size, offset)
AV *vars
int size
varsarr[i] = SvNV(*elem);
}
RETVAL = makeSet(varsarr, size, offset);
}
OUTPUT:
RETVAL
int debugBDD(bdd)
int bdd
CODE:
CHECK_BDD (bdd);
dumpBDD_info (bdd);
RETVAL=(checkBDD (bdd) ? 1 : 0);
}
OUTPUT:
RETVAL
void reorder_enableDynamic(enable)
int enable
PPCODE:
MPL-1.1.txt view on Meta::CPAN
(INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL
DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED CODE,
OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR
ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY
CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL,
WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER
COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN
INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF
LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY
RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW
PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE
PUSHmortal|5.009002||p
PUSHn|||
PUSHp|||
PUSHs|||
PUSHu|5.004000||p
PUTBACK|||
PerlIO_clearerr||5.007003|
PerlIO_close||5.007003|
PerlIO_context_layers||5.009004|
PerlIO_eof||5.007003|
PerlIO_error||5.007003|
eval_sv(sv, G_SCALAR);
SvREFCNT_dec(sv);
SPAGAIN;
sv = POPs;
PUTBACK;
if (croak_on_error && SvTRUE(GvSV(errgv)))
croak(SvPVx(GvSV(errgv), na));
return sv;
t/06-validate.t view on Meta::CPAN
use constant TRAINING_DATA => $FindBin::Bin . "/book_list_train.csv";
use constant VALIDATION_DATA => $FindBin::Bin . "/book_list_validate.csv";
use constant VALIDATION_DATA_OUTPUT_FILE => $FindBin::Bin . "/book_list_validate-filled.csv";
use constant MODULE_NAME => "AI::Perceptron::Simple";
use constant WANT_STATS => 1;
use constant IDENTIFIER => "book_name";
# 36 headers
t/06-validate.t view on Meta::CPAN
# with new output file
stdout_like {
ok ( $perceptron->validate( {
stimuli_validate => VALIDATION_DATA,
predicted_column_index => 4,
results_write_to => VALIDATION_DATA_OUTPUT_FILE
} ),
"Validate succedded!" );
} qr/book_list_validate\-filled\.csv/, "Correct output for validate when saving to NEW file";
ok( -e VALIDATION_DATA_OUTPUT_FILE, "New validation file found" );
isnt( -s VALIDATION_DATA_OUTPUT_FILE, 0, "New output file is not empty" );
done_testing;
# besiyata d'shmaya
lib/AI/PredictionClient/CPP/Typemaps/more_typemaps_STL_String.txt view on Meta::CPAN
TYPEMAP
std::string T_STD_STRING
std::string* T_STD_STRING_PTR
INPUT
T_STD_STRING
$var = std::string( SvPV_nolen( $arg ), SvCUR( $arg ) );
T_STD_STRING_PTR
$var = new std::string( SvPV_nolen( $arg ), SvCUR( $arg ) );
OUTPUT
T_STD_STRING
$arg = newSVpvn( $var.c_str(), $var.length() );
T_STD_STRING_PTR
$arg = newSVpvn( $var->c_str(), $var->length() );
lib/AI/Prolog/Engine/Primitives.pm view on Meta::CPAN
my ( $self, $term, $c ) = @_;
$self->_splice_goal_list($term);
CONTINUE;
};
my $HELP_OUTPUT;
$PRIMITIVES[31] = sub { # help/0
_load_builtins();
if ( not $HELP_OUTPUT ) {
$HELP_OUTPUT = "Help is available for the following builtins:\n\n";
my @predicates = sort keys %DESCRIPTION_FOR;
my $length = length $LONGEST_PREDICATE;
my $columns = 5;
my $format = join ' ' => ("%-${length}s") x $columns;
while (@predicates) {
lib/AI/Prolog/Engine/Primitives.pm view on Meta::CPAN
for ( 1 .. $columns ) {
push @row => @predicates
? shift @predicates
: '';
}
$HELP_OUTPUT .= sprintf $format => @row;
$HELP_OUTPUT .= "\n";
}
$HELP_OUTPUT .= "\n";
}
AI::Prolog::Engine::_print($HELP_OUTPUT);
CONTINUE;
};
$PRIMITIVES[32] = sub { # help/1
my ( $self, $term, $c ) = @_;
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubEnformerGeneExprPredModel.pod view on Meta::CPAN
IPerl->png( bytestream => path($plot_output_path)->slurp_raw );
}
B<DISPLAY>:
=for html <span style="display:inline-block;margin-left:1em;"><p><em>[inline base64 PNG plot omitted]</em></p></span>
=head2 Parts of the original notebook that fall outside the scope
In the original notebook, there are several more steps that have not been ported here:
PUSHmortal|5.009002||p
PUSHn|||
PUSHp|||
PUSHs|||
PUSHu|5.004000||p
PUTBACK|||
PerlIO_clearerr||5.007003|
PerlIO_close||5.007003|
PerlIO_eof||5.007003|
PerlIO_error||5.007003|
PerlIO_fileno||5.007003|
eval_sv(sv, G_SCALAR);
SvREFCNT_dec(sv);
SPAGAIN;
sv = POPs;
PUTBACK;
if (croak_on_error && SvTRUE(GvSV(errgv)))
croak(SvPVx(GvSV(errgv), na));
return sv;