CONTRIBUTING view on Meta::CPAN
<<<=== COMMITMENT TO FREE & OPEN SOURCE SOFTWARE ===>>>
Auto-Parallel Technologies, Inc. is committed to maintaining the free-and-open-source software (FOSS) basis of the APTech Family.
If your APTech Family contribution is accepted and merged into an official APTech Family source repository, then your contribution is automatically published online with FOSS licensing, currently the Apache License Version 2.0.
<<<=== EMPLOYER COPYRIGHT DISCLAIMER AGREEMENT ===>>>
The file named EMPLOYERS.pdf contains the Employer Copyright Disclaimer Agreement. If you are employed or work as an independent contractor, and either your job involves computer programming or you have executed an agreement giving your employer or ...
<<<=== OTHER CONTRIBUTORS ===>>>
If anyone other than yourself has written software source code or documentation as part of your APTech Family contribution, then they must submit their contributions themselves under the terms of the APTech Family Copyright Assignment Agreement above...
Please be sure you DO NOT STUDY OR INCLUDE any 3rd-party or public-domain intellectual property as part of your APTech Family contribution, including but not limited to: source code; documentation; copyrighted, trademarked, or patented components; or...
<<<=== RECOGNITION ===>>>
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2022 Auto-Parallel Technologies, Inc
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod view on Meta::CPAN
=back
/* From <tensorflow/c/c_api.h> */
TF_CAPI_EXPORT extern void TF_DeleteServer(TF_Server* server);
=head2 TF_RegisterLogListener
=over 2
Register a listener method that processes printed messages.
If any listeners are registered, the print operator will call all listeners
with the printed messages and immediately return without writing to the
logs.
=back
/* From <tensorflow/c/c_api.h> */
TF_CAPI_EXPORT extern void TF_RegisterLogListener(
void (*listener)(const char*));
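As a rough, hypothetical sketch only (the Perl-side binding is not shown in this excerpt), a listener could be registered from Perl through an FFI::Platypus closure, assuming C<$ffi> is an FFI::Platypus instance with the TensorFlow C library loaded:

  # Hypothetical sketch: route TensorFlow's printed messages through Perl.
  my $listener = $ffi->closure( sub {
      my ($message) = @_;
      warn "[TF] $message";
  });
  $ffi->function( TF_RegisterLogListener => ['(string)->void'] => 'void' )
      ->call( $listener );
  # Keep $listener referenced for as long as it should stay registered.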
=head2 TF_RegisterFilesystemPlugin
lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod view on Meta::CPAN
TF_OpDefinitionBuilder* builder, bool is_aggregate);
=head2 TF_OpDefinitionBuilderSetIsStateful
=over 2
Sets the is_stateful property of the builder to the given value.
The op built by this builder is stateful if its behavior depends on some
state beyond its input tensors (e.g. variable reading op) or if it has a
side-effect (e.g. printing or asserting ops). Equivalently, stateless ops
must always produce the same output for the same input and have no
side-effects.
By default Ops may be moved between devices. Stateful ops should either not
be moved, or should only be moved if that state can also be moved (e.g. via
some sort of save / restore). Stateful ops are guaranteed to never be
optimized away by Common Subexpression Elimination (CSE).
=back
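As a hedged sketch (not part of this excerpt; the exact Perl binding and the handling of the C C<bool> argument are assumptions), marking a custom op as stateful could look like this, with C<$builder> holding an opaque C<TF_OpDefinitionBuilder> pointer:

  # Hypothetical sketch; the C bool argument is passed as a single byte here.
  $ffi->function( TF_OpDefinitionBuilderSetIsStateful =>
      [ 'opaque', 'uint8' ] => 'void'
  )->call( $builder, 1 );   # e.g. an op that reads or writes a variable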
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
my $from_xy = join ",", $box_corners_yx_img->slice('-1:0,(0)')->list;
my $to_xy = join ",", $box_corners_yx_img->slice('-1:0,(1)')->list;
my $label_xy = join ",", $box_corners_yx_img->at(1,1), $box_corners_yx_img->at(0,1);
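# (Added note) slice('-1:0,(0)') reverses the model's (y,x) corner ordering
# into the (x,y) ordering Gnuplot expects and selects the first box corner;
# '(1)' selects the opposite corner, and the label is anchored at that corner.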
(
[ object => [ "rect" =>
from => $from_xy, to => $to_xy,
qq{front fs empty border lc rgb "$lc_rgb" lw 5} ], ],
[ label => [
sprintf("%s: %.1f",
$subset{detection_class_labels}[$idx],
100*$subset{detection_scores}->at($idx,0) ) =>
at => $label_xy, 'left',
offset => 'character 0,-0.25',
qq{font ",12" boxed front tc rgb "#ffffff"} ], ],
)
} 0..$subset{detection_boxes}->dim(1)-1
);
$gp->plot(
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubEnformerGeneExprPredModel.pod view on Meta::CPAN
my $offset = ($width-1) % 2;
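# (Added note) $offset is 1 for even target widths and 0 for odd ones, so the
# extra base of an even-length window ends up on the upstream (start) side of
# the centre and the resized interval has exactly the requested length.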
$new_interval->start( $center - $half - $offset );
$new_interval->end( $center + $half );
return $new_interval;
}
use overload '""' => \&_op_stringify;
sub _op_stringify { sprintf "%s:%s", $_[0]->seq_id // "(no sequence)", $_[0]->to_FTstring }
}
#####
{
say "Testing interval resizing:\n";
sub _debug_resize {
my ($interval, $to, $msg) = @_;
my $resized_interval = $interval->resize($to);
die "Wrong interval size for $interval --($to)--> $resized_interval"
unless $resized_interval->length == $to;
say sprintf "Interval: %s -> %s, length %2d : %s",
$interval,
$resized_interval, $resized_interval->length,
$msg;
}
for my $interval_spec ( [4, 8], [5, 8], [5, 9], [6, 9]) {
my ($start, $end) = @$interval_spec;
my $test_interval = Interval->new( -seq_id => 'chr11', -start => $start, -end => $end );
say sprintf "Testing interval %s with length %d", $test_interval, $test_interval->length;
say "-----";
for(0..5) {
my $base = $test_interval->length;
my $to = $base + $_;
_debug_resize $test_interval, $to, "$base -> $to (+ $_)";
}
say "";
}
}
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubEnformerGeneExprPredModel.pod view on Meta::CPAN
my $pad_upstream = 'N' x List::Util::max( -($interval->start-1), 0 );
my $pad_downstream = 'N' x List::Util::max( $interval->end - $chrom_length, 0 );
return join '', $pad_upstream, $seq, $pad_downstream;
}
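# (Added illustration, hypothetical values) an interval running from -3 to 10
# on an 8-base chromosome gets padded on both sides:
#   'N' x max( -(-3 - 1), 0 )  =>  'NNNN'   # 4 bases missing upstream
#   'N' x max( 10 - 8,    0 )  =>  'NN'     # 2 bases missing downstream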
sub seq_info {
my ($seq, $n) = @_;
$n ||= 10;
if( length $seq > $n ) {
sprintf "%s...%s (length %d)", uc substr($seq, 0, $n), uc substr($seq, -$n), length $seq;
} else {
sprintf "%s (length %d)", uc $seq, length $seq;
}
}
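# (Added sketch, not from the notebook) seq_info() upper-cases its input and
# abbreviates anything longer than $n bases to its two ends:
#   seq_info('acgtacgt');   # "ACGTACGT (length 8)"
#   seq_info('a' x 30);     # "AAAAAAAAAA...AAAAAAAAAA (length 30)"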
####
{
say "Testing sequence extraction:";
say "1 base: ", seq_info
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
my $http = HTTP::Tiny->new;
my $response = $http->get( $uri );
die "Could not fetch image from $uri" unless $response->{success};
say "Downloaded $uri";
my $img = Imager->new;
$img->read( data => $response->{content} );
my $rescaled = imager_scale_to($img, $image_size);
say sprintf "Rescaled image from [ %d x %d ] to [ %d x %d ]",
$img->getwidth, $img->getheight,
$rescaled->getwidth, $rescaled->getheight;
my $padded = imager_paste_center_pad($rescaled, $image_size,
# ARGB fits in 32-bits (uint32_t)
channels => 4
);
say sprintf "Padded to [ %d x %d ]", $padded->getwidth, $padded->getheight;
# Create PDL ndarray from Imager data in-memory.
my $data;
$padded->write( data => \$data, type => 'raw' )
or die "could not write ". $padded->errstr;
# $data is packed as PDL->dims == [w,h] with ARGB pixels
# $ PDL::howbig(ulong) # 4
my $pdl_raw = zeros(ulong, $padded->getwidth, $padded->getheight);
${ $pdl_raw->get_dataref } = $data;
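# (Added note) after raw bytes are written through the dataref, PDL normally
# needs $pdl_raw->upd_data to be called so the ndarray sees the new buffer;
# that call is presumably in the lines elided from this excerpt.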
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
while( my ($i, $label_index) = each @top_for_image ) {
my $class_index = $includes_background_class ? $label_index : $label_index + 1;
push @rows, [ (
$i + 1,
$class_index,
$labels[$class_index],
$probabilities_batched->at($label_index,$batch_idx),
) ];
}
say generate_table( rows => [ $header, @rows ], header_row => 1 );
print "\n";
}
}
my $p_approx_batched = $probabilities_batched->sumover->approx(1, 1e-5);
p $p_approx_batched;
say "All probabilities sum up to approximately 1" if $p_approx_batched->all->sclr;
use Filesys::DiskUsage qw/du/;
my $total = du( { 'human-readable' => 1, dereference => 1 },
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
Downloaded https://upload.wikimedia.org/wikipedia/commons/b/b0/Bengal_tiger_%28Panthera_tigris_tigris%29_female_3_crop.jpg
Rescaled image from [ 4500 x 3000 ] to [ 224 x 149 ]
Padded to [ 224 x 224 ]
Downloaded https://upload.wikimedia.org/wikipedia/commons/8/80/Turtle_golfina_escobilla_oaxaca_mexico_claudio_giovenzana_2010.jpg
Rescaled image from [ 2000 x 1329 ] to [ 224 x 149 ]
Padded to [ 224 x 224 ]
B<STREAM (STDERR)>:
=for html <span style="display:inline-block;margin-left:1em;"><pre style="display: block"><code><span style="color: #cc66cc;">PDL</span><span style="color: #33ccff;"> {</span><span style="">
</span><span style="color: #6666cc;">Data </span><span style=""> : </span><span style="color: #669933;">too long to print</span><span style="">
</span><span style="color: #6666cc;">Type </span><span style=""> : </span><span style="color: #cc66cc;">float</span><span style="">
</span><span style="color: #6666cc;">Shape </span><span style=""> : </span><span style="color: #33ccff;">[</span><span style="color: #9999cc;">3 224 224 12</span><span style="color: #33ccff;">]</span><span style="">
</span><span style="color: #6666cc;">Nelem </span><span style=""> : </span><span style="color: #dd6;">1806336</span><span style="">
</span><span style="color: #6666cc;">Min </span><span style=""> : </span><span style="color: #f66;">0</span><span style="">
</span><span style="color: #6666cc;">Max </span><span style=""> : </span><span style="color: #99f;">1</span><span style="">
</span><span style="color: #6666cc;">Badflag </span><span style=""> : </span><span style="color: #2c2;">No</span><span style="">
</span><span style="color: #6666cc;">Has Bads</span><span style=""> : </span><span style="color: #2c2;">No</span><span style="">
</span><span style="color: #33ccff;">}</span><span style="">
</span><span style="color: #cc66cc;">AI::TensorFlow::Libtensorflow::Tensor</span><span style=""> </span><span style="color: #33ccff;">{</span><span style="">
</span><span style="color: #6666cc;">Type </span><span style=""> </span><span style="color: #cc66cc;">FLOAT</span><span style="">
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
$$ {\displaystyle \sigma (\mathbf {z} )_{i}={\frac {e^{z_{i}}}{\sum _{j=1}^{K}e^{z_{j}}}}\ \ {\text{ for }}i=1,\dotsc ,K{\text{ and }}\mathbf {z} =(z_{1},\dotsc ,z_{K})\in \mathbb {R} ^{K}.} $$
my $output_pdl_batched = FloatTFTensorToPDL($RunSession->($session, $t));
my $softmax = sub { ( map $_/sumover($_)->dummy(0), exp($_[0]) )[0] };
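# (Added note) exp() exponentiates every logit, sumover() collapses the class
# dimension (dim 0), and dummy(0) re-inserts it so the per-image sums
# broadcast against all classes during the division.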
my $probabilities_batched = $softmax->($output_pdl_batched);
p $probabilities_batched;
B<STREAM (STDERR)>:
=for html <span style="display:inline-block;margin-left:1em;"><pre style="display: block"><code><span style="color: #cc66cc;">PDL</span><span style="color: #33ccff;"> {</span><span style="">
</span><span style="color: #6666cc;">Data </span><span style=""> : </span><span style="color: #669933;">too long to print</span><span style="">
</span><span style="color: #6666cc;">Type </span><span style=""> : </span><span style="color: #cc66cc;">float</span><span style="">
</span><span style="color: #6666cc;">Shape </span><span style=""> : </span><span style="color: #33ccff;">[</span><span style="color: #9999cc;">1001 12</span><span style="color: #33ccff;">]</span><span style="">
</span><span style="color: #6666cc;">Nelem </span><span style=""> : </span><span style="color: #dd6;">12012</span><span style="">
</span><span style="color: #6666cc;">Min </span><span style=""> : </span><span style="color: #f66;">2.73727380317723e-07</span><span style="">
</span><span style="color: #6666cc;">Max </span><span style=""> : </span><span style="color: #99f;">0.980696022510529</span><span style="">
</span><span style="color: #6666cc;">Badflag </span><span style=""> : </span><span style="color: #2c2;">No</span><span style="">
</span><span style="color: #6666cc;">Has Bads</span><span style=""> : </span><span style="color: #2c2;">No</span><span style="">
</span><span style="color: #33ccff;">}</span><span style="">
</span></code></pre></span>
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
B<DISPLAY>:
=for html <span style="display:inline-block;margin-left:1em;"><p><table style="width: 100%"><tr><td><tt>apple</tt></td><td><a href="https://upload.wikimedia.org/wikipedia/commons/1/15/Red_Apple.jpg"><img alt="apple" src="https://upload.wikimedia.org/...
lib/AI/TensorFlow/Libtensorflow/Operation.pm view on Meta::CPAN
arg 'TF_Input_struct_array' => 'consumers',
arg 'int' => 'max_consumers',
] => 'int' => sub {
my ($xs, $self, $output) = @_;
my $max_consumers = $self->OutputNumConsumers( $output );
my $consumers = AI::TensorFlow::Libtensorflow::Input->_adef->create( $max_consumers );
my $count = $xs->($output, $consumers, $max_consumers);
return AI::TensorFlow::Libtensorflow::Input->_from_array( $consumers );
});
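# (Added note) the wrapper coderef receives the attached C function as $xs
# followed by the Perl-level arguments; it sizes a TF_Input array via
# OutputNumConsumers(), lets the C call fill it, and returns the wrapped
# consumer list rather than the raw count.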
sub _data_printer {
my ($self, $ddp) = @_;
my %data = (
Name => $self->Name,
OpType => $self->OpType,
NumInputs => $self->NumInputs,
NumOutputs => $self->NumOutputs,
);
return sprintf('%s %s',
$ddp->maybe_colorize(ref $self, 'class' ),
$ddp->parse(\%data) );
}
1;
__END__
=pod
lib/AI/TensorFlow/Libtensorflow/Output.pm view on Meta::CPAN
use overload
'""' => \&_op_stringify;
sub _op_stringify {
join ":", (
( defined $_[0]->_oper ? $_[0]->oper->Name : '<undefined operation>' ),
( defined $_[0]->index ? $_[0]->index : '<no index>' )
);
}
sub _data_printer {
my ($self, $ddp) = @_;
my %data = (
oper => $self->oper,
index => $self->index,
);
return sprintf('%s %s',
$ddp->maybe_colorize(ref $self, 'class' ),
$ddp->parse(\%data) );
};
1;
__END__
=pod
lib/AI/TensorFlow/Libtensorflow/Status.pm view on Meta::CPAN
$ffi->attach( 'Message' => [ 'TF_Status' ], 'string' );
use overload
'""' => \&_op_stringify;
sub _op_stringify {
$_TF_CODE_INT_TO_NAME{$_[0]->GetCode};
}
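# (Added note) stringifying a status therefore yields the symbolic name of
# its code via %_TF_CODE_INT_TO_NAME, e.g. "OK" on success or a name such as
# "INVALID_ARGUMENT" on failure.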
sub _data_printer {
my ($self, $ddp) = @_;
if( $self->GetCode != AI::TensorFlow::Libtensorflow::Status::OK() ) {
return sprintf('%s %s %s %s%s%s %s',
$ddp->maybe_colorize( ref($self), 'class' ),
$ddp->maybe_colorize( '{', 'brackets' ),
$ddp->maybe_colorize( $_TF_CODE_INT_TO_NAME{$self->GetCode}, 'escaped' ),
$ddp->maybe_colorize( '(', 'brackets' ),
$ddp->maybe_colorize( $self->Message, 'string' ),
$ddp->maybe_colorize( ')', 'brackets' ),
$ddp->maybe_colorize( '}', 'brackets' ),
);
} else {
return sprintf('%s %s %s %s',
$ddp->maybe_colorize( ref($self), 'class' ),
$ddp->maybe_colorize( '{', 'brackets' ),
$ddp->maybe_colorize( $_TF_CODE_INT_TO_NAME{$self->GetCode}, 'escaped' ),
$ddp->maybe_colorize( '}', 'brackets' ),
);
}
}
1;
lib/AI/TensorFlow/Libtensorflow/Tensor.pm view on Meta::CPAN
map {
$ffi->cast(
'opaque',
'TF_Tensor',
$array->[$_]->p)
} 0.. $array->count - 1
]
}
#### Data::Printer ####
sub _data_printer {
my ($self, $ddp) = @_;
my @data = (
[ Type => $ddp->maybe_colorize( $self->Type, 'class' ), ],
[ Dims => sprintf "%s %s %s",
$ddp->maybe_colorize('[', 'brackets'),
join(" ",
map $ddp->maybe_colorize( $self->Dim($_), 'number' ),
0..$self->NumDims-1),
$ddp->maybe_colorize(']', 'brackets'),
],
[ NumDims => $ddp->maybe_colorize( $self->NumDims, 'number' ), ],
[ ElementCount => $ddp->maybe_colorize( $self->ElementCount, 'number' ), ],
);
my $output;
$output .= $ddp->maybe_colorize(ref $self, 'class' );
$output .= ' ' . $ddp->maybe_colorize('{', 'brackets');
$ddp->indent;
for my $item (@data) {
$output .= $ddp->newline;
$output .= join " ",
$ddp->maybe_colorize(sprintf("%-15s", $item->[0]), 'hash'),
$item->[1];
}
$ddp->outdent;
$output .= $ddp->newline;
$output .= $ddp->maybe_colorize('}', 'brackets');
return $output;
}
1;
maint/process-capi.pl view on Meta::CPAN
subcommand 'generate-capi-docs' => method(@) {
$self->generate_capi_funcs;
};
subcommand 'check-types' => method(@) {
$self->check_types;
};
subcommand 'check-functions' => method(@) {
if( $_[0] eq '--help' ) {
print STDERR "$0 check-functions [TYPE]\n";
exit;
}
$self->check_functions(shift @_);
};
sub BUILD {
Moo::Role->apply_roles_to_object(
AI::TensorFlow::Libtensorflow::Lib->ffi
=> qw(AttachedFunctionTrackable));
load 'AI::TensorFlow::Libtensorflow';
maint/process-notebook.pl view on Meta::CPAN
perl -0777 -pi -e 's/(=head1 NAME\n+)$ENV{SRC_BASENAME}/\1$ENV{PODNAME}/' $DST
## Edit to local section link (Markdown::Pod does not yet recognise this).
perl -pi -E 's,\QL<CPANFILE|#CPANFILE>\E,L<CPANFILE|/CPANFILE>,g' $DST
## Add
## =head1 CPANFILE
##
## requires '...';
## requires '...';
scan-perl-prereqs-nqlite --cpanfile $DST | perl -M5';print qq|=head1 CPANFILE\n\n|' -plE '$_ = q| | . $_;' | sponge -a $DST ;
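## (Added note) scan-perl-prereqs-nqlite emits the "requires '...';" lines;
## the -M'5;print ...' trick prints the =head1 CPANFILE heading once at
## startup, the -plE filter prefixes each line with a space so POD renders the
## list verbatim, and sponge -a appends the result to $DST.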
## Check output (if on TTY)
if [ -t 0 ]; then
perldoc $DST;
fi
## Check and run script in the directory of the original (e.g., to get data
## files).
perl -c $DST
#&& perl -MCwd -MPath::Tiny -E '
t/02_load_graph.t view on Meta::CPAN
my $buf = AI::TensorFlow::Libtensorflow::Buffer->NewFromString(\$data);
ok $buf;
my $graph = AI::TensorFlow::Libtensorflow::Graph->New;
my $status = AI::TensorFlow::Libtensorflow::Status->New;
my $opts = AI::TensorFlow::Libtensorflow::ImportGraphDefOptions->New;
$graph->ImportGraphDef( $buf, $opts, $status );
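# (Added note) ImportGraphDef() parses the serialized GraphDef held in $buf
# into $graph using $opts; success or failure is reported through $status,
# which is checked against Status::OK below.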
if( $status->GetCode == AI::TensorFlow::Libtensorflow::Status::OK ) {
print "Load graph success\n";
pass;
} else {
fail;
}
pass;
};
done_testing;
t/lib/TF_Utils.pm view on Meta::CPAN
$graph = AI::TensorFlow::Libtensorflow::Graph->New;
$session ||= AI::TensorFlow::Libtensorflow::Session->New($graph, $opts, $s);
}
my $device_list = $session->ListDevices($s);
my @devices = map {
my $idx = $_;
my %h = map { ( $_ => $device_list->$_( $idx, $s ) ) } qw(Name Type MemoryBytes Incarnation);
\%h;
} 0..$device_list->Count - 1;
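# (Added note) ListDevices() and each per-device accessor take the status
# object $s; the map builds one hashref per device with its Name, Type,
# MemoryBytes, and Incarnation.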
use Data::Dumper; print Dumper(\@devices);
}
1;