view release on metacpan or search on metacpan
lib/AI/TensorFlow/Libtensorflow/ApiDefMap.pm view on Meta::CPAN
# Bind TF_NewApiDefMap as the class constructor C<New>.
# The wrapper discards the invocant (the class name) before invoking the
# XSUB, since the underlying C function only takes the serialized OpList
# buffer and a status out-parameter.
$ffi->attach( [ 'NewApiDefMap' => 'New' ] => [
    arg 'TF_Buffer' => 'op_list_buffer',
    arg 'TF_Status' => 'status',
] => 'TF_ApiDefMap' => sub {
    my $xs = shift;
    shift;    # drop the class name from the argument list
    return $xs->(@_);
});
# Bind TF_DeleteApiDefMap as DESTROY so the underlying C TF_ApiDefMap is
# freed automatically when the Perl wrapper object goes out of scope.
$ffi->attach( ['DeleteApiDefMap' => 'DESTROY'] => [
arg 'TF_ApiDefMap' => 'apimap'
] => 'void');
# Bind TF_ApiDefMapPut as C<Put>: merges a text-format ApiDefs protocol
# message into the map. The 'tf_text_buffer' custom type expands a single
# Perl string into the (text, text_len) argument pair the C API expects.
# Errors are reported via the trailing TF_Status argument.
$ffi->attach( [ 'ApiDefMapPut' => 'Put' ] => [
arg 'TF_ApiDefMap' => 'api_def_map',
arg 'tf_text_buffer' => [qw(text text_len)],
arg 'TF_Status' => 'status',
] => 'void' );
# Bind TF_ApiDefMapGet as C<Get>: looks up the operation named by the
# (name, name_len) pair and returns a TF_Buffer holding a serialized
# ApiDef protocol buffer. Failure is reported via the TF_Status argument.
$ffi->attach( ['ApiDefMapGet' => 'Get' ] => [
arg 'TF_ApiDefMap' => 'api_def_map',
arg 'tf_text_buffer' => [qw(name name_len)],
arg 'TF_Status' => 'status',
] => 'TF_Buffer');
1;
__END__
=pod
lib/AI/TensorFlow/Libtensorflow/ApiDefMap.pm view on Meta::CPAN
use aliased 'AI::TensorFlow::Libtensorflow::ApiDefMap' => 'ApiDefMap';
=head1 CONSTRUCTORS
=head2 New
use AI::TensorFlow::Libtensorflow;
use AI::TensorFlow::Libtensorflow::Status;
my $map = ApiDefMap->New(
AI::TensorFlow::Libtensorflow::TFLibrary->GetAllOpList,
my $status = AI::TensorFlow::Libtensorflow::Status->New
);
ok $map, 'Created ApiDefMap';
B<C API>: L<< C<TF_NewApiDefMap>|AI::TensorFlow::Libtensorflow::Manual::CAPI/TF_NewApiDefMap >>
=head1 METHODS
=head2 Put
B<C API>: L<< C<TF_ApiDefMapPut>|AI::TensorFlow::Libtensorflow::Manual::CAPI/TF_ApiDefMapPut >>
=head2 Get
=over 2
C<<<
Get($name, $status)
>>>
=back
my $api_def_buf = $map->Get(
'NoOp',
my $status = AI::TensorFlow::Libtensorflow::Status->New
);
cmp_ok $api_def_buf->length, '>', 0, 'Got ApiDef buffer for NoOp operation';
B<Parameters>
=over 4
lib/AI/TensorFlow/Libtensorflow/DataType.pm view on Meta::CPAN
);
# Invert name => value into value => name; used below to assert that the
# enum values are unique (reversing a hash collapses duplicate values).
my %_REV_ENUM_DTYPE = reverse %_ENUM_DTYPE;
if( STRICT ) { # ASSERT: every dtype enum value must be distinct
die "Duplicate values for \%_ENUM_DTYPE" unless keys %_ENUM_DTYPE == keys %_REV_ENUM_DTYPE
}
my %_DTYPES;
# Export one constant per dtype name plus the @DTYPES collection.
Const::Exporter->import(
dtypes => [
do {
# Each dtype constant is a blessed reference to its own fresh scalar
# (the \do{ my $value = ... } idiom guarantees a distinct anonymous
# scalar per key, so the objects do not alias each other).
%_DTYPES = map {
$_ => bless \do {
my $value = $_ENUM_DTYPE{$_};
}, __PACKAGE__;
} keys %_ENUM_DTYPE;
},
# @DTYPES is sorted by the numeric enum value stored in each object.
'@DTYPES' => [ sort { $$a <=> $$b } values %_DTYPES ],
]
);
use namespace::autoclean;
use namespace::autoclean;
lib/AI/TensorFlow/Libtensorflow/DataType.pm view on Meta::CPAN
AI::TensorFlow::Libtensorflow::DataType - Datatype enum
=head1 SYNOPSIS
use AI::TensorFlow::Libtensorflow::DataType qw(FLOAT @DTYPES);
use List::Util qw(max);
my $dtype = FLOAT;
is FLOAT->Size, 4, 'FLOAT is 4 bytes large';
is max(map { $_->Size } @DTYPES), 16,
'Largest type has sizeof() == 16 bytes';
=head1 DESCRIPTION
Enum representing native data types used inside of containers such as
L<TFTensor|AI::TensorFlow::Libtensorflow::Lib::Types/TFTensor>.
=head1 CONSTANTS
=head2 STRING
lib/AI/TensorFlow/Libtensorflow/ImportGraphDefOptions.pm view on Meta::CPAN
# Bind TF_ImportGraphDefOptionsNumReturnOperations: returns the number of
# return operations added so far to these import options.
$ffi->attach( [ 'ImportGraphDefOptionsNumReturnOperations' => 'NumReturnOperations' ] => [
arg TF_ImportGraphDefOptions => 'opts',
] => 'int' );
# Bind TF_ImportGraphDefOptionsAddControlDependency: makes the imported
# graph depend on `oper`, which should already exist in the target graph.
$ffi->attach( [ 'ImportGraphDefOptionsAddControlDependency' => 'AddControlDependency' ] => [
arg TF_ImportGraphDefOptions => 'opts',
arg TF_Operation => 'oper',
] => 'void' );
# Bind TF_ImportGraphDefOptionsRemapControlDependency: replaces control
# inputs named `src_name` (in the graph being imported) with `dst` (an
# operation already in the target graph). `src_name` is copied by the C API.
$ffi->attach( [ 'ImportGraphDefOptionsRemapControlDependency' => 'RemapControlDependency' ] => [
arg TF_ImportGraphDefOptions => 'opts',
arg string => 'src_name',
arg TF_Operation => 'dst',
] => 'void' );
1;
__END__
=pod
lib/AI/TensorFlow/Libtensorflow/ImportGraphDefOptions.pm view on Meta::CPAN
B<C API>: L<< C<TF_ImportGraphDefOptionsAddReturnOperation>|AI::TensorFlow::Libtensorflow::Manual::CAPI/TF_ImportGraphDefOptionsAddReturnOperation >>
=head2 NumReturnOperations
B<C API>: L<< C<TF_ImportGraphDefOptionsNumReturnOperations>|AI::TensorFlow::Libtensorflow::Manual::CAPI/TF_ImportGraphDefOptionsNumReturnOperations >>
=head2 AddControlDependency
B<C API>: L<< C<TF_ImportGraphDefOptionsAddControlDependency>|AI::TensorFlow::Libtensorflow::Manual::CAPI/TF_ImportGraphDefOptionsAddControlDependency >>
=head2 RemapControlDependency
B<C API>: L<< C<TF_ImportGraphDefOptionsRemapControlDependency>|AI::TensorFlow::Libtensorflow::Manual::CAPI/TF_ImportGraphDefOptionsRemapControlDependency >>
=head1 DESTRUCTORS
=head2 DESTROY
B<C API>: L<< C<TF_DeleteImportGraphDefOptions>|AI::TensorFlow::Libtensorflow::Manual::CAPI/TF_DeleteImportGraphDefOptions >>
=head1 AUTHOR
Zakariyya Mughal <zmughal@cpan.org>
lib/AI/TensorFlow/Libtensorflow/ImportGraphDefResults.pm view on Meta::CPAN
] => 'void' => sub {
my ($xs, $results) = @_;
my $num_outputs;
my $outputs_array = undef;
$xs->($results, \$num_outputs, \$outputs_array);
return [] if $num_outputs == 0;
my $sizeof_output = $ffi->sizeof('TF_Output');
window(my $outputs_packed, $outputs_array, $sizeof_output * $num_outputs );
# due to unpack, these are copies (no longer owned by $results)
my @outputs = map bless(\$_, "AI::TensorFlow::Libtensorflow::Output"),
unpack "(a${sizeof_output})*", $outputs_packed;
return \@outputs;
});
# Bind TF_ImportGraphDefResultsReturnOperations as C<ReturnOperations>.
# The C function hands back a count and a pointer to an array of
# TF_Operation*; the wrapper converts that into an ArrayRef of
# TF_Operation objects for the Perl caller.
$ffi->attach( [ 'ImportGraphDefResultsReturnOperations' => 'ReturnOperations' ] => [
arg TF_ImportGraphDefResults => 'results',
arg 'int*' => 'num_opers',
arg 'opaque*' => { id => 'opers', type => 'TF_Operation_array*' },
] => 'void' => sub {
my ($xs, $results) = @_;
my $num_opers;
my $opers_array = undef;
# Out-parameters: the XSUB fills in the count and the array base pointer.
$xs->($results, \$num_opers, \$opers_array);
return [] if $num_opers == 0;
# Copy the raw array of pointers (num_opers * sizeof(void*)) into a
# Perl scalar so it can be unpacked below.
my $opers_array_base_packed = buffer_to_scalar($opers_array,
$ffi->sizeof('opaque') * $num_opers );
# Unpack each platform-sized pointer and cast it to a TF_Operation
# object. NOTE(review): the pointers remain owned by `results` —
# presumably they are only valid for its lifetime.
my @opers = map {
$ffi->cast('opaque', 'TF_Operation', $_ )
} unpack "(@{[ AI::TensorFlow::Libtensorflow::Lib::_pointer_incantation ]})*", $opers_array_base_packed;
return \@opers;
} );
# Bind TF_ImportGraphDefResultsMissingUnusedInputMappings as
# C<MissingUnusedInputMappings>. The C function reports input mappings
# requested via AddInputMapping() that were never used, as two parallel
# C arrays (names and indexes); the wrapper zips them into an ArrayRef
# of [ $src_name, $src_index ] pairs.
$ffi->attach( [ 'ImportGraphDefResultsMissingUnusedInputMappings' => 'MissingUnusedInputMappings' ] => [
arg TF_ImportGraphDefResults => 'results',
arg 'int*' => 'num_missing_unused_input_mappings',
arg 'opaque*' => { id => 'src_names', ctype => 'const char***' },
arg 'opaque*' => { id => 'src_indexes', ctype => 'int**' },
] => 'void' => sub {
my ($xs, $results) = @_;
my $num_missing_unused_input_mappings;
my $src_names;
my $src_indexes;
# Out-parameters: count plus base pointers of the two parallel arrays.
# Both arrays (and the strings) are owned by `results` per the C API.
$xs->($results,
\$num_missing_unused_input_mappings,
\$src_names, \$src_indexes
);
# Cast the raw pointers into Perl-side arrays of the right length.
my $src_names_str = $ffi->cast('opaque',
"string[$num_missing_unused_input_mappings]", $src_names);
my $src_indexes_int = $ffi->cast('opaque',
"int[$num_missing_unused_input_mappings]", $src_indexes);
# Interleave into pairs: [ [name0, idx0], [name1, idx1], ... ].
return [ List::Util::zip($src_names_str, $src_indexes_int) ];
});
1;
__END__
=pod
=encoding UTF-8
lib/AI/TensorFlow/Libtensorflow/Input.pm view on Meta::CPAN
my $output = $class->_adef->create(0 + @_);
for my $idx (0..@_-1) {
next unless defined $_[$idx];
$class->_copy_to_other( $_[$idx], $output->[$idx] );
}
$output;
}
# Build an ArrayRef of per-element records copied out of a C record
# array: one fresh C<< $class->new >> object per element, populated via
# the class's _copy_to_other helper.
sub _from_array {
    my ($class, $array) = @_;
    my @records;
    for my $i ( 0 .. $array->count - 1 ) {
        my $record = $class->new;
        $class->_copy_to_other( $array->[$i], $record );
        push @records, $record;
    }
    return \@records;
}
sub _copy_to_other {
my ($class, $this, $that) = @_;
$that->_oper ($this->_oper);
$that->_index($this->_index);
lib/AI/TensorFlow/Libtensorflow/Lib/FFIType/Variant/RecordArrayRef.pm view on Meta::CPAN
# Validate the record class that packed elements will be blessed into.
die "Missing/invalid module name: $arguments{record_module}"
unless is_module_name($arguments{record_module});
my $record_module = $arguments{record_module};
# with_size controls whether the element count is passed as a second
# native argument; defaults to true.
my $with_size = exists $arguments{with_size} ? $arguments{with_size} : 1;
# Per-call state shared between perl_to_native and perl_to_native_post;
# a stack so nested/concurrent calls do not clobber each other.
my @stack;
my $perl_to_native = install perl_to_native => sub {
my ($value, $i) = @_;
# Concatenate the raw bytes of every record in the ArrayRef into one
# contiguous buffer (each element is a reference to its packed data).
my $data = pack "(a*)*", map $$_, @$value;
my($pointer, $size) = scalar_to_buffer($data);
my $n = @$value;
# Per-record byte size, derived from the total; assumes all records
# in the array are the same size.
my $sizeof = $size / $n;
push @stack, [ \$data, $n, $pointer, $size , $sizeof ];
# Pass the buffer pointer (and optionally the count) to the C call.
arguments_set_pointer $i , $pointer;
arguments_set_sint32 $i+1, $n if $with_size;
};
my $perl_to_native_post = install perl_to_native_post => sub {
my($data_ref, $n, $pointer, $size, $sizeof) = @{ pop @stack };
# Read the (possibly C-modified) buffer back and split it into
# per-record scalars, blessing each into the record class; this
# replaces the caller's array contents in place.
$$data_ref = buffer_to_scalar($pointer, $size);
@{$_[0]} = map {
bless \$_, $record_module
} unpack "(a${sizeof})*", $$data_ref;
();
};
install ffi_custom_type_api_1 => sub {
{
native_type => 'opaque',
argument_count => ($with_size ? 2 : 1),
perl_to_native => $perl_to_native,
lib/AI/TensorFlow/Libtensorflow/Lib/_Alloc.pm view on Meta::CPAN
# See <https://github.com/tensorflow/tensorflow/issues/58112>.
# This is a power-of-two.
const our $EIGEN_MAX_ALIGN_BYTES => do { _tf_alignment(); };
sub _tf_alignment {
# Bytes of alignment sorted in descending order:
# NOTE Alignment can not currently be larger than 128-bytes as the pure
# Perl implementation of _aligned_alloc() only supports alignment of up
# to 255 bytes (which means 128 bytes is the maximum power-of-two
# alignment).
my @alignments = map 2**$_, reverse 0..7;
# 1-byte element
my $el = INT8;
my $el_size = $el->Size;
my $max_alignment = $alignments[0];
my $req_size = 2 * $max_alignment + $el_size;
# All data that is sent to TF_NewTensor here is within the block of
# memory allocated at $ptr_base.
my $ptr_base = malloc($req_size);
lib/AI/TensorFlow/Libtensorflow/Lib/_Alloc.pm view on Meta::CPAN
return AI::TensorFlow::Libtensorflow::Tensor->New(
$el, [int($space_for_data/$el_size)], \$data, sub {
$$dealloc_called = 1
}
);
};
for my $a_idx (0..@alignments-2) {
my @dealloc = (0, 0);
my @t = map {
$create_tensor_at_alignment->($alignments[$a_idx + $_], \$dealloc[$_]);
} (0..1);
return $alignments[$a_idx] if $dealloc[0] == 0 && $dealloc[1] == 1;
}
return 1;
}
sub _tf_aligned_alloc {
my ($class, $size) = @_;
lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod view on Meta::CPAN
`dst` references a node already existing in the graph being imported into.
`src_name` is copied and has no lifetime requirements.
=back
/* From <tensorflow/c/c_api.h> */
TF_CAPI_EXPORT extern void TF_ImportGraphDefOptionsAddInputMapping(
TF_ImportGraphDefOptions* opts, const char* src_name, int src_index,
TF_Output dst);
=head2 TF_ImportGraphDefOptionsRemapControlDependency
=over 2
Set any imported nodes with control input `src_name` to have that input
replaced with `dst`. `src_name` refers to a node in the graph to be imported,
`dst` references an operation already existing in the graph being imported
into. `src_name` is copied and has no lifetime requirements.
=back
/* From <tensorflow/c/c_api.h> */
TF_CAPI_EXPORT extern void TF_ImportGraphDefOptionsRemapControlDependency(
TF_ImportGraphDefOptions* opts, const char* src_name, TF_Operation* dst);
=head2 TF_ImportGraphDefOptionsAddControlDependency
=over 2
Cause the imported graph to have a control dependency on `oper`. `oper`
should exist in the graph being imported into.
=back
/* From <tensorflow/c/c_api.h> */
TF_CAPI_EXPORT extern void TF_ImportGraphDefOptionsAddControlDependency(
TF_ImportGraphDefOptions* opts, TF_Operation* oper);
=head2 TF_ImportGraphDefOptionsAddReturnOutput
=over 2
Add an output in `graph_def` to be returned via the `return_outputs` output
parameter of TF_GraphImportGraphDef(). If the output is remapped via an input
mapping, the corresponding existing tensor in `graph` will be returned.
`oper_name` is copied and has no lifetime requirements.
=back
/* From <tensorflow/c/c_api.h> */
TF_CAPI_EXPORT extern void TF_ImportGraphDefOptionsAddReturnOutput(
TF_ImportGraphDefOptions* opts, const char* oper_name, int index);
=head2 TF_ImportGraphDefOptionsNumReturnOutputs
lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod view on Meta::CPAN
=back
/* From <tensorflow/c/c_api.h> */
TF_CAPI_EXPORT extern void TF_ImportGraphDefResultsReturnOperations(
TF_ImportGraphDefResults* results, int* num_opers, TF_Operation*** opers);
=head2 TF_ImportGraphDefResultsMissingUnusedInputMappings
=over 2
Fetches any input mappings requested via
TF_ImportGraphDefOptionsAddInputMapping() that didn't appear in the GraphDef
and weren't used as input to any node in the imported graph def. The number
of fetched mappings is returned in `num_missing_unused_input_mappings`. The
array of each mapping's source node name is returned in `src_names`, and the
array of each mapping's source index is returned in `src_indexes`.
`*src_names`, `*src_indexes`, and the memory backing each string in
`src_names` are owned by and have the lifetime of `results`.
=back
/* From <tensorflow/c/c_api.h> */
TF_CAPI_EXPORT extern void TF_ImportGraphDefResultsMissingUnusedInputMappings(
TF_ImportGraphDefResults* results, int* num_missing_unused_input_mappings,
const char*** src_names, int** src_indexes);
=head2 TF_DeleteImportGraphDefResults
=over 2
Deletes a results object returned by TF_GraphImportGraphDefWithResults().
=back
lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod view on Meta::CPAN
=head2 TF_DeleteApiDefMap
=over 2
Deallocates a TF_ApiDefMap.
=back
/* From <tensorflow/c/c_api.h> */
TF_CAPI_EXPORT extern void TF_DeleteApiDefMap(TF_ApiDefMap* apimap);
=head2 TF_ApiDefMapPut
=over 2
Add ApiDefs to the map.
`text` corresponds to a text representation of an ApiDefs protocol message.
(https://www.tensorflow.org/code/tensorflow/core/framework/api_def.proto).
The provided ApiDefs will be merged with existing ones in the map, with
precedence given to the newly added version in case of conflicts with
previous calls to TF_ApiDefMapPut.
=back
/* From <tensorflow/c/c_api.h> */
TF_CAPI_EXPORT extern void TF_ApiDefMapPut(TF_ApiDefMap* api_def_map,
const char* text, size_t text_len,
TF_Status* status);
=head2 TF_ApiDefMapGet
=over 2
Returns a serialized ApiDef protocol buffer for the TensorFlow operation
named `name`.
=back
/* From <tensorflow/c/c_api.h> */
TF_CAPI_EXPORT extern TF_Buffer* TF_ApiDefMapGet(TF_ApiDefMap* api_def_map,
const char* name,
size_t name_len,
TF_Status* status);
=head2 TF_GetAllRegisteredKernels
=over 2
Returns a serialized KernelList protocol buffer containing KernelDefs for all
registered kernels.
lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod view on Meta::CPAN
=back
/* From <tensorflow/c/eager/c_api_experimental.h> */
TF_CAPI_EXPORT extern void TFE_OpAddAttrs(TFE_Op* op, const TFE_OpAttrs* attrs);
=head2 TFE_OpAttrsSerialize
=over 2
Serialize `attrs` as a tensorflow::NameAttrList protocol buffer (into `buf`),
containing the op name and a map of its attributes.
=back
/* From <tensorflow/c/eager/c_api_experimental.h> */
TF_CAPI_EXPORT extern void TFE_OpAttrsSerialize(const TFE_OpAttrs* attrs,
TF_Buffer* buf,
TF_Status* status);
=head2 TFE_OpSetAttrValueProto
lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod view on Meta::CPAN
TF_CAPI_EXPORT extern void TFE_RegisterCustomDevice(TFE_Context* ctx,
TFE_CustomDevice device,
const char* device_name,
void* device_info,
TF_Status* status);
=head2 TFE_IsCustomDevice
=over 2
Returns whether `device_name` maps to a registered custom device.
=back
/* From <tensorflow/c/eager/c_api_experimental.h> */
TF_CAPI_EXPORT extern bool TFE_IsCustomDevice(TFE_Context* ctx,
const char* device_name);
=head2 TFE_NewCustomDeviceTensorHandle
=over 2
lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod view on Meta::CPAN
TF_SavedModel* model, const char* function_path, TF_Status* status);
=head2 TF_GetSavedModelSignatureDefFunction
=over 2
Retrieve a function from the TF SavedModel via a SignatureDef key.
Params:
model - The SavedModel to load a function from.
signature_def_key - The string key of the SignatureDef map of a SavedModel:
https://github.com/tensorflow/tensorflow/blob/69b08900b1e991d84bce31f3b404f5ed768f339f/tensorflow/core/protobuf/meta_graph.proto#L89
status - Set to OK on success and an appropriate error on failure.
Returns:
If status is not OK, returns nullptr. Otherwise, returns a
TF_SignatureDefFunction instance. Once `model` is deleted, all
`TF_SignatureDefFunctions` retrieved from it are invalid, and have been
deleted.
=back
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
# Wrap a PDL ndarray's data in a FLOAT (float32) TFTensor. Dimensions
# are reversed because PDL and TFTensor list dimensions in opposite
# orders (see the inverse helper, which reverses them back). The
# deallocator closure keeps $p alive until TensorFlow releases the data.
sub FloatPDLTOTFTensor {
my ($p) = @_;
return AI::TensorFlow::Libtensorflow::Tensor->New(
FLOAT, [ reverse $p->dims ], $p->get_dataref, sub { undef $p }
);
}
# Copy a float TFTensor into a new PDL ndarray of matching shape.
# Dimensions are reversed (inverse of FloatPDLTOTFTensor); the raw bytes
# are memcpy'd into the piddle's buffer and upd_data() tells PDL the
# underlying data changed.
sub FloatTFTensorToPDL {
my ($t) = @_;
my $pdl = zeros(float,reverse( map $t->Dim($_), 0..$t->NumDims-1 ) );
memcpy scalar_to_pointer( ${$pdl->get_dataref} ),
scalar_to_pointer( ${$t->Data} ),
$t->ByteSize;
$pdl->upd_data;
$pdl;
}
# Wrap a PDL ndarray's data in a UINT8 TFTensor. Same dimension-reversal
# and lifetime handling as FloatPDLTOTFTensor, but for byte data.
sub Uint8PDLTOTFTensor {
my ($p) = @_;
return AI::TensorFlow::Libtensorflow::Tensor->New(
UINT8, [ reverse $p->dims ], $p->get_dataref, sub { undef $p }
);
}
# Copy a uint8 TFTensor into a new byte PDL ndarray of matching shape.
# Mirror of FloatTFTensorToPDL for byte data: reverse the dimension
# order, memcpy the raw bytes, then refresh PDL's view with upd_data().
sub Uint8TFTensorToPDL {
my ($t) = @_;
my $pdl = zeros(byte,reverse( map $t->Dim($_), 0..$t->NumDims-1 ) );
memcpy scalar_to_pointer( ${$pdl->get_dataref} ),
scalar_to_pointer( ${$t->Data} ),
$t->ByteSize;
$pdl->upd_data;
$pdl;
}
# image_size => [width, height] (but usually square images)
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
}
use Archive::Extract;
my $ae = Archive::Extract->new( archive => $model_archive_path );
die "Could not extract archive" unless $ae->extract( to => $model_base );
my $saved_model = path($model_base)->child('saved_model.pb');
say "Saved model is in $saved_model" if -f $saved_model;
# Get the labels
my $response = $http->get('https://raw.githubusercontent.com/tensorflow/models/a4944a57ad2811e1f6a7a87589a9fc8a776e8d3c/object_detection/data/mscoco_label_map.pbtxt');
my %labels_map = $response->{content} =~ m<
(?:item \s+ \{ \s+
\Qname:\E \s+ "[^"]+" \s+
\Qid:\E \s+ (\d+) \s+
\Qdisplay_name:\E \s+ "([^"]+)" \s+
})+
>sgx;
my $label_count = List::Util::max keys %labels_map;
say "We have a label count of $label_count. These labels include: ",
join ", ", List::Util::head( 5, @labels_map{ sort keys %labels_map } );
my @tags = ( 'serve' );
if( File::Which::which('saved_model_cli')) {
local $ENV{TF_CPP_MIN_LOG_LEVEL} = 3; # quiet the TensorFlow logger for the following command
system(qw(saved_model_cli show),
qw(--dir) => $model_base,
qw(--tag_set) => join(',', @tags),
qw(--signature_def) => 'serving_default'
) == 0 or die "Could not run saved_model_cli";
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
detection_boxes => 0,
detection_classes => 1,
detection_scores => 2,
num_detections => 3,
}
},
);
my %outputs;
%outputs = map {
my $put_type = $_;
my $op = $ops{$put_type}{op};
my $port_dict = $ops{$put_type}{dict};
$put_type => +{
map {
my $dict_key = $_;
my $index = $port_dict->{$_};
$dict_key => AI::TensorFlow::Libtensorflow::Output->New( {
oper => $op,
index => $index,
});
} keys %$port_dict
}
} keys %ops;
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
if $img->getchannels != 3;
# $data is packed as PDL->dims == [w,h] with RGB pixels
my $pdl_raw = zeros(byte, $img->getchannels, $img->getwidth, $img->getheight);
${ $pdl_raw->get_dataref } = $data;
$pdl_raw->upd_data;
$pdl_raw;
}
my @pdl_images = map {
load_image_to_pdl(
$images_for_test_to_uri{$_},
$model_name_to_params{$model_name}{image_size}
);
} ($image_names[0]);
my $pdl_image_batched = cat(@pdl_images);
my $t = Uint8PDLTOTFTensor($pdl_image_batched);
die "There should be 4 dimensions" unless $pdl_image_batched->ndims == 4;
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
);
AssertOK($s);
return { mesh \@keys, \@outputs_t };
};
undef;
my $tftensor_output_by_name = $RunSession->($session, $t);
my %pdl_output_by_name = map {
$_ => FloatTFTensorToPDL( $tftensor_output_by_name->{$_} )
} keys $tftensor_output_by_name->%*;
undef;
my $min_score_thresh = 0.30;
my $which_detect = which( $pdl_output_by_name{detection_scores} > $min_score_thresh );
my %subset;
$subset{detection_boxes} = $pdl_output_by_name{detection_boxes}->dice('X', $which_detect);
$subset{detection_classes} = $pdl_output_by_name{detection_classes}->dice($which_detect);
$subset{detection_scores} = $pdl_output_by_name{detection_scores}->dice($which_detect);
$subset{detection_class_labels}->@* = map { $labels_map{$_} } $subset{detection_classes}->list;
p %subset;
use PDL::Graphics::Gnuplot;
my $plot_output_path = 'objects-detected.png';
my $gp = gpwin('pngcairo', font => ",12", output => $plot_output_path, aa => 2, size => [10] );
my @qual_cmap = ('#a6cee3','#1f78b4','#b2df8a','#33a02c','#fb9a99','#e31a1c','#fdbf6f','#ff7f00','#cab2d6');
$gp->options(
map {
my $idx = $_;
my $lc_rgb = $qual_cmap[ $subset{detection_classes}->slice("($idx)")->squeeze % @qual_cmap ];
my $box_corners_yx_norm = $subset{detection_boxes}->slice([],$idx,[0,0,0]);
$box_corners_yx_norm->reshape(2,2);
my $box_corners_yx_img = $box_corners_yx_norm * $pdl_images[0]->shape->slice('-1:-2');
my $from_xy = join ",", $box_corners_yx_img->slice('-1:0,(0)')->list;
my $to_xy = join ",", $box_corners_yx_img->slice('-1:0,(1)')->list;
my $label_xy = join ",", $box_corners_yx_img->at(1,1), $box_corners_yx_img->at(0,1);
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
sub FloatPDLTOTFTensor {
my ($p) = @_;
return AI::TensorFlow::Libtensorflow::Tensor->New(
FLOAT, [ reverse $p->dims ], $p->get_dataref, sub { undef $p }
);
}
sub FloatTFTensorToPDL {
my ($t) = @_;
my $pdl = zeros(float,reverse( map $t->Dim($_), 0..$t->NumDims-1 ) );
memcpy scalar_to_pointer( ${$pdl->get_dataref} ),
scalar_to_pointer( ${$t->Data} ),
$t->ByteSize;
$pdl->upd_data;
$pdl;
}
sub Uint8PDLTOTFTensor {
my ($p) = @_;
return AI::TensorFlow::Libtensorflow::Tensor->New(
UINT8, [ reverse $p->dims ], $p->get_dataref, sub { undef $p }
);
}
sub Uint8TFTensorToPDL {
my ($t) = @_;
my $pdl = zeros(byte,reverse( map $t->Dim($_), 0..$t->NumDims-1 ) );
memcpy scalar_to_pointer( ${$pdl->get_dataref} ),
scalar_to_pointer( ${$t->Data} ),
$t->ByteSize;
$pdl->upd_data;
$pdl;
}
=head2 Fetch the model and labels
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
$http->mirror( $uri, $path );
}
use Archive::Extract;
my $ae = Archive::Extract->new( archive => $model_archive_path );
die "Could not extract archive" unless $ae->extract( to => $model_base );
my $saved_model = path($model_base)->child('saved_model.pb');
say "Saved model is in $saved_model" if -f $saved_model;
We need to download the COCO 2017 classification labels and parse out the mapping from the numeric index to the textual descriptions.
# Get the labels
my $response = $http->get('https://raw.githubusercontent.com/tensorflow/models/a4944a57ad2811e1f6a7a87589a9fc8a776e8d3c/object_detection/data/mscoco_label_map.pbtxt');
my %labels_map = $response->{content} =~ m<
(?:item \s+ \{ \s+
\Qname:\E \s+ "[^"]+" \s+
\Qid:\E \s+ (\d+) \s+
\Qdisplay_name:\E \s+ "([^"]+)" \s+
})+
>sgx;
my $label_count = List::Util::max keys %labels_map;
say "We have a label count of $label_count. These labels include: ",
join ", ", List::Util::head( 5, @labels_map{ sort keys %labels_map } );
=head2 Load the model and session
We define the tag set C<[ 'serve' ]> which we will use to load the model.
my @tags = ( 'serve' );
We can examine what computations are contained in the graph in terms of the names of the inputs and outputs of an operation found in the graph by running C<saved_model_cli>.
if( File::Which::which('saved_model_cli')) {
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
detection_boxes => 0,
detection_classes => 1,
detection_scores => 2,
num_detections => 3,
}
},
);
my %outputs;
%outputs = map {
my $put_type = $_;
my $op = $ops{$put_type}{op};
my $port_dict = $ops{$put_type}{dict};
$put_type => +{
map {
my $dict_key = $_;
my $index = $port_dict->{$_};
$dict_key => AI::TensorFlow::Libtensorflow::Output->New( {
oper => $op,
index => $index,
});
} keys %$port_dict
}
} keys %ops;
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
if $img->getchannels != 3;
# $data is packed as PDL->dims == [w,h] with RGB pixels
my $pdl_raw = zeros(byte, $img->getchannels, $img->getwidth, $img->getheight);
${ $pdl_raw->get_dataref } = $data;
$pdl_raw->upd_data;
$pdl_raw;
}
my @pdl_images = map {
load_image_to_pdl(
$images_for_test_to_uri{$_},
$model_name_to_params{$model_name}{image_size}
);
} ($image_names[0]);
my $pdl_image_batched = cat(@pdl_images);
my $t = Uint8PDLTOTFTensor($pdl_image_batched);
die "There should be 4 dimensions" unless $pdl_image_batched->ndims == 4;
die "With the final dimension of length 1" unless $pdl_image_batched->dim(3) == 1;
p $pdl_image_batched;
p $t;
=head2 Run the model for inference
We can use the C<Run> method to run the session and get the multiple output C<TFTensor>s. The following uses the names in C<$outputs> mapping to help process the multiple outputs more easily.
my $RunSession = sub {
my ($session, $t) = @_;
my @outputs_t;
my @keys = keys %{ $outputs{out} };
my @values = $outputs{out}->@{ @keys };
$session->Run(
undef,
[ values %{$outputs{in} } ], [$t],
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
return { mesh \@keys, \@outputs_t };
};
undef;
my $tftensor_output_by_name = $RunSession->($session, $t);
my %pdl_output_by_name = map {
$_ => FloatTFTensorToPDL( $tftensor_output_by_name->{$_} )
} keys $tftensor_output_by_name->%*;
undef;
=head2 Results summary
Then we use a score threshold to select the objects of interest.
my $min_score_thresh = 0.30;
my $which_detect = which( $pdl_output_by_name{detection_scores} > $min_score_thresh );
my %subset;
$subset{detection_boxes} = $pdl_output_by_name{detection_boxes}->dice('X', $which_detect);
$subset{detection_classes} = $pdl_output_by_name{detection_classes}->dice($which_detect);
$subset{detection_scores} = $pdl_output_by_name{detection_scores}->dice($which_detect);
$subset{detection_class_labels}->@* = map { $labels_map{$_} } $subset{detection_classes}->list;
p %subset;
The following uses the bounding boxes and class label information to draw boxes and labels on top of the image using Gnuplot.
use PDL::Graphics::Gnuplot;
my $plot_output_path = 'objects-detected.png';
my $gp = gpwin('pngcairo', font => ",12", output => $plot_output_path, aa => 2, size => [10] );
my @qual_cmap = ('#a6cee3','#1f78b4','#b2df8a','#33a02c','#fb9a99','#e31a1c','#fdbf6f','#ff7f00','#cab2d6');
$gp->options(
map {
my $idx = $_;
my $lc_rgb = $qual_cmap[ $subset{detection_classes}->slice("($idx)")->squeeze % @qual_cmap ];
my $box_corners_yx_norm = $subset{detection_boxes}->slice([],$idx,[0,0,0]);
$box_corners_yx_norm->reshape(2,2);
my $box_corners_yx_img = $box_corners_yx_norm * $pdl_images[0]->shape->slice('-1:-2');
my $from_xy = join ",", $box_corners_yx_img->slice('-1:0,(0)')->list;
my $to_xy = join ",", $box_corners_yx_img->slice('-1:0,(1)')->list;
my $label_xy = join ",", $box_corners_yx_img->at(1,1), $box_corners_yx_img->at(0,1);
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubEnformerGeneExprPredModel.pod view on Meta::CPAN
sub FloatPDLTOTFTensor {
my ($p) = @_;
return AI::TensorFlow::Libtensorflow::Tensor->New(
FLOAT, [ reverse $p->dims ], $p->get_dataref, sub { undef $p }
);
}
sub FloatTFTensorToPDL {
my ($t) = @_;
my $pdl = zeros(float,reverse( map $t->Dim($_), 0..$t->NumDims-1 ) );
memcpy scalar_to_pointer( ${$pdl->get_dataref} ),
scalar_to_pointer( ${$t->Data} ),
$t->ByteSize;
$pdl->upd_data;
$pdl;
}
# Model handle
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubEnformerGeneExprPredModel.pod view on Meta::CPAN
say "Number of predictions: ", $model_central_base_pairs_length / $model_central_base_pair_window_size;
use Data::Frame;
my $df = Data::Frame->from_csv( $targets_path, sep => "\t" )
->transform({
file => sub {
my ($col, $df) = @_;
# clean up the paths in 'file' column
[map { join "/", (split('/', $_))[7..8] } $col->list];
}
});
say "Number of targets: ", $df->nrow;
say "";
say "First 5:";
say $df->head(5);
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubEnformerGeneExprPredModel.pod view on Meta::CPAN
sub FloatPDLTOTFTensor {
my ($p) = @_;
return AI::TensorFlow::Libtensorflow::Tensor->New(
FLOAT, [ reverse $p->dims ], $p->get_dataref, sub { undef $p }
);
}
sub FloatTFTensorToPDL {
my ($t) = @_;
my $pdl = zeros(float,reverse( map $t->Dim($_), 0..$t->NumDims-1 ) );
memcpy scalar_to_pointer( ${$pdl->get_dataref} ),
scalar_to_pointer( ${$t->Data} ),
$t->ByteSize;
$pdl->upd_data;
$pdl;
}
=head2 Download model and data
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubEnformerGeneExprPredModel.pod view on Meta::CPAN
and by looking at the targets file:
use Data::Frame;
my $df = Data::Frame->from_csv( $targets_path, sep => "\t" )
->transform({
file => sub {
my ($col, $df) = @_;
# clean up the paths in 'file' column
[map { join "/", (split('/', $_))[7..8] } $col->list];
}
});
say "Number of targets: ", $df->nrow;
say "";
say "First 5:";
say $df->head(5);
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
sub FloatPDLTOTFTensor {
my ($p) = @_;
return AI::TensorFlow::Libtensorflow::Tensor->New(
FLOAT, [ reverse $p->dims ], $p->get_dataref, sub { undef $p }
);
}
sub FloatTFTensorToPDL {
my ($t) = @_;
my $pdl = zeros(float,reverse( map $t->Dim($_), 0..$t->NumDims-1 ) );
memcpy scalar_to_pointer( ${$pdl->get_dataref} ),
scalar_to_pointer( ${$t->Data} ),
$t->ByteSize;
$pdl->upd_data;
$pdl;
}
use HTML::Tiny;
sub my_table {
my ($data, $cb) = @_;
my $h = HTML::Tiny->new;
$h->table( { style => 'width: 100%' },
[
$h->tr(
map {
[
$h->td( $cb->($_, $h) )
]
} @$data
)
]
)
}
sub show_in_gnuplot {
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
);
AssertOK($s);
my %ops = (
in => $graph->OperationByName('serving_default_inputs'),
out => $graph->OperationByName('StatefulPartitionedCall'),
);
die "Could not get all operations" unless List::Util::all(sub { defined }, values %ops);
my %outputs = map { $_ => [ AI::TensorFlow::Libtensorflow::Output->New( { oper => $ops{$_}, index => 0 } ) ] }
keys %ops;
p %outputs;
say "Input: " , $outputs{in}[0];
say "Output: ", $outputs{out}[0];
my %images_for_test_to_uri = (
"tiger" => "https://upload.wikimedia.org/wikipedia/commons/b/b0/Bengal_tiger_%28Panthera_tigris_tigris%29_female_3_crop.jpg",
#by Charles James Sharp, CC BY-SA 4.0 <https://creativecommons.org/licenses/by-sa/4.0>, via Wikimedia Commons
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
$padded->write( data => \$data, type => 'raw' )
or die "could not write ". $padded->errstr;
# $data is packed as PDL->dims == [w,h] with ARGB pixels
# $ PDL::howbig(ulong) # 4
my $pdl_raw = zeros(ulong, $padded->getwidth, $padded->getheight);
${ $pdl_raw->get_dataref } = $data;
$pdl_raw->upd_data;
# Split uint32_t pixels into first dimension with 3 channels (R,G,B) with values 0-255.
my @shifts = map 8*$_, 0..2;
my $pdl_channels = $pdl_raw->dummy(0)
->and2(ulong(map 0xFF << $_, @shifts)->slice(':,*,*') )
->shiftright( ulong(@shifts)->slice(':,*,*') )
->byte;
my $pdl_scaled = (
# Scale to [ 0, 1 ].
( $pdl_channels / float(255) )
);
## flip vertically to see image right way up
#show_in_gnuplot( $pdl_channels->slice(':,:,-1:0') ); #DEBUG
#show_in_gnuplot( $pdl_scaled->slice(':,:,-1:0') * 255.0 ); #DEBUG
$pdl_scaled;
}
my @pdl_images = map {
load_image_to_pdl(
$images_for_test_to_uri{$_},
$model_name_to_params{$model_name}{image_size}
);
} @image_names;
my $pdl_image_batched = cat(@pdl_images);
my $t = FloatPDLTOTFTensor($pdl_image_batched);
p $pdl_image_batched;
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
say "Warming up the model";
use PDL::GSL::RNG;
my $rng = PDL::GSL::RNG->new('default');
my $image_size = $model_name_to_params{$model_name}{image_size};
# One random batch-of-1 image: PDL dims are (channels, w, h, batch),
# i.e. the reverse of the TF NHWC tensor shape.
my $warmup_input = zeros(float, 3, @$image_size, 1 );
$rng->get_uniform($warmup_input);
# Run once on random data so later timings are not skewed by first-run setup.
p $RunSession->($session, FloatPDLTOTFTensor($warmup_input));
# Real inference on the batched image tensor $t built earlier.
my $output_pdl_batched = FloatTFTensorToPDL($RunSession->($session, $t));
# Softmax over dim 0 (the class logits): exp(z) / sum(exp(z)) per image.
my $softmax = sub { ( map $_/sumover($_)->dummy(0), exp($_[0]) )[0] };
my $probabilities_batched = $softmax->($output_pdl_batched);
p $probabilities_batched;
my $N = 5; # number to select
# qsorti gives ascending indices; take the last $N (highest probabilities).
my $top_batched = $probabilities_batched->qsorti->slice([-1, -$N]);
my @top_lists = dog($top_batched);
# Some models emit 1001 classes (extra "background" class at index 0).
my $includes_background_class = $probabilities_batched->dim(0) == IMAGENET_LABEL_COUNT_WITH_BG;
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
# Wrap a float PDL ndarray in a TF Tensor without copying the data.
# Dims are reversed because PDL and TF use opposite dimension ordering.
sub FloatPDLTOTFTensor {
    my ($pdl) = @_;
    my @tf_dims = reverse $pdl->dims;
    # The closure keeps $pdl alive until TF releases the buffer.
    my $deallocator = sub { undef $pdl };
    return AI::TensorFlow::Libtensorflow::Tensor->New(
        FLOAT, \@tf_dims, $pdl->get_dataref, $deallocator
    );
}
# Copy a float TF Tensor's contents into a newly allocated PDL ndarray.
# Dims are reversed to translate TF ordering back to PDL ordering.
sub FloatTFTensorToPDL {
    my ($tensor) = @_;
    my @dims = map { $tensor->Dim($_) } 0 .. $tensor->NumDims - 1;
    my $pdl  = zeros( float, reverse @dims );
    # Raw byte copy from the tensor buffer into the PDL's data area.
    memcpy scalar_to_pointer( ${ $pdl->get_dataref } ),
        scalar_to_pointer( ${ $tensor->Data } ),
        $tensor->ByteSize;
    # Tell PDL its underlying data was modified behind its back.
    $pdl->upd_data;
    return $pdl;
}
The following is just a small helper to generate an HTML C<<< <table> >>> for output in C<IPerl>.
use HTML::Tiny;
# Render @$data as a full-width HTML <table>: one row per element, with
# $cb->($element, $html_builder) producing each cell's content.
sub my_table {
    my ($data, $cb) = @_;
    my $h = HTML::Tiny->new;
    my @rows = map {
        [ $h->td( $cb->($_, $h) ) ]
    } @$data;
    return $h->table(
        { style => 'width: 100%' },
        [ $h->tr(@rows) ]
    );
}
This is a helper to display images in Gnuplot for debugging, but those debugging lines are commented out.
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
So let's use the names from the C<saved_model_cli> output to create our C<::Output> C<ArrayRef>s.
my %ops = (
in => $graph->OperationByName('serving_default_inputs'),
out => $graph->OperationByName('StatefulPartitionedCall'),
);
die "Could not get all operations" unless List::Util::all(sub { defined }, values %ops);
my %outputs = map { $_ => [ AI::TensorFlow::Libtensorflow::Output->New( { oper => $ops{$_}, index => 0 } ) ] }
keys %ops;
p %outputs;
say "Input: " , $outputs{in}[0];
say "Output: ", $outputs{out}[0];
B<STREAM (STDOUT)>:
Input: serving_default_inputs:0
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
$padded->write( data => \$data, type => 'raw' )
or die "could not write ". $padded->errstr;
# $data is packed as PDL->dims == [w,h] with ARGB pixels
# $ PDL::howbig(ulong) # 4
my $pdl_raw = zeros(ulong, $padded->getwidth, $padded->getheight);
${ $pdl_raw->get_dataref } = $data;
$pdl_raw->upd_data;
# Split uint32_t pixels into first dimension with 3 channels (R,G,B) with values 0-255.
my @shifts = map 8*$_, 0..2;
my $pdl_channels = $pdl_raw->dummy(0)
->and2(ulong(map 0xFF << $_, @shifts)->slice(':,*,*') )
->shiftright( ulong(@shifts)->slice(':,*,*') )
->byte;
my $pdl_scaled = (
# Scale to [ 0, 1 ].
( $pdl_channels / float(255) )
);
## flip vertically to see image right way up
#show_in_gnuplot( $pdl_channels->slice(':,:,-1:0') ); #DEBUG
#show_in_gnuplot( $pdl_scaled->slice(':,:,-1:0') * 255.0 ); #DEBUG
$pdl_scaled;
}
my @pdl_images = map {
load_image_to_pdl(
$images_for_test_to_uri{$_},
$model_name_to_params{$model_name}{image_size}
);
} @image_names;
my $pdl_image_batched = cat(@pdl_images);
my $t = FloatPDLTOTFTensor($pdl_image_batched);
p $pdl_image_batched;
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod view on Meta::CPAN
</span><span style="color: #6666cc;">NumDims </span><span style=""> </span><span style="color: #ff6633;">2</span><span style="">
</span><span style="color: #6666cc;">ElementCount </span><span style=""> </span><span style="color: #ff6633;">1001</span><span style="">
</span><span style="color: #33ccff;">}</span><span style="">
</span></code></pre></span>
Then we send the batched image data. The returned scores need to be normalised using the L<softmax function|https://en.wikipedia.org/wiki/Softmax_function> with the following formula (taken from Wikipedia):
$$ {\displaystyle \sigma (\mathbf {z} )_{i}={\frac {e^{z_{i}}}{\sum _{j=1}^{K}e^{z_{j}}}}\ \ {\text{ for }}i=1,\dotsc ,K{\text{ and }}\mathbf {z} =(z_{1},\dotsc ,z_{K})\in \mathbb {R} ^{K}.} $$
my $output_pdl_batched = FloatTFTensorToPDL($RunSession->($session, $t));
my $softmax = sub { ( map $_/sumover($_)->dummy(0), exp($_[0]) )[0] };
my $probabilities_batched = $softmax->($output_pdl_batched);
p $probabilities_batched;
B<STREAM (STDERR)>:
=for html <span style="display:inline-block;margin-left:1em;"><pre style="display: block"><code><span style="color: #cc66cc;">PDL</span><span style="color: #33ccff;"> {</span><span style="">
</span><span style="color: #6666cc;">Data </span><span style=""> : </span><span style="color: #669933;">too long to print</span><span style="">
</span><span style="color: #6666cc;">Type </span><span style=""> : </span><span style="color: #cc66cc;">float</span><span style="">
</span><span style="color: #6666cc;">Shape </span><span style=""> : </span><span style="color: #33ccff;">[</span><span style="color: #9999cc;">1001 12</span><span style="color: #33ccff;">]</span><span style="">
</span><span style="color: #6666cc;">Nelem </span><span style=""> : </span><span style="color: #dd6;">12012</span><span style="">
lib/AI/TensorFlow/Libtensorflow/Operation.pm view on Meta::CPAN
my $array = $class->_adef->create(0 + @_);
for my $idx (0..@_-1) {
next unless defined $_[$idx];
$array->[$idx]->p($ffi->cast('TF_Operation', 'opaque', $_[$idx]));
}
$array;
}
# Turn an FFI record array of opaque pointers back into an arrayref of
# TF_Operation objects.
sub _from_array {
    my ($class, $array) = @_;
    my @opers;
    for my $i ( 0 .. $array->count - 1 ) {
        push @opers, $ffi->cast( 'opaque', 'TF_Operation', $array->[$i]->p );
    }
    return \@opers;
}
# TF_OperationName: the operation's unique name within its graph.
$ffi->attach( [ 'OperationName' => 'Name' ], [
arg 'TF_Operation' => 'oper',
] => 'string');
$ffi->attach( [ 'OperationOpType' => 'OpType' ], [
lib/AI/TensorFlow/Libtensorflow/Output.pm view on Meta::CPAN
my $output = $class->_adef->create(0 + @_);
for my $idx (0..@_-1) {
next unless defined $_[$idx];
$class->_copy_to_other( $_[$idx], $output->[$idx] );
}
$output;
}
# Convert an FFI record array into an arrayref of ::Output records, copying
# each element's fields into a freshly constructed record.
sub _from_array {
    my ($class, $array) = @_;
    my @records;
    for my $i ( 0 .. $array->count - 1 ) {
        my $out = $class->new;
        $class->_copy_to_other( $array->[$i], $out );
        push @records, $out;
    }
    return \@records;
}
sub _copy_to_other {
my ($class, $this, $that) = @_;
$that->_oper ($this->_oper);
$that->_index($this->_index);
lib/AI/TensorFlow/Libtensorflow/Status.pm view on Meta::CPAN
[ INTERNAL => 13 ],
[ UNAVAILABLE => 14 ],
[ DATA_LOSS => 15 ],
);
# Register TF_Code as an FFI enum (name <-> int both ways via rev => 'int').
$ffi->load_custom_type('::Enum', 'TF_Code',
{ rev => 'int', package => __PACKAGE__ },
@_TF_CODE
);
# Reverse lookup table: status-code integer -> symbolic name.
my %_TF_CODE_INT_TO_NAME = map { reverse @$_ } @_TF_CODE;
#}}}
# Constructor / destructor and mutators for TF_Status.
$ffi->attach( [ 'NewStatus' => 'New' ] => [] => 'TF_Status' );
$ffi->attach( [ 'DeleteStatus' => 'DESTROY' ] => [ 'TF_Status' ], 'void' );
$ffi->attach( 'SetStatus' => [ 'TF_Status', 'TF_Code', 'string' ], 'void' );
$ffi->attach( 'SetPayload' => [ 'TF_Status', 'string', 'string' ], 'void' );
lib/AI/TensorFlow/Libtensorflow/Tensor.pm view on Meta::CPAN
my $array = $class->_adef->create(0 + @_);
for my $idx (0..@_-1) {
next unless defined $_[$idx];
$array->[$idx]->p($ffi->cast('TF_Tensor', 'opaque', $_[$idx]));
}
$array;
}
# Map an FFI record array of opaque pointers to an arrayref of TF_Tensor
# objects.
sub _from_array {
    my ($class, $array) = @_;
    my @tensors;
    for my $i ( 0 .. $array->count - 1 ) {
        push @tensors,
            $ffi->cast( 'opaque', 'TF_Tensor', $array->[$i]->p );
    }
    return \@tensors;
}
#### Data::Printer ####
sub _data_printer {
my ($self, $ddp) = @_;
my @data = (
[ Type => $ddp->maybe_colorize( $self->Type, 'class' ), ],
[ Dims => sprintf "%s %s %s",
$ddp->maybe_colorize('[', 'brackets'),
join(" ",
map $ddp->maybe_colorize( $self->Dim($_), 'number' ),
0..$self->NumDims-1),
$ddp->maybe_colorize(']', 'brackets'),
],
[ NumDims => $ddp->maybe_colorize( $self->NumDims, 'number' ), ],
[ ElementCount => $ddp->maybe_colorize( $self->ElementCount, 'number' ), ],
);
my $output;
$output .= $ddp->maybe_colorize(ref $self, 'class' );
maint/inc/Pod/Elemental/Transformer/TF_CAPI.pm view on Meta::CPAN
# Expand a comma-separated list of TensorFlow C API identifiers into a POD
# paragraph of links pointing at the CAPI manual. Dies on anything that does
# not look like a TF_/TFE_ identifier.
sub _expand_capi {
    my ($self, $parent) = @_;
    my $doc_name = 'AI::TensorFlow::Libtensorflow::Manual::CAPI';
    my @ids = split /,\s*/, $parent->content;
    my @links = map {
        die "$_ does not look like a TensorFlow identifier" unless /^TF[E]?_\w+$/;
        "L<< C<$_>|$doc_name/$_ >>"
    } @ids;
    my $new_content = "B<C API>: " . join( ", ", @links );
    return (
        Pod::Elemental::Element::Pod5::Ordinary->new( content => $new_content ),
    );
}
maint/inc/Pod/Elemental/Transformer/TF_Sig.pm view on Meta::CPAN
my @replacements;
if( $is_list_type ) {
@replacements = $orig->($self, $para);
} else {
undef $prefix;
push @replacements, Pod::Elemental::Element::Pod5::Ordinary
->new( { content => do { my $v = <<EOF; chomp $v; $v } });
=over 2
C<<<
@{[ join("\n", map { $_->content } $para->children->@*) ]}
>>>
=back
EOF
}
unshift @replacements, $prefix if defined $prefix;
@replacements;
};
maint/process-capi.pl view on Meta::CPAN
isa => Path,
coerce => 1,
);
# Directory holding the TensorFlow C API headers: <root>/tensorflow/c.
lazy capi_path => method() {
$self->root_path->child(qw(tensorflow c));
};
# Every *.h file under capi_path, each as a Path::Tiny object.
lazy header_paths => method() {
[ map path($_), File::Find::Rule->file
->name('*.h')
->in( $self->capi_path ) ];
};
lazy header_order => method() {
my @order = (
qr{/c/c_api.h$},
qr{/c/tf_[^.]+\.h$},
qr{/c/(ops|env|logging)\.h},
qr{kernels},
maint/process-capi.pl view on Meta::CPAN
}
lazy fdecl_data => method() {
my $re = $self->fdecl_re;
my $data = $self->_process_re($re);
# Used for defensive assertion:
# These are mostly constructors that return a value
# (i.e., not void) but also take a function pointer as a
# parameter.
my %TF_func_ptr = map { ($_ => 1) } qw(
TF_NewTensor
TF_StartThread
TF_NewKernelBuilder
TFE_NewTensorHandleFromDeviceMemory
);
for my $data (@$data) {
my ($func_name) = $data->{fdecl} =~ m/ \A [^(]*? (\w+) \s* \( (?!\s*\*) /xs;
die "Could not extract function name" unless $func_name;
# defensive assertion for parsing
maint/process-capi.pl view on Meta::CPAN
\} \s+
\k<name> \s*
;
)
}xm;
$self->_process_re($re);
};
# Partition the typedef'd struct names into those already registered with the
# FFI ('done') and those not yet mapped ('todo'), then dump the result.
method check_types() {
    my %known = map { ($_ => 1) } AI::TensorFlow::Libtensorflow::Lib->ffi->types;
    my @struct_names = uniq map { $_->{name} } $self->typedef_struct_data->@*;
    my %part;
    @part{qw(todo done)} = part { exists $known{$_} } @struct_names;
    use DDP; p %part;
}
method check_functions($first_arg = undef) {
my $functions = AI::TensorFlow::Libtensorflow::Lib->ffi->_attached_functions;
my @dupes = map { $_->[0]{c} }
grep { @$_ != 1 } values $functions->%*;
die "Duplicated functions @dupes" if @dupes;
my @data = $self->fdecl_data->@*;
say <<~STATS;
Statistics:
==========
Attached functions: @{[ scalar keys %$functions ]}
Total CAPI functions: @{[ scalar @data ]}
t/05_session_run.t view on Meta::CPAN
[$input_op ], [$input_tensor],
[$output_op], \@output_values,
undef,
undef,
$status
);
die "run failed" unless $status->GetCode == AI::TensorFlow::Libtensorflow::Status::OK;
my $output_tensor = $output_values[0];
my $output_pdl = zeros(float,( map $output_tensor->Dim($_), 0..$output_tensor->NumDims-1) );
memcpy scalar_to_pointer( ${$output_pdl->get_dataref} ),
scalar_to_pointer( ${$output_tensor->Data} ),
$output_tensor->ByteSize;
$output_pdl->upd_data;
my $expected_pdl = float( -0.409784, -0.302862, 0.0152587, 0.690515 )->transpose;
ok approx( $output_pdl, $expected_pdl )->all, 'got expected data';
t/lib/TF_Utils.pm view on Meta::CPAN
my $TFOutput = TFOutput->plus_constructors(
HashRef, 'New'
)->plus_coercions(TFOutputFromTuple);
# Build an AddN operation named $name (default 'add') that sums the first
# outputs of operations $l and $r in $graph. Asserts $s is OK unless $check
# is passed as false. Returns the finished operation.
sub Add {
    my ($l, $r, $graph, $s, $name, $check) = @_;
    $name ||= 'add';
    $check //= 1;
    my $desc = AI::TensorFlow::Libtensorflow::OperationDescription->New(
        $graph, "AddN", $name );
    $desc->AddInputList( [ $TFOutput->map( [ $l => 0 ], [ $r => 0 ] ) ] );
    my $oper = $desc->FinishOperation($s);
    AssertStatusOK($s) if $check;
    return $oper;
}
# Same as Add(), but skips the status assertion (for tests expecting errors).
sub AddNoCheck {
    my ($l, $r, $graph, $s, $name) = @_;
    return Add( $l, $r, $graph, $s, $name, 0 );
}
t/lib/TF_Utils.pm view on Meta::CPAN
my $s = AI::TensorFlow::Libtensorflow::Status->New;
my $graph;
if( ! $session ) {
my $opts = AI::TensorFlow::Libtensorflow::SessionOptions->New;
$graph = AI::TensorFlow::Libtensorflow::Graph->New;
$session ||= AI::TensorFlow::Libtensorflow::Session->New($graph, $opts, $s);
}
my $device_list = $session->ListDevices($s);
my @devices = map {
my $idx = $_;
my %h = map { ( $_ => $device_list->$_( $idx, $s ) ) } qw(Name Type MemoryBytes Incarnation);
\%h;
} 0..$device_list->Count - 1;
use Data::Dumper; print Dumper(\@devices);
}
1;
t/upstream/CAPI/018_ImportGraphDef.t view on Meta::CPAN
};
is $scalar, $empty_control_inputs, 'scalar control inputs';
is $scalar, $empty_control_outputs, 'scalar control outputs';
is $feed, $empty_control_inputs, 'feed control inputs';
is $feed, $empty_control_outputs, 'feed control outputs';
is $neg, $empty_control_inputs, 'neg control inputs';
is $neg, $empty_control_outputs, 'neg control outputs';
note q|Import it again, with an input mapping, return outputs, and a return
operation, into the same graph.|;
undef $opts;
$opts = AI::TensorFlow::Libtensorflow::ImportGraphDefOptions->New;
$opts->SetPrefix('imported2');
$opts->AddInputMapping( 'scalar', 0, $TFOutput->coerce([$scalar=>0]));
$opts->AddReturnOutput('feed', 0);
$opts->AddReturnOutput('scalar', 0);
is $opts->NumReturnOutputs, 2, 'num return outputs';
$opts->AddReturnOperation('scalar');
is $opts->NumReturnOperations, 1, 'num return operations';
my $results = $graph->ImportGraphDefWithResults( $graph_def, $opts, $s );
TF_Utils::AssertStatusOK($s);
ok my $scalar2 = $graph->OperationByName("imported2/scalar"), "imported2/scalar";
ok my $feed2 = $graph->OperationByName("imported2/feed"), "imported2/feed";
ok my $neg2 = $graph->OperationByName("imported2/neg"), "imported2/neg";
note 'Check input mapping';
$neg_input = $neg->Input( $TFInput->coerce( [$neg => 0 ]) );
is $neg_input, object {
call sub { shift->oper->Name } => $scalar->Name;
call index => 0;
}, 'neg input';
note 'Check return outputs';
my $return_outputs = $results->ReturnOutputs;
is $return_outputs, array {
item 0 => object {
call sub { shift->oper->Name } => $feed2->Name;
call index => 0;
};
item 1 => object {
# remapped
call sub { shift->oper->Name } => $scalar->Name;
call index => 0;
};
end;
}, 'return outputs';
note 'Check return operation';
my $return_opers = $results->ReturnOperations;
is $return_opers, array {
item 0 => object {
# not remapped
call Name => $scalar2->Name;
};
end;
}, 'return opers';
undef $results;
note 'Import again, with control dependencies, into the same graph.';
undef $opts;
$opts = AI::TensorFlow::Libtensorflow::ImportGraphDefOptions->New;
t/upstream/CAPI/018_ImportGraphDef.t view on Meta::CPAN
item 1 => object { call Name => $feed2->Name };
end;
}, 'feed3 control inputs';
note 'Export to a graph def so we can import a graph with control dependencies';
undef $graph_def;
$graph_def = AI::TensorFlow::Libtensorflow::Buffer->New;
$graph->ToGraphDef( $graph_def, $s );
TF_Utils::AssertStatusOK($s);
note 'Import again, with remapped control dependency, into the same graph';
undef $opts;
$opts = AI::TensorFlow::Libtensorflow::ImportGraphDefOptions->New;
$opts->SetPrefix("imported4");
$opts->RemapControlDependency("imported/feed", $feed );
$graph->ImportGraphDef($graph_def, $opts, $s);
TF_Utils::AssertStatusOK($s);
ok my $scalar4 = $graph->OperationByName("imported4/imported3/scalar"),
"imported4/imported3/scalar";
ok my $feed4 = $graph->OperationByName("imported4/imported2/feed"),
"imported4/imported2/feed";
note q|Check that imported `imported3/scalar` has remapped control dep from
original graph and imported control dep|;
is $scalar4->GetControlInputs, array {
item object { call Name => $feed->Name };
item object { call Name => $feed4->Name };
end;
}, 'scalar4 control inputs';
undef $opts;
undef $graph_def;
t/upstream/CAPI/019_ImportGraphDef_WithReturnOutputs.t view on Meta::CPAN
my $opts = AI::TensorFlow::Libtensorflow::ImportGraphDefOptions->New;
$opts->AddReturnOutput('feed', 0);
$opts->AddReturnOutput('scalar', 0);
is $opts->NumReturnOutputs, 2, '2 return outputs';
my $return_outputs = $graph->ImportGraphDefWithReturnOutputs(
$graph_def, $opts, $s
);
TF_Utils::AssertStatusOK($s);
is [
my ($scalar, $feed, $neg) = map $graph->OperationByName($_),
qw(scalar feed neg)
], array {
item D() for 0..2;
end;
}, 'get operations';
note 'Check return outputs';
is $return_outputs, array {
item 0 => object {
call sub { shift->oper->Name } => $feed->Name;
t/upstream/CAPI/020_ImportGraphDef_MissingUnusedInputMappings.t view on Meta::CPAN
note 'Import it in a fresh graph.';
undef $graph;
$graph = AI::TensorFlow::Libtensorflow::Graph->New;
my $opts = AI::TensorFlow::Libtensorflow::ImportGraphDefOptions->New;
$graph->ImportGraphDef($graph_def, $opts, $s);
TF_Utils::AssertStatusOK($s);
my $scalar = $graph->OperationByName('scalar');
note 'Import it in a fresh graph with an unused input mapping.';
undef $opts;
$opts = AI::TensorFlow::Libtensorflow::ImportGraphDefOptions->New;
$opts->SetPrefix("imported");
$opts->AddInputMapping("scalar", 0, $TFOutput->coerce([$scalar, 0]));
$opts->AddInputMapping("fake", 0, $TFOutput->coerce([$scalar, 0]));
my $results = $graph->ImportGraphDefWithResults($graph_def, $opts, $s);
TF_Utils::AssertStatusOK($s);
note 'Check unused input mappings';
is my $srcs = $results->MissingUnusedInputMappings, array {
item [ 'fake', 0 ];
end;
}, 'missing unused input mappings';
};
done_testing;
t/upstream/CAPI/026_SessionPRun.t view on Meta::CPAN
my $plusB = TF_Utils::Add($plus2, $op_b, $graph, $s, "plusB");
TF_Utils::AssertStatusOK($s);
note q{Setup a session and a partial run handle. The partial run will allow
computation of A + 2 + B in two phases (calls to TF_SessionPRun):
1. Feed A and get (A+2)
2. Feed B and get (A+2)+B};
my $opts = AI::TensorFlow::Libtensorflow::SessionOptions->New;
my $sess = AI::TensorFlow::Libtensorflow::Session->New($graph, $opts, $s);
my @feeds = $TFOutput->map([ $op_a => 0 ], [$op_b => 0]);
my @fetches = $TFOutput->map([$plus2 => 0], [$plusB => 0]);
my $handle = $sess->PRunSetup( \@feeds, \@fetches, undef, $s);
note 'Feed A and fetch A + 2.';
my @feeds1 = $TFOutput->map( [$op_a => 0] );
my @fetches1 = $TFOutput->map( [$plus2 => 0] );
my @feedValues1 = ( TF_Utils::Int32Tensor(1) );
my @fetchValues1;
$sess->PRun( $handle,
\@feeds1, \@feedValues1,
\@fetches1, \@fetchValues1,
undef,
$s );
TF_Utils::AssertStatusOK($s);
is unpack("l", ${ $fetchValues1[0]->Data }), 3,
'(A := 1) + Const(2) = 3';
undef @feedValues1;
undef @fetchValues1;
note 'Feed B and fetch (A + 2) + B.';
my @feeds2 = $TFOutput->map( [$op_b => 0] );
my @fetches2 = $TFOutput->map( [$plusB => 0] );
my @feedValues2 = ( TF_Utils::Int32Tensor(4) );
my @fetchValues2;
$sess->PRun($handle,
\@feeds2, \@feedValues2,
\@fetches2, \@fetchValues2,
undef,
$s);
TF_Utils::AssertStatusOK($s);
is unpack("l", ${ $fetchValues2[0]->Data }), 7,
'( (A := 1) + Const(2) ) + ( B := 4 ) = 7';