AI-TensorFlow-Libtensorflow
view release on metacpan or search on metacpan
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
memcpy scalar_to_pointer( ${$pdl->get_dataref} ),
scalar_to_pointer( ${$t->Data} ),
$t->ByteSize;
$pdl->upd_data;
$pdl;
}
# image_size => [width, height] (but usually square images)
# Map of supported model names to their TensorFlow Hub handle and the
# input image size the model expects.
my %model_name_to_params = (
centernet_hourglass_512x512 => {
handle => 'https://tfhub.dev/tensorflow/centernet/hourglass_512x512/1',
image_size => [ 512, 512 ],
},
);
# Select which model to use and report its TF Hub handle.
my $model_name = 'centernet_hourglass_512x512';
say "Selected model: $model_name : $model_name_to_params{$model_name}{handle}";
# Ask TF Hub for the compressed (.tar.gz) form of the model by adding
# the tf-hub-format query parameter.
my $model_uri = URI->new( $model_name_to_params{$model_name}{handle} );
$model_uri->query_form( 'tf-hub-format' => 'compressed' );
# Derive a filesystem-friendly base name from the URI path: drop the
# leading '/' and replace the remaining '/' with '_'.
my $model_base = substr( $model_uri->path, 1 ) =~ s,/,_,gr;
my $model_archive_path = "${model_base}.tar.gz";
my $http = HTTP::Tiny->new;
# Download each (URI, local path) pair unless the file already exists.
for my $download ( [ $model_uri => $model_archive_path ], ) {
    my ($uri, $path) = @$download;
    # Check for an existing file BEFORE announcing the download, so we
    # do not print a misleading "Downloading" message for skipped files.
    if( -e $path ) {
        say "Downloading $uri to $path";
        next;
    }
    say "Downloading $uri to $path";
    # mirror() returns an HTTP::Tiny response hash; a failed download
    # must be fatal, otherwise extraction below works on a missing or
    # truncated archive.
    my $dl_response = $http->mirror( $uri, $path );
    die "Could not download $uri: $dl_response->{status} $dl_response->{reason}"
        unless $dl_response->{success};
}
use Archive::Extract;
# Extract the downloaded archive into a directory named after the model.
my $ae = Archive::Extract->new( archive => $model_archive_path );
# Include the extractor's own diagnostic ($ae->error) so a failure is
# actionable instead of a bare "Could not extract archive".
$ae->extract( to => $model_base )
    or die "Could not extract archive $model_archive_path: " . ($ae->error // 'unknown error');
# Sanity check: a SavedModel directory must contain saved_model.pb.
my $saved_model = path($model_base)->child('saved_model.pb');
say "Saved model is in $saved_model" if -f $saved_model;
# Get the labels
my $response = $http->get('https://raw.githubusercontent.com/tensorflow/models/a4944a57ad2811e1f6a7a87589a9fc8a776e8d3c/object_detection/data/mscoco_label_map.pbtxt');
# A failed fetch would otherwise silently yield an empty label map.
die "Could not fetch label map: $response->{status} $response->{reason}"
    unless $response->{success};
# Parse the pbtxt records into ( id => display_name ) pairs. With /g in
# list context each match contributes one (id, display_name) capture pair.
my %labels_map = $response->{content} =~ m<
    (?:item \s+ \{ \s+
        \Qname:\E \s+ "[^"]+" \s+
        \Qid:\E \s+ (\d+) \s+
        \Qdisplay_name:\E \s+ "([^"]+)" \s+
    })+
>sgx;
# NOTE(review): this is the maximum id, not the number of entries; COCO
# ids are sparse, so this can exceed the actual label count.
my $label_count = List::Util::max keys %labels_map;
# Sort ids numerically — a plain string sort would order "10" before "2",
# so the "first 5" labels shown would be ids 1, 10, 11, 12, 13.
say "We have a label count of $label_count. These labels include: ",
    join ", ", List::Util::head( 5, @labels_map{ sort { $a <=> $b } keys %labels_map } );
# The 'serve' tag selects the serving MetaGraph inside the SavedModel.
my @tags = ( 'serve' );
if( File::Which::which('saved_model_cli')) {
    # Quiet the TensorFlow C++ logger while we shell out.
    local $ENV{TF_CPP_MIN_LOG_LEVEL} = 3;
    # Build the command as a list so no shell is involved.
    my @cli_command = (
        'saved_model_cli', 'show',
        '--dir'           => $model_base,
        '--tag_set'       => join(',', @tags),
        '--signature_def' => 'serving_default',
    );
    system(@cli_command) == 0
        or die "Could not run saved_model_cli";
} else {
    say "Install the tensorflow Python package to get the `saved_model_cli` command.";
}
# Load the SavedModel: LoadFromSavedModel populates $graph and returns a
# session bound to it. $s is presumably a ::Status created earlier in the
# document — not visible in this chunk; AssertOK($s) dies on a bad status.
my $opt = AI::TensorFlow::Libtensorflow::SessionOptions->New;
my $graph = AI::TensorFlow::Libtensorflow::Graph->New;
my $session = AI::TensorFlow::Libtensorflow::Session->LoadFromSavedModel(
$opt, undef, $model_base, \@tags, $graph, undef, $s
);
AssertOK($s);
# Operation names and output-port indices taken from the saved_model_cli
# output above: one input port, four output ports on the same operation.
my %ops = (
in => {
op => $graph->OperationByName('serving_default_input_tensor'),
dict => {
input_tensor => 0,
}
},
out => {
op => $graph->OperationByName('StatefulPartitionedCall'),
dict => {
detection_boxes => 0,
detection_classes => 1,
detection_scores => 2,
num_detections => 3,
}
},
);
# Build %outputs: for each direction ('in'/'out') map every named port to
# an ::Output handle pointing at (operation, port index).
my %outputs;
for my $direction ( keys %ops ) {
    my $operation = $ops{$direction}{op};
    my $ports     = $ops{$direction}{dict};
    $outputs{$direction} = {};
    for my $port_name ( keys %$ports ) {
        $outputs{$direction}{$port_name} =
            AI::TensorFlow::Libtensorflow::Output->New( {
                oper  => $operation,
                index => $ports->{$port_name},
            } );
    }
}
p %outputs;
use HTML::Tiny;
my %images_for_test_to_uri = (
"beach_scene" => 'https://github.com/tensorflow/models/blob/master/research/object_detection/test_images/image2.jpg?raw=true',
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
centernet_hourglass_512x512 => {
handle => 'https://tfhub.dev/tensorflow/centernet/hourglass_512x512/1',
image_size => [ 512, 512 ],
},
);
my $model_name = 'centernet_hourglass_512x512';
say "Selected model: $model_name : $model_name_to_params{$model_name}{handle}";
We download the model to the current directory and then extract the model to a folder with the name given in C<$model_base>.
# Request the compressed (.tar.gz) form of the model from TF Hub.
my $model_uri = URI->new( $model_name_to_params{$model_name}{handle} );
$model_uri->query_form( 'tf-hub-format' => 'compressed' );
# Filesystem-friendly base name: strip the leading '/' from the URI path,
# then turn the remaining '/' into '_' (non-destructive /r substitution).
my $model_base = substr( $model_uri->path, 1 ) =~ s{/}{_}gr;
my $model_archive_path = "${model_base}.tar.gz";
my $http = HTTP::Tiny->new;
# Download each (URI, local path) pair unless the file already exists.
for my $download ( [ $model_uri => $model_archive_path ], ) {
    my ($uri, $path) = @$download;
    # Check for an existing file BEFORE announcing the download, so we
    # do not print a misleading "Downloading" message for skipped files.
    next if -e $path;
    say "Downloading $uri to $path";
    # mirror() returns an HTTP::Tiny response hash; a failed download
    # must be fatal, otherwise extraction below works on a missing or
    # truncated archive.
    my $dl_response = $http->mirror( $uri, $path );
    die "Could not download $uri: $dl_response->{status} $dl_response->{reason}"
        unless $dl_response->{success};
}
use Archive::Extract;
# Extract the downloaded archive into a directory named after the model.
my $ae = Archive::Extract->new( archive => $model_archive_path );
# Include the extractor's own diagnostic ($ae->error) so a failure is
# actionable instead of a bare "Could not extract archive".
$ae->extract( to => $model_base )
    or die "Could not extract archive $model_archive_path: " . ($ae->error // 'unknown error');
# Sanity check: a SavedModel directory must contain saved_model.pb.
my $saved_model = path($model_base)->child('saved_model.pb');
say "Saved model is in $saved_model" if -f $saved_model;
We need to download the COCO 2017 classification labels and parse out the mapping from the numeric index to the textual descriptions.
# Get the labels
my $response = $http->get('https://raw.githubusercontent.com/tensorflow/models/a4944a57ad2811e1f6a7a87589a9fc8a776e8d3c/object_detection/data/mscoco_label_map.pbtxt');
# A failed fetch would otherwise silently yield an empty label map.
die "Could not fetch label map: $response->{status} $response->{reason}"
    unless $response->{success};
# Parse the pbtxt records into ( id => display_name ) pairs. With /g in
# list context each match contributes one (id, display_name) capture pair.
my %labels_map = $response->{content} =~ m<
    (?:item \s+ \{ \s+
        \Qname:\E \s+ "[^"]+" \s+
        \Qid:\E \s+ (\d+) \s+
        \Qdisplay_name:\E \s+ "([^"]+)" \s+
    })+
>sgx;
# NOTE(review): this is the maximum id, not the number of entries; COCO
# ids are sparse, so this can exceed the actual label count.
my $label_count = List::Util::max keys %labels_map;
# Sort ids numerically — a plain string sort would order "10" before "2",
# so the "first 5" labels shown would be ids 1, 10, 11, 12, 13.
say "We have a label count of $label_count. These labels include: ",
    join ", ", List::Util::head( 5, @labels_map{ sort { $a <=> $b } keys %labels_map } );
=head2 Load the model and session
We define the tag set C<[ 'serve' ]> which we will use to load the model.
my @tags = ( 'serve' );
We can examine what computations are contained in the graph in terms of the names of the inputs and outputs of an operation found in the graph by running C<saved_model_cli>.
if( File::Which::which('saved_model_cli')) {
    # Quiet the TensorFlow C++ logger while we shell out.
    local $ENV{TF_CPP_MIN_LOG_LEVEL} = 3;
    # Build the command as a list so no shell is involved.
    my @cli_command = (
        'saved_model_cli', 'show',
        '--dir'           => $model_base,
        '--tag_set'       => join(',', @tags),
        '--signature_def' => 'serving_default',
    );
    system(@cli_command) == 0
        or die "Could not run saved_model_cli";
} else {
    say "Install the tensorflow Python package to get the `saved_model_cli` command.";
}
The above C<saved_model_cli> output shows that the model input is at C<serving_default_input_tensor:0> which means the operation named C<serving_default_input_tensor> at index C<0> and there are multiple outputs with different shapes.
Per the L<model description|https://tfhub.dev/tensorflow/centernet/hourglass_512x512/1> on TensorFlow Hub:
=over 2
B<Inputs>
A three-channel image of variable size - the model does NOT support batching. The input tensor is a C<tf.uint8> tensor with shape [1, height, width, 3] with values in [0, 255].
B<Outputs>
The output dictionary contains:
=over
=item -
C<num_detections>: a C<tf.int> tensor with only one value, the number of detections [N].
=item -
C<detection_boxes>: a C<tf.float32> tensor of shape [N, 4] containing bounding box coordinates in the following order: [ymin, xmin, ymax, xmax].
=item -
C<detection_classes>: a C<tf.int> tensor of shape [N] containing detection class index from the label file.
=item -
C<detection_scores>: a C<tf.float32> tensor of shape [N] containing detection scores.
=back
=back
Note that the above documentation has two errors: both C<num_detections> and C<detection_classes> are not of type C<tf.int>, but are actually C<tf.float32>.
Now we can load the model from that folder with the tag set C<[ 'serve' ]> by using the C<LoadFromSavedModel> constructor to create a C<::Graph> and a C<::Session> for that graph.
# Load the SavedModel: LoadFromSavedModel populates $graph and returns a
# session bound to it. $s is presumably a ::Status created earlier in the
# document — not visible in this chunk; AssertOK($s) dies on a bad status.
my $opt = AI::TensorFlow::Libtensorflow::SessionOptions->New;
my $graph = AI::TensorFlow::Libtensorflow::Graph->New;
my $session = AI::TensorFlow::Libtensorflow::Session->LoadFromSavedModel(
$opt, undef, $model_base, \@tags, $graph, undef, $s
);
AssertOK($s);
So let's use the names from the C<saved_model_cli> output to create our C<::Output> C<ArrayRef>s.
my %ops = (
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod view on Meta::CPAN
offset => 'character 0,-0.25',
qq{font ",12" boxed front tc rgb "#ffffff"} ], ],
)
} 0..$subset{detection_boxes}->dim(1)-1
);
# Render the image with the detection-box overlays prepared above
# (the label/box setup lines are cut off by the page break in this view).
$gp->plot(
topcmds => q{set style textbox opaque fc "#505050f0" noborder},
square => 1,
yrange => [$pdl_images[0]->dim(2),0],
with => 'image', $pdl_images[0],
);
$gp->close;
# When running inside IPerl, embed the rendered PNG in the notebook output.
IPerl->png( bytestream => path($plot_output_path)->slurp_raw ) if IN_IPERL;
=head1 RESOURCE USAGE
use Filesys::DiskUsage qw/du/;
# Report the combined on-disk footprint of the downloaded archive and the
# extracted model directory (dereference follows symlinks).
my $total = du( { 'human-readable' => 1, dereference => 1 },
$model_archive_path, $model_base );
# Trailing undef suppresses the notebook cell's return-value display.
say "Disk space usage: $total"; undef;
=head1 CPANFILE
requires 'AI::TensorFlow::Libtensorflow';
requires 'AI::TensorFlow::Libtensorflow::DataType';
requires 'Archive::Extract';
requires 'Data::Printer';
requires 'Data::Printer::Filter::PDL';
requires 'FFI::Platypus::Buffer';
requires 'FFI::Platypus::Memory';
requires 'File::Which';
requires 'Filesys::DiskUsage';
requires 'HTML::Tiny';
requires 'HTTP::Tiny';
requires 'Imager';
requires 'List::Util', '1.56';
requires 'PDL';
requires 'PDL::Graphics::Gnuplot';
requires 'Path::Tiny';
requires 'Syntax::Construct';
requires 'Text::Table::Tiny';
requires 'URI';
requires 'constant';
requires 'feature';
requires 'lib::projectroot';
requires 'strict';
requires 'utf8';
requires 'warnings';
=head1 AUTHOR
Zakariyya Mughal <zmughal@cpan.org>
=head1 COPYRIGHT AND LICENSE
This software is Copyright (c) 2022-2023 by Auto-Parallel Technologies, Inc.
This is free software, licensed under:
The Apache License, Version 2.0, January 2004
=cut
( run in 0.772 second using v1.01-cache-2.11-cpan-39bf76dae61 )