AI-PredictionClient
view release on metacpan or search on metacpan
bin/Inception.pl view on Meta::CPAN
is => 'ro',
required => 0,
doc => "Test loopback through dummy server"
);
# Command-line switch (MooX::Options). When set, read_image() below
# short-circuits and the bundled camel test image is used instead of the
# caller-supplied image file.
option debug_camel => (
is => 'ro',
required => 0,
doc => "Test using camel image"
);
sub run {
my ($self) = @_;
my $image_ref = $self->read_image($self->image_file);
my $client = AI::PredictionClient::InceptionClient->new(
host => $self->host,
port => $self->port
);
$client->model_name($self->model_name);
bin/Inception.pl view on Meta::CPAN
print $results_text;
} else {
printf("Failed. Status: %s, Status Code: %s, Status Message: %s \n",
$client->status, $client->status_code, $client->status_message);
return 1;
}
return 0;
}
sub read_image {
my $self = shift;
return \'' if $self->debug_camel;
my $file_name = shift;
my $max_file_size = 16 * 1000 * 1000; # A large but safe maximum
open(my $fh, '<:raw', $file_name)
or die "Could not open file: $file_name";
# cpanfile fragment. Run-time prerequisites; version "0" means any release.
requires "JSON" => "0";
requires "MIME::Base64" => "0";
requires "Moo" => "0";
requires "Moo::Role" => "0";
requires "MooX::Options" => "0";
requires "Perl6::Form" => "0";
requires "perl" => "5.01";
requires "strict" => "0";
requires "warnings" => "0";
# Needed only when running the test suite.
on 'test' => sub {
requires "Test::More" => "0";
};
# Needed at configure/build time (Alien + Inline toolchain modules).
on 'configure' => sub {
requires "AI::PredictionClient::Alien::TensorFlowServingProtos" => "0.05";
requires "Alien::Google::GRPC" => "0.06";
requires "ExtUtils::MakeMaker" => "0";
requires "Inline" => "0";
requires "Inline::CPP" => "0";
requires "Inline::MakeMaker" => "0";
};
# Author/development-only prerequisites (release QA tests).
on 'develop' => sub {
requires "Test::MinimumVersion" => "0";
requires "Test::Perl::Critic" => "0";
requires "Test::Pod" => "1.41";
requires "Test::Spelling" => "0.12";
};
lib/AI/PredictionClient/Classes/SimpleTensor.pm view on Meta::CPAN
use warnings;
package AI::PredictionClient::Classes::SimpleTensor;
$AI::PredictionClient::Classes::SimpleTensor::VERSION = '0.05';
# ABSTRACT: A simplified version of the TensorFlow Tensor proto.
use 5.010;
use MIME::Base64 qw( encode_base64 decode_base64 );
use Moo;
# Constructor sugar: a single positional argument is taken as the tensor
# data structure, i.e. ->new($href) behaves as ->new(tensor_ds => $href).
# Any other argument list is forwarded to the default constructor as-is.
around BUILDARGS => sub {
my $orig  = shift;
my $class = shift;
return $class->$orig(@_) unless @_ == 1;
return $class->$orig(tensor_ds => $_[0]);
};
# Underlying tensor data structure: a plain hashref mirroring the JSON
# form of a TensorFlow TensorProto. Defaults to a one-element DT_STRING
# tensor holding a single empty string.
has tensor_ds => (
is => 'ro',
default => sub {
{
dtype => "DT_STRING",
tensorShape => { dim => [ { size => 1 } ] },
stringVal => [""] };
},
);
# Combined getter/setter for the tensor's dimension list
# (tensor_ds->{tensorShape}{dim}, an arrayref of { size => N } hashes).
# Passing a true arrayref replaces the dims; the current dims are returned.
sub shape {
my ($self, $new_dims_aref) = @_;
$self->tensor_ds->{"tensorShape"}->{"dim"} = $new_dims_aref
if $new_dims_aref;
return $self->tensor_ds->{"tensorShape"}->{"dim"};
}
# Combined getter/setter for the tensor's dtype string (e.g. "DT_STRING").
# A true argument replaces the stored dtype; the current dtype is returned.
sub dtype {
my ($self, $new_dtype) = @_;
$self->tensor_ds->{"dtype"} = $new_dtype if $new_dtype;
return $self->tensor_ds->{"dtype"};
}
# Flag consulted by value() for DT_STRING tensors (see the
# `!$self->use_base64_strings` branch in value()). When false (default)
# value() takes a special-case path for raw strings — presumably encoding
# them itself; branch body is not visible in this fragment, confirm there.
has use_base64_strings => (
is => 'rw',
default => 0,
);
# Map from TensorFlow dtype enum name to the JSON field name that carries
# the values in the TensorProto wire format. Used by value() to locate the
# correct value array for the tensor's current dtype.
#
# Fix: DT_INT32 (and DT_UINT16) were missing. Per TensorFlow's
# tensor.proto, int_val (JSON "intVal") carries DT_INT32, DT_INT16,
# DT_UINT16, DT_INT8 and DT_UINT8; without these entries value() would
# look up an undef field name for the most common integer dtype.
has dtype_values => (
is => 'ro',
default => sub {
{
DT_HALF => 'halfVal',
DT_FLOAT => 'floatVal',
DT_DOUBLE => 'doubleVal',
DT_INT32 => 'intVal',
DT_INT16 => 'intVal',
DT_UINT16 => 'intVal',
DT_INT8 => 'intVal',
DT_UINT8 => 'intVal',
DT_STRING => 'stringVal',
DT_COMPLEX64 => 'scomplexVal',
DT_INT64 => 'int64Val',
DT_BOOL => 'boolVal',
DT_COMPLEX128 => 'dcomplexVal',
DT_RESOURCE => 'resourceHandleVal'
};
});
sub value {
my ($self, $value_aref) = @_;
my $decoded_aref;
my $value_type = $self->dtype_values->{ $self->dtype };
my $tensor_value_ref = \$self->tensor_ds->{$value_type};
if ($value_aref) {
if ($self->dtype eq 'DT_STRING' && !$self->use_base64_strings) {
lib/AI/PredictionClient/InceptionClient.pm view on Meta::CPAN
use AI::PredictionClient::Classes::SimpleTensor;
use AI::PredictionClient::Testing::Camel;
extends 'AI::PredictionClient::Predict';
# Results of the most recent inception call; writer is private ("rwp"),
# so it is set internally (the setter call site is not visible in this
# fragment — presumably populated by call_inception()).
has inception_results => (is => 'rwp');
# When true, call_inception() sends the bundled camel test image
# (AI::PredictionClient::Testing::Camel) as the tensor value instead of
# the caller-supplied image data.
has camel => (is => 'rw',);
sub call_inception {
my $self = shift;
my $image = shift;
my $tensor = AI::PredictionClient::Classes::SimpleTensor->new();
$tensor->shape([ { size => 1 } ]);
$tensor->dtype("DT_STRING");
if ($self->camel) {
my $camel_test = AI::PredictionClient::Testing::Camel->new();
$tensor->value([ $camel_test->camel_jpeg_ref ]);
lib/AI/PredictionClient/Roles/PredictRole.pm view on Meta::CPAN
package AI::PredictionClient::Roles::PredictRole;
$AI::PredictionClient::Roles::PredictRole::VERSION = '0.05';
# ABSTRACT: Implements the Predict service specific interface
use AI::PredictionClient::Classes::SimpleTensor;
use Moo::Role;
requires 'request_ds', 'reply_ds';
# Install the request's input tensors. Takes a hashref mapping input
# names to SimpleTensor objects, unwraps each to its raw tensor_ds
# structure, and stores the result under request_ds->{inputs}.
# Note: an empty input hash leaves the converted hashref undef, matching
# the original behavior. Returns nothing.
sub inputs {
my ($self, $tensors_by_name_href) = @_;
my $raw_tensors_href;
$raw_tensors_href->{$_} = $tensors_by_name_href->{$_}->tensor_ds
for keys %$tensors_by_name_href;
$self->request_ds->{"inputs"} = $raw_tensors_href;
return;
}
# Execute one Predict round trip: serialize the accumulated request,
# hand it to the transport client (real gRPC client or loopback stub),
# and deserialize whatever comes back. Returns deserialize_reply's result.
sub callPredict {
my ($self) = @_;
my $serialized = $self->serialize_request();
return $self->deserialize_reply(
$self->perception_client_object->callPredict($serialized));
}
sub outputs {
my $self = shift;
my $predict_outputs_ref = $self->reply_ds->{outputs};
my $tensorsout_href;
foreach my $outkey (keys %$predict_outputs_ref) {
$tensorsout_href->{$outkey} = AI::PredictionClient::Classes::SimpleTensor->new(
$predict_outputs_ref->{$outkey});
}
lib/AI/PredictionClient/Roles/PredictionRole.pm view on Meta::CPAN
# When true, serialize_request()/deserialize_reply() dump the request and
# reply structures to STDOUT via Data::Dumper.
has debug_verbose => (
is => 'rw',
default => 0,
);
# Transport used for Predict calls: either the real C++ gRPC client or
# the loopback test stub. Built lazily by _build_perception_client_object.
has perception_client_object => (
is => 'lazy',
builder => 1,
);
# Lazy builder for perception_client_object. Forms the "host:port"
# endpoint string and instantiates either the loopback test stub (when
# the loopback flag is set) or the real C++ gRPC prediction client.
sub _build_perception_client_object {
my ($self) = @_;
my $endpoint = join ':', $self->host, $self->port;
if ($self->loopback) {
return AI::PredictionClient::Testing::PredictionLoopback->new($endpoint);
}
return AI::PredictionClient::CPP::PredictionGrpcCpp::PredictionClient->new(
$endpoint);
}
# Outgoing request data structure. model_name()/model_signature() fill in
# the modelSpec slots; role consumers add other keys (e.g. "inputs" in
# PredictRole). Serialized to JSON by serialize_request().
has request_ds => (
is => 'ro',
default => sub { { modelSpec => { name => "", signatureName => "" } } },
);
# Decoded reply data structure; writer is private ("rwp"), populated
# internally after a call (setter call site not visible in this fragment).
has reply_ds => (
is => 'rwp',
default => sub { {} },
);
# Setter: record the served model's name under modelSpec.name in the
# outgoing request structure. Returns nothing.
sub model_name {
my ($self, $name) = @_;
$self->request_ds->{"modelSpec"}{"name"} = $name;
return;
}
# Setter: record the model's signature name under modelSpec.signatureName
# in the outgoing request structure. Returns nothing.
sub model_signature {
my ($self, $signature) = @_;
$self->request_ds->{"modelSpec"}{"signatureName"} = $signature;
return;
}
# Status fields from the most recent call; writers are private ("rwp"),
# set internally (setter call sites not visible in this fragment). Read
# by callers on failure, e.g. bin/Inception.pl's error printf.
has status => (is => 'rwp',);
has status_code => (is => 'rwp',);
has status_message => (is => 'rwp',);
# Encode the accumulated request_ds structure as a JSON string and return
# it. When debug_verbose is set, dump both the raw structure and the
# resulting JSON to STDOUT.
sub serialize_request {
my ($self) = @_;
if ($self->debug_verbose) {
printf("Debug - Request: %s \n", Dumper(\$self->request_ds));
}
my $encoded_json = JSON->new->encode($self->request_ds);
if ($self->debug_verbose) {
printf("Debug - JSON Request: %s \n", Dumper(\$encoded_json));
}
return $encoded_json;
}
sub deserialize_reply {
my $self = shift;
my $serialized_return = shift;
printf("Debug - JSON Response: %s \n", Dumper(\$serialized_return))
if $self->debug_verbose;
my $json = JSON->new;
my $returned_ds = $json->decode(
ref($serialized_return) ? $$serialized_return : $serialized_return);
lib/AI/PredictionClient/Testing/Camel.pm view on Meta::CPAN
use Moo;
my $camel_jpeg_b64;
my $camel_png_b64;
# Reference to the raw (decoded) camel JPEG bytes. Built lazily on first
# access from the embedded base64 data at the bottom of this file.
has camel_jpeg_ref => (
is => 'lazy',
builder => 1,
);
# Lazy builder: decode the embedded base64 JPEG and return a reference
# to the raw image bytes.
sub _build_camel_jpeg_ref {
my ($self) = @_;
my $b64_sref = $self->camel_jpeg_b64_ref;
return \decode_base64($$b64_sref);
}
# Reference to the raw (decoded) camel PNG bytes. Built lazily on first
# access from the embedded base64 data at the bottom of this file.
has camel_png_ref => (
is => 'lazy',
builder => 1,
);
# Lazy builder: decode the embedded base64 PNG and return a reference
# to the raw image bytes.
sub _build_camel_png_ref {
my ($self) = @_;
my $b64_sref = $self->camel_png_b64_ref;
return \decode_base64($$b64_sref);
}
# References to the package-scoped base64 strings holding the embedded
# test images (the strings are assigned further down in this file).
has camel_jpeg_b64_ref => (
is => 'ro',
default => sub { \$camel_jpeg_b64 },
);
has camel_png_b64_ref => (
is => 'ro',
default => sub { \$camel_png_b64 },
);
$camel_jpeg_b64
= '/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkS
Ew8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJ
CQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIy
MjIyMjIyMjIyMjIyMjL/wAARCABAAEADASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEA
AAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIh
MUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6
Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZ
lib/AI/PredictionClient/Testing/PredictionLoopback.pm view on Meta::CPAN
use warnings;
package AI::PredictionClient::Testing::PredictionLoopback;
$AI::PredictionClient::Testing::PredictionLoopback::VERSION = '0.05';
# ABSTRACT: A loopback interface for client testing and development
use 5.010;
use Data::Dumper;
use Moo;
# Constructor sugar: a single non-reference argument is taken as the
# "host:port" string, i.e. ->new($endpoint) behaves as
# ->new(server_port => $endpoint). Anything else is forwarded unchanged.
around BUILDARGS => sub {
my ($orig, $class, @args) = @_;
my $is_bare_endpoint = @args == 1 && !ref $args[0];
return $is_bare_endpoint
? $class->$orig(server_port => $args[0])
: $class->$orig(@args);
};
has server_port => (is => 'rw',);
sub callPredict {
my ($self, $request_data) = @_;
my $test_return01
= '{"outputs":{"classes":{"dtype":"DT_STRING","tensorShape":{"dim":[{"size":"1"},{"size":"6"}]},"stringVal":["bG9vcGJhY2sgdGVzdCBkYXRhCg==","bWlsaXRhcnkgdW5pZm9ybQ==","Ym93IHRpZSwgYm93LXRpZSwgYm93dGll","bW9ydGFyYm9hcmQ=","c3VpdCwgc3VpdCBvZiBjbG90...
my $test_return02
= '{"outputs":{"classes":{"dtype":"DT_STRING","tensorShape":{"dim":[{"size":"1"},{"size":"5"}]},"stringVal":["bG9hZCBpdAo=","Y2hlY2sgaXQK","cXVpY2sgLSByZXdyaXRlIGl0Cg==","dGVjaG5vbG9naWMK","dGVjaG5vbG9naWMK"]},"scores":{"dtype":"DT_FLOAT","tensor...
my $return_ser = '{"Status": "OK", ';
$return_ser .= '"StatusCode": "42", ';
( run in 0.264 second using v1.01-cache-2.11-cpan-4d50c553e7e )