AI-PredictionClient
view release on metacpan - search on metacpan
view release on metacpan or search on metacpan
dist.ini
lib/AI/PredictionClient.pm
lib/AI/PredictionClient/CPP/PredictionGrpcCpp.pm
lib/AI/PredictionClient/CPP/Typemaps/more_typemaps_STL_String.txt
lib/AI/PredictionClient/Classes/SimpleTensor.pm
lib/AI/PredictionClient/Docs/Overview.pod
lib/AI/PredictionClient/InceptionClient.pm
lib/AI/PredictionClient/Predict.pm
lib/AI/PredictionClient/Roles/PredictRole.pm
lib/AI/PredictionClient/Roles/PredictionRole.pm
lib/AI/PredictionClient/Testing/Camel.pm
lib/AI/PredictionClient/Testing/PredictionLoopback.pm
t/00load.t
t/author-critic.t
t/author-pod-spell.t
t/author-pod-syntax.t
t/release-minimum-version.t
"AI::PredictionClient::Alien::TensorFlowServingProtos" : "0.05",
"Alien::Google::GRPC" : "0.06",
"ExtUtils::MakeMaker" : "0",
"Inline" : "0",
"Inline::CPP" : "0",
"Inline::MakeMaker" : "0"
}
},
"develop" : {
"requires" : {
"Test::MinimumVersion" : "0",
"Test::Perl::Critic" : "0",
"Test::Pod" : "1.41",
"Test::Spelling" : "0.12"
}
},
"runtime" : {
"requires" : {
"AI::PredictionClient::Alien::TensorFlowServingProtos" : "0",
"Alien::Google::GRPC" : "0",
"Cwd" : "0",
"Data::Dumper" : "0",
"Inline" : "0",
"JSON" : "0",
---
abstract: 'A Perl Prediction client for Google TensorFlow Serving.'
author:
- 'Tom Stall <stall@cpan.org>'
build_requires:
Test::More: '0'
configure_requires:
AI::PredictionClient::Alien::TensorFlowServingProtos: '0.05'
Alien::Google::GRPC: '0.06'
ExtUtils::MakeMaker: '0'
Inline: '0'
Inline::CPP: '0'
Inline::MakeMaker: '0'
dynamic_config: 0
generated_by: 'Dist::Zilla version 6.009, CPAN::Meta::Converter version 2.143240'
license: perl
bin/Inception.pl view on Meta::CPAN
is => 'ro',
required => 0,
format => 's',
default => $default_model_signature,
doc => "API signature for model [Default: $default_model_signature]"
);
# Flag: emit extra diagnostic output while the client runs.
option debug_verbose => (is => 'ro', doc => 'Verbose output');
# Flag: route the request through the in-process dummy loopback server
# (AI::PredictionClient::Testing::PredictionLoopback) instead of a real
# TensorFlow Serving instance.
option debug_loopback_interface => (
is => 'ro',
required => 0,
doc => "Test loopback through dummy server"
);
# Flag: use the bundled camel test image (Testing::Camel) instead of a
# user-supplied image file — presumably for a quick smoke test; confirm
# against the run() implementation.
option debug_camel => (
is => 'ro',
required => 0,
doc => "Test using camel image"
);
sub run {
my ($self) = @_;
my $image_ref = $self->read_image($self->image_file);
my $client = AI::PredictionClient::InceptionClient->new(
host => $self->host,
port => $self->port
requires "MIME::Base64" => "0";
requires "Moo" => "0";
requires "Moo::Role" => "0";
requires "MooX::Options" => "0";
requires "Perl6::Form" => "0";
requires "perl" => "5.01";
requires "strict" => "0";
requires "warnings" => "0";
# Test-phase prerequisites: needed only to run the distribution's test suite.
on 'test' => sub {
requires "Test::More" => "0";
};
# Configure-phase prerequisites: must be installed before Makefile.PL runs.
# The two Alien modules presumably provide the gRPC and TensorFlow Serving
# protobuf libraries the Inline::CPP glue compiles against (see
# CPP/PredictionGrpcCpp.pm in the file listing).
on 'configure' => sub {
requires "AI::PredictionClient::Alien::TensorFlowServingProtos" => "0.05";
requires "Alien::Google::GRPC" => "0.06";
requires "ExtUtils::MakeMaker" => "0";
requires "Inline" => "0";
requires "Inline::CPP" => "0";
requires "Inline::MakeMaker" => "0";
};
# Develop-phase prerequisites: author-only tools backing the author/release
# tests (t/author-*.t, t/release-*.t); end users installing the dist do not
# need these.
on 'develop' => sub {
requires "Test::MinimumVersion" => "0";
requires "Test::Perl::Critic" => "0";
requires "Test::Pod" => "1.41";
requires "Test::Spelling" => "0.12";
};
[Prereqs]
perl = 5.01
[Prereqs / ConfigureRequires]
Inline = 0
Inline::CPP = 0
Inline::MakeMaker = 0
Alien::Google::GRPC = 0.06
AI::PredictionClient::Alien::TensorFlowServingProtos = 0.05
[Test::MinimumVersion]
max_target_perl = 5.10.1
[MetaProvides::Package]
[CheckChangeLog]
[Test::PodSpelling]
stopword = TensorFlow
stopword = gRPC
stopword = protofile
stopword = protofiles
stopword = loopback
stopword = proto
stopword = autotools
[Test::Perl::Critic]
critic_config = perlcritic.rc
[ExtraTests]
[PodSyntaxTests]
[PruneCruft]
[GithubMeta]
[Clean]
lib/AI/PredictionClient.pm view on Meta::CPAN
This client implements a command line interface to the
InceptionClient module 'AI::PredictionClient::InceptionClient', and provides
a working example of using this module for building your own clients.
The commands for the Inception client can be displayed by running the Inception.pl client with no arguments.
$ Inception.pl
image_file is missing
USAGE: Inception.pl [-h] [long options ...]
--debug_camel Test using camel image
--debug_loopback_interface Test loopback through dummy server
--debug_verbose Verbose output
--host=String IP address of the server [Default:
127.0.0.1]
--image_file=String * Required: Path to image to be processed
--model_name=String Model to process image [Default: inception]
--model_signature=String API signature for model [Default:
predict_images]
--port=String Port number of the server [Default: 9000]
-h show a compact help message
lib/AI/PredictionClient/Roles/PredictionRole.pm view on Meta::CPAN
use strict;
use warnings;
# NOTE(review): pragmas precede the package statement — a common layout in
# Dist::Zilla-managed dists (generated_by shows Dist::Zilla 6.009) so tooling
# can inject the $VERSION assignment immediately after the package line.
package AI::PredictionClient::Roles::PredictionRole;
$AI::PredictionClient::Roles::PredictionRole::VERSION = '0.05';
# ABSTRACT: Implements the Prediction service interface
# C++ (Inline::CPP) bridge to the gRPC PredictionService client.
use AI::PredictionClient::CPP::PredictionGrpcCpp;
# In-process dummy server used when the loopback debug path is selected.
use AI::PredictionClient::Testing::PredictionLoopback;
use JSON ();
use Data::Dumper;
use MIME::Base64 qw( encode_base64 decode_base64 );
use Moo::Role;
# Server address of the TensorFlow Serving (or loopback) endpoint; read-only,
# supplied at construction time by the consuming class.
has host => (is => 'ro');
has port => (is => 'ro',);
has loopback => (
t/author-critic.t view on Meta::CPAN
unless ($ENV{AUTHOR_TESTING}) {
print qq{1..0 # SKIP these tests are for testing by the author\n};
exit
}
}
use strict;
use warnings;
# Pass the project's perlcritic.rc profile to Test::Perl::Critic, but only if
# that file exists: `x!!` coerces the -e result to a boolean 1 or 0 and
# replicates the import list that many times, so the line degrades to a plain
# `use Test::Perl::Critic;` (default policies) when perlcritic.rc is absent.
use Test::Perl::Critic (-profile => "perlcritic.rc") x!! -e "perlcritic.rc";
all_critic_ok();
t/author-pod-spell.t view on Meta::CPAN
# Author-only spell check of the POD: skip (TAP plan "1..0 # SKIP") unless
# AUTHOR_TESTING is set, before any test modules are loaded.
BEGIN {
unless ($ENV{AUTHOR_TESTING}) {
print qq{1..0 # SKIP these tests are for testing by the author\n};
exit
}
}
use strict;
use warnings;
use Test::More;
# generated by Dist::Zilla::Plugin::Test::PodSpelling 2.007004
use Test::Spelling 0.12;
use Pod::Wordlist;
# The word list after __DATA__ (distribution-specific jargon) is merged into
# the spell-checker's stopwords so it is not reported as misspelled.
add_stopwords(<DATA>);
# Spell-check the POD of every file under bin/ and lib/.
all_pod_files_spelling_ok( qw( bin lib ) );
__DATA__
AI
CPP
Camel
Classes
t/release-minimum-version.t view on Meta::CPAN
#!perl

# Release-candidate-only check: ensure the distribution's code uses no Perl
# syntax newer than the declared minimum version (5.10.1).
BEGIN {
    if (!$ENV{RELEASE_TESTING}) {
        print qq{1..0 # SKIP these tests are for release candidate testing\n};
        exit;
    }
}

use Test::More;

# Test::MinimumVersion is an author-side tool; skip the whole file rather
# than fail when it is not installed.
eval "use Test::MinimumVersion";
if ($@) {
    plan skip_all => "Test::MinimumVersion required for testing minimum versions";
}

all_minimum_version_ok( qq{5.10.1} );
view all matches for this distribution — view release on metacpan - search on metacpan
( run in 1.478 second using v1.00-cache-2.02-grep-82fe00e-cpan-585fae043c8 )