AI-PredictionClient

 view release on metacpan or  search on metacpan

MANIFEST  view on Meta::CPAN

dist.ini
lib/AI/PredictionClient.pm
lib/AI/PredictionClient/CPP/PredictionGrpcCpp.pm
lib/AI/PredictionClient/CPP/Typemaps/more_typemaps_STL_String.txt
lib/AI/PredictionClient/Classes/SimpleTensor.pm
lib/AI/PredictionClient/Docs/Overview.pod
lib/AI/PredictionClient/InceptionClient.pm
lib/AI/PredictionClient/Predict.pm
lib/AI/PredictionClient/Roles/PredictRole.pm
lib/AI/PredictionClient/Roles/PredictionRole.pm
lib/AI/PredictionClient/Testing/Camel.pm
lib/AI/PredictionClient/Testing/PredictionLoopback.pm
t/00load.t
t/author-critic.t
t/author-pod-spell.t
t/author-pod-syntax.t
t/release-minimum-version.t

META.json  view on Meta::CPAN

            "AI::PredictionClient::Alien::TensorFlowServingProtos" : "0.05",
            "Alien::Google::GRPC" : "0.06",
            "ExtUtils::MakeMaker" : "0",
            "Inline" : "0",
            "Inline::CPP" : "0",
            "Inline::MakeMaker" : "0"
         }
      },
      "develop" : {
         "requires" : {
            "Test::MinimumVersion" : "0",
            "Test::Perl::Critic" : "0",
            "Test::Pod" : "1.41",
            "Test::Spelling" : "0.12"
         }
      },
      "runtime" : {
         "requires" : {
            "AI::PredictionClient::Alien::TensorFlowServingProtos" : "0",
            "Alien::Google::GRPC" : "0",
            "Cwd" : "0",
            "Data::Dumper" : "0",
            "Inline" : "0",
            "JSON" : "0",

META.json  view on Meta::CPAN

            "Moo::Role" : "0",
            "MooX::Options" : "0",
            "Perl6::Form" : "0",
            "perl" : "5.01",
            "strict" : "0",
            "warnings" : "0"
         }
      },
      "test" : {
         "requires" : {
            "Test::More" : "0"
         }
      }
   },
   "provides" : {
      "AI::PredictionClient" : {
         "file" : "lib/AI/PredictionClient.pm",
         "version" : "0.05"
      },
      "AI::PredictionClient::CPP::PredictionGrpcCpp" : {
         "file" : "lib/AI/PredictionClient/CPP/PredictionGrpcCpp.pm",

META.json  view on Meta::CPAN

         "version" : "0.05"
      },
      "AI::PredictionClient::Roles::PredictRole" : {
         "file" : "lib/AI/PredictionClient/Roles/PredictRole.pm",
         "version" : "0.05"
      },
      "AI::PredictionClient::Roles::PredictionRole" : {
         "file" : "lib/AI/PredictionClient/Roles/PredictionRole.pm",
         "version" : "0.05"
      },
      "AI::PredictionClient::Testing::Camel" : {
         "file" : "lib/AI/PredictionClient/Testing/Camel.pm",
         "version" : "0.05"
      },
      "AI::PredictionClient::Testing::PredictionLoopback" : {
         "file" : "lib/AI/PredictionClient/Testing/PredictionLoopback.pm",
         "version" : "0.05"
      }
   },
   "release_status" : "stable",
   "resources" : {
      "homepage" : "https://github.com/mountaintom/AI-PredictionClient",
      "repository" : {
         "type" : "git",
         "url" : "https://github.com/mountaintom/AI-PredictionClient.git",
         "web" : "https://github.com/mountaintom/AI-PredictionClient"

META.yml  view on Meta::CPAN

---
abstract: 'A Perl Prediction client for Google TensorFlow Serving.'
author:
  - 'Tom Stall <stall@cpan.org>'
build_requires:
  Test::More: '0'
configure_requires:
  AI::PredictionClient::Alien::TensorFlowServingProtos: '0.05'
  Alien::Google::GRPC: '0.06'
  ExtUtils::MakeMaker: '0'
  Inline: '0'
  Inline::CPP: '0'
  Inline::MakeMaker: '0'
dynamic_config: 0
generated_by: 'Dist::Zilla version 6.009, CPAN::Meta::Converter version 2.143240'
license: perl

META.yml  view on Meta::CPAN

    version: '0.05'
  AI::PredictionClient::Predict:
    file: lib/AI/PredictionClient/Predict.pm
    version: '0.05'
  AI::PredictionClient::Roles::PredictRole:
    file: lib/AI/PredictionClient/Roles/PredictRole.pm
    version: '0.05'
  AI::PredictionClient::Roles::PredictionRole:
    file: lib/AI/PredictionClient/Roles/PredictionRole.pm
    version: '0.05'
  AI::PredictionClient::Testing::Camel:
    file: lib/AI/PredictionClient/Testing/Camel.pm
    version: '0.05'
  AI::PredictionClient::Testing::PredictionLoopback:
    file: lib/AI/PredictionClient/Testing/PredictionLoopback.pm
    version: '0.05'
requires:
  AI::PredictionClient::Alien::TensorFlowServingProtos: '0'
  Alien::Google::GRPC: '0'
  Cwd: '0'
  Data::Dumper: '0'
  Inline: '0'
  JSON: '0'
  MIME::Base64: '0'
  Moo: '0'

Makefile.PL  view on Meta::CPAN

    "JSON" => 0,
    "MIME::Base64" => 0,
    "Moo" => 0,
    "Moo::Role" => 0,
    "MooX::Options" => 0,
    "Perl6::Form" => 0,
    "strict" => 0,
    "warnings" => 0
  },
  "TEST_REQUIRES" => {
    "Test::More" => 0
  },
  "VERSION" => "0.05",
  "test" => {
    "TESTS" => "t/*.t"
  }
);

my %FallbackPrereqs = (
  "AI::PredictionClient::Alien::TensorFlowServingProtos" => 0,
  "Alien::Google::GRPC" => 0,
  "Cwd" => 0,
  "Data::Dumper" => 0,
  "Inline" => 0,
  "JSON" => 0,
  "MIME::Base64" => 0,
  "Moo" => 0,
  "Moo::Role" => 0,
  "MooX::Options" => 0,
  "Perl6::Form" => 0,
  "Test::More" => 0,
  "strict" => 0,
  "warnings" => 0
);

unless ( eval { ExtUtils::MakeMaker->VERSION(6.63_03) } ) {
  delete $WriteMakefileArgs{TEST_REQUIRES};
  delete $WriteMakefileArgs{BUILD_REQUIRES};
  $WriteMakefileArgs{PREREQ_PM} = \%FallbackPrereqs;
}

bin/Inception.pl  view on Meta::CPAN

  is       => 'ro',
  required => 0,
  format   => 's',
  default  => $default_model_signature,
  doc      => "API signature for model [Default: $default_model_signature]"
);
option debug_verbose => (is => 'ro', doc => 'Verbose output');
option debug_loopback_interface => (
  is       => 'ro',
  required => 0,
  doc      => "Test loopback through dummy server"
);
option debug_camel => (
  is       => 'ro',
  required => 0,
  doc      => "Test using camel image"
);

sub run {
  my ($self) = @_;

  my $image_ref = $self->read_image($self->image_file);

  my $client = AI::PredictionClient::InceptionClient->new(
    host => $self->host,
    port => $self->port

cpanfile  view on Meta::CPAN

requires "MIME::Base64" => "0";
requires "Moo" => "0";
requires "Moo::Role" => "0";
requires "MooX::Options" => "0";
requires "Perl6::Form" => "0";
requires "perl" => "5.01";
requires "strict" => "0";
requires "warnings" => "0";

on 'test' => sub {
  requires "Test::More" => "0";
};

on 'configure' => sub {
  requires "AI::PredictionClient::Alien::TensorFlowServingProtos" => "0.05";
  requires "Alien::Google::GRPC" => "0.06";
  requires "ExtUtils::MakeMaker" => "0";
  requires "Inline" => "0";
  requires "Inline::CPP" => "0";
  requires "Inline::MakeMaker" => "0";
};

on 'develop' => sub {
  requires "Test::MinimumVersion" => "0";
  requires "Test::Perl::Critic" => "0";
  requires "Test::Pod" => "1.41";
  requires "Test::Spelling" => "0.12";
};

dist.ini  view on Meta::CPAN

[Prereqs]
perl = 5.01

[Prereqs / ConfigureRequires]
Inline = 0
Inline::CPP = 0
Inline::MakeMaker = 0
Alien::Google::GRPC = 0.06
AI::PredictionClient::Alien::TensorFlowServingProtos = 0.05

[Test::MinimumVersion]
max_target_perl = 5.10.1

[MetaProvides::Package]

[CheckChangeLog]

[Test::PodSpelling]
stopword = TensorFlow
stopword = gRPC
stopword = protofile
stopword = protofiles
stopword = loopback
stopword = proto
stopword = autotools

[Test::Perl::Critic]
critic_config = perlcritic.rc

[ExtraTests]
[PodSyntaxTests]

[PruneCruft]

[GithubMeta]

[Clean]

lib/AI/PredictionClient.pm  view on Meta::CPAN

This client implements a command line interface to the 
InceptionClient module 'AI::PredictionClient::InceptionClient', and provides 
a working example of using this module for building your own clients.

The commands for the Inception client can be displayed by running the Inception.pl client with no arguments.

 $ Inception.pl 
 image_file is missing
 USAGE: Inception.pl [-h] [long options ...]

    --debug_camel               Test using camel image
    --debug_loopback_interface  Test loopback through dummy server
    --debug_verbose             Verbose output
    --host=String               IP address of the server [Default:
                                127.0.0.1]
    --image_file=String         * Required: Path to image to be processed
    --model_name=String         Model to process image [Default: inception]
    --model_signature=String    API signature for model [Default:
                                predict_images]
    --port=String               Port number of the server [Default: 9000]
    -h                          show a compact help message

lib/AI/PredictionClient.pm  view on Meta::CPAN


Installing autotools is optional. If they are installed this package will use them, 
otherwise it will build and install its own local copies.

 $ [sudo] apt-get install autoconf automake libtool

See the Alien::Google::GRPC for potential additional build dependencies.

At this time only Linux builds are supported.

=head2 CPAN Testers Note

This module may fail CPAN Testers' tests. 
The build support tools needed by this module and especially the 
Alien::Google::GRPC module are normally installed on the 
CPAN Testers' machines, but not always.

The system build tools dependencies have been reduced, so hopefully 
a large number of machines will build without manually installing 
system dependencies.

=head2 NOTE

This is a complex package with a lot of moving parts. Please pardon me if this first release has a minor bug or a missing dependency that went undiscovered in my testing.

=head1 AUTHOR

lib/AI/PredictionClient/Docs/Overview.pod  view on Meta::CPAN

This client implements a command line interface to the 
InceptionClient module 'AI::PredictionClient::InceptionClient', and provides 
a working example of using this module for building your own clients.

The commands for the Inception client can be displayed by running the Inception.pl client with no arguments.

 $ Inception.pl 
 image_file is missing
 USAGE: Inception.pl [-h] [long options ...]

    --debug_camel               Test using camel image
    --debug_loopback_interface  Test loopback through dummy server
    --debug_verbose             Verbose output
    --host=String               IP address of the server [Default:
                                127.0.0.1]
    --image_file=String         * Required: Path to image to be processed
    --model_name=String         Model to process image [Default: inception]
    --model_signature=String    API signature for model [Default:
                                predict_images]
    --port=String               Port number of the server [Default: 9000]
    -h                          show a compact help message

lib/AI/PredictionClient/Docs/Overview.pod  view on Meta::CPAN


Installing autotools is optional. If they are installed this package will use them, 
otherwise it will build and install its own local copies.

 $ [sudo] apt-get install autoconf automake libtool

See the Alien::Google::GRPC for potential additional build dependencies.

At this time only Linux builds are supported.

=head2 CPAN Testers Note

This module may fail CPAN Testers' tests. 
The build support tools needed by this module and especially the 
Alien::Google::GRPC module are normally installed on the 
CPAN Testers' machines, but not always.

The system build tools dependencies have been reduced, so hopefully 
a large number of machines will build without manually installing 
system dependencies.

=head2 NOTE

This is a complex package with a lot of moving parts. Please pardon me if this first release has a minor bug or a missing dependency that went undiscovered in my testing.

=head1 AUTHOR

lib/AI/PredictionClient/InceptionClient.pm  view on Meta::CPAN

$AI::PredictionClient::InceptionClient::VERSION = '0.05';

# ABSTRACT: A module implementing the TensorFlow Serving Inception client

use 5.010;

use Data::Dumper;
use Moo;

use AI::PredictionClient::Classes::SimpleTensor;
use AI::PredictionClient::Testing::Camel;

extends 'AI::PredictionClient::Predict';

# Most recent classification results; writable only inside this class
# ('rwp') — presumably set after callPredict() succeeds (setter not
# visible in this view; confirm in the full module source).
has inception_results => (is => 'rwp');

# When true, call_inception() ignores the supplied image and sends the
# built-in camel test JPEG instead (see the $self->camel branch below).
has camel => (is => 'rw',);

sub call_inception {
  my $self  = shift;
  my $image = shift;

  my $tensor = AI::PredictionClient::Classes::SimpleTensor->new();
  $tensor->shape([ { size => 1 } ]);
  $tensor->dtype("DT_STRING");

  if ($self->camel) {
    my $camel_test = AI::PredictionClient::Testing::Camel->new();
    $tensor->value([ $camel_test->camel_jpeg_ref ]);
  } else {
    $tensor->value([$image]);
  }

  $self->inputs({ images => $tensor });

  if ($self->callPredict()) {

    my $predict_output_map_href = $self->outputs;

lib/AI/PredictionClient/Roles/PredictionRole.pm  view on Meta::CPAN

use strict;
use warnings;
package AI::PredictionClient::Roles::PredictionRole;
$AI::PredictionClient::Roles::PredictionRole::VERSION = '0.05';

# ABSTRACT: Implements the Prediction service interface

use AI::PredictionClient::CPP::PredictionGrpcCpp;
use AI::PredictionClient::Testing::PredictionLoopback;
use JSON ();
use Data::Dumper;
use MIME::Base64 qw( encode_base64 decode_base64 );
use Moo::Role;

# Network location of the TensorFlow Serving endpoint; joined as
# "host:port" when the prediction client object is built.
has host => (is => 'ro');

has port => (is => 'ro',);

has loopback => (

lib/AI/PredictionClient/Roles/PredictionRole.pm  view on Meta::CPAN

has perception_client_object => (
  is      => 'lazy',
  builder => 1,
);

# Builder for the lazy 'perception_client_object' attribute.
# Selects the transport when first accessed: the pure-Perl loopback
# stub when 'loopback' is set, otherwise the real gRPC C++ client.
sub _build_perception_client_object {
  my ($self) = @_;

  my $endpoint = join ':', $self->host, $self->port;

  if ($self->loopback) {
    return AI::PredictionClient::Testing::PredictionLoopback->new($endpoint);
  }

  return AI::PredictionClient::CPP::PredictionGrpcCpp::PredictionClient->new(
    $endpoint);
}

# Skeleton of the outgoing Predict request data structure. The camelCase
# keys mirror the gRPC/JSON wire field names; the empty model name and
# signature are presumably filled in by the consuming role before the
# request is sent — confirm against PredictRole.
has request_ds => (
  is      => 'ro',
  default => sub { { modelSpec => { name => "", signatureName => "" } } },
);

has reply_ds => (

lib/AI/PredictionClient/Testing/Camel.pm  view on Meta::CPAN

use strict;
use warnings;
package AI::PredictionClient::Testing::Camel;
$AI::PredictionClient::Testing::Camel::VERSION = '0.05';
# ABSTRACT: Provides a camel image for testing  in various formats 
use 5.010;
use MIME::Base64 qw( encode_base64 decode_base64 );
use Moo;

my $camel_jpeg_b64;
my $camel_png_b64;

has camel_jpeg_ref => (
  is      => 'lazy',

lib/AI/PredictionClient/Testing/Camel.pm  view on Meta::CPAN

gC0AWwC2AGwBeH7G/wLjtM2l8m/OTQAAAABJRU5ErkJggg==';

__END__

=pod

=encoding UTF-8

=head1 NAME

AI::PredictionClient::Testing::Camel - Provides a camel image for testing in various formats

=head1 VERSION

version 0.05

=head1 AUTHOR

Tom Stall <stall@cpan.org>

=head1 COPYRIGHT AND LICENSE

lib/AI/PredictionClient/Testing/PredictionLoopback.pm  view on Meta::CPAN

use strict;
use warnings;
package AI::PredictionClient::Testing::PredictionLoopback;
$AI::PredictionClient::Testing::PredictionLoopback::VERSION = '0.05';

# ABSTRACT: A loopback interface for client testing and development

use 5.010;
use Data::Dumper;
use Moo;

around BUILDARGS => sub {
  my $orig  = shift;
  my $class = shift;

lib/AI/PredictionClient/Testing/PredictionLoopback.pm  view on Meta::CPAN

1;

__END__

=pod

=encoding UTF-8

=head1 NAME

AI::PredictionClient::Testing::PredictionLoopback - A loopback interface for client testing and development

=head1 VERSION

version 0.05

=head1 AUTHOR

Tom Stall <stall@cpan.org>

=head1 COPYRIGHT AND LICENSE

t/00load.t  view on Meta::CPAN

## no critic(RCS,VERSION,explicit,Module)
use strict;
use warnings;

use Test::More;

# Compile-time load check inside BEGIN so a failure to load the
# Inline::CPP/gRPC bridge module is reported before anything else runs.
BEGIN {
    use_ok('AI::PredictionClient::CPP::PredictionGrpcCpp');
}
ok( 1, 'AI::PredictionClient::CPP::PredictionGrpcCpp loaded.' );
done_testing();

t/author-critic.t  view on Meta::CPAN

  unless ($ENV{AUTHOR_TESTING}) {
    print qq{1..0 # SKIP these tests are for testing by the author\n};
    exit
  }
}


use strict;
use warnings;

# The list-repetition idiom "(...) x!! EXPR" passes the -profile import
# arguments to Test::Perl::Critic only when perlcritic.rc exists
# (x!! yields the list once for true, zero times for false).
use Test::Perl::Critic (-profile => "perlcritic.rc") x!! -e "perlcritic.rc";
all_critic_ok();

t/author-pod-spell.t  view on Meta::CPAN


# Author-only POD spelling test; skipped entirely unless AUTHOR_TESTING
# is set (emits a TAP skip plan and exits before loading test modules).
BEGIN {
  unless ($ENV{AUTHOR_TESTING}) {
    print qq{1..0 # SKIP these tests are for testing by the author\n};
    exit
  }
}

use strict;
use warnings;
use Test::More;

# generated by Dist::Zilla::Plugin::Test::PodSpelling 2.007004
use Test::Spelling 0.12;
use Pod::Wordlist;


# Stopwords (project jargon the spell checker should accept) are read
# from the __DATA__ section at the bottom of this file.
add_stopwords(<DATA>);
all_pod_files_spelling_ok( qw( bin lib ) );
__DATA__
AI
CPP
Camel
Classes

t/author-pod-spell.t  view on Meta::CPAN

Predict
PredictRole
PredictionClient
PredictionGrpcCpp
PredictionLoopback
PredictionRole
Roles
SimpleTensor
Stall
TensorFlow
Testing
Tom
autotools
bin
gRPC
lib
loopback
proto
protofile
protofiles
stall

t/author-pod-syntax.t  view on Meta::CPAN

#!perl

# Author-only POD syntax test; skipped unless AUTHOR_TESTING is set
# (emits a TAP skip plan and exits before loading test modules).
BEGIN {
  unless ($ENV{AUTHOR_TESTING}) {
    print qq{1..0 # SKIP these tests are for testing by the author\n};
    exit
  }
}

# This file was automatically generated by Dist::Zilla::Plugin::PodSyntaxTests.
use strict; use warnings;
use Test::More;
use Test::Pod 1.41;

all_pod_files_ok();

t/release-minimum-version.t  view on Meta::CPAN

#!perl

# Release-candidate-only test; skipped unless RELEASE_TESTING is set
# (emits a TAP skip plan and exits before loading test modules).
BEGIN {
  unless ($ENV{RELEASE_TESTING}) {
    print qq{1..0 # SKIP these tests are for release candidate testing\n};
    exit
  }
}


use Test::More;

# Optional dependency: load Test::MinimumVersion via string eval and
# skip the whole file if it is not installed ($@ holds the load error).
eval "use Test::MinimumVersion";
plan skip_all => "Test::MinimumVersion required for testing minimum versions"
  if $@;
# Verify no file in the distribution requires a perl newer than 5.10.1,
# matching the max_target_perl setting in dist.ini.
all_minimum_version_ok( qq{5.10.1} );



( run in 0.598 second using v1.01-cache-2.11-cpan-4d50c553e7e )