It is usually a good idea to provide version information here so that people
can decide whether fixes for the module are worth downloading.
INSTALLATION
To install this module, run the following commands:
perl Build.PL
./Build
./Build test
./Build install
===================================================
[*] If you see a missing-prerequisites message like the one below while
executing the perl Build.PL command:
checking prerequisites...
requires:
! LWP::Parallel::UserAgent is not installed
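If a prerequisite is missing, install it from CPAN before proceeding, for
example with the stock cpan client (cpanminus works equally well):
cpan LWP::Parallel::UserAgent
Then re-run perl Build.PL.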
APP-REST-RestTestSuite
======================
This is my pet project: a test automation tool for RESTful web services.
lib/APP/REST/RestTestSuite.pm
use constant LOG_FILE => 'rest_client.log';
use constant ERR_LOG_FILE => 'rest_client_error.log';
use constant LINE => '=' x 50;
$| = 1; #make the pipe hot
$Data::Dumper::Indent = 1;
=head1 NAME
APP::REST::RestTestSuite - Suite for testing RESTful web services
=head1 VERSION
Version 0.03
=cut
our $VERSION = '0.03';
=head1 SYNOPSIS
use APP::REST::RestTestSuite;
my $suite = APP::REST::RestTestSuite->new();
$suite->execute_test_cases( $suite->get_test_cases() );
my ( $cases_in_config, $executed, $skipped, $passed, $failed ) =
$suite->get_result_summary();
#OR
use APP::REST::RestTestSuite;
# overrides the default config and log file paths
my $suite = APP::REST::RestTestSuite->new(
REST_CONFIG_FILE => <config file>,
LOG_FILE_PATH => <path>,
);
$suite->execute_test_cases( $suite->get_test_cases() );
my ( $cases_in_config, $executed, $skipped, $passed, $failed ) =
$suite->get_result_summary();
=head1 DESCRIPTION
An APP::REST::RestTestSuite object is instantiated with the data in a config
file. The default config file format is defined in __DATA__ and can be
overridden by passing a config file as an argument to the constructor.
The default LOG file path is the current working directory of the script
that uses this module; it can be overridden with the LOG_FILE_PATH argument.
=head2 new

=cut

sub new {
    my ( $class, %args ) = @_;
my $self = {};
bless( $self, $class );
$self->_init(%args);
return $self;
}
=head2 get_test_cases
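
Returns the test cases parsed from the config file as a hash keyed by test
case number, with each value a hash ref holding the test specification.
Returns an empty list when no test cases are loaded. A minimal sketch:

    my %cases = $suite->get_test_cases();
    for my $num ( sort { $a <=> $b } keys %cases ) {
        print "$num => $cases{$num}->{test_case}\n";
    }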
=cut
sub get_test_cases {
my ( $self, %args ) = @_;
if ( $self->{test_cases} ) {
return %{ $self->{test_cases} };
} else {
        return;    # empty list, not (undef), in list context
}
}
=head2 get_log_file_handle
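
Accessor for the file handle of the test log file.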
=cut
=head2 get_result_summary
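
Returns a five-element list: the number of test cases found in the config
file, followed by the counts executed, skipped, passed, and failed. The
values are meaningful only after a suite run has completed.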
=cut
sub get_result_summary {
my ( $self, %args ) = @_;
return (
$self->{test_result_log}->{test_cases_in_config},
        $self->{test_result_log}->{test_cases_executed},
$self->{test_result_log}->{test_cases_skipped},
$self->{test_result_log}->{test_cases_passed},
$self->{test_result_log}->{test_cases_failed},
);
}
=head2 validate_test_cases
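
Validates the supplied test case hash and returns a diagnostic string
describing the problem, or undef when the cases are well formed. A sketch
of the typical call, mirroring what execute_test_cases() does internally:

    my $err = $suite->validate_test_cases( $suite->get_test_cases() );
    die "ERROR: $err\n" if $err;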
=cut
sub validate_test_cases {
my ($self) = shift;
my $err = undef;
unless (@_) {
$err = "There is no test cases defined to execute.\n";
} elsif ( ( (@_) % 2 ) == 1 ) {
$err =
"Test cases are not properly configured in '"
. $self->get_config_file()
. "'\nDefine test cases properly.\nPlease see the README file for more info.\n";
}
return $err if ($err);
my %test_cases = @_;
my @spec = sort qw(
test_case
uri
request_content_type
request_method
request_body
response_status
execute
response_content_type
);
    # The two keys below are not mandatory for a test case at present; if
    # required, add them to the array above.
# response_header
# response_body
foreach my $count ( sort { $a <=> $b } keys(%test_cases) ) {
my $tc = $test_cases{$count};
my @keys = sort keys %{$tc};
no warnings;
$err .= "Test case '$tc->{test_case}' not properly defined\n"
unless ( _compare_arrays( \@spec, \@keys ) );
}
$err .= "Please see the README file to see the correct format.\n" if ($err);
return $err;
}
=head2 execute_test_cases
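
Executes the supplied test cases sequentially over LWP::UserAgent, compares
each response status and Content-Type against the configured expectations,
and writes detailed results to the log and error log files.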
=cut
sub execute_test_cases {
my ($self) = shift;
    # Expects a hash keyed by test case number, with hash refs holding the
    # test specification as values; validate that before trying to execute.
my $err = $self->validate_test_cases(@_);
die "ERROR: $err\n" if ($err);
my %test_cases = @_;
my $ua = LWP::UserAgent->new;
$ua->agent("RTAT/$VERSION");
$ua->timeout(90); # in seconds
$ua->default_header('Accept' => '*/*'); # to get cross platform support
my ( $config, $total, $total_response_time, $skip, $pass, $fail ) = (0) x 6;
my ( $uri, $method, $req_content_type, $req_body, $status ) = (undef) x 5;
    if ( $self->{html_log_required}
        && ( $self->{html_log_required} =~ /yes/i ) )
    {
        print $fh
          qq|<HTML> <HEAD> <TITLE>LOG for $self->{endpoint}</TITLE> </HEAD>|
          . qq|<BODY><textarea rows="999999" cols="120" style="border:none;">|;
print $err_fh
qq|<HTML> <HEAD> <TITLE>ERROR LOG for $self->{endpoint}</TITLE> </HEAD>|
. qq|<BODY><textarea rows="999999" cols="120" style="border:none;">|;
}
print STDERR "\nTest Suite executed on $self->{endpoint}\n";
print $fh "\nTest Suite executed on $self->{endpoint}\n";
print $err_fh "\nTest Suite executed on $self->{endpoint}\n";
foreach my $count ( sort { $a <=> $b } keys(%test_cases) ) {
my $tc = $test_cases{$count};
$config++;
print $fh "\n", LINE, "\n";
if ( $tc->{execute} && ( $tc->{execute} =~ /no/i ) ) {
print $fh "\nSkipping Test case $count => $tc->{test_case} \n";
$skip++;
next;
}
$uri = qq|$self->{rest_uri_base}| . qq|$tc->{uri}|;
$method = uc( $tc->{request_method} );
$req_content_type = $tc->{request_content_type};
$req_body = $tc->{request_body} || 0;
$status = $tc->{response_status};
        unless ($req_body) {
            $request = HTTP::Request->new( $method, $uri );
            $request->authorization_basic( $username, $password )
              if ( $username && $password );
        } else {
$request =
HTTP::Request->new( $method, $uri, new HTTP::Headers, $req_body );
$request->authorization_basic( $username, $password )
if ( $username && $password );
$request->content_type($req_content_type);
$request->content_length( length($req_body) );
}
print STDERR "Executing Test case $count => $tc->{test_case}";
print $fh "Executing Test case $count => $tc->{test_case}";
my $start_time = time;
$response = $ua->request($request);
$total++;
my $exec_time = $self->delta_time( start_time => $start_time );
$total_response_time += $exec_time;
$exec_time = sprintf( "%.2f", $exec_time );
print STDERR " [Completed in $exec_time ms]\n";
print $fh " [Completed in $exec_time ms]\n";
            my $response_content_type = $response->header('Content-Type');
            unless ( defined $response_content_type ) {
                $failed = 1;
            } elsif ( $expected_response_content_type !~
                m/$response_content_type/ )
{
$failed = 1;
print $err_fh "\n", LINE, "\n";
print $err_fh
"Executing Test case $count => $tc->{test_case}";
print $err_fh
"\n*********ATTENTION CONTENT TYPE ERROR ******";
                print $err_fh
                  "\n\nExpected Content-Type is $expected_response_content_type\n";
                print $err_fh
                  "Content-Type received in response is $response_content_type\n";
print $err_fh
"\n*********ATTENTION CONTENT TYPE ERROR ******";
                $self->_print_logs(
                    fh       => $err_fh,
                    uri      => $uri,
                    method   => $method,
                    req_body => $req_body,
                );
                $self->_print_logs(
                    fh        => $err_fh,
                    res       => $response,
                    exec_time => $exec_time,
                );
}
}
($failed) ? $fail++ : $pass++;
} else {
$fail++;
print $err_fh "\n", LINE, "\n";
print $err_fh "Executing Test case $count => $tc->{test_case}";
$self->_print_logs(
fh => $err_fh,
uri => $uri,
method => $method,
req_body => $req_body,
);
$self->_print_logs(
fh => $err_fh,
res => $response,
exec_time => $exec_time,
);
}
}
    # convert milliseconds to seconds for the total response time
$total_response_time = sprintf( "%.2f", $total_response_time / 1000 );
my $avg_response_time =
sprintf( "%.2f", ( $total_response_time * 1000 ) / $total );
print STDERR "\nComplete test case report is in $self->{file}->{log_file}";
print STDERR
"\nFailed test case report is in $self->{file}->{err_log_file}\n\n";
print STDERR
"Response time of $total web service calls => [$total_response_time seconds]\n";
print STDERR
"Average response time of a web service => [$avg_response_time milli seconds]\n\n";
print $fh
"Response time of $total web service calls => [$total_response_time seconds]\n";
print $fh
"Average response time of a web service => [$avg_response_time milli seconds]\n\n";
if ( $self->{html_log_required}
&& ( $self->{html_log_required} =~ /yes/i ) )
{
print $fh qq|</textarea></BODY></HTML>|;
print $err_fh qq|</textarea></BODY></HTML>|;
}
$self->{test_result_log} = {
test_cases_in_config => $config,
        test_cases_executed  => $total,
test_cases_skipped => $skip,
test_cases_passed => $pass,
test_cases_failed => $fail,
};
close($fh);
close($err_fh);
}
=head2 execute_test_cases_in_parallel
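
Executes the test cases concurrently through a customized parallel user
agent; only GET cases are supported at present. A load-test sketch that
replicates the configured cases ten times, mirroring the --test-load
option of script/rest-client:

    my %cases = $suite->get_test_cases();
    my ( %load, $n );
    for my $round ( 1 .. 10 ) {
        $load{ ++$n } = $cases{$_} for sort { $a <=> $b } keys %cases;
    }
    $suite->execute_test_cases_in_parallel(%load);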
=cut
sub execute_test_cases_in_parallel {
my ($self) = shift;
    # Expects a hash keyed by test case number, with hash refs holding the
    # test specification as values; validate that before trying to execute.
my $err = $self->validate_test_cases(@_);
die "ERROR: $err\n" if ($err);
my %test_cases = @_;
    # Use my customized user agent for parallel invocations
my $pua = APP::REST::ParallelMyUA->new();
$pua->agent("RTAT/$VERSION");
$pua->in_order(1); # handle requests in order of registration
$pua->duplicates(0); # ignore duplicates
$pua->timeout(60); # in seconds
$pua->redirect(1); # follow redirects
$pua->default_header('Accept' => '*/*'); # to get cross platform support
    if ( $self->{html_log_required}
        && ( $self->{html_log_required} =~ /yes/i ) )
    {
print $fh
qq|<HTML> <HEAD> <TITLE>LOG for $self->{endpoint}</TITLE> </HEAD>|
. qq|<BODY><textarea rows="999999" cols="120" style="border:none;">|;
}
print STDERR "\nTest Suite executed on $self->{endpoint}\n";
print $fh "\nTest Suite executed on $self->{endpoint}\n";
my @reqs;
foreach my $count ( sort { $a <=> $b } keys(%test_cases) ) {
my $tc = $test_cases{$count};
$config++;
        if ( $tc->{execute} && ( $tc->{execute} =~ /no/i ) ) {
print $fh "\nSkipping Test case $count => $tc->{test_case} \n";
$skip++;
next;
}
$uri = qq|$self->{rest_uri_base}| . qq|$tc->{uri}|;
        # Only GET methods are supported at present
if ( $tc->{request_method} =~ /get/i ) {
            # Add the HTTP request to a pool for later execution by the
            # parallel user agent
            push( @reqs, HTTP::Request->new( 'GET', $uri ) );
        }
    }
    # convert milliseconds to seconds for the total response time
$total_response_time = sprintf( "%.2f", $total_response_time / 1000 );
my $avg_response_time =
sprintf( "%.2f", ( $total_response_time * 1000 ) / $total );
print STDERR
"\n\n\nComplete test case report is in $self->{file}->{log_file}";
print STDERR
"\n\nResponse time of $total web service calls => [$total_response_time seconds]\n";
print STDERR
"Average response time of a web service => [$avg_response_time milli seconds]\n\n";
print $fh
"\n\nResponse time of $total web service calls => [$total_response_time seconds]\n";
print $fh
"Average response time of a web service => [$avg_response_time milli seconds]\n\n";
if ( $self->{html_log_required}
&& ( $self->{html_log_required} =~ /yes/i ) )
{
print $fh qq|</textarea></BODY></HTML>|;
}
$self->{test_result_log} = {
test_cases_in_config => $config,
        test_cases_executed  => $total,
test_cases_skipped => $skip,
test_cases_passed => $pass,
test_cases_failed => $fail,
};
close($fh);
}
=head2 get_sample_test_suite
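
Writes the sample config file (the template stored in __DATA__) to disk so
that it can be edited into a project-specific test suite; the generated
path is available via get_sample_config_file().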
=cut
sub get_sample_test_suite {
my ( $self, %args ) = @_;
$self->_init_sample_config_file();
my $file = $self->{file};
my $wfh =
$self->_open_fh( FILE => $file->{sample_config_file}, MODE => 'WRITE' );
foreach ( @{$file->{config_file_content}}) {
print $wfh $_;
    }
}
sub _init_read_config {
my ( $self, %args ) = @_;
my $fh = $self->get_config_file_handle();
my @buffer = ();
my @start_end_buffer = ();
my ( $start_case, $end_case, $test_no ) = (undef) x 3;
my ( $start_common, $end_common ) = (undef) x 2;
my ( $start_http_code, $end_http_code ) = (undef) x 2;
my ( $start_tag, $lines_between, $end_tag ) = (undef) x 3;
my $separator = ":"; # separator used in config file for key/value pair
my %start_end_hash;
my $file = $self->{file};
while (<$fh>) {
push (@{$file->{config_file_content}} , $_);
        chomp($_);
        $_ = _trim($_);
next if ( $_ =~ m/^#+$/ || $_ =~ m/^\s*$/ || $_ =~ m/^#\s+/ );
last if ( $_ =~ m/^#END_OF_CONFIG_FILE\s*$/ );
## Process common configuration for all test cases
if ( $_ =~ m/^#START_COMMON_CONFIG\s*$/ ) {
$start_common = 1;
next;
}
$end_common = 1 if ( $_ =~ m/^#END_COMMON_CONFIG\s*$/ );
push( @buffer, $_ ) if ( $start_common && !$end_common );
if ( $start_common && $end_common ) {
foreach my $line (@buffer) {
my @val = split( $separator, $line );
$self->{ _trim( $val[0] ) } = _trim( $val[1] );
}
@buffer = ();
$start_common = 0;
$end_common = 0;
} elsif ( !$start_common && $end_common ) {
die "ERROR in config file format\n";
}
## Process test cases
if ( $_ =~ m/^#START_TEST_CASE\s*$/ ) {
$start_case = 1;
next;
}
$end_case = 1 if ( $_ =~ m/^#END_TEST_CASE\s*$/ );
push( @buffer, $_ ) if ( $start_case && !$end_case );
## Process [START] and [END] tag for any keys
$start_tag = 1 if ( $_ =~ m/^\s*\[START\]\s*$/ );
## create the key with the values given in between [START] and [END] tag
$start_end_hash{$line} = $req_body;
@start_end_buffer = ();
$start_tag = 0;
$end_tag = 0;
$lines_between = 0;
}
if ( $start_case && $end_case ) {
$test_no++;
foreach my $line (@buffer) {
my @val = split( $separator, $line );
$self->{test_cases}->{$test_no}->{ _trim( $val[0] ) } =
_trim( $val[1] );
}
            # add everything between the [START] and [END] tags to the respective key
while ( my ( $key, $value ) = each %start_end_hash ) {
$self->{test_cases}->{$test_no}->{$key} = $value;
}
%start_end_hash = ();
@buffer = ();
$start_case = 0;
$end_case = 0;
} elsif ( !$start_case && $end_case ) {
die "ERROR in config file format\n";
}
=head1 SEE ALSO
L<https://github.com/rkmithun/APP-REST-RestTestSuite>
=cut
1; # End of APP::REST::RestTestSuite
__DATA__
# RestTestSuite supports a config file of the format below.
# All values on the LHS of ':' are case sensitive.
# Every test case should be within a '#START_TEST_CASE' and '#END_TEST_CASE' block.
# Create an application-specific config file in the format below and pass the
# full path of the file as an argument to the constructor.
# For POST and PUT methods you need to supply the request body within
# [START] and [END] tags:
# request_body :
# [START]
# xml or json or form based
# [END]
################
# Set the values below to configure the base URL for all test cases
####################
#START_COMMON_CONFIG
################################################################################
endpoint : www.thomas-bayer.com
port :
base_uri : /sqlrest
html_log_required : no
username :
################################################################################
#END_COMMON_CONFIG
##################
#####################
#START_TEST_CASE
#####################
test_case : get_product
uri : /PRODUCT/49
request_content_type : application/xml
request_method : GET
request_body :
response_status : 200
execute : yes
response_content_type : application/xml
#####################
#END_TEST_CASE
#####################
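#####################
# A second, hypothetical test case illustrating how a POST request body is
# wrapped in [START] and [END] tags; the URI and payload below are
# illustrative only, so the case is marked execute : no.
#####################
#START_TEST_CASE
#####################
test_case : create_product
uri : /PRODUCT
request_content_type : application/xml
request_method : POST
request_body :
[START]
<resource><NAME>sample</NAME><PRICE>1.00</PRICE></resource>
[END]
response_status : 200
execute : no
response_content_type : application/xml
#####################
#END_TEST_CASE
#####################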
script/rest-client
use POSIX;
use APP::REST::RestTestSuite;
use Data::Dumper;
$Data::Dumper::Indent = 1;
my ( $config_file, $log_dir, $action, $suite );
my $client = File::Basename::basename($0);
my %options = (
'c|configfile=s' => \$config_file,
'l|logdir=s' => \$log_dir,
'r|run-test-suite' => \$action->{runtestsuite},
't|test-load=s' => \$action->{testload},
's|get-sample-config' => \$action->{getsampleconfig},
'd|debug' => \$action->{debug},
'h|help' => \&usage,
'V|version' =>
sub { print "Version : ", APP::REST::RestTestSuite->VERSION, "\n"; exit; },
);
mini_usage() unless @ARGV;
GetOptions(%options) or mini_usage();
sub usage {
print STDERR <<HELP;
Usage: $client [options] command [...]
Options:
-h,--help Display this usage.
-V,--version Print the version of the tool.
-c,--configfile=<file> Input the config file with full path.
-l,--logdir=<dir> Input full path of the directory where you want to log the test results.
-t,--test-load=n Test the average response time by simulating 'n' rounds of the configured requests against the web server.
Commands:
-r,--run-test-suite Test the configured web services defined in the config file.
-s,--get-sample-config Get a sample config file to configure your web services as a test suite.
***Note:
By default the tool uses the sample config file. You need to get that file and configure the suite.
Examples:
$client --get-sample-config
# Get sample config file for configuring web services in the current directory.
$client --run-test-suite --configfile=<rest_config_file>
# Execute the test suite against the supplied config file.
# Supply the full path of config file if it is not present in current directory.
$client --test-load=10 --configfile=<rest_config_file>
# Send parallel requests (10 x the number of web services configured in the config file).
# Reports the average response time by simulating heavy traffic on the web server.
$client --run-test-suite --configfile=<rest_config_file> --logdir=<log-directory-path>
$client --test-load=10 --configfile=<rest_config_file> --logdir=<log-directory-path>
# Execute the test cases and create LOG files in the specified path.
HELP
exit;
}
print
"\n===============================Rest Test Automation Tool=============================\n";
if ( $^O =~ /Win/ ) {
$config_file =~ s/\//\\/g if ($config_file);
} elsif ($config_file) {
print "Parsing Config File : $config_file\n\n";
    $suite = APP::REST::RestTestSuite->new( REST_CONFIG_FILE => $config_file );
} elsif ($log_dir) {
print <<HELP;
Using log dir as : $log_dir
Using the default test suite. This is just for demo purposes.
Use the options below to specify the config file and log path.
$client --configfile=<rest_config_file> --logdir=<log-directory-path>
HELP
    $suite = APP::REST::RestTestSuite->new( LOG_FILE_PATH => $log_dir );
} else {
unless ( $action->{getsampleconfig} ) {
print <<HELP;
Using the default test suite. This is just for demo purposes.
Use the options below to specify the config file and log path.
$client --configfile=<rest_config_file> --logdir=<log-directory-path>
Use the option below to get a sample config; edit that file to configure your web services.
$client --get-sample-config
HELP
}
    $suite = APP::REST::RestTestSuite->new();
}
if ( $action->{debug} ) {
print Dumper $suite;
exit;
} elsif ( $action->{runtestsuite} ) {
$suite->execute_test_cases( $suite->get_test_cases() );
my ( $config, $total, $skip, $pass, $fail ) = $suite->get_result_summary();
print "=" x 70, "\n";
print "Summary of web service Suite \n";
print "=" x 70, "\n";
print "Total test cases configured : $config\n";
print "Total test cases executed : $total\n";
print "Total test cases skipped : $skip\n";
print "Total test cases pass : $pass\n";
print "Total test cases failed : $fail\n";
print "=" x 70, "\n";
exit;
} elsif ( $action->{testload} ) {
die "--test-load accepts only numeric argument\n"
unless ( $action->{testload} =~ /^\d+$/ );
my %test_cases = $suite->get_test_cases();
    # Replicate the configured test cases to load your API $load times
my $tc_in_config = scalar keys %test_cases;
my $load = $action->{testload};
#$load = floor(500 / $tc_in_config);
my %load_test_cases;
    # If the config file has 10 test cases, the total becomes 10 * $load
my $total_tc = 0;
while ( $load > 0 ) {
foreach my $num ( sort { $a <=> $b } keys(%test_cases) ) {
my $tc = $test_cases{$num};
$total_tc++;
$load_test_cases{$total_tc} = $tc;
}
$load--;
}
# require Data::Dumper;
# $Data::Dumper::Indent = 1;
# print Dumper $suite;
# foreach my $num (sort { $a <=> $b } keys(%load_test_cases)) {
# print Dumper $load_test_cases{$num};
# }
# print sort{ $a <=> $b } keys %load_test_cases;
# exit;
$suite->execute_test_cases_in_parallel(%load_test_cases);
my ( $config, $total, $skip, $pass, $fail ) = $suite->get_result_summary();
print "=" x 70, "\n";
print "Summary of web service load test \n";
print "=" x 70, "\n";
print "Total test cases in config: $config\n";
print "Total test cases executed : $total\n";
print "Total test cases skipped : $skip\n";
print "=" x 70, "\n";
exit;
} elsif ( $action->{getsampleconfig} ) {
$suite->get_sample_test_suite();
print "Sample Config file to configure the web services got created\n";
print "in ", $suite->get_sample_config_file(), "\n";
print "Edit and rename this file as project specific test suite.\n\n";
exit;
} else {
print "\nUse below options to execute the test cases\n\n";
print "$client --configfile=<rest_config_file> --run-test-suite\n\n";
exit;
}
__END__
=head1 NAME
rest-client - Test automation tool for RESTful web services
=head1 SYNOPSIS
Usage: rest-client [options] command [...]
Options:
-h,--help Display this usage.
-V,--version Print the version of the tool.
-c,--configfile=<file> Input the config file with full path.
-l,--logdir=<dir> Input full path of the directory where you want to log the test results.
-t,--test-load=n Test the average response time by simulating 'n' rounds of the configured requests against the web server.
Commands:
-r,--run-test-suite Test the configured web services defined in the config file.
-s,--get-sample-config Get a sample config file to configure your web services as a test suite.
***Note:
By default the tool uses the sample config file. You need to get that file and configure the suite.
Examples:
rest-client --get-sample-config
# Get sample config file for configuring web services in the current directory.
rest-client --run-test-suite --configfile=<rest_config_file>
# Execute the test suite against the supplied config file.
# Supply the full path of config file if it is not present in current directory.
rest-client --test-load=10 --configfile=<rest_config_file>
# Send parallel requests (10 x the number of web services configured in the config file).
# Reports the average response time by simulating heavy traffic on the web server.
rest-client --run-test-suite --configfile=<rest_config_file> --logdir=<log-directory-path>
rest-client --test-load=10 --configfile=<rest_config_file> --logdir=<log-directory-path>
# Execute the test cases and create LOG files in the specified path.
=head1 SEE ALSO
L<https://github.com/rkmithun/APP-REST-RestTestSuite>
=cut
t/00-load.t
#!perl -T
use 5.006;
use strict;
use warnings FATAL => 'all';
use Test::More;
plan tests => 1;
BEGIN {
use_ok( 'APP::REST::RestTestSuite' ) || print "Bail out!\n";
}
diag( "Testing APP::REST::RestTestSuite $APP::REST::RestTestSuite::VERSION, Perl $], $^X" );
t/boilerplate.t
#!perl -T
use 5.006;
use strict;
use warnings FATAL => 'all';
use Test::More;
plan tests => 3;
sub not_in_file_ok {
my ($filename, %regex) = @_;
open( my $fh, '<', $filename )
or die "couldn't open $filename for reading: $!";
my %violated;
while (my $line = <$fh>) {
while (my ($desc, $regex) = each %regex) {
t/manifest.t
#!perl -T
use 5.006;
use strict;
use warnings FATAL => 'all';
use Test::More;
unless ( $ENV{RELEASE_TESTING} ) {
plan( skip_all => "Author tests not required for installation" );
}
my $min_tcm = 0.9;
eval "use Test::CheckManifest $min_tcm";
plan skip_all => "Test::CheckManifest $min_tcm required" if $@;
ok_manifest();
t/pod-coverage.t
#!perl -T
use 5.006;
use strict;
use warnings FATAL => 'all';
use Test::More;
# Ensure a recent version of Test::Pod::Coverage
my $min_tpc = 1.08;
eval "use Test::Pod::Coverage $min_tpc";
plan skip_all => "Test::Pod::Coverage $min_tpc required for testing POD coverage"
if $@;
# Test::Pod::Coverage doesn't require a minimum Pod::Coverage version,
# but older versions don't recognize some common documentation styles
my $min_pc = 0.18;
eval "use Pod::Coverage $min_pc";
plan skip_all => "Pod::Coverage $min_pc required for testing POD coverage"
if $@;
all_pod_coverage_ok();
t/pod.t
#!perl -T
use 5.006;
use strict;
use warnings FATAL => 'all';
use Test::More;
# Ensure a recent version of Test::Pod
my $min_tp = 1.22;
eval "use Test::Pod $min_tp";
plan skip_all => "Test::Pod $min_tp required for testing POD" if $@;
all_pod_files_ok();