lib/AI/MXNet/Metric.pm
lib/AI/MXNet/Executor/Group.pm
lib/AI/MXNet/NDArray.pm
lib/AI/MXNet/RNN/Cell.pm
lib/AI/MXNet/RNN/IO.pm
lib/AI/MXNet/LRScheduler.pm
lib/AI/MXNet/Callback.pm
lib/AI/MXNet/IO.pm
lib/AI/MXNet/Module/Bucketing.pm
lib/AI/MXNet/Module/Base.pm
lib/AI/MXNet/TestUtils.pm
lib/AI/MXNet/Logging.pm
README
lib/AI/MXNet.pm
=head1 SYNOPSIS
## Convolutional NN for recognizing hand-written digits in the MNIST dataset.
## It's considered the "Hello, World" of neural networks.
## For more info about the MNIST problem, see http://neuralnetworksanddeeplearning.com/chap1.html
use strict;
use warnings;
use AI::MXNet qw(mx);
use AI::MXNet::TestUtils qw(GetMNIST_ubyte);
use Test::More tests => 1;
# symbol net
my $batch_size = 100;
### model
my $data = mx->symbol->Variable('data');
my $conv1 = mx->symbol->Convolution(data => $data, name => 'conv1', num_filter => 32, kernel => [3,3], stride => [2,2]);
my $bn1 = mx->symbol->BatchNorm(data => $conv1, name => "bn1");
my $act1 = mx->symbol->Activation(data => $bn1, name => 'relu1', act_type => "relu");
my $mp1 = mx->symbol->Pooling(data => $act1, name => 'mp1', kernel => [2,2], stride => [2,2], pool_type => 'max');
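## The synopsis continues beyond this excerpt. As an illustrative sketch (the
## $softmax symbol, iterator names, and hyperparameters below are assumptions,
## not the original code), such a network is typically trained with the Module API:
my $mod = mx->mod->Module(symbol => $softmax, context => mx->cpu);
$mod->fit(
    $train_iter,
    eval_data        => $val_iter,
    optimizer        => 'adam',
    optimizer_params => { learning_rate => 1e-3 },
    num_epoch        => 10
);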
lib/AI/MXNet/Optimizer.pm
        # AdaDelta: current_delta = sqrt(acc_delta + eps) / sqrt(acc_g + eps) * grad
        # (the head of this statement is elided by the excerpt; reconstructed from the standard AdaDelta rule)
        my $current_delta = ($acc_delta + $self->epsilon)->sqrt
                            /
                            ($acc_g + $self->epsilon)->sqrt
                            *
                            $grad;
        $acc_delta .= $self->rho * $acc_delta + (1 - $self->rho) * $current_delta * $current_delta;
        $weight -= $current_delta + $wd * $weight;
}
__PACKAGE__->register;
# For test use
package AI::MXNet::Test;
use Mouse;
extends 'AI::MXNet::Optimizer';
# Create a state matching the weight's shape and context
method create_state(Index $index, AI::MXNet::NDArray $weight)
{
    return AI::MXNet::NDArray->zeros(
        $weight->shape,
        ctx => $weight->context
    );
}
lib/AI/MXNet/TestUtils.pm
package AI::MXNet::TestUtils;
use strict;
use warnings;
use PDL;
use Carp;
use Scalar::Util qw(blessed);
use AI::MXNet::Function::Parameters;
use Exporter;
use base qw(Exporter);
@AI::MXNet::TestUtils::EXPORT_OK = qw(same reldiff almost_equal GetMNIST_ubyte
GetCifar10 pdl_maximum pdl_minimum mlp2 conv
check_consistency zip assert enumerate same_array dies_like);
use constant default_numerical_threshold => 1e-6;
=head1 NAME
AI::MXNet::TestUtils - Convenience subs used in tests.
=head2 same
Test if two pdl arrays are the same
Parameters
----------
a : pdl
b : pdl
=cut
func same(PDL $a, PDL $b)
{
    return ($a != $b)->sum == 0;
}
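Illustrative usage by a hypothetical caller (not part of the module):

    use PDL;
    use AI::MXNet::TestUtils qw(same);
    print same(pdl([1,2,3]), pdl([1,2,3])) ? "same\n" : "differ\n";  # prints "same"
    print same(pdl([1,2,3]), pdl([1,2,4])) ? "same\n" : "differ\n";  # prints "differ"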
lib/AI/MXNet/TestUtils.pm
# (tail of reldiff($a, $b); the elided head computes $diff = sum(|$a - $b|) and a
#  normalizing $norm from the inputs' magnitudes)
if($diff == 0)
{
return 0;
}
my $ret = $diff / $norm;
return $ret;
}
=head2 almost_equal
Test if two pdl arrays are almost equal.
=cut
func almost_equal(PDL $a, PDL $b, Maybe[Num] $threshold=)
{
$threshold //= default_numerical_threshold;
my $rel = reldiff($a, $b);
return $rel <= $threshold;
}
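Illustrative usage (hypothetical caller; the default threshold is the
default_numerical_threshold constant above, 1e-6):

    use PDL;
    use AI::MXNet::TestUtils qw(almost_equal);
    my $x = pdl([1.0, 2.0, 3.0]);
    my $y = $x + 1e-9;                                        # tiny perturbation
    print almost_equal($x, $y)        ? "close\n" : "far\n";  # close
    print almost_equal($x, $y, 1e-12) ? "close\n" : "far\n";  # far under a tighter threshold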
func GetMNIST_ubyte()
lib/AI/MXNet/TestUtils.pm
    # (tail of the per-dtype default-tolerance hash; its head is elided by the excerpt)
    int32 => 0
};
# a scalar $tol is broadcast to every dtype
$tol = {
    float16 => $tol,
    float32 => $tol,
    float64 => $tol,
    uint8   => $tol,
    int32   => $tol
} unless ref $tol;
Test::More::ok(@$ctx_list > 1);
if(blessed $sym)
{
$sym = [($sym)x@$ctx_list];
}
else
{
Test::More::ok(@$sym == @$ctx_list);
}
my $output_names = $sym->[0]->list_outputs;
my $arg_names = $sym->[0]->list_arguments;
my @exe_list;
zip(sub {
my ($s, $ctx) = @_;
Test::More::is_deeply($s->list_arguments, $arg_names);
Test::More::is_deeply($s->list_outputs, $output_names);
push @exe_list, $s->simple_bind(grad_req=>$grad_req, %$ctx);
}, $sym, $ctx_list);
$arg_params //= {};
$aux_params //= {};
my %arg_dict = %{ $exe_list[0]->arg_dict };
while(my ($n, $arr) = each %arg_dict)
{
if(not exists $arg_params->{$n})
{
$arg_params->{$n} = random(reverse @{ $arr->shape })*$scale;
lib/AI/MXNet/TestUtils.pm
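# Compare each executor's forward outputs against the ground truth in $gt
# (taken from the reference executor at index $max_idx, presumably the highest-precision one).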
enumerate(sub {
my ($i, $exe) = @_;
if($i == $max_idx)
{
return;
}
zip(sub {
my ($name, $arr) = @_;
my $gtarr = $gt->{$name}->astype($dtypes[$i])->aspdl;
$arr = $arr->aspdl;
Test::More::ok(
almost_equal(
$arr, $gtarr,
$tol->{$dtypes[$i]}
)
);
}, $output_names, $exe->outputs);
}, \@exe_list);
# train
if ($grad_req ne 'null')
lib/AI/MXNet/TestUtils.pm
$exe->forward(1);
$exe->backward($exe->outputs);
}
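# After the backward pass, compare both outputs and input gradients against the reference executor.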
enumerate(sub {
my ($i, $exe) = @_;
return if($i == $max_idx);
zip(sub {
my ($name, $arr) = @_;
if (not defined $gt->{$name})
{
Test::More::ok(not defined $arr);
return;
}
my $gtarr = $gt->{$name}->astype($dtypes[$i])->aspdl;
$arr = $arr->aspdl;
Test::More::ok(
almost_equal(
$arr, $gtarr,
$tol->{$dtypes[$i]}
)
);
}, [@$output_names, @$arg_names], [@{ $exe->outputs }, @{ $exe->grad_arrays }]);
}, \@exe_list);
}
return $gt;
}
t/AI-MXNet.t
use strict;
use warnings;
use Test::More tests => 1;
BEGIN { use_ok('AI::MXNet') };
t/test_attr.t
use strict;
use warnings;
use Test::More tests => 14;
use AI::MXNet qw(mx);
use Storable;
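# contains(\%x, \%y): true if every key of %x exists in %y with a matching value,
# descending into nested hashes.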
sub contains
{
my ($x, $y) = @_;
while(my ($k, $v) = each %$x)
{
return 0 unless exists $y->{$k};
if(ref $y->{$k} and ref $y->{$k} eq 'HASH')
t/test_conv.t
use strict;
use warnings;
use AI::MXNet qw(mx);
use AI::MXNet::TestUtils qw(GetMNIST_ubyte);
use Test::More tests => 1;
## speed up the tests when a GPU is present; the probe runs in a subprocess
## with stderr discarded so a CPU-only build doesn't kill this test
my $gpu_present = (`perl -e 'use AI::MXNet qw(mx); print mx->nd->ones([1], ctx => mx->gpu(0))->asscalar' 2>/dev/null` eq '1');
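# Illustrative (hypothetical) use of the flag to pick a context:
my $ctx = $gpu_present ? mx->gpu(0) : mx->cpu;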
# symbol net
my $batch_size = 100;
### model
my $data = mx->symbol->Variable('data');
my $conv1 = mx->symbol->Convolution(data => $data, name => 'conv1', num_filter => 32, kernel => [3,3], stride => [2,2]);
t/test_executor.t
use strict;
use warnings;
use Test::More tests => 2283;
use AI::MXNet qw(mx);
use AI::MXNet::TestUtils qw(reldiff pdl_maximum pdl_minimum);
use PDL;
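# check_bind_with_uniform: bind a two-input symbol over uniform random data and check
# the forward result ($uf) and gradients ($gf) against PDL reference implementations.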
sub check_bind_with_uniform
{
my ($uf, $gf, $dim, $sf, $lshape, $rshape) = @_;
my $shape = (random($dim)*int(1000**(1.0/$dim))+1)->floor->unpdl;
my $lhs = mx->symbol->Variable('lhs');
my $rhs = mx->symbol->Variable('rhs');
my $ret;
if(defined $sf)
t/test_infer_shape.t
use strict;
use warnings;
use Test::More tests => 18;
use AI::MXNet qw(mx);
use AI::MXNet::TestUtils qw(mlp2);
sub _test_shapes
{
my ($sym, $arg_shapes, %expected_shapes) = @_;
my %arg_shape_dict;
@arg_shape_dict{ @{ $sym->list_arguments() } } = @{ $arg_shapes };
while(my ($k, $v) = each %expected_shapes)
{
is_deeply($arg_shape_dict{$k}, $v);
}
t/test_infer_shape.t
fc2_bias => [10],
fc2_weight => [10, 1000],
fc1_bias => [1000],
fc1_weight => [1000,100]
);
_test_shapes($out, $arg_shapes, %true_shapes);
}
sub test_mlp2_infer_error
{
# Test an inconsistent-shape case
my $out = mlp2();
my $weight_shape = [1, 100];
my $data_shape = [100, 100];
eval { $out->infer_shape(data=>$data_shape, fc1_weight=>$weight_shape) };
like($@, qr/Shape inconsistent/);
}
sub test_backward_infer
{
my $w = mx->sym->Variable("weight");
t/test_init.t
use strict;
use warnings;
use Test::More tests => 4;
use AI::MXNet qw(mx);
sub test_default_init
{
my $data = mx->sym->Variable('data');
my $sym = mx->sym->LeakyReLU(data => $data, act_type => 'prelu');
my $mod = mx->mod->Module($sym);
$mod->bind(data_shapes=>[['data', [10,10]]]);
$mod->init_params;
ok((((values %{ ($mod->get_params)[0] }))[0]->aspdl == 0.25)->all);
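# ('prelu' initializes its slope parameter to 0.25 by default; the check above relies on that.)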
t/test_io.t
use AI::MXNet qw(mx);
use Test::More tests => 31;
use AI::MXNet::TestUtils qw(same reldiff GetMNIST_ubyte GetCifar10);
use PDL;
use PDL::Types;
use PDL::NiceSlice;
$|++;
sub test_Cifar10Rec()
{
GetCifar10();
my $dataiter = mx->io->ImageRecordIter({
t/test_io_image.t
use strict;
use warnings;
use Test::More tests => 1;
use AI::MXNet qw(mx);
use Time::HiRes qw(time);
sub run_imageiter
{
my ($path_rec, $n, $batch_size) = @_;
$batch_size //= 32;
my $data = mx->img->ImageIter(
batch_size=>$batch_size,
data_shape=>[3, 224, 224],
t/test_kvstore.t
use strict;
use warnings;
use Test::More tests => 38;
use AI::MXNet qw(mx);
my $shape = [4, 4];
my $keys = [5,7,9];
sub init_kv
{
# init kv
my $kv = mx->kv->create();
# single
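# The excerpt cuts off here. A minimal sketch (illustrative, not the original body,
# assuming the API mirrors the Python bindings' init/push/pull) of a KVStore round-trip:
#     my $kv = mx->kv->create();               # default 'local' store
#     $kv->init(3, mx->nd->ones($shape));      # register key 3
#     $kv->push(3, mx->nd->ones($shape) * 4);  # send an update
#     my $out = mx->nd->zeros($shape);
#     $kv->pull(3, out => $out);               # $out is now all 4s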
t/test_model_parallel.t
use strict;
use warnings;
use Test::More tests => 4;
use AI::MXNet qw(mx);
use AI::MXNet::TestUtils qw(reldiff);
use AI::MXNet::Base;
sub test_chain
{
my $ctx1 = mx->cpu(0);
my $ctx2 = mx->cpu(1);
my $n = 2;
my $data1 = mx->sym->Variable('data1');
my $data2 = mx->sym->Variable('data2');
my $data3 = mx->sym->Variable('data3');
t/test_module.t
use strict;
use warnings;
use Test::More tests => 257;
use AI::MXNet qw(mx);
use AI::MXNet::Base;
use AI::MXNet::TestUtils qw(almost_equal enumerate same_array dies_like);
use Data::Dumper;
sub test_module_layout
{
my $sym = mx->sym->Variable('data');
$sym = mx->sym->Activation(data=>$sym, act_type=>'relu', __layout__=>'TNC');
my $dshape = [3, 8, 7];
my $mod = mx->mod->Module(
$sym,
t/test_module.t
my $pred = mx->sym->Reshape($outputs, shape=>[-1, $num_hidden]);
$pred = mx->sym->FullyConnected(data=>$pred, num_hidden=>$num_words, name=>'pred');
$label = mx->sym->Reshape($label, shape=>[-1]);
$pred = mx->sym->SoftmaxOutput(data=>$pred, label=>$label, name=>'softmax');
return $pred;
};
my $test_shared_exec_group = sub {
    my ($exec_grp_shared, $exec_grp_created, $shared_arg_names, $extra_args) = @_;
# Test shared data arrays
for my $i (0..@{ $exec_grp_shared->execs }-1)
{
# test same shared_data_arrays for two exec groups
my $shared_data_array1 = $exec_grp_shared->shared_data_arrays->[$i];
my $shared_data_array2 = $exec_grp_created->shared_data_arrays->[$i];
if(defined $extra_args)
{
ok(keys(%$shared_data_array1) == @$extra_args);
}
ok(keys(%$shared_data_array1) == keys(%$shared_data_array2));
while(my ($k, $v) = each %{ $shared_data_array1 })
{
if(defined $extra_args)
{
ok(grep { $_ eq $k } @$extra_args);
}
ok(exists $shared_data_array2->{$k});
ok(same_array($v, $shared_data_array2->{$k}));
}
# Test shared argument arrays and gradient arrays
my $exec_shared = $exec_grp_shared->execs->[$i];
my $exec_created = $exec_grp_created->execs->[$i];
if(defined $shared_arg_names)
{
# test shared arguments
for my $arg_name (@$shared_arg_names)
{
ok(exists $exec_created->arg_dict->{$arg_name});
ok(same_array($exec_shared->arg_dict->{$arg_name}, $exec_created->arg_dict->{$arg_name}));
}
t/test_module.t
$dshape2 = [5, 3, 24, 16];
$lshape = [5];
$data_batch = mx->io->DataBatch(data=>[mx->nd->random_uniform(0, 9, $dshape1),
mx->nd->random_uniform(15, 25, $dshape2)],
label=>[mx->nd->ones($lshape)]);
$mod->forward($data_batch);
is_deeply($mod->get_outputs->[0]->shape, [$lshape->[0], $num_class]);
$mod->backward();
$mod->update();
# Test score
my $dataset_shape1 = [30, 3, 30, 30];
my $dataset_shape2 = [30, 3, 20, 40];
my $labelset_shape = [30];
my $eval_dataiter = mx->io->NDArrayIter(data=>[mx->nd->random_uniform(0, 9, $dataset_shape1),
mx->nd->random_uniform(15, 25, $dataset_shape2)],
label=>[mx->nd->ones($labelset_shape)],
batch_size=>5);
ok(keys %{ $mod->score($eval_dataiter, 'acc') } == 1);
# Test prediction
$dshape1 = [1, 3, 30, 30];
$dshape2 = [1, 3, 20, 40];
$dataset_shape1 = [10, 3, 30, 30];
$dataset_shape2 = [10, 3, 20, 40];
my $pred_dataiter = mx->io->NDArrayIter(data=>[mx->nd->random_uniform(0, 9, $dataset_shape1),
mx->nd->random_uniform(15, 25, $dataset_shape2)]);
$mod->bind(data_shapes=>[['data1', $dshape1], ['data2', $dshape2]],
for_training=>0, force_rebind=>1);
is_deeply($mod->predict($pred_dataiter)->shape, [10, $num_class]);
t/test_multi_device_exec.t
use strict;
use warnings;
use Test::More tests => 10;
use AI::MXNet qw(mx);
use AI::MXNet::Base;
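# ctx_group attributes tag operators so that, at bind time, each group can be
# mapped to its own device (model parallelism).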
sub test_ctx_group
{
my ($data, $fc1, $act1);
{
local($mx::AttrScope) = mx->AttrScope(ctx_group=>'stage1');
$data = mx->symbol->Variable('data');
$fc1 = mx->symbol->FullyConnected(data => $data, name=>'fc1', num_hidden=>128);
t/test_ndarray.t
use strict;
use warnings;
use AI::MXNet qw(mx);
use AI::MXNet::TestUtils qw(almost_equal);
use Test::More tests => 10;
sub test_ndarray_reshape
{
my $tensor = mx->nd->array([[[1, 2], [3, 4]],
[[5, 6], [7, 8]]]);
my $true_res = mx->nd->arange(stop => 8) + 1;
is_deeply($tensor->reshape([-1])->aspdl->unpdl, $true_res->aspdl->unpdl);
$true_res = mx->nd->array([[1, 2, 3, 4],
[5, 6, 7, 8]]);
is_deeply($tensor->reshape([2, -1])->aspdl->unpdl, $true_res->aspdl->unpdl);
t/test_optimizers.t
$weight32 += $mom;
}
}
my $tmp = $weight32->astype($weight->dtype);
$tmp->copyto($weight);
}
}
package main;
use Test::More tests => 1314;
use AI::MXNet::Base;
use PDL::NiceSlice;
use AI::MXNet::TestUtils qw(same reldiff almost_equal);
use AI::MXNet::Function::Parameters;
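# compare_optimizer: run two optimizer implementations on identical weight/gradient
# pairs and check that the updated weights agree.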
func compare_optimizer($opt1, $opt2, $shape, $dtype)
{
my $w1 = mx->random->uniform({shape => $shape, dtype=>$dtype});
my $g1 = mx->random->uniform({shape => $shape, dtype=>$dtype});
my $w2 = $w1->copyto(mx->cpu());
my $g2 = $g1->copyto(mx->cpu());
t/test_random.t
use strict;
use warnings;
use Test::More tests => 8;
use AI::MXNet qw(mx);
use AI::MXNet::TestUtils qw(same);
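# check_with_device: seed the RNG, then draw normal and uniform samples on the
# given device and sanity-check their statistics.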
sub check_with_device
{
my ($device) = @_;
my ($a, $b) = (-10, 10);
my ($mu, $sigma) = (10, 2);
my $shape = [100, 100];
mx->random->seed(128);
my $ret1 = mx->random->normal($mu, $sigma, $shape, { ctx => $device });
my $un1 = mx->random->uniform($a, $b, $shape, { ctx => $device });
t/test_recordio.t
use strict;
use warnings;
use AI::MXNet qw(mx);
use Test::More tests => 1711;
use File::Temp qw/tempfile/;
use PDL;
sub test_recordio
{
my ($fd, $frec) = tempfile();
my $N = 255;
my $writer = mx->recordio->MXRecordIO($frec, 'w');
for my $i (0..$N-1)
t/test_rnn.t
use strict;
use warnings;
use AI::MXNet qw(mx);
use AI::MXNet::TestUtils qw(same);
use PDL;
use Test::More tests => 54;
sub test_rnn
{
my $cell = mx->rnn->RNNCell(100, prefix=>'rnn_');
my ($outputs) = $cell->unroll(3, input_prefix=>'rnn_');
$outputs = mx->sym->Group($outputs);
is_deeply([sort keys %{$cell->params->_params}], ['rnn_h2h_bias', 'rnn_h2h_weight', 'rnn_i2h_bias', 'rnn_i2h_weight']);
is_deeply($outputs->list_outputs(), ['rnn_t0_out_output', 'rnn_t1_out_output', 'rnn_t2_out_output']);
my (undef, $outs, undef) = $outputs->infer_shape(rnn_t0_data=>[10,50], rnn_t1_data=>[10,50], rnn_t2_data=>[10,50]);
is_deeply($outs, [[10, 100], [10, 100], [10, 100]]);
t/test_symbol.t
use strict;
use warnings;
use Test::More tests => 98;
use AI::MXNet qw(mx);
use AI::MXNet::TestUtils qw(mlp2 conv check_consistency zip assert enumerate);
use Storable qw(freeze thaw);
use PDL;
sub test_symbol_compose
{
my $data = mx->symbol->Variable('data');
my $net1 = mx->symbol->FullyConnected(data=>$data, name=>'fc1', num_hidden=>10);
$net1 = mx->symbol->FullyConnected(data=>$net1, name=>'fc2', num_hidden=>100);
is_deeply($net1->list_arguments(), ['data',
'fc1_weight', 'fc1_bias',
t/test_symbol.t
@arg_shapes{ @{ $out->list_arguments } } = @{ $arg_shapes };
is_deeply($arg_shapes{data}, [$num_sample, $num_dim]);
is_deeply($arg_shapes{x2h_weight}, [$num_hidden, $num_dim]);
is_deeply($arg_shapes{h2h_weight}, [$num_hidden, $num_hidden]);
}
test_symbol_infer_shape();
sub test_symbol_infer_shape_var
{
# Test specifying shape information when constructing a variable
my $shape = [2, 3];
my $a = mx->symbol->Variable('a', shape=>$shape);
my $b = mx->symbol->Variable('b');
my $c = mx->symbol->elemwise_add($a, $b);
my ($arg_shapes, $out_shapes, $aux_shapes) = $c->infer_shape();
is_deeply($arg_shapes->[0], $shape);
is_deeply($arg_shapes->[1], $shape);
is_deeply($out_shapes->[0], $shape);
$shape = [5, 6];
t/test_viz.t
use AI::MXNet qw(mx);
use Test::More tests => 1;
sub test_print_summary
{
my $data = mx->sym->Variable('data');
my $bias = mx->sym->Variable('fc1_bias', lr_mult => 1.0);
my $conv1 = mx->sym->Convolution(data => $data, name => 'conv1', num_filter => 32, kernel => [3,3], stride => [2,2]);
my $bn1 = mx->sym->BatchNorm(data => $conv1, name => "bn1");
my $act1 = mx->sym->Activation(data => $bn1, name => 'relu1', act_type => "relu");
my $mp1 = mx->sym->Pooling(data => $act1, name => 'mp1', kernel => [2,2], stride => [2,2], pool_type => 'max');
my $fc1 = mx->sym->FullyConnected(data => $mp1, bias => $bias, name => 'fc1', num_hidden => 10, lr_mult => 0);