examples/calculator.pl
#!/usr/bin/perl
use strict;
use warnings;
use AI::MXNet ('mx');
## preparing the samples
## to train our network
sub samples {
my($batch_size, $func) = @_;
# get samples
my $n = 16384;
## creates a pdl with $n rows and two columns of random
## floats in the range [0, 1)
my $data = PDL->random(2, $n);
## creates a pdl with $n rows and one column of labels;
## each label is the sum, product, etc. (computed by $func) of the
## two random values in the corresponding row of the data pdl
my $label = $func->($data->slice('0,:'), $data->slice('1,:'));
examples/calculator.pl
data => $train_data,
label => $train_label,
), mx->io->NDArrayIter(
batch_size => $batch_size,
data => $validation_data,
label => $validation_label,
));
}
## the network model
sub nn_fc {
my $data = mx->sym->Variable('data');
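## exp(FullyConnected(log(x))) lets a single linear layer express
## products and quotients: log(a*b) = log(a) + log(b), so a learned
## weighted sum in log space turns back into a product after exp()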
my $ln = mx->sym->exp(mx->sym->FullyConnected(
data => mx->sym->log($data),
num_hidden => 1,
));
my $wide = mx->sym->Concat($data, $ln);
my $fc = mx->sym->FullyConnected(
$wide,
num_hidden => 1
);
return mx->sym->MAERegressionOutput(data => $fc, name => 'softmax');
}
sub learn_function {
my(%args) = @_;
my $func = $args{func};
my $batch_size = $args{batch_size}//128;
my($train_iter, $eval_iter) = samples($batch_size, $func);
my $sym = nn_fc();
## call as ./calculator.pl 1 to just print model and exit
if($ARGV[0]) {
my @dsz = @{$train_iter->data->[0][1]->shape};
my @lsz = @{$train_iter->label->[0][1]->shape};
examples/calculator.pl
data_shapes => $iter->provide_data,
label_shapes => $iter->provide_label,
);
# wrap a helper around making predictions
my ($arg_params) = $model->get_params;
for my $k (sort keys %$arg_params)
{
print "$k -> ". $arg_params->{$k}->aspdl."\n";
}
return sub {
my($n, $m) = @_;
return $model->predict(mx->io->NDArrayIter(
batch_size => 1,
data => PDL->new([[ $n, $m ]]),
))->aspdl->list;
};
}
my $add = learn_function(func => sub {
my($n, $m) = @_;
return $n + $m;
});
my $sub = learn_function(func => sub {
my($n, $m) = @_;
return $n - $m;
}, batch_size => 50, epoch => 40);
my $mul = learn_function(func => sub {
my($n, $m) = @_;
return $n * $m;
}, batch_size => 50, epoch => 40);
my $div = learn_function(func => sub {
my($n, $m) = @_;
return $n / $m;
}, batch_size => 10, epoch => 80);
print "12345 + 54321 ≈ ", $add->(12345, 54321), "\n";
print "188 - 88 ≈ ", $sub->(188, 88), "\n";
print "250 * 2 ≈ ", $mul->(250, 2), "\n";
print "250 / 2 ≈ ", $div->(250, 2), "\n";
examples/char_lstm.pl
'optimizer=s' => \(my $optimizer = 'adam' ),
'mom=f' => \(my $mom = 0 ),
'wd=f' => \(my $wd = 0.00001 ),
'batch-size=i' => \(my $batch_size = 32 ),
'disp-batches=i' => \(my $disp_batches = 50 ),
'chkp-prefix=s' => \(my $chkp_prefix = 'lstm_' ),
'cell-mode=s' => \(my $cell_mode = 'LSTM' ),
'sample-size=i' => \(my $sample_size = 10000 ),
'chkp-epoch=i' => \(my $chkp_epoch = 1 ),
'bidirectional=i'=> \(my $bidirectional= 0 ),
'help' => sub { HelpMessage(0) },
) or HelpMessage(1);
=head1 NAME
char_lstm.pl - Example of training a char-level LSTM RNN on tiny Shakespeare using the high-level RNN interface,
with optional inferred sampling (the RNN generates Shakespeare-like text)
=head1 SYNOPSIS
--num-layers number of stacked RNN layers, default=2
examples/char_lstm.pl
extends AI::MXNet::DataIter;
has 'data' => (is => 'ro', isa => 'PDL', required => 1);
has 'seq_size' => (is => 'ro', isa => 'Int', required => 1);
has '+batch_size' => (is => 'ro', isa => 'Int', required => 1);
has 'data_name' => (is => 'ro', isa => 'Str', default => 'data');
has 'label_name' => (is => 'ro', isa => 'Str', default => 'softmax_label');
has 'dtype' => (is => 'ro', isa => 'Dtype', default => 'float32');
has [qw/nd counter seq_counter vocab_size
data_size provide_data provide_label idx/] => (is => 'rw', init_arg => undef);
sub BUILD
{
my $self = shift;
$self->data_size($self->data->nelem);
my $segments = int(($self->data_size-$self->seq_size)/($self->batch_size*$self->seq_size));
$self->idx([0..$segments-1]);
$self->vocab_size($self->data->uniq->shape->at(0));
$self->counter(0);
$self->seq_counter(0);
$self->nd(mx->nd->array($self->data, dtype => $self->dtype));
my $shape = [$self->batch_size, $self->seq_size];
examples/char_lstm.pl
clip_gradient => 5,
rescale_grad => 1/$batch_size,
lr_scheduler => AI::MXNet::FactorScheduler->new(step => 1000, factor => 0.99)
},
initializer => mx->init->Xavier(factor_type => "in", magnitude => 2.34),
num_epoch => $num_epoch,
batch_end_callback => mx->callback->Speedometer($batch_size, $disp_batches),
($chkp_epoch ? (epoch_end_callback => [mx->rnn->do_rnn_checkpoint($stack, $chkp_prefix, $chkp_epoch), \&sample]) : ())
);
sub sample {
return if not $sample_size;
$model->reshape(data_shapes=>[['data',[1, $seq_size]]], label_shapes=>[['softmax_label',[1, $seq_size]]]);
my $input = mx->nd->array($fdata->slice([0, $seq_size-1]))->reshape([1, $seq_size]);
$| = 1;
for (0..$sample_size-1)
{
$model->forward(mx->io->DataBatch(data=>[$input]), is_train => 0);
my $prob = $model->get_outputs(0)->[0][0]->at($seq_size-1)->aspdl;
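## draw the next character index from the predicted softmax
## distribution over the vocabulary (a weighted random choice)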
my $next_char = Math::Random::Discrete->new($prob->reshape(-1)->unpdl, [0..scalar(keys %vocabulary)-1])->rand;
print "$reverse_vocab{$next_char}";
examples/cudnn_lstm_bucketing.pl
'optimizer=s' => \(my $optimizer = 'adam' ),
'mom=f' => \(my $mom = 0 ),
'wd=f' => \(my $wd = 0.00001 ),
'batch-size=i' => \(my $batch_size = 32 ),
'disp-batches=i' => \(my $disp_batches = 50 ),
'model-prefix=s' => \(my $model_prefix = 'lstm_' ),
'load-epoch=i' => \(my $load_epoch = 0 ),
'stack-rnn' => \(my $stack_rnn ),
'bidirectional=i' => \(my $bidirectional ),
'dropout=f' => \(my $dropout = 0 ),
'help' => sub { HelpMessage(0) },
) or HelpMessage(1);
=head1 NAME
cudnn_lstm_bucketing.pl - Example of training a char LSTM RNN on tiny shakespeare using the high-level RNN interface with bucketing
=head1 SYNOPSIS
--test Whether to test or train (default 0)
--num-layers number of stacked RNN layers, default=2
examples/cudnn_lstm_bucketing.pl
$cell = $stack;
}
else
{
$cell = mx->rnn->FusedRNNCell(
$num_hidden, mode => 'lstm', num_layers => $num_layers,
bidirectional => $bidirectional, dropout => $dropout
);
}
my $sym_gen = sub { my $seq_len = shift;
my $data = mx->sym->Variable('data');
my $label = mx->sym->Variable('softmax_label');
my $embed = mx->sym->Embedding(data=>$data, input_dim=>scalar(keys %$vocab), output_dim=>$num_embed,name=>'embed');
my ($output) = $cell->unroll($seq_len, inputs=>$embed, merge_outputs=>1, layout=>'TNC');
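## flatten (time, batch) into one axis so the FullyConnected and softmax
## below apply per time step; a bidirectional cell doubles num_hidden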
my $pred = mx->sym->Reshape($output, shape=>[-1, $num_hidden*(1+$bidirectional)]);
$pred = mx->sym->FullyConnected(data=>$pred, num_hidden=>scalar(keys %$vocab), name=>'pred');
$label = mx->sym->Reshape($label, shape=>[-1]);
$pred = mx->sym->SoftmaxOutput(data=>$pred, label=>$label, name=>'softmax');
return ($pred, ['data'], ['softmax_label']);
};
examples/cudnn_lstm_bucketing.pl
wd => $wd,
},
begin_epoch => $load_epoch,
initializer => mx->init->Xavier(factor_type => "in", magnitude => 2.34),
num_epoch => $num_epoch,
batch_end_callback => mx->callback->Speedometer($batch_size, $disp_batches),
($model_prefix ? (epoch_end_callback => mx->rnn->do_rnn_checkpoint($cell, $model_prefix, 1)) : ())
);
};
my $test = sub {
assert($model_prefix, "Must specify path to load from");
my (undef, $data_val, $vocab) = get_data('NT');
my $stack;
if($stack_rnn)
{
$stack = mx->rnn->SequentialRNNCell();
for my $i (0..$num_layers-1)
{
my $cell = mx->rnn->LSTMCell(num_hidden => $num_hidden, prefix => "lstm_${i}l0_");
if($bidirectional)
examples/cudnn_lstm_bucketing.pl
$stack->add($cell);
}
}
else
{
$stack = mx->rnn->FusedRNNCell(
$num_hidden, num_layers => $num_layers,
mode=>'lstm', bidirectional => $bidirectional
)->unfuse()
}
my $sym_gen = sub {
my $seq_len = shift;
my $data = mx->sym->Variable('data');
my $label = mx->sym->Variable('softmax_label');
my $embed = mx->sym->Embedding(
data => $data, input_dim => scalar(keys %$vocab),
output_dim => $num_embed, name => 'embed'
);
$stack->reset;
my ($outputs, $states) = $stack->unroll($seq_len, inputs => $embed, merge_outputs => 1);
my $pred = mx->sym->Reshape($outputs, shape => [-1, $num_hidden*(1+$bidirectional)]);
examples/lstm_bucketing.pl
'kv-store=s' => \(my $kv_store = 'device'),
'num-epoch=i' => \(my $num_epoch = 25 ),
'lr=f' => \(my $lr = 0.01 ),
'optimizer=s' => \(my $optimizer = 'sgd' ),
'mom=f' => \(my $mom = 0 ),
'wd=f' => \(my $wd = 0.00001 ),
'batch-size=i' => \(my $batch_size = 32 ),
'disp-batches=i' => \(my $disp_batches = 50 ),
'chkp-prefix=s' => \(my $chkp_prefix = 'lstm_' ),
'chkp-epoch=i' => \(my $chkp_epoch = 0 ),
'help' => sub { HelpMessage(0) },
) or HelpMessage(1);
=head1 NAME
lstm_bucketing.pl - Example of training LSTM RNN on Penn Tree Bank data using high level RNN interface
=head1 SYNOPSIS
--num-layers number of stacked RNN layers, default=2
--num-hidden hidden layer size, default=200
examples/lstm_bucketing.pl
$validation_sentences, $batch_size, buckets => $buckets,
invalid_label => $invalid_label
);
my $stack = mx->rnn->SequentialRNNCell();
for my $i (0..$num_layers-1)
{
$stack->add(mx->rnn->LSTMCell(num_hidden => $num_hidden, prefix => "lstm_l${i}_"));
}
my $sym_gen = sub {
my $seq_len = shift;
my $data = mx->sym->Variable('data');
my $label = mx->sym->Variable('softmax_label');
my $embed = mx->sym->Embedding(
data => $data, input_dim => scalar(keys %$vocabulary),
output_dim => $num_embed, name => 'embed'
);
$stack->reset;
my ($outputs, $states) = $stack->unroll($seq_len, inputs => $embed, merge_outputs => 1);
my $pred = mx->sym->Reshape($outputs, shape => [-1, $num_hidden]);
examples/mnist.pl
use strict;
use warnings;
# derived from http://mxnet.io/tutorials/python/mnist.html
use LWP::UserAgent ();
use PDL ();
#use Gtk2 '-init';
use AI::MXNet ('mx');
my $ua = LWP::UserAgent->new();
sub download_data {
my($url, $force_download) = @_;
$force_download = 1 if @_ < 2;
my $fname = (split m{/}, $url)[-1];
if($force_download or not -f $fname) {
$ua->get($url, ':content_file' => $fname);
}
return $fname;
}
sub read_data {
my($label_url, $image_url) = @_;
my($magic, $num, $rows, $cols);
open my($flbl), '<:gzip', download_data($label_url);
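# the idx label file begins with an 8-byte header: a magic number and the
# item count, both big-endian 32-bit ints (hence unpack 'N2' below)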
read $flbl, my($buf), 8;
($magic, $num) = unpack 'N2', $buf;
my $label = PDL->new();
$label->set_datatype($PDL::Types::PDL_B);
$label->setdims([ $num ]);
read $flbl, ${$label->get_dataref}, $num;
examples/mnist.pl
return($label, $image);
}
my $path='http://yann.lecun.com/exdb/mnist/';
my($train_lbl, $train_img) = read_data(
"${path}train-labels-idx1-ubyte.gz", "${path}train-images-idx3-ubyte.gz");
my($val_lbl, $val_img) = read_data(
"${path}t10k-labels-idx1-ubyte.gz", "${path}t10k-images-idx3-ubyte.gz");
sub show_sample {
print 'label: ', $train_lbl->slice('0:9'), "\n";
my $hbox = Gtk2::HBox->new(0, 2);
for my $i (0 .. 9) {
my $img = $train_img->slice(":,:,$i");
my($w, $h) = $img->dims;
$img->make_physical();
# ugh, pixbufs don't have a grayscale colorspace?!
# burst it to rgb I guess.
my $data = pack 'c*', map { $_, $_, $_ } unpack 'c*', ${$img->get_dataref};
$hbox->add(Gtk2::Image->new_from_pixbuf(
Gtk2::Gdk::Pixbuf->new_from_data($data, 'rgb', 0, 8, $w, $h, $w * 3)
));
}
my $win = Gtk2::Window->new('toplevel');
$win->signal_connect(delete_event => sub { Gtk2->main_quit() });
$win->add($hbox);
$win->show_all();
Gtk2->main();
}
sub show_network {
my($viz) = @_;
my $load = Gtk2::Gdk::PixbufLoader->new();
$load->write($viz->graph->as_png);
$load->close();
my $img = Gtk2::Image->new_from_pixbuf($load->get_pixbuf());
my $sw = Gtk2::ScrolledWindow->new(undef, undef);
$sw->add_with_viewport($img);
my $win = Gtk2::Window->new('toplevel');
$win->signal_connect(delete_event => sub { Gtk2->main_quit() });
$win->add($sw);
$win->show_all();
Gtk2->main();
}
#show_sample();
sub to4d {
my($img) = @_;
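# reshape the image stack to 4-D (width, height, channel, examples)
# and scale the 0-255 byte pixels to floats in [0, 1]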
return $img->reshape(28, 28, 1, ($img->dims)[2])->float / 255;
}
my $batch_size = 100;
my $train_iter = mx->io->NDArrayIter(
data => to4d($train_img),
label => $train_lbl,
batch_size => $batch_size,
shuffle => 1,
);
my $val_iter = mx->io->NDArrayIter(
data => to4d($val_img),
label => $val_lbl,
batch_size => $batch_size,
);
# Create a place holder variable for the input data
my $data = mx->sym->Variable('data');
sub nn_fc {
# Epoch[9] Train-accuracy=0.978889
# Epoch[9] Time cost=145.437
# Epoch[9] Validation-accuracy=0.964600
my($data) = @_;
# Flatten the data from 4-D shape (batch_size, num_channel, width, height)
# into 2-D (batch_size, num_channel*width*height)
$data = mx->sym->Flatten(data => $data);
# The first fully-connected layer
examples/mnist.pl
my $fc2 = mx->sym->FullyConnected(data => $data, name => 'fc2', num_hidden => 64);
my $act2 = mx->sym->Activation(data => $fc2, name => 'relu2', act_type => "relu");
# The third fully-connected layer, note that the hidden size should be 10, which is the number of unique digits
my $fc3 = mx->sym->FullyConnected(data => $act2, name => 'fc3', num_hidden => 10);
# The softmax and loss layer
my $mlp = mx->sym->SoftmaxOutput(data => $fc3, name => 'softmax');
return $mlp;
}
sub nn_conv {
my($data) = @_;
# Epoch[9] Batch [200] Speed: 1625.07 samples/sec Train-accuracy=0.992090
# Epoch[9] Batch [400] Speed: 1630.12 samples/sec Train-accuracy=0.992850
# Epoch[9] Train-accuracy=0.991357
# Epoch[9] Time cost=36.817
# Epoch[9] Validation-accuracy=0.988100
my $conv1= mx->symbol->Convolution(data => $data, name => 'conv1', num_filter => 20, kernel => [5,5], stride => [2,2]);
my $bn1 = mx->symbol->BatchNorm(data => $conv1, name => "bn1");
my $act1 = mx->symbol->Activation(data => $bn1, name => 'relu1', act_type => "relu");
lib/AI/MXNet.pm
use AI::MXNet::Module::Bucketing;
use AI::MXNet::RNN;
use AI::MXNet::Visualization;
use AI::MXNet::RecordIO;
use AI::MXNet::Image;
use AI::MXNet::Contrib;
use AI::MXNet::Contrib::AutoGrad;
use AI::MXNet::CachedOp;
our $VERSION = '1.0102';
sub import
{
my ($class, $short_name) = @_;
if($short_name)
{
$short_name =~ s/[^\w:]//g;
if(length $short_name)
{
my $short_name_package =<<"EOP";
package $short_name;
no warnings 'redefine';
sub nd { 'AI::MXNet::NDArray' }
sub sym { 'AI::MXNet::Symbol' }
sub symbol { 'AI::MXNet::Symbol' }
sub init { 'AI::MXNet::Initializer' }
sub initializer { 'AI::MXNet::Initializer' }
sub optimizer { 'AI::MXNet::Optimizer' }
sub opt { 'AI::MXNet::Optimizer' }
sub rnd { 'AI::MXNet::Random' }
sub random { 'AI::MXNet::Random' }
sub Context { shift; AI::MXNet::Context->new(\@_) }
sub cpu { AI::MXNet::Context->cpu(\$_[1]//0) }
sub gpu { AI::MXNet::Context->gpu(\$_[1]//0) }
sub kv { 'AI::MXNet::KVStore' }
sub recordio { 'AI::MXNet::RecordIO' }
sub io { 'AI::MXNet::IO' }
sub metric { 'AI::MXNet::Metric' }
sub mod { 'AI::MXNet::Module' }
sub mon { 'AI::MXNet::Monitor' }
sub viz { 'AI::MXNet::Visualization' }
sub rnn { 'AI::MXNet::RNN' }
sub callback { 'AI::MXNet::Callback' }
sub img { 'AI::MXNet::Image' }
sub contrib { 'AI::MXNet::Contrib' }
sub name { '$short_name' }
sub AttrScope { shift; AI::MXNet::Symbol::AttrScope->new(\@_) }
*AI::MXNet::Symbol::AttrScope::current = sub { \$${short_name}::AttrScope; };
\$${short_name}::AttrScope = AI::MXNet::Symbol::AttrScope->new;
sub Prefix { AI::MXNet::Symbol::Prefix->new(prefix => \$_[1]) }
*AI::MXNet::Symbol::NameManager::current = sub { \$${short_name}::NameManager; };
\$${short_name}::NameManager = AI::MXNet::Symbol::NameManager->new;
*AI::MXNet::Context::current_ctx = sub { \$${short_name}::Context; };
\$${short_name}::Context = AI::MXNet::Context->new(device_type => 'cpu', device_id => 0);
1;
EOP
eval $short_name_package;
}
}
}
1;
__END__
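A minimal usage sketch of the alias package that import() generates above
(assuming the conventional short name 'mx' used by the bundled examples):

use AI::MXNet qw(mx);
my $ctx = mx->cpu(0);                 # AI::MXNet::Context->cpu(0)
my $a   = mx->nd->ones([2, 3]);       # AI::MXNet::NDArray->ones([2, 3])
my $sym = mx->sym->Variable('data');  # AI::MXNet::Symbol->Variable('data')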
lib/AI/MXNet/Base.pm
Perl version of Python's "for x, y, z in zip(arr_x, arr_y, arr_z)".
Parameters
----------
$sub_ref : CodeRef
    called on each iteration with @_ set to $arr_x->[$i], $arr_y->[$i], $arr_z->[$i]
@array_refs : the array refs iterated over in parallel
=cut
sub zip
{
my ($sub, @arrays) = @_;
my $len = @{ $arrays[0] };
for (my $i = 0; $i < $len; $i++)
{
$sub->(map { $_->[$i] } @arrays);
}
}
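A short usage sketch of zip:

zip(sub {
    my ($x, $y) = @_;
    print "$x -> $y\n";
}, ['a', 'b'], [1, 2]);    # prints "a -> 1" then "b -> 2"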
=head2 enumerate
Same as zip, but the argument list in the anonymous sub is prepended
by the iteration count.
=cut
sub enumerate
{
my ($sub, @arrays) = @_;
my $len = @{ $arrays[0] };
zip($sub, [0..$len-1], @arrays);
}
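For example:

enumerate(sub { my ($i, $x) = @_; print "$i: $x\n" }, ['a', 'b']);   # 0: a, 1: b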
=head2 product
Calculates the product of the input arguments.
=cut
sub product
{
my $p = 1;
$p *= $_ for @_;
return $p;
}
=head2 bisect_left
https://hg.python.org/cpython/file/2.7/Lib/bisect.py
=cut
sub bisect_left
{
my ($a, $x, $lo, $hi) = @_;
$lo //= 0;
$hi //= @{ $a };
if($lo < 0)
{
Carp::confess('lo must be non-negative');
}
while($lo < $hi)
{
lib/AI/MXNet/Base.pm
Shuffle the pdl by the last dimension
Parameters
-----------
PDL $pdl
$preshuffle : Maybe[ArrayRef[Index]], if defined, its elements are used
    as the indexes of the shuffled last dimension
=cut
sub pdl_shuffle
{
my ($pdl, $preshuffle) = @_;
my $c = $pdl->copy;
my @shuffle = $preshuffle ? @{ $preshuffle } : shuffle(0..$pdl->dim(-1)-1);
my $rem = $pdl->ndims-1;
for my $i (0..$pdl->dim(-1)-1)
{
$c->slice(('X')x$rem, $i) .= $pdl->slice(('X')x$rem, $shuffle[$i])
}
$c;
lib/AI/MXNet/Base.pm
=head2 assert
Parameters
-----------
Bool $input
Str $error_str
Calls Carp::confess with $error_str//"AssertionError" if the $input is false
=cut
sub assert
{
my ($input, $error_str) = @_;
local($Carp::CarpLevel) = 1;
Carp::confess($error_str//'AssertionError')
unless $input;
}
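A short usage sketch:

assert(defined $batch_size, "batch_size is required");
assert($batch_size > 0);    # confesses with 'AssertionError' when false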
=head2 check_call
Checks the return value of a C API call.
This function raises an exception when an error occurs.
Every API call is wrapped with this function.
Returns the C API call's return values with the leading status code
stripped; in scalar context returns the first of those values.
=cut
sub check_call
{
Carp::confess(AI::MXNetCAPI::GetLastError()) if shift;
return wantarray ? @_ : $_[0];
}
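For example, the KVStore constructor later in this distribution wraps its
C call exactly this way:

my $handle = check_call(AI::MXNetCAPI::KVStoreCreate('local'));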
=head2 build_param_doc
Builds argument docs in python style.
arg_names : array ref of str
lib/AI/MXNet/Base.pm
remove_dup : boolean, optional
Whether to remove duplication or not.
Returns
-------
docstr : str
Python docstring of parameter sections.
=cut
sub build_param_doc
{
my ($arg_names, $arg_types, $arg_descs, $remove_dup) = @_;
$remove_dup //= 1;
my %param_keys;
my @param_str;
zip(sub {
my ($key, $type_info, $desc) = @_;
return if exists $param_keys{$key} and $remove_dup;
$param_keys{$key} = 1;
my $ret = sprintf("%s : %s", $key, $type_info);
$ret .= "\n ".$desc if length($desc);
push @param_str, $ret;
},
$arg_names, $arg_types, $arg_descs
);
return sprintf("Parameters\n----------\n%s\n", join("\n", @param_str));
}
=head2 _notify_shutdown
Notify MXNet about shutdown.
=cut
sub _notify_shutdown
{
check_call(AI::MXNetCAPI::NotifyShutdown());
}
END {
_notify_shutdown();
Time::HiRes::sleep(0.01);
}
*pzeros = \&zeros;
lib/AI/MXNet/CachedOp.pm
=head1 NAME
AI::MXNet::CachedOp - A wrapper around CachedOpHandle
=cut
use strict;
use warnings;
use AI::MXNet::Base;
use Mouse;
use overload '&{}' => sub { my $self = shift; sub { $self->call(@_) } };
has 'handle' => (is => 'ro', isa => 'CachedOpHandle', required => 1);
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
my ($sym) = @_;
my $handle = check_call(
AI::MXNetCAPI::CreateCachedOp(
$sym->handle
)
);
return $class->$orig(handle => $handle);
};
sub DEMOLISH
{
check_call(AI::MXNetCAPI::FreeCachedOp(shift->handle));
}
sub call
{
my $self = shift;
my @args;
my %kwargs;
if(blessed $_[0] and $_[0]->isa('AI::MXNet::NDArray'))
{
while(blessed $_[0] and $_[0]->isa('AI::MXNet::NDArray'))
{
push @args, shift(@_);
}
lib/AI/MXNet/Callback.pm
package AI::MXNet::Callback;
use strict;
use warnings;
use List::Util qw/max/;
use AI::MXNet::Function::Parameters;
use Mouse;
use overload "&{}" => sub { my $self = shift; sub { $self->call(@_) } };
=head1 NAME
AI::MXNet::Callback - A collection of predefined callback functions
=cut
=head2 module_checkpoint
Callback to save the module setup in the checkpoint files.
lib/AI/MXNet/Callback.pm
The module to checkpoint.
$prefix : str
The file prefix to checkpoint to
$period=1 : int
How many epochs to wait before checkpointing. Default is 1.
$save_optimizer_states=0 : Bool
Whether to save optimizer states for later training.
Returns
-------
$callback : sub ref
The callback function that can be passed as iter_end_callback to fit.
=cut
method module_checkpoint(
AI::MXNet::Module::Base $mod,
Str $prefix,
Int $period=1,
Int $save_optimizer_states=0
)
{
$period = max(1, $period);
return sub {
my ($iter_no, $sym, $arg, $aux) = @_;
if(($iter_no + 1) % $period == 0)
{
$mod->save_checkpoint($prefix, $iter_no + 1, $save_optimizer_states);
}
}
}
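A usage sketch, assuming $module is a bound AI::MXNet::Module and the 'mx'
alias is loaded; here a checkpoint is written every 2 epochs, passed as
epoch_end_callback the way the bundled examples do:

my $checkpoint_cb = mx->callback->module_checkpoint($module, 'mymodel', 2);
$module->fit($train_iter, num_epoch => 10, epoch_end_callback => $checkpoint_cb);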
=head2 log_train_metric
lib/AI/MXNet/Callback.pm
Parameters
----------
$period : Int
The number of batches after which to log the training evaluation metric.
$auto_reset : Bool
Whether to reset the metric after the logging.
Returns
-------
$callback : sub ref
The callback function that can be passed as iter_epoch_callback to fit.
=cut
method log_train_metric(Int $period, Int $auto_reset=0)
{
return sub {
my ($param) = @_;
if($param->nbatch % $period == 0 and defined $param->eval_metric)
{
my $name_value = $param->eval_metric->get_name_value;
while(my ($name, $value) = each %{ $name_value })
{
AI::MXNet::Logging->info(
"Iter[%d] Batch[%d] Train-%s=%f",
$param->epoch, $param->nbatch, $name, $value
);
lib/AI/MXNet/Context.pm
package AI::MXNet::Context;
use strict;
use warnings;
use Mouse;
use AI::MXNet::Types;
use AI::MXNet::Function::Parameters;
use constant devtype2str => { 1 => 'cpu', 2 => 'gpu', 3 => 'cpu_pinned' };
use constant devstr2type => { cpu => 1, gpu => 2, cpu_pinned => 3 };
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
return $class->$orig(device_type => $_[0])
if @_ == 1 and $_[0] =~ /^(?:cpu|gpu|cpu_pinned)$/;
return $class->$orig(
device_type => $_[0]->device_type,
device_id => $_[0]->device_id
) if @_ == 1 and blessed $_[0];
return $class->$orig(device_type => $_[0], device_id => $_[1])
if @_ == 2 and $_[0] =~ /^(?:cpu|gpu|cpu_pinned)$/;
lib/AI/MXNet/Context.pm
has 'device_type' => (
is => 'rw',
isa => enum([qw[cpu gpu cpu_pinned]]),
default => 'cpu'
);
has 'device_type_id' => (
is => 'rw',
isa => enum([1, 2, 3]),
default => sub { devstr2type->{ shift->device_type } },
lazy => 1
);
has 'device_id' => (
is => 'rw',
isa => 'Int',
default => 0
);
use overload
'==' => sub {
my ($self, $other) = @_;
return 0 unless blessed($other) and $other->isa(__PACKAGE__);
return "$self" eq "$other";
},
'""' => sub {
my ($self) = @_;
return sprintf("%s(%s)", $self->device_type, $self->device_id);
};
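A short sketch of the constructor forms and overloads above:

my $ctx = AI::MXNet::Context->new('gpu');    # bare type string, device_id 0
print "$ctx\n";                              # stringifies to "gpu(0)"
print "same\n" if $ctx == AI::MXNet::Context->new('gpu', 0);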
=head1 NAME
AI::MXNet::Context - A device context.
=cut
=head1 DESCRIPTION
lib/AI/MXNet/Contrib.pm
package AI::MXNet::Contrib;
use strict;
use warnings;
use AI::MXNet::Contrib::Symbol;
use AI::MXNet::Contrib::NDArray;
sub sym { 'AI::MXNet::Contrib::Symbol' }
sub symbol { 'AI::MXNet::Contrib::Symbol' }
sub nd { 'AI::MXNet::Contrib::NDArray' }
sub autograd { 'AI::MXNet::Contrib::AutoGrad' }
1;
lib/AI/MXNet/Contrib/AutoGrad.pm
The index of argument to calculate gradient for.
Returns
-------
grad_and_loss_func: a perl sub
A function that would compute both the gradient of arguments and loss value.
=cut
method grad_and_loss(CodeRef $func, Maybe[Int|ArrayRef[Int]] $argnum=)
{
return sub {
my @args = @_;
my @variables = @_;
if(defined $argnum)
{
my @argnum = ref $argnum ? @$argnum : ($argnum);
@variables = map { $_[$_] } @argnum;
}
map {
assert(
(blessed($_) and $_->isa('AI::MXNet::NDArray')),
lib/AI/MXNet/Contrib/AutoGrad.pm
Returns
-------
grad_func: a perl function
A function that would compute the gradient of arguments.
=cut
method grad(CodeRef $func, Maybe[Int|ArrayRef[Int]] $argnum=)
{
my $grad_with_loss_func = __PACKAGE__->grad_and_loss($func, $argnum);
return sub {
return ($grad_with_loss_func->(@_))[0];
};
}
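A minimal sketch (illustration only): per the code above, element 0 of the
returned list holds the gradients, and the inputs are AI::MXNet::NDArrays:

my $f = sub { my ($x) = @_; return $x * $x };
my $grad_and_loss = AI::MXNet::Contrib::AutoGrad->grad_and_loss($f);
my ($grads, $loss) = $grad_and_loss->(AI::MXNet::NDArray->ones([2, 2]));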
method train_section(CodeRef $sub)
{
my $prev = __PACKAGE__->set_is_training(1);
$sub->();
__PACKAGE__->set_is_training(0) unless $prev;
}
lib/AI/MXNet/Contrib/NDArray.pm
package AI::MXNet::Contrib::NDArray;
use strict;
use warnings;
sub AUTOLOAD {
my $sub = $AI::MXNet::Contrib::NDArray::AUTOLOAD;
$sub =~ s/.*:://;
$sub = "_contrib_$sub";
shift;
return AI::MXNet::NDArray->$sub(@_);
}
1;
lib/AI/MXNet/Contrib/Symbol.pm
package AI::MXNet::Contrib::Symbol;
use strict;
use warnings;
sub AUTOLOAD {
my $sub = $AI::MXNet::Contrib::Symbol::AUTOLOAD;
$sub =~ s/.*:://;
$sub = "_contrib_$sub";
shift;
return AI::MXNet::Symbol->$sub(@_);
}
1;
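Both AUTOLOADs simply prefix the method name with "_contrib_", so a call such
as (the op name here is purely an illustration)

AI::MXNet::Contrib::Symbol->MultiBoxPrior(data => $sym);

dispatches to AI::MXNet::Symbol->_contrib_MultiBoxPrior(data => $sym).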
lib/AI/MXNet/Executor.pm
Parameters
----------
handle: ExecutorHandle
ExecutorHandle is generated by calling bind.
See Also
--------
AI::MXNet::Symbol->bind : how to create the AI::MXNet::Executor.
=cut
sub BUILD
{
my $self = shift;
my ($symbol, $ctx, $grad_req, $group2ctx)
=
($self->_symbol, $self->_ctx, $self->_grad_req, $self->_group2ctx);
$symbol = $symbol->deepcopy;
$ctx = $ctx->deepcopy;
if(ref $grad_req)
{
if(ref $grad_req eq 'ARRAY')
lib/AI/MXNet/Executor.pm
{
$group2ctx = { %{ $group2ctx } };
}
$self->_symbol($symbol);
$self->_ctx($ctx);
$self->_grad_req($grad_req);
$self->_group2ctx($group2ctx);
$self->outputs($self->_get_outputs);
}
sub DEMOLISH
{
check_call(AI::MXNetCAPI::ExecutorFree(shift->handle));
}
# Get the dictionary given name and ndarray pairs.
func _get_dict(
ArrayRef[Str] $names,
ArrayRef[Maybe[AI::MXNet::NDArray]] $ndarrays
)
{
lib/AI/MXNet/Executor/Group.pm
confess('Too many slices such that some splits are empty');
}
push @slices, [$begin, $end];
}
return \@slices;
}
# Load an array ref of arrays into an array ref of arrays specified by slices
func _load_general($data, $targets, $major_axis)
{
zip(sub {
my ($d_src, $d_targets, $axis) = @_;
if(blessed($d_targets) and $d_targets->isa('AI::MXNet::NDArray'))
{
$d_src->copyto($d_targets);
}
elsif(ref $d_targets eq 'ARRAY' and blessed $d_targets->[0])
{
zip(sub {
my ($src, $dst) = @_;
$src->copyto($dst);
}, $d_src, $d_targets);
}
else
{
for my $d (@{ $d_targets })
{
my ($slice_idx, $d_dst) = @{ $d };
if($axis >= 0)
lib/AI/MXNet/Executor/Group.pm
func _load_label($batch, $targets, $major_axis)
{
_load_general($batch->label, $targets, $major_axis);
}
# Merge outputs that live on multiple contexts into one, so that they look
# like they live on a single context.
func _merge_multi_context($outputs, $major_axis)
{
my @rets;
zip(sub {
my ($tensors, $axis) = @_;
if($axis >= 0)
{
if(@$tensors == 1)
{
push @rets, $tensors->[0];
}
else
{
my $ctx = $tensors->[0]->context;
lib/AI/MXNet/Executor/Group.pm
space for gradient, nor do gradient calculation.
grad_req : ArrayRef[GradReq]|HashRef[GradReq]|GradReq
Requirement for gradient accumulation. Can be 'write', 'add', or 'null'
(default to 'write').
Can be specified globally (str) or for each argument (array ref, hash ref).
state_names: Maybe[ArrayRef[Str]]
=cut
has 'symbol' => (is => 'ro', isa => 'AI::MXNet::Symbol', required => 1);
has 'contexts' => (is => 'ro', isa => 'ArrayRef[AI::MXNet::Context]', required => 1);
has 'workload' => (is => 'ro', isa => 'ArrayRef[Num]', default => sub { [] });
has 'data_shapes' => (is => 'rw', isa => 'ArrayRef[NameShape|AI::MXNet::DataDesc]', required => 1);
has 'label_shapes' => (is => 'rw', isa => 'Maybe[ArrayRef[NameShape|AI::MXNet::DataDesc]]');
has 'param_names' => (is => 'ro', isa => 'ArrayRef[Str]', required => 1);
has 'for_training' => (is => 'ro', isa => 'Bool', required => 1);
has 'inputs_need_grad' => (is => 'ro', isa => 'Bool', default => 0);
has 'shared_group' => (is => 'ro', isa => 'Maybe[AI::MXNet::DataParallelExecutorGroup]');
has 'logger' => (is => 'ro', default => sub { AI::MXNet::Logging->get_logger });
has 'fixed_param_names' => (is => 'rw', isa => 'Maybe[ArrayRef[Str]]');
has 'state_names' => (is => 'rw', isa => 'Maybe[ArrayRef[Str]]');
has 'grad_req' => (is => 'rw', isa => 'ArrayRef[GradReq]|HashRef[GradReq]|GradReq', default=>'write');
has '_p' => (is => 'rw', init_arg => undef);
sub BUILD
{
my $self = shift;
my $p = AI::MXNet::DataParallelExecutorGroup::_private->new;
$p->arg_names($self->symbol->list_arguments);
$p->aux_names($self->symbol->list_auxiliary_states);
$p->execs([]);
$self->_p($p);
$self->grad_req('null') if not $self->for_training;
$self->fixed_param_names([]) unless defined $self->fixed_param_names;
$self->state_names([]) unless defined $self->state_names;
lib/AI/MXNet/Executor/Group.pm
Parameters
----------
$data_shapes : ArrayRef[AI::MXNet::DataDesc]
=cut
method decide_slices(ArrayRef[AI::MXNet::DataDesc] $data_shapes)
{
confess("empty data_shapes array") unless @{ $data_shapes } > 0;
my $major_axis = [map { AI::MXNet::DataDesc->get_batch_axis($_->layout) } @{ $data_shapes }];
zip(sub {
my ($desc, $axis) = @_;
return if($axis == -1);
my $batch_size = $desc->shape->[$axis];
if(defined $self->_p->batch_size)
{
confess(
"all data must have the same batch size: "
. sprintf("batch_size = %d, but ", $self->_p->batch_size)
. sprintf("%s has shape %s", $desc->name, '('. join(',', @{ $desc->shape }) . ')')
) unless $batch_size == $self->_p->batch_size;
lib/AI/MXNet/Executor/Group.pm
target aux arrays
Notes
-----
- This function will inplace update the NDArrays in arg_params and aux_params.
=cut
method get_params(HashRef[AI::MXNet::NDArray] $arg_params, HashRef[AI::MXNet::NDArray] $aux_params)
{
zip(sub {
my ($name, $block) = @_;
my $weight = sum(map { $_->copyto(AI::MXNet::Context->cpu) } @{ $block }) / @{ $block };
$weight->astype($arg_params->{$name}->dtype)->copyto($arg_params->{$name});
}, $self->param_names, $self->_p->param_arrays);
zip(sub {
my ($name, $block) = @_;
my $weight = sum(map { $_->copyto(AI::MXNet::Context->cpu) } @{ $block }) / @{ $block };
$weight->astype($aux_params->{$name}->dtype)->copyto($aux_params->{$name});
}, $self->_p->aux_names, $self->_p->aux_arrays);
}
method get_states($merge_multi_context=1)
{
lib/AI/MXNet/Executor/Group.pm
}
$_->forward($is_train) for @{ $self->_p->execs };
}
# Get the shapes of the outputs
method get_output_shapes()
{
my @shapes = map { $_->shape } @{ $self->_p->execs->[0]->outputs };
my @concat_shapes;
zip(sub {
my ($key, $shape, $axis) = @_;
my @the_shape = @{ $shape };
if($axis >= 0)
{
$the_shape[$axis] = $self->_p->batch_size;
}
push @concat_shapes, AI::MXNet::DataDesc->new(name => $key, shape => \@the_shape);
}, $self->symbol->list_outputs, \@shapes, $self->_p->output_layouts);
return \@concat_shapes;
}
lib/AI/MXNet/Executor/Group.pm
out_grads : NDArray or array ref of NDArray, optional
Gradient on the outputs to be propagated back.
This parameter is only needed when bind is called
on outputs that are not a loss function.
=cut
method backward(Maybe[AI::MXNet::NDArray|ArrayRef[AI::MXNet::NDArray]] $out_grads=)
{
confess('re-bind with for_training=1 to run backward') unless $self->for_training;
$out_grads //= [];
zip(sub {
my ($i, $exec, $islice) = @_;
my @out_grads_slice;
zip(sub{
my ($grad, $axis) = @_;
if($axis >= 0)
{
my $og_my_slice = $grad->slice_axis({
axis => $axis,
begin => $islice->[0],
end => $islice->[1]
lib/AI/MXNet/Executor/Group.pm
Parameters
----------
eval_metric : AI::MXNet::EvalMetric
The metric used for evaluation.
labels : array ref of NDArray
Typically comes from label of AI::MXNet::DataBatch.
=cut
method update_metric(AI::MXNet::EvalMetric $eval_metric, ArrayRef[AI::MXNet::NDArray] $labels)
{
zip(sub {
my ($texec, $islice) = @_;
my @labels_slice;
zip(sub {
my ($label, $axis) = @_;
if($axis == 0)
{
# slicing NDArray along axis 0 can avoid copying
push @labels_slice, $label->slice([$islice->[0], $islice->[1]-1]);
}
elsif($axis > 0)
{
my $label_my_slice = $label->slice_axis({
axis => $axis,
lib/AI/MXNet/Executor/Group.pm
----------
shapes : array ref of (str, array ref)
The original (name, shape) pairs.
i : int
Which executor we are dealing with.
=cut
method _sliced_shape(ArrayRef[AI::MXNet::DataDesc] $shapes, Int $i, ArrayRef[Int] $major_axis)
{
my @sliced_shapes;
zip(sub {
my ($desc, $axis) = @_;
my @shape = @{ $desc->shape };
if($axis >= 0)
{
$shape[$axis] = $self->_p->slices->[$i]->[1] - $self->_p->slices->[$i]->[0];
}
push @sliced_shapes, AI::MXNet::DataDesc->new(
name => $desc->name,
shape => \@shape,
dtype => $desc->dtype,
lib/AI/MXNet/Function/Parameters.pm
package AI::MXNet::Function::Parameters;
use strict;
use warnings;
use Function::Parameters ();
use AI::MXNet::Types ();
sub import {
Function::Parameters->import(
{
func => {
defaults => 'function_strict',
runtime => 1,
reify_type => sub {
Mouse::Util::TypeConstraints::find_or_create_isa_type_constraint($_[0])
}
},
method => {
defaults => 'method_strict',
runtime => 1,
reify_type => sub {
Mouse::Util::TypeConstraints::find_or_create_isa_type_constraint($_[0])
}
},
}
);
}
{
no warnings 'redefine';
*Function::Parameters::_croak = sub {
local($Carp::CarpLevel) = 1;
Carp::confess ("@_");
};
}
1;
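With this import in place a package can declare typed methods; a minimal
sketch (the package name is hypothetical):

package My::Example;
use Mouse;
use AI::MXNet::Function::Parameters;
method scale(Num $x, Num $factor = 2) { return $x * $factor }
print My::Example->new->scale(3), "\n";    # prints 6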
lib/AI/MXNet/IO.pm
package AI::MXNet::DataDesc;
use Mouse;
use overload '""' => \&stringify,
'@{}' => \&to_nameshape;
has 'name' => (is => 'ro', isa => "Str", required => 1);
has 'shape' => (is => 'ro', isa => "Shape", required => 1);
has 'dtype' => (is => 'ro', isa => "Dtype", default => 'float32');
has 'layout' => (is => 'ro', isa => "Str", default => 'NCHW');
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
if(@_ >= 2 and ref $_[1] eq 'ARRAY')
{
my $name = shift;
my $shape = shift;
return $class->$orig(name => $name, shape => $shape, @_);
}
return $class->$orig(@_);
};
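The two constructor forms below are equivalent; the second uses the
positional shorthand handled by BUILDARGS above:

my $desc = AI::MXNet::DataDesc->new(name => 'data', shape => [32, 3, 224, 224]);
my $same = AI::MXNet::DataDesc->new('data', [32, 3, 224, 224]);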
lib/AI/MXNet/IO.pm
has 'data' => (is => 'rw', isa => 'Maybe[ArrayRef[AI::MXNet::NDArray]]', required => 1);
has 'label' => (is => 'rw', isa => 'Maybe[ArrayRef[AI::MXNet::NDArray]]');
has 'pad' => (is => 'rw');
has 'index' => (is => 'rw');
has 'bucket_key' => (is => 'rw');
has 'provide_data' => (is => 'rw');
has 'provide_label' => (is => 'rw');
package AI::MXNet::DataIter;
use Mouse;
use overload '<>' => sub { shift->next },
'@{}' => sub { shift->list };
=head1 NAME
AI::MXNet::DataIter - A parent class for MXNet data iterators.
=cut
has 'batch_size' => (is => 'rw', isa => 'Int', default => 0);
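The '<>' overload above makes iteration read naturally; a short sketch:

while(my $batch = <$data_iter>) {    # '<>' calls $data_iter->next
    # ... consume $batch, an AI::MXNet::DataBatch ...
}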
=head2 reset
lib/AI/MXNet/IO.pm
has 'data_iter' => (is => 'ro', isa => 'AI::MXNet::DataIter', required => 1);
has 'size' => (is => 'ro', isa => 'Int', required => 1);
has 'reset_internal' => (is => 'rw', isa => 'Int', default => 1);
has 'cur' => (is => 'rw', isa => 'Int', default => 0);
has 'current_batch' => (is => 'rw', isa => 'Maybe[AI::MXNet::DataBatch]');
has [qw/provide_data
default_bucket_key
provide_label
batch_size/] => (is => 'rw', init_arg => undef);
sub BUILD
{
my $self = shift;
$self->provide_data($self->data_iter->provide_data);
$self->provide_label($self->data_iter->provide_label);
$self->batch_size($self->data_iter->batch_size);
if($self->data_iter->can('default_bucket_key'))
{
$self->default_bucket_key($self->data_iter->default_bucket_key);
}
}
lib/AI/MXNet/IO.pm
has 'data_list' => (is => 'rw', isa => 'ArrayRef[AI::MXNet::NDArray]');
has 'label' => (is => 'rw', isa => 'Maybe[AcceptableInput|HashRef[AcceptableInput]|ArrayRef[AcceptableInput]]');
has 'batch_size' => (is => 'rw', isa => 'Int', default => 1);
has '_shuffle' => (is => 'rw', init_arg => 'shuffle', isa => 'Bool', default => 0);
has 'last_batch_handle' => (is => 'rw', isa => 'Str', default => 'pad');
has 'label_name' => (is => 'rw', isa => 'Str', default => 'softmax_label');
has 'num_source' => (is => 'rw', isa => 'Int');
has 'cursor' => (is => 'rw', isa => 'Int');
has 'num_data' => (is => 'rw', isa => 'Int');
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
if(@_%2)
{
my $data = shift;
return $class->$orig(data => $data, @_);
}
return $class->$orig(@_);
};
sub BUILD
{
my $self = shift;
my $data = AI::MXNet::IO->init_data($self->data, allow_empty => 0, default_name => 'data');
my $label = AI::MXNet::IO->init_data($self->label, allow_empty => 1, default_name => $self->label_name);
my $num_data = $data->[0][1]->shape->[0];
confess("size of data dimension 0 $num_data < batch_size ${\ $self->batch_size }")
unless($num_data >= $self->batch_size);
if($self->_shuffle)
{
my @idx = shuffle(0..$num_data-1);
lib/AI/MXNet/IO.pm
has 'handle' => (is => 'ro', isa => 'DataIterHandle', required => 1);
has '_debug_skip_load' => (is => 'rw', isa => 'Int', default => 0);
has '_debug_at_begin' => (is => 'rw', isa => 'Int', default => 0);
has 'data_name' => (is => 'ro', isa => 'Str', default => 'data');
has 'label_name' => (is => 'ro', isa => 'Str', default => 'softmax_label');
has [qw/first_batch
provide_data
provide_label
batch_size/] => (is => 'rw', init_arg => undef);
sub BUILD
{
my $self = shift;
$self->first_batch($self->next);
my $data = $self->first_batch->data->[0];
$self->provide_data([
AI::MXNet::DataDesc->new(
name => $self->data_name,
shape => $data->shape,
dtype => $data->dtype
)
lib/AI/MXNet/IO.pm
$self->provide_label([
AI::MXNet::DataDesc->new(
name => $self->label_name,
shape => $label->shape,
dtype => $label->dtype
)
]);
$self->batch_size($data->shape->[0]);
}
sub DEMOLISH
{
check_call(AI::MXNetCAPI::DataIterFree(shift->handle));
}
=head2 debug_skip_load
Set the iterator to always return the first batch.
Notes
-----
This can be used to test the speed of network without taking
lib/AI/MXNet/IO.pm
return pdl(check_call(AI::MXNetCAPI::DataIterGetIndex($self->handle)));
}
method getpad()
{
return scalar(check_call(AI::MXNetCAPI::DataIterGetPadNum($self->handle)));
}
package AI::MXNet::IO;
sub NDArrayIter { shift; return AI::MXNet::NDArrayIter->new(@_); }
my %iter_meta;
method get_iter_meta()
{
return \%iter_meta;
}
# Create an io iterator by handle.
func _make_io_iterator($handle)
{
lib/AI/MXNet/IO.pm
) = @{ check_call(AI::MXNetCAPI::DataIterGetIterInfo($handle)) };
my $param_str = build_param_doc($arg_names, $arg_types, $arg_descs);
my $doc_str = "$desc\n\n"
."$param_str\n"
."name : string, required.\n"
." Name of the resulting data iterator.\n\n"
."Returns\n"
."-------\n"
."iterator: DataIter\n"
." The result iterator.";
my $iter = sub {
my $class = shift;
my (@args, %kwargs);
if(@_ and ref $_[-1] eq 'HASH')
{
%kwargs = %{ pop(@_) };
}
@args = @_;
Carp::confess("$iter_name can only accept keyword arguments")
if @args;
for my $key (keys %kwargs)
lib/AI/MXNet/Image.pm
Int $interp=2
Returns:
--------
CodeRef that accepts AI::MXNet::NDArray $src as input
and returns [__PACKAGE__->resize_short($src, $size, $interp)]
=cut
method ResizeAug(Shape $size, Int $interp=2)
{
my $aug = sub {
my $src = shift;
return [__PACKAGE__->resize_short($src, $size, $interp)];
};
return $aug;
}
=head2 RandomCropAug
Makes "random crop augmenter" closure.
lib/AI/MXNet/Image.pm
Int $interp=2
Returns:
--------
CodeRef that accepts AI::MXNet::NDArray $src as input
and returns [(__PACKAGE__->random_crop($src, $size, $interp))[0]]
=cut
method RandomCropAug(Shape $size, Int $interp=2)
{
my $aug = sub {
my $src = shift;
return [(__PACKAGE__->random_crop($src, $size, $interp))[0]];
};
return $aug;
}
=head2 RandomSizedCropAug
Makes "random sized crop augmenter" closure.
lib/AI/MXNet/Image.pm
ArrayRef[Num] $ratio
Int $interp=2
Returns:
CodeRef that accepts AI::MXNet::NDArray $src as input
and returns [(__PACKAGE__->random_size_crop($src, $size, $min_area, $ratio, $interp))[0]]
=cut
method RandomSizedCropAug(Shape $size, Num $min_area, ArrayRef[Num] $ratio, Int $interp=2)
{
my $aug = sub {
my $src = shift;
return [(__PACKAGE__->random_size_crop($src, $size, $min_area, $ratio, $interp))[0]];
};
return $aug;
}
=head2 CenterCropAug
Makes "center crop augmenter" closure.
lib/AI/MXNet/Image.pm
Shape $size
Int $interp=2
Returns:
CodeRef that accepts AI::MXNet::NDArray $src as input
and returns [(__PACKAGE__->center_crop($src, $size, $interp))[0]]
=cut
method CenterCropAug(Shape $size, Int $interp=2)
{
my $aug = sub {
my $src = shift;
return [(__PACKAGE__->center_crop($src, $size, $interp))[0]];
};
return $aug;
}
=head2 RandomOrderAug
Makes "Apply list of augmenters in random order" closure.
lib/AI/MXNet/Image.pm
ArrayRef[CodeRef] $ts
Returns:
--------
CodeRef that accepts AI::MXNet::NDArray $src as input
and returns ArrayRef[AI::MXNet::NDArray]
=cut
method RandomOrderAug(ArrayRef[CodeRef] $ts)
{
my $aug = sub {
my $src = shift;
my @ts = List::Util::shuffle(@{ $ts });
my @tmp;
for my $t (@ts)
{
push @tmp, &{$t}($src);
}
return \@tmp;
};
return $aug;
lib/AI/MXNet/Image.pm
CodeRef that accepts AI::MXNet::NDArray $src as input
and returns ArrayRef[AI::MXNet::NDArray]
=cut
method ColorJitterAug(Num $brightness, Num $contrast, Num $saturation)
{
my @ts;
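# ITU-R BT.601 luma weights: multiplying the RGB channels by these and
# summing yields the grayscale intensity used by the contrast/saturation jitters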
my $coef = AI::MXNet::NDArray->array([[[0.299, 0.587, 0.114]]]);
if($brightness > 0)
{
my $baug = sub { my $src = shift;
my $alpha = 1 + -$brightness + 2 * $brightness * rand;
$src *= $alpha;
return [$src];
};
push @ts, $baug;
}
if($contrast > 0)
{
my $caug = sub { my $src = shift;
my $alpha = 1 + -$contrast + 2 * $contrast * rand;
my $gray = $src*$coef;
$gray = (3.0*(1.0-$alpha)/$gray->size)*$gray->sum;
$src *= $alpha;
$src += $gray;
return [$src];
};
push @ts, $caug;
}
if($saturation > 0)
{
my $saug = sub { my $src = shift;
my $alpha = 1 + -$saturation + 2 * $saturation * rand;
my $gray = $src*$coef;
$gray = AI::MXNet::NDArray->sum($gray, { axis=>2, keepdims =>1 });
$gray *= (1.0-$alpha);
$src *= $alpha;
$src += $gray;
return [$src];
};
push @ts, $saug;
}
lib/AI/MXNet/Image.pm
PDL $eigvec
Returns:
--------
CodeRef that accepts AI::MXNet::NDArray $src as input
and returns ArrayRef[AI::MXNet::NDArray]
=cut
method LightingAug(Num $alphastd, PDL $eigval, PDL $eigvec)
{
my $aug = sub { my $src = shift;
my $alpha = AI::MXNet::NDArray->zeros([3]);
AI::MXNet::Random->normal(0, $alphastd, { out => $alpha });
my $rgb = ($eigvec*$alpha->aspdl) x $eigval;
$src += AI::MXNet::NDArray->array($rgb);
return [$src]
};
return $aug
}
=head2 ColorNormalizeAug
lib/AI/MXNet/Image.pm
Returns:
--------
CodeRef that accepts AI::MXNet::NDArray $src as input
and returns [__PACKAGE__->color_normalize($src, $mean, $std)]
=cut
method ColorNormalizeAug(PDL $mean, PDL $std)
{
$mean = AI::MXNet::NDArray->array($mean);
$std = AI::MXNet::NDArray->array($std);
my $aug = sub { my $src = shift;
return [__PACKAGE__->color_normalize($src, $mean, $std)]
};
return $aug;
}
=head2 HorizontalFlipAug
Makes "Random horizontal flipping" closure.
Parameters:
lib/AI/MXNet/Image.pm
Num $p < 1
Returns:
--------
CodeRef that accepts AI::MXNet::NDArray $src as input
and returns [$p > rand() ? AI::MXNet::NDArray->flip($src, { axis => 1 }) : $src]
=cut
method HorizontalFlipAug(Num $p)
{
my $aug = sub { my $src = shift;
return [$p > rand() ? AI::MXNet::NDArray->flip($src, { axis=>1 }) : $src]
};
return $aug;
}
=head2 CastAug
Makes "Cast to float32" closure.
Returns:
--------
CodeRef that accepts AI::MXNet::NDArray $src as input
and returns [$src->astype('float32')]
=cut
method CastAug()
{
my $aug = sub { my $src = shift;
return [$src->astype('float32')]
};
return $aug;
}
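A composition sketch, assuming $src is an AI::MXNet::NDArray image; note that
every augmenter returns an array ref of NDArrays:

my $cast = AI::MXNet::Image->CastAug();
my $crop = AI::MXNet::Image->CenterCropAug([224, 224]);
my ($out) = @{ $crop->($cast->($src)->[0]) };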
=head2 CreateAugmenter
Create the augmenter list
Parameters:
lib/AI/MXNet/Image.pm
has 'imglist' => (is => 'rw', isa => 'ArrayRef|HashRef');
has 'kwargs' => (is => 'ro', isa => 'HashRef');
has [qw/imgidx
imgrec
seq
cur
provide_data
provide_label
/] => (is => 'rw', init_arg => undef);
sub BUILD
{
my $self = shift;
assert($self->path_imgrec or $self->path_imglist or ref $self->imglist eq 'ARRAY');
if($self->path_imgrec)
{
print("loading recordio...\n");
if($self->path_imgidx)
{
$self->imgrec(
AI::MXNet::IndexedRecordIO->new(
lib/AI/MXNet/Initializer.pm
=head2 new
Parameters
---------
name : str
name of variable
attrs : hash ref of str to str
attributes of this variable taken from AI::MXNet::Symbol->attr_dict
=cut
has 'name' => (is => 'ro', isa => 'Str', required => 1);
has 'attrs' => (is => 'rw', isa => 'HashRef[Str]', lazy => 1, default => sub { +{} });
use overload '""' => sub { shift->name };
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
return $class->$orig(name => $_[0]) if @_ == 1;
return $class->$orig(@_);
};
# Base class for Initializers
package AI::MXNet::Initializer;
use Mouse;
use AI::MXNet::Base qw(:DEFAULT pzeros pceil);
use AI::MXNet::NDArray;
use JSON::PP;
use overload "&{}" => sub { my $self = shift; sub { $self->call(@_) } },
'""' => sub {
my $self = shift;
my ($name) = ref($self) =~ /::(\w+)$/;
encode_json(
[lc $name,
$self->kwargs//{ map { $_ => "".$self->$_ } $self->meta->get_attribute_list }
]);
},
fallback => 1;
has 'kwargs' => (is => 'rw', init_arg => undef, isa => 'HashRef');
has '_verbose' => (is => 'rw', isa => 'Bool', lazy => 1, default => 0);
has '_print_func' => (is => 'rw', isa => 'CodeRef', lazy => 1,
default => sub {
return sub {
my $x = shift;
return ($x->norm/sqrt($x->size))->asscalar;
};
}
);
=head1 NAME
AI::MXNet::Initializer - Base class for all Initializers
lib/AI/MXNet/Initializer.pm
my $existing = $init_registry{ $name };
warn(
"WARNING: New initializer $self.$name "
."is overriding existing initializer $existing.$name"
);
}
$init_registry{ $name } = $self;
{
no strict 'refs';
no warnings 'redefine';
*{"$orig_name"} = sub { shift; $self->new(@_) };
*InitDesc = sub { shift; AI::MXNet::InitDesc->new(@_) };
}
}
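A registration sketch for a custom initializer, following the same pattern the
built-in classes below use (the class name and fill value are hypothetical):

package AI::MXNet::HalfConstant;
use Mouse;
use AI::MXNet::Function::Parameters;
extends 'AI::MXNet::Initializer';
method _init_weight(Str $name, AI::MXNet::NDArray $arr) { $arr .= 0.5 }
__PACKAGE__->register;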
=head2 init
Parameters
----------
$desc : AI::MXNet::InitDesc|str
a name of corresponding ndarray
or the object that describes the initializer.
lib/AI/MXNet/Initializer.pm
=cut
package AI::MXNet::Load;
use Mouse;
extends 'AI::MXNet::Initializer';
has 'param' => (is => "rw", isa => 'HashRef[AI::MXNet::NDArray]', required => 1);
has 'default_init' => (is => "rw", isa => "AI::MXNet::Initializer");
has 'verbose' => (is => "rw", isa => "Int", default => 0);
sub BUILD
{
my $self = shift;
$self->param(AI::MXNet::NDArray->load($self->param)) unless ref $self->param;
my %self_param;
while(my ($name, $arr) = each %{ $self->param })
{
$name =~ s/^(?:arg|aux)://;
$self_param{ $name } = $arr;
}
$self->param(\%self_param);
lib/AI/MXNet/Initializer.pm
=cut
package AI::MXNet::Mixed;
use Mouse;
extends 'AI::MXNet::Initializer';
has "map" => (is => "rw", init_arg => undef);
has "patterns" => (is => "ro", isa => 'ArrayRef[Str]');
has "initializers" => (is => "ro", isa => 'ArrayRef[AI::MXNet::Initializer]');
sub BUILD
{
my $self = shift;
confess("patterns count != initializers count")
unless (@{ $self->patterns } == @{ $self->initializers });
my %map;
@map{ @{ $self->patterns } } = @{ $self->initializers };
$self->map(\%map);
}
method call(Str $name, AI::MXNet::NDArray $arr)
lib/AI/MXNet/Initializer.pm
{
$arr .= 1;
}
__PACKAGE__->register;
package AI::MXNet::Constant;
use Mouse;
extends 'AI::MXNet::Initializer';
has 'value' => (is => 'ro', isa => 'Num', required => 1);
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
return $class->$orig(value => $_[0]) if @_ == 1;
return $class->$orig(@_);
};
method _init_weight(Str $name, AI::MXNet::NDArray $arr)
{
$arr .= $self->value;
}
lib/AI/MXNet/Initializer.pm
Parameters
----------
scale : float, optional
The scale of the uniform distribution.
=cut
package AI::MXNet::Uniform;
use Mouse;
extends 'AI::MXNet::Initializer';
has "scale" => (is => "ro", isa => "Num", default => 0.7);
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
return $class->$orig(scale => $_[0]) if @_ == 1;
return $class->$orig(@_);
};
method _init_weight(Str $name, AI::MXNet::NDArray $arr)
{
AI::MXNet::Random->uniform(-$self->scale, $self->scale, { out => $arr });
}
lib/AI/MXNet/Initializer.pm
Parameters
----------
sigma : float, optional
Standard deviation for the gaussian distribution.
=cut
package AI::MXNet::Normal;
use Mouse;
extends 'AI::MXNet::Initializer';
has "sigma" => (is => "ro", isa => "Num", default => 0.01);
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
return $class->$orig(sigma => $_[0]) if @_ == 1;
return $class->$orig(@_);
};
method _init_weight(Str $name, AI::MXNet::NDArray $arr)
{
AI::MXNet::Random->normal(0, $self->sigma, { out => $arr });
}
lib/AI/MXNet/Initializer.pm
=cut
package AI::MXNet::MSRAPrelu;
use Mouse;
extends 'AI::MXNet::Xavier';
has '+rnd_type' => (default => "gaussian");
has '+factor_type' => (default => "avg");
has 'slope' => (is => 'ro', isa => 'Num', default => 0.25);
sub BUILD
{
my $self = shift;
my $magnitude = 2 / (1 + $self->slope ** 2);
$self->magnitude($magnitude);
$self->kwargs({ slope => $self->slope, factor_type => $self->factor_type });
}
__PACKAGE__->register;
package AI::MXNet::Bilinear;
use Mouse;
lib/AI/MXNet/Initializer.pm
forget_bias : float
=cut
has 'init' => (is => 'rw', isa => 'Str|AI::MXNet::Initializer', required => 1);
has 'forget_bias' => (is => 'ro', isa => 'Num', default => 1);
has [qw/num_hidden
num_layers/] => (is => 'ro', isa => 'Int', required => 1);
has 'mode' => (is => 'ro', isa => 'Str', required => 1);
has 'bidirectional' => (is => 'ro', isa => 'Bool', default => 0);
sub BUILD
{
my $self = shift;
if(not blessed $self->init)
{
my ($klass, $kwargs);
eval {
($klass, $kwargs) = @{ decode_json($self->init) };
};
confess("FusedRNN failed to init $@") if $@;
$self->init($self->get_init_registry->{ lc $klass }->new(%$kwargs));
lib/AI/MXNet/KVStore.pm
=head1 DESCRIPTION
Key value store interface of MXNet for parameter synchronization, over multiple devices.
=cut
has 'handle' => (is => 'ro', isa => 'KVStoreHandle', required => 1);
has '_updater' => (is => 'rw', isa => 'AI::MXNet::Updater');
has '_updater_func' => (is => 'rw', isa => 'CodeRef');
sub DEMOLISH
{
check_call(AI::MXNetCAPI::KVStoreFree(shift->handle));
}
=head2 init
Initialize a single or a sequence of key-value pairs into the store.
For each key, one must init it before push and pull.
Only worker 0's (rank == 0) data are used.
This function returns after data have been initialized successfully
lib/AI/MXNet/KVStore.pm
{
my $is_worker = check_call(AI::MXNetCAPI::KVStoreIsWorkerNode());
if($self->type eq 'dist' and $is_worker)
{
my $optim_str = MIME::Base64::encode_base64(Storable::freeze($optimizer), "");
$self->_send_command_to_servers(0, $optim_str);
}
else
{
$self->_updater(AI::MXNet::Optimizer->get_updater($optimizer));
$self->_set_updater(sub { &{$self->_updater}(@_) });
}
}
=head2 type
Get the type of this kvstore
Returns
-------
type : str
lib/AI/MXNet/KVStore.pm
This function only changes the local store. Use set_optimizer for
multi-machines.
Parameters
----------
updater : function
the updater function
Examples
--------
>>> my $update = sub { my ($key, $input, $stored) = @_;
... print "update on key: $key\n";
... $stored += $input * 2; };
>>> $kv->_set_updater($update)
>>> $kv->pull(3, out=>$a)
>>> print $a->aspdl()
[[ 4. 4. 4.]
[ 4. 4. 4.]]
>>> $kv->push(3, mx->nd->ones($shape))
update on key: 3
>>> $kv->pull(3, out=>$a)
>>> print $a->aspdl()
[[ 6. 6. 6.]
[ 6. 6. 6.]]
=cut
method _set_updater(CodeRef $updater_func)
{
$self->_updater_func(
sub {
my ($index, $input_handle, $storage_handle) = @_;
$updater_func->(
$index,
AI::MXNet::NDArray->new(handle => $input_handle),
AI::MXNet::NDArray->new(handle => $storage_handle)
);
}
);
check_call(
AI::MXNetCAPI::KVStoreSetUpdater(
lib/AI/MXNet/KVStore.pm
kv : KVStore
The created AI::MXNet::KVStore
=cut
method create(Str $name='local')
{
my $handle = check_call(AI::MXNetCAPI::KVStoreCreate($name));
return __PACKAGE__->new(handle => $handle);
}
sub _key_value
{
my ($keys, $vals) = @_;
if(not ref $keys)
{
if(blessed $vals)
{
return ([$keys], [$vals->handle]);
}
else
{
lib/AI/MXNet/KVStore.pm
assert(blessed($value) and $value->isa('AI::MXNet::NDArray'));
return ([($keys)x@$vals], [map { $_->handle } @$vals]);
}
}
}
else
{
assert(not blessed($vals) and @$keys == @$vals);
my @c_keys;
my @c_vals;
zip(sub {
my ($key, $val) = @_;
my ($c_key, $c_val) = _key_value($key, $val);
push @c_keys, @$c_key;
push @c_vals, @$c_val;
}, $keys, $vals);
return (\@c_keys, \@c_vals);
}
}
1;
lib/AI/MXNet/KVStoreServer.pm
=head2 new
Initialize a new KVStoreServer.
Parameters
----------
kvstore : KVStore
=cut
has 'kvstore' => (is => 'ro', isa => 'AI::MXNet::KVStore', required => 1);
has 'handle' => (is => 'ro', isa => 'KVStoreHandle', default => sub { shift->kvstore->handle }, lazy => 1);
has 'init_logging' => (is => 'rw', isa => 'Int', default => 0);
# return the server controller
method _controller()
{
return sub {
my ($cmd_id, $cmd_body) = @_;
if (not $self->init_logging)
{
## TODO write logging
$self->init_logging(1);
}
if($cmd_id == 0)
{
my $optimizer = Storable::thaw(MIME::Base64::decode_base64($cmd_body));
$self->kvstore->set_optimizer($optimizer);
lib/AI/MXNet/LRScheduler.pm
package AI::MXNet::LRScheduler;
use strict;
use warnings;
use Mouse;
use AI::MXNet::Function::Parameters;
use AI::MXNet::Logging;
use overload "&{}" => sub { my $self = shift; sub { $self->call(@_) } },
fallback => 1;
=head1 NAME
AI::MXNet::LRScheduler - The adaptive scheduler of the learning rate.
=cut
=head1 DESCRIPTION
Learning rate scheduler, which adaptively changes the learning rate based on the
lib/AI/MXNet/LRScheduler.pm
the factor by which to reduce the learning rate.
=cut
use Mouse;
extends 'AI::MXNet::LRScheduler';
has 'step' => (is => 'ro', isa => 'Int', required => 1);
has 'factor' => (is => 'ro', isa => 'Num', default => 1);
has 'count' => (is => 'rw', isa => 'Int', default => 1);
has 'stop_factor_lr' => (is => 'ro', isa => 'Num', default => 1e-8);
sub BUILD
{
my $self = shift;
confess("Schedule step must be greater or equal than 1")
if $self->step < 1;
confess("Factor must be no more than 1 to make lr reduce")
if $self->factor > 1;
}
method call(Int $num_update)
{
lib/AI/MXNet/LRScheduler.pm
the factor for reducing the learning rate
=cut
use Mouse;
extends 'AI::MXNet::LRScheduler';
has 'step' => (is => 'ro', isa => 'ArrayRef[Int]', required => 1);
has 'factor' => (is => 'ro', isa => 'Num', default => 1);
has 'cur_step_ind' => (is => 'rw', isa => 'Int', default => 0);
has 'count' => (is => 'rw', isa => 'Int', default => 0);
sub BUILD
{
my $self = shift;
confess("step array must have at least one member")
unless @{ $self->step } >= 1;
for (my $i = 0; $i < @{ $self->step }; $i++)
{
confess("Schedule step must be an increasing integer list")
if($i and $self->step->[$i] <= $self->step->[$i-1]);
confess("Schedule step must be greater or equal than 1")
if $self->step->[$i] < 1;
lib/AI/MXNet/Logging.pm
package AI::MXNet::Logging;
## TODO
use Mouse;
sub warning { shift; warn sprintf(shift, @_) . "\n" };
*debug = *info = *warning;
sub get_logger { __PACKAGE__->new }
1;
lib/AI/MXNet/Metric.pm
use warnings;
use AI::MXNet::Function::Parameters;
use Scalar::Util qw/blessed/;
=head1 NAME
AI::MXNet::Metric - Online evaluation metric module.
=cut
# Calculate the trailing-dimension size of a PDL or NDArray input, used for shape checks.
sub _calculate_shape
{
my $input = shift;
my ($shape);
if(blessed($input))
{
if($input->isa('PDL'))
{
$shape = $input->shape->at(-1);
}
else
lib/AI/MXNet/Metric.pm
) unless $pred_shape == $label_shape;
}
=head1 DESCRIPTION
Base class of all evaluation metrics.
=cut
package AI::MXNet::EvalMetric;
use Mouse;
use overload '""' => sub {
return "EvalMetric: "
.Data::Dumper->new(
[shift->get_name_value()]
)->Purity(1)->Deepcopy(1)->Terse(1)->Dump
}, fallback => 1;
has 'name' => (is => 'rw', isa => 'Str');
has 'num' => (is => 'rw', isa => 'Int');
has 'num_inst' => (is => 'rw', isa => 'Maybe[Int|ArrayRef[Int]]');
has 'sum_metric' => (is => 'rw', isa => 'Maybe[Num|ArrayRef[Num]]');
sub BUILD
{
shift->reset;
}
method update($label, $pred)
{
confess('NotImplemented');
}
method reset()
lib/AI/MXNet/Metric.pm
$value = [$value] unless ref $value;
my %ret;
@ret{ @$name } = @$value;
return \%ret;
}
package AI::MXNet::CompositeEvalMetric;
use Mouse;
extends 'AI::MXNet::EvalMetric';
has 'metrics' => (is => 'rw', isa => 'ArrayRef[AI::MXNet::EvalMetric]', default => sub { [] });
has '+name' => (default => 'composite');
# Add a child metric.
method add(AI::MXNet::EvalMetric $metric)
{
push @{ $self->metrics }, $metric;
}
# Get a child metric.
method get_metric(Int $index)
lib/AI/MXNet/Metric.pm
package AI::MXNet::Accuracy;
use Mouse;
use AI::MXNet::Base;
extends 'AI::MXNet::EvalMetric';
has '+name' => (default => 'accuracy');
method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
{
AI::MXNet::Metric::check_label_shapes($labels, $preds);
zip(sub {
my ($label, $pred_label) = @_;
if(join(',', @{$pred_label->shape}) ne join(',', @{$label->shape}))
{
$pred_label = AI::MXNet::NDArray->argmax_channel($pred_label);
}
AI::MXNet::Metric::check_label_shapes($label, $pred_label);
my $sum = ($pred_label->aspdl->flat == $label->aspdl->flat)->sum;
$self->sum_metric($self->sum_metric + $sum);
$self->num_inst($self->num_inst + $pred_label->size);
}, $labels, $preds);
}
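## A usage sketch for the method above (toy tensors; get() comes from the base class):
my $acc = AI::MXNet::Accuracy->new;
$acc->update(
    [mx->nd->array([0, 1, 1])],                               ## labels
    [mx->nd->array([[0.7, 0.3], [0.2, 0.8], [0.4, 0.6]])],    ## per-class scores
);
my ($name, $value) = $acc->get;   ## ('accuracy', 1)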
package AI::MXNet::TopKAccuracy;
use Mouse;
use List::Util qw/min/;
use AI::MXNet::Base;
extends 'AI::MXNet::EvalMetric';
has '+name' => (default => 'top_k_accuracy');
has 'top_k' => (is => 'rw', isa => 'Int', default => 1);
sub BUILD
{
my $self = shift;
confess("Please use Accuracy if top_k is no more than 1")
unless $self->top_k > 1;
$self->name($self->name . "_" . $self->top_k);
}
method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
{
AI::MXNet::Metric::check_label_shapes($labels, $preds);
zip(sub {
my ($label, $pred_label) = @_;
confess('Predictions should be no more than 2 dims')
unless @{ $pred_label->shape } <= 2;
$pred_label = $pred_label->aspdl->qsorti;
$label = $label->astype('int32')->aspdl;
AI::MXNet::Metric::check_label_shapes($label, $pred_label);
my $num_samples = $pred_label->shape->at(-1);
my $num_dims = $pred_label->ndims;
if($num_dims == 1)
{
lib/AI/MXNet/Metric.pm view on Meta::CPAN
# Calculate the F1 score of a binary classification problem.
package AI::MXNet::F1;
use Mouse;
use AI::MXNet::Base;
extends 'AI::MXNet::EvalMetric';
has '+name' => (default => 'f1');
method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
{
AI::MXNet::Metric::check_label_shapes($labels, $preds);
zip(sub {
my ($label, $pred_label) = @_;
AI::MXNet::Metric::check_label_shapes($label, $pred_label);
$pred_label = $pred_label->aspdl->maximum_ind;
$label = $label->astype('int32')->aspdl;
confess("F1 currently only supports binary classification.")
if $label->uniq->shape->at(0) > 2;
my ($true_positives, $false_positives, $false_negatives) = (0,0,0);
zip(sub{
my ($y_pred, $y_true) = @_;
if($y_pred == 1 and $y_true == 1)
lib/AI/MXNet/Metric.pm view on Meta::CPAN
}, $labels, $preds);
}
package AI::MXNet::Perplexity;
use Mouse;
use AI::MXNet::Base;
extends 'AI::MXNet::EvalMetric';
has '+name' => (default => 'Perplexity');
has 'ignore_label' => (is => 'ro', isa => 'Maybe[Int]');
has 'axis' => (is => 'ro', isa => 'Int', default => -1);
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
return $class->$orig(ignore_label => $_[0]) if @_ == 1;
return $class->$orig(@_);
};
=head1 NAME
AI::MXNet::Perplexity
=cut
lib/AI/MXNet/Metric.pm view on Meta::CPAN
axis : int (default -1)
The axis from prediction that was used to
compute softmax. By default uses the last
axis.
=cut
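## A sketch of these parameters in use ($labels and $preds are placeholders;
## ignore_label => 0 treats label 0 as padding and excludes it from the count):
my $ppl = AI::MXNet::Perplexity->new(ignore_label => 0);
$ppl->update($labels, $preds);    ## $preds are softmax outputs, $labels are token ids
my ($name, $value) = $ppl->get;   ## exp(total_loss / num_tokens)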
method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
{
AI::MXNet::Metric::check_label_shapes($labels, $preds);
my ($loss, $num) = (0, 0);
zip(sub {
my ($label, $pred) = @_;
my $label_shape = $label->shape;
my $pred_shape = $pred->shape;
assert(
(product(@{ $label_shape }) == product(@{ $pred_shape })/$pred_shape->[-1]),
"shape mismatch: (@$label_shape) vs. (@$pred_shape)"
);
$label = $label->as_in_context($pred->context)->reshape([$label->size]);
$pred = AI::MXNet::NDArray->pick($pred, $label->astype('int32'), { axis => $self->axis });
if(defined $self->ignore_label)
lib/AI/MXNet/Metric.pm view on Meta::CPAN
# Calculate Mean Absolute Error loss
package AI::MXNet::MAE;
use Mouse;
use AI::MXNet::Base;
extends 'AI::MXNet::EvalMetric';
has '+name' => (default => 'mae');
method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
{
AI::MXNet::Metric::check_label_shapes($labels, $preds);
zip(sub {
my ($label, $pred) = @_;
$label = $label->aspdl;
$pred = $pred->aspdl;
if($label->ndims == 1)
{
$label = $label->reshape(1, $label->shape->at(0));
}
$self->sum_metric($self->sum_metric + ($label - $pred)->abs->avg);
$self->num_inst($self->num_inst + 1);
}, $labels, $preds);
lib/AI/MXNet/Metric.pm view on Meta::CPAN
# Calculate Mean Squared Error loss
package AI::MXNet::MSE;
use Mouse;
use AI::MXNet::Base;
extends 'AI::MXNet::EvalMetric';
has '+name' => (default => 'mse');
method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
{
AI::MXNet::Metric::check_label_shapes($labels, $preds);
zip(sub {
my ($label, $pred) = @_;
$label = $label->aspdl;
$pred = $pred->aspdl;
if($label->ndims == 1)
{
$label = $label->reshape(1, $label->shape->at(0));
}
$self->sum_metric($self->sum_metric + (($label - $pred)**2)->avg);
$self->num_inst($self->num_inst + 1);
}, $labels, $preds);
lib/AI/MXNet/Metric.pm view on Meta::CPAN
# Calculate Root Mean Squared Error loss
package AI::MXNet::RMSE;
use Mouse;
use AI::MXNet::Base;
extends 'AI::MXNet::EvalMetric';
has '+name' => (default => 'rmse');
method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
{
AI::MXNet::Metric::check_label_shapes($labels, $preds);
zip(sub {
my ($label, $pred) = @_;
$label = $label->aspdl;
$pred = $pred->aspdl;
if($label->ndims == 1)
{
$label = $label->reshape(1, $label->shape->at(0));
}
$self->sum_metric($self->sum_metric + sqrt((($label - $pred)**2)->avg));
$self->num_inst($self->num_inst + 1);
}, $labels, $preds);
}
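## MAE, MSE and RMSE all share the update contract above; a toy sketch:
my $rmse = AI::MXNet::RMSE->new;
$rmse->update(
    [mx->nd->array([1, 2, 3])],              ## labels, shape [3]
    [mx->nd->array([[1.5], [2], [2.5]])],    ## predictions, shape [3,1]
);
print +($rmse->get)[1], "\n";   ## sqrt(mean(err**2)) ~= 0.408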
# Calculate Cross Entropy loss
package AI::MXNet::CrossEntropy;
use Mouse;
use AI::MXNet::Base;
extends 'AI::MXNet::EvalMetric';
has '+name' => (default => 'cross-entropy');
has 'eps' => (is => 'ro', isa => 'Num', default => 1e-8);
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
return $class->$orig(eps => $_[0]) if @_ == 1;
return $class->$orig(@_);
};
method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
{
AI::MXNet::Metric::check_label_shapes($labels, $preds);
zip(sub {
my ($label, $pred) = @_;
$label = $label->aspdl->flat;
$pred = $pred->aspdl;
my $label_shape = $label->shape->at(0);
my $pred_shape = $pred->shape->at(-1);
confess(
"Size of label $label_shape and first dimension of pred $pred_shape do not match"
) unless $label_shape == $pred_shape;
my $prob = $pred->index($label);
lib/AI/MXNet/Metric.pm view on Meta::CPAN
>>> $labels = [mx->nd->array([[1, 0], [0, 1], [0, 1]])]
>>> $pr = mx->metric->PearsonCorrelation()
>>> $pr->update($labels, $predicts)
>>> print pr->get()
('pearson-correlation', '0.421637061887229')
=cut
method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
{
AI::MXNet::Metric::check_label_shapes($labels, $preds);
zip(sub {
my ($label, $pred) = @_;
AI::MXNet::Metric::check_label_shapes($label, $pred);
$label = $label->aspdl->flat;
$pred = $pred->aspdl->flat;
my ($label_mean, $label_stdv) = ($label->stats)[0, 6];
my ($pred_mean, $pred_stdv) = ($pred->stats)[0, 6];
$self->sum_metric(
$self->sum_metric
+
((($label-$label_mean)*($pred-$pred_mean))->sum/$label->nelem)/(($label_stdv*$pred_stdv)->at(0))
);
$self->num_inst($self->num_inst + 1);
}, $labels, $preds);
}
=head1 DESCRIPTION
Custom evaluation metric that takes a sub ref.
Parameters
----------
eval_function : subref
Customized evaluation function.
name : str, optional
The name of the metric
allow_extra_outputs : bool
If true, the prediction outputs may contain extra entries beyond what the metric consumes.
This is useful in RNNs, where the states are also produced
lib/AI/MXNet/Metric.pm view on Meta::CPAN
use Mouse;
use AI::MXNet::Base;
extends 'AI::MXNet::EvalMetric';
has 'eval_function' => (is => 'ro', isa => 'CodeRef');
has 'allow_extra_outputs' => (is => 'ro', isa => 'Int', default => 0);
method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
{
AI::MXNet::Metric::check_label_shapes($labels, $preds)
unless $self->allow_extra_outputs;
zip(sub {
my ($label, $pred) = @_;
$label = $label->aspdl;
$pred = $pred->aspdl;
my $value = $self->eval_function->($label, $pred);
my $sum_metric = ref $value ? $value->[0] : $value;
my $num_inst = ref $value ? $value->[1] : 1;
$self->sum_metric($self->sum_metric + $sum_metric);
$self->num_inst($self->num_inst + $num_inst);
}, $labels, $preds);
}
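## A sketch of wiring a subref into this class; as the conversion above shows,
## the function receives PDLs, and may return either a number or [$sum, $count]:
my $metric = AI::MXNet::CustomMetric->new(
    name          => 'my-mae',
    eval_function => sub {
        my ($label, $pred) = @_;
        return ($label->flat - $pred->flat)->abs->avg;
    },
);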
package AI::MXNet::Metric;
=head2 create
Create an evaluation metric.
Parameters
----------
metric : str or sub ref
The name of the metric, or a function
providing statistics given pred, label NDArray.
=cut
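## Both spellings named in the POD are sketched here (assuming mx->metric->create
## dispatches to this method, as elsewhere in the distribution); short names come
## from the table below, and a subref is wrapped as a custom metric:
my $acc = mx->metric->create('acc');
my $fn  = mx->metric->create(sub {
    my ($label, $pred) = @_;
    return ($label->flat == $pred->flat)->avg;
});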
my %metrics = qw/
acc AI::MXNet::Accuracy
accuracy AI::MXNet::Accuracy
ce AI::MXNet::CrossEntropy
f1 AI::MXNet::F1
mae AI::MXNet::MAE
lib/AI/MXNet/Metric.pm view on Meta::CPAN
return $metrics{ lc($metric) }->new(%kwargs);
}
}
{
no strict 'refs';
no warnings 'redefine';
for my $metric (values %metrics)
{
my ($name) = $metric =~ /(\w+)$/;
*{__PACKAGE__."::$name"} = sub { shift; $metric->new(@_); };
}
}
1;
lib/AI/MXNet/Module.pm view on Meta::CPAN
return;
}
if($kvstore)
{
my $name = $param_names->[$index];
# push gradient, priority is negative index
$kvstore->push($name, $grad_list, priority => -$index);
# pull back the sum gradients, to the same locations.
$kvstore->pull($name, out => $grad_list, priority => -$index);
}
enumerate(sub {
my ($k, $w, $g) = @_;
# fake an index here so the optimizer creates a distinct
# state for the same index on different devices;
# TODO(mli): use a better solution later
&{$updater}($index*$num_device+$k, $g, $w);
}, $arg_list, $grad_list);
}, $param_arrays, $grad_arrays);
}
method load_checkpoint(Str $prefix, Int $epoch)
lib/AI/MXNet/Module.pm view on Meta::CPAN
=cut
extends 'AI::MXNet::Module::Base';
has '_symbol' => (is => 'ro', init_arg => 'symbol', isa => 'AI::MXNet::Symbol', required => 1);
has '_data_names' => (is => 'ro', init_arg => 'data_names', isa => 'ArrayRef[Str]');
has '_label_names' => (is => 'ro', init_arg => 'label_names', isa => 'Maybe[ArrayRef[Str]]');
has 'work_load_list' => (is => 'rw', isa => 'Maybe[ArrayRef[Int]]');
has 'fixed_param_names' => (is => 'rw', isa => 'Maybe[ArrayRef[Str]]');
has 'state_names' => (is => 'rw', isa => 'Maybe[ArrayRef[Str]]');
has 'logger' => (is => 'ro', default => sub { AI::MXNet::Logging->get_logger });
has '_p' => (is => 'rw', init_arg => undef);
has 'context' => (
is => 'ro',
isa => 'AI::MXNet::Context|ArrayRef[AI::MXNet::Context]',
default => sub { AI::MXNet::Context->cpu }
);
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
if(@_%2)
{
my $symbol = shift;
return $class->$orig(symbol => $symbol, @_);
}
return $class->$orig(@_);
};
sub BUILD
{
my $self = shift;
$self->_p(AI::MXNet::Module::Private->new);
my $context = $self->context;
if(blessed $context)
{
$context = [$context];
}
$self->_p->_context($context);
my $work_load_list = $self->work_load_list;
lib/AI/MXNet/Module.pm view on Meta::CPAN
{
if($self->params_initialized and not $force_init)
{
AI::MXNet::Logging->warning(
"Parameters already initialized and force_init=0. "
."init_params call ignored."
);
return;
}
assert($self->binded, 'call bind before initializing the parameters');
my $_impl = sub {
my ($name, $arr, $cache) = @_;
# Internal helper for parameter initialization
if(defined $cache)
{
if(exists $cache->{$name})
{
my $cache_arr = $cache->{$name};
# just in case the cached array is just the target itself
if($cache_arr->handle ne $arr->handle)
{
lib/AI/MXNet/Module.pm view on Meta::CPAN
if(Data::Dumper->Dump(\@curr_data_shapes) ne Data::Dumper->Dump(\@new_data_shapes))
{
my $new_dshape;
if($data_batch->can('provide_data') and $data_batch->provide_data)
{
$new_dshape = $data_batch->provide_data;
}
else
{
$new_dshape = [];
zip(sub {
my ($i, $shape) = @_;
push @{ $new_dshape }, AI::MXNet::DataDesc->new(
$i->name, $shape, $i->dtype, $i->layout
);
}, $self->data_shapes, \@new_data_shapes);
}
my $new_lshape;
if($data_batch->can('provide_label') and $data_batch->provide_label)
{
$new_lshape = $data_batch->provide_label;
}
elsif($data_batch->can('label') and $data_batch->label)
{
$new_lshape = [];
zip(sub {
my ($i, $j) = @_;
push @{ $new_lshape }, AI::MXNet::DataDesc->new(
$i->name, $j->shape, $i->dtype, $i->layout
);
}, $self->label_shapes, $data_batch->label);
}
$self->reshape(data_shapes => $new_dshape, label_shapes => $new_lshape);
}
$self->_p->_exec_group->forward($data_batch, $is_train);
}
lib/AI/MXNet/Module/Base.pm view on Meta::CPAN
this value might not be well defined.
When those intermediate-level API are implemented properly, the following
high-level API will be automatically available for a module:
- fit: train the module parameters on a data set
- predict: run prediction on a data set and collect outputs
- score: run prediction on a data set and evaluate performance
=cut
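## A sketch of that high-level flow (mx->mod->Module, the iterators and $net
## are assumptions drawn from the broader distribution):
my $mod = mx->mod->Module(symbol => $net, context => mx->cpu);
$mod->fit($train_iter, eval_data => $eval_iter, num_epoch => 8);
my $outputs = $mod->predict($eval_iter);
my $results = $mod->score($eval_iter, mx->metric->create('acc'));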
has 'logger' => (is => 'rw', default => sub { AI::MXNet::Logging->get_logger });
has '_symbol' => (is => 'rw', init_arg => 'symbol', isa => 'AI::MXNet::Symbol');
has [
qw/binded for_training inputs_need_grad
params_initialized optimizer_initialized/
] => (is => 'rw', isa => 'Bool', init_arg => undef, default => 0);
################################################################################
# High Level API
################################################################################
lib/AI/MXNet/Module/Bucketing.pm view on Meta::CPAN
$validation_sentences, $batch_size, buckets => $buckets,
invalid_label => $invalid_label
);
my $stack = mx->rnn->SequentialRNNCell();
for my $i (0..$num_layers-1)
{
$stack->add(mx->rnn->LSTMCell(num_hidden => $num_hidden, prefix => "lstm_l${i}_"));
}
my $sym_gen = sub {
my $seq_len = shift;
my $data = mx->sym->Variable('data');
my $label = mx->sym->Variable('softmax_label');
my $embed = mx->sym->Embedding(
data => $data, input_dim => scalar(keys %$vocabulary),
output_dim => $num_embed, name => 'embed'
);
$stack->reset;
my ($outputs, $states) = $stack->unroll($seq_len, inputs => $embed, merge_outputs => 1);
my $pred = mx->sym->Reshape($outputs, shape => [-1, $num_hidden]);
lib/AI/MXNet/Module/Bucketing.pm view on Meta::CPAN
symbols to be used depending on the `bucket_key` provided by each different
mini-batch of data
=cut
=head2 new
Parameters
----------
$sym_gen : subref or any perl object that overloads the &{} op.
When called with a bucket key, it returns a triple of
($symbol, $data_names, $label_names).
$default_bucket_key : str or anything else
The key for the default bucket.
$logger : Logger
$context : AI::MXNet::Context or array ref of AI::MXNet::Context objects
Default is cpu(0)
$work_load_list : array ref of Num
Default is undef, indicating uniform workload.
$fixed_param_names: arrayref of str
Default is undef, indicating no network parameters are fixed.
$state_names : arrayref of str
states are similar to data and label, but are not provided by the data iterator.
Instead they are initialized to 0 and can be set by set_states().
=cut
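## Continuing the $sym_gen example above, construction might look like this
## (a sketch; default_bucket_key on the bucketed iterator is an assumption):
my $model = mx->mod->BucketingModule(
    sym_gen            => $sym_gen,
    default_bucket_key => $data_train->default_bucket_key,
    context            => mx->cpu,
);
$model->fit($data_train, eval_data => $data_val, num_epoch => $num_epochs);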
extends 'AI::MXNet::Module::Base';
has '_sym_gen' => (is => 'ro', init_arg => 'sym_gen', required => 1);
has '_default_bucket_key' => (is => 'rw', init_arg => 'default_bucket_key', required => 1);
has '_context' => (
is => 'ro', isa => 'AI::MXNet::Context|ArrayRef[AI::MXNet::Context]',
lazy => 1, default => sub { AI::MXNet::Context->cpu },
init_arg => 'context'
);
has '_work_load_list' => (is => 'rw', init_arg => 'work_load_list', isa => 'ArrayRef[Num]');
has '_curr_module' => (is => 'rw', init_arg => undef);
has '_curr_bucket_key' => (is => 'rw', init_arg => undef);
has '_buckets' => (is => 'rw', init_arg => undef, default => sub { +{} });
has '_fixed_param_names' => (is => 'rw', isa => 'ArrayRef[Str]', init_arg => 'fixed_param_names');
has '_state_names' => (is => 'rw', isa => 'ArrayRef[Str]', init_arg => 'state_names');
has '_params_dirty' => (is => 'rw', init_arg => undef);
sub BUILD
{
my ($self, $original_params) = @_;
$self->_fixed_param_names([]) unless defined $original_params->{fixed_param_names};
$self->_state_names([]) unless defined $original_params->{state_names};
$self->_params_dirty(0);
my ($symbol, $data_names, $label_names) = &{$self->_sym_gen}($self->_default_bucket_key);
$self->_check_input_names($symbol, $data_names//[], "data", 1);
$self->_check_input_names($symbol, $label_names//[], "label", 0);
$self->_check_input_names($symbol, $self->_state_names, "state", 1);
$self->_check_input_names($symbol, $self->_fixed_param_names, "fixed_param", 1);
lib/AI/MXNet/Monitor.pm view on Meta::CPAN
A regular expression specifying which tensors to monitor.
Only tensors with names that match name_pattern will be included.
For example, '.*weight|.*output' will print all weights and outputs;
'.*backward.*' will print all gradients.
=cut
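## A sketch putting interval, stat_func and pattern together (install_monitor
## on the module is an assumption; tic/toc are shown further below):
my $mon = AI::MXNet::Monitor->new(interval => 100, pattern => '.*weight|.*output');
$mod->install_monitor($mon);
## per batch: call $mon->tic before the forward pass, then read
## [step, name, stat] triples from $mon->toc afterwards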
has 'interval' => (is => 'ro', isa => 'Int', required => 1);
has 'stat_func' => (
is => 'ro',
isa => 'CodeRef',
default => sub {
return sub {
# returns |x|/sqrt(size(x)), async execution.
my ($x) = @_;
return $x->norm/sqrt($x->size);
}
},
lazy => 1
);
has 'pattern' => (is => 'ro', isa => 'Str', default => '.*');
has '_sort' => (is => 'ro', isa => 'Bool', init_arg => 'sort', default => 0);
has [qw/queue exes/] => (is => 'rw', init_arg => undef, default => sub { [] });
has [qw/step activated/] => (is => 'rw', init_arg => undef, default => 0);
has 're_pattern' => (
is => 'ro',
init_arg => undef,
default => sub {
my $pattern = shift->pattern;
my $re = eval { qr/$pattern/ };
confess("pattern $pattern failed to compile as a regexp $@")
if $@;
return $re;
},
lazy => 1
);
has 'stat_helper' => (
is => 'ro',
init_arg => undef,
default => sub {
my $self = shift;
return sub {
my ($name, $handle) = @_;
return if(not $self->activated or not $name =~ $self->re_pattern);
my $array = AI::MXNet::NDArray->new(handle => $handle, writable => 0);
push @{ $self->queue }, [$self->step, $name, $self->stat_func->($array)];
}
},
lazy => 1
);
=head2 install
lib/AI/MXNet/Monitor.pm view on Meta::CPAN
method toc()
{
return [] unless $self->activated;
for my $exe (@{ $self->exes })
{
$_->wait_to_read for @{ $exe->arg_arrays };
$_->wait_to_read for @{ $exe->aux_arrays };
}
for my $exe (@{ $self->exes })
{
zip(sub {
my ($name, $array) = @_;
push @{ $self->queue }, [$self->step, $name, $self->stat_func->($array)];
}, $exe->_symbol->list_arguments, $exe->arg_arrays);
zip(sub {
my ($name, $array) = @_;
push @{ $self->queue }, [$self->step, $name, $self->stat_func->($array)];
}, $exe->_symbol->list_auxiliary_states, $exe->aux_arrays);
}
$self->activated(0);
my @res;
if($self->_sort)
{
@{ $self->queue } = sort { $a->[1] cmp $b->[1] } @{ $self->queue };
}
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
'%' => \&modulo,
'%=' => \&imodulo,
'**' => \&power,
'==' => \&equal,
'!=' => \&not_equal,
'>' => \&greater,
'>=' => \&greater_equal,
'<' => \&lesser,
'<=' => \&lesser_equal,
'.=' => \&set,
'=' => sub { $_[0] };
extends 'AI::MXNet::NDArray::Base';
has 'writable' => (is => 'rw', isa => 'Int', default => 1, lazy => 1);
has 'handle' => (is => 'rw', isa => 'NDArrayHandle', required => 1);
sub DEMOLISH
{
check_call(AI::MXNetCAPI::NDArrayFree(shift->handle));
}
method STORABLE_freeze($cloning)
{
my $buf = check_call(AI::MXNetCAPI::NDArraySaveRawBytes($self->handle));
return ($buf,\ $self->writable);
}
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
my $shape = $self->shape;
my $dsize = @$shape;
my $isize = @indices;
confess("Dimensions size $dsize < indexes size $isize")
if $dsize < $isize;
confess("Dimensions size $dsize = indexes size $isize,
ndarray only supports either ->at on dimension 0
or full crop")
if $isize > 1 and $dsize != $isize;
my $i = 0;
zip(sub {
my ($idx, $dim_size) = @_;
confess("Dimension $i mismatch Idx: $idx >= Dim Size: $dim_size")
if $idx >= $dim_size or ($idx + $dim_size) < 0;
++$i;
}, \@indices, $shape);
$i = 0;
for my $v (@indices)
{
$v += $shape->[$i] if $v < 0;
++$i;
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
if $dsize < $isize;
confess("Dimensions size $dsize != slices size $isize,
ndarray only supports either ->slice on dimension 0
or full crop")
if $isize > 1 and $dsize != $isize;
my $i = -1;
@slices = map {
++$i;
ref $_ ? (@$_ == 1 ? [$_->[0], $shape->[$i] - 1] : $_) : ($_ eq 'X' ? [0, $shape->[$i] - 1] : [$_, $_]);
} @slices;
zip(sub {
my ($slice, $dim_size) = @_;
my ($begin, $end, $stride) = @$slice;
confess("NDArray does not support slice strides != 1")
if ($stride//0) > 1;
confess("Dimension $i mismatch slice begin : $begin >= Dim Size: $dim_size")
if $begin >= $dim_size or ($begin + $dim_size) < 0;
confess("Dimension $i mismatch slice end : $end >= Dim Size: $dim_size")
if $end >= $dim_size or ($end + $dim_size) < 0;
}, \@slices, $shape);
$i = 0;
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
$self->handle,
$start,
$stop
)
);
return __PACKAGE__->new(handle => $handle, writable => $self->writable);
}
=head2 _at
Returns a sub NDArray that shares memory with the current one.
Parameters
----------
idx : int
index of the sub array.
=cut
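## A sketch of the public entry point that reaches this method (the ->at
## spelling is taken from the error messages above; '.=' writes through
## because the view shares memory with its parent):
my $nd  = mx->nd->array([[1, 2], [3, 4]]);
my $row = $nd->at(1);   ## shape [2], shares memory with $nd
$row .= 0;              ## $nd is now [[1, 2], [0, 0]]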
method _at(Index $idx)
{
my $handle = check_call(
AI::MXNetCAPI::NDArrayAt(
$self->handle, $idx >=0 ? $idx : $self->shape->[0] + $idx
)
);
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
rfn_scalar : function
function to be called if lhs is numeric value while rhs is NDArray;
if none is provided, then the function is commutative, so rfn_scalar is equal to lfn_scalar
Returns
-------
out: NDArray
result array
=cut
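## An illustration of the reverse case this helper handles; the operator
## overloads earlier in this file route here:
my $x = mx->nd->ones([2]);
my $a = $x - 1;   ## lfn_scalar path: _minus_scalar($x, 1)
my $b = 1 - $x;   ## $reverse is set: _rminus_scalar($x, 1)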
sub _ufunc_helper
{
my ($lhs, $rhs, $fn_array, $lfn_scalar, $rfn_scalar, $reverse) = @_;
($rhs, $lhs) = ($lhs, $rhs) if $reverse and $rfn_scalar;
if(not ref $lhs)
{
if(not $rfn_scalar)
{
return __PACKAGE__->can($lfn_scalar)->(__PACKAGE__, $rhs, $lhs);
}
else
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
qw/broadcast_add _plus_scalar/
);
}
method subtract(AI::MXNet::NDArray|Num $other, $reverse=)
{
return _ufunc_helper(
$self,
$other,
qw/broadcast_sub _minus_scalar _rminus_scalar/,
$reverse
);
}
method isubtract(AI::MXNet::NDArray|Num $other, $reverse=)
{
confess('trying to subtract from a readonly NDArray') unless $self->writable;
return ref $other
? __PACKAGE__->broadcast_sub($self, $other, { out => $self })
: __PACKAGE__->_minus_scalar($self, $other, { out => $self })
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
Returns a new empty handle.
An empty handle can be used to hold a result.
Returns
-------
a new empty ndarray handle
=cut
sub _new_empty_handle
{
my $hdl = check_call(AI::MXNetCAPI::NDArrayCreateNone());
return $hdl;
}
=head2 _new_alloc_handle
Returns a new handle with specified shape and context.
Empty handle is only used to hold results