0.9506 Sat Apr 29 20:26:50 PDT 2017
- Ftrl optimizer, new tests, bugfixes.
0.9505 Sun Apr 23 21:26:04 PDT 2017
- Perplexity bugfix, two new examples.
0.9504 Wed Apr 19 18:59:45 PDT 2017
- LR Scheduler bugfix.
0.9503 Wed Apr 19 13:33:57 PDT 2017
- added an example of inferred text generation via a pre-trained RNN.
- bugfixes/tests.
0.9502 Sat Apr 15 17:18:21 PDT 2017
- optimizations/bugfixes.
0.9501 Sat Apr 8 13:01:00 PDT 2017
- ZoneoutCell, NDArray inferred reshape and moveaxis, cosmetic changes to the Image iterator,
pod reworked to be readable via metacpan.
0.95 Sun Mar 26 17:42:02 PDT 2017
- docs, bugfixes, tests in order to be visible on http://mxnet.io
0.03 Tue Feb 14 07:28:11 PST 2017
- sync up with the current state of the Python interface.
- high level RNN support.
0.02 Tue Feb 14 07:28:11 PST 2017
examples/char_lstm.pl
use Getopt::Long qw(GetOptions HelpMessage);
GetOptions(
    # additional options elided from this excerpt
    'cell-mode=s'     => \(my $cell_mode     = 'LSTM'),
    'sample-size=i'   => \(my $sample_size   = 10000),
    'chkp-epoch=i'    => \(my $chkp_epoch    = 1),
    'bidirectional=i' => \(my $bidirectional = 0),
    'help'            => sub { HelpMessage(0) },
) or HelpMessage(1);
=head1 NAME
char_lstm.pl - Example of training a char-level LSTM RNN on tiny Shakespeare using the high-level RNN interface,
with optional inferred sampling (the RNN generates Shakespeare-like text)
=head1 SYNOPSIS
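A typical invocation (illustrative; every option below has a default):

    perl char_lstm.pl --num-layers 2 --num-hidden 256 --gpus 0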
--num-layers number of stacked RNN layers, default=2
--num-hidden hidden layer size, default=256
--num-embed embed size, default=10
--num-seq sequence size, default=60
--gpus           list of gpus to run on, e.g. 0 or 0,2,5; empty means use the CPU.
                 Increase the batch size when using multiple gpus for best performance.
--kv-store key-value store type, default='device'
--num-epochs max num of epochs, default=25
--lr initial learning rate, default=0.01
--optimizer the optimizer type, default='adam'
--mom momentum for sgd, default=0.0
--wd weight decay for sgd, default=0.00001
--batch-size     the batch size, default=32
--bidirectional use bidirectional cell, default false (0)
--disp-batches show progress for every n batches, default=50
--chkp-prefix prefix for checkpoint files, default='lstm_'
--cell-mode RNN cell mode (LSTM, GRU, RNN, default=LSTM)
--sample-size    size of the inferred sample text generated after each epoch, default=10000
--chkp-epoch     save a checkpoint after this many epochs, default=1 (save after every epoch)
=cut
package AI::MXNet::RNN::IO::ASCIIIterator;
use Mouse;
extends 'AI::MXNet::DataIter';
has 'data' => (is => 'ro', isa => 'PDL', required => 1);
has 'seq_size' => (is => 'ro', isa => 'Int', required => 1);
has '+batch_size' => (is => 'ro', isa => 'Int', required => 1);
lib/AI/MXNet/Base.pm
$c->slice(('X')x$rem, $i) .= $pdl->slice(('X')x$rem, $shuffle[$i])
}
$c;
}
=head2 assert
Parameters
-----------
Bool $input
Str $error_str
Calls Carp::confess with $error_str // "AssertionError" if $input is false.
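For example:

    assert(defined $input, "input must be defined");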
=cut
sub assert
{
my ($input, $error_str) = @_;
local($Carp::CarpLevel) = 1;
Carp::confess($error_str//'AssertionError')
unless $input;
}
=head2 check_call
Checks the return value of a C API call and raises an exception
when an error occurs. Every C API call is wrapped with this function.
Returns the call's return values stripped of the first (status) value;
when called in scalar context, returns the first element of the values list.
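For example (using a wrapped C API call that appears later on this page):

    my $handle = check_call(
        AI::MXNetCAPI::NDArrayReshape($self->handle, scalar(@$new_shape), $new_shape)
    );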
=cut
sub check_call
{
Carp::confess(AI::MXNetCAPI::GetLastError()) if shift;
lib/AI/MXNet/Executor.pm
Parameters
----------
arg_params : HashRef[AI::MXNet::NDArray]
Parameters, hash ref of name to NDArray of arguments
aux_params : Maybe[HashRef[AI::MXNet::NDArray]], optional
Parameters, hash ref of name to NDArray of auxiliary states.
allow_extra_params : boolean, optional
Whether to allow extra parameters that are not needed by the symbol.
If true, no error is thrown when arg_params or aux_params
contain extra parameters that are not needed by the executor.
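A minimal usage sketch ($exec is an AI::MXNet::Executor; both hash refs map
parameter names to NDArrays):

    $exec->copy_params_from($arg_params, $aux_params, 1);  # 1 == allow extra params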
=cut
method copy_params_from(
HashRef[AI::MXNet::NDArray] $arg_params,
Maybe[HashRef[AI::MXNet::NDArray]] $aux_params=,
Maybe[Bool] $allow_extra_params=
)
{
my %arg_dict = %{ $self->arg_dict };
lib/AI/MXNet/Executor.pm
{
$new_grad_dict{ $name } = $darr->reshape($new_shape);
}
}
}
else
{
confess(
"Shape of unspecified array arg:$name changed. "
."This can cause the new executor to not share parameters "
."with the old one. Please check for error in network."
."If this is intended, set partial_shaping=True to suppress this warning."
);
}
$i++;
}
my %new_aux_dict;
$i = 0;
for my $name (@{ $self->_symbol->list_auxiliary_states() })
{
my $new_shape = $aux_shapes->[$i];
lib/AI/MXNet/Executor.pm
else
{
$new_aux_dict{ $name } = $arr->reshape($new_shape);
}
}
else
{
confess(
"Shape of unspecified array aux:$name changed. "
."This can cause the new executor to not share parameters "
."with the old one. Please check for error in network."
."If this is intended, set partial_shaping=True to suppress this warning."
);
}
$i++;
}
return $self->_symbol->bind(
ctx => $self->_ctx,
args => \%new_arg_dict,
args_grad => \%new_grad_dict,
grad_req => $self->_grad_req,
lib/AI/MXNet/Initializer.pm
method register()
{
my ($name) = $self =~ /::(\w+)$/;
my $orig_name = $name;
$name = lc $name;
if(exists $init_registry{ $name })
{
my $existing = $init_registry{ $name };
warn(
"WARNING: New initializer $self.$name"
."is overriding existing initializer $existing.$name"
);
}
$init_registry{ $name } = $self;
{
no strict 'refs';
no warnings 'redefine';
*{"$orig_name"} = sub { shift; $self->new(@_) };
*InitDesc = sub { shift; AI::MXNet::InitDesc->new(@_) };
}
}
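Once register() runs for a subclass, a constructor shortcut is installed, as
used elsewhere in this distribution:

    my $init = AI::MXNet::Initializer->Uniform(0.01);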
lib/AI/MXNet/Initializer.pm
{
my $x = $i % $shape->[3];
my $y = ($i / $shape->[3]) % $shape->[2];
$weight->index($i) .= (1 - abs($x / $f - $c)) * (1 - abs($y / $f - $c));
}
$arr .= $weight->reshape(reverse @{ $shape });
}
method _init_loc_bias($name, $arr)
{
confess("assert error shape[0] == 6")
unless $arr->shape->[0] == 6;
$arr .= [1.0, 0, 0, 0, 1.0, 0];
}
method _init_zero($name, $arr)
{
$arr .= 0;
}
method _init_one($name, $arr)
lib/AI/MXNet/Initializer.pm
$arr .= 1;
}
method _init_beta($name, $arr)
{
$arr .= 0;
}
method _init_weight($name, $arr)
{
confess("Virtual method, subclass must override it");
}
method _init_default($name, $arr)
{
confess(
"Unknown initialization pattern for $name. "
.'Default initialization is now limited to '
.'"weight", "bias", "gamma" (1.0), and "beta" (0.0).'
.'Please use mx.sym.Variable(init=mx.init.*) to set initialization pattern'
);
lib/AI/MXNet/Module/Base.pm
If not undef, should be a hash ref of existing arg_params.
:$aux_params : Maybe[HashRef[AI::MXNet::NDArray]]
If not undef, should be a hash ref of existing aux_params.
:$allow_missing=0 : Bool
If true, params could contain missing values, and the initializer will be
called to fill those missing params.
:$force_init=0 : Bool
If true, will force re-initialize even if already initialized.
:$allow_extra=0 : Bool, optional
Whether to allow extra parameters that are not needed by the symbol.
If true, no error is thrown when arg_params or aux_params
contain extra parameters that are not needed by the executor.
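A typical call (sketch; $mod is a module instance):

    $mod->init_params(
        initializer   => AI::MXNet::Initializer->Uniform(0.01),
        allow_missing => 1
    );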
=cut
method init_params(
Maybe[AI::MXNet::Initializer] :$initializer=AI::MXNet::Initializer->Uniform(0.01),
Maybe[HashRef[AI::MXNet::NDArray]] :$arg_params=,
Maybe[HashRef[AI::MXNet::NDArray]] :$aux_params=,
Bool :$allow_missing=0,
Bool :$force_init=0,
Bool :$allow_extra=0
lib/AI/MXNet/Module/Base.pm
Hash ref of name to value (NDArray) mapping.
$aux_params= : Maybe[HashRef[AI::MXNet::NDArray]]
Hash Ref of name to value (`NDArray`) mapping.
:$allow_missing=0 : Bool
If true, params could contain missing values, and the initializer will be
called to fill those missing params.
:$force_init=0 : Bool
If true, will force re-initialize even if already initialized.
:$allow_extra=0 : Bool
Whether to allow extra parameters that are not needed by the symbol.
If true, no error is thrown when arg_params or aux_params
contain extra parameters that are not needed by the executor.
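For example, as exercised in t/test_module.t at the bottom of this page:

    $mod->set_params($arg_params, $aux_params, force_init => 1);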
=cut
method set_params(
Maybe[HashRef[AI::MXNet::NDArray]] $arg_params=,
Maybe[HashRef[AI::MXNet::NDArray]] $aux_params=,
Bool :$allow_missing=0,
Bool :$force_init=0,
Bool :$allow_extra=0
)
lib/AI/MXNet/NDArray.pm
AI::MXNetCAPI::NDArrayAt(
$self->handle, $idx >=0 ? $idx : $self->shape->[0] + $idx
)
);
return __PACKAGE__->new(handle => $handle, writable => $self->writable);
}
=head2 reshape
Returns a reshaped NDArray that shares the memory with the current one.
One shape dimension can be -1. In this case, the value is inferred
from the length of the array and remaining dimensions.
Parameters
----------
new_shape : Shape
new shape of NDArray
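For example (illustrative):

    my $a = mx->nd->ones([2, 6]);
    my $b = $a->reshape([3, -1]);    # -1 is inferred as 4; $b shares $a's memory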
=cut
method reshape(ArrayRef[Int] $new_shape)
{
my $i = -1;
my @inferred = map { $i++; $_ == -1 ? ($i) : () } @$new_shape;
assert((@inferred <= 1), 'Only one dimension can be inferred.');
if(@inferred)
{
$new_shape->[$inferred[0]] = product(@{ $self->shape })/product(map { abs($_) } @{ $new_shape });
}
my $handle = check_call(
AI::MXNetCAPI::NDArrayReshape(
$self->handle,
scalar(@$new_shape),
$new_shape
)
);
return __PACKAGE__->new(handle => $handle, writable => $self->writable);
}
lib/AI/MXNet/NDArray.pm
Broadcasts the current NDArray to the given shape.
Parameters
----------
Shape $shape : the shape to broadcast
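For example (illustrative):

    my $a = mx->nd->ones([1, 3]);
    my $b = $a->broadcast_to([2, 3]);    # the single row is repeated twice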
=cut
method broadcast_to(Shape $shape)
{
my $cur_shape = $self->shape;
my $err_str = "operands could not be broadcast together with remapped shapes"
."[original->remapped]: [@$cur_shape] and requested shape [@$shape]";
if(@$shape < @$cur_shape)
{
confess($err_str);
}
@$cur_shape = ((1)x(@$shape - @$cur_shape), @$cur_shape);
my $cur_shape_arr = pdl($cur_shape);
my $broadcasting_axes = ($cur_shape_arr != pdl($shape))->which->unpdl;
if (grep { $cur_shape->[$_] != 1 } @$broadcasting_axes)
{
confess($err_str);
}
if(join(',',@$cur_shape) ne join(',',@{ $self->shape }))
{
return __PACKAGE__->SUPER::broadcast_to($self->reshape($cur_shape),{ shape => $shape });
}
else
{
return __PACKAGE__->SUPER::broadcast_to($self, { shape => $shape });
}
}
lib/AI/MXNet/Optimizer.pm
{
my $name = $self;
($name) = $name =~ /::(\w+)$/;
{ no strict 'refs'; *{__PACKAGE__."::$name"} = sub { $self }; }
$name = lc $name;
if(exists $opt_registry{ $name })
{
my $existing = $opt_registry{ $name };
warn(
"WARNING: New optimizer $self.$name"
."is overriding existing optimizer $existing.$name"
);
}
$opt_registry{ $name } = $self;
}
=head2 create_optimizer
Create an optimizer with the specified name.
Parameters
lib/AI/MXNet/Optimizer.pm
{
$self->lr_scheduler->base_lr($self->learning_rate);
}
$self->lr($self->learning_rate);
$self->num_update($self->begin_num_update);
$self->idx2name({ %{ $self->param_idx2name } });
$self->set_lr_mult({});
$self->set_wd_mult({});
}
# Create additional optimizer state such as momentum.
# override in implementations.
method create_state($index, $weight){}
# Update the parameters. override in implementations
method update($index, $weight, $grad, $state){}
# set lr scale is deprecated. Use set_lr_mult instead.
method set_lr_scale($args_lrscale)
{
Carp::cluck("set lr scale is deprecated. Use set_lr_mult instead.");
}
=head2 set_lr_mult
lib/AI/MXNet/RNN/Cell.pm
Sequentially stacks multiple RNN cells.
Parameters
----------
params : AI::MXNet::RNN::Params or undef
container for weight sharing between cells.
created if undef.
=cut
has [qw/_override_cell_params _cells/] => (is => 'rw', init_arg => undef);
sub BUILD
{
my ($self, $original_arguments) = @_;
$self->_override_cell_params(defined $original_arguments->{params});
$self->_cells([]);
}
=head2 add
Append a cell to the stack.
Parameters
----------
$cell : AI::MXNet::RNN::Cell::Base
=cut
method add(AI::MXNet::RNN::Cell::Base $cell)
{
push @{ $self->_cells }, $cell;
if($self->_override_cell_params)
{
assert(
$cell->_own_params,
"Either specify params for SequentialRNNCell "
."or child cells, not both."
);
%{ $cell->params->_params } = (%{ $cell->params->_params }, %{ $self->params->_params });
}
%{ $self->params->_params } = (%{ $self->params->_params }, %{ $cell->params->_params });
}
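A short stacking sketch (mx->rnn factory calls, as used in examples/char_lstm.pl):

    my $stack = mx->rnn->SequentialRNNCell();
    $stack->add(mx->rnn->LSTMCell(num_hidden => 256, prefix => 'lstm_l0_'));
    $stack->add(mx->rnn->LSTMCell(num_hidden => 256, prefix => 'lstm_l1_'));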
lib/AI/MXNet/RNN/Cell.pm
cell for forward unrolling
r_cell : AI::MXNet::RNN::Cell::Base
cell for backward unrolling
output_prefix : str, default 'bi_'
prefix for the output names
=cut
has 'l_cell' => (is => 'ro', isa => 'AI::MXNet::RNN::Cell::Base', required => 1);
has 'r_cell' => (is => 'ro', isa => 'AI::MXNet::RNN::Cell::Base', required => 1);
has '_output_prefix' => (is => 'ro', init_arg => 'output_prefix', isa => 'Str', default => 'bi_');
has [qw/_override_cell_params _cells/] => (is => 'rw', init_arg => undef);
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
if(@_ >= 2 and blessed $_[0] and blessed $_[1])
{
my $l_cell = shift(@_);
my $r_cell = shift(@_);
return $class->$orig(
l_cell => $l_cell,
r_cell => $r_cell,
@_
);
}
return $class->$orig(@_);
};
sub BUILD
{
my ($self, $original_arguments) = @_;
$self->_override_cell_params(defined $original_arguments->{params});
if($self->_override_cell_params)
{
assert(
($self->l_cell->_own_params and $self->r_cell->_own_params),
"Either specify params for BidirectionalCell ".
"or child cells, not both."
);
%{ $self->l_cell->params->_params } = (%{ $self->l_cell->params->_params }, %{ $self->params->_params });
%{ $self->r_cell->params->_params } = (%{ $self->r_cell->params->_params }, %{ $self->params->_params });
}
%{ $self->params->_params } = (%{ $self->params->_params }, %{ $self->l_cell->params->_params });
lib/AI/MXNet/Symbol.pm
{
return scalar(check_call(AI::NNVMCAPI::SymbolListInputNames($self->handle, 0)));
}
=head2 infer_type
Infers the types of outputs and arguments, given known types for some arguments.
Known types can be passed either positionally or as keyword arguments.
A list of undefs is returned if not enough information is passed in.
An error is raised if the known types passed in are inconsistent.
Parameters
----------
args : Array
Provide type of arguments in a positional way.
Unknown types can be marked as undef.
kwargs : Hash ref, must be supplied as the sole argument to the method.
Provide keyword arguments of known types.
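A keyword-style sketch, following the hash ref convention described above
(assuming a symbol $sym with a 'data' input):

    my ($arg_types, $out_types, $aux_types) = $sym->infer_type({ data => 'float32' });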
lib/AI/MXNet/Symbol.pm
return (undef, undef, undef);
}
}
=head2 infer_shape
Infers the shapes of outputs and arguments, given known shapes for some arguments.
Known shapes can be passed either positionally or as keyword arguments.
A list of undefs is returned if not enough information is passed in.
An error is raised if the known shapes passed in are inconsistent.
Parameters
----------
args : Array
Provide shapes of arguments in a positional way.
Unknown shapes can be marked as undef.
kwargs : Hash ref
Provide keyword arguments of known shapes.
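For example, mirroring the calls in t/test_infer_shape.t at the bottom of this page:

    my ($arg_shapes, $out_shapes, $aux_shapes) = $out->infer_shape(data => [100, 100]);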
lib/AI/MXNet/Symbol.pm
Optionally, one can specify the shape of a variable. This will be used during
shape inference. If user specified a different shape for this variable using
keyword argument when calling shape inference, this shape information will be ignored.
lr_mult : float
Specify the learning rate multiplier for this variable.
wd_mult : float
Specify the weight decay multiplier for this variable.
dtype : Dtype
Similar to shape, we can specify dtype for this variable.
init : initializer (mx->init->*)
Specify initializer for this variable to override the default initializer
kwargs : hash ref
other additional attribute variables
Returns
-------
variable : Symbol
The created variable symbol.
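For example (illustrative):

    my $w = mx->sym->Variable('w',
        shape => [2, 3],
        init  => AI::MXNet::Initializer->Uniform(0.01)
    );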
=cut
method Variable(
Str $name,
lib/AI/MXNet/TestUtils.pm
);
func check_consistency(
SymbolOrArrayOfSymbols :$sym,
ArrayRef :$ctx_list,
Num :$scale=1,
Str|ArrayRef[Str]|HashRef[Str] :$grad_req='write',
Maybe[HashRef[AI::MXNet::NDArray]] :$arg_params=,
Maybe[HashRef[AI::MXNet::NDArray]] :$aux_params=,
Maybe[HashRef[Num]|Num] :$tol=,
Bool :$raise_on_err=1,
Maybe[AI::MXNet::NDArray] :$ground_truth=
)
{
$tol //= {
float16 => 1e-1,
float32 => 1e-3,
float64 => 1e-5,
uint8 => 0,
int32 => 0
};
lib/AI/MXNet/TestUtils.pm
sub enumerate
{
my ($sub, @arrays) = @_;
my $len = @{ $arrays[0] };
zip($sub, [0..$len-1], @arrays);
}
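The callback receives the index followed by the corresponding elements of
each array, for example:

    enumerate(sub {
        my ($i, $x) = @_;
        print "$i: $x\n";
    }, ['a', 'b', 'c']);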
sub assert
{
my ($input, $error_str) = @_;
local($Carp::CarpLevel) = 1;
Carp::confess($error_str//'AssertionError')
unless $input;
}
=head2 same_array
Check whether two NDArrays share the same memory block.
Parameters
----------
t/test_infer_shape.t
is_deeply($out_shapes->[0], [100, 10]);
my %true_shapes = (
fc2_bias => [10],
fc2_weight => [10, 1000],
fc1_bias => [1000],
fc1_weight => [1000,100]
);
_test_shapes($out, $arg_shapes, %true_shapes);
}
sub test_mlp2_infer_error
{
# Test shape inconsistent case
my $out = mlp2();
my $weight_shape = [1, 100];
my $data_shape = [100, 100];
eval { $out->infer_shape(data=>$data_shape, fc1_weight=>$weight_shape) };
like($@, qr/Shape inconsistent/);
}
sub test_backward_infer
t/test_infer_shape.t
my $a = mx->sym->Variable('a', shape=>[0, 10]);
my $b = mx->sym->Variable('b', shape=>[0, 5]);
my $c = mx->sym->Concat($a, $b, num_args=>2, dim=>1);
my $d = mx->sym->Variable('d', shape=>[2, 0]);
$d = $d + $c;
my ($arg_shapes) = $d->infer_shape();
_test_shapes($d, $arg_shapes, a=>[2,10], b=>[2,5], d=>[2,15]);
}
test_mlp2_infer_shape();
test_mlp2_infer_error();
test_backward_infer();
test_incomplete_infer_elewise();
test_incomplete_infer_mlp();
test_incomplete_infer_slicechannel();
test_incomplete_infer_convolution();
test_incomplete_infer_concat();
t/test_module.t
$mod->set_params($arg_params_correct, {}, force_init=>1);
# test allow missing
$mod->set_params($arg_params_missing, {}, allow_missing=>1, force_init=>1);
ok(dies_like(sub { $mod->set_params($arg_params_missing, {}, force_init=>1, allow_missing=>0); }, qr/fc_/));
# test allow extra
$mod->set_params($arg_params_extra, {}, force_init=>1, allow_missing=>1, allow_extra=>1);
ok(dies_like(sub { $mod->set_params($arg_params_extra, {}, force_init=>1, allow_missing=>1, allow_extra=>0); }, qr/fc_/));
# test allow missing + extra, this will throw a runtime error
ok(dies_like(sub { $mod->set_params($arg_params_missing_extra, {}, force_init=>1, allow_missing=>1, allow_extra=>0); }, qr/fc_/));
}
sub test_forward_reshape
{
my $num_class = 10;
my $data1 = mx->sym->Variable('data1');
my $data2 = mx->sym->Variable('data2');
my $conv1 = mx->sym->Convolution(data=>$data1, kernel=>[2, 2], num_filter=>2, stride=>[2, 2]);
my $conv2 = mx->sym->Convolution(data=>$data2, kernel=>[3, 3], num_filter=>3, stride=>[1, 1]);