lib/AI/MXNet/CachedOp.pm view on Meta::CPAN
sub DEMOLISH
{
check_call(AI::MXNetCAPI::FreeCachedOp(shift->handle));
}
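# Invoke the cached op. Leading AI::MXNet::NDArray arguments are collected as
# positional inputs; the remaining arguments are treated as keyword arguments,
# of which only 'out' (an NDArray or an array ref of NDArrays) is accepted.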
sub call
{
my $self = shift;
my @args;
my %kwargs;
if(blessed $_[0] and $_[0]->isa('AI::MXNet::NDArray'))
{
while(blessed $_[0] and $_[0]->isa('AI::MXNet::NDArray'))
{
push @args, shift(@_);
}
%kwargs = @_;
}
else
{
%kwargs = @_;
}
my $out = delete $kwargs{out};
lib/AI/MXNet/CachedOp.pm view on Meta::CPAN
{
confess(
"AI::MXNet::CachedOp::call got unexpected keyword argument(s): ".
join(', ', keys %kwargs)
);
}
my $original_output;
if(defined $out)
{
$original_output = $out;
if(blessed($out))
{
$out = [$out];
}
}
else
{
$out = [];
}
my $output = check_call(
AI::MXNetCAPI::InvokeCachedOp(
lib/AI/MXNet/Context.pm view on Meta::CPAN
use constant devtype2str => { 1 => 'cpu', 2 => 'gpu', 3 => 'cpu_pinned' };
use constant devstr2type => { cpu => 1, gpu => 2, cpu_pinned => 3 };
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
return $class->$orig(device_type => $_[0])
if @_ == 1 and $_[0] =~ /^(?:cpu|gpu|cpu_pinned)$/;
return $class->$orig(
device_type => $_[0]->device_type,
device_id => $_[0]->device_id
) if @_ == 1 and blessed $_[0];
return $class->$orig(device_type => $_[0], device_id => $_[0])
if @_ == 2 and $_[0] =~ /^(?:cpu|gpu|cpu_pinned)$/;
return $class->$orig(@_);
};
has 'device_type' => (
is => 'rw',
isa => enum([qw[cpu gpu cpu_pinned]]),
default => 'cpu'
);
lib/AI/MXNet/Context.pm view on Meta::CPAN
has 'device_id' => (
is => 'rw',
isa => 'Int',
default => 0
);
use overload
'==' => sub {
my ($self, $other) = @_;
return 0 unless blessed($other) and $other->isa(__PACKAGE__);
return "$self" eq "$other";
},
'""' => sub {
my ($self) = @_;
return sprintf("%s(%s)", $self->device_type, $self->device_id);
};
=head1 NAME
AI::MXNet::Context - A device context.
=cut
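=head1 SYNOPSIS

    A minimal usage sketch (not part of the original POD); it assumes the
    customary "use AI::MXNet qw(mx)" import and the mx->cpu/mx->gpu shortcuts.

        use AI::MXNet qw(mx);
        my $cpu = mx->cpu;                          # cpu(0)
        my $gpu = AI::MXNet::Context->new('gpu');   # gpu(0), via the string BUILDARGS form
        print "$gpu\n";                             # stringifies through the '""' overload
        print "equal\n" if $cpu == mx->cpu(0);      # '==' compares device_type and device_id

=cut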
lib/AI/MXNet/Contrib/AutoGrad.pm view on Meta::CPAN
package AI::MXNet::Contrib::AutoGrad;
use strict;
use warnings;
use AI::MXNet::Base;
use AI::MXNet::Function::Parameters;
use Scalar::Util qw(blessed);
=head1 NAME
AI::MXNet::Contrib::AutoGrad - Autograd for NDArray.
=cut
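=head1 SYNOPSIS

    A minimal sketch (not part of the original POD) of taking gradients via the
    closure-returning method whose body appears further below; the method name
    grad_and_loss and the input values are assumptions.

        use AI::MXNet qw(mx);
        my $grad_fn = AI::MXNet::Contrib::AutoGrad->grad_and_loss(
            sub { my ($x) = @_; return $x * $x }
        );
        my ($grads, $outputs) = $grad_fn->(mx->nd->array([1, 2, 3]));

=cut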
=head2 set_is_training
Set status to training/not training. When training, a graph will be constructed
for gradient computation. Operators will also run with ctx.is_train=True. For example,
lib/AI/MXNet/Contrib/AutoGrad.pm view on Meta::CPAN
return sub {
my @args = @_;
my @variables = @_;
if(defined $argnum)
{
my @argnum = ref $argnum ? @$argnum : ($argnum);
@variables = map { $_[$_] } @argnum;
}
map {
assert(
(blessed($_) and $_->isa('AI::MXNet::NDArray')),
"type of autograd input should NDArray")
} @variables;
my @grads = map { $_->zeros_like } @variables;
__PACKAGE__->mark_variables(\@variables, \@grads);
my $prev = __PACKAGE__->set_is_training(1);
my $outputs = $func->(@args);
__PACKAGE__->set_is_training(0) unless $prev;
__PACKAGE__->compute_gradient(ref $outputs eq 'ARRAY' ? $outputs : [$outputs]);
return (\@grads, $outputs);
};
lib/AI/MXNet/Executor.pm view on Meta::CPAN
while (my ($name, $array) = each %kwargs)
{
if(not find_type_constraint('AcceptableInput')->check($array))
{
confess('only accepts keyword arguments of NDArrays/PDLs/Perl array refs');
}
if(not exists $arg_dict->{ $name })
{
confess("unknown argument $name");
}
if(not blessed($array) or not $array->isa('AI::MXNet::NDArray'))
{
$array = AI::MXNet::NDArray->array($array);
}
if(join(',', @{ $arg_dict->{$name}->shape }) ne join(',', @{ $array->shape }))
{
my $expected = $arg_dict->{$name}->shape;
my $got = $array->shape;
confess("Shape not match! Argument $name, need: @$expected, received: @$got'");
}
$arg_dict->{ $name } .= $array;
lib/AI/MXNet/Executor.pm view on Meta::CPAN
----------
out_grads : NDArray or an array ref of NDArrays or hash ref of NDArrays, optional.
The gradient on the outputs to be propagated back.
This parameter is only needed when bind is called
on outputs that are not a loss function.
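Example (a hedged sketch, not from the original POD; $exec, the head gradient
shape, and the forward call form are assumptions):

    $exec->forward(1);                                  # forward pass with is_train set
    my $head_grad = AI::MXNet::NDArray->ones([2, 3]);   # gradient w.r.t. the output
    $exec->backward($head_grad);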
=cut
method backward(Maybe[AI::MXNet::NDArray|ArrayRef[AI::MXNet::NDArray]|HashRef[AI::MXNet::NDArray]] $out_grads=)
{
$out_grads //= [];
if(blessed $out_grads)
{
$out_grads = [$out_grads];
}
elsif(ref $out_grads eq 'HASH')
{
$out_grads = [ @{ $out_grads }{ @{ $self->symbol->list_outputs() } } ];
}
check_call(
AI::MXNetCAPI::ExecutorBackward(
$self->handle,
lib/AI/MXNet/Executor/Group.pm view on Meta::CPAN
package AI::MXNet::Executor::Group;
use strict;
use warnings;
use Scalar::Util qw(blessed);
use List::Util qw(sum min);
use AI::MXNet::Base;
use AI::MXNet::Function::Parameters;
=head1 NAME
AI::MXNet::Executor::Group - Manager for a group of executors working in different contexts.
=cut
func _split_input_slice($batch_size, $work_load_list)
lib/AI/MXNet/Executor/Group.pm view on Meta::CPAN
push @slices, [$begin, $end];
}
return \@slices;
}
# Load an array ref of arrays into an array ref of arrays specified by slices
func _load_general($data, $targets, $major_axis)
{
zip(sub {
my ($d_src, $d_targets, $axis) = @_;
if(blessed($d_targets) and $d_targets->isa('AI::MXNet::NDArray'))
{
$d_src->copyto($d_targets);
}
elsif(ref $d_targets eq 'ARRAY' and blessed $d_targets->[0])
{
zip(sub {
my ($src, $dst) = @_;
$src->copyto($dst);
}, $d_src, $d_targets);
}
else
{
for my $d (@{ $d_targets })
{
lib/AI/MXNet/Executor/Group.pm view on Meta::CPAN
$p->aux_names($self->symbol->list_auxiliary_states);
$p->execs([]);
$self->_p($p);
$self->grad_req('null') if not $self->for_training;
$self->fixed_param_names([]) unless defined $self->fixed_param_names;
$self->state_names([]) unless defined $self->state_names;
my $data_shapes = [];
for my $d (@{ $self->data_shapes })
{
$d = AI::MXNet::DataDesc->new(name => $d->[0], shape => $d->[1])
unless blessed $d;
push @{ $data_shapes }, $d;
}
$self->data_shapes($data_shapes);
if(defined $self->label_shapes)
{
my $label_shapes = [];
for my $l (@{ $self->label_shapes })
{
$l = AI::MXNet::DataDesc->new(name => $l->[0], shape => $l->[1])
unless blessed $l;
push @{ $label_shapes }, $l;
}
$self->label_shapes($label_shapes);
}
my %data_names = map { $_->name => 1 } @{ $self->data_shapes };
my %param_names = map { $_ => 1 } @{ $self->param_names };
my %fixed_param_names = map { $_ => 1 } @{ $self->fixed_param_names };
my %grad_req;
if(not ref $self->grad_req)
{
lib/AI/MXNet/IO.pm view on Meta::CPAN
package AI::MXNet::IO;
use strict;
use warnings;
use AI::MXNet::Base;
use AI::MXNet::Function::Parameters;
use Scalar::Util qw/blessed/;
=head1 NAME
AI::MXNet::IO - Data loading interface of mxnet.
=cut
# Convert data into canonical form.
method init_data(
AcceptableInput|HashRef[AcceptableInput]|ArrayRef[AcceptableInput]|Undef $data,
Undef|Int :$allow_empty=,
lib/AI/MXNet/IO.pm view on Meta::CPAN
}
if(ref($data) eq 'HASH')
{
while(my ($k, $v) = each %{ $data })
{
push @ret, [$k, $v];
}
}
for my $d (@ret)
{
if(not (blessed $d->[1] and $d->[1]->isa('AI::MXNet::NDArray')))
{
$d->[1] = AI::MXNet::NDArray->array($d->[1]);
}
}
return \@ret;
}
method DataDesc(@args) { AI::MXNet::DataDesc->new(@args) }
method DataBatch(@args) { AI::MXNet::DataBatch->new(@args) }
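# A hedged usage sketch (not part of this module) of the factory shortcuts
# above; the names, shapes, and required fields are assumptions.
#
#     my $desc  = AI::MXNet::IO->DataDesc(name => 'data', shape => [128, 3, 224, 224]);
#     my $batch = AI::MXNet::IO->DataBatch(
#         data => [AI::MXNet::NDArray->zeros([128, 3, 224, 224])]
#     );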
lib/AI/MXNet/Image.pm view on Meta::CPAN
package AI::MXNet::Image;
use strict;
use warnings;
use Scalar::Util qw(blessed);
use AI::MXNet::Base;
use AI::MXNet::Function::Parameters;
=head1 NAME
AI::MXNet::Image - Read individual image files and perform augmentations.
=cut
=head2 imdecode
lib/AI/MXNet/Image.pm view on Meta::CPAN
{
if(not ref $buf)
{
my $pdl_type = PDL::Type->new(DTYPE_MX_TO_PDL->{'uint8'});
my $len; { use bytes; $len = length $buf; }
my $pdl = PDL->new_from_specification($pdl_type, $len);
${$pdl->get_dataref} = $buf;
$pdl->upd_data;
$buf = $pdl;
}
if(not (blessed $buf and $buf->isa('AI::MXNet::NDArray')))
{
$buf = AI::MXNet::NDArray->array($buf, dtype=>'uint8');
}
return AI::MXNet::NDArray->_cvimdecode($buf, { flag => $flag, to_rgb => $to_rgb, ($out ? (out => $out) : ()) });
}
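# A hedged usage sketch (not part of this module): decoding raw JPEG bytes
# read from disk; the file name is hypothetical.
#
#     open my $fh, '<:raw', 'cat.jpg' or die $!;
#     my $buf = do { local $/; <$fh> };
#     my $img = AI::MXNet::Image->imdecode($buf);   # NDArray of shape [height, width, 3]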
=head2 scale_down
Scale down crop size if it's bigger than the image size.
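A hedged sketch (not from the original POD) of the intended behaviour; the
array-ref (width, height) call and return form are assumptions:

    # a 480x500 crop does not fit into a 360x1000 image, so both sides
    # are shrunk by the same ratio, giving 360x375
    my ($w, $h) = AI::MXNet::Image->scale_down([360, 1000], [480, 500]);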
lib/AI/MXNet/Initializer.pm view on Meta::CPAN
----------
$desc : AI::MXNet::InitDesc|str
the name of the corresponding ndarray
or the object that describes the initializer.
$arr : AI::MXNet::NDArray
an ndarray to be initialized.
=cut
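# A hedged usage sketch (not part of this module): initializers are normally
# driven by the module machinery, but can be invoked directly; the Xavier
# class name and the shape are assumptions.
#
#     my $init = AI::MXNet::Xavier->new;
#     my $w    = AI::MXNet::NDArray->zeros([64, 128]);
#     $init->call('fc1_weight', $w);   # fills $w in place according to its name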
method call(Str|AI::MXNet::InitDesc $desc, AI::MXNet::NDArray $arr)
{
return $self->_legacy_init($desc, $arr) unless blessed $desc;
my $init = $desc->attrs->{ __init__ };
if($init)
{
my ($klass, $kwargs) = @{ decode_json($init) };
$self->get_init_registry->{ lc $klass }->new(%{ $kwargs })->_init_weight("$desc", $arr);
$self->_verbose_print($desc, $init, $arr);
}
else
{
$desc = "$desc";
lib/AI/MXNet/Initializer.pm view on Meta::CPAN
has 'init' => (is => 'rw', isa => 'Str|AI::MXNet::Initializer', required => 1);
has 'forget_bias' => (is => 'ro', isa => 'Num', default => 1);
has [qw/num_hidden
num_layers/] => (is => 'ro', isa => 'Int', required => 1);
has 'mode' => (is => 'ro', isa => 'Str', required => 1);
has 'bidirectional' => (is => 'ro', isa => 'Bool', default => 0);
sub BUILD
{
my $self = shift;
if(not blessed $self->init)
{
my ($klass, $kwargs);
eval {
($klass, $kwargs) = @{ decode_json($self->init) };
};
confess("FusedRNN failed to init $@") if $@;
$self->init($self->get_init_registry->{ lc $klass }->new(%$kwargs));
}
}
lib/AI/MXNet/KVStore.pm view on Meta::CPAN
{
my $handle = check_call(AI::MXNetCAPI::KVStoreCreate($name));
return __PACKAGE__->new(handle => $handle);
}
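# Normalize (key, value) input into two parallel array refs: string keys and
# NDArray handles. A single key may map to one NDArray or to an array ref of
# NDArrays (one per device); array refs of keys are processed recursively.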
sub _key_value
{
my ($keys, $vals) = @_;
if(not ref $keys)
{
if(blessed $vals)
{
return ([$keys], [$vals->handle]);
}
else
{
for my $value (@{ $vals })
{
assert(blessed($value) and $value->isa('AI::MXNet::NDArray'));
}
return ([($keys)x@$vals], [map { $_->handle } @$vals]);
}
}
else
{
assert(not blessed($vals) and @$keys == @$vals);
my @c_keys;
my @c_vals;
zip(sub {
my ($key, $val) = @_;
my ($c_key, $c_val) = _key_value($key, $val);
push @c_keys, @$c_key;
push @c_vals, @$c_val;
}, $keys, $vals);
return (\@c_keys, \@c_vals);
}
lib/AI/MXNet/Metric.pm view on Meta::CPAN
package AI::MXNet::Metric;
use strict;
use warnings;
use AI::MXNet::Function::Parameters;
use Scalar::Util qw/blessed/;
=head1 NAME
AI::MXNet::Metric - Online evaluation metric module.
=cut
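# A hedged usage sketch (not part of this module); the registered short name
# 'acc' and the label/prediction NDArrays are assumptions.
#
#     my $acc = AI::MXNet::Metric->create('acc');
#     $acc->update([$label_ndarray], [$pred_ndarray]);
#     my ($name, $value) = $acc->get;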
# Calculate the leading (batch) dimension of an input; used to check that two arrays are the same size.
sub _calculate_shape
{
my $input = shift;
my ($shape);
if(blessed($input))
{
if($input->isa('PDL'))
{
$shape = $input->shape->at(-1);
}
else
{
$shape = $input->shape->[0];
}
}
lib/AI/MXNet/Module.pm view on Meta::CPAN
func _create_kvstore(
Maybe[Str|AI::MXNet::KVStore] $kvstore,
Int $num_device,
HashRef[AI::MXNet::NDArray] $arg_params
)
{
my $update_on_kvstore = 1;
my $kv;
if(defined $kvstore)
{
if(blessed $kvstore)
{
$kv = $kvstore;
}
else
{
# create kvstore using the string type
if($num_device == 1 and $kvstore !~ /dist/)
{
# no need to use kv for single device and single machine
}
lib/AI/MXNet/Module.pm view on Meta::CPAN
return $class->$orig(symbol => $symbol, @_);
}
return $class->$orig(@_);
};
sub BUILD
{
my $self = shift;
$self->_p(AI::MXNet::Module::Private->new);
my $context = $self->context;
if(blessed $context)
{
$context = [$context];
}
$self->_p->_context($context);
my $work_load_list = $self->work_load_list;
if(not defined $work_load_list)
{
$work_load_list = [(1)x@{$self->_p->_context}];
}
assert(@{ $work_load_list } == @{ $self->_p->_context });
lib/AI/MXNet/Module.pm view on Meta::CPAN
scalar(@{$self->_p->_context}),
$self->_p->_arg_params
);
my $batch_size = $self->_p->_exec_group->_p->batch_size;
if($kvstore and $kvstore->type =~ /dist/ and $kvstore->type =~ /_sync/)
{
$batch_size *= $kvstore->num_workers;
}
my $rescale_grad = 1/$batch_size;
if(not blessed $optimizer)
{
my %idx2name;
if($update_on_kvstore)
{
@idx2name{ 0..@{$self->_p->_exec_group->param_names}-1 } = @{$self->_p->_exec_group->param_names};
}
else
{
for my $k (0..@{$self->_p->_context}-1)
{
lib/AI/MXNet/Module/Base.pm view on Meta::CPAN
}
}
method _parse_data_desc(
ArrayRef[Str] $data_names,
Maybe[ArrayRef[Str]] $label_names,
ArrayRef[NameShapeOrDataDesc] $data_shapes,
Maybe[ArrayRef[NameShapeOrDataDesc]] $label_shapes
)
{
$data_shapes = [map { blessed $_ ? $_ : AI::MXNet::DataDesc->new(@$_) } @$data_shapes];
$self->_check_names_match($data_names, $data_shapes, 'data', 1);
if($label_shapes)
{
$label_shapes = [map { blessed $_ ? $_ : AI::MXNet::DataDesc->new(@$_) } @$label_shapes];
$self->_check_names_match($label_names, $label_shapes, 'label', 0);
}
else
{
$self->_check_names_match($label_names, [], 'label', 0);
}
return ($data_shapes, $label_shapes);
}
=head1 DESCRIPTION
lib/AI/MXNet/Module/Base.pm view on Meta::CPAN
EvalMetric $eval_metric,
Maybe[Int] :$num_batch=,
Maybe[Callback]|ArrayRef[Callback] :$batch_end_callback=,
Maybe[Callback]|ArrayRef[Callback] :$score_end_callback=,
Bool :$reset=1,
Int :$epoch=0
)
{
assert($self->binded and $self->params_initialized);
$eval_data->reset if $reset;
if(not blessed $eval_metric or not $eval_metric->isa('AI::MXNet::EvalMetric'))
{
$eval_metric = AI::MXNet::Metric->create($eval_metric);
}
$eval_metric->reset();
my $actual_num_batch = 0;
my $nbatch = 0;
while(my $eval_batch = <$eval_data>)
{
last if (defined $num_batch and $nbatch == $num_batch);
lib/AI/MXNet/Module/Base.pm view on Meta::CPAN
kvstore => $kvstore,
optimizer => $optimizer,
optimizer_params => $optimizer_params
);
if(not defined $validation_metric)
{
$validation_metric = $eval_metric;
}
$eval_metric = AI::MXNet::Metric->create($eval_metric)
unless blessed $eval_metric;
################################################################################
# training loop
################################################################################
for my $epoch ($begin_epoch..$num_epoch-1)
{
my $tic = time;
$eval_metric->reset;
my $nbatch = 0;
my $end_of_batch = 0;
lib/AI/MXNet/Monitor.pm view on Meta::CPAN
{
my ($n, $k, $v_list) = @{ $q };
if(ref $v_list ne 'ARRAY')
{
$v_list = [$v_list];
}
my $s = '';
for my $v (@{ $v_list })
{
confess("the argument must be NDArray")
unless blessed($v) and $v->isa('AI::MXNet::NDArray');
if($v->size == 1)
{
$s .= $v->asscalar . "\t";
}
else
{
$s .= $v->aspdl . "\t";
}
}
push @res, [$n, $k, $s];
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
method set(AcceptableInput $value, $reverse=)
{
confess("set value must be defined") unless defined $value;
confess("Array is not writable") if not $self->writable;
## plain number
if(not ref $value)
{
$self->_set_value($value, { out => $self });
}
# ndarray
elsif(blessed($value) and $value->isa(__PACKAGE__))
{
$value->copyto($self);
}
# slice of another ndarray
elsif(blessed($value) and $value->isa('AI::MXNet::NDArray::Slice'))
{
$value->sever->copyto($self);
}
# perl array, PDL, PDL::Matrix
else
{
$self->_sync_copyfrom($value);
}
return $self;
}
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
method asscalar()
{
confess("ndarray size must be 1") unless $self->size == 1;
return $self->aspdl->at(0);
}
method _sync_copyfrom(ArrayRef|PDL|PDL::Matrix $source_array)
{
my $dtype = $self->dtype;
my $pdl_type = PDL::Type->new(DTYPE_MX_TO_PDL->{ $dtype });
if(not blessed($source_array))
{
$source_array = eval {
pdl($pdl_type, $source_array);
};
confess($@) if $@;
}
if($pdl_type->numval != $source_array->type->numval)
{
my $convert_func = $pdl_type->convertfunc;
$source_array = $source_array->$convert_func;
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
other : NDArray or Context
Target NDArray or context we want to copy data to.
Returns
-------
dst : NDArray
=cut
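# A hedged usage sketch (not part of this module): copying to another context
# allocates a new destination array on that device.
#
#     my $a = AI::MXNet::NDArray->ones([2, 2]);
#     my $b = $a->copyto(AI::MXNet::Context->new('cpu'));   # new NDArray on cpu(0)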
method copyto(AI::MXNet::Context|AI::MXNet::NDArray $other)
{
if(blessed($other) and $other->isa('AI::MXNet::Context'))
{
my $hret = __PACKAGE__->empty(
$self->shape,
ctx => $other,
dtype => $self->dtype
);
return __PACKAGE__->_copyto($self, { out => $hret });
}
else
{
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
The dtype of the NDArray, defaults to 'float32'.
Returns
-------
out : NDArray
The created NDArray.
=cut
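# A hedged usage sketch (not part of this module): building NDArrays from a
# Perl array ref and from a PDL object.
#
#     my $x = AI::MXNet::NDArray->array([[1, 2], [3, 4]]);             # 2x2, float32
#     my $y = AI::MXNet::NDArray->array(PDL->sequence(6), dtype => 'int32');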
method array(PDL|PDL::Matrix|ArrayRef|AI::MXNet::NDArray $source_array, AI::MXNet::Context :$ctx=AI::MXNet::Context->current_ctx, Dtype :$dtype='float32')
{
if(blessed $source_array and $source_array->isa('AI::MXNet::NDArray'))
{
my $arr = __PACKAGE__->empty($source_array->shape, ctx => $ctx, dtype => $dtype);
$arr .= $source_array;
return $arr;
}
my $pdl_type = PDL::Type->new(DTYPE_MX_TO_PDL->{ $dtype });
if(not blessed($source_array))
{
$source_array = eval {
pdl($pdl_type, $source_array);
};
confess($@) if $@;
}
$source_array = pdl($pdl_type, [@{ $source_array->unpdl } ? $source_array->unpdl->[0] : 0 ]) unless @{ $source_array->shape->unpdl };
my $shape = $source_array->shape->unpdl;
my $arr = __PACKAGE__->empty([ref($source_array) eq 'PDL' ? reverse @{ $shape } : @{ $shape }], ctx => $ctx, dtype => $dtype );
$arr .= $source_array;
lib/AI/MXNet/NDArray/Base.pm view on Meta::CPAN
@args = @_;
if(ref $class)
{
@args = ($class) if not @args;
$class = ref $class;
}
my @ndargs;
my @pos_args;
for my $i (@args)
{
if(blessed($i) and $i->isa($class))
{
push @ndargs, $i->handle;
}
else
{
push @pos_args, $i;
}
if(@pos_args > @arguments)
{
die "Too many positional arguments";
lib/AI/MXNet/NDArray/Slice.pm view on Meta::CPAN
confess("set value must be defined") unless defined $value;
confess("${\ $self->parent } is not writable") unless $self->parent->writable;
my $shape = [];
zip(
sub { my ($begin, $end) = @_; push @$shape, ($end-$begin); },
$self->begin,
$self->end
);
if(ref $value)
{
if(blessed($value) and $value->isa('AI::MXNet::NDArray'))
{
$value = $value->as_in_context($self->parent->context);
}
elsif(blessed($value) and $value->isa('AI::MXNet::NDArray::Slice'))
{
$value = $value->sever->as_in_context($self->parent->context);
}
else
{
$value = AI::MXNet::NDArray->array($value, ctx => $self->parent->context);
}
confess("value $value does not match slice dim sizes [@$shape]")
if @{$value->shape} != @$shape;
zip(
lib/AI/MXNet/Optimizer.pm view on Meta::CPAN
{
$self->states->{ $index } = $self->sync_state_context($self->states->{ $index }, $weight->context);
$self->states_synced->{ $index } = 1;
}
$self->optimizer->update($index, $weight, $grad, $self->states->{ $index });
}
*slice = *call;
method sync_state_context(Maybe[AI::MXNet::NDArray|ArrayRef[AI::MXNet::NDArray]] $state, AI::MXNet::Context $context)
{
if(blessed $state)
{
return $state->as_in_context($context);
}
elsif(ref $state)
{
return [map { $self->sync_state_context($_, $context) } @{ $state }];
}
return $state;
}
lib/AI/MXNet/RNN/Cell.pm view on Meta::CPAN
)
{
$self->reset;
my $axis = index($layout, 'T');
if(not defined $inputs)
{
$inputs = [
map { AI::MXNet::Symbol->Variable("${input_prefix}t${_}_data") } (0..$length-1)
];
}
elsif(blessed($inputs))
{
assert(
(@{ $inputs->list_outputs() } == 1),
"unroll doesn't allow grouped symbol as input. Please "
."convert to list first or let unroll handle slicing"
);
$inputs = AI::MXNet::Symbol->SliceChannel(
$inputs,
axis => $axis,
num_outputs => $length,
lib/AI/MXNet/RNN/Cell.pm view on Meta::CPAN
Maybe[AI::MXNet::Symbol|ArrayRef[AI::MXNet::Symbol]] :$inputs=,
Maybe[AI::MXNet::Symbol|ArrayRef[AI::MXNet::Symbol]] :$begin_state=,
Str :$input_prefix='',
Str :$layout='NTC',
Maybe[Bool] :$merge_outputs=
)
{
$self->reset;
my $axis = index($layout, 'T');
$inputs //= AI::MXNet::Symbol->Variable("${input_prefix}data");
if(blessed($inputs))
{
assert(
(@{ $inputs->list_outputs() } == 1),
"unroll doesn't allow grouped symbol as input. Please "
."convert to list first or let unroll handle slicing"
);
if($axis == 1)
{
AI::MXNet::Logging->warning(
"NTC layout detected. Consider using "
lib/AI/MXNet/RNN/Cell.pm view on Meta::CPAN
=cut
has 'l_cell' => (is => 'ro', isa => 'AI::MXNet::RNN::Cell::Base', required => 1);
has 'r_cell' => (is => 'ro', isa => 'AI::MXNet::RNN::Cell::Base', required => 1);
has '_output_prefix' => (is => 'ro', init_arg => 'output_prefix', isa => 'Str', default => 'bi_');
has [qw/_override_cell_params _cells/] => (is => 'rw', init_arg => undef);
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
if(@_ >= 2 and blessed $_[0] and blessed $_[1])
{
my $l_cell = shift(@_);
my $r_cell = shift(@_);
return $class->$orig(
l_cell => $l_cell,
r_cell => $r_cell,
@_
);
}
return $class->$orig(@_);
lib/AI/MXNet/RNN/Cell.pm view on Meta::CPAN
)
{
my $axis = index($layout, 'T');
if(not defined $inputs)
{
$inputs = [
map { AI::MXNet::Symbol->Variable("${input_prefix}t${_}_data") } (0..$length-1)
];
}
elsif(blessed($inputs))
{
assert(
(@{ $inputs->list_outputs() } == 1),
"unroll doesn't allow grouped symbol as input. Please "
."convert to list first or let unroll handle slicing"
);
$inputs = [ @{ AI::MXNet::Symbol->SliceChannel(
$inputs,
axis => $axis,
num_outputs => $length,
lib/AI/MXNet/RNN/Cell.pm view on Meta::CPAN
);
my ($r_outputs, $r_states) = $r_cell->unroll(
$length, inputs => [reverse @{$inputs}],
begin_state => [@{$states}[@{$l_cell->state_info}..@{$states}-1]],
layout => $layout,
merge_outputs => $merge_outputs
);
if(not defined $merge_outputs)
{
$merge_outputs = (
blessed $l_outputs and $l_outputs->isa('AI::MXNet::Symbol')
and
blessed $r_outputs and $r_outputs->isa('AI::MXNet::Symbol')
);
if(not $merge_outputs)
{
if(blessed $l_outputs and $l_outputs->isa('AI::MXNet::Symbol'))
{
$l_outputs = [
@{ AI::MXNet::Symbol->SliceChannel(
$l_outputs, axis => $axis,
num_outputs => $length,
squeeze_axis => 1
) }
];
}
if(blessed $r_outputs and $r_outputs->isa('AI::MXNet::Symbol'))
{
$r_outputs = [
@{ AI::MXNet::Symbol->SliceChannel(
$r_outputs, axis => $axis,
num_outputs => $length,
squeeze_axis => 1
) }
];
}
}
}
if($merge_outputs)
{
$l_outputs = [@{ $l_outputs }];
$r_outputs = [@{ AI::MXNet::Symbol->reverse(blessed $r_outputs ? $r_outputs : @{ $r_outputs }, axis=>$axis) }];
}
else
{
$r_outputs = [reverse(@{ $r_outputs })];
}
my $outputs = [];
zip(sub {
my ($i, $l_o, $r_o) = @_;
push @$outputs, AI::MXNet::Symbol->Concat(
$l_o, $r_o, dim=>(1+($merge_outputs?1:0)),
lib/AI/MXNet/RNN/Cell.pm view on Meta::CPAN
Str :$input_prefix='',
Str :$layout='NTC',
Maybe[Bool] :$merge_outputs=
)
{
$self->reset;
$self->base_cell->_modified(0);
my ($outputs, $states) = $self->base_cell->unroll($length, inputs=>$inputs, begin_state=>$begin_state,
layout=>$layout, merge_outputs=>$merge_outputs);
$self->base_cell->_modified(1);
$merge_outputs //= (blessed($outputs) and $outputs->isa('AI::MXNet::Symbol'));
($inputs) = _normalize_sequence($length, $inputs, $layout, $merge_outputs);
if($merge_outputs)
{
$outputs = AI::MXNet::Symbol->elemwise_add($outputs, $inputs, name => $outputs->name . "_plus_residual");
}
else
{
my @temp;
zip(sub {
my ($output_sym, $input_sym) = @_;
lib/AI/MXNet/RNN/Cell.pm view on Meta::CPAN
func _normalize_sequence($length, $inputs, $layout, $merge, $in_layout=)
{
assert((defined $inputs),
"unroll(inputs=>undef) has been deprecated. ".
"Please create input variables outside unroll."
);
my $axis = index($layout, 'T');
my $in_axis = defined $in_layout ? index($in_layout, 'T') : $axis;
if(blessed($inputs))
{
if(not $merge)
{
assert(
(@{ $inputs->list_outputs() } == 1),
"unroll doesn't allow grouped symbol as input. Please "
."convert to list first or let unroll handle splitting"
);
$inputs = [ @{ AI::MXNet::Symbol->split(
$inputs,
lib/AI/MXNet/RNN/Cell.pm view on Meta::CPAN
{
assert(not defined $length or @$inputs == $length);
if($merge)
{
$inputs = [map { AI::MXNet::Symbol->expand_dims($_, axis=>$axis) } @{ $inputs }];
$inputs = AI::MXNet::Symbol->Concat(@{ $inputs }, dim=>$axis);
$in_axis = $axis;
}
}
if(blessed($inputs) and $axis != $in_axis)
{
$inputs = AI::MXNet::Symbol->swapaxes($inputs, dim0=>$axis, dim1=>$in_axis);
}
return ($inputs, $axis);
}
1;
lib/AI/MXNet/RecordIO.pm view on Meta::CPAN
----------
$header : AI::MXNet::IRHeader or ArrayRef suitable for AI::MXNet::IRHeader->new(@{ ArrayRef })
header of the image record.
$header->label can be a number or an array ref.
s : str
string to pack
=cut
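# A hedged usage sketch (not part of this module); the invoking package, the
# header field order (flag, label, id, id2), and the payload are assumptions.
#
#     my $header = [0, 42, 0, 0];          # flag, label, id, id2
#     my $record = AI::MXNet::RecordIO->pack($header, $raw_image_bytes);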
method pack(AI::MXNet::IRHeader|ArrayRef $header, Str $s)
{
$header = AI::MXNet::IRHeader->new(@$header) unless blessed $header;
if(not ref $header->label)
{
$header->flag(0);
}
else
{
my $label = AI::MXNet::NDArray->array($header->label, dtype=>'float32')->aspdl;
$header->label(0);
$header->flag($label->nelem);
my $buf = ${$label->get_dataref};
lib/AI/MXNet/Symbol/AttrScope.pm view on Meta::CPAN
The attributes passed in by the user during symbol creation.
Returns
-------
$attr : HashRef[Str]
The attributes updated to include the scope-related attributes.
=cut
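# A hedged usage sketch (not part of this module); $scope stands for the
# current AI::MXNet::Symbol::AttrScope instance.
#
#     my $merged = $scope->get({ lr_mult => '0.1' });   # scope attrs merged with lr_mult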
method get(Maybe[HashRef[Str]] $attr=)
{
return bless($attr//{}, 'AI::MXNet::Util::Printable') unless %{ $self->attr };
my %ret = (%{ $self->attr }, %{ $attr//{} });
return bless (\%ret, 'AI::MXNet::Util::Printable');
}
$AI::MXNet::curr_attr_scope = __PACKAGE__->new;
lib/AI/MXNet/Symbol/Base.pm view on Meta::CPAN
{
push @args, shift(@_);
}
%kwargs = @_;
my $name = delete $kwargs{'name'};
if(@args and %kwargs)
{
confess("_compose only accept input Symbols \
either as positional or keyword arguments, not both");
}
if(grep { not blessed($_) or not $_->isa(__PACKAGE__) } (@args, values %kwargs))
{
confess("_compose expect 'Symbol' as arguments");
}
my $num_args = scalar(@args) + scalar(keys %kwargs);
my $keys = [];
my $args = [];
for my $key (keys %kwargs)
{
push @$keys, $key;
lib/AI/MXNet/Symbol/Base.pm view on Meta::CPAN
$ret_type
);
my $creator = sub {
my $class = shift;
my (@args, %kwargs);
if(
@_
and
ref $_[-1] eq 'HASH'
and
not (@_ >= 2 and not blessed $_[-2] and $_[-2] eq 'attr')
)
{
%kwargs = %{ pop(@_) };
@args = @_;
}
elsif(blessed $_[0] and $_[0]->isa(__PACKAGE__))
{
while(blessed $_[0] and $_[0]->isa(__PACKAGE__))
{
push @args, shift(@_);
}
%kwargs = @_;
}
else
{
%kwargs = @_;
}
my $params = {};
lib/AI/MXNet/Symbol/Base.pm view on Meta::CPAN
{
$params->{ $key_var_num_args } = scalar(@args);
}
for my $key (keys %kwargs)
{
$kwargs{ $key } = "(" .join(", ", @{ $kwargs{ $key } }) .")"
if ref $kwargs{ $key } eq 'ARRAY';
}
while(my ($k, $v) = each %kwargs)
{
if(blessed($v) and $v->isa(__PACKAGE__))
{
$symbol_kwargs->{ $k } = $v;
}
else
{
$params->{ $k } = "$v";
}
}
# create atomic symbol
my $sym_handle = check_call(
lib/AI/MXNet/Symbol/Doc.pm view on Meta::CPAN
use AI::MXNet::Function::Parameters;
use Exporter;
use base qw(Exporter);
@AI::MXNet::Symbol::Doc::EXPORT = qw/build_doc/;
method get_output_shape(AI::MXNet::Symbol $sym, %input_shapes)
{
my $s_outputs = $sym->infer_shape(%input_shapes);
my %ret;
@ret{ @{ $sym->list_outputs() } } = @$s_outputs;
return bless \%ret, 'AI::MXNet::Util::Printable';
}
func build_doc(
Str $func_name,
Str $desc,
ArrayRef[Str] $arg_names,
ArrayRef[Str] $arg_types,
ArrayRef[Str] $arg_desc,
Str $key_var_num_args=,
Str $ret_type=
lib/AI/MXNet/TestUtils.pm view on Meta::CPAN
package AI::MXNet::TestUtils;
use strict;
use warnings;
use PDL;
use Carp;
use Scalar::Util qw(blessed);
use AI::MXNet::Function::Parameters;
use Exporter;
use base qw(Exporter);
@AI::MXNet::TestUtils::EXPORT_OK = qw(same reldiff almost_equal GetMNIST_ubyte
GetCifar10 pdl_maximum pdl_minimum mlp2 conv
check_consistency zip assert enumerate same_array dies_like);
use constant default_numerical_threshold => 1e-6;
=head1 NAME
AI::MXNet::TestUtils - Convenience subs used in tests.
lib/AI/MXNet/TestUtils.pm view on Meta::CPAN
{
`wget http://data.mxnet.io/mxnet/data/cifar10.zip -P data`;
chdir 'data';
`unzip -u cifar10.zip`;
chdir '..';
}
}
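# Helper behind pdl_maximum/pdl_minimum: builds the elementwise max or min of
# $a against $b, where $b may be a PDL or a plain number broadcast to $a's shape.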
func _pdl_compare(PDL $a, PDL|Num $b, Str $criteria)
{
if(not blessed $b)
{
my $tmp = $b;
$b = $a->copy;
$b .= $tmp;
}
my $mask = {
'max' => sub { $_[0] < $_[1] },
'min' => sub { $_[0] > $_[1] },
}->{$criteria}->($a, $b);
my $c = $a->copy;
lib/AI/MXNet/TestUtils.pm view on Meta::CPAN
};
$tol = {
float16 => $tol,
float32 => $tol,
float64 => $tol,
uint8 => $tol,
int32 => $tol
} unless ref $tol;
Test::More::ok(@$ctx_list > 1);
if(blessed $sym)
{
$sym = [($sym)x@$ctx_list];
}
else
{
Test::More::ok(@$sym == @$ctx_list);
}
my $output_names = $sym->[0]->list_outputs;
my $arg_names = $sym->[0]->list_arguments;
my @exe_list;