AI-MXNet
view release on metacpan or search on metacpan
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
package AI::MXNet::NDArray;
=head1 NAME
AI::MXNet::NDArray - Multidimensional tensor object of MXNet.
=cut
use strict;
use warnings;
use AI::MXNet::Base;
use AI::MXNet::NDArray::Slice;
use AI::MXNet::Context;
use Mouse;
use AI::MXNet::Function::Parameters;
# Operator overloading: arithmetic, in-place, comparison and assignment
# operators dispatch to the correspondingly named methods defined below.
# Fixed two HTML-entity decoding corruptions: '\÷' restored to \&divide
# and '\¬_equal' restored to \&not_equal ('&divide;' / '&not;' mojibake).
use overload
    '""' => \&stringify,
    '+'  => \&add,
    '+=' => \&iadd,
    '-'  => \&subtract,
    '-=' => \&isubtract,
    '*'  => \&multiply,
    '*=' => \&imultiply,
    '/'  => \&divide,
    '/=' => \&idivide,
    '%'  => \&modulo,
    '%=' => \&imodulo,
    '**' => \&power,
    '==' => \&equal,
    '!=' => \&not_equal,
    '>'  => \&greater,
    '>=' => \&greater_equal,
    '<'  => \&lesser,
    '<=' => \&lesser_equal,
    '.=' => \&set,
    # assignment returns the object itself (no copy-on-assign semantics)
    '='  => sub { $_[0] };
extends 'AI::MXNet::NDArray::Base';
has 'writable' => (is => 'rw', isa => 'Int', default => 1, lazy => 1);
has 'handle' => (is => 'rw', isa => 'NDArrayHandle', required => 1);
sub DEMOLISH
{
    # Mouse destructor: release the underlying C ndarray handle
    # when the Perl object is garbage-collected.
    my ($self) = @_;
    check_call(AI::MXNetCAPI::NDArrayFree($self->handle));
}
method STORABLE_freeze($cloning)
{
    # Storable serialization hook: dump the ndarray to MXNet's raw byte
    # format. Per the Storable protocol, extra state (here the writable
    # flag) is returned as a reference after the serialized string.
    my $buf = check_call(AI::MXNetCAPI::NDArraySaveRawBytes($self->handle));
    return ($buf,\ $self->writable);
}
method STORABLE_thaw($cloning, $buf, $writable)
{
    # Storable deserialization hook: rebuild the C-side ndarray from the
    # raw bytes produced by STORABLE_freeze and restore the writable flag
    # ($writable arrives as a scalar ref, hence the dereference below).
    my $handle = check_call(
        AI::MXNetCAPI::NDArrayLoadFromRawBytes(
            $buf, length($buf)
        )
    );
    $self->handle($handle);
    $self->writable($$writable);
}
method at(Index @indices)
{
    # Index the ndarray: a single index returns a shared-memory sub-array
    # along dimension 0 (via _at); a full set of indices — one per
    # dimension — performs a crop (via slice). Negative indices count
    # from the end of their dimension.
    confess("No idxs supplied") unless @indices;
    my $shape = $self->shape;
    my $dsize = @$shape;
    my $isize = @indices;
    confess("Dimensions size $dsize < indexes size $isize")
        if $dsize < $isize;
    # Fixed: the message used to read "size $dsize = indexes size" even
    # though the failing condition is inequality (matches slice()'s wording).
    confess("Dimensions size $dsize != indexes size $isize,
ndarray only supports either ->at on dimension 0
or full crop")
        if $isize > 1 and $dsize != $isize;
    # Validate each index against its dimension; negatives are allowed
    # down to -dim_size.
    my $i = 0;
    zip(sub {
        my ($idx, $dim_size) = @_;
        confess("Dimension $i mismatch Idx: $idx >= Dim Size: $dim_size")
            if $idx >= $dim_size or ($idx + $dim_size) < 0;
        ++$i;
    }, \@indices, $shape);
    # Normalize negative indices to their positive equivalents.
    $i = 0;
    for my $v (@indices)
    {
        $v += $shape->[$i] if $v < 0;
        ++$i;
    }
    return $self->_at($indices[0]) if @indices == 1;
    return $self->slice(@indices);
}
method slice(Slice @slices)
{
    # Slice the ndarray. Each entry of @slices is either a plain index,
    # the string 'X' (take the whole dimension), or an [begin, end]
    # arrayref with INCLUSIVE bounds. A single slice operates on
    # dimension 0; otherwise one slice per dimension is required.
    confess("No slices supplied") unless @slices;
    my $shape = $self->shape;
    my $dsize = @$shape;
    my $isize = @slices;
    confess("Dimensions size $dsize < slices size $isize")
        if $dsize < $isize;
    confess("Dimensions size $dsize != slices size $isize,
ndarray only supports either ->slice on dimension 0
or full crop")
        if $isize > 1 and $dsize != $isize;
    # Canonicalize every entry to an [begin, end] pair.
    my $i = -1;
    @slices = map {
        ++$i;
        ref $_ ? (@$_ == 1 ? [$_->[0], $shape->[$i] - 1] : $_) : ($_ eq 'X' ? [0, $shape->[$i] - 1] : [$_, $_]);
    } @slices;
    # Fixed: the confess messages below previously interpolated the stale
    # counter left by the map above ($i == $#slices for every slice), so
    # the reported dimension number was wrong; reset and advance it per
    # slice, mirroring at().
    $i = 0;
    zip(sub {
        my ($slice, $dim_size) = @_;
        my ($begin, $end, $stride) = @$slice;
        confess("NDArray does not support slice strides != 1")
            if ($stride//0) > 1;
        confess("Dimension $i mismatch slice begin : $begin >= Dim Size: $dim_size")
            if $begin >= $dim_size or ($begin + $dim_size) < 0;
        confess("Dimension $i mismatch slice end : $end >= Dim Size: $dim_size")
            if $end >= $dim_size or ($end + $dim_size) < 0;
        ++$i;
    }, \@slices, $shape);
    # Normalize negative bounds and convert the inclusive ends into the
    # exclusive form the C API / Slice object expect.
    $i = 0;
    my ($begin, $end) = ([], []);
    for my $s (@slices)
    {
        $s->[0] += $shape->[$i] if $s->[0] < 0;
        $s->[1] += $shape->[$i] if $s->[1] < 0;
        confess("Dimension $i slice mismatch (begin $s->[0] > end $s->[1])")
            if($s->[0] > $s->[1]);
        push @$begin, $s->[0];
        push @$end, $s->[1] + 1;
        $i++;
    }
    return $self->_slice($begin->[0], $end->[0]) if @slices == 1;
    return AI::MXNet::NDArray::Slice->new(parent => $self, begin => $begin, end => $end);
}
method set(AcceptableInput $value, $reverse=)
{
    # Implements the overloaded '.=' operator: copies $value into $self
    # in place. Dispatches on the type of $value and returns $self.
    confess("set value must be defined") unless defined $value;
    confess("Array is not writable") if not $self->writable;
    ## plain number
    if(not ref $value)
    {
        # broadcast the scalar into every element of $self
        $self->_set_value($value, { out => $self });
    }
    # ndarray
    elsif(blessed($value) and $value->isa(__PACKAGE__))
    {
        $value->copyto($self);
    }
    # slice of another ndarray
    elsif(blessed($value) and $value->isa('AI::MXNet::NDArray::Slice'))
    {
        # materialize the slice first, then copy its contents over
        $value->sever->copyto($self);
    }
    # perl array, PDL, PDL::Matrix
    else
    {
        $self->_sync_copyfrom($value);
    }
    return $self;
}
method asscalar()
{
    # Return the sole element of a size-1 ndarray as a Perl scalar.
    $self->size == 1 or confess("ndarray size must be 1");
    return $self->aspdl->at(0);
}
method _sync_copyfrom(ArrayRef|PDL|PDL::Matrix $source_array)
{
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
Returns a copied PDL::Matrix object of the current array.
Requires caller to "use PDL::Matrix" in user space.
Returns
-------
array : PDL::Matrix
A copy of array content.
=cut
method asmpdl()
{
    # Copy the ndarray contents into a freshly allocated PDL::Matrix.
    # NOTE(review): the caller must have loaded PDL::Matrix (see POD above).
    my $dtype = $self->dtype;
    my $pdl_type = PDL::Type->new(DTYPE_MX_TO_PDL->{ $dtype });
    my $pdl = PDL::Matrix->new_from_specification($pdl_type, @{ $self->shape });
    my $perl_pack_type = DTYPE_MX_TO_PERL->{$dtype};
    # preallocate a zero-filled buffer for MXNet to copy into synchronously
    my $buf = pack("$perl_pack_type*", (0)x$self->size);
    check_call(AI::MXNetCAPI::NDArraySyncCopyToCPU($self->handle, $buf, $self->size));
    ## special handling for float16
    # float16 data arrives packed as uint16 ('S'); widen each half-float
    # to a native 32-bit float, since PDL has no half-precision type
    if($perl_pack_type eq 'S')
    {
        $buf = pack("f*", map { AI::MXNetCAPI::_half_to_float($_) } unpack("S*", $buf));
    }
    # splice the raw buffer into the piddle and refresh its internal state
    ${$pdl->get_dataref} = $buf;
    $pdl->upd_data;
    return $pdl;
}
=head2 _slice
Returns sliced NDArray that shares memory with the current one.
Parameters
----------
start : int
Starting index of slice.
stop : int
Finishing index of slice.
=cut
method _slice (
    Index $start,
    Index $stop
)
{
    # Build a view sharing memory with $self over [$start, $stop)
    # along dimension 0; the view inherits the writable flag.
    confess("start $start > stop $stop") if $start > $stop;
    my $view_handle = check_call(
        AI::MXNetCAPI::NDArraySlice($self->handle, $start, $stop)
    );
    return __PACKAGE__->new(handle => $view_handle, writable => $self->writable);
}
=head2 _at
Returns a sub NDArray that shares memory with current one.
Parameters
----------
idx : int
index of the sub array.
=cut
method _at(Index $idx)
{
    # Sub-array view at position $idx of dimension 0, sharing memory
    # with $self. Negative indices count back from the end.
    my $pos = $idx >= 0 ? $idx : $self->shape->[0] + $idx;
    my $sub_handle = check_call(
        AI::MXNetCAPI::NDArrayAt($self->handle, $pos)
    );
    return __PACKAGE__->new(handle => $sub_handle, writable => $self->writable);
}
=head2 reshape
Returns a reshaped NDArray that shares the memory with current one.
One shape dimension can be -1. In this case, the value is inferred
from the length of the array and remaining dimensions.
Parameters
----------
new_shape : Shape
new shape of NDArray
=cut
method reshape(ArrayRef[Int] $new_shape)
{
    # Collect positions of -1 entries (dimensions to be inferred).
    my $i = -1;
    my @inferred = map { $i++; $_ == -1 ? ($i) : () } @$new_shape;
    assert((@inferred <= 1), 'Only one dimension can be inferred.');
    if(@inferred)
    {
        # total size / product of the known dims; abs() maps the -1
        # placeholder to 1 so it doesn't skew the product.
        # NOTE(review): this writes the inferred value back into the
        # caller's arrayref.
        $new_shape->[$inferred[0]] = product(@{ $self->shape })/product(map { abs($_) } @{ $new_shape });
    }
    my $handle = check_call(
        AI::MXNetCAPI::NDArrayReshape(
            $self->handle,
            scalar(@$new_shape),
            $new_shape
        )
    );
    # the reshaped view shares memory with $self
    return __PACKAGE__->new(handle => $handle, writable => $self->writable);
}
=head2 ndim
Returns the number of dimensions of this array.
=cut
method ndim()
{
    # The number of dimensions equals the length of the shape array.
    return scalar(@{ $self->shape });
}
=head2 moveaxis
Moves the 'source' axis into the 'destination' position
while leaving the other axes in their original order
Parameters
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
A copy or self as an NDArray in the target context.
=cut
method as_in_context(AI::MXNet::Context $context)
{
    # Return $self unchanged when it already lives on $context, otherwise
    # a copy on the target context.
    # NOTE(review): contexts are compared with '==' — presumably an
    # overloaded operator on AI::MXNet::Context; verify.
    return $self if $self->context == $context;
    return $self->copyto($context);
}
=head2 onehot_encode
One hot encoding indices into matrix out.
Parameters
----------
indices: NDArray
An NDArray containing indices of the categorical features.
out: NDArray
The result of the encoding.
Returns
-------
$out: NDArray
=cut
method onehot_encode(AI::MXNet::NDArray $indices, AI::MXNet::NDArray $out)
{
    # One-hot encode $indices, writing the result directly into $out
    # ($out is passed both positionally and as the 'out' kwarg) and
    # returning it.
    return __PACKAGE__->_onehot_encode($indices, $out, { out => $out });
}
=head2 _ufunc_helper(lhs, rhs, fn_array, lfn_scalar, rfn_scalar):
Helper function for element-wise operation
The function will perform numpy-like broadcasting if needed and call different functions
Parameters
----------
lhs : NDArray or numeric value
left hand side operand
rhs : NDArray or numeric value
right hand side operand
fn_array : function
function to be called if both lhs and rhs are of NDArray type
lfn_scalar : function
function to be called if lhs is NDArray while rhs is numeric value
rfn_scalar : function
function to be called if lhs is numeric value while rhs is NDArray;
if none is provided, then the function is commutative, so rfn_scalar is equal to lfn_scalar
Returns
-------
out: NDArray
result array
=cut
sub _ufunc_helper
{
    # Dispatch an element-wise binary op to the right implementation:
    # array-array, array-scalar, or reversed scalar-array (see POD above).
    my ($lhs, $rhs, $fn_array, $lfn_scalar, $rfn_scalar, $reverse) = @_;
    # Non-commutative op invoked reversed by overload (e.g. 2 - $nd):
    # swap the operands so the scalar ends up on the left below.
    ($rhs, $lhs) = ($lhs, $rhs) if $reverse and $rfn_scalar;
    if(not ref $lhs)
    {
        # scalar <op> ndarray
        if(not $rfn_scalar)
        {
            # no reversed variant supplied => op is commutative;
            # reuse the left-scalar implementation
            return __PACKAGE__->can($lfn_scalar)->(__PACKAGE__, $rhs, $lhs);
        }
        else
        {
            return __PACKAGE__->can($rfn_scalar)->(__PACKAGE__, $rhs, $lhs);
        }
    }
    elsif(not ref $rhs)
    {
        # ndarray <op> scalar
        return __PACKAGE__->can($lfn_scalar)->(__PACKAGE__, $lhs, $rhs);
    }
    else
    {
        # ndarray <op> ndarray
        return __PACKAGE__->can($fn_array)->(__PACKAGE__, $lhs, $rhs);
    }
}
method stringify($other=, $reverse=)
{
    # Overloaded '""': e.g. "<AI::MXNet::NDArray 2x3 @cpu(0)>".
    my $dims = join('x', @{ $self->shape });
    return sprintf("<%s %s @%s>", ref($self), $dims, $self->context);
}
method iadd(AI::MXNet::NDArray|Num $other, $reverse=)
{
    # Overloaded '+=': writes the sum back into $self via the 'out' kwarg.
    confess('trying to add to a readonly NDArray') unless $self->writable;
    if(ref $other)
    {
        return __PACKAGE__->broadcast_add($self, $other, { out => $self });
    }
    return __PACKAGE__->_plus_scalar($self, $other, { out => $self });
}
method add(AI::MXNet::NDArray|Num $other, $reverse=)
{
    # Overloaded '+'. Addition is commutative, so no reversed scalar
    # variant is passed to the dispatcher.
    return _ufunc_helper(
        $self, $other,
        'broadcast_add', '_plus_scalar'
    );
}
method subtract(AI::MXNet::NDArray|Num $other, $reverse=)
{
    # Overloaded '-'. Not commutative: '_rminus_scalar' handles the
    # reversed case (scalar - ndarray) signalled by $reverse.
    return _ufunc_helper(
        $self, $other,
        'broadcast_sub', '_minus_scalar', '_rminus_scalar',
        $reverse
    );
}
method isubtract(AI::MXNet::NDArray|Num $other, $reverse=)
{
    # Overloaded '-=': writes the difference back into $self.
    # Fixed: the error message previously said "add" (copy-paste from iadd).
    confess('trying to subtract from a readonly NDArray') unless $self->writable;
    return ref $other
        ? __PACKAGE__->broadcast_sub($self, $other, { out => $self })
        : __PACKAGE__->_minus_scalar($self, $other, { out => $self })
}
method multiply(AI::MXNet::NDArray|Num $other, $reverse=)
{
    # Overloaded '*'. Multiplication is commutative, so no reversed
    # scalar variant is needed.
    return _ufunc_helper(
        $self, $other,
        'broadcast_mul', '_mul_scalar'
    );
}
method imultiply(AI::MXNet::NDArray|Num $other, $reverse=)
{
    # Overloaded '*=': writes the product back into $self.
    # Fixed: the error message previously said "add" (copy-paste from iadd).
    confess('trying to multiply a readonly NDArray') unless $self->writable;
    return ref $other
        ? __PACKAGE__->broadcast_mul($self, $other, { out => $self })
        : __PACKAGE__->_mul_scalar($self, $other, { out => $self })
}
method divide(AI::MXNet::NDArray|Num $other, $reverse=)
{
    # Overloaded '/'. Not commutative: '_rdiv_scalar' handles the
    # reversed case (scalar / ndarray) signalled by $reverse.
    return _ufunc_helper(
        $self, $other,
        'broadcast_div', '_div_scalar', '_rdiv_scalar',
        $reverse
    );
}
method idivide(AI::MXNet::NDArray|Num $other, $reverse=)
{
    # Overloaded '/=': writes the quotient back into $self.
    # Fixed: the error message previously said "add" (copy-paste from iadd).
    confess('trying to divide a readonly NDArray') unless $self->writable;
    return ref $other
        ? __PACKAGE__->broadcast_div($self, $other, { out => $self })
        : __PACKAGE__->_div_scalar($self, $other, { out => $self })
}
method power(AI::MXNet::NDArray|Num $other, $reverse=)
{
    # Overloaded '**'. Not commutative: '_rpower_scalar' handles the
    # reversed case (scalar ** ndarray) signalled by $reverse.
    return _ufunc_helper(
        $self, $other,
        'broadcast_power', '_power_scalar', '_rpower_scalar',
        $reverse
    );
}
method maximum(AI::MXNet::NDArray|Num $other)
{
return _ufunc_helper(
lib/AI/MXNet/NDArray.pm view on Meta::CPAN
}
}
else
{
@$handles = map { $_->handle } @$data;
}
check_call(
AI::MXNetCAPI::NDArraySave(
$filename,
scalar(@$handles),
$handles,
$names
)
);
}
=head2 imdecode
Decode an image from string. Requires OpenCV to work.
Parameters
----------
$str_img : str
binary image data
:$clip_rect : iterable of 4 int
clip decoded image to rectangle (x0, y0, x1, y1)
:$out= : Maybe[NDArray]
output buffer. can be 3 dimensional (c, h, w) or 4 dimensional (n, c, h, w)
:$index : int
output decoded image to i-th slice of 4 dimensional buffer
:$channels=3 : int
number of channels to output. Decode to grey scale when channels = 1.
:$mean= : Maybe[NDArray]
subtract mean from decode image before outputting.
=cut
method imdecode($str_img, ArrayRef[Int] :$clip_rect=[0, 0, 0, 0],
    Maybe[AI::MXNet::NDArray] :$out=, Int :$index=0, Int :$channels=3, Maybe[AI::MXNet::NDArray] :$mean=)
{
    # Decode a binary image string via MXNet (requires OpenCV); see the
    # POD above for parameter meanings. When no $mean is supplied, an
    # empty handle signals MXNet to skip mean subtraction. The decoded
    # image is written to $out when provided, otherwise a new buffer is
    # returned.
    return __PACKAGE__->_imdecode(
        $mean//__PACKAGE__->_new_empty_handle(),
        $index,
        @$clip_rect,
        $channels,
        length($str_img),
        { str_img => $str_img, ($out ? (out => $out) : ()) }
    );
}
=head2 _new_empty_handle
Returns a new empty handle.
Empty handle can be used to hold result
Returns
-------
a new empty ndarray handle
=cut
sub _new_empty_handle
{
    # Allocate a handle with no backing data; used as a placeholder to
    # receive operation results (see POD above).
    my $handle = check_call(AI::MXNetCAPI::NDArrayCreateNone());
    return $handle;
}
=head2 _new_alloc_handle
Returns a new handle with specified shape and context.
Empty handle is only used to hold results
Returns
-------
a new empty ndarray handle
=cut
func _new_alloc_handle($shape, $ctx, $delay_alloc, $dtype)
{
    # Allocate an ndarray handle of the given $shape/$dtype on context
    # $ctx; $delay_alloc defers the actual memory allocation in MXNet.
    my $handle = check_call(
        AI::MXNetCAPI::NDArrayCreateEx(
            $shape,
            scalar(@$shape),
            $ctx->device_type_id,
            $ctx->device_id,
            $delay_alloc,
            $dtype
        )
    );
    return $handle;
}
=head2 waitall
Wait for all async operations to finish in MXNet.
This function is used for benchmarks only.
=cut
method waitall()
{
    # Block until every queued asynchronous operation in MXNet has
    # completed (benchmarking aid; see POD above).
    check_call(AI::MXNetCAPI::NDArrayWaitAll());
}
=head2 _fresh_grad
Parameters:
----------
Maybe[Bool] $state=
Whether this array's corresponding gradient array
(registered via `autograd->mark_variables`) has been
updated by `autograd->backward` since last reset.
`_fresh_grad` need to be manually set to False
after consuming gradient (usually after updating this
array).
=cut
method _fresh_grad(Maybe[Bool] $state=)
{
if(defined $state)
{
check_call(AI::MXNetCAPI::NDArraySetGradState($self->handle, $state));
( run in 1.127 second using v1.01-cache-2.11-cpan-39bf76dae61 )