AI-MXNet

lib/AI/MXNet/NDArray.pm

        $buf = pack("f*", map { AI::MXNetCAPI::_half_to_float($_) } unpack("S*", $buf));
    }
    ${$pdl->get_dataref} = $buf;
    $pdl->upd_data;
    return $pdl;
}


=head2 _slice

    Returns a sliced NDArray that shares memory with the current one.

    Parameters
    ----------
    start : int
        Starting index of slice.
    stop : int
        Finishing index of slice.
=cut

method _slice (
    Index $start,
    Index $stop
)
{
    confess("start $start > stop $stop") if $start > $stop;
    my $handle = check_call(
        AI::MXNetCAPI::NDArraySlice(
            $self->handle,
            $start,
            $stop
        )
    );
    return __PACKAGE__->new(handle => $handle, writable => $self->writable);
}
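
# Illustrative usage (not part of the original source): a minimal sketch,
# assuming AI::MXNet is loaded as `mx`. _slice is an internal helper; the
# returned NDArray is a view over the first dimension and shares memory
# with the parent array.
#
#   use AI::MXNet qw(mx);
#   my $a = mx->nd->zeros([4, 2]);
#   my $b = $a->_slice(1, 3);        # rows 1 and 2; $stop is exclusive
#   print "@{ $b->shape }\n";        # 2 2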

=head2 _at

    Returns a sub NDArray that shares memory with the current one.

    Parameters
    ----------
    idx : int
        Index of the sub array.
=cut


method _at(Index $idx)
{
    my $handle = check_call(
                AI::MXNetCAPI::NDArrayAt(
                    $self->handle, $idx >= 0 ? $idx : $self->shape->[0] + $idx
                )
    );
    return __PACKAGE__->new(handle => $handle, writable => $self->writable);
}
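
# Illustrative usage (not part of the original source): _at picks a single
# sub-array along the first axis; negative indices count from the end, as
# handled by the adjustment above.
#
#   my $a   = mx->nd->array([[1, 2], [3, 4], [5, 6]]);
#   my $row = $a->_at(-1);           # same as $a->_at(2)
#   print "@{ $row->shape }\n";      # 2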

=head2 reshape

    Returns a reshaped NDArray that shares memory with the current one.
    One shape dimension can be -1. In this case, the value is inferred
    from the length of the array and remaining dimensions.

    Parameters
    ----------
    new_shape : Shape
        new shape of NDArray
=cut

method reshape(ArrayRef[Int] $new_shape)
{
    my $i = -1;
    my @inferred = map { $i++; $_ == -1 ? ($i) : () } @$new_shape;
    assert((@inferred <= 1), 'Only one dimension can be inferred.');
    if(@inferred)
    {
        $new_shape->[$inferred[0]] = product(@{ $self->shape })/product(map { abs($_) } @{ $new_shape });
    }
    my $handle = check_call(
                    AI::MXNetCAPI::NDArrayReshape(
                        $self->handle,
                        scalar(@$new_shape),
                        $new_shape
                    )
    );
    return __PACKAGE__->new(handle => $handle, writable => $self->writable);
}
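
# Illustrative usage (not part of the original source): one dimension may be
# given as -1 and is inferred from the remaining dimensions and the total
# number of elements.
#
#   my $a = mx->nd->array([1 .. 12]);        # shape [12]
#   my $b = $a->reshape([3, -1]);            # -1 is inferred as 4
#   print "@{ $b->shape }\n";                # 3 4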

=head2 ndim

    Returns the number of dimensions of this array.
=cut

method ndim()
{
    scalar(@{ $self->shape });
}

=head2 moveaxis

    Moves the 'source' axis into the 'destination' position
    while leaving the other axes in their original order.

    Parameters
    ----------
    source : int
        Original position of the axis to move.
    destination : int
        Destination position of the axis.

    Returns
    -------
    result : NDArray
        Array with moved axes.

    Examples
    --------
    > $X = mx->nd->array([[1, 2, 3],
                          [4, 5, 6]]);
    > print Dumper($X->moveaxis(0, 1)->shape)
    > [3, 2]
=cut

method moveaxis(Int $source, Int $dest)
{
    my @axes = 0..$self->ndim-1;
    $source += @axes if $source < 0;
    $dest += @axes if $dest < 0;
    assert($source < @axes);
    assert($dest < @axes);
    my ($to_move) = splice(@axes, $source, 1);
    splice(@axes, $dest, 0, $to_move);
    return __PACKAGE__->transpose($self, \@axes);
}

=head2 broadcast_to

    Broadcasts the current NDArray to the given shape.

    Parameters
    ----------
    Shape $shape : the shape to broadcast to
=cut

method broadcast_to(Shape $shape)
{
    my $cur_shape = $self->shape;
    my $err_str = "operands could not be broadcast together with remapped shapes "
                  ."[original->remapped]: [@$cur_shape] and requested shape [@$shape]";
    if(@$shape < @$cur_shape)
    {
        confess($err_str);
    }
    @$cur_shape = ((1)x(@$shape - @$cur_shape), @$cur_shape);
    my $cur_shape_arr = pdl($cur_shape);
    my $broadcasting_axes = ($cur_shape_arr != pdl($shape))->which->unpdl;
    if (grep { $cur_shape->[$_] != 1 } @$broadcasting_axes)
    {
        confess($err_str);
    }
    if(join(',',@$cur_shape) ne join(',',@{ $self->shape }))
    {
        return __PACKAGE__->SUPER::broadcast_to($self->reshape($cur_shape),{ shape => $shape });
    }
    else
    {
        return __PACKAGE__->SUPER::broadcast_to($self, { shape => $shape });
    }
}
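
# Illustrative usage (not part of the original source): axes of size 1 (and
# missing leading axes) are expanded to match the requested shape.
#
#   my $a = mx->nd->array([[1, 2, 3]]);      # shape [1, 3]
#   my $b = $a->broadcast_to([2, 3]);        # the single row is repeated
#   print "@{ $b->shape }\n";                # 2 3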

=head2 wait_to_read

    Block until all pending write operations on the NDArray are finished.

    This function will return when all the pending writes to the current
    NDArray are finished. There can be pending reads going on when the
    function returns.
=cut

method wait_to_read()
{
    check_call(AI::MXNetCAPI::NDArrayWaitToRead($self->handle));
}
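
# Illustrative usage (not part of the original source): MXNet schedules
# operations asynchronously, so wait_to_read is useful when a result must
# actually be materialised, e.g. before timing a computation.
#
#   my $a = mx->nd->ones([1000, 1000]);
#   my $b = mx->nd->dot($a, $a);     # returns immediately, runs asynchronously
#   $b->wait_to_read();              # blocks until the write to $b completes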

=head2 shape

    Get the shape of the current NDArray.

    Returns
    -------
    An array ref representing the shape of the current NDArray.
=cut

method shape()
{
    return scalar(check_call(AI::MXNetCAPI::NDArrayGetShape($self->handle)));
}

=head2 size

    Number of elements in the array.
=cut

method size(Shape|Undef $shape=)
{
    my $size = 1;
    $size *= $_ for @{ $shape // $self->shape };
    return $size;
}
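
# Illustrative usage (not part of the original source): shape, ndim and size
# describe the same array from different angles.
#
#   my $a = mx->nd->zeros([2, 3, 4]);
#   print "@{ $a->shape }\n";        # 2 3 4
#   print $a->ndim, "\n";            # 3
#   print $a->size, "\n";            # 24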


=head2 context

    The context of the NDArray.

    Returns
    -------
    $context : AI::MXNet::Context
=cut


