AI-MXNet


examples/char_lstm.pl

    --chkp-prefix    prefix for checkpoint files, default='lstm_'
    --cell-mode      RNN cell mode (LSTM, GRU, RNN, default=LSTM)
    --sample-size    size of the sample text to generate after each epoch (default=10000)
    --chkp-epoch     save a checkpoint after this many epochs, default=1 (save after every epoch)

=cut

package AI::MXNet::RNN::IO::ASCIIIterator;
use Mouse;
extends AI::MXNet::DataIter;
has 'data'          => (is => 'ro',  isa => 'PDL',   required => 1);
has 'seq_size'      => (is => 'ro',  isa => 'Int',   required => 1);
has '+batch_size'   => (is => 'ro',  isa => 'Int',   required => 1);
has 'data_name'     => (is => 'ro',  isa => 'Str',   default => 'data');
has 'label_name'    => (is => 'ro',  isa => 'Str',   default => 'softmax_label');
has 'dtype'         => (is => 'ro',  isa => 'Dtype', default => 'float32');
has [qw/nd counter seq_counter vocab_size
    data_size provide_data provide_label idx/] => (is => 'rw', init_arg => undef);

sub BUILD
{
    my $self = shift;
    $self->data_size($self->data->nelem);

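A minimal construction sketch for this iterator (it is defined inside examples/char_lstm.pl, so that script must be loaded; $text is a placeholder for the training corpus, and using raw character codes as token ids is an assumption, the real script builds a vocabulary first):

    use AI::MXNet qw(mx);
    use PDL;

    my $corpus = pdl(map { ord } split //, $text);   # integer token ids

    my $iter = AI::MXNet::RNN::IO::ASCIIIterator->new(
        data       => $corpus,   # PDL of token ids (required)
        batch_size => 32,
        seq_size   => 60,        # length of each unrolled sequence
    );
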
lib/AI/MXNet/CachedOp.pm


    AI::MXNet::CachedOp - A wrapper around CachedOpHandle
=cut

use strict;
use warnings;
use AI::MXNet::Base;
use Mouse;
use overload '&{}' => sub { my $self = shift; sub { $self->call(@_) } };

has 'handle'   => (is => 'ro', isa => 'CachedOpHandle', required => 1);
around BUILDARGS => sub {
    my $orig  = shift;
    my $class = shift;
    my ($sym) = @_;
    my $handle = check_call(
        AI::MXNetCAPI::CreateCachedOp(
            $sym->handle
        )
    );
    return $class->$orig(handle => $handle);

lib/AI/MXNet/Callback.pm

    ----------
    batch_size: int
        batch_size of data
    frequent: int
        How many batches between calculations.
        Defaults to calculating & logging every 50 batches.
    auto_reset: Bool
        Reset the metric after each log, defaults to true.
=cut

has 'batch_size' => (is => 'ro', isa => 'Int', required => 1);
has 'frequent'   => (is => 'ro', isa => 'Int', default  => 50);
has 'init'       => (is => 'rw', isa => 'Int', default  => 0);
has 'tic'        => (is => 'rw', isa => 'Num', default  => 0);
has 'last_count' => (is => 'rw', isa => 'Int', default  => 0);
has 'auto_reset' => (is => 'ro', isa => 'Bool', default  => 1);

method call(AI::MXNet::BatchEndParam $param)
{
    my $count = $param->nbatch;
    if($self->last_count > $count)

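A hedged usage sketch for this callback (the package name AI::MXNet::Speedometer and passing the object as batch_end_callback are assumed to mirror the Python mx.callback API; $mod and $train_iter are placeholders):

    my $speedometer = AI::MXNet::Speedometer->new(
        batch_size => 128,   # samples per batch, used for the samples/sec report
        frequent   => 50,    # log every 50 batches
    );
    $mod->fit($train_iter, num_epoch => 5, batch_end_callback => $speedometer);
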
lib/AI/MXNet/Callback.pm


    Parameters
    ----------
    total: Int
        total number of batches expected per epoch
    length: Int
        the length of the progress bar, default is 80 chars
=cut

has 'length'  => (is => 'ro', isa => 'Int', default => 80);
has 'total'   => (is => 'ro', isa => 'Int', required => 1);

method call(AI::MXNet::BatchEndParam $param)
{
    my $count = $param->nbatch;
    my $filled_len = int(0.5 + $self->length * $count / $self->total);
    my $percents = int(100.0 * $count / $self->total) + 1;
    my $prog_bar = ('=' x $filled_len) . ('-' x ($self->length - $filled_len));
    print "[$prog_bar] $percents%\r";
}

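A similar sketch for the progress bar (the class name AI::MXNet::ProgressBar is assumed; $mod and $train_iter are placeholders):

    my $bar = AI::MXNet::ProgressBar->new(
        total  => 500,   # expected number of batches per epoch
        length => 40,    # width of the bar in characters
    );
    $mod->fit($train_iter, num_epoch => 1, batch_end_callback => $bar);
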
lib/AI/MXNet/Executor.pm

package AI::MXNet::Executor;
use strict;
use warnings;
use AI::MXNet::Base;
use AI::MXNet::Context;
use Mouse;
use AI::MXNet::Types;
use AI::MXNet::Function::Parameters;

has 'handle'            => (is => 'ro', isa => 'ExecutorHandle', required => 1);
has 'arg_arrays'        => (is => 'rw', isa => 'Maybe[ArrayRef[AI::MXNet::NDArray]]');
has 'grad_arrays'       => (is => 'rw', isa => 'Maybe[ArrayRef[Undef|AI::MXNet::NDArray]]'); 
has 'aux_arrays'        => (is => 'rw', isa => 'Maybe[ArrayRef[AI::MXNet::NDArray]]');
has '_symbol'           => (is => 'rw', init_arg => 'symbol',    isa => 'AI::MXNet::Symbol');
has '_ctx'              => (is => 'rw', init_arg => 'ctx',       isa => 'AI::MXNet::Context' );
has '_grad_req'         => (is => 'rw', init_arg => 'grad_req',  isa => 'Maybe[Str|ArrayRef[Str]|HashRef[Str]]');
has '_group2ctx'        => (is => 'rw', init_arg => 'group2ctx', isa => 'Maybe[HashRef[AI::MXNet::Context]]');
has '_monitor_callback' => (is => 'rw', isa => 'CodeRef');
has [qw/_arg_dict
        _grad_dict

lib/AI/MXNet/Executor/Group.pm

    fixed_param_names: Maybe[ArrayRef[Str]]
        Indicates parameters to be fixed during training. Parameters in this array ref will have
        no gradient space allocated and no gradients computed for them.
    grad_req : ArrayRef[GradReq]|HashRef[GradReq]|GradReq
        Requirement for gradient accumulation. Can be 'write', 'add', or 'null'
        (default to 'write').
        Can be specified globally (str) or for each argument (array ref, hash ref).
    state_names: Maybe[ArrayRef[Str]]
=cut

has 'symbol'            => (is => 'ro', isa => 'AI::MXNet::Symbol', required => 1);
has 'contexts'          => (is => 'ro', isa => 'ArrayRef[AI::MXNet::Context]', required => 1);
has 'workload'          => (is => 'ro', isa => 'ArrayRef[Num]', default => sub { [] });
has 'data_shapes'       => (is => 'rw', isa => 'ArrayRef[NameShape|AI::MXNet::DataDesc]', required => 1);
has 'label_shapes'      => (is => 'rw', isa => 'Maybe[ArrayRef[NameShape|AI::MXNet::DataDesc]]');
has 'param_names'       => (is => 'ro', isa => 'ArrayRef[Str]', required => 1);
has 'for_training'      => (is => 'ro', isa => 'Bool', required => 1);
has 'inputs_need_grad'  => (is => 'ro', isa => 'Bool', default  => 0);
has 'shared_group'      => (is => 'ro', isa => 'Maybe[AI::MXNet::DataParallelExecutorGroup]');
has 'logger'            => (is => 'ro', default => sub { AI::MXNet::Logging->get_logger });
has 'fixed_param_names' => (is => 'rw', isa => 'Maybe[ArrayRef[Str]]');
has 'state_names'       => (is => 'rw', isa => 'Maybe[ArrayRef[Str]]');
has 'grad_req'          => (is => 'rw', isa => 'ArrayRef[GradReq]|HashRef[GradReq]|GradReq', default=>'write');
has '_p'                => (is => 'rw', init_arg => undef);
sub BUILD
{
    my $self = shift;

lib/AI/MXNet/IO.pm

    return \@ret;
}

method DataDesc(@args)  { AI::MXNet::DataDesc->new(@args)  }
method DataBatch(@args) { AI::MXNet::DataBatch->new(@args) }

package AI::MXNet::DataDesc;
use Mouse;
use overload '""'  => \&stringify,
             '@{}' => \&to_nameshape;
has 'name'   => (is => 'ro', isa => "Str",   required => 1);
has 'shape'  => (is => 'ro', isa => "Shape", required => 1);
has 'dtype'  => (is => 'ro', isa => "Dtype", default => 'float32');
has 'layout' => (is => 'ro', isa => "Str",   default => 'NCHW');

around BUILDARGS => sub {
    my $orig  = shift;
    my $class = shift;
    if(@_ >= 2 and ref $_[1] eq 'ARRAY')
    {
        my $name  = shift;
        my $shape = shift;

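Both constructor forms accepted by the BUILDARGS above, for illustration:

    my $desc = AI::MXNet::DataDesc->new(
        name  => 'data',
        shape => [32, 3, 224, 224],
    );

    # positional (name, shape) form handled by BUILDARGS
    my $same = AI::MXNet::DataDesc->new('data', [32, 3, 224, 224]);

    print "$desc\n";   # the '""' overload stringifies the descriptor
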
lib/AI/MXNet/IO.pm

=head1 NAME

    AI::MXNet::DataBatch - A container for a mini-batch of the data and related information.
=cut

=head1 DESCRIPTION

    Default object for holding a mini-batch of data and related information.
=cut

has 'data'          => (is => 'rw', isa => 'Maybe[ArrayRef[AI::MXNet::NDArray]]', required => 1);
has 'label'         => (is => 'rw', isa => 'Maybe[ArrayRef[AI::MXNet::NDArray]]');
has 'pad'           => (is => 'rw');
has 'index'         => (is => 'rw');
has 'bucket_key'    => (is => 'rw');
has 'provide_data'  => (is => 'rw');
has 'provide_label' => (is => 'rw');

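A minimal sketch of building a batch by hand (the shapes are arbitrary):

    use AI::MXNet qw(mx);

    my $batch = AI::MXNet::DataBatch->new(
        data  => [mx->nd->ones([32, 10])],   # array ref of NDArrays (required)
        label => [mx->nd->zeros([32])],
        pad   => 0,                          # no padding samples in this batch
    );
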
package AI::MXNet::DataIter;
use Mouse;
use overload '<>' =>  sub { shift->next },

lib/AI/MXNet/IO.pm

    to the padding from internal iterator.

    Parameters
    ----------
    data_iter : DataIter
        Internal data iterator.
    size : Int
        number of batches per epoch to resize to.
    reset_internal : Bool
        whether to reset the internal iterator when reset() is called on the ResizeIter.
=cut

has 'data_iter'      => (is => 'ro', isa => 'AI::MXNet::DataIter', required => 1);
has 'size'           => (is => 'ro', isa => 'Int', required => 1);
has 'reset_internal' => (is => 'rw', isa => 'Int', default => 1);
has 'cur'            => (is => 'rw', isa => 'Int', default => 0);
has 'current_batch'  => (is => 'rw', isa => 'Maybe[AI::MXNet::DataBatch]');
has [qw/provide_data
    default_bucket_key
    provide_label
    batch_size/]     => (is => 'rw', init_arg => undef);

sub BUILD
{

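A hedged sketch of wrapping an existing iterator (the package name AI::MXNet::ResizeIter is assumed, following the Python mx.io.ResizeIter; $train_iter is a placeholder):

    my $resized = AI::MXNet::ResizeIter->new(
        data_iter      => $train_iter,   # any AI::MXNet::DataIter
        size           => 100,           # treat 100 batches as one epoch
        reset_internal => 1,             # reset the wrapped iterator on reset()
    );
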
lib/AI/MXNet/IO.pm

use Mouse;
use AI::MXNet::Base;

extends 'AI::MXNet::DataIter';

=head1 NAME

    AI::MXNet::MXDataIter - A data iterator pre-built in the C++ layer of MXNet.
=cut

has 'handle'           => (is => 'ro', isa => 'DataIterHandle', required => 1);
has '_debug_skip_load' => (is => 'rw', isa => 'Int', default => 0);
has '_debug_at_begin'  => (is => 'rw', isa => 'Int', default => 0);
has 'data_name'        => (is => 'ro', isa => 'Str', default => 'data');
has 'label_name'       => (is => 'ro', isa => 'Str', default => 'softmax_label');
has [qw/first_batch
        provide_data
        provide_label
        batch_size/]   => (is => 'rw', init_arg => undef);

sub BUILD

lib/AI/MXNet/IO.pm


# Create an io iterator by handle.
func _make_io_iterator($handle)
{
    my ($iter_name, $desc,
        $arg_names, $arg_types, $arg_descs
    ) = @{ check_call(AI::MXNetCAPI::DataIterGetIterInfo($handle)) };
    my $param_str = build_param_doc($arg_names, $arg_types, $arg_descs);
    my $doc_str = "$desc\n\n"
                  ."$param_str\n"
                  ."name : string, required.\n"
                  ."    Name of the resulting data iterator.\n\n"
                  ."Returns\n"
                  ."-------\n"
                  ."iterator: DataIter\n"
                  ."    The result iterator.";
    my $iter = sub {
        my $class = shift;
        my (@args, %kwargs);
        if(@_ and ref $_[-1] eq 'HASH')
        {

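Iterators produced by this factory are used like any other DataIter. A hedged sketch with the MNIST iterator (exposure as mx->io->MNISTIter is assumed to mirror the Python API; the file paths are placeholders):

    use AI::MXNet qw(mx);

    my $train_iter = mx->io->MNISTIter(
        image      => 'data/train-images-idx3-ubyte',
        label      => 'data/train-labels-idx1-ubyte',
        batch_size => 100,
        shuffle    => 1,
    );
    while (my $batch = <$train_iter>)   # the '<>' overload calls next()
    {
        # consume $batch->data and $batch->label here
    }
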
lib/AI/MXNet/Image.pm

    Can be slow for HDD.
    part_index : int
        Partition index
    num_parts : int
        Total number of partitions.
    data_name : Str, default 'data'
    label_name : Str, default 'softmax_label'
    kwargs : hash ref with any additional arguments for augmenters
=cut

has 'batch_size'  => (is => 'ro', isa => 'Int',   required => 1);
has 'data_shape'  => (is => 'ro', isa => 'Shape', required => 1);
has 'label_width' => (is => 'ro', isa => 'Int',   default  => 1);
has 'data_name'   => (is => 'ro', isa => 'Str',   default  => 'data');
has 'label_name'  => (is => 'ro', isa => 'Str',   default  => 'softmax_label');
has [qw/path_imgrec
        path_imglist
        path_root
        path_imgidx
    /]            => (is => 'ro', isa => 'Str');
has 'shuffle'     => (is => 'ro', isa => 'Bool', default => 0);
has 'part_index'  => (is => 'ro', isa => 'Int', default => 0);

lib/AI/MXNet/Initializer.pm


=head2 new

    Parameters
    ----------
    name : str
        name of variable
    attrs : hash ref of str to str
        attributes of this variable taken from AI::MXNet::Symbol->attr_dict
=cut
has 'name'        => (is => 'ro', isa => 'Str', required => 1);
has 'attrs'       => (is => 'rw', isa => 'HashRef[Str]', lazy => 1, default => sub { +{} });
use overload '""' => sub { shift->name };
around BUILDARGS => sub {
    my $orig  = shift;
    my $class = shift;
    return $class->$orig(name => $_[0]) if @_ == 1;
    return $class->$orig(@_);
};

# Base class for Initializers

lib/AI/MXNet/Initializer.pm

    default_init: Initializer
        default initializer when a name is not found in the param hash ref.
    verbose: bool
        whether to log the names during initialization.
=cut

package AI::MXNet::Load;
use Mouse;
extends 'AI::MXNet::Initializer';

has 'param'        => (is => "rw", isa => 'HashRef[AI::MXNet::NDArray]', required => 1);
has 'default_init' => (is => "rw", isa => "AI::MXNet::Initializer");
has 'verbose'      => (is => "rw", isa => "Int", default => 0);

sub BUILD
{
    my $self = shift;
    # load from a saved file if a path was passed instead of a hash ref
    $self->param(AI::MXNet::NDArray->load($self->param)) unless ref $self->param;
    my %self_param;
    while(my ($name, $arr) = each %{ $self->param })
    {

lib/AI/MXNet/Initializer.pm

method _init_weight(Str $name, AI::MXNet::NDArray $arr)
{
    $arr .= 1;
}

__PACKAGE__->register;

package AI::MXNet::Constant;
use Mouse;
extends 'AI::MXNet::Initializer';
has 'value' => (is => 'ro', isa => 'Num', required => 1);
around BUILDARGS => sub {
    my $orig  = shift;
    my $class = shift;
    return $class->$orig(value => $_[0]) if @_ == 1;
    return $class->$orig(@_);
};

method _init_weight(Str $name, AI::MXNet::NDArray $arr)
{
    $arr .= $self->value;

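Both constructor forms allowed by the BUILDARGS above; handing the initializer to a module is shown only as an assumption (init_params mirroring the Python API):

    my $init = AI::MXNet::Constant->new(0.5);            # positional value
    my $same = AI::MXNet::Constant->new(value => 0.5);   # named form

    # assumed usage when initializing a module's parameters:
    # $mod->init_params(initializer => $init);
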
lib/AI/MXNet/Initializer.pm

    the forget gate's bias that is set to a custom value.

    Parameters
    ----------
    forget_bias: float
        bias for the forget gate. Jozefowicz et al. (2015) recommend setting this to 1.0.
=cut

use Mouse;
extends 'AI::MXNet::Initializer';
has 'forget_bias' => (is => 'ro', isa => 'Num', required => 1);

method _init_weight(Str $name, AI::MXNet::NDArray $arr)
{
    $arr .= 0;
    # in the case of LSTMCell the forget gate is the second
    # of the 4 LSTM gates, so we modify the corresponding values.
    my $num_hidden = int($arr->shape->[0] / 4);
    $arr->slice([$num_hidden, 2*$num_hidden-1]) .= $self->forget_bias;
}

lib/AI/MXNet/Initializer.pm

    All parameters below must be exactly the same as the ones passed to the
    FusedRNNCell constructor.

    num_hidden : int
    num_layers : int
    mode : str
    bidirectional : bool
    forget_bias : float
=cut

has 'init'          => (is => 'rw', isa => 'Str|AI::MXNet::Initializer', required => 1);
has 'forget_bias'   => (is => 'ro', isa => 'Num', default => 1);
has [qw/num_hidden
       num_layers/] => (is => 'ro', isa => 'Int', required => 1);
has 'mode'          => (is => 'ro', isa => 'Str', required => 1);
has 'bidirectional' => (is => 'ro', isa => 'Bool', default => 0);

sub BUILD
{
    my $self = shift;
    if(not blessed $self->init)
    {
        my ($klass, $kwargs);
        eval {
            ($klass, $kwargs) = @{ decode_json($self->init) };

lib/AI/MXNet/KVStore.pm


=head1 NAME

    AI::MXNet::KVStore - Key value store interface of MXNet.

=head1 DESCRIPTION 

    Key-value store interface of MXNet for parameter synchronization across multiple devices.
=cut

has 'handle' => (is => 'ro', isa => 'KVStoreHandle', required => 1);
has '_updater' => (is => 'rw',  isa => 'AI::MXNet::Updater');
has '_updater_func' => (is => 'rw', isa => 'CodeRef');

sub DEMOLISH
{
    check_call(AI::MXNetCAPI::KVStoreFree(shift->handle));
}
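
A hedged sketch of basic single-process usage (create/init/push/pull are assumed to mirror the Python mx.kv API):

    use AI::MXNet qw(mx);

    my $kv = AI::MXNet::KVStore->create('local');
    $kv->init(3, mx->nd->ones([2, 3]));          # key 3 -> initial value

    my $out = mx->nd->zeros([2, 3]);
    $kv->push(3, mx->nd->ones([2, 3]) * 2);      # send an update
    $kv->pull(3, out => $out);                   # fetch the aggregated value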

=head2  init

lib/AI/MXNet/KVStoreServer.pm


=head2 new

    Initialize a new KVStoreServer.

    Parameters
    ----------
    kvstore : KVStore
=cut

has 'kvstore' => (is => 'ro', isa => 'AI::MXNet::KVStore', required => 1);
has 'handle'  => (is => 'ro', isa => 'KVStoreHandle', default => sub { shift->kvstore->handle }, lazy => 1);
has 'init_logging' => (is => 'rw', isa => 'Int', default => 0);


# return the server controller
method _controller()
{
    return  sub { 
        my ($cmd_id, $cmd_body) = @_;
        if (not $self->init_logging)

lib/AI/MXNet/LRScheduler.pm

    Parameters
    ----------
    step: int
        reduce the learning rate every n updates
    factor: float
        the factor by which to reduce the learning rate.
=cut
use Mouse;
extends 'AI::MXNet::LRScheduler';

has 'step'            => (is => 'ro', isa => 'Int', required => 1);
has 'factor'          => (is => 'ro', isa => 'Num', default  => 1);
has 'count'           => (is => 'rw', isa => 'Int', default  => 1);
has 'stop_factor_lr'  => (is => 'ro', isa => 'Num', default  => 1e-8);

sub BUILD
{
    my $self = shift;
    confess("Schedule step must be greater or equal than 1")
        if $self->step < 1;
    confess("Factor must be no more than 1 to make lr reduce")

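A hedged sketch (the package name AI::MXNet::FactorScheduler is assumed, following the Python mx.lr_scheduler.FactorScheduler; wiring it in through Module->fit's optimizer_params is also an assumption):

    my $sched = AI::MXNet::FactorScheduler->new(
        step   => 1000,   # reduce the learning rate every 1000 updates
        factor => 0.9,
    );

    # assumed usage inside a training call:
    # $mod->fit($train_iter,
    #     optimizer        => 'sgd',
    #     optimizer_params => { learning_rate => 0.1, lr_scheduler => $sched },
    #     num_epoch        => 10,
    # );
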
lib/AI/MXNet/LRScheduler.pm

    Parameters
    ----------
    step: array ref of int
        the update counts at which the learning rate is reduced
    factor: float
        the factor for reducing the learning rate
=cut

use Mouse;
extends 'AI::MXNet::LRScheduler';
has 'step'            => (is => 'ro', isa => 'ArrayRef[Int]', required => 1);
has 'factor'          => (is => 'ro', isa => 'Num', default  => 1);
has 'cur_step_ind'    => (is => 'rw', isa => 'Int', default  => 0);
has 'count'           => (is => 'rw', isa => 'Int', default  => 0);

sub BUILD
{
    my $self = shift;
    confess("step array must have at least one member")
        unless @{ $self->step } >= 1;
    for (my $i = 0; $i < @{ $self->step }; $i++)

lib/AI/MXNet/Module.pm

}

=head1 NAME

    AI::MXNet::Module - FeedForward interface of MXNet.
    See AI::MXNet::Module::Base for the details.
=cut

extends 'AI::MXNet::Module::Base';

has '_symbol'           => (is => 'ro', init_arg => 'symbol', isa => 'AI::MXNet::Symbol', required => 1);
has '_data_names'       => (is => 'ro', init_arg => 'data_names', isa => 'ArrayRef[Str]');
has '_label_names'      => (is => 'ro', init_arg => 'label_names', isa => 'Maybe[ArrayRef[Str]]');
has 'work_load_list'    => (is => 'rw', isa => 'Maybe[ArrayRef[Int]]');
has 'fixed_param_names' => (is => 'rw', isa => 'Maybe[ArrayRef[Str]]');
has 'state_names'       => (is => 'rw', isa => 'Maybe[ArrayRef[Str]]');
has 'logger'            => (is => 'ro', default => sub { AI::MXNet::Logging->get_logger });
has '_p'                => (is => 'rw', init_arg => undef);
has 'context'           => (
    is => 'ro', 
    isa => 'AI::MXNet::Context|ArrayRef[AI::MXNet::Context]',

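A short end-to-end sketch built on the attributes listed above ($train_iter and $val_iter are assumed to be existing data iterators):

    use AI::MXNet qw(mx);

    my $data    = mx->sym->Variable('data');
    my $fc      = mx->sym->FullyConnected(data => $data, num_hidden => 64);
    my $softmax = mx->sym->SoftmaxOutput(data => $fc, name => 'softmax');

    my $mod = AI::MXNet::Module->new(
        symbol      => $softmax,
        data_names  => ['data'],
        label_names => ['softmax_label'],
        context     => mx->cpu,
    );
    $mod->fit($train_iter, eval_data => $val_iter, num_epoch => 10);
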
lib/AI/MXNet/Module/Base.pm

        ready for computation.
        - Parameter initialized. For modules with parameters, doing computation before initializing
        the parameters might result in undefined outputs.
        - Optimizer installed. An optimizer can be installed to a module. After this, the parameters
        of the module can be updated according to the optimizer after gradients are computed
        (forward-backward).

    In order for a module to interact with others, it should be able to report the
    following information in its raw stage (before being bound):

        - data_names: array ref of string indicating the names of required data.
        - output_names: array ref of string indicating the names of required outputs.

    And also the following richer information after being bound:

    - state information
        - binded: bool, indicating whether the memory buffers needed for computation
        have been allocated.
        - for_training: whether the module is bound for training (if bound).
        - params_initialized: bool, indicating whether the parameters of this module
        have been initialized.
        - optimizer_initialized: bool, indicating whether an optimizer is defined

lib/AI/MXNet/Module/Base.pm

################################################################################

=head2 get_symbol

    The symbol used by this module.
=cut
method get_symbol() { $self->symbol }

=head2 data_names

    An array ref of names for data required by this module.
=cut
method data_names() { confess("NotImplemented") }

=head2 output_names

    An array ref of names for the outputs of this module.
=cut
method output_names() { confess("NotImplemented") }

################################################################################

lib/AI/MXNet/Module/Base.pm

################################################################################
# Computations
################################################################################

=head2 forward

    Forward computation. It supports data batches with different shapes, such as
    different batch sizes or different image sizes.
    If reshaping the data batch requires modifying the symbol or module, for example
    changing the image layout ordering or switching from training to prediction, then
    the module must be rebound.

    Parameters
    ----------
    $data_batch : DataBatch
        Could be anything with similar API implemented.
    :$is_train= : Bool
        Default is undef, which means is_train takes the value of $self->for_training.
=cut

method forward(AI::MXNet::DataBatch $data_batch, Bool :$is_train=) { confess("NotImplemented") }

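A hedged sketch of a manual training step against this interface (the module is assumed to already be bound, with parameters and optimizer initialized; $train_iter is a placeholder):

    while (my $batch = <$train_iter>)        # DataIter '<>' overload
    {
        $mod->forward($batch, is_train => 1);
        $mod->backward;                      # compute gradients
        $mod->update;                        # let the optimizer apply them
    }
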
lib/AI/MXNet/Module/Bucketing.pm

    $work_load_list : array ref of Num
        Default is undef, indicating uniform workload.
    $fixed_param_names: arrayref of str
        Default is undef, indicating no network parameters are fixed.
    $state_names : arrayref of str
        states are similar to data and label, but are not provided by the data iterator.
        Instead they are initialized to 0 and can be set with set_states().
=cut

extends 'AI::MXNet::Module::Base';
has '_sym_gen'            => (is => 'ro', init_arg => 'sym_gen', required => 1);
has '_default_bucket_key' => (is => 'rw', init_arg => 'default_bucket_key', required => 1);
has '_context'            => (
    is => 'ro', isa => 'AI::MXNet::Context|ArrayRef[AI::MXNet::Context]',
    lazy => 1, default => sub { AI::MXNet::Context->cpu },
    init_arg => 'context'
);
has '_work_load_list'     => (is => 'rw', init_arg => 'work_load_list', isa => 'ArrayRef[Num]');
has '_curr_module'        => (is => 'rw', init_arg => undef);
has '_curr_bucket_key'    => (is => 'rw', init_arg => undef);
has '_buckets'            => (is => 'rw', init_arg => undef, default => sub { +{} });
has '_fixed_param_names'  => (is => 'rw', isa => 'ArrayRef[Str]', init_arg => 'fixed_param_names');

lib/AI/MXNet/Monitor.pm

        a function that computes statistics of tensors.
        Takes an NDArray and returns an NDArray. Defaults to the mean
        absolute value |x|/size(x).
    pattern : str
        A regular expression specifying which tensors to monitor.
        Only tensors with names that match the pattern will be included.
        For example, '.*weight|.*output' will print all weights and outputs;
        '.*backward.*' will print all gradients.
=cut

has 'interval'  => (is => 'ro', isa => 'Int', required => 1);
has 'stat_func' => (
    is => 'ro',
    isa => 'CodeRef',
    default => sub {
        return sub {
            # default statistic: norm(x)/sqrt(size(x)), executed asynchronously.
            my ($x) = @_;
            return $x->norm/sqrt($x->size);
        }
    },

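A hedged construction sketch (installing the monitor on a module is assumed to mirror the Python Module API):

    my $mon = AI::MXNet::Monitor->new(
        interval => 100,          # collect statistics every 100 batches
        pattern  => '.*weight',   # only tensors whose names match
    );

    # assumed usage, as in the Python API:
    # $mod->install_monitor($mon);
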
lib/AI/MXNet/NDArray.pm

    '!=' => \&not_equal,
    '>'  => \&greater,
    '>=' => \&greater_equal,
    '<'  => \&lesser,
    '<=' => \&lesser_equal,
    '.=' => \&set,
    '=' => sub { $_[0] };

extends 'AI::MXNet::NDArray::Base';
has 'writable' => (is => 'rw', isa => 'Int', default => 1, lazy => 1);
has 'handle'   => (is => 'rw', isa => 'NDArrayHandle', required => 1);

sub DEMOLISH
{
    check_call(AI::MXNetCAPI::NDArrayFree(shift->handle));
}

method STORABLE_freeze($cloning)
{
    my $buf = check_call(AI::MXNetCAPI::NDArraySaveRawBytes($self->handle));
    return ($buf,\ $self->writable);

lib/AI/MXNet/NDArray/Slice.pm

use warnings;
use Mouse;
use AI::MXNet::Base;
use AI::MXNet::Function::Parameters;

=head1 NAME

    AI::MXNet::NDArray::Slice - A convenience class for slicing of the AI::MXNet::NDArray objects.
=cut

has parent => (is => 'ro', isa => 'AI::MXNet::NDArray', required => 1);
has begin  => (is => 'ro', isa => 'Shape', required => 1);
has end    => (is => 'ro', isa => 'Shape', required => 1);
use overload 
    '.=' => \&set,
    '='  => sub { $_[0] },
    '""' => \&notsupported,
    '+'  => \&notsupported,
    '+=' => \&notsupported,
    '-'  => \&notsupported,
    '-=' => \&notsupported,
    '*'  => \&notsupported,
    '*=' => \&notsupported,

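A small sketch of in-place assignment through a slice, matching the '.=' overload above and the inclusive ranges used by the LSTMBias initializer earlier:

    use AI::MXNet qw(mx);

    my $x = mx->nd->zeros([6]);
    $x->slice([1, 3]) .= 1;        # elements 1..3 (inclusive) become 1
    print $x->aspdl, "\n";         # [0 1 1 1 0 0]
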
lib/AI/MXNet/Optimizer.pm

    $opt_registry{ $name } = $self;
}

=head2 create_optimizer

        Create an optimizer with specified name.

        Parameters
        ----------
        name: str
            Name of the optimizer to create. Should be the name
            of a subclass of Optimizer. Case insensitive.

        rescale_grad : float
            Rescaling factor on gradient. Normally should be 1/batch_size.

        kwargs: dict
            Parameters for optimizer

        Returns
        -------

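A hedged sketch of the call (the exact Perl calling convention is assumed to follow the parameters documented above):

    my $opt = AI::MXNet::Optimizer->create_optimizer(
        'sgd',                     # case-insensitive registered name
        learning_rate => 0.01,
        rescale_grad  => 1 / 128,  # normally 1/batch_size
    );
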
lib/AI/MXNet/RNN/Cell.pm

    activation : str or Symbol, default 'tanh'
        type of activation function
    prefix : str, default 'rnn_'
        prefix for name of layers
        (and name of weight if params is undef)
    params : AI::MXNet::RNNParams or undef
        container for weight sharing between cells.
        created if undef.
=cut

has '_num_hidden'  => (is => 'ro', init_arg => 'num_hidden', isa => 'Int', required => 1);
has 'forget_bias'  => (is => 'ro', isa => 'Num');
has '_activation'  => (
    is       => 'ro',
    init_arg => 'activation',
    isa      => 'Activation',
    default  => 'tanh'
);
has '+_prefix'    => (default => 'rnn_');
has [qw/_iW _iB
        _hW _hB/] => (is => 'rw', init_arg => undef);

lib/AI/MXNet/RNN/Cell.pm

    AI::MXNet::RNN::FusedCell
=cut

=head1 DESCRIPTION

    Fuses RNN layers across time steps into a single kernel.
    Improves speed but is less flexible. Currently only
    supported when using cuDNN on a GPU.
=cut

has '_num_hidden'      => (is => 'ro', isa => 'Int',  init_arg => 'num_hidden',     required => 1);
has '_num_layers'      => (is => 'ro', isa => 'Int',  init_arg => 'num_layers',     default => 1);
has '_dropout'         => (is => 'ro', isa => 'Num',  init_arg => 'dropout',        default => 0);
has '_get_next_state'  => (is => 'ro', isa => 'Bool', init_arg => 'get_next_state', default => 0);
has '_bidirectional'   => (is => 'ro', isa => 'Bool', init_arg => 'bidirectional',  default => 0);
has 'forget_bias'      => (is => 'ro', isa => 'Num',  default => 1);
has 'initializer'      => (is => 'rw', isa => 'Maybe[Initializer]');
has '_mode'            => (
    is => 'ro',
    isa => enum([qw/rnn_relu rnn_tanh lstm gru/]),
    init_arg => 'mode',

lib/AI/MXNet/RNN/Cell.pm

    Parameters
    ----------
    l_cell : AI::MXNet::RNN::Cell::Base
        cell for forward unrolling
    r_cell : AI::MXNet::RNN::Cell::Base
        cell for backward unrolling
    output_prefix : str, default 'bi_'
        prefix for name of output
=cut

has 'l_cell'         => (is => 'ro', isa => 'AI::MXNet::RNN::Cell::Base', required => 1);
has 'r_cell'         => (is => 'ro', isa => 'AI::MXNet::RNN::Cell::Base', required => 1);
has '_output_prefix' => (is => 'ro', init_arg => 'output_prefix', isa => 'Str', default => 'bi_');
has [qw/_override_cell_params _cells/] => (is => 'rw', init_arg => undef);

around BUILDARGS => sub {
    my $orig  = shift;
    my $class = shift;
    if(@_ >= 2 and blessed $_[0] and blessed $_[1])
    {
        my $l_cell = shift(@_);
        my $r_cell = shift(@_);

lib/AI/MXNet/RNN/Cell.pm


    Base class for modifier cells. A modifier
    cell takes a base cell, applies modifications
    to it (e.g. Dropout), and returns a new cell.

    After applying modifiers the base cell should
    no longer be called directly. The modifier cell
    should be used instead.
=cut

has 'base_cell' => (is => 'ro', isa => 'AI::MXNet::RNN::Cell::Base', required => 1);

around BUILDARGS => sub {
    my $orig  = shift;
    my $class = shift;
    if(@_%2)
    {
        my $base_cell = shift;
        return $class->$orig(base_cell => $base_cell, @_);
    }
    return $class->$orig(@_);

lib/AI/MXNet/RNN/IO.pm

        name of label
    layout : str
        format of data and label. 'NT' means (batch_size, length)
        and 'TN' means (length, batch_size).
=cut

use Mouse;
use AI::MXNet::Base;
use List::Util qw(shuffle max);
extends 'AI::MXNet::DataIter';
has 'sentences'     => (is => 'ro', isa => 'ArrayRef[ArrayRef]', required => 1);
has '+batch_size'   => (is => 'ro', isa => 'Int',                required => 1);
has 'invalid_label' => (is => 'ro', isa => 'Int',   default => -1);
has 'data_name'     => (is => 'ro', isa => 'Str',   default => 'data');
has 'label_name'    => (is => 'ro', isa => 'Str',   default => 'softmax_label');
has 'dtype'         => (is => 'ro', isa => 'Dtype', default => 'float32');
has 'layout'        => (is => 'ro', isa => 'Str',   default => 'NT');
has 'buckets'       => (is => 'rw', isa => 'Maybe[ArrayRef[Int]]');
has [qw/data nddata ndlabel
        major_axis default_bucket_key
        provide_data provide_label
        idx curr_idx

lib/AI/MXNet/RecordIO.pm

=head2 new

    Parameters
    ----------
    uri : Str
        uri path to recordIO file.
    flag: Str
        "r" for reading or "w" writing.
=cut

has 'uri'         => (is => 'ro', isa => 'Str', required => 1);
has 'flag'        => (is => 'ro', isa => enum([qw/r w/]), required => 1);
has 'handle'      => (is => 'rw', isa => 'RecordIOHandle');
has [qw/writable 
        is_open/] => (is => 'rw', isa => 'Bool');

sub BUILD
{
    my $self = shift;
    $self->is_open(0);
    $self->open();
}

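A hedged round-trip sketch (the write/read/close method names are assumed to follow the Python MXRecordIO API; the payload is an arbitrary byte string):

    my $writer = AI::MXNet::RecordIO->new(uri => 'data.rec', flag => 'w');
    $writer->write('first record');
    $writer->write('second record');
    $writer->close;

    my $reader = AI::MXNet::RecordIO->new(uri => 'data.rec', flag => 'r');
    while (defined(my $rec = $reader->read))
    {
        print length($rec), " bytes\n";
    }
    $reader->close;
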
lib/AI/MXNet/RecordIO.pm

    Parameters
    ----------
    idx_path : str
        Path to index file
    uri : str
        Path to the record file. Only seekable file types are supported.
    flag : str
        'w' for writing or 'r' for reading.
=cut

has 'idx_path'  => (is => 'ro', isa => 'Str', required => 1);
has [qw/idx
    keys fidx/] => (is => 'rw', init_arg => undef);

method open()
{
    $self->SUPER::open();
    $self->idx({});
    $self->keys([]);
    open(my $f, $self->flag eq 'r' ? '<' : '>', $self->idx_path);
    $self->fidx($f);

lib/AI/MXNet/Rtc.pm

            const int x_ndim = 1;
            const int x_dims = { 10 };
            const int y_ndim = 1;
            const int y_dims = { 10 };

            y[threadIdx.x] = x[threadIdx.x];
        }
=cut

has 'handle'              => (is => 'rw', isa => 'RtcHandle', init_arg => undef);
has [qw/name kernel/]     => (is => 'ro', isa => 'Str', required => 1);
has [qw/inputs outputs/]  => (is => 'ro', isa => 'HashRef[AI::MXNet::NDArray]', required => 1);

sub BUILD
{
    my $self = shift;
    my (@input_names, @output_names, @input_nds, @output_nds);
    while(my ($name, $arr) = each %{ $self->inputs })
    {
        push @input_names, $name;
        push @input_nds, $arr->handle;
    }

lib/AI/MXNet/Symbol.pm

    '=='  => \&equal,
    '!='  => \&not_equal,
    '>'   => \&greater,
    '>='  => \&greater_equal,
    '<'   => \&lesser,
    '<='  => \&lesser_equal,
    '&{}' => sub { my $self = shift; sub { $self->call(@_) } },
    '@{}' => sub { my $self = shift; [map { $self->slice($_) } @{ $self->list_outputs }] };

extends 'AI::MXNet::Symbol::Base';
has 'handle'   => (is => 'rw', isa => 'SymbolHandle', required => 1);

sub DEMOLISH
{
    check_call(AI::NNVMCAPI::SymbolFree(shift->handle));
}

method STORABLE_freeze($cloning)
{
    return $self->tojson();
}

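A small sketch exercising these overloads (the arithmetic overloads come from the part of the overload table not shown in this excerpt):

    use AI::MXNet qw(mx);

    my $a = mx->sym->Variable('a');
    my $b = mx->sym->Variable('b');

    my $sum = $a + $b;              # symbolic element-wise add
    my $gt  = $a > $b;              # comparison overload shown above

    my @outputs = @{ $sum };        # '@{}' overload: one sliced symbol per output
    print scalar(@outputs), "\n";   # 1
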
lib/AI/MXNet/Symbol/NameManager.pm

extends 'AI::MXNet::Symbol::NameManager';

=head1 DESCRIPTION

    A name manager that always attaches a prefix to all names.
=cut

has prefix => (
    is => 'ro',
    isa => 'Str',
    required => 1
);

method get(Maybe[Str] $name, Str $hint)
{
    $name = $self->SUPER::get($name, $hint);
    return $self->prefix . $name;
}

1;


