AI-MXNet


examples/calculator.pl  view on Meta::CPAN

    my($batch_size, $func) = @_;
    # get samples
    my $n = 16384;
    ## creates a pdl with $n rows and two columns of random
    ## floats between 0 and 1
    my $data = PDL->random(2, $n);
    ## creates a pdl with $n rows and one column of labels;
    ## each label is the sum, product, etc. (chosen by $func) of the
    ## two random values in the corresponding row of the data pdl
    my $label = $func->($data->slice('0,:'), $data->slice('1,:'));
    # partition into train/eval sets
    my $edge = int($n / 8);
    my $validation_data = $data->slice(":,0:@{[ $edge - 1 ]}");
    my $validation_label = $label->slice(":,0:@{[ $edge - 1 ]}");
    my $train_data = $data->slice(":,$edge:");
    my $train_label = $label->slice(":,$edge:");
    # build iterators around the sets
    return(mx->io->NDArrayIter(
        batch_size => $batch_size,
        data => $train_data,
        label => $train_label,
    ), mx->io->NDArrayIter(
        batch_size => $batch_size,
        data => $validation_data,
        label => $validation_label,
    ));
}
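## A usage sketch for the iterator factory above; the sub name get_iters
## and the addition closure below are illustrative assumptions only:
##
##     my ($train_iter, $val_iter) = get_iters(128, sub { $_[0] + $_[1] });
##     while (my $batch = <$train_iter>)    # AI::MXNet::DataIter overloads <>
##     {
##         my $d = $batch->data->[0];       # NDArray of shape [128, 2]
##         my $l = $batch->label->[0];      # matching label NDArray
##     }
##     $train_iter->reset;                  # rewind before the next epoch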

examples/char_lstm.pl  view on Meta::CPAN

            dtype => $self->dtype
        )
    ]);
    $self->provide_label([
        AI::MXNet::DataDesc->new(
            name  => $self->label_name,
            shape => $shape,
            dtype => $self->dtype
        )
    ]);
    $self->reset;
}

method reset()
{
    $self->counter(0);
    @{ $self->idx } = List::Util::shuffle(@{ $self->idx });
}

method next()
{
    return undef if $self->counter == @{$self->idx};
    my $offset = $self->idx->[$self->counter]*$self->batch_size*$self->seq_size + $self->seq_counter;
    my $data = $self->nd->slice(
        [$offset, $offset + $self->batch_size*$self->seq_size-1]
    )->reshape([$self->batch_size, $self->seq_size]);
    my $label = $self->nd->slice(
        [$offset + 1 , $offset + $self->batch_size*$self->seq_size]
    )->reshape([$self->batch_size, $self->seq_size]);
    $self->seq_counter($self->seq_counter + 1);
    if($self->seq_counter == $self->seq_size - 1)
    {
        $self->counter($self->counter + 1);
        $self->seq_counter(0);
    }
    return AI::MXNet::DataBatch->new(
        data          => [$data],
        label         => [$label],

examples/char_lstm.pl  view on Meta::CPAN

    }
    $stack->add($cell);
}

my $data  = mx->sym->Variable('data');
my $label = mx->sym->Variable('softmax_label');
my $embed = mx->sym->Embedding(
        data => $data, input_dim => scalar(keys %vocabulary),
        output_dim => $num_embed, name => 'embed'
);
$stack->reset;
my ($outputs, $states) = $stack->unroll($seq_size, inputs => $embed, merge_outputs => 1);
my $pred  = mx->sym->Reshape($outputs, shape => [-1, $num_hidden*(1+($bidirectional ? 1 : 0))]);
$pred     = mx->sym->FullyConnected(data => $pred, num_hidden => $data_iter->vocab_size, name => 'pred');
$label    = mx->sym->Reshape($label, shape => [-1]);
my $net   = mx->sym->SoftmaxOutput(data => $pred, label => $label, name => 'softmax');

my $contexts;
if(defined $gpus)
{
    $contexts = [map { mx->gpu($_) } split(/,/, $gpus)];

examples/cudnn_lstm_bucketing.pl  view on Meta::CPAN

        )->unfuse()
    }
    my $sym_gen = sub {
        my $seq_len = shift;
        my $data  = mx->sym->Variable('data');
        my $label = mx->sym->Variable('softmax_label');
        my $embed = mx->sym->Embedding(
            data => $data, input_dim => scalar(keys %$vocab),
            output_dim => $num_embed, name => 'embed'
        );
        $stack->reset;
        my ($outputs, $states) = $stack->unroll($seq_len, inputs => $embed, merge_outputs => 1);
        my $pred = mx->sym->Reshape($outputs, shape => [-1, $num_hidden*(1+$bidirectional)]);
        $pred    = mx->sym->FullyConnected(data => $pred, num_hidden => scalar(keys %$vocab), name => 'pred');
        $label   = mx->sym->Reshape($label, shape => [-1]);
        $pred    = mx->sym->SoftmaxOutput(data => $pred, label => $label, name => 'softmax');
        return ($pred, ['data'], ['softmax_label']);
    };
    my $contexts;
    if($gpus)
    {

examples/cudnn_lstm_bucketing.pl  view on Meta::CPAN

        sym_gen             => $sym_gen,
        default_bucket_key  => $data_val->default_bucket_key,
        context             => $contexts
    );
    $model->bind(
        data_shapes  => $data_val->provide_data,
        label_shapes => $data_val->provide_label,
        for_training => 0,
        force_rebind => 0
    );
    $model->set_params($arg_params, $aux_params);
    my $score = $model->score($data_val,
        mx->metric->Perplexity($invalid_label),
        batch_end_callback=>mx->callback->Speedometer($batch_size, 5)
    );
};

if($num_layers >= 4 and split(/,/,$gpus) >= 4 and not $stack_rnn)
{
    print("WARNING: stack-rnn is recommended to train complex model on multiple GPUs\n");
}

examples/lstm_bucketing.pl  view on Meta::CPAN

}

my $sym_gen = sub {
    my $seq_len = shift;
    my $data  = mx->sym->Variable('data');
    my $label = mx->sym->Variable('softmax_label');
    my $embed = mx->sym->Embedding(
        data => $data, input_dim => scalar(keys %$vocabulary),
        output_dim => $num_embed, name => 'embed'
    );
    $stack->reset;
    my ($outputs, $states) = $stack->unroll($seq_len, inputs => $embed, merge_outputs => 1);
    my $pred = mx->sym->Reshape($outputs, shape => [-1, $num_hidden]);
    $pred    = mx->sym->FullyConnected(data => $pred, num_hidden => scalar(keys %$vocabulary), name => 'pred');
    $label   = mx->sym->Reshape($label, shape => [-1]);
    $pred    = mx->sym->SoftmaxOutput(data => $pred, label => $label, name => 'softmax');
    return ($pred, ['data'], ['softmax_label']);
};

my $contexts;
if(defined $gpus)

examples/mnist.pl  view on Meta::CPAN

}

sub read_data {
    my($label_url, $image_url) = @_;
    my($magic, $num, $rows, $cols);

    open my($flbl), '<:gzip', download_data($label_url);
    read $flbl, my($buf), 8;
    ($magic, $num) = unpack 'N2', $buf;
    my $label = PDL->new();
    $label->set_datatype($PDL::Types::PDL_B);
    $label->setdims([ $num ]);
    read $flbl, ${$label->get_dataref}, $num;
    $label->upd_data();

    open my($fimg), '<:gzip', download_data($image_url);
    read $fimg, $buf, 16;
    ($magic, $num, $rows, $cols) = unpack 'N4', $buf;
    my $image = PDL->new();
    $image->set_datatype($PDL::Types::PDL_B);
    $image->setdims([ $rows, $cols, $num ]);
    read $fimg, ${$image->get_dataref}, $num * $rows * $cols;
    $image->upd_data();

    return($label, $image);
}

my $path='http://yann.lecun.com/exdb/mnist/';
my($train_lbl, $train_img) = read_data(
    "${path}train-labels-idx1-ubyte.gz", "${path}train-images-idx3-ubyte.gz");
my($val_lbl, $val_img) = read_data(

lib/AI/MXNet.pm  view on Meta::CPAN

__END__

=encoding UTF-8

=head1 NAME

AI::MXNet - Perl interface to MXNet machine learning library

=head1 SYNOPSIS

    ## Convolutional NN for recognizing hand-written digits in MNIST dataset
    ## It's considered "Hello, World" for Neural Networks
    ## For more info about the MNIST problem please refer to http://neuralnetworksanddeeplearning.com/chap1.html

    use strict;
    use warnings;
    use AI::MXNet qw(mx);
    use AI::MXNet::TestUtils qw(GetMNIST_ubyte);
    use Test::More tests => 1;

    # symbol net

lib/AI/MXNet/Callback.pm  view on Meta::CPAN

use Mouse;
use overload "&{}" => sub { my $self = shift; sub { $self->call(@_) } };

=head1 NAME

    AI::MXNet::Callback - A collection of predefined callback functions
=cut

=head2 module_checkpoint

    Callback to save the module setup in the checkpoint files.

    Parameters
    ----------
    $mod : subclass of AI::MXNet::Module::Base
        The module to checkpoint.
    $prefix : str
        The file prefix to checkpoint to
    $period=1 : int
        How many epochs to wait before checkpointing. Default is 1.
    $save_optimizer_states=0 : Bool

lib/AI/MXNet/Callback.pm  view on Meta::CPAN

}

=head2 log_train_metric

    Callback to log the training evaluation result every period.

    Parameters
    ----------
    $period : Int
        The number of batches after which to log the training evaluation metric.
    $auto_reset : Bool
        Whether to reset the metric after the logging.

    Returns
    -------
    $callback : sub ref
        The callback function that can be passed as a batch_end_callback to fit.
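
    Example
    -------
    A sketch; the module $mod and the iterator $train_iter are assumed to be
    set up elsewhere:

        $mod->fit(
            $train_iter,
            batch_end_callback => mx->callback->log_train_metric(50),
            num_epoch          => 5
        );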
=cut

method log_train_metric(Int $period, Int $auto_reset=0)
{
    return sub {
        my ($param) = @_;
        if($param->nbatch % $period == 0 and defined $param->eval_metric)
        {
            my $name_value = $param->eval_metric->get_name_value;
            while(my ($name, $value) = each %{ $name_value })
            {
                AI::MXNet::Logging->info(
                    "Iter[%d] Batch[%d] Train-%s=%f",
                    $param->epoch, $param->nbatch, $name, $value
                );
            }
            $param->eval_metric->reset if $auto_reset;
        }
    }
}

package AI::MXNet::Speedometer;
use Mouse;
use Time::HiRes qw/time/;
extends 'AI::MXNet::Callback';

=head1 NAME

lib/AI/MXNet/Callback.pm  view on Meta::CPAN


    Calculate and log training speed periodically.

    Parameters
    ----------
    batch_size: int
        batch_size of data
    frequent: int
        How many batches between calculations.
        Defaults to calculating & logging every 50 batches.
    auto_reset: Bool
        Reset the metric after each log, defaults to true.
=cut

has 'batch_size' => (is => 'ro', isa => 'Int', required => 1);
has 'frequent'   => (is => 'ro', isa => 'Int', default  => 50);
has 'init'       => (is => 'rw', isa => 'Int', default  => 0);
has 'tic'        => (is => 'rw', isa => 'Num', default  => 0);
has 'last_count' => (is => 'rw', isa => 'Int', default  => 0);
has 'auto_reset' => (is => 'ro', isa => 'Bool', default  => 1);

method call(AI::MXNet::BatchEndParam $param)
{
    my $count = $param->nbatch;
    if($self->last_count > $count)
    {
        $self->init(0);
    }
    $self->last_count($count);

    if($self->init)
    {
        if(($count % $self->frequent) == 0)
        {
            my $speed = $self->frequent * $self->batch_size / (time - $self->tic);
            if(defined $param->eval_metric)
            {
                my $name_value = $param->eval_metric->get_name_value;
                $param->eval_metric->reset if $self->auto_reset;
                while(my ($name, $value) = each %{ $name_value })
                {
                    AI::MXNet::Logging->info(
                        "Epoch[%d] Batch [%d]\tSpeed: %.2f samples/sec\tTrain-%s=%f",
                        $param->epoch, $count, $speed, $name, $value
                    );
                }
            }
            else
            {

lib/AI/MXNet/Callback.pm  view on Meta::CPAN

        );
    }
}

package AI::MXNet::Callback;

method Speedometer(@args)
{
    AI::MXNet::Speedometer->new(
        @args == 3 ?
            (batch_size => $args[0], frequent => $args[1], auto_reset => $args[2])
            : @args == 2 ?
                (batch_size => $args[0], frequent => $args[1])
                    : (batch_size => $args[0])
    )
}
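## For example (sketches; $batch_size is assumed to be defined):
##     mx->callback->Speedometer($batch_size);          # log every 50 batches
##     mx->callback->Speedometer($batch_size, 100);     # log every 100 batches
##     mx->callback->Speedometer($batch_size, 100, 0);  # keep the running metric between logs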

method ProgressBar(@args)
{
    AI::MXNet::ProgressBar->new(
        @args == 2 ? (total => $args[0], 'length' => $args[1]) : (total => $args[0])

lib/AI/MXNet/Contrib/AutoGrad.pm  view on Meta::CPAN

use warnings;
use AI::MXNet::Base;
use AI::MXNet::Function::Parameters;
use Scalar::Util qw(blessed);

=head1 NAME

    AI::MXNet::AutoGrad - Autograd for NDArray.
=cut

=head2 set_is_training

    Set status to training/not training. When training, graph will be constructed
    for gradient computation. Operators will also run with ctx.is_train=True. For example,
    Dropout will drop inputs randomly when is_train=True while simply passing through
    if is_train=False.

    Parameters
    ----------
    is_train: bool

    Returns
    -------
    previous state before this set.
=cut


method set_is_training(Bool $is_train)
{
    my $prev = scalar(check_call(AI::MXNetCAPI::AutogradSetIsTraining($is_train ? 1 : 0)));
    return $prev ? 1 : 0
}

=head2 mark_variables

    Mark AI::MXNet::NDArrays as variables to compute gradient for autograd.

    Parameters

lib/AI/MXNet/Contrib/AutoGrad.pm  view on Meta::CPAN

            my @argnum = ref $argnum ? @$argnum : ($argnum);
            @variables = map { $_[$_] } @argnum;
        }
        map {
            assert(
                (blessed($_) and $_->isa('AI::MXNet::NDArray')),
                "type of autograd input should NDArray")
        } @variables;
        my @grads = map { $_->zeros_like } @variables;
        __PACKAGE__->mark_variables(\@variables, \@grads);
        my $prev = __PACKAGE__->set_is_training(1);
        my $outputs = $func->(@args);
        __PACKAGE__->set_is_training(0) unless $prev;
        __PACKAGE__->compute_gradient(ref $outputs eq 'ARRAY' ? $outputs : [$outputs]);
        return (\@grads, $outputs);
    };
}

=head2 grad

    Return function that computes gradient of arguments.

    Parameters

lib/AI/MXNet/Contrib/AutoGrad.pm  view on Meta::CPAN

method grad(CodeRef $func, Maybe[Int|ArrayRef[Int]] $argnum=)
{
    my $grad_with_loss_func = __PACKAGE__->grad_and_loss($func, $argnum);
    return sub {
        return ($grad_with_loss_func->(@_))[0];
    };
}
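## A sketch of taking a gradient: for f(x) = x*x, df/dx = 2x
## (mx->nd->ones is used here as elsewhere in these docs):
##     my $grad_f = AI::MXNet::AutoGrad->grad(sub { $_[0] * $_[0] });
##     my $x      = mx->nd->ones([2, 2]);
##     my ($dx)   = @{ $grad_f->($x) };   # NDArray filled with 2s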

method train_section(CodeRef $sub)
{
    my $prev = __PACKAGE__->set_is_training(1);
    $sub->();
    __PACKAGE__->set_is_training(0) unless $prev;
}

method test_section(CodeRef $sub)
{
    my $prev = __PACKAGE__->set_is_training(0);
    $sub->();
    __PACKAGE__->set_is_training(1) if $prev;
}
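## Usage sketch ($net is assumed to be a CodeRef over NDArrays, $x an NDArray):
##     AI::MXNet::AutoGrad->train_section(sub {
##         my $out = $net->($x);   # runs with is_training == 1, so Dropout is active
##     });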

1;

lib/AI/MXNet/Executor.pm  view on Meta::CPAN

{
    check_call(AI::MXNetCAPI::ExecutorFree(shift->handle));
}

# Get the dictionary given name and ndarray pairs.
func _get_dict(
    ArrayRef[Str]                       $names,
    ArrayRef[Maybe[AI::MXNet::NDArray]] $ndarrays
)
{
    my %nset = ();
    for my $nm (@{ $names })
    {
        if(exists $nset{ $nm })
        {
            confess("Duplicate names detected, @$names")
        }
        $nset{ $nm }++;
    }
    my %ret;
    @ret{ @{ $names } } = @{ $ndarrays };
    return \%ret;
}

=head2 outputs

    The output ndarrays bound to this executor.

lib/AI/MXNet/Executor.pm  view on Meta::CPAN

    if(not $self->_output_dirty)
    {
        AI::MXNet::Logging->warning(
            "Calling backward without calling forward(is_train=True) "
            ."first. Behavior is undefined."
        );
    }
    $self->_output_dirty(0);
}

=head2 set_monitor_callback

    Install callback.

    Parameters
    ----------
    callback : subref
        Takes a string and an NDArrayHandle.
=cut

method set_monitor_callback(CodeRef $callback)
{
    $self->_monitor_callback($callback);
    check_call(
        AI::MXNetCAPI::ExecutorSetMonitorCallback(
            $self->handle,
            $self->_monitor_callback
        )
    );
}
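## A sketch of a monitor callback; wrapping the raw handle into an NDArray
## mirrors the updater wrapper in AI::MXNet::KVStore:
##     $exe->set_monitor_callback(sub {
##         my ($name, $handle) = @_;
##         my $arr = AI::MXNet::NDArray->new(handle => $handle);
##         print "$name: ", $arr->aspdl, "\n";
##     });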

lib/AI/MXNet/Executor.pm  view on Meta::CPAN

                or
            join(',', @{ $new_shape }) eq join(',', @{ $arr->shape })
        )
        {
            if(AI::MXNet::NDArray->size($new_shape) > $arr->size)
            {
                confess(
                    "New shape of arg:$name larger than original. "
                    ."First making a big executor and then down sizing it "
                    ."is more efficient than the reverse."
                    ."If you really want to up size, set \$allow_up_sizing=1 "
                    ."to enable allocation of new arrays."
                ) unless $allow_up_sizing;
                $new_arg_dict{ $name }  = AI::MXNet::NDArray->empty(
                    $new_shape,
                    ctx => $arr->context,
                    dtype => $arr->dtype
                );
                if(defined $darr)
                {
                    $new_grad_dict{ $name } = AI::MXNet::NDArray->empty(

lib/AI/MXNet/Executor.pm  view on Meta::CPAN

                    $new_grad_dict{ $name } = $darr->reshape($new_shape);
                }
            }
        }
        else
        {
            confess(
                    "Shape of unspecified array arg:$name changed. "
                    ."This can cause the new executor to not share parameters "
                    ."with the old one. Please check for error in network."
                    ."If this is intended, set partial_shaping=True to suppress this warning."
            );
        }
        $i++;
    }
    my %new_aux_dict;
    $i = 0;
    for my $name (@{ $self->_symbol->list_auxiliary_states() })
    {
        my $new_shape = $aux_shapes->[$i];
        my $arr = $self->aux_arrays->[$i];
        if($partial_shaping or join(',', @{ $new_shape }) eq join (',', @{ $arr->shape }))
        {
            if(AI::MXNet::NDArray->size($new_shape) > $arr->size)
            {
                confess(
                    "New shape of arg:$name larger than original. "
                    ."First making a big executor and then down sizing it "
                    ."is more efficient than the reverse."
                    ."If you really want to up size, set \$allow_up_sizing=1 "
                    ."to enable allocation of new arrays."
                ) unless $allow_up_sizing;
                $new_aux_dict{ $name }  = AI::MXNet::NDArray->empty(
                    $new_shape,
                    ctx => $arr->context,
                    dtype => $arr->dtype
                );
            }
            else
            {
                $new_aux_dict{ $name } = $arr->reshape($new_shape);
            }
        }
        else
        {
            confess(
                "Shape of unspecified array aux:$name changed. "
                ."This can cause the new executor to not share parameters "
                ."with the old one. Please check for error in network."
                ."If this is intended, set partial_shaping=True to suppress this warning."
            );
        }
        $i++;
    }
    return $self->_symbol->bind(
                ctx         => $self->_ctx,
                args        => \%new_arg_dict,
                args_grad   => \%new_grad_dict,
                grad_req    => $self->_grad_req,
                aux_states  => \%new_aux_dict,

lib/AI/MXNet/Executor/Group.pm  view on Meta::CPAN

        in the computation graph.
    for_training : Bool
        Indicate whether the executors should be bound for training. When not doing training,
        the memory for gradients will not be allocated.
    inputs_need_grad : Bool
        Indicate whether the gradients for the input data should be computed. This is currently
        not used. It will be useful for implementing composition of modules.
    shared_group : AI::MXNet::DataParallelExecutorGroup
        Default is undef. This is used in bucketing. When not undef, it should be an executor
        group corresponding to a different bucket. In other words, it will correspond to a different
        symbol with the same set of parameters (e.g. unrolled RNNs with different lengths).
        In this case the memory regions of the parameters will be shared.
    logger : Logger
        Default is AI::MXNet::Logging->get_logger.
    fixed_param_names: Maybe[ArrayRef[Str]]
        Indicate parameters to be fixed during training. Parameters in this array ref will not allocate
        space for gradient, nor do gradient calculation.
    grad_req : ArrayRef[GradReq]|HashRef[GradReq]|GradReq
        Requirement for gradient accumulation. Can be 'write', 'add', or 'null'
        (default to 'write').
        Can be specified globally (str) or for each argument (array ref, hash ref).

lib/AI/MXNet/Executor/Group.pm  view on Meta::CPAN

)
{
    return if($data_shapes eq $self->data_shapes and $label_shapes eq $self->label_shapes);
    if (not defined $self->_p->_default_execs)
    {
        $self->_p->_default_execs([@{ $self->_p->execs }]);
    }
    $self->bind_exec($data_shapes, $label_shapes, undef, 1);
}

=head2 set_params

    Assign, i.e. copy parameters to all the executors.

    Parameters
    ----------
    $arg_params : HashRef[AI::MXNet::NDArray]
        A dictionary of name to AI::MXNet::NDArray parameter mapping.
    $aux_params : HashRef[AI::MXNet::NDArray]
        A dictionary of name to AI::MXNet::NDArray auxiliary variable mapping.
=cut

method set_params(HashRef[AI::MXNet::NDArray] $arg_params, HashRef[AI::MXNet::NDArray] $aux_params, Bool $allow_extra=0)
{
    $_->copy_params_from($arg_params, $aux_params, $allow_extra) for @{ $self->_p->execs };
}

=head2 get_params

    Copy data from each executor to arg_params and aux_params.

    Parameters
    ----------

lib/AI/MXNet/Executor/Group.pm  view on Meta::CPAN

}



method get_states($merge_multi_context=1)
{
    assert((not $merge_multi_context), "merge_multi_context=True is not supported for get_states yet.");
    return $self->_p->state_arrays;
}

method set_states($states, $value)
{
    if(defined $states)
    {
        assert((not defined $value), "Only one of states & value can be specified.");
        AI::MXNet::Executor::Group::_load_general($states, $self->_p->state_arrays, [(0)x@{ $states }]);
    }
    else
    {
        assert((defined $value), "At least one of states & value must be specified.");
        assert((not defined $states), "Only one of states & value can be specified.");

lib/AI/MXNet/IO.pm  view on Meta::CPAN

    AI::MXNet::IO - NDArray interface of mxnet.
=cut

# Convert data into canonical form.
method init_data(
    AcceptableInput|HashRef[AcceptableInput]|ArrayRef[AcceptableInput]|Undef $data,
    Undef|Int :$allow_empty=,
    Str :$default_name
)
{
    Carp::confess("data must be defined or allow_empty set to true value")
        if(not defined $data and not $allow_empty);
    $data //= [];

    if(ref($data) and ref($data) ne 'ARRAY' and ref($data) ne 'HASH')
    {
        $data = [$data];
    }

    Carp::confess("data must not be empty or allow_empty set to true value")
        if(ref($data) eq 'ARRAY' and not @{ $data } and not $allow_empty);

    my @ret;
    if(ref($data) eq 'ARRAY')
    {
        if(@{ $data } == 1)
        {
            @ret = ([$default_name, $data->[0]]);
        }
        else

lib/AI/MXNet/IO.pm  view on Meta::CPAN

use overload '<>' =>  sub { shift->next },
             '@{}' => sub { shift->list };

=head1 NAME

    AI::MXNet::DataIter - A parent class for MXNet data iterators.
=cut

has 'batch_size' => (is => 'rw', isa => 'Int', default => 0);

=head2 reset

    Reset the iterator.
=cut

method reset(){}

=head2 list

    Returns remaining iterator items as an array ref.
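
    For example, both of the overloads above can drive iteration (a sketch,
    with $iter being any DataIter):

        while(my $batch = <$iter>) { ... }   # one batch at a time
        my @batches = @{ $iter };            # drains the rest via list()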
=cut

method list()
{
    my @ret;
    while(<$self>)

lib/AI/MXNet/IO.pm  view on Meta::CPAN


    Resize a DataIter to a given number of batches per epoch.
    May produce an incomplete batch in the middle of an epoch due
    to padding from the internal iterator.

    Parameters
    ----------
    data_iter : DataIter
        Internal data iterator.
    size : number of batches per epoch to resize to.
    reset_internal : whether to reset internal iterator on ResizeIter.reset
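
    Example
    -------
    A construction sketch; $base_iter is assumed to be an existing DataIter
    and the full package name AI::MXNet::ResizeIter is inferred from context:

        my $resized = AI::MXNet::ResizeIter->new(
            data_iter => $base_iter,
            size      => 100
        );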
=cut

has 'data_iter'      => (is => 'ro', isa => 'AI::MXNet::DataIter', required => 1);
has 'size'           => (is => 'ro', isa => 'Int', required => 1);
has 'reset_internal' => (is => 'rw', isa => 'Int', default => 1);
has 'cur'            => (is => 'rw', isa => 'Int', default => 0);
has 'current_batch'  => (is => 'rw', isa => 'Maybe[AI::MXNet::DataBatch]');
has [qw/provide_data
    default_bucket_key
    provide_label
    batch_size/]     => (is => 'rw', init_arg => undef);

sub BUILD
{
    my $self = shift;
    $self->provide_data($self->data_iter->provide_data);
    $self->provide_label($self->data_iter->provide_label);
    $self->batch_size($self->data_iter->batch_size);
    if($self->data_iter->can('default_bucket_key'))
    {
        $self->default_bucket_key($self->data_iter->default_bucket_key);
    }
}

method reset()
{
    $self->cur(0);
    if($self->reset_internal)
    {
        $self->data_iter->reset;
    }
}

method iter_next()
{
    return 0 if($self->cur == $self->size);
    $self->current_batch($self->data_iter->next);
    if(not defined $self->current_batch)
    {
        $self->data_iter->reset;
        $self->current_batch($self->data_iter->next);
    }
    $self->cur($self->cur + 1);
    return 1;
}

method get_data()
{
    return $self->current_batch->data;
}

lib/AI/MXNet/IO.pm  view on Meta::CPAN

method provide_label()
{
    return [map {
        my ($k, $v) = @{ $_ };
        my $shape = $v->shape;
        $shape->[0] = $self->batch_size;
        AI::MXNet::DataDesc->new(name => $k, shape => $shape, dtype => $v->dtype)
    } @{ $self->label }];
}

# Ignore roll over data and set to start
method hard_reset()
{
    $self->cursor(-$self->batch_size);
}

method reset()
{
    if($self->last_batch_handle eq 'roll_over' and $self->cursor > $self->num_data)
    {
        $self->cursor(-$self->batch_size + ($self->cursor%$self->num_data)%$self->batch_size);
    }
    else
    {
        $self->cursor(-$self->batch_size);
    }
}
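## Worked example of the roll_over arithmetic above (batch_size 32,
## num_data 100): if an epoch ends with the cursor at 104, the rolled-over
## batch has already consumed samples 0..3 of the new epoch, so reset puts
## the cursor at -32 + (104 % 100) % 32 = -28 and the first next() call
## starts at sample 4.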

lib/AI/MXNet/IO.pm  view on Meta::CPAN

    }
    else
    {
        return undef;
    }
}

# Load data from underlying arrays, internal use only
method _getdata($data_source)
{
    confess("DataIter needs reset.") unless $self->cursor < $self->num_data;
    if(($self->cursor + $self->batch_size) <= $self->num_data)
    {
        return [
            map {
                $_->[1]->slice([$self->cursor,$self->cursor+$self->batch_size-1])
            } @{ $data_source }
        ];
    }
    else
    {

lib/AI/MXNet/IO.pm  view on Meta::CPAN

    This can be used to test the speed of network without taking
    the loading delay into account.
=cut

method debug_skip_load()
{
    $self->_debug_skip_load(1);
    AI::MXNet::Logging->info('Set debug_skip_load to be true, will simply return first batch');
}

method reset()
{
    $self->_debug_at_begin(1);
    $self->first_batch(undef);
    check_call(AI::MXNetCAPI::DataIterBeforeFirst($self->handle));
}

method next()
{
    if($self->_debug_skip_load and not $self->_debug_at_begin)
    {

lib/AI/MXNet/Image.pm  view on Meta::CPAN

    }
    if(defined $self->aug_list or defined $self->kwargs)
    {
        $self->aug_list(AI::MXNet::Image->CreateAugmenter(data_shape => $self->data_shape, %{ $self->kwargs//{} }));
    }
    else
    {
        $self->aug_list([]);
    }
    $self->cur(0);
    $self->reset();
}

method reset()
{
    if($self->shuffle)
    {
        @{ $self->seq } = List::Util::shuffle(@{ $self->seq });
    }
    if(defined $self->imgrec)
    {
        $self->imgrec->reset;
    }
    $self->cur(0);
}

method next_sample()
{
    if(defined $self->seq)
    {
        return undef if($self->cur >= @{ $self->seq });
        my $idx = $self->seq->[$self->cur];

lib/AI/MXNet/Initializer.pm  view on Meta::CPAN


=head1 NAME

    AI::MXNet::Initializer - Base class for all Initializers

=head2 register

    Register an initializer class to the AI::MXNet::Initializer factory.
=cut

=head2 set_verbosity

    Switch on/off verbose mode

    Parameters
    ----------
    $verbose : bool
        switch on/off verbose mode
    $print_func : CodeRef
        A function that computes statistics of initialized arrays.
        Takes an AI::MXNet::NDArray and returns a scalar. Defaults to mean
        absolute value |x|/size(x)
=cut

method set_verbosity(Bool $verbose=0, CodeRef $print_func=)
{
    $self->_verbose($verbose);
    $self->_print_func($print_func) if defined $print_func;
}
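## Sketch: switch verbose mode on with a custom statistic, here the mean
## absolute value (assumes the NDArray abs, sum, asscalar and size methods):
##     $init->set_verbosity(1, sub { $_[0]->abs->sum->asscalar / $_[0]->size });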

method _verbose_print($desc, $init, $arr)
{
    if($self->_verbose and defined $self->_print_func)
    {
        AI::MXNet::Logging->info('Initialized %s as %s: %s', $desc, $init, $self->_print_func->($arr));

lib/AI/MXNet/Initializer.pm  view on Meta::CPAN

{
    confess("Virtual method, subclass must override it");
}

method _init_default($name, $arr)
{
    confess(
        "Unknown initialization pattern for $name. "
        .'Default initialization is now limited to '
        .'"weight", "bias", "gamma" (1.0), and "beta" (0.0).'
        .'Please use mx.sym.Variable(init=mx.init.*) to set initialization pattern'
    );
}

=head1 NAME

    AI::MXNet::Load  - Initialize by loading a pretrained param from a hash ref.
=cut

=head2 new

lib/AI/MXNet/Initializer.pm  view on Meta::CPAN

package AI::MXNet::LSTMBias;

=head1 NAME

    AI::MXNet::LSTMBias - Custom initializer for LSTM cells.
=cut

=head1 DESCRIPTION

    Initializes all biases of an LSTMCell to 0.0 except for
    the forget gate's bias that is set to a custom value.

    Parameters
    ----------
    forget_bias : float, a bias for the forget gate.
    Jozefowicz et al. (2015) recommend setting this to 1.0.
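
    Example
    -------
    A sketch following the recommendation above:

        my $init = AI::MXNet::LSTMBias->new(forget_bias => 1.0);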
=cut

use Mouse;
extends 'AI::MXNet::Initializer';
has 'forget_bias' => (is => 'ro', isa => 'Num', required => 1);

method _init_weight(Str $name, AI::MXNet::NDArray $arr)
{
    $arr .= 0;
    # in the case of LSTMCell the forget gate is the second

lib/AI/MXNet/KVStore.pm  view on Meta::CPAN

)
{
    my ($keys, $vals) = _key_value($key, $out);
    check_call(
        AI::MXNetCAPI::KVStorePullEx(
            $self->handle, scalar(@{ $keys }), $keys, $vals, $priority
        )
    );
}

=head2  set_optimizer

    Register an optimizer to the store

    If there are multiple machines, this process (should be a worker node)
    will pack this optimizer and send it to all servers. It returns after
    this action is done.

    Parameters
    ----------
    optimizer : Optimizer
        the optimizer
=cut

method set_optimizer(AI::MXNet::Optimizer $optimizer)
{
    my $is_worker = check_call(AI::MXNetCAPI::KVStoreIsWorkerNode());
    if($self->type eq 'dist' and $is_worker)
    {
        my $optim_str = MIME::Base64::encode_base64(Storable::freeze($optimizer), "");
        $self->_send_command_to_servers(0, $optim_str);
    }
    else
    {
        $self->_updater(AI::MXNet::Optimizer->get_updater($optimizer));
        $self->_set_updater(sub { &{$self->_updater}(@_) });
    }
}

=head2  type

    Get the type of this kvstore

    Returns
    -------
    type : str

lib/AI/MXNet/KVStore.pm  view on Meta::CPAN

=cut

method load_optimizer_states(Str $fname)
{
    confess("Cannot save states for distributed training")
        unless defined $self->_updater;
    open(F, "<:raw", "$fname") or confess("can't open $fname for reading: $!");
    my $data;
    { local($/) = undef; $data = <F>; }
    close(F);
    $self->_updater->set_states($data);
}

=head2 _set_updater

    Set a push updater into the store.

    This function only changes the local store. Use set_optimizer for
    multi-machines.

    Parameters
    ----------
    updater : function
        the updater function

    Examples
    --------
    >>> my $update = sub {
    ...     my ($key, $input, $stored) = @_;
    ...     print "update on key: $key\n";
    ...     $stored += $input * 2;
    ... };
    >>> $kv->_set_updater($update)
    >>> $kv->pull(3, out => $a)
    >>> print $a->aspdl()
    [[ 4.  4.  4.]
     [ 4.  4.  4.]]
    >>> $kv->push(3, mx->nd->ones($shape))
    update on key: 3
    >>> $kv->pull(3, out => $a)
    >>> print $a->aspdl()
    [[ 6.  6.  6.]
     [ 6.  6.  6.]]
=cut

method _set_updater(CodeRef $updater_func)
{
    $self->_updater_func(
        sub {
            my ($index, $input_handle, $storage_handle) = @_;
            $updater_func->(
                $index,
                AI::MXNet::NDArray->new(handle => $input_handle),
                AI::MXNet::NDArray->new(handle => $storage_handle)
            );
        }

lib/AI/MXNet/KVStoreServer.pm  view on Meta::CPAN

    return  sub { 
        my ($cmd_id, $cmd_body) = @_;
        if (not $self->init_logging)
        {
            ## TODO write logging
            $self->init_logging(1);
        }
        if($cmd_id == 0)
        {
            my $optimizer = Storable::thaw(MIME::Base64::decode_base64($cmd_body));
            $self->kvstore->set_optimizer($optimizer);
        }
        else
        {
            my $rank = $self->kvstore->rank;
            print("server $rank, unknown command ($cmd_id, $cmd_body)\n");
        }
    }
}

=head2 run

lib/AI/MXNet/Metric.pm  view on Meta::CPAN

                [shift->get_name_value()]
            )->Purity(1)->Deepcopy(1)->Terse(1)->Dump
},  fallback => 1;
has 'name'       => (is => 'rw', isa => 'Str');
has 'num'        => (is => 'rw', isa => 'Int');
has 'num_inst'   => (is => 'rw', isa => 'Maybe[Int|ArrayRef[Int]]');
has 'sum_metric' => (is => 'rw', isa => 'Maybe[Num|ArrayRef[Num]]');

sub BUILD
{
    shift->reset;
}

method update($label, $pred)
{
    confess('NotImplemented');
}

method reset()
{
    if(not defined $self->num)
    {
        $self->num_inst(0);
        $self->sum_metric(0);
    }
    else
    {
        $self->num_inst([(0) x $self->num]);
        $self->sum_metric([(0) x $self->num]);

lib/AI/MXNet/Metric.pm  view on Meta::CPAN

}

method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
{
    for my $metric (@{ $self->metrics })
    {
        $metric->update($labels, $preds);
    }
}

method reset()
{
    for my $metric (@{ $self->metrics })
    {
        $metric->reset;
    }
}

method get()
{
    my $names = [];
    my $results = [];
    for my $metric (@{ $self->metrics })
    {
        my ($name, $result) = $metric->get;

lib/AI/MXNet/Module.pm  view on Meta::CPAN

{
    if(defined $symbol)
    {
        $symbol->save("$prefix-symbol.json");
    }
    my $param_name = sprintf('%s-%04d.params', $prefix, $epoch);
    $self->save_params($param_name, $arg_params, $aux_params);
    AI::MXNet::Logging->info('Saved checkpoint to "%s"', $param_name);
}

# Internal function to reset binded state.
method _reset_bind()
{
    $self->binded(0);
    $self->_p->_exec_group(undef);
    $self->_p->_data_shapes(undef);
    $self->_p->_label_shapes(undef);
}

method data_names()
{
    return $self->_p->_data_names;

lib/AI/MXNet/Module.pm  view on Meta::CPAN

                name  => $name,
                ($attrs->{$name} ? (attrs => $attrs->{$name}) : ())
            ),
            $arr, $aux_params
        );
    }
    $self->params_initialized(1);
    $self->_p->_params_dirty(0);

    # copy the initialized parameters to devices
    $self->_p->_exec_group->set_params($self->_p->_arg_params, $self->_p->_aux_params, $allow_extra);
}

method set_params(
    HashRef[AI::MXNet::NDArray]  $arg_params,
    HashRef[AI::MXNet::NDArray]  $aux_params,
    Bool                        :$allow_missing=0,
    Bool                        :$force_init=1,
    Bool                        :$allow_extra=0
)
{
    if(not $allow_missing)
    {
        $self->init_params(

lib/AI/MXNet/Module.pm  view on Meta::CPAN

            allow_missing => $allow_missing, force_init => $force_init,
            allow_extra   => $allow_extra
        );
        return;
    }

    if($self->params_initialized and not $force_init)
    {
        AI::MXNet::Logging->warning(
            "Parameters already initialized and force_init=False. "
            ."set_params call ignored."
        );
        return;
    }
    $self->_p->_exec_group->set_params($arg_params, $aux_params, $allow_extra);
    $self->_p->_params_dirty(1);
    $self->params_initialized(1);
}

=head2 bind

    Bind the symbols to construct executors. This is necessary before one
    can perform computation with the module.

    Parameters

lib/AI/MXNet/Module.pm  view on Meta::CPAN

    :$inputs_need_grad : bool
        Default is 0. Whether the gradients to the input data need to be computed.
        Typically this is not needed. But this might be needed when implementing composition
        of modules.
    :$force_rebind : bool
        Default is 0. This function does nothing if the executors are already
        bound. But with this set to 1, the executors will be forced to rebind.
    :$shared_module : Module
        Default is undef. This is used in bucketing. When not undef, the shared module
        essentially corresponds to a different bucket -- a module with different symbol
        but with the same sets of parameters (e.g. unrolled RNNs with different lengths).
=cut

method bind(
    ArrayRef[AI::MXNet::DataDesc|NameShape]        :$data_shapes,
    Maybe[ArrayRef[AI::MXNet::DataDesc|NameShape]] :$label_shapes=,
    Bool                                           :$for_training=1,
    Bool                                           :$inputs_need_grad=0,
    Bool                                           :$force_rebind=0,
    Maybe[AI::MXNet::Module]                       :$shared_module=,
    GradReq|HashRef[GradReq]|ArrayRef[GradReq]     :$grad_req='write',
    Maybe[ArrayRef[Str]]                           :$state_names=$self->_p->_state_names
)
{
    # force rebinding is typically used when one want to switch from
    # training to prediction phase.
    if($force_rebind)
    {
        $self->_reset_bind();
    }
    if($self->binded)
    {
        $self->logger->warning('Already binded, ignoring bind()');
        return;
    }
    $self->for_training($for_training);
    $self->inputs_need_grad($inputs_need_grad);
    $self->binded(1);
    $self->_p->_grad_req($grad_req);

lib/AI/MXNet/Module.pm  view on Meta::CPAN

    if($shared_module)
    {
        $self->params_initialized(1);
        $self->_p->_arg_params($shared_module->_p->_arg_params);
        $self->_p->_aux_params($shared_module->_p->_aux_params);
    }
    elsif($self->params_initialized)
    {
        # if the parameters are already initialized, we are re-binding
        # so automatically copy the already initialized params
        $self->_p->_exec_group->set_params($self->_p->_arg_params, $self->_p->_aux_params);
    }
    else
    {
        assert(not defined $self->_p->_arg_params and not defined $self->_p->_aux_params);
        my @param_arrays = (
            map { AI::MXNet::NDArray->zeros($_->[0]->shape, dtype => $_->[0]->dtype) }
            @{ $self->_p->_exec_group->_p->param_arrays }
        );
        my %arg_params;
        @arg_params{ @{ $self->_p->_param_names } } = @param_arrays;

lib/AI/MXNet/Module.pm  view on Meta::CPAN

        _initialize_kvstore(
            kvstore           => $kvstore,
            param_arrays      => $self->_p->_exec_group->_p->param_arrays,
            arg_params        => $self->_p->_arg_params,
            param_names       => $self->_p->_param_names,
            update_on_kvstore => $update_on_kvstore
        );
    }
    if($update_on_kvstore)
    {
        $kvstore->set_optimizer($self->_p->_optimizer);
    }
    else
    {
        $self->_p->_updater(AI::MXNet::Optimizer->get_updater($optimizer));
    }
    $self->optimizer_initialized(1);

    if($self->_p->_preload_opt_states)
    {
        $self->load_optimizer_states($self->_p->_preload_opt_states);

lib/AI/MXNet/Module.pm  view on Meta::CPAN

    assert($self->binded and $self->params_initialized and $self->inputs_need_grad);
    return $self->_p->_exec_group->get_input_grads($merge_multi_context);
}

method get_states(Bool $merge_multi_context=1)
{
    assert($self->binded and $self->params_initialized);
    return $self->_p->_exec_group->get_states($merge_multi_context);
}

method set_states(:$states=, :$value=)
{
    assert($self->binded and $self->params_initialized);
    return $self->_p->_exec_group->set_states($states, $value);
}

method update_metric(
    AI::MXNet::EvalMetric $eval_metric,
    ArrayRef[AI::MXNet::NDArray] $labels
)
{
    $self->_p->_exec_group->update_metric($eval_metric, $labels);
}

lib/AI/MXNet/Module.pm  view on Meta::CPAN

    if($self->_p->_update_on_kvstore)
    {
        $self->_p->_kvstore->load_optimizer_states($fname);
    }
    else
    {
        open(F, "<:raw", "$fname") or confess("can't open $fname for reading: $!");
        my $data;
        { local($/) = undef; $data = <F>; }
        close(F);
        $self->_p->_updater->set_states($data);
    }
}

method install_monitor(AI::MXNet::Monitor $mon)
{
    assert($self->binded);
    $self->_p->_exec_group->install_monitor($mon);
}

method _updater()

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN


    - parameters (for modules with parameters)
        - get_params(): return an array ($arg_params, $aux_params). Each of those
        is a hash ref of name to NDArray mapping. Those NDArrays always live on the
        CPU, while the actual parameters used for computing might live on other
        devices (GPUs); this function retrieves (a copy of) the latest parameters.
        Therefore, modifying the returned hash refs will not change the parameters
        used for computing; use set_params for that.
        - set_params($arg_params, $aux_params): assign parameters to the devices
        doing the computation.
        - init_params(...): a more flexible interface to assign or initialize the parameters.

    - setup
        - bind(): prepare environment for computation.
        - init_optimizer(): install optimizer for parameter updating.

    - computation
        - forward(data_batch): forward operation.
        - backward(out_grads=): backward operation.
        - update(): update parameters according to installed optimizer.
        - get_outputs(): get outputs of the previous forward operation.
        - get_input_grads(): get the gradients with respect to the inputs computed
        in the previous backward operation.

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN


    - other properties (mostly for backward compatability)
        - symbol: the underlying symbolic graph for this module (if any)
        This property is not necessarily constant. For example, for AI::MXNet::Module::Bucketing,
        this property is simply the *current* symbol being used. For other modules,
        this value might not be well defined.

    When those intermediate-level APIs are implemented properly, the following
    high-level API becomes automatically available for a module:

        - fit: train the module parameters on a data set
        - predict: run prediction on a data set and collect outputs
        - score: run prediction on a data set and evaluate performance
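
    A compact lifecycle sketch tying these together ($net and $train_iter are
    assumed; mx->mod->Module mirrors the bundled examples):

        my $mod = mx->mod->Module(symbol => $net);
        $mod->bind(
            data_shapes  => $train_iter->provide_data,
            label_shapes => $train_iter->provide_label
        );
        $mod->fit($train_iter, num_epoch => 10);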
=cut

has 'logger'            => (is => 'rw', default => sub { AI::MXNet::Logging->get_logger });
has '_symbol'           => (is => 'rw', init_arg => 'symbol', isa => 'AI::MXNet::Symbol');
has [
    qw/binded for_training inputs_need_grad
    params_initialized optimizer_initialized/
]                       => (is => 'rw', isa => 'Bool', init_arg => undef, default => 0);

################################################################################

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN


    Parameters
    ----------
    $eval_data   : AI::MXNet::DataIter
    $eval_metric : AI::MXNet::EvalMetric
    :$num_batch= : Maybe[Int]
        Number of batches to run. Default is undef, indicating run until the AI::MXNet::DataIter
        finishes.
    :$batch_end_callback= : Maybe[Callback]
        Can also be an array ref of functions.
    :$reset=1 : Bool
        Default is 1, indicating whether we should reset $eval_data before
        starting evaluation.
    $epoch=0 : Int
        Default is 0. For compatibility, this will be passed to callbacks (if any). During
        training, this will correspond to the training epoch number.
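
    Example
    -------
    A sketch ($val_iter is assumed); a metric name string is also accepted
    and is resolved via AI::MXNet::Metric->create:

        my $res = $module->score($val_iter, 'acc');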
=cut

method score(
    AI::MXNet::DataIter                 $eval_data,
    EvalMetric                          $eval_metric,
    Maybe[Int]                          :$num_batch=,
    Maybe[Callback]|ArrayRef[Callback]  :$batch_end_callback=,
    Maybe[Callback]|ArrayRef[Callback]  :$score_end_callback=,
    Bool                                :$reset=1,
    Int                                 :$epoch=0
)
{
    assert($self->binded and $self->params_initialized);
    $eval_data->reset if $reset;
    if(not blessed $eval_metric or not $eval_metric->isa('AI::MXNet::EvalMetric'))
    {
        $eval_metric = AI::MXNet::Metric->create($eval_metric);
    }

    $eval_metric->reset();
    my $actual_num_batch = 0;
    my $nbatch = 0;
    while(my $eval_batch = <$eval_data>)
    {
        last if (defined $num_batch and $nbatch == $num_batch);
        $self->forward($eval_batch, is_train => 0);
        $self->update_metric($eval_metric, $eval_batch->label);

        if (defined $batch_end_callback)
        {

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN


=head2  iter_predict

    Iterate over predictions.

    Parameters
    ----------
    $eval_data : AI::MXNet::DataIter
    :$num_batch= : Maybe[Int]
        Default is undef, indicating running all the batches in the data iterator.
    :$reset=1 : bool
        Default is 1, indicating whether we should reset the data iter before
        starting prediction.
=cut

method iter_predict(AI::MXNet::DataIter $eval_data, Maybe[Int] :$num_batch=, Bool :$reset=1)
{
    assert($self->binded and $self->params_initialized);
    if($reset)
    {
        $eval_data->reset;
    }
    my $nbatch = 0;
    my @out;
    while(my $eval_batch = <$eval_data>)
    {
        last if defined $num_batch and $nbatch == $num_batch;
        $self->forward($eval_batch, is_train => 0);
        my $pad = $eval_batch->pad;
        my $outputs = [
            map { $_->slice([0, $_->shape->[0] - ($pad//0) - 1]) } @{ $self->get_outputs() }

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN


    Run prediction and collect the outputs.

    Parameters
    ----------
    $eval_data  : AI::MXNet::DataIter
    :$num_batch= : Maybe[Int]
        Default is undef, indicating running all the batches in the data iterator.
    :$merge_batches=1 : Bool
        Default is 1.
    :$reset=1 : Bool
        Default is 1, indicating whether we should reset the data iter before
        starting prediction.
    :$always_output_list=0 : Bool
        Default is 0, see the doc for return values.

    Returns
    -------
    When $merge_batches is 1 (by default), the return value will be an array ref
    [$out1, $out2, $out3] where each element is the concatenation of the outputs for
    all the mini-batches. If $always_output_list is also 0 (the default),
    then in the case of a single output, $out1 is returned instead of [$out1].

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN

    [[$out1_batch1, $out2_batch1], [$out1_batch2], ...]. This mode is useful because
    in some cases (e.g. bucketing), the module does not necessarily produce the same
    number of outputs.

    The objects in the results are AI::MXNet::NDArrays. If you need to work with pdl arrays,
    just call ->aspdl() on each AI::MXNet::NDArray.
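
    Example
    -------
    A sketch ($test_iter is assumed, with a single-output module):

        my $out = $module->predict($test_iter);   # one merged NDArray
        print $out->aspdl;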
=cut

method predict(
    AI::MXNet::DataIter $eval_data,
    Maybe[Int] :$num_batch=, Bool :$merge_batches=1, Bool :$reset=1, Bool :$always_output_list=0
)
{
    assert($self->binded and $self->params_initialized);
    $eval_data->reset() if $reset;

    my @output_list;
    my $nbatch = 0;
    while(my $eval_batch = <$eval_data>)
    {
        last if defined $num_batch and $nbatch == $num_batch;
        $self->forward($eval_batch, is_train => 0);
        my $pad = $eval_batch->pad;
        my $outputs = [map { $_->slice([0, $_->shape->[0]-($pad//0)-1])->copy } @{ $self->get_outputs }];
        push @output_list, $outputs;

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN

}

=head2 fit

    Train the module parameters.

    Parameters
    ----------
    $train_data : AI::MXNet::DataIter
    :$eval_data= : Maybe[AI::MXNet::DataIter]
        If not undef, it will be used as a validation set to evaluate the performance
        after each epoch.
    :$eval_metric='acc' : str or AI::MXNet::EvalMetric subclass object.
        Default is 'accuracy'. The performance measure used to display during training.
        Other possible predefined metrics are:
        'ce' (CrossEntropy), 'f1', 'mae', 'mse', 'rmse', 'top_k_accuracy'
    :$epoch_end_callback= : Maybe[Callback]|ArrayRef[Callback] function or array ref of functions.
        Each callback will be called with the current $epoch, $symbol, $arg_params
        and $aux_params.
    :$batch_end_callback= : Maybe[Callback]|ArrayRef[Callback] function or array ref of functions.
        Each callback will be called with a AI::MXNet::BatchEndParam.
    :$kvstore='local' : str or AI::MXNet::KVStore
        Default is 'local'.
    :$optimizer : str or AI::MXNet::Optimizer
        Default is 'sgd'
    :$optimizer_params : hash ref
        Default { learning_rate => 0.01 }.
        The parameters for the optimizer constructor.
    :$eval_end_callback= : Maybe[Callback]|ArrayRef[Callback] function or array ref of functions
        These will be called at the end of each full evaluation, with the metrics over
        the entire evaluation set.
    :$eval_batch_end_callback : Maybe[Callback]|ArrayRef[Callback] function or array ref of functions
        These will be called at the end of each minibatch during evaluation.
    :$initializer= : Initializer
        Will be called to initialize the module parameters if not already initialized.
    :$arg_params= : hash ref
        Default undef, if not undef, must be an existing parameters from a trained
        model or loaded from a checkpoint (previously saved model). In this case,
        the value here will be used to initialize the module parameters, unless they
        are already initialized by the user via a call to init_params or fit.
        $arg_params have higher priority than the $initializer.

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN

    }
    $eval_metric = AI::MXNet::Metric->create($eval_metric)
        unless blessed $eval_metric;

    ################################################################################
    # training loop
    ################################################################################
    for my $epoch ($begin_epoch..$num_epoch-1)
    {
        my $tic = time;
        $eval_metric->reset;
        my $nbatch = 0;
        my $end_of_batch = 0;
        my $next_data_batch = <$train_data>;
        while(not $end_of_batch)
        {
            my $data_batch = $next_data_batch;
            $monitor->tic if $monitor;
            $self->forward_backward($data_batch);
            $self->update;
            $next_data_batch = <$train_data>;

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN

        my $name_value = $eval_metric->get_name_value;
        while(my ($name, $val) = each %{ $name_value })
        {
            $self->logger->info('Epoch[%d] Train-%s=%f', $epoch, $name, $val);
        }
        my $toc = time;
        $self->logger->info('Epoch[%d] Time cost=%.3f', $epoch, ($toc-$tic));

        # sync aux params across devices
        my ($arg_params, $aux_params) = $self->get_params;
        $self->set_params($arg_params, $aux_params);

        if($epoch_end_callback)
        {
            for my $callback (@{ _as_list($epoch_end_callback) })
            {
                &{$callback}($epoch, $self->get_symbol, $arg_params, $aux_params);
            }
        }
        #----------------------------------------
        # evaluation on validation set
        if(defined $eval_data)
        {
            my $res = $self->score(
                $eval_data,
                $validation_metric,
                score_end_callback => $eval_end_callback,
                batch_end_callback => $eval_batch_end_callback,
                epoch              => $epoch
            );
            #TODO: pull this into default
            while(my ($name, $val) = each %{ $res })
            {
                $self->logger->info('Epoch[%d] Validation-%s=%f', $epoch, $name, $val);
            }
        }
        # end of 1 epoch, reset the data-iter for another epoch
        $train_data->reset;
    }
}

################################################################################
# Symbol information
################################################################################

=head2 get_symbol

    The symbol used by this module.

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN

    Maybe[HashRef[AI::MXNet::NDArray]] :$arg_params=,
    Maybe[HashRef[AI::MXNet::NDArray]] :$aux_params=,
    Bool                               :$allow_missing=0,
    Bool                               :$force_init=0,
    Bool                               :$allow_extra=0
)
{
    confess("NotImplemented");
}

=head2 set_params

    Assign parameter and aux state values.

    Parameters
    ----------
    $arg_params= : Maybe[HashRef[AI::MXNet::NDArray]]
        Hash ref of name to value (NDArray) mapping.
    $aux_params= : Maybe[HashRef[AI::MXNet::NDArray]]
        Hash Ref of name to value (`NDArray`) mapping.
    :$allow_missing=0 : Bool
        If true, params could contain missing values, and the initializer will be
        called to fill those missing params.
    :$force_init=0 : Bool
        If true, will force re-initialize even if already initialized.
    :$allow_extra=0 : Bool
        Whether allow extra parameters that are not needed by symbol.
        If true, no error will be thrown when arg_params or aux_params
        contain extra parameters that are not needed by the executor.
=cut

method set_params(
    Maybe[HashRef[AI::MXNet::NDArray]]  $arg_params=,
    Maybe[HashRef[AI::MXNet::NDArray]]  $aux_params=,
    Bool                               :$allow_missing=0,
    Bool                               :$force_init=0,
    Bool                               :$allow_extra=0
)
{
    $self->init_params(
        initializer   => undef,
        arg_params    => $arg_params,

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN

        }
        elsif($arg_type eq 'aux')
        {
            $aux_params{ $name } = $v;
        }
        else
        {
            confess("Invalid param file $fname");
        }
    }
    $self->set_params(\%arg_params, \%aux_params);
}

=head2 get_states

    The states from all devices

    Parameters
    ----------
    $merge_multi_context=1 : Bool
        Default is true (1). In the case when data-parallelism is used, the states

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN

    elements are AI::MXNet::NDArray.
=cut

method get_states(Bool $merge_multi_context=1)
{
    assert($self->binded and $self->params_initialized);
    assert(not $merge_multi_context);
    return [];
}

=head2 set_states

    Set value for states. You can specify either $states or $value, not both.

    Parameters
    ----------
    $states= : Maybe[ArrayRef[ArrayRef[AI::MXNet::NDArray]]]
        source states arrays formatted like [[$state1_dev1, $state1_dev2],
            [$state2_dev1, $state2_dev2]].
    $value= : Maybe[Num]
        a single scalar value for all state arrays.
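
    Example
    -------
    A sketch of the calling convention in this base class; a single scalar
    is applied to all state arrays:

        $module->set_states(undef, 0);   # no explicit state arrays, scalar 0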
=cut

method set_states(Maybe[ArrayRef[ArrayRef[AI::MXNet::NDArray]]] $states=, Maybe[Num] $value=)
{
    assert($self->binded and $self->params_initialized);
    assert(not $states and not $value);
}


=head2 install_monitor

    Install monitor on all executors

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN

    $labels : ArrayRef[AI::MXNet::NDArray]
        Typically $data_batch->label.
=cut

method update_metric(EvalMetric $eval_metric, ArrayRef[AI::MXNet::NDArray] $labels)
{
    confess("NotImplemented")
}

################################################################################
# module setup
################################################################################

=head2 bind

    Binds the symbols in order to construct the executors. This is necessary
    before the computations can be performed.

    Parameters
    ----------
    $data_shapes : ArrayRef[AI::MXNet::DataDesc]

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN

    :$inputs_need_grad=0 : Bool
        Default is 0. Whether the gradients to the input data need to be computed.
        Typically this is not needed. But this might be needed when implementing composition
        of modules.
    :$force_rebind=0 : Bool
        Default is 0. This function does nothing if the executors are already
        bound. But with this set to 1, the executors will be forced to rebind.
    :$shared_module= : A subclass of AI::MXNet::Module::Base
        Default is undef. This is used in bucketing. When not undef, the shared module
        essentially corresponds to a different bucket -- a module with different symbol
        but with the same sets of parameters (e.g. unrolled RNNs with different lengths).
    :$grad_req='write' : Str|ArrayRef[Str]|HashRef[Str]
        Requirement for gradient accumulation. Can be 'write', 'add', or 'null'
        (defaults to 'write').
        Can be specified globally (str) or for each argument (array ref, hash ref).
=cut

method bind(
    ArrayRef[AI::MXNet::DataDesc]         $data_shapes,
    Maybe[ArrayRef[AI::MXNet::DataDesc]] :$label_shapes=,
    Bool                                 :$for_training=1,

lib/AI/MXNet/Module/Bucketing.pm  view on Meta::CPAN

    }

    my $sym_gen = sub {
        my $seq_len = shift;
        my $data  = mx->sym->Variable('data');
        my $label = mx->sym->Variable('softmax_label');
        my $embed = mx->sym->Embedding(
            data => $data, input_dim => scalar(keys %$vocabulary),
            output_dim => $num_embed, name => 'embed'
        );
        $stack->reset;
        my ($outputs, $states) = $stack->unroll($seq_len, inputs => $embed, merge_outputs => 1);
        my $pred = mx->sym->Reshape($outputs, shape => [-1, $num_hidden]);
        $pred    = mx->sym->FullyConnected(data => $pred, num_hidden => scalar(keys %$vocabulary), name => 'pred');
        $label   = mx->sym->Reshape($label, shape => [-1]);
        $pred    = mx->sym->SoftmaxOutput(data => $pred, label => $label, name => 'softmax');
        return ($pred, ['data'], ['softmax_label']);
    };

    my $contexts;
    if(defined $gpus)

lib/AI/MXNet/Module/Bucketing.pm  view on Meta::CPAN

        The key for the default bucket.
    $logger : Logger
    $context : AI::MXNet::Context or array ref of AI::MXNet::Context objects
        Default is cpu(0)
    $work_load_list : array ref of Num
        Default is undef, indicating uniform workload.
    $fixed_param_names: arrayref of str
        Default is undef, indicating no network parameters are fixed.
    $state_names : arrayref of str
        States are similar to data and label, but are not provided by the data
        iterator. Instead they are initialized to 0 and can be set with set_states().
=cut

extends 'AI::MXNet::Module::Base';
has '_sym_gen'            => (is => 'ro', init_arg => 'sym_gen', required => 1);
has '_default_bucket_key' => (is => 'rw', init_arg => 'default_bucket_key', required => 1);
has '_context'            => (
    is => 'ro', isa => 'AI::MXNet::Context|ArrayRef[AI::MXNet::Context]',
    lazy => 1, default => sub { AI::MXNet::Context->cpu },
    init_arg => 'context'
);

lib/AI/MXNet/Module/Bucketing.pm  view on Meta::CPAN

    $self->_fixed_param_names([]) unless defined $original_params->{fixed_param_names};
    $self->_state_names([]) unless defined $original_params->{state_names};
    $self->_params_dirty(0);
    my ($symbol, $data_names, $label_names) = &{$self->_sym_gen}($self->_default_bucket_key);
    $self->_check_input_names($symbol, $data_names//[], "data", 1);
    $self->_check_input_names($symbol, $label_names//[], "label", 0);
    $self->_check_input_names($symbol, $self->_state_names, "state", 1);
    $self->_check_input_names($symbol, $self->_fixed_param_names, "fixed_param", 1);
}

method _reset_bind()
{
    $self->binded(0);
    $self->_buckets({});
    $self->_curr_module(undef);
    $self->_curr_bucket_key(undef);
}

method data_names()
{
    if($self->binded)

lib/AI/MXNet/Module/Bucketing.pm  view on Meta::CPAN


method get_params()
{
    assert($self->binded and $self->params_initialized);
    $self->_curr_module->_p->_params_dirty($self->_params_dirty);
    my ($arg_params, $aux_params) = $self->_curr_module->get_params;
    $self->_params_dirty(0);
    return ($arg_params, $aux_params);
}

method set_params(
    HashRef[AI::MXNet::NDArray] $arg_params,
    HashRef[AI::MXNet::NDArray] $aux_params,
    Bool                        $allow_missing=0,
    Bool                        $force_init=1,
    Bool                        $allow_extra=0
)
{
    if(not $allow_missing)
    {
        $self->init_params(
            arg_params    => $arg_params,    aux_params => $aux_params,
            allow_missing => $allow_missing, force_init => $force_init,
            allow_extra   => $allow_extra
        );
        return;
    }
    if($self->params_initialized and not $force_init)
    {
        AI::MXNet::Logging->warning(
            "Parameters already initialized and force_init=False. "
            ."set_params call ignored."
        );
        return;
    }
    $self->_curr_module->set_params(
        $arg_params, $aux_params,
        allow_missing => $allow_missing,
        force_init    => $force_init,
        allow_extra   => $allow_extra
    );
    # because we didn't update self._arg_params, they are dirty now.
    $self->_params_dirty(1);
    $self->params_initialized(1);
}
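
## Usage sketch: copy trained parameters from one module into another.
## $trained and $fresh are hypothetical, already-bound modules.
##   my ($arg_params, $aux_params) = $trained->get_params;
##   $fresh->set_params($arg_params, $aux_params);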

lib/AI/MXNet/Module/Bucketing.pm  view on Meta::CPAN

    $self->_params_dirty(0);
    $self->params_initialized(1);
}

method get_states(Bool $merge_multi_context=1)
{
    assert($self->binded and $self->params_initialized);
    return $self->_curr_module->get_states($merge_multi_context);
}

method set_states(:$states=, :$value=)
{
    assert($self->binded and $self->params_initialized);
    $self->_curr_module->set_states(states => $states, value => $value);
}

=head2 bind

    Binding an AI::MXNet::Module::Bucketing means setting up the buckets and
    binding the executor for the default bucket key. Executors corresponding to other
    keys are bound afterwards with switch_bucket.

    Parameters
    ----------
    :$data_shapes : ArrayRef[AI::MXNet::DataDesc|NameShape]
        This should correspond to the symbol for the default bucket.
    :$label_shapes= : Maybe[ArrayRef[AI::MXNet::DataDesc|NameShape]]
        This should correspond to the symbol for the default bucket.
    :$for_training : Bool

lib/AI/MXNet/Module/Bucketing.pm  view on Meta::CPAN

{
    # in case we already initialized params, keep it
    my ($arg_params, $aux_params);
    if($self->params_initialized)
    {
        ($arg_params, $aux_params) = $self->get_params;
    }

    # force rebinding is typically used when one wants to switch from
    # the training phase to the prediction phase.
    $self->_reset_bind if $force_rebind;

    if($self->binded)
    {
        $self->logger->warning('Already binded, ignoring bind()');
        return;
    }

    assert((not defined $shared_module), 'shared_module for BucketingModule is not supported');

    $self->for_training($for_training);

lib/AI/MXNet/Module/Bucketing.pm  view on Meta::CPAN

        shared_module    => undef,
        grad_req         => $grad_req
    );
    $self->_curr_module($module);
    $self->_curr_bucket_key($self->_default_bucket_key);
    $self->_buckets->{ $self->_default_bucket_key } = $module;

    # copy back saved params, if already initialized
    if($self->params_initialized)
    {
        $self->set_params($arg_params, $aux_params);
    }
}

=head2 switch_bucket

    Switch to a different bucket. This will change $self->_curr_module.

    Parameters
    ----------
    :$bucket_key : str (or any perl object that overloads "" op)

lib/AI/MXNet/Monitor.pm  view on Meta::CPAN

    Supports installing on multiple executors.

    Parameters
    ----------
    exe : AI::MXNet::Executor
        the Executor (returned by $symbol->bind) to install to.
=cut

method install(AI::MXNet::Executor $exe)
{
    $exe->set_monitor_callback($self->stat_helper);
    push @{ $self->exes }, $exe;
}
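
## Usage sketch, assuming a constructed monitor and executor; the interval
## value is illustrative:
##   my $mon = AI::MXNet::Monitor->new(interval => 100);
##   $mon->install($exe);    # $exe comes from $symbol->bind(...)
##   $mon->tic;              # before forward
##   $exe->forward;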

=head2 tic

    Start collecting stats for the current batch.
    Call before forward.
=cut

method tic()

lib/AI/MXNet/NDArray.pm  view on Meta::CPAN

    '/=' => \&idivide,
    '%'  => \&modulo,
    '%=' => \&imodulo,
    '**' => \&power,
    '==' => \&equal,
    '!=' => \&not_equal,
    '>'  => \&greater,
    '>=' => \&greater_equal,
    '<'  => \&lesser,
    '<=' => \&lesser_equal,
    '.=' => \&set,
    '=' => sub { $_[0] };
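
## A short sketch of the overloads above (shapes are illustrative):
##   my $a = mx->nd->ones([2, 3]);
##   my $b = $a * 2 + 1;     # elementwise arithmetic via overloads
##   my $m = $a < $b;        # comparison ops return 0/1 NDArrays
##   $a .= $b;               # '.=' dispatches to set(), copying in place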

extends 'AI::MXNet::NDArray::Base';
has 'writable' => (is => 'rw', isa => 'Int', default => 1, lazy => 1);
has 'handle'   => (is => 'rw', isa => 'NDArrayHandle', required => 1);

sub DEMOLISH
{
    check_call(AI::MXNetCAPI::NDArrayFree(shift->handle));
}

lib/AI/MXNet/NDArray.pm  view on Meta::CPAN

        confess("Dimension $i slice mismatch (begin $s->[0] > end $s->[1])")
            if($s->[0] > $s->[1]);
        push @$begin, $s->[0];
        push @$end, $s->[1] + 1;
        $i++;
    }
    return $self->_slice($begin->[0], $end->[0]) if @slices == 1;
    return AI::MXNet::NDArray::Slice->new(parent => $self, begin => $begin, end => $end);
}

method set(AcceptableInput $value, $reverse=)
{
    confess("set value must be defined") unless defined $value;
    confess("Array is not writable") if not $self->writable;
    ## plain number
    if(not ref $value)
    {
        $self->_set_value($value, { out => $self });
    }
    # ndarray
    elsif(blessed($value) and $value->isa(__PACKAGE__))
    {
        $value->copyto($self);
    }
    # slice of another ndarray
    elsif(blessed($value) and $value->isa('AI::MXNet::NDArray::Slice'))
    {
        $value->sever->copyto($self);

lib/AI/MXNet/NDArray.pm  view on Meta::CPAN

    out: Array
        The created NDArray.
=cut

method full(
    Shape $shape, Num $val,
    AI::MXNet::Context :$ctx=AI::MXNet::Context->current_ctx,
    Dtype :$dtype='float32', Maybe[AI::MXNet::NDArray] :$out=
)
{
    return __PACKAGE__->_set_value({ src => $val, out => $out ? $out : __PACKAGE__->empty($shape, ctx => $ctx, dtype => $dtype) });
}
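
## Usage sketch:
##   my $x = mx->nd->full([2, 2], 3.14);              # 2x2 NDArray filled with 3.14
##   my $y = mx->nd->full([4], 0, dtype => 'int32');  # typed variant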

=head2 array

    Creates a new NDArray that is a copy of the source_array.

    Parameters
    ----------
    $source_array : AI::MXNet::NDArray PDL, PDL::Matrix, Array ref in PDL::pdl format
        Source data to create NDArray from.

lib/AI/MXNet/NDArray.pm  view on Meta::CPAN

}

=head2 _fresh_grad

        Parameters:
        ----------
        Maybe[Bool] $state=

        Whether this array's corresponding gradient array
        (registered via `autograd->mark_variables`) has been
        updated by `autograd->backward` since last reset.

        `_fresh_grad` needs to be manually set to false (0)
        after consuming the gradient (usually after updating this
        array).
=cut
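
## Usage sketch, following the description above; $x and $g are hypothetical
## arrays registered with the autograd engine:
##   mx->autograd->mark_variables([$x], [$g]);
##   ## ... forward/backward pass ...
##   if($x->_fresh_grad)    # gradient was updated since the last reset
##   {
##       ## consume $g, then clear the flag
##       $x->_fresh_grad(0);
##   }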

method _fresh_grad(Maybe[Bool] $state=)
{
    if(defined $state)
    {
        check_call(AI::MXNetCAPI::NDArraySetGradState($self->handle, $state));
        return $state;

lib/AI/MXNet/NDArray/Slice.pm  view on Meta::CPAN


=head1 NAME

    AI::MXNet::NDArray::Slice - A convenience class for slicing of the AI::MXNet::NDArray objects.
=cut

has parent => (is => 'ro', isa => 'AI::MXNet::NDArray', required => 1);
has begin  => (is => 'ro', isa => 'Shape', required => 1);
has end    => (is => 'ro', isa => 'Shape', required => 1);
use overload 
    '.=' => \&set,
    '='  => sub { $_[0] },
    '""' => \&notsupported,
    '+'  => \&notsupported,
    '+=' => \&notsupported,
    '-'  => \&notsupported,
    '-=' => \&notsupported,
    '*'  => \&notsupported,
    '*=' => \&notsupported,
    '/'  => \&notsupported,
    '/=' => \&notsupported,
    '**' => \&notsupported,
    '==' => \&notsupported,
    '!=' => \&notsupported,
    '>'  => \&notsupported,
    '>=' => \&notsupported,
    '<'  => \&notsupported,
    '<=' => \&notsupported;

method set(AcceptableInput $value, $reverse=)
{
    confess("set value must be defined") unless defined $value;
    confess("${\ $self->parent } is not writable") unless $self->parent->writable;
    my $shape = []; 
    zip(
        sub { my ($begin, $end) = @_; push @$shape, ($end-$begin); },
        $self->begin, 
        $self->end
    );
    if(ref $value)
    {
        if(blessed($value) and $value->isa('AI::MXNet::NDArray'))

lib/AI/MXNet/Optimizer.pm  view on Meta::CPAN

sub BUILD
{
    my $self = shift;
    if($self->lr_scheduler)
    {
        $self->lr_scheduler->base_lr($self->learning_rate);
    }
    $self->lr($self->learning_rate);
    $self->num_update($self->begin_num_update);
    $self->idx2name({ %{ $self->param_idx2name } });
    $self->set_lr_mult({});
    $self->set_wd_mult({});
}
# Create additional optimizer state such as momentum.
# override in implementations.
method create_state($index, $weight){}

# Update the parameters. override in implementations
method update($index, $weight, $grad, $state){}

# set lr scale is deprecated. Use set_lr_mult instead.
method set_lr_scale($args_lrscale)
{
    Carp::cluck("set lr scale is deprecated. Use set_lr_mult instead.");
}

=head2 set_lr_mult

        Set individual learning rate multipliers for parameters.

        Parameters
        ----------
        args_lr_mult : hash ref of string/int to float
            set the lr multiplier for a name/index to a float.
            setting the multiplier by index is supported for backward compatibility,
            but we recommend using name and symbol.
=cut

method set_lr_mult(HashRef[Num] $args_lr_mult)
{
    $self->lr_mult({});
    if($self->sym)
    {
        my $attr = $self->sym->attr_dict();
        for my $name (@{ $self->sym->list_arguments() })
        {
            if(exists $attr->{ $name } and exists $attr->{ $name }{ __lr_mult__ })
            {
                $self->lr_mult->{ $name } = $attr->{ $name }{ __lr_mult__ };
            }
        }
    }
    $self->lr_mult({ %{ $self->lr_mult }, %{ $args_lr_mult } });
}
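
## Usage sketch with hypothetical parameter names:
##   $optimizer->set_lr_mult({ fc1_weight => 0.1, embed_weight => 2 });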

=head2 set_wd_mult

        Set individual weight decay multipliers for parameters.
        By default the wd multiplier is 0 for all params whose names don't
        end with _weight or _gamma, if param_idx2name is provided.

        Parameters
        ----------
        args_wd_mult : hash ref of string/int to float
            set the wd multiplier for a name/index to a float.
            setting the multiplier by index is supported for backward compatibility,
            but we recommend using name and symbol.
=cut
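
## Usage sketch with hypothetical parameter names; biases commonly get
## a zero weight decay multiplier:
##   $optimizer->set_wd_mult({ fc1_bias => 0, fc2_bias => 0 });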

method set_wd_mult(HashRef[Num] $args_wd_mult)
{
    $self->wd_mult({});
    for my $n (values %{ $self->idx2name })
    {
        if(not $n =~ /(?:_weight|_gamma)$/)
        {
            $self->wd_mult->{ $n } = 0;
        }
    }
    if($self->sym)

lib/AI/MXNet/Optimizer.pm  view on Meta::CPAN

       *Adam: A Method for Stochastic Optimization*,
       http://arxiv.org/abs/1412.6980

    The code in this class was adapted from
    https://github.com/mila-udem/blocks/blob/master/blocks/algorithms/__init__.py#L765

    Parameters
    ----------
    learning_rate : float, optional
        Step size.
        Default value is set to 0.001.
    beta1 : float, optional
        Exponential decay rate for the first moment estimates.
        Default value is set to 0.9.
    beta2 : float, optional
        Exponential decay rate for the second moment estimates.
        Default value is set to 0.999.
    epsilon : float, optional
        Default value is set to 1e-8.
    decay_factor : float, optional
        Default value is set to 1 - 1e-8.

    wd : float, optional
        L2 regularization coefficient added to all the weights
    rescale_grad : float, optional
        rescaling factor of gradient. Normally should be 1/batch_size.

    clip_gradient : float, optional
        clip gradient in range [-clip_gradient, clip_gradient]
=cut
package AI::MXNet::Adam;
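
## A minimal sketch of selecting this optimizer through a module's fit(),
## with illustrative hyperparameter values:
##   $module->fit(
##       $train_iter,
##       optimizer        => 'adam',
##       optimizer_params => { learning_rate => 0.001, wd => 1e-4 },
##       num_epoch        => 10
##   );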

lib/AI/MXNet/Optimizer.pm  view on Meta::CPAN

    AdaGrad optimizer of Duchi et al., 2011.

    This code follows the version in http://arxiv.org/pdf/1212.5701v1.pdf  Eq(5)
    by Matthew D. Zeiler, 2012. AdaGrad can help the network to converge faster
    in some cases.

    Parameters
    ----------
    learning_rate : float, optional
        Step size.
        Default value is set to 0.05.

    wd : float, optional
        L2 regularization coefficient added to all the weights

    rescale_grad : float, optional
        rescaling factor of gradient. Normally should be 1/batch_size.

    eps: float, optional
        A small float number to keep the update numerically stable.
        Default value is set to 1e-7.

    clip_gradient : float, optional
        clip gradient in range [-clip_gradient, clip_gradient]
=cut
package AI::MXNet::AdaGrad;
use Mouse;

extends 'AI::MXNet::Optimizer';

has 'float_stable_eps'    => (is => "rw", isa => "Num", default => 1e-7);

lib/AI/MXNet/Optimizer.pm  view on Meta::CPAN

    http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf by
    Tieleman & Hinton, 2012

    For centered=True, the code follows the version in
    http://arxiv.org/pdf/1308.0850v5.pdf Eq(38) - Eq(45) by Alex Graves, 2013.

    Parameters
    ----------
    learning_rate : float, optional
        Step size.
        Default value is set to 0.001.
    gamma1: float, optional
        decay factor of moving average for gradient^2.
        Default value is set to 0.9.
    gamma2: float, optional
        "momentum" factor.
        Default value is set to 0.9.
        Only used if centered=True
    epsilon : float, optional
        Default value is set to 1e-8.
    centered : bool, optional
        Use Graves's or Tieleman & Hinton's version of RMSProp
    wd : float, optional
        L2 regularization coefficient added to all the weights
    rescale_grad : float, optional
        rescaling factor of gradient.
    clip_gradient : float, optional
        clip gradient in range [-clip_gradient, clip_gradient]
    clip_weights : float, optional
        clip weights in range [-clip_weights, clip_weights]

lib/AI/MXNet/Optimizer.pm  view on Meta::CPAN

    {
        return $state->as_in_context($context);
    }
    elsif(ref $state)
    {
        return [map { $self->sync_state_context($_, $context) } @{ $state }];
    }
    return $state;
}

method set_states($states)
{
    my $thawed_states = thaw($states);
    $self->states($thawed_states);
    %{ $self->states_synced } = map { $_ => 0 } keys %{ $thawed_states };
}

method get_states()
{
    return freeze($self->states);
}
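
## Usage sketch: the state blob returned by get_states can be saved and
## restored later; $updater and the surrounding code are illustrative.
##   my $blob = $updater->get_states;
##   ## ... write $blob to disk, reload it in a new process ...
##   $updater->set_states($blob);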

lib/AI/MXNet/Profiler.pm  view on Meta::CPAN

=head1 NAME

    AI::MXNet::Profiler - Optional profiler feature.
=cut

=head1 DESCRIPTION

    Optional profiler.
=cut

=head2 profiler_set_config

    Set up the configuration of the profiler.

    Parameters
    ----------
    mode : string, optional
        Indicates whether to enable the profiler; can
        be 'symbolic' or 'all'. Default is 'symbolic'.
    filename : string, optional
        The name of output trace file. Default is
        'profile.json'.
=cut

method profiler_set_config(ProfilerMode $mode='symbolic', Str $filename='profile.json')
{
    my %mode2int = qw/symbolic 0 all 1/;
    check_call(AI::MXNetCAPI::SetProfilerConfig($mode2int{ $mode }, $filename));
}

=head2 profiler_set_state

    Set up the profiler state to start or stop recording operators.

    Parameters
    ----------
    state : string, optional
        Indicates whether to run the profiler; can
        be 'stop' or 'run'. Default is 'stop'.
=cut

method profiler_set_state(ProfilerState $state='stop')
{
    my %state2int = qw/stop 0 run 1/;
    check_call(AI::MXNetCAPI::SetProfilerState($state2int{ $state }));
}
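
## Usage sketch, assuming the class-method call form; the filename is
## illustrative:
##   AI::MXNet::Profiler->profiler_set_config('all', 'my_trace.json');
##   AI::MXNet::Profiler->profiler_set_state('run');
##   ## ... run the computation to be profiled ...
##   AI::MXNet::Profiler->profiler_set_state('stop');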

=head2 dump_profile

    Dump the profile and stop the profiler. Use this to save the profile
    in advance in case your program cannot exit normally.
=cut

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

    if(not defined $self->_params)
    {
        $self->_own_params(1);
        $self->_params(AI::MXNet::RNN::Params->new($self->_prefix));
    }
    else
    {
        $self->_own_params(0);
    }
    $self->_modified(0);
    $self->reset;
}

=head2 reset

    Reset before re-using the cell for another graph
=cut

method reset()
{
    $self->_init_counter(-1);
    $self->_counter(-1);
}
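
## Usage sketch: reset() must be called before the same cell is unrolled
## into a new graph (unroll() calls it internally); sizes are illustrative
## and $embed stands for a hypothetical input symbol:
##   my $cell = mx->rnn->LSTMCell(num_hidden => 100);
##   my ($outputs, $states) = $cell->unroll(10, inputs => $embed);
##   $cell->reset;    # safe to unroll again for a different graph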

=head2 call

    Construct symbol for one step of RNN.

    Parameters

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN


method unroll(
    Int $length,
    Maybe[AI::MXNet::Symbol|ArrayRef[AI::MXNet::Symbol]] :$inputs=,
    Maybe[AI::MXNet::Symbol|ArrayRef[AI::MXNet::Symbol]] :$begin_state=,
    Str                                                  :$input_prefix='',
    Str                                                  :$layout='NTC',
    Maybe[Bool]                                          :$merge_outputs=
)
{
    $self->reset;
    my $axis = index($layout, 'T');
    if(not defined $inputs)
    {
        $inputs = [
            map { AI::MXNet::Symbol->Variable("${input_prefix}t${_}_data") } (0..$length-1)
        ];
    }
    elsif(blessed($inputs))
    {
        assert(

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

    ----------
    num_hidden : int
        number of units in output symbol
    prefix : str, default 'lstm_'
        prefix for name of layers
        (and name of weight if params is undef)
    params : AI::MXNet::RNN::Params or undef
        container for weight sharing between cells;
        created if undef.
    forget_bias : bias added to forget gate, default 1.0.
        Jozefowicz et al., 2015 recommend setting this to 1.0
=cut

has '+_prefix'     => (default => 'lstm_');
has '+_activation' => (init_arg => undef);
has '+forget_bias' => (is => 'ro', isa => 'Num', default => 1);

method state_info()
{
    return [{ shape => [0, $self->_num_hidden], __layout__ => 'NC' } , { shape => [0, $self->_num_hidden], __layout__ => 'NC' }];
}

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

        name       => "${name}h2h"
    );
    my ($i2h_r, $i2h_z);
    ($i2h_r, $i2h_z, $i2h) = @{ AI::MXNet::Symbol->SliceChannel(
        $i2h, num_outputs => 3, name => "${name}_i2h_slice"
    ) };
    my ($h2h_r, $h2h_z);
    ($h2h_r, $h2h_z, $h2h) = @{ AI::MXNet::Symbol->SliceChannel(
        $h2h, num_outputs => 3, name => "${name}_h2h_slice"
    ) };
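    ## GRU equations implemented below:
    ##   r  = sigmoid(i2h_r + h2h_r)          # reset gate
    ##   z  = sigmoid(i2h_z + h2h_z)          # update gate
    ##   h~ = tanh(i2h + r * h2h)             # candidate hidden state
    ##   h' = (1 - z) * h~ + z * h_prev       # next hidden state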
    my $reset_gate = AI::MXNet::Symbol->Activation(
        $i2h_r + $h2h_r, act_type => "sigmoid", name => "${name}_r_act"
    );
    my $update_gate = AI::MXNet::Symbol->Activation(
        $i2h_z + $h2h_z, act_type => "sigmoid", name => "${name}_z_act"
    );
    my $next_h_tmp = AI::MXNet::Symbol->Activation(
        $i2h + $reset_gate * $h2h, act_type => "tanh", name => "${name}_h_act"
    );
    my $next_h = AI::MXNet::Symbol->_plus(
        (1 - $update_gate) * $next_h_tmp, $update_gate * $prev_state_h,
        name => "${name}out"
    );
    return ($next_h, [$next_h]);
}

package AI::MXNet::RNN::FusedCell;
use Mouse;

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN


method unroll(
    Int $length,
    Maybe[AI::MXNet::Symbol|ArrayRef[AI::MXNet::Symbol]] :$inputs=,
    Maybe[AI::MXNet::Symbol|ArrayRef[AI::MXNet::Symbol]] :$begin_state=,
    Str                                                  :$input_prefix='',
    Str                                                  :$layout='NTC',
    Maybe[Bool]                                          :$merge_outputs=
)
{
    $self->reset;
    my $axis = index($layout, 'T');
    $inputs //= AI::MXNet::Symbol->Variable("${input_prefix}data");
    if(blessed($inputs))
    {
        assert(
            (@{ $inputs->list_outputs() } == 1),
            "unroll doesn't allow grouped symbol as input. Please "
            ."convert to list first or let unroll handle slicing"
        );
        if($axis == 1)

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

    );
    my $outputs;
    my %attr = (__layout__ => 'LNC');
    if(not $self->_get_next_state)
    {
        ($outputs, $states) = ($rnn, []);
    }
    elsif($self->_mode eq 'lstm')
    {
        my @rnn = @{ $rnn };
        $rnn[1]->_set_attr(%attr);
        $rnn[2]->_set_attr(%attr);
        ($outputs, $states) = ($rnn[0], [$rnn[1], $rnn[2]]);
    }
    else
    {
        my @rnn = @{ $rnn };
        $rnn[1]->_set_attr(%attr);
        ($outputs, $states) = ($rnn[0], [$rnn[1]]);
    }
    if(defined $merge_outputs and not $merge_outputs)
    {
        AI::MXNet::Logging->warning(
            "Call RNN::FusedCell->unroll with merge_outputs=1 "
            ."for faster speed"
        );
        $outputs = [@ {
            AI::MXNet::Symbol->SliceChannel(

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

}

method call(AI::MXNet::Symbol $inputs, AI::MXNet::Symbol|ArrayRef[AI::MXNet::Symbol] $states)
{
    $self->_counter($self->_counter + 1);
    my $name = sprintf('%st%d_', $self->_prefix, $self->_counter);
    my ($i2h, $h2h) = $self->_conv_forward($inputs, $states, $name);
    my ($i2h_r, $i2h_z, $h2h_r, $h2h_z);
    ($i2h_r, $i2h_z, $i2h) = @{ AI::MXNet::Symbol->SliceChannel($i2h, num_outputs => 3, name => "${name}_i2h_slice") };
    ($h2h_r, $h2h_z, $h2h) = @{ AI::MXNet::Symbol->SliceChannel($h2h, num_outputs => 3, name => "${name}_h2h_slice") };
    my $reset_gate = AI::MXNet::Symbol->Activation(
        $i2h_r + $h2h_r, act_type => "sigmoid",
        name => "${name}_r_act"
    );
    my $update_gate = AI::MXNet::Symbol->Activation(
        $i2h_z + $h2h_z, act_type => "sigmoid",
        name => "${name}_z_act"
    );
    my $next_h_tmp = $self->_get_activation($i2h + $reset_gate * $h2h, $self->_activation, name => "${name}_h_act");
    my $next_h = AI::MXNet::Symbol->_plus(
        (1 - $update_gate) * $next_h_tmp, $update_gate * @{$states}[0],
        name => "${name}out"
    );
    return ($next_h, [$next_h]);
}

package AI::MXNet::RNN::ModifierCell;
use Mouse;
use AI::MXNet::Base;

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

        "BidirectionalCell doesn't support zoneout since it doesn't support step. ".
        "Please add ZoneoutCell to the cells underneath instead."
    );
    assert(
        (not $self->base_cell->isa('AI::MXNet::RNN::SequentialCell') or not $self->_bidirectional),
        "Bidirectional SequentialCell doesn't support zoneout. ".
        "Please add ZoneoutCell to the cells underneath instead."
    );
}

method reset()
{
    $self->SUPER::reset;
    $self->prev_output(undef);
}

method call(AI::MXNet::Symbol $inputs, SymbolOrArrayOfSymbols $states)
{
    my ($cell, $p_outputs, $p_states) = ($self->base_cell, $self->zoneout_outputs, $self->zoneout_states);
    my ($next_output, $next_states) = &{$cell}($inputs, $states);
    my $mask = sub {
        my ($p, $like) = @_;
        AI::MXNet::Symbol->Dropout(

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN


method unroll(
    Int $length,
    Maybe[AI::MXNet::Symbol|ArrayRef[AI::MXNet::Symbol]] :$inputs=,
    Maybe[AI::MXNet::Symbol|ArrayRef[AI::MXNet::Symbol]] :$begin_state=,
    Str                                                  :$input_prefix='',
    Str                                                  :$layout='NTC',
    Maybe[Bool]                                          :$merge_outputs=
)
{
    $self->reset;
    $self->base_cell->_modified(0);
    my ($outputs, $states) = $self->base_cell->unroll($length, inputs=>$inputs, begin_state=>$begin_state,
                                                layout=>$layout, merge_outputs=>$merge_outputs);
    $self->base_cell->_modified(1);
    $merge_outputs //= (blessed($outputs) and $outputs->isa('AI::MXNet::Symbol'));
    ($inputs) = _normalize_sequence($length, $inputs, $layout, $merge_outputs);
    if($merge_outputs)
    {
        $outputs = AI::MXNet::Symbol->elemwise_add($outputs, $inputs, name => $outputs->name . "_plus_residual");
    }

lib/AI/MXNet/RNN/IO.pm  view on Meta::CPAN

        my $buck_len = $buck->shape->at(-1);
        for my $j (0..($buck_len - $self->batch_size))
        {
            if(not $j%$self->batch_size)
            {
                push @{ $self->idx }, [$i, $j];
            }
        }
    }, $self->data);
    $self->curr_idx(0);
    $self->reset;
}

method reset()
{
    $self->curr_idx(0);
    @{ $self->idx } = shuffle(@{ $self->idx });
    $self->nddata([]);
    $self->ndlabel([]);
    for my $buck (@{ $self->data })
    {
        $buck = pdl_shuffle($buck);
        my $label = $buck->zeros;
        $label->slice([0, -2], 'X')  .= $buck->slice([1, -1], 'X');

lib/AI/MXNet/Random.pm  view on Meta::CPAN


    Seed the random number generators in mxnet.

    This seed will affect the behavior of functions in this module,
    as well as results from executors that contain randomized operators,
    such as Dropout.

    Parameters
    ----------
    seed_state : int
        The random number seed to set on all devices.

    Notes
    -----
    The random number generator of mxnet is by default device specific.
    This means that, given the same seed, the random number sequence
    generated on a GPU can differ from the one generated on a CPU.
=cut

method seed(Int $seed_state)
{
    check_call(AI::MXNetCAPI::RandomSeed($seed_state));
}
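
## Usage sketch:
##   mx->random->seed(42);   # the same seed is applied to every device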

for my $method (
        [qw/_sample_uniform uniform/],

lib/AI/MXNet/RecordIO.pm  view on Meta::CPAN

    {
        check_call(AI::MXNetCAPI::RecordIOWriterFree($self->handle));
    }
    else
    {
        check_call(AI::MXNetCAPI::RecordIOReaderFree($self->handle));
    }
    $self->is_open(0);
}

=head2 reset

    Reset the pointer to the first item. If the record file is opened with 'w',
    this will truncate the file to empty.
=cut

method reset()
{
    $self->close;
    $self->open;
}
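
## Usage sketch, assuming constructor arguments named uri and flag; the
## filename is illustrative:
##   my $rec = AI::MXNet::RecordIO->new(uri => 'data.rec', flag => 'w');
##   $rec->write($record);
##   $rec->reset;    # truncates the file, since it was opened with 'w'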

=head2 write

    Write a string buffer as a record.

    Parameters

lib/AI/MXNet/Symbol.pm  view on Meta::CPAN

    my $size = @attrs/2;
    for (my $i = 0; $i < $size; $i++)
    {
        my ($name, $key) = split(/\$/, $attrs[$i*2]);
        my $val = $attrs[$i*2+1];
        $ret{ $name }{ $key } = $val;
    }
    return \%ret;
}

method _set_attr(Str @args)
{
    my %kwargs = @args; 
    while(my ($key, $val) = each(%kwargs))
    {
        check_call(
            AI::MXNetCAPI::SymbolSetAttr(
                $self->handle, $key, $val
            )
        );
    }

lib/AI/MXNet/Symbol.pm  view on Meta::CPAN

    }

    if(not defined $aux_states)
    {
        $aux_states = [];
    }
    ($aux_args_handle, $aux_states) = $self->_get_ndarray_inputs(
            'aux_states', $aux_states, $self->list_auxiliary_states()
    );

    # setup requirements
    my $req_map = { null => 0, write => 1, add =>  3 };
    my $req_array = [];
    if(not ref $grad_req)
    {
        confess('grad_req must be one of "null,write,add"')
            unless exists $req_map->{ $grad_req };
        @{ $req_array } = (($req_map->{ $grad_req }) x @{ $listed_arguments });
    }
    elsif(ref $grad_req eq 'ARRAY')
    {

lib/AI/MXNet/Symbol.pm  view on Meta::CPAN


=head2 Variable

    Create a symbolic variable with specified name.

    Parameters
    ----------
    name : str
        Name of the variable.
    attr : hash ref of string -> string
        Additional attributes to set on the variable.
    shape : array ref of positive integers
        Optionally, one can specify the shape of a variable. This will be used during
        shape inference. If the user specifies a different shape for this variable using
        a keyword argument when calling shape inference, this shape information will be ignored.
    lr_mult : float
        Specify the learning rate multiplier for this variable.
    wd_mult : float
        Specify the weight decay multiplier for this variable.
    dtype : Dtype
        Similar to shape, we can specify dtype for this variable.

lib/AI/MXNet/Symbol.pm  view on Meta::CPAN

            $attr->{$k} = "$v";
        }
        else
        {
            confess("Attribute name=$k is not supported.".
                    ' Additional attributes must start and end with double underscores,'.
                    ' e.g., __yourattr__'
            );
        }
    }
    $ret->_set_attr(%{ $attr });
    return $ret;
}
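
## Usage sketch; names and values are illustrative, and custom attributes
## must be wrapped in double underscores as validated above:
##   my $w = mx->sym->Variable(
##       'fc1_weight',
##       shape   => [128, 64],
##       lr_mult => 0.1,
##       attr    => { __wd_mult__ => '0.5' }
##   );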

=head2 var

    A synonym to Variable.
=cut

*var = \&Variable;

lib/AI/MXNet/Symbol/AttrScope.pm  view on Meta::CPAN


=head1 DESCRIPTION

    Attribute manager for scoping.

    Users can also subclass this object to change the naming behavior.

    Parameters
    ----------
    kwargs
        The attributes to set for all symbol creations in the scope.
=cut

has 'attr' => (
    is => 'ro',
    isa => 'HashRef[Str]',
);

=head2 current

    Get the current AttrScope singleton.

    Returns
    -------
    $attr : current value of the class singleton object
=cut

method current()
{
    $AI::MXNet::curr_attr_scope;
}

=head2 get

    Get the attribute hash ref for a symbol, merged with the attributes set by this scope.

    Parameters
    ----------
    $attr : Maybe[HashRef[Str]]
        The attribute passed in by user during symbol creation.

    Returns
    -------
    $attr : HashRef[Str]
        The attributes, updated to include the scope-related attributes.


