AI-MXNet


examples/calculator.pl  view on Meta::CPAN

    return mx->sym->MAERegressionOutput(data => $fc, name => 'softmax');
}

sub learn_function {
    my(%args) = @_;
    my $func = $args{func};
    my $batch_size = $args{batch_size}//128;
    my($train_iter, $eval_iter) = samples($batch_size, $func);
    my $sym = nn_fc();

    ## call as ./calculator.pl 1 to just print the model and exit
    if($ARGV[0]) {
        my @dsz = @{$train_iter->data->[0][1]->shape};
        my @lsz = @{$train_iter->label->[0][1]->shape};
        my $shape = {
            data          => [ $batch_size, splice @dsz,  1 ],
            softmax_label => [ $batch_size, splice @lsz, 1 ],
        };
        print mx->viz->plot_network($sym, shape => $shape)->graph->as_png;
        exit;
    }

    my $model = mx->mod->Module(
        symbol => $sym,
        context => mx->cpu(),
    );
    $model->fit($train_iter,
        eval_data => $eval_iter,
        optimizer => 'adam',

examples/calculator.pl  view on Meta::CPAN

    );
    $model->reshape(
        data_shapes => $iter->provide_data,
        label_shapes => $iter->provide_label,
    );

    # return a helper closure that wraps the model for making predictions
    my ($arg_params) = $model->get_params;
    for my $k (sort keys %$arg_params)
    {
	print "$k -> ". $arg_params->{$k}->aspdl."\n";
    }
    return sub {
        my($n, $m) = @_;
        return $model->predict(mx->io->NDArrayIter(
            batch_size => 1,
            data => PDL->new([[ $n, $m ]]),
        ))->aspdl->list;
    };
}

examples/calculator.pl  view on Meta::CPAN

my $mul = learn_function(func => sub {
    my($n, $m) = @_;
    return $n * $m;
}, batch_size => 50, epoch => 40);
my $div = learn_function(func => sub {
    my($n, $m) = @_;
    return $n / $m;
}, batch_size => 10, epoch => 80);
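
The $add and $sub closures printed a few lines below are defined outside this excerpt. Presumably they are built the same way as $mul and $div above; the following is only a hedged sketch (the real batch_size/epoch settings are not shown here):

    # hypothetical reconstruction; the actual hyperparameters are elided in this excerpt
    my $add = learn_function(func => sub {
        my($n, $m) = @_;
        return $n + $m;
    });
    my $sub = learn_function(func => sub {
        my($n, $m) = @_;
        return $n - $m;
    });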


print "12345 + 54321 ≈ ", $add->(12345, 54321), "\n";
print "188 - 88 ≈ ", $sub->(188, 88), "\n";
print "250 * 2 ≈ ", $mul->(250, 2), "\n";
print "250 / 2 ≈ ", $div->(250, 2), "\n";

examples/char_lstm.pl  view on Meta::CPAN

sub sample {
    return if not $sample_size;
    $model->reshape(data_shapes=>[['data',[1, $seq_size]]], label_shapes=>[['softmax_label',[1, $seq_size]]]);
    my $input = mx->nd->array($fdata->slice([0, $seq_size-1]))->reshape([1, $seq_size]);
    $| = 1;
    for (0..$sample_size-1)
    {
        $model->forward(mx->io->DataBatch(data=>[$input]), is_train => 0);
        my $prob = $model->get_outputs(0)->[0][0]->at($seq_size-1)->aspdl;
        my $next_char = Math::Random::Discrete->new($prob->reshape(-1)->unpdl, [0..scalar(keys %vocabulary)-1])->rand;
        print "$reverse_vocab{$next_char}";
        $input->at(0)->slice([0, $seq_size-2]) .= $input->at(0)->slice([1, $seq_size-1])->copy;
        $input->at(0)->at($seq_size-1) .= $next_char;
    }
    $model->reshape(data_shapes=>[['data',[$batch_size, $seq_size]]], label_shapes=>[['softmax_label',[$batch_size, $seq_size]]]);
}

examples/cudnn_lstm_bucketing.pl  view on Meta::CPAN

    );
    $model->set_params($arg_params, $aux_params);
    my $score = $model->score($data_val,
        mx->metric->Perplexity($invalid_label),
        batch_end_callback=>mx->callback->Speedometer($batch_size, 5)
    );
};

if($num_layers >= 4 and split(/,/,$gpus) >= 4 and not $stack_rnn)
{
    print("WARNING: stack-rnn is recommended to train complex model on multiple GPUs\n");
}

if($do_test)
{
    # Demonstrates how to load a model trained with CuDNN RNN and predict
    # with non-fused MXNet symbol
    $test->();
}
else
{

examples/mnist.pl  view on Meta::CPAN

    return($label, $image);
}

my $path='http://yann.lecun.com/exdb/mnist/';
my($train_lbl, $train_img) = read_data(
    "${path}train-labels-idx1-ubyte.gz", "${path}train-images-idx3-ubyte.gz");
my($val_lbl, $val_img) = read_data(
    "${path}t10k-labels-idx1-ubyte.gz", "${path}t10k-images-idx3-ubyte.gz");

sub show_sample {
    print 'label: ', $train_lbl->slice('0:9'), "\n";
    my $hbox = Gtk2::HBox->new(0, 2);
    for my $i (0 .. 9) {
        my $img = $train_img->slice(":,:,$i");
        my($w, $h) = $img->dims;
        $img->make_physical();
        # ugh, pixbufs don't have a grayscale colorspace?!
        # burst it to rgb I guess.
        my $data = pack 'c*', map { $_, $_, $_ } unpack 'c*', ${$img->get_dataref};
        $hbox->add(Gtk2::Image->new_from_pixbuf(
            Gtk2::Gdk::Pixbuf->new_from_data($data, 'rgb', 0, 8, $w, $h, $w * 3)

lib/AI/MXNet/Base.pm  view on Meta::CPAN

sub build_param_doc
{
    my ($arg_names, $arg_types, $arg_descs, $remove_dup) = @_;
    $remove_dup //= 1;
    my %param_keys;
    my @param_str;
    zip(sub { 
            my ($key, $type_info, $desc) = @_;
            return if exists $param_keys{$key} and $remove_dup;
            $param_keys{$key} = 1;
            my $ret = sprintf("%s : %s", $key, $type_info);
            $ret .= "\n    ".$desc if length($desc); 
            push @param_str,  $ret;
        },
        $arg_names, $arg_types, $arg_descs
    );
    return sprintf("Parameters\n----------\n%s\n", join("\n", @param_str));
}
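
For illustration, a hedged sketch of the format build_param_doc produces; the argument names, types, and descriptions are made up, and it assumes the function is exported by AI::MXNet::Base (the NDArray and Symbol Doc modules below call it unqualified in the same way):

    my $doc = build_param_doc(
        ['data', 'kernel'],                                           # argument names (hypothetical)
        ['NDArray', 'Shape(tuple)'],                                  # argument types (hypothetical)
        ['Input data to the operator.', 'Convolution kernel size.'],  # descriptions (hypothetical)
    );
    print $doc;
    # Parameters
    # ----------
    # data : NDArray
    #     Input data to the operator.
    # kernel : Shape(tuple)
    #     Convolution kernel size.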

=head2 _notify_shutdown

    Notify MXNet about shutdown.
=cut

sub _notify_shutdown
{
    check_call(AI::MXNetCAPI::NotifyShutdown());
}

END {
    _notify_shutdown();
    Time::HiRes::sleep(0.01);
}

*pzeros = \&zeros;
*pceil  = \&ceil;
## making sure that we can stringify arbitrarily large piddles
$PDL::toolongtoprint = 1000_000_000;

1;

lib/AI/MXNet/Callback.pm  view on Meta::CPAN


has 'length'  => (is => 'ro', isa => 'Int', default => 80);
has 'total'   => (is => 'ro', isa => 'Int', required => 1);

method call(AI::MXNet::BatchEndParam $param)
{
    my $count = $param->nbatch;
    my $filled_len = int(0.5 + $self->length * $count / $self->total);
    my $percents = int(100.0 * $count / $self->total) + 1;
    my $prog_bar = ('=' x $filled_len) . ('-' x ($self->length - $filled_len));
    print "[$prog_bar] $percents%\r";
}

*slice = \&call;

# Just logs the eval metrics at the end of an epoch.
package AI::MXNet::LogValidationMetricsCallback;
use Mouse;
extends 'AI::MXNet::Callback';

=head1 NAME

lib/AI/MXNet/Context.pm  view on Meta::CPAN

);

use overload
    '==' => sub {
        my ($self, $other) = @_;
        return 0 unless blessed($other) and $other->isa(__PACKAGE__);
        return "$self" eq "$other";
    },
    '""' => sub {
        my ($self) = @_;
        return sprintf("%s(%s)", $self->device_type, $self->device_id);
    };
=head1 NAME

    AI::MXNet::Context - A device context.
=cut

=head1 DESCRIPTION

    This class governs the device context of AI::MXNet::NDArray objects.
=cut
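
A minimal usage sketch of the two overloads above; it assumes the default device_id is 0:

    use AI::MXNet qw(mx);

    my $ctx = mx->cpu();                            # an AI::MXNet::Context
    my $a   = mx->nd->ones([2, 3], ctx => $ctx);    # allocate an NDArray on that device
    print "$ctx\n";                                 # cpu(0), via the '""' overload
    print "same device\n" if $ctx == mx->cpu();     # '==' compares the stringified contexts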

lib/AI/MXNet/Executor.pm  view on Meta::CPAN


    Examples
    --------
        >>> # doing forward by specifying data
        >>> $texec->forward(1, data => $mydata);
        >>> # doing forward by not specifying things, but copy to the executor before hand
        >>> $mydata->copyto($texec->arg_dict->{'data'});
        >>> $texec->forward(1);
        >>> # doing forward by specifying data and get outputs
        >>> my $outputs = $texec->forward(1, data => $mydata);
        >>> print $outputs->[0]->aspdl;
=cut

method forward(Int $is_train=0, %kwargs)
{
    if(%kwargs)
    {
        my $arg_dict = $self->arg_dict;
        while (my ($name, $array) = each %kwargs)
        {
            if(not find_type_constraint('AcceptableInput')->check($array))

lib/AI/MXNet/Executor/Group.pm  view on Meta::CPAN

    confess("empty data_shapes array") unless @{ $data_shapes } > 0;
    my $major_axis = [map { AI::MXNet::DataDesc->get_batch_axis($_->layout) } @{ $data_shapes }];
    zip(sub {
        my ($desc, $axis) = @_;
        return if($axis == -1);
        my $batch_size = $desc->shape->[$axis];
        if(defined $self->_p->batch_size)
        {
            confess(
                "all data must have the same batch size: "
                . sprintf("batch_size = %d, but ", $self->_p->batch_size)
                . sprintf("%s has shape %s", $desc->name, '('. join(',', @{ $desc->shape }) . ')')
            ) unless $batch_size == $self->_p->batch_size;
        }
        else
        {
            $self->_p->batch_size($batch_size);
            $self->_p->slices(AI::MXNet::Executor::Group::_split_input_slice($self->_p->batch_size, $self->workload));
        }
    }, $data_shapes, $major_axis);
    return $major_axis;
}

lib/AI/MXNet/IO.pm  view on Meta::CPAN

    {
        my $name  = shift;
        my $shape = shift;
        return $class->$orig(name => $name, shape => $shape, @_);
    }
    return $class->$orig(@_);
};

method stringify($other=, $reverse=)
{
    sprintf(
        "DataDesc[%s,%s,%s,%s]",
        $self->name,
        join('x', @{ $self->shape }),
        $self->dtype,
        $self->layout
    );
}
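
A hedged sketch of what this produces, assuming the usual defaults of dtype 'float32' and layout 'NCHW' and that stringify backs the '""' overload, as its signature suggests:

    my $desc = AI::MXNet::DataDesc->new(name => 'data', shape => [32, 3, 224, 224]);
    print "$desc\n";    # DataDesc[data,32x3x224x224,float32,NCHW] (with the assumed defaults)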

method to_nameshape($other=, $reverse=)
{

lib/AI/MXNet/Image.pm  view on Meta::CPAN

        provide_data
        provide_label
           /]     => (is => 'rw', init_arg => undef);

sub BUILD
{
    my $self = shift;
    assert($self->path_imgrec or $self->path_imglist or ref $self->imglist eq 'ARRAY');
    if($self->path_imgrec)
    {
        print("loading recordio...\n");
        if($self->path_imgidx)
        {
            $self->imgrec(
                AI::MXNet::IndexedRecordIO->new(
                    idx_path => $self->path_imgidx,
                    uri => $self->path_imgrec,
                    flag => 'r'
                )
            );
            $self->imgidx([@{ $self->imgrec->keys }]);
        }
        else
        {
            $self->imgrec(AI::MXNet::RecordIO->new(uri => $self->path_imgrec, flag => 'r'));
        }
    }
    my %imglist;
    my @imgkeys;
    if($self->path_imglist)
    {
        print("loading image list...\n");
        open(my $f, $self->path_imglist) or confess("can't open ${\ $self->path_imglist } : $!");
        while(my $line = <$f>)
        {
            chomp($line);
            my @line = split(/\t/, $line);
            my $label = AI::MXNet::NDArray->array([@line[1..@line-2]]);
            my $key   = $line[0];
            $imglist{$key} = [$label, $line[-1]];
            push @imgkeys, $key;
        }
        $self->imglist(\%imglist);
    }
    elsif(ref $self->imglist eq 'ARRAY')
    {
        print("loading image list...\n");
        my %result;
        my $index = 1;
        for my $img (@{ $self->imglist })
        {
            my $key = $index++;
            my $label;
            if(not ref $img->[0])
            {
                $label = AI::MXNet::NDArray->array([$img->[0]]);
            }

lib/AI/MXNet/Initializer.pm  view on Meta::CPAN

                my $self = shift;
                my ($name) = ref($self) =~ /::(\w+)$/;
                encode_json(
                    [lc $name,
                        $self->kwargs//{ map { $_ => "".$self->$_ } $self->meta->get_attribute_list }
                ]);
             },
             fallback => 1;
has 'kwargs' => (is => 'rw', init_arg => undef, isa => 'HashRef');
has '_verbose'    => (is => 'rw', isa => 'Bool', lazy => 1, default => 0);
has '_print_func' => (is => 'rw', isa => 'CodeRef', lazy => 1,
    default => sub {
        return sub {
            my $x = shift;
            return ($x->norm/sqrt($x->size))->asscalar;
        };
    }
);

=head1 NAME

lib/AI/MXNet/Initializer.pm  view on Meta::CPAN

=cut

=head2 set_verbosity

    Switch on/off verbose mode

    Parameters
    ----------
    $verbose : bool
        switch on/off verbose mode
    $print_func : CodeRef
        A function that computes statistics of initialized arrays.
        Takes an AI::MXNet::NDArray and returns a scalar. Defaults to mean
        absolute value |x|/size(x)
=cut

method set_verbosity(Bool $verbose=0, CodeRef $print_func=)
{
    $self->_verbose($verbose);
    $self->_print_func($print_func) if defined $print_func;
}
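
A hedged usage sketch, using the AI::MXNet::Orthogonal initializer defined later in this file; the statistic here is the mean absolute value instead of the default norm-based one, and it assumes the usual NDArray abs/sum operators:

    my $init = AI::MXNet::Orthogonal->new(rand_type => 'normal');
    $init->set_verbosity(1, sub {
        my $x = shift;                              # an AI::MXNet::NDArray being initialized
        return $x->abs->sum->asscalar / $x->size;   # mean |x|
    });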

method _verbose_print($desc, $init, $arr)
{
    if($self->_verbose and defined $self->_print_func)
    {
        AI::MXNet::Logging->info('Initialized %s as %s: %s', $desc, $init, $self->_print_func->($arr));
    }
}

my %init_registry;
method get_init_registry()
{
    return \%init_registry;
}

method register()

lib/AI/MXNet/Initializer.pm  view on Meta::CPAN

        an ndarray to be initialized.
=cut
method call(Str|AI::MXNet::InitDesc $desc, AI::MXNet::NDArray $arr)
{
    return $self->_legacy_init($desc, $arr) unless blessed $desc;
    my $init = $desc->attrs->{ __init__ };
    if($init)
    {
      my ($klass, $kwargs) = @{ decode_json($init) };
      $self->get_init_registry->{ lc $klass }->new(%{ $kwargs })->_init_weight("$desc", $arr);
      $self->_verbose_print($desc, $init, $arr);
    }
    else
    {
        $desc = "$desc";
        if($desc =~ /(weight|bias|gamma|beta)$/)
        {
            my $method = "_init_$1";
            $self->$method($desc, $arr);
            $self->_verbose_print($desc, $1, $arr);
        }
        else
        {
            $self->_init_default($desc, $arr)
        }
    }
}


method _legacy_init(Str $name, AI::MXNet::NDArray $arr)

lib/AI/MXNet/Initializer.pm  view on Meta::CPAN

    ----------
    scale : float, optional
        scaling factor of weight

    rand_type : string, optional
        use "uniform" or "normal" random number to initialize weight

    Reference
    ---------
    Exact solutions to the nonlinear dynamics of learning in deep linear neural networks
    arXiv preprint arXiv:1312.6120 (2013).
=cut

package AI::MXNet::Orthogonal;
use AI::MXNet::Base;
use Mouse;
use AI::MXNet::Types;
extends 'AI::MXNet::Initializer';
has "scale" => (is => "ro", isa => "Num", default => 1.414);
has "rand_type" => (is => "ro", isa => enum([qw/uniform normal/]), default => 'uniform');

lib/AI/MXNet/KVStore.pm  view on Meta::CPAN

        The values.

    Examples
    --------
    >>> # init a single key-value pair
    >>> $shape = [2,3]
    >>> $kv = mx->kv->create('local')
    >>> $kv->init(3, mx->nd->ones($shape)*2)
    >>> $a = mx->nd->zeros($shape)
    >>> $kv->pull(3, out=>$a)
    >>> print $a->aspdl
    [[ 2  2  2]
    [ 2  2  2]]

    >>> # init a list of key-value pairs
    >>> $keys = [5, 7, 9]
    >>> $kv->init($keys, [map { mx->nd->ones($shape) } 0..@$keys-1])
=cut

method init(
    Str|ArrayRef[Str] $key,

lib/AI/MXNet/KVStore.pm  view on Meta::CPAN

    priority : int, optional
        The priority of the push operation.
        The higher the priority, the faster this action is likely
        to be executed before other push actions.

    Examples
    --------
    >>> # push a single key-value pair
    >>> $kv->push(3, mx->nd->ones($shape)*8)
    >>> $kv->pull(3, out=>$a) # pull out the value
    >>> print $a->aspdl()
        [[ 8.  8.  8.]
        [ 8.  8.  8.]]

    >>> # aggregate the value and the push
    >>> $gpus = [map { mx->gpu($_) } 0..3]
    >>> $b = [map { mx->nd->ones($shape, ctx => $_) } @$gpus]
    >>> $kv->push(3, $b)
    >>> $kv->pull(3, out=>$a)
    >>> print $a->aspdl
        [[ 4.  4.  4.]
        [ 4.  4.  4.]]

    >>> # push a list of keys.
    >>> # single device
    >>> $kv->push($keys, [map { mx->nd->ones($shape) } 0..@$keys-1])
    >>> $b = [map { mx->nd->zeros($shape) } 0..@$keys-1]
    >>> $kv->pull($keys, out=>$b)
    >>> print $b->[1]->aspdl
        [[ 1.  1.  1.]
        [ 1.  1.  1.]]

    >>> # multiple devices:
    >>> $b = [map { [map { mx->nd->ones($shape, ctx => $_) } @$gpus] } 0..@$keys-1]
    >>> $kv->push($keys, $b)
    >>> $kv->pull($keys, out=>$b)
    >>> print $b->[1][1]->aspdl()
        [[ 4.  4.  4.]
        [ 4.  4.  4.]]
=cut

method push(
    Str|ArrayRef[Str] $key,
    AI::MXNet::NDArray|ArrayRef[AI::MXNet::NDArray]|ArrayRef[ArrayRef[AI::MXNet::NDArray]] $value,
    Int :$priority=0
)
{

lib/AI/MXNet/KVStore.pm  view on Meta::CPAN

    priority : int, optional
        The priority of the push operation.
        The higher the priority, the faster this action is likely
        to be executed before other push actions.

    Examples
    --------
    >>> # pull a single key-value pair
    >>> $a = mx->nd->zeros($shape)
    >>> $kv->pull(3, out=>$a)
    >>> print $a->aspdl
        [[ 2.  2.  2.]
        [ 2.  2.  2.]]

    >>> # pull into multiple devices
    >>> $b = [map { mx->nd->ones($shape, ctx => $_) } @$gpus]
    >>> $kv->pull(3, out=>$b)
    >>> print $b->[1]->aspdl()
        [[ 2.  2.  2.]
        [ 2.  2.  2.]]

    >>> # pull a list of key-value pairs.
    >>> # On single device
    >>> $keys = [5, 7, 9]
    >>> $b = [map { mx->nd->zeros($shape) } 0..@$keys-1]
    >>> $kv->pull($keys, out=>$b)
    >>> print $b->[1]->aspdl()
        [[ 2.  2.  2.]
        [ 2.  2.  2.]]
    >>> # On multiple devices
    >>> $b = [map { [map { mx->nd->ones($shape, ctx => $_) } @$gpus ] } 0..@$keys-1]
    >>> $kv->pull($keys, out=>$b)
    >>> print $b->[1][1]->aspdl()
        [[ 2.  2.  2.]
        [ 2.  2.  2.]]
=cut

method pull(
    Str|ArrayRef[Str] $key,
    AI::MXNet::NDArray|ArrayRef[AI::MXNet::NDArray]|ArrayRef[ArrayRef[AI::MXNet::NDArray]] :$out,
    Int :$priority=0
)
{

lib/AI/MXNet/KVStore.pm  view on Meta::CPAN

    ----------
    fname : str
        Path to output states file.
=cut

method save_optimizer_states(Str $fname)
{
    confess("Cannot save states for distributed training")
        unless defined $self->_updater;
    open(F, ">:raw", "$fname") or confess("can't open $fname for writing: $!");
    print F $self->_updater->get_states();
    close(F);
}

=head2 load_optimizer_states

    Load optimizer (updater) state from file.

    Parameters
    ----------
    fname : str

lib/AI/MXNet/KVStore.pm  view on Meta::CPAN

    multi-machines.

    Parameters
    ----------
    updater : function
        the updater function

    Examples
    --------
    >>> my $update = sub { my ($key, $input, $stored) = @_;
    ...     print "update on key: $key\n";
    ...     $stored += $input * 2; };
    >>> $kv->_set_updater($update)
    >>> $kv->pull(3, out=>$a)
    >>> print $a->aspdl()
        [[ 4.  4.  4.]
        [ 4.  4.  4.]]
    >>> $kv->push(3, mx->nd->ones($shape))
        update on key: 3
    >>> $kv->pull(3, out=>$a)
    >>> print $a->aspdl()
        [[ 6.  6.  6.]
        [ 6.  6.  6.]]
=cut

method _set_updater(CodeRef $updater_func)
{
    $self->_updater_func(
        sub {
            my ($index, $input_handle, $storage_handle) = @_;
            $updater_func->(

lib/AI/MXNet/KVStoreServer.pm  view on Meta::CPAN

            $self->init_logging(1);
        }
        if($cmd_id == 0)
        {
            my $optimizer = Storable::thaw(MIME::Base64::decode_base64($cmd_body));
            $self->kvstore->set_optimizer($optimizer);
        }
        else
        {
            my $rank = $self->kvstore->rank;
            print("server $rank, unknown command ($cmd_id, $cmd_body)\n");
        }
    }
}

=head2 run

    run the server, whose behavior is like
    >>> while receive(x):
    ...     if is_command x: controller(x)
    ...     else if is_key_value x: updater(x)

lib/AI/MXNet/Logging.pm  view on Meta::CPAN

package AI::MXNet::Logging;
## TODO
use Mouse;
sub warning { shift; warn sprintf(shift, @_) . "\n" };
*debug   = *info = *warning;
sub get_logger { __PACKAGE__->new }

1;

lib/AI/MXNet/Metric.pm  view on Meta::CPAN

        {
            return ($self->name, 'nan');
        }
        else
        {
            return ($self->name, $self->sum_metric / $self->num_inst);
        }
    }
    else
    {
        my $names = [map { sprintf('%s_%d', $self->name, $_) } 0..$self->num-1];
        my $values = [];
        for (my $i = 0; $i < @{ $self->sum_metric }; $i++)
        {
            my ($x, $y) = ($self->sum_metric->[$i], $self->num_inst->[$i]);
            if($y != 0)
            {
                push (@$values, $x/$y);
            }
            else
            {

lib/AI/MXNet/Metric.pm  view on Meta::CPAN

    ----------
    name : str
        Name of this metric instance for display.

    Examples
    --------
    >>> $predicts = [mx->nd->array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])]
    >>> $labels   = [mx->nd->array([[1, 0], [0, 1], [0, 1]])]
    >>> $pr = mx->metric->PearsonCorrelation()
    >>> $pr->update($labels, $predicts)
    >>> print $pr->get()
    ('pearson-correlation', '0.421637061887229')
=cut

method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
{
    AI::MXNet::Metric::check_label_shapes($labels, $preds);
    zip(sub {
        my ($label, $pred) = @_;
        AI::MXNet::Metric::check_label_shapes($label, $pred);
        $label = $label->aspdl->flat;

lib/AI/MXNet/Module.pm  view on Meta::CPAN

            # state for the same index but on diff devs, TODO(mli)
            # use a better solution later
            &{$updater}($index*$num_device+$k, $g, $w);
        }, $arg_list, $grad_list);
    }, $param_arrays, $grad_arrays);
}

method load_checkpoint(Str $prefix, Int $epoch)
{
    my $symbol = AI::MXNet::Symbol->load("$prefix-symbol.json");
    my %save_dict = %{ AI::MXNet::NDArray->load(sprintf('%s-%04d.params', $prefix, $epoch)) };
    my %arg_params;
    my %aux_params;
    while(my ($k, $v) = each %save_dict)
    {
        my ($tp, $name) = split(/:/, $k, 2);
        if($tp eq 'arg')
        {
            $arg_params{$name} = $v;
        }
        if($tp eq 'aux')

lib/AI/MXNet/Module.pm  view on Meta::CPAN

    %kwargs
)
{
    my ($sym, $args, $auxs) = __PACKAGE__->load_checkpoint($prefix, $epoch);
    my $mod = $self->new(symbol => $sym, %kwargs);
    $mod->_p->_arg_params($args);
    $mod->_p->_aux_params($auxs);
    $mod->params_initialized(1);
    if($load_optimizer_states)
    {
        $mod->_p->_preload_opt_states(sprintf('%s-%04d.states', $prefix, $epoch));
    }
    return $mod;
}

=head2 save_checkpoint

    Save current progress to a checkpoint.
    Use mx->callback->module_checkpoint as epoch_end_callback to save during training.

    Parameters

lib/AI/MXNet/Module.pm  view on Meta::CPAN

    epoch : int
        The current epoch number
    save_optimizer_states : bool
        Whether to save optimizer states for later training
=cut


method save_checkpoint(Str $prefix, Int $epoch, Bool $save_optimizer_states=0)
{
    $self->_symbol->save("$prefix-symbol.json");
    my $param_name = sprintf('%s-%04d.params', $prefix, $epoch);
    $self->save_params($param_name);
    AI::MXNet::Logging->info('Saved checkpoint to "%s"', $param_name);
    if($save_optimizer_states)
    {
        my $state_name = sprintf('%s-%04d.states', $prefix, $epoch);
        $self->save_optimizer_states($state_name);
        AI::MXNet::Logging->info('Saved optimizer state to "%s"', $state_name);
    }
}
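
A hedged usage sketch tying save_checkpoint to the load class method shown above; the prefix and epoch are arbitrary:

    # writes mymodel-symbol.json and mymodel-0003.params (plus mymodel-0003.states if requested)
    $model->save_checkpoint('mymodel', 3);

    # later: rebuild the module from those files
    my $restored = AI::MXNet::Module->load('mymodel', 3);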

=head2 model_save_checkpoint

    Checkpoint the model data into file.

    Parameters

lib/AI/MXNet/Module.pm  view on Meta::CPAN

    Int                         $epoch,
    Maybe[AI::MXNet::Symbol]    $symbol,
    HashRef[AI::MXNet::NDArray] $arg_params,
    HashRef[AI::MXNet::NDArray] $aux_params
)
{
    if(defined $symbol)
    {
        $symbol->save("$prefix-symbol.json");
    }
    my $param_name = sprintf('%s-%04d.params', $prefix, $epoch);
    $self->save_params($param_name, $arg_params, $aux_params);
    AI::MXNet::Logging->info('Saved checkpoint to "%s"', $param_name);
}

# Internal function to reset the bound state.
method _reset_bind()
{
    $self->binded(0);
    $self->_p->_exec_group(undef);
    $self->_p->_data_shapes(undef);

lib/AI/MXNet/Module.pm  view on Meta::CPAN

method save_optimizer_states(Str $fname)
{
    assert($self->optimizer_initialized);
    if($self->_p->_update_on_kvstore)
    {
        $self->_p->_kvstore->save_optimizer_states($fname);
    }
    else
    {
        open(F, ">:raw", "$fname") or confess("can't open $fname for writing: $!");
        print F $self->_p->_updater->get_states();
        close(F);
    }
}

method load_optimizer_states(Str $fname)
{
    assert($self->optimizer_initialized);
    if($self->_p->_update_on_kvstore)
    {
        $self->_p->_kvstore->load_optimizer_states($fname);

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN

    my @candidates;
    my %args = map {
        push @candidates, $_ if not /_(?:weight|bias|gamma|beta)$/;
        $_ => 1
    } @{ $symbol->list_arguments };
    for my $name (@$names)
    {
        my $msg;
        if(not exists $args{$name} and $name ne 'softmax_label')
        {
            $msg = sprintf("\033[91mYou created Module with Module(..., %s_names=%s) but "
                ."input with name '%s' is not found in symbol.list_arguments(). "
                ."Did you mean one of:\n\t%s\033[0m",
                $typename, "@$names", $name, join("\n\t", @candidates)
            );
            if($throw)
            {
                confess($msg);
            }
            else
            {

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN

    ArrayRef[Str]                  $data_names,
    ArrayRef[NameShapeOrDataDesc]  $data_shapes,
    Str                            $name,
    Bool                           $throw
)
{
    return if (not @$data_shapes and @$data_names == 1 and  $data_names->[0] eq 'softmax_label');
    my @actual = map { @{$_}[0] } @{ $data_shapes };
    if("@$data_names" ne "@actual")
    {
        my $msg = sprintf(
            "Data provided by %s_shapes don't match names specified by %s_names (%s vs. %s)",
            $name, $name, "@$data_shapes", "@$data_names"
        );
        if($throw)
        {
            confess($msg);
        }
        else
        {
            AI::MXNet::Logging->warning($msg);

lib/AI/MXNet/Module/Base.pm  view on Meta::CPAN

            $next_data_batch = <$train_data>;
            if(defined $next_data_batch)
            {
                $self->prepare($next_data_batch);
            }
            else
            {
                $end_of_batch = 1;
            }
            $self->update_metric($eval_metric, $data_batch->label);
            $monitor->toc_print if $monitor;
            if(defined $batch_end_callback)
            {
                my $batch_end_params = AI::MXNet::BatchEndParam->new(
                    epoch       => $epoch,
                    nbatch      => $nbatch,
                    eval_metric => $eval_metric
                );
                for my $callback (@{ _as_list($batch_end_callback) })
                {
                    &{$callback}($batch_end_params);

lib/AI/MXNet/Monitor.pm  view on Meta::CPAN


    AI::MXNet::Monitor - Monitor outputs, weights, and gradients for debugging.

=head1 DESCRIPTION

    Monitor outputs, weights, and gradients for debugging.

    Parameters
    ----------
    interval : int
        Number of batches between printing.
    stat_func : function
        a function that computes statistics of tensors.
        Takes a NDArray and returns a NDArray. defaults to mean
        absolute value |x|/size(x).
    pattern : str
        A regular expression specifying which tensors to monitor.
        Only tensors with names that match name_pattern will be included.
        For example, '.*weight|.*output' will print all weights and outputs;
        '.*backward.*' will print all gradients.
=cut
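
A hedged construction sketch; only interval is required, and the pattern narrows monitoring to weights. Attaching the monitor to a bound module via install_monitor is assumed to mirror the Python API:

    my $mon = AI::MXNet::Monitor->new(
        interval => 100,          # print once every 100 batches
        pattern  => '.*weight',   # only tensors whose names end in 'weight'
    );
    $module->install_monitor($mon);   # $module: a bound AI::MXNet::Module (assumed call)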

has 'interval'  => (is => 'ro', isa => 'Int', required => 1);
has 'stat_func' => (
    is => 'ro',
    isa => 'CodeRef',
    default => sub {
        return sub {
            # returns |x|/size(x), async execution.
            my ($x) = @_;

lib/AI/MXNet/Monitor.pm  view on Meta::CPAN

            {
                $s .= $v->aspdl . "\t";
            }
        }
        push @res, [$n, $k, $s];
    }
    $self->queue([]);
    return \@res;
}

=head2 toc_print

    End collecting and print results
=cut

method toc_print()
{
    my $res = $self->toc;
    for my $r (@{ $res })
    {
        AI::MXNet::Logging->info('Batch: %7d %30s %s', @{ $r });
    }
}

method Monitor(@args)
{

lib/AI/MXNet/NDArray.pm  view on Meta::CPAN


    Returns
    -------
    result : NDArray
    Array with moved axes.

    Examples
    --------
    > $X = mx->nd->array([[1, 2, 3],
                          [4, 5, 6]]);
    > print Dumper($X->moveaxis(0, 1)->shape)
    [3, 2]
=cut

method moveaxis(Int $source, Int $dest)
{
    my @axes = 0..$self->ndim-1;
    $source += @axes if $source < 0;
    $dest += @axes if $dest < 0;
    assert($source < @axes);
    assert($dest < @axes);

lib/AI/MXNet/NDArray.pm  view on Meta::CPAN

        return __PACKAGE__->can($lfn_scalar)->(__PACKAGE__, $lhs, $rhs);
    }
    else
    {
        return __PACKAGE__->can($fn_array)->(__PACKAGE__, $lhs, $rhs);
    }
}

method stringify($other=, $reverse=)
{
    sprintf("<%s %s @%s>", ref($self), join('x', @{ $self->shape }), $self->context);
}

method iadd(AI::MXNet::NDArray|Num $other, $reverse=)
{
    confess('trying to add to a readonly NDArray') unless $self->writable;
    return ref $other 
        ? __PACKAGE__->broadcast_add($self, $other, { out => $self })
        : __PACKAGE__->_plus_scalar($self, $other, { out => $self })
}
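
A hedged sketch of these two methods in use, assuming stringify and iadd back the '""' and '+=' overloads, as their signatures suggest:

    my $a = mx->nd->ones([2, 3]);
    print "$a\n";       # <AI::MXNet::NDArray 2x3 @cpu(0)>
    $a += 1;            # in-place _plus_scalar; dies on a read-only NDArray
    print $a->aspdl;    # all twos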

lib/AI/MXNet/NDArray/Doc.pm  view on Meta::CPAN

        $arg_names,
        $arg_types,
        $arg_desc,
        $key_var_num_args,
        $ret_type) = @_;
    my $param_str = build_param_doc($arg_names, $arg_types, $arg_desc);
    if($key_var_num_args)
    {
        $desc .= "\nThis function support variable length of positional input."
    }
    my $doc_str = sprintf("%s\n\n" .
               "%s\n" .
               "out : NDArray, optional\n" .
               "    The output NDArray to hold the result.\n\n".
               "Returns\n" .
               "-------\n" .
               "out : NDArray or list of NDArray\n" .
               "    The output of this function.", $desc, $param_str);
    return $doc_str
}

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

    assert(
        (not $self->_modified),
        "After applying modifier cells (e.g. DropoutCell) the base "
        ."cell cannot be called directly. Call the modifier cell instead."
    );
    my @states;
    my $func_needs_named_name = $func ne AI::MXNet::Symbol->can('Variable');
    for my $info (@{ $self->state_info })
    {
        $self->_init_counter($self->_init_counter + 1);
        my @name = (sprintf("%sbegin_state_%d", $self->_prefix, $self->_init_counter));
        my %info = %{ $info//{} };
        if($func_needs_named_name)
        {
            unshift(@name, 'name');
        }
        else
        {
            if(exists $info{__layout__})
            {
                $info{kwargs} = { __layout__ => delete $info{__layout__} };

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

        hash ref with weights associated with
        this cell, unpacked.
=cut

method unpack_weights(HashRef[AI::MXNet::NDArray] $args)
{
    my %args = %{ $args };
    my $h = $self->_num_hidden;
    for my $group_name ('i2h', 'h2h')
    {
        my $weight = delete $args{ sprintf('%s%s_weight', $self->_prefix, $group_name) };
        my $bias   = delete $args{ sprintf('%s%s_bias', $self->_prefix, $group_name) };
        enumerate(sub {
            my ($j, $name) = @_;
            my $wname = sprintf('%s%s%s_weight', $self->_prefix, $group_name, $name);
            $args->{$wname} = $weight->slice([$j*$h,($j+1)*$h-1])->copy;
            my $bname = sprintf('%s%s%s_bias', $self->_prefix, $group_name, $name);
            $args->{$bname} = $bias->slice([$j*$h,($j+1)*$h-1])->copy;
        }, $self->_gate_names);
    }
    return \%args;
}

=head2 pack_weights

    Pack fused weight matrices into common
    weight matrices

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

method pack_weights(HashRef[AI::MXNet::NDArray] $args)
{
    my %args = %{ $args };
    my $h = $self->_num_hidden;
    for my $group_name ('i2h', 'h2h')
    {
        my @weight;
        my @bias;
        for my $name (@{ $self->_gate_names })
        {
            my $wname = sprintf('%s%s%s_weight', $self->_prefix, $group_name, $name);
            push @weight, delete $args{$wname};
            my $bname = sprintf('%s%s%s_bias', $self->_prefix, $group_name, $name);
            push @bias, delete $args{$bname};
        }
        $args{ sprintf('%s%s_weight', $self->_prefix, $group_name) } = AI::MXNet::NDArray->concatenate(
            \@weight
        );
        $args{ sprintf('%s%s_bias', $self->_prefix, $group_name) } = AI::MXNet::NDArray->concatenate(
            \@bias
        );
    }
    return \%args;
}
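
A hedged round-trip sketch: pack_weights is the inverse of unpack_weights. It assumes the mx->rnn->LSTMCell factory and a hashref of parameter NDArrays in $arg_params (e.g. from a trained module's get_params):

    my $cell     = mx->rnn->LSTMCell(num_hidden => 100, prefix => 'lstm_');
    my $unpacked = $cell->unpack_weights($arg_params);   # per-gate lstm_i2h_i_weight, lstm_h2h_f_bias, ...
    my $repacked = $cell->pack_weights($unpacked);       # fused lstm_i2h_weight, lstm_h2h_bias, ... again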

=head2 unroll

    Unroll an RNN cell across time steps.

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

}

method state_info()
{
    return [{ shape => [0, $self->_num_hidden], __layout__ => 'NC' }];
}

method call(AI::MXNet::Symbol $inputs, SymbolOrArrayOfSymbols $states)
{
    $self->_counter($self->_counter + 1);
    my $name = sprintf('%st%d_', $self->_prefix, $self->_counter);
    my $i2h = AI::MXNet::Symbol->FullyConnected(
        data       => $inputs,
        weight     => $self->_iW,
        bias       => $self->_iB,
        num_hidden => $self->_num_hidden,
        name       => "${name}i2h"
    );
    my $h2h = AI::MXNet::Symbol->FullyConnected(
        data       => @{$states}[0],
        weight     => $self->_hW,

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

}

method _gate_names()
{
    [qw/_i _f _c _o/];
}

method call(AI::MXNet::Symbol $inputs, SymbolOrArrayOfSymbols $states)
{
    $self->_counter($self->_counter + 1);
    my $name = sprintf('%st%d_', $self->_prefix, $self->_counter);
    my @states = @{ $states };
    my $i2h = AI::MXNet::Symbol->FullyConnected(
        data       => $inputs,
        weight     => $self->_iW,
        bias       => $self->_iB,
        num_hidden => $self->_num_hidden*4,
        name       => "${name}i2h"
    );
    my $h2h = AI::MXNet::Symbol->FullyConnected(
        data       => $states[0],

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

has '+_prefix'     => (default => 'gru_');

method _gate_names()
{
    [qw/_r _z _o/];
}

method call(AI::MXNet::Symbol $inputs, SymbolOrArrayOfSymbols $states)
{
    $self->_counter($self->_counter + 1);
    my $name = sprintf('%st%d_', $self->_prefix, $self->_counter);
    my $prev_state_h = @{ $states }[0];
    my $i2h = AI::MXNet::Symbol->FullyConnected(
        data       => $inputs,
        weight     => $self->_iW,
        bias       => $self->_iB,
        num_hidden => $self->_num_hidden*3,
        name       => "${name}i2h"
    );
    my $h2h = AI::MXNet::Symbol->FullyConnected(
        data       => $prev_state_h,

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

    my @directions = @{ $self->_directions };

    my $b = @directions;
    my $p = 0;
    for my $layer (0..$self->_num_layers-1)
    {
        for my $direction (@directions)
        {
            for my $gate (@gate_names)
            {
                my $name = sprintf('%s%s%d_i2h%s_weight', $self->_prefix, $direction, $layer, $gate);
                my $size;
                if($layer > 0)
                {
                    $size = $b*$lh*$lh;
                    $args{$name} = $arr->slice([$p,$p+$size-1])->reshape([$lh, $b*$lh]);
                }
                else
                {
                    $size = $li*$lh;
                    $args{$name} = $arr->slice([$p,$p+$size-1])->reshape([$lh, $li]);
                }
                $p += $size;
            }
            for my $gate (@gate_names)
            {
                my $name = sprintf('%s%s%d_h2h%s_weight', $self->_prefix, $direction, $layer, $gate);
                my $size = $lh**2;
                $args{$name} = $arr->slice([$p,$p+$size-1])->reshape([$lh, $lh]);
                $p += $size;
            }
        }
    }
    for my $layer (0..$self->_num_layers-1)
    {
        for my $direction (@directions)
        {
            for my $gate (@gate_names)
            {
                my $name = sprintf('%s%s%d_i2h%s_bias', $self->_prefix, $direction, $layer, $gate);
                $args{$name} = $arr->slice([$p,$p+$lh-1]);
                $p += $lh;
            }
            for my $gate (@gate_names)
            {
                my $name = sprintf('%s%s%d_h2h%s_bias', $self->_prefix, $direction, $layer, $gate);
                $args{$name} = $arr->slice([$p,$p+$lh-1]);
                $p += $lh;
            }
        }
    }
    assert($p == $arr->size, "Invalid parameters size for FusedRNNCell");
    return %args;
}

method unpack_weights(HashRef[AI::MXNet::NDArray] $args)

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

    return \%args
}

method pack_weights(HashRef[AI::MXNet::NDArray] $args)
{
    my %args = %{ $args };
    my $b = @{ $self->_directions };
    my $m = $self->_num_gates;
    my @c = @{ $self->_gate_names };
    my $h = $self->_num_hidden;
    my $w0 = $args{ sprintf('%sl0_i2h%s_weight', $self->_prefix, $c[0]) };
    my $num_input = $w0->shape->[1];
    my $total = ($num_input+$h+2)*$h*$m*$b + ($self->_num_layers-1)*$m*$h*($h+$b*$h+2)*$b;
    my $arr = AI::MXNet::NDArray->zeros([$total], ctx => $w0->context, dtype => $w0->dtype);
    my %nargs = $self->_slice_weights($arr, $num_input, $h);
    while(my ($name, $nd) = each %nargs)
    {
        $nd .= delete $args{ $name };
    }
    $args{ $self->_parameter->name } = $arr;
    return \%args;

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

                prefix     => shift
            )
        },
    }->{ $self->_mode };
    for my $i (0..$self->_num_layers-1)
    {
        if($self->_bidirectional)
        {
            $stack->add(
                AI::MXNet::RNN::BidirectionalCell->new(
                    $get_cell->(sprintf('%sl%d_', $self->_prefix, $i)),
                    $get_cell->(sprintf('%sr%d_', $self->_prefix, $i)),
                    output_prefix => sprintf('%sbi_%s_%d', $self->_prefix, $self->_mode, $i)
                )
            );
        }
        else
        {
            $stack->add($get_cell->(sprintf('%sl%d_', $self->_prefix, $i)));
        }
    }
    return $stack;
}

package AI::MXNet::RNN::SequentialCell;
use Mouse;
use AI::MXNet::Base;
extends 'AI::MXNet::RNN::Cell::Base';

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

    else
    {
        $r_outputs = [reverse(@{ $r_outputs })];
    }
    my $outputs = [];
    zip(sub {
        my ($i, $l_o, $r_o) = @_;
        push @$outputs, AI::MXNet::Symbol->Concat(
            $l_o, $r_o, dim=>(1+($merge_outputs?1:0)),
            name => $merge_outputs
                        ? sprintf('%sout', $self->_output_prefix)
                        : sprintf('%st%d', $self->_output_prefix, $i)
        );
    }, [0..@{ $l_outputs }-1], [@{ $l_outputs }], [@{ $r_outputs }]);
    if($merge_outputs)
    {
        $outputs = @{ $outputs }[0];
    }
    $states = [$l_states, $r_states];
    return($outputs, $states);
}

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

        dilate     => $self->_h2h_dilate,
        weight     => $self->_hW,
        bias       => $self->_hB
    );
    return ($i2h, $h2h);
}

method call(AI::MXNet::Symbol $inputs, AI::MXNet::Symbol|ArrayRef[AI::MXNet::Symbol] $states)
{
    $self->_counter($self->_counter + 1);
    my $name = sprintf('%st%d_', $self->_prefix, $self->_counter);
    my ($i2h, $h2h) = $self->_conv_forward($inputs, $states, $name);
    my $output = $self->_get_activation($i2h + $h2h, $self->_activation, name => "${name}out");
    return ($output, [$output]);
}

package AI::MXNet::RNN::ConvLSTMCell;
use Mouse;
extends 'AI::MXNet::RNN::ConvCell';
has '+forget_bias' => (default => 1);
has '+_prefix'     => (default => 'ConvLSTM_');

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

=cut

method _gate_names()
{
    return ['_i', '_f', '_c', '_o'];
}

method call(AI::MXNet::Symbol $inputs, AI::MXNet::Symbol|ArrayRef[AI::MXNet::Symbol] $states)
{
    $self->_counter($self->_counter + 1);
    my $name = sprintf('%st%d_', $self->_prefix, $self->_counter);
    my ($i2h, $h2h) = $self->_conv_forward($inputs, $states, $name);
    my $gates = $i2h + $h2h;
    my @slice_gates = @{ AI::MXNet::Symbol->SliceChannel(
        $gates,
        num_outputs => 4,
        axis => index($self->_conv_layout, 'C'),
        name => "${name}slice"
    ) };
    my $in_gate = AI::MXNet::Symbol->Activation(
        $slice_gates[0],

lib/AI/MXNet/RNN/Cell.pm  view on Meta::CPAN

=cut

method _gate_names()
{
    return ['_r', '_z', '_o'];
}

method call(AI::MXNet::Symbol $inputs, AI::MXNet::Symbol|ArrayRef[AI::MXNet::Symbol] $states)
{
    $self->_counter($self->_counter + 1);
    my $name = sprintf('%st%d_', $self->_prefix, $self->_counter);
    my ($i2h, $h2h) = $self->_conv_forward($inputs, $states, $name);
    my ($i2h_r, $i2h_z, $h2h_r, $h2h_z);
    ($i2h_r, $i2h_z, $i2h) = @{ AI::MXNet::Symbol->SliceChannel($i2h, num_outputs => 3, name => "${name}_i2h_slice") };
    ($h2h_r, $h2h_z, $h2h) = @{ AI::MXNet::Symbol->SliceChannel($h2h, num_outputs => 3, name => "${name}_h2h_slice") };
    my $reset_gate = AI::MXNet::Symbol->Activation(
        $i2h_r + $h2h_r, act_type => "sigmoid",
        name => "${name}_r_act"
    );
    my $update_gate = AI::MXNet::Symbol->Activation(
        $i2h_z + $h2h_z, act_type => "sigmoid",

lib/AI/MXNet/RecordIO.pm  view on Meta::CPAN

    -----------
    Int $idx
    Str $buf
=cut

method write_idx(Int $idx, Str $buf)
{
    my $pos = $self->tell();
    $self->write($buf);
    my $f = $self->fidx;
    print $f "$idx\t$pos\n";
    $self->idx->{$idx} = $pos;
    push @{ $self->keys }, $idx;
}

1;

lib/AI/MXNet/Symbol.pm  view on Meta::CPAN

        AI::MXNetCAPI::SymbolCreateFromJSON(
            $json
        )
    );
    $self->handle($handle);
}

method stringify($other=, $reverse=)
{
    my $name = $self->name;
    sprintf("<%s %s>", ref($self), $name ? $name : 'Grouped');
}

method add(AI::MXNet::Symbol|Num $other, $reverse=)
{
    return _ufunc_helper(
        $self,
        $other,
        qw/_Plus _PlusScalar/
    );
}
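
A hedged sketch, assuming these methods back the '""' and '+' overloads in the same way as their NDArray counterparts:

    my $x = mx->sym->Variable('x');
    print "$x\n";                          # <AI::MXNet::Symbol x>
    my $y = $x + 1;                        # dispatches to _PlusScalar
    my $z = $x + mx->sym->Variable('w');   # dispatches to _Plus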

lib/AI/MXNet/Symbol/Doc.pm  view on Meta::CPAN

                    ArrayRef[Str] $arg_desc,
                    Str $key_var_num_args=,
                    Str $ret_type=
)
{
    my $param_str = build_param_doc($arg_names, $arg_types, $arg_desc);
    if($key_var_num_args)
    {
        $desc .= "\nThis function support variable length of positional input."
    }
    my $doc_str = sprintf("%s\n\n" .
               "%s\n" .
               "name : string, optional.\n" .
               "    Name of the resulting symbol.\n\n" .
               "Returns\n" .
               "-------\n" .
               "symbol: Symbol\n" .
               "    The result symbol.", $desc, $param_str);
    return $doc_str;
}

lib/AI/MXNet/Symbol/NameManager.pm  view on Meta::CPAN

        A canonical name for the symbol.
=cut

method get(Maybe[Str] $name, Str $hint)
{
    return $name if $name;
    if(not exists $self->counter->{ $hint })
    {
        $self->counter->{ $hint } = 0;
    }
    $name = sprintf("%s%d", $hint, $self->counter->{ $hint });
    $self->counter->{ $hint }++;
    return $name;
}
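
A hedged sketch of the naming scheme, assuming a fresh NameManager starts with an empty counter hash:

    my $nm = AI::MXNet::Symbol::NameManager->new;
    print $nm->get(undef, 'fullyconnected'), "\n";    # fullyconnected0
    print $nm->get(undef, 'fullyconnected'), "\n";    # fullyconnected1
    print $nm->get('my_fc', 'fullyconnected'), "\n";  # my_fc (explicit names pass through)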

method current()
{
    $AI::MXNet::current_nm_ldr;
}

$AI::MXNet::current_nm_ldr = __PACKAGE__->new;

lib/AI/MXNet/Util/Printable.pm  view on Meta::CPAN

package AI::MXNet::Util::Printable;
use strict;
use warnings;
use Data::Dumper qw();
use overload '""' => sub { print Data::Dumper->new([shift])->Purity(1)->Deepcopy(1)->Terse(1)->Dump };

lib/AI/MXNet/Visualization.pm  view on Meta::CPAN


    ## creates the image file working directory
    mx->viz->plot_network($softmax, save_format => 'png')->render("network.png"); 

=head1 DESCRIPTION

     Visualization support for the Perl interface to the MXNet machine learning library

=head1 Class methods

=head2 print_summary

    print a summary of the symbol: layer types, output shapes, and parameter counts

    Parameters
    ----------
    symbol: AI::MXNet::Symbol
        symbol to be visualized
    shape: hashref
        hashref of shapes, str->shape (arrayref[int]), given input shapes
    line_length: int
        total length of printed lines
    positions: arrayref[float]
        relative or absolute positions of log elements in each line
    Returns
    ------
        nothing
=cut
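
A hedged usage sketch, assuming print_summary is reachable through the same mx->viz shortcut used for plot_network and reusing a $softmax symbol like the one in the SYNOPSIS, fed a single 28x28 grayscale image:

    mx->viz->print_summary(
        $softmax,                      # the symbol to summarize
        { data => [1, 1, 28, 28] },    # input shapes, keyed by argument name
    );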

method print_summary(
    AI::MXNet::Symbol        $symbol,
    Maybe[HashRef[Shape]]    $shape=,
    Int                      $line_length=120,
    ArrayRef[Num]            $positions=[.44, .64, .74, 1]
)
{
    my $show_shape;
    my %shape_dict;
    if(defined $shape)
    {

lib/AI/MXNet/Visualization.pm  view on Meta::CPAN

    }
    my $conf = decode_json($symbol->tojson);
    my $nodes = $conf->{nodes};
    my %heads = map { $_ => 1 } @{ $conf->{heads}[0] };
    if($positions->[-1] <= 1)
    {
        $positions = [map { int($line_length * $_) } @{ $positions }];
    }
    # header names for the different log elements
    my $to_display = ['Layer (type)', 'Output Shape', 'Param #', 'Previous Layer'];
    my $print_row = sub { my ($fields, $positions) = @_;
        my $line = '';
        enumerate(sub {
            my ($i, $field) = @_;
            $line .= $field//'';
            $line = substr($line, 0, $positions->[$i]);
            $line .= ' ' x ($positions->[$i] - length($line));

        }, $fields);
        print $line,"\n";
    };
    print('_' x $line_length,"\n");
    $print_row->($to_display, $positions);
    print('=' x $line_length,"\n");
    my $print_layer_summary = sub { my ($node, $out_shape) = @_;
        my $op = $node->{op};
        my $pre_node = [];
        my $pre_filter = 0;
        if($op ne 'null')
        {
            my $inputs = $node->{inputs};
            for my $item (@{ $inputs })
            {
                my $input_node = $nodes->[$item->[0]];
                my $input_name = $input_node->{name};

lib/AI/MXNet/Visualization.pm  view on Meta::CPAN

        else
        {
            $first_connection = $pre_node->[0];
        }
        my $fields = [
            $node->{name} . '(' . $op . ')',
            join('x', @{ $out_shape }),
            $cur_param,
            $first_connection
        ];
        $print_row->($fields, $positions);
        if(@{ $pre_node } > 1)
        {
            for my $i (1..@{ $pre_node }-1)
            {
                $fields = ['', '', '', $pre_node->[$i]];
                $print_row->($fields, $positions);
            }
        }
        return $cur_param;
    };
    my $total_params = 0;
    enumerate(sub {
        my ($i, $node) = @_;
        my $out_shape = [];
        my $op = $node->{op};
        return if($op eq 'null' and $i > 0);

lib/AI/MXNet/Visualization.pm  view on Meta::CPAN

            {
                my $key = $node->{name};
                $key .= '_output' if $op ne 'null';
                if(exists $shape_dict{ $key })
                {
                    my $end = @{ $shape_dict{ $key } };
                    @{ $out_shape } = @{ $shape_dict{ $key } }[1..$end-1];
                }
            }
        }
        $total_params += $print_layer_summary->($nodes->[$i], $out_shape);
        if($i == @{ $nodes } - 1)
        {
            print('=' x $line_length, "\n");
        }
        else
        {
            print('_' x $line_length, "\n");
        }
    }, $nodes);
    print("Total params: $total_params\n");
    print('_' x $line_length, "\n");
}

=head2 plot_network

    convert symbol to dot object for visualization

    Parameters
    ----------
    title: str
        title of the dot graph

t/test_conv.t  view on Meta::CPAN

use strict;
use warnings;
use AI::MXNet qw(mx);
use AI::MXNet::TestUtils qw(GetMNIST_ubyte);
use Test::More tests => 1;

## speed up the tests when a GPU is present
my $gpu_present = (`perl -e 'use AI::MXNet qw(mx); print mx->nd->ones([1], ctx => mx->gpu(0))->asscalar' 2>/dev/null` eq '1');

# symbol net
my $batch_size = 100;

### model
my $data = mx->symbol->Variable('data');
my $conv1= mx->symbol->Convolution(data => $data, name => 'conv1', num_filter => 32, kernel => [3,3], stride => [2,2]);
my $bn1  = mx->symbol->BatchNorm(data => $conv1, name => "bn1");
my $act1 = mx->symbol->Activation(data => $bn1, name => 'relu1', act_type => "relu");
my $mp1  = mx->symbol->Pooling(data => $act1, name => 'mp1', kernel => [2,2], stride =>[2,2], pool_type=>'max');

t/test_symbol.t  view on Meta::CPAN

    }
    my ($fc2, $act2, $fc3, $sym1);
    {
        local($mx::AttrScope) = mx->AttrScope(ctx_group=>'stage2');
        $fc2  = mx->symbol->FullyConnected(data => $act1, name => 'fc2', num_hidden => 64, lr_mult=>0.01);
        $act2 = mx->symbol->Activation(data => $fc2, name=>'relu2', act_type=>"relu");
        $fc3  = mx->symbol->FullyConnected(data => $act2, name=>'fc3', num_hidden=>10);
        $fc3  = mx->symbol->BatchNorm($fc3, name=>'batchnorm0');
        $sym1 = mx->symbol->SoftmaxOutput(data => $fc3, name => 'softmax')
    }
    { local $/ = undef; my $json = <DATA>; open(F, ">save_000800.json"); print F $json; close(F); };
    my $sym2 = mx->sym->load('save_000800.json');
    unlink 'save_000800.json';

    my %attr1 = %{ $sym1->attr_dict };
    my %attr2 = %{ $sym2->attr_dict };
    while(my ($k, $v1) = each %attr1)
    {
        ok(exists $attr2{ $k });
        my $v2 = $attr2{$k};
        while(my ($kk, $vv1) = each %{ $v1 })


