AI-MXNet
    
    
  
  
  
Revision history for Perl extension AI::MXNet
1.0102  Sun Aug  6 16:55:08 PDT 2017
        - bugfixes in Image.pm, updated tests, added PearsonCorrelation metric, added Convolutional RNN modules.
1.0101  Sun Jul  2 17:16:01 PDT 2017
        - reworked CachedOp, two new optimizers, auto module reshape, using strings to index the kvstore.
1.01    Sat Jun 10 23:57:27 PDT 2017
        - sync with python.
0.9507  Thu May 11 17:04:44 PDT 2017
        - added AutoGrad, bugfixes.
    
  
  
  lib/AI/MXNet/LRScheduler.pm
package AI::MXNet::FactorScheduler;
=head1 NAME
    AI::MXNet::FactorScheduler - Reduces the learning rate by a factor.
=head1 DESCRIPTION
    Reduces the learning rate by a fixed factor every `step` updates.
    Assume the weight has been updated n times; the learning rate will then
    be base_lr * factor^(floor(n/step))
    Parameters
    ----------
    step: int
        the number of weight updates between successive learning rate reductions
    factor: float
        the factor by which to reduce the learning rate.
=cut
use Mouse;
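A hedged usage sketch: the step, factor, and learning_rate values are illustrative, and wiring the scheduler into an optimizer via AI::MXNet::Optimizer->create and its lr_scheduler attribute is assumed to mirror the Python API.

    use AI::MXNet qw(mx);

    # Halve the learning rate every 5000 weight updates (illustrative values).
    my $scheduler = AI::MXNet::FactorScheduler->new(step => 5000, factor => 0.5);

    # After n updates the effective rate is base_lr * factor^(floor(n/5000)).
    my $optimizer = AI::MXNet::Optimizer->create(
        'sgd',
        learning_rate => 0.1,
        lr_scheduler  => $scheduler,
    );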
    
  
  
  lib/AI/MXNet/LRScheduler.pm
package AI::MXNet::MultiFactorScheduler;
=head1 NAME
    AI::MXNet::MultiFactorScheduler - Reduces the learning rate by a factor at steps given in an array ref.
=head1 DESCRIPTION
    Reduces the learning rate by a factor at each of the steps specified in an array ref.
    Assume the weight has been updated n times; the learning rate will then
    be base_lr * factor^k, where k is the number of entries in the step array that are <= n.
    Parameters
    ----------
    step: array ref of int
        the update counts at which the learning rate is reduced
    factor: float
        the factor for reducing the learning rate
=cut
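A matching sketch for the multi-step case; the step points and factor are illustrative.

    use AI::MXNet qw(mx);

    # Reduce the learning rate by 0.5 after 8000, 12000, and 14000 updates;
    # between those points the rate stays constant.
    my $scheduler = AI::MXNet::MultiFactorScheduler->new(
        step   => [8000, 12000, 14000],
        factor => 0.5,
    );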
    
  
  
  lib/AI/MXNet/Module/Base.pm
    depth network).
    A module has several states (a lifecycle sketch follows this list):
        - Initial state. Memory is not allocated yet; the module is not ready for computation.
        - Bound. Shapes of the inputs, outputs, and parameters are all known, memory has been
        allocated, and the module is ready for computation.
        - Parameters initialized. For modules with parameters, doing computation before initializing
        the parameters might result in undefined outputs.
        - Optimizer installed. An optimizer can be installed into a module. After this, the parameters
        of the module can be updated according to the optimizer after gradients are computed
        (forward-backward).
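Those states map onto the usual call sequence on an AI::MXNet::Module. The sketch below is a hedged illustration: the symbol, batch size, input shape, and optimizer settings are made up, and the data_shapes/label_shapes format is assumed to accept [name, shape] pairs.

    use AI::MXNet qw(mx);

    # A tiny network: data -> fully connected -> softmax (illustrative).
    my $data = mx->sym->Variable('data');
    my $fc   = mx->sym->FullyConnected(data => $data, name => 'fc1', num_hidden => 10);
    my $net  = mx->sym->SoftmaxOutput(data => $fc, name => 'softmax');
    my $mod  = AI::MXNet::Module->new(symbol => $net, context => mx->cpu);

    # Initial state -> Bound: shapes are known and memory is allocated.
    $mod->bind(
        data_shapes  => [['data', [32, 100]]],
        label_shapes => [['softmax_label', [32]]],
    );

    # Bound -> Parameters initialized.
    $mod->init_params();

    # Parameters initialized -> Optimizer installed.
    $mod->init_optimizer(
        optimizer        => 'sgd',
        optimizer_params => { learning_rate => 0.01 },
    );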
    In order for a module to interact with others, it should be able to report the
    following information in its raw stage (before it is bound):
        - data_names: array ref of strings indicating the names of the required data.
        - output_names: array ref of strings indicating the names of the required outputs.
    And also the following richer information after it is bound:
    
  
  
  lib/AI/MXNet/NDArray.pm
}
=head2 _fresh_grad
        Parameters:
        ----------
        Maybe[Bool] $state=
        Whether this array's corresponding gradient array
        (registered via `autograd->mark_variables`) has been
        updated by `autograd->backward` since the last reset.
        `_fresh_grad` needs to be manually set to False
        after consuming the gradient (usually after updating this
        array).
=cut
method _fresh_grad(Maybe[Bool] $state=)
{
    if(defined $state)
    {
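For context, a hedged sketch of the autograd round trip that this flag tracks. mark_variables and backward are the calls named in the description above; the record() wrapper and the shapes are assumptions borrowed from the Python autograd API and may differ in this version.

    use AI::MXNet qw(mx);

    my $x    = mx->nd->ones([2, 2]);
    my $grad = mx->nd->zeros([2, 2]);
    # Register $grad as the gradient buffer for $x.
    mx->autograd->mark_variables([$x], [$grad]);

    # Record a computation and propagate gradients back into $grad.
    my $y;
    mx->autograd->record(sub { $y = $x * 2 });
    mx->autograd->backward([$y]);

    # $grad now holds d(y)/d(x); the array's _fresh_grad state remains set
    # until it is manually reset after the gradient has been consumed.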
    
  
  
  lib/AI/MXNet/Symbol.pm
    my %shared_data;
    if(defined $shared_buffer)
    {
        while(my ($k, $v) = each %{ $shared_buffer })
        {
            $shared_data{$k} = $v->handle;
        }
    }
    my $shared_exec_handle = defined $shared_exec ? $shared_exec->handle : undef;
    my (
        $updated_shared_data,
        $in_arg_handles,
        $arg_grad_handles,
        $aux_state_handles,
        $exe_handle
    );
    eval {
        ($updated_shared_data, $in_arg_handles, $arg_grad_handles, $aux_state_handles, $exe_handle)
            =
        check_call(
            AI::MXNetCAPI::ExecutorSimpleBind(
                $self->handle,
                $ctx->device_type_id,
                $ctx->device_id,
                $num_ctx_map_keys,
                \@ctx_map_keys,
                \@ctx_map_dev_types,
                \@ctx_map_dev_ids,
    
  
  
  lib/AI/MXNet/Symbol.pm
    {
        confess(
            "simple_bind failed: Error: $@; Arguments: ".
            Data::Dumper->new(
                [$shapes//{}]
            )->Purity(1)->Deepcopy(1)->Terse(1)->Dump
        );
    }
    if(defined $shared_buffer)
    {
        while(my ($k, $v) = each %{ $updated_shared_data })
        {
            $shared_buffer->{$k} = AI::MXNet::NDArray->new(handle => $v);
        }
    }
    my @arg_arrays  = map { AI::MXNet::NDArray->new(handle => $_) } @{ $in_arg_handles };
    my @grad_arrays = map { defined $_ ? AI::MXNet::NDArray->new(handle => $_) : undef  } @{ $arg_grad_handles };
    my @aux_arrays  = map { AI::MXNet::NDArray->new(handle => $_) } @{ $aux_state_handles };
    my $executor = AI::MXNet::Executor->new(
        handle    => $exe_handle,
        symbol    => $self,
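From the caller's side, that flow might look like the rough sketch below. The network and input shape are made up, and the executor's arg_dict and outputs accessors are assumed to mirror the Python API.

    use AI::MXNet qw(mx);

    my $data = mx->sym->Variable('data');
    my $net  = mx->sym->FullyConnected(data => $data, name => 'fc1', num_hidden => 10);

    # Bind with a concrete input shape; this is what ends up in the
    # ExecutorSimpleBind call shown above.
    my $exec = $net->simple_bind(ctx => mx->cpu, shapes => { data => [2, 5] });

    # Fill the bound input array and run a forward pass.
    $exec->arg_dict->{data} .= mx->nd->ones([2, 5]);
    $exec->forward(0);
    my $out = $exec->outputs->[0];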
    
  
  
  lib/AI/MXNet/Symbol/AttrScope.pm
    Get the attribute hash ref for a symbol, merged with the attributes of the current scope.
    Parameters
    ----------
    $attr : Maybe[HashRef[Str]]
        The attributes passed in by the user during symbol creation.
    Returns
    -------
    $attr : HashRef[Str]
        The attributes, updated to include the scope-related attributes.
=cut
method get(Maybe[HashRef[Str]] $attr=)
{
    return bless($attr//{}, 'AI::MXNet::Util::Printable') unless %{ $self->attr };
    my %ret = (%{ $self->attr }, %{ $attr//{} });
    return bless (\%ret, 'AI::MXNet::Util::Printable');
}
$AI::MXNet::curr_attr_scope = __PACKAGE__->new;
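A hedged illustration of the merge that get() performs. Constructing an AttrScope directly with an attr hash, and the attribute names used, are assumptions for the sketch; normally the current scope is tracked in $AI::MXNet::curr_attr_scope.

    use AI::MXNet qw(mx);

    # A scope carrying one attribute (illustrative)...
    my $scope = AI::MXNet::Symbol::AttrScope->new(attr => { ctx_group => 'dev1' });

    # ...merged with the attributes a user passes in at symbol-creation time.
    my $merged = $scope->get({ lr_mult => '0.1' });
    # $merged now contains both ctx_group => 'dev1' and lr_mult => '0.1'.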
    
  
  
  