view release on metacpan or search on metacpan
examples/mnist.pl view on Meta::CPAN
# Flatten the data from 4-D shape (batch_size, num_channel, width, height)
# into 2-D (batch_size, num_channel*width*height)
$data = mx->sym->Flatten(data => $data);
# The first fully-connected layer
# my $fc1 = mx->sym->FullyConnected(data => $data, name => 'fc1', num_hidden => 128);
# # Apply relu to the output of the first fully-connected layer
# my $act1 = mx->sym->Activation(data => $fc1, name => 'relu1', act_type => "relu");
# The second fully-connected layer and the corresponding activation function
my $fc2 = mx->sym->FullyConnected(data => $data, name => 'fc2', num_hidden => 64);
my $act2 = mx->sym->Activation(data => $fc2, name => 'relu2', act_type => "relu");
# The third fully-connected layer; note that the hidden size should be 10, which is the number of unique digits
my $fc3 = mx->sym->FullyConnected(data => $act2, name => 'fc3', num_hidden => 10);
# The softmax and loss layer
my $mlp = mx->sym->SoftmaxOutput(data => $fc3, name => 'softmax');
return $mlp;
}
sub nn_conv {
my($data) = @_;
# Epoch[9] Batch [200] Speed: 1625.07 samples/sec Train-accuracy=0.992090
# Epoch[9] Batch [400] Speed: 1630.12 samples/sec Train-accuracy=0.992850
# Epoch[9] Train-accuracy=0.991357
# Epoch[9] Time cost=36.817
# Epoch[9] Validation-accuracy=0.988100
my $conv1= mx->symbol->Convolution(data => $data, name => 'conv1', num_filter => 20, kernel => [5,5], stride => [2,2]);
my $bn1 = mx->symbol->BatchNorm(data => $conv1, name => "bn1");
my $act1 = mx->symbol->Activation(data => $bn1, name => 'relu1', act_type => "relu");
my $mp1 = mx->symbol->Pooling(data => $act1, name => 'mp1', kernel => [2,2], stride =>[1,1], pool_type=>'max');
my $conv2= mx->symbol->Convolution(data => $mp1, name => 'conv2', num_filter => 50, kernel=>[3,3], stride=>[2,2]);
lib/AI/MXNet/Base.pm view on Meta::CPAN
use AI::MXNetCAPI 1.0102;
use AI::NNVMCAPI 1.01;
use AI::MXNet::Types;
use Time::HiRes;
use Carp;
use Exporter;
use base qw(Exporter);
use List::Util qw(shuffle);
@AI::MXNet::Base::EXPORT = qw(product enumerate assert zip check_call build_param_doc
pdl cat dog svd bisect_left pdl_shuffle
DTYPE_STR_TO_MX DTYPE_MX_TO_STR DTYPE_MX_TO_PDL
DTYPE_PDL_TO_MX DTYPE_MX_TO_PERL GRAD_REQ_MAP);
@AI::MXNet::Base::EXPORT_OK = qw(pzeros pceil);
use constant DTYPE_STR_TO_MX => {
float32 => 0,
float64 => 1,
float16 => 2,
uint8 => 3,
int32 => 4
};
lib/AI/MXNet/Base.pm view on Meta::CPAN
Calculates the product of the input arguments.
=cut
sub product
{
my $p = 1;
map { $p = $p * $_ } @_;
return $p;
}
=head2 bisect_left
https://hg.python.org/cpython/file/2.7/Lib/bisect.py
=cut
sub bisect_left
{
my ($a, $x, $lo, $hi) = @_;
$lo //= 0;
$hi //= @{ $a };
if($lo < 0)
{
Carp::confess('lo must be non-negative');
}
while($lo < $hi)
{
lib/AI/MXNet/Base.pm view on Meta::CPAN
arg_descs : array ref of str
Argument description information.
remove_dup : boolean, optional
Whether to remove duplication or not.
Returns
-------
docstr : str
Python docstring of parameter sections.
=cut
sub build_param_doc
{
my ($arg_names, $arg_types, $arg_descs, $remove_dup) = @_;
$remove_dup //= 1;
my %param_keys;
my @param_str;
zip(sub {
my ($key, $type_info, $desc) = @_;
lib/AI/MXNet/Callback.pm view on Meta::CPAN
if(($count % $self->frequent) == 0)
{
my $speed = $self->frequent * $self->batch_size / (time - $self->tic);
if(defined $param->eval_metric)
{
my $name_value = $param->eval_metric->get_name_value;
$param->eval_metric->reset if $self->auto_reset;
while(my ($name, $value) = each %{ $name_value })
{
AI::MXNet::Logging->info(
"Epoch[%d] Batch [%d]\tSpeed: %.2f samples/sec\tTrain-%s=%f",
$param->epoch, $count, $speed, $name, $value
);
}
}
else
{
AI::MXNet::Logging->info(
"Iter[%d] Batch [%d]\tSpeed: %.2f samples/sec",
$param->epoch, $count, $speed
);
}
$self->tic(time);
}
}
else
{
$self->init(1);
$self->tic(time);
lib/AI/MXNet/Contrib/AutoGrad.pm view on Meta::CPAN
method grad(CodeRef $func, Maybe[Int|ArrayRef[Int]] $argnum=)
{
my $grad_with_loss_func = __PACKAGE__->grad_and_loss($func, $argnum);
return sub {
return ($grad_with_loss_func->(@_))[0];
};
}
method train_section(CodeRef $sub)
{
my $prev = __PACKAGE__->set_is_training(1);
$sub->();
__PACKAGE__->set_is_training(0) unless $prev;
}
method test_section(CodeRef $sub)
{
my $prev = __PACKAGE__->set_is_training(0);
$sub->();
__PACKAGE__->set_is_training(1) if $prev;
}
1;
lib/AI/MXNet/Executor.pm view on Meta::CPAN
}
}
check_call(AI::MXNetCAPI::ExecutorForward(
$self->handle,
$is_train
)
);
if($self->_output_dirty)
{
AI::MXNet::Logging->warning(
"Calling forward the second time after forward(is_train=1) "
."without calling backward first. Is this intended?"
);
}
$self->_output_dirty($is_train);
return $self->outputs;
}
=head2 backward
Do a backward pass to get the gradient of the arguments.
lib/AI/MXNet/Initializer.pm view on Meta::CPAN
Jozefowicz et al. 2015 recommends setting this to 1.0.
=cut
use Mouse;
extends 'AI::MXNet::Initializer';
has 'forget_bias' => (is => 'ro', isa => 'Num', required => 1);
method _init_weight(Str $name, AI::MXNet::NDArray $arr)
{
$arr .= 0;
# in the case of LSTMCell the forget gate is the second
# gate of the 4 LSTM gates, we modify the according values.
my $num_hidden = int($arr->shape->[0] / 4);
$arr->slice([$num_hidden, 2*$num_hidden-1]) .= $self->forget_bias;
}
__PACKAGE__->register;
package AI::MXNet::FusedRNN;
use Mouse;
use JSON::PP;
lib/AI/MXNet/Optimizer.pm view on Meta::CPAN
Parameters
----------
learning_rate : float, optional
Step size.
Default value is set to 0.001.
beta1 : float, optional
Exponential decay rate for the first moment estimates.
Default value is set to 0.9.
beta2 : float, optional
Exponential decay rate for the second moment estimates.
Default value is set to 0.999.
epsilon : float, optional
Default value is set to 1e-8.
decay_factor : float, optional
Default value is set to 1 - 1e-8.
wd : float, optional
L2 regularization coefficient add to all the weights
rescale_grad : float, optional
rescaling factor of gradient. Normally should be 1/batch_size.
lib/AI/MXNet/Optimizer.pm view on Meta::CPAN
available at http://arxiv.org/abs/1412.6980 Section 7.
This optimizer accepts the following parameters in addition to those accepted
AI::MXNet::Optimizer.
Parameters
----------
beta1 : float, optional
Exponential decay rate for the first moment estimates.
beta2 : float, optional
Exponential decay rate for the second moment estimates.
=cut
use Mouse;
extends 'AI::MXNet::Optimizer';
has '+learning_rate' => (default => 0.002);
has 'beta1' => (is => "ro", isa => "Num", default => 0.9);
has 'beta2' => (is => "ro", isa => "Num", default => 0.999);
method create_state(Index $index, AI::MXNet::NDArray $weight)
{
lib/AI/MXNet/Optimizer.pm view on Meta::CPAN
at http://cs229.stanford.edu/proj2015/054_report.pdf.
This optimizer accepts the following parameters in addition to those accepted
AI::MXNet::Optimizer.
Parameters
----------
beta1 : float, optional
Exponential decay rate for the first moment estimates.
beta2 : float, optional
Exponential decay rate for the second moment estimates.
epsilon : float, optional
Small value to avoid division by 0.
schedule_decay : float, optional
Exponential decay rate for the momentum schedule
=cut
use Mouse;
extends 'AI::MXNet::Optimizer';
has '+learning_rate' => (default => 0.001);
has 'beta1' => (is => "ro", isa => "Num", default => 0.9);
lib/AI/MXNet/RNN/IO.pm view on Meta::CPAN
push @buckets, $i;
}
}, $p->histogram(1,0,$p->max+1)->unpdl);
$self->buckets(\@buckets);
}
@{ $self->buckets } = sort { $a <=> $b } @{ $self->buckets };
my $ndiscard = 0;
$self->data([map { [] } 0..@{ $self->buckets }-1]);
for my $i (0..@{$self->sentences}-1)
{
my $buck = bisect_left($self->buckets, scalar(@{ $self->sentences->[$i] }));
if($buck == @{ $self->buckets })
{
$ndiscard += 1;
next;
}
my $buff = AI::MXNet::NDArray->full(
[$self->buckets->[$buck]],
$self->invalid_label,
dtype => $self->dtype
)->aspdl;
t/test_io_image.t view on Meta::CPAN
kwargs => { rand_crop=>1,
rand_resize=>1,
rand_mirror=>1 }
);
$data->reset();
my $tic = time;
for my $i (1..$n)
{
$data->next;
mx->nd->waitall;
warn("average speed after iteration $i is " . $batch_size*$i/(time - $tic) . " samples/sec");
}
}
run_imageiter('data/cifar/test.rec', 20);
ok(1);