META.yml
MANIFEST
examples/calculator.pl
examples/plot_network.pl
examples/char_lstm.pl
examples/get_ptb_data.sh
examples/lstm_bucketing.pl
examples/mnist.pl
examples/cudnn_lstm_bucketing.pl
Makefile.PL
Changes
META.json
t/test_recordio.t
t/test_random.t
t/test_init.t
t/test_model_parallel.t
examples/calculator.pl view on Meta::CPAN
use AI::MXNet ('mx');
## preparing the samples
## to train our network
sub samples {
my($batch_size, $func) = @_;
# get samples
my $n = 16384;
## creates a pdl with $n rows and two columns of random
## floats in the range [0, 1)
my $data = PDL->random(2, $n);
## creates a pdl with $n rows and one column of labels;
## each label is the sum, product, etc. of the two random
## values in the corresponding row of the data pdl
my $label = $func->($data->slice('0,:'), $data->slice('1,:'));
# partition into train/eval sets
my $edge = int($n / 8);
my $validation_data = $data->slice(":,0:@{[ $edge - 1 ]}");
my $validation_label = $label->slice(":,0:@{[ $edge - 1 ]}");
my $train_data = $data->slice(":,$edge:");
my $train_label = $label->slice(":,$edge:");
# build iterators around the sets
return(mx->io->NDArrayIter(
batch_size => $batch_size,
data => $train_data,
label => $train_label,
), mx->io->NDArrayIter(
batch_size => $batch_size,
data => $validation_data,
label => $validation_label,
));
}
## the network model: exp(FC(log(x))) can express products and
## quotients of the inputs; concatenated with the raw inputs, the
## final FC layer can then learn +, -, * and /
sub nn_fc {
my $data = mx->sym->Variable('data');
my $ln = mx->sym->exp(mx->sym->FullyConnected(
data => mx->sym->log($data),
num_hidden => 1,
));
my $wide = mx->sym->Concat($data, $ln);
my $fc = mx->sym->FullyConnected(
$wide,
num_hidden => 1
);
return mx->sym->MAERegressionOutput(data => $fc, name => 'softmax');
}
sub learn_function {
my(%args) = @_;
my $func = $args{func};
my $batch_size = $args{batch_size}//128;
my($train_iter, $eval_iter) = samples($batch_size, $func);
my $sym = nn_fc();
## call as ./calculator.pl 1 to just print model and exit
if($ARGV[0]) {
my @dsz = @{$train_iter->data->[0][1]->shape};
my @lsz = @{$train_iter->label->[0][1]->shape};
my $shape = {
data => [ $batch_size, splice @dsz, 1 ],
softmax_label => [ $batch_size, splice @lsz, 1 ],
};
print mx->viz->plot_network($sym, shape => $shape)->graph->as_png;
exit;
}
my $model = mx->mod->Module(
symbol => $sym,
context => mx->cpu(),
);
$model->fit($train_iter,
eval_data => $eval_iter,
optimizer => 'adam',
optimizer_params => {
learning_rate => $args{lr}//0.01,
rescale_grad => 1/$batch_size,
lr_scheduler => AI::MXNet::FactorScheduler->new(
step => 100,
factor => 0.99
)
},
eval_metric => 'mse',
num_epoch => $args{epoch}//25,
);
# reshape the model for calling on 1 sample at a time
my $iter = mx->io->NDArrayIter(
batch_size => 1,
data => PDL->pdl([[ 0, 0 ]]),
label => PDL->pdl([[ 0 ]]),
);
$model->reshape(
data_shapes => $iter->provide_data,
label_shapes => $iter->provide_label,
);
# print the learned parameters
my ($arg_params) = $model->get_params;
for my $k (sort keys %$arg_params)
{
print "$k -> ". $arg_params->{$k}->aspdl."\n";
}
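# wrap a helper around making predictions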
return sub {
my($n, $m) = @_;
return $model->predict(mx->io->NDArrayIter(
batch_size => 1,
data => PDL->new([[ $n, $m ]]),
))->aspdl->list;
};
}
my $add = learn_function(func => sub {
my($n, $m) = @_;
return $n + $m;
});
my $sub = learn_function(func => sub {
my($n, $m) = @_;
return $n - $m;
});
examples/char_lstm.pl view on Meta::CPAN
--chkp-prefix prefix for checkpoint files, default='lstm_'
--cell-mode RNN cell mode (LSTM, GRU, RNN, default=LSTM)
--sample-size size of the sample text to infer after each epoch, default=10000
--chkp-epoch save a checkpoint after this many epochs, default=1 (save after every epoch)
=cut
package AI::MXNet::RNN::IO::ASCIIIterator;
use Mouse;
extends 'AI::MXNet::DataIter';
has 'data' => (is => 'ro', isa => 'PDL', required => 1);
has 'seq_size' => (is => 'ro', isa => 'Int', required => 1);
has '+batch_size' => (is => 'ro', isa => 'Int', required => 1);
has 'data_name' => (is => 'ro', isa => 'Str', default => 'data');
has 'label_name' => (is => 'ro', isa => 'Str', default => 'softmax_label');
has 'dtype' => (is => 'ro', isa => 'Dtype', default => 'float32');
has [qw/nd counter seq_counter vocab_size
data_size provide_data provide_label idx/] => (is => 'rw', init_arg => undef);
sub BUILD
{
my $self = shift;
$self->data_size($self->data->nelem);
my $segments = int(($self->data_size-$self->seq_size)/($self->batch_size*$self->seq_size));
$self->idx([0..$segments-1]);
$self->vocab_size($self->data->uniq->shape->at(0));
$self->counter(0);
$self->seq_counter(0);
$self->nd(mx->nd->array($self->data, dtype => $self->dtype));
my $shape = [$self->batch_size, $self->seq_size];
$self->provide_data([
AI::MXNet::DataDesc->new(
name => $self->data_name,
shape => $shape,
dtype => $self->dtype
)
]);
$self->provide_label([
AI::MXNet::DataDesc->new(
name => $self->label_name,
shape => $shape,
dtype => $self->dtype
)
]);
}
examples/cudnn_lstm_bucketing.pl view on Meta::CPAN
invalid_label => $invalid_label,
start_label => $start_label
);
return ($sentences, $vocab);
}
my $buckets = [10, 20, 30, 40, 50, 60];
my $start_label = 1;
my $invalid_label = 0;
func get_data($layout)
{
my ($train_sentences, $vocabulary) = tokenize_text(
'./data/ptb.train.txt', start_label => $start_label,
invalid_label => $invalid_label
);
my ($validation_sentences) = tokenize_text(
'./data/ptb.test.txt', vocab => $vocabulary,
start_label => $start_label, invalid_label => $invalid_label
);
my $data_train = mx->rnn->BucketSentenceIter(
$train_sentences, $batch_size, buckets => $buckets,
invalid_label => $invalid_label,
layout => $layout
);
my $data_val = mx->rnn->BucketSentenceIter(
$validation_sentences, $batch_size, buckets => $buckets,
invalid_label => $invalid_label,
layout => $layout
);
return ($data_train, $data_val, $vocabulary);
}
my $train = sub
{
my ($data_train, $data_val, $vocab) = get_data('TN');
my $cell;
if($stack_rnn)
{
my $stack = mx->rnn->SequentialRNNCell();
for my $i (0..$num_layers-1)
{
my $dropout_rate = 0;
if($i < $num_layers-1)
{
$dropout_rate = $dropout;
examples/get_ptb_data.sh view on Meta::CPAN
#!/usr/bin/env bash
RNN_DIR=$(cd "$(dirname "$0")"; pwd)
DATA_DIR="${RNN_DIR}/data/"
if [[ ! -d "${DATA_DIR}" ]]; then
echo "${DATA_DIR} doesn't exist, creating it";
mkdir -p "${DATA_DIR}"
fi
wget -P "${DATA_DIR}" https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/ptb/ptb.train.txt;
wget -P "${DATA_DIR}" https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/ptb/ptb.valid.txt;
wget -P "${DATA_DIR}" https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/ptb/ptb.test.txt;
wget -P "${DATA_DIR}" https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/tinyshakespeare/input.txt;
examples/lstm_bucketing.pl view on Meta::CPAN
'wd=f' => \(my $wd = 0.00001 ),
'batch-size=i' => \(my $batch_size = 32 ),
'disp-batches=i' => \(my $disp_batches = 50 ),
'chkp-prefix=s' => \(my $chkp_prefix = 'lstm_' ),
'chkp-epoch=i' => \(my $chkp_epoch = 0 ),
'help' => sub { HelpMessage(0) },
) or HelpMessage(1);
=head1 NAME
lstm_bucketing.pl - Example of training an LSTM RNN on Penn Treebank data using the high-level RNN interface
=head1 SYNOPSIS
--num-layers number of stacked RNN layers, default=2
--num-hidden hidden layer size, default=200
--num-embed embedding layer size, default=200
--gpus list of gpus to run on, e.g. 0 or 0,2,5. Empty means use the CPU.
Increase the batch size when using multiple gpus for best performance.
--kv-store key-value store type, default='device'
--num-epochs max num of epochs, default=25
lib/AI/MXNet.pm view on Meta::CPAN
__END__
=encoding UTF-8
=head1 NAME
AI::MXNet - Perl interface to MXNet machine learning library
=head1 SYNOPSIS
## Convolutional NN for recognizing hand-written digits in MNIST dataset
## It's considered the "Hello, World" of neural networks
## For more info about the MNIST problem please refer to http://neuralnetworksanddeeplearning.com/chap1.html
use strict;
use warnings;
use AI::MXNet qw(mx);
use AI::MXNet::TestUtils qw(GetMNIST_ubyte);
use Test::More tests => 1;
# symbol net
my $batch_size = 100;
### model
my $data = mx->symbol->Variable('data');
my $conv1= mx->symbol->Convolution(data => $data, name => 'conv1', num_filter => 32, kernel => [3,3], stride => [2,2]);
my $bn1 = mx->symbol->BatchNorm(data => $conv1, name => "bn1");
my $act1 = mx->symbol->Activation(data => $bn1, name => 'relu1', act_type => "relu");
my $mp1 = mx->symbol->Pooling(data => $act1, name => 'mp1', kernel => [2,2], stride =>[2,2], pool_type=>'max');
my $conv2= mx->symbol->Convolution(data => $mp1, name => 'conv2', num_filter => 32, kernel=>[3,3], stride=>[2,2]);
my $bn2 = mx->symbol->BatchNorm(data => $conv2, name=>"bn2");
my $act2 = mx->symbol->Activation(data => $bn2, name=>'relu2', act_type=>"relu");
my $mp2 = mx->symbol->Pooling(data => $act2, name => 'mp2', kernel=>[2,2], stride=>[2,2], pool_type=>'max');
my $fl = mx->symbol->Flatten(data => $mp2, name=>"flatten");
my $fc1 = mx->symbol->FullyConnected(data => $fl, name=>"fc1", num_hidden=>30);
my $act3 = mx->symbol->Activation(data => $fc1, name=>'relu3', act_type=>"relu");
my $fc2 = mx->symbol->FullyConnected(data => $act3, name=>'fc2', num_hidden=>10);
my $softmax = mx->symbol->SoftmaxOutput(data => $fc2, name => 'softmax');
# check data
GetMNIST_ubyte();
my $train_dataiter = mx->io->MNISTIter({
image=>"data/train-images-idx3-ubyte",
label=>"data/train-labels-idx1-ubyte",
data_shape=>[1, 28, 28],
batch_size=>$batch_size, shuffle=>1, flat=>0, silent=>0, seed=>10});
my $val_dataiter = mx->io->MNISTIter({
image=>"data/t10k-images-idx3-ubyte",
label=>"data/t10k-labels-idx1-ubyte",
data_shape=>[1, 28, 28],
batch_size=>$batch_size, shuffle=>1, flat=>0, silent=>0});
my $n_epoch = 1;
my $mod = mx->mod->new(symbol => $softmax);
$mod->fit(
$train_dataiter,
eval_data => $val_dataiter,
optimizer_params=>{learning_rate=>0.01, momentum=> 0.9},
num_epoch=>$n_epoch
);
my $res = $mod->score($val_dataiter, mx->metric->create('acc'));
ok($res->{accuracy} > 0.8);
=head1 DESCRIPTION
Perl interface to MXNet machine learning library.
=head1 BUGS AND INCOMPATIBILITIES
Parity with the Python interface is mostly achieved; a few deprecated
and rarely used features remain unported for now.
lib/AI/MXNet/Callback.pm view on Meta::CPAN
AI::MXNet::Callback::Speedometer - A callback that logs training speed
=cut
=head1 DESCRIPTION
Calculate and log training speed periodically.
Parameters
----------
batch_size: int
batch_size of data
frequent: int
How many batches between calculations.
Defaults to calculating & logging every 50 batches.
auto_reset: Bool
Reset the metric after each log, defaults to true.
=cut
has 'batch_size' => (is => 'ro', isa => 'Int', required => 1);
has 'frequent' => (is => 'ro', isa => 'Int', default => 50);
has 'init' => (is => 'rw', isa => 'Int', default => 0);
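A typical use is to pass a Speedometer instance as the batch_end_callback of a module's fit call. A minimal sketch (assuming $mod is an AI::MXNet::Module and $train_iter a data iterator; both names are illustrative):
use AI::MXNet qw(mx);
## log throughput (samples/sec) every 50 batches during training
my $speed = AI::MXNet::Callback::Speedometer->new(
batch_size => 100,
frequent => 50,
);
$mod->fit(
$train_iter,
num_epoch => 1,
batch_end_callback => $speed,
);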
lib/AI/MXNet/Executor.pm view on Meta::CPAN
$is_train=0: bool, optional
whether this forward pass is for training. If true,
a backward call is expected to follow; otherwise a
following backward call is invalid.
%kwargs
Additional specification of input arguments.
Examples
--------
>>> # doing forward by specifying data
>>> $texec->forward(1, data => $mydata);
>>> # doing forward by not specifying things, but copying to the executor beforehand
>>> $mydata->copyto($texec->arg_dict->{'data'});
>>> $texec->forward(1);
>>> # doing forward by specifying data and get outputs
>>> my $outputs = $texec->forward(1, data => $mydata);
>>> print $outputs->[0]->aspdl;
=cut
method forward(Int $is_train=0, %kwargs)
{
if(%kwargs)
{
my $arg_dict = $self->arg_dict;
while (my ($name, $array) = each %kwargs)
{
lib/AI/MXNet/Executor/Group.pm view on Meta::CPAN
if($begin >= $end)
{
confess('Too many slices such that some splits are empty');
}
push @slices, [$begin, $end];
}
return \@slices;
}
# Load an array ref of arrays into an array ref of arrays specified by slices
func _load_general($data, $targets, $major_axis)
{
zip(sub {
my ($d_src, $d_targets, $axis) = @_;
if(blessed($d_targets) and $d_targets->isa('AI::MXNet::NDArray'))
{
$d_src->copyto($d_targets);
}
elsif(ref $d_targets eq 'ARRAY' and blessed $d_targets->[0])
{
zip(sub {
lib/AI/MXNet/Image.pm view on Meta::CPAN
AI::MXNet::Image - Read individual image files and perform augmentations.
=cut
=head2 imdecode
Decode an image from a string. Requires OpenCV to work.
Parameters
----------
$buf : str, array ref, pdl, ndarray
Binary image data.
:$flag : int
0 for grayscale. 1 for colored.
:$to_rgb : int
0 for BGR format (OpenCV default). 1 for RGB format (MXNet default).
:$out : NDArray
Output buffer. Do not specify for automatic allocation.
=cut
method imdecode(Str|PDL $buf, Int :$flag=1, Int :$to_rgb=1, Maybe[AI::MXNet::NDArray] :$out=)
{
if(not ref $buf)
{
my $pdl_type = PDL::Type->new(DTYPE_MX_TO_PDL->{'uint8'});
my $len; { use bytes; $len = length $buf; }
my $pdl = PDL->new_from_specification($pdl_type, $len);
${$pdl->get_dataref} = $buf;
$pdl->upd_data;
$buf = $pdl;
}
if(not (blessed $buf and $buf->isa('AI::MXNet::NDArray')))
{
$buf = AI::MXNet::NDArray->array($buf, dtype=>'uint8');
}
return AI::MXNet::NDArray->_cvimdecode($buf, { flag => $flag, to_rgb => $to_rgb, ($out ? (out => $out) : ()) });
}
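For example, decoding a JPEG read from disk into an RGB NDArray; a minimal sketch, calling the method on the class directly ('input.jpg' is a hypothetical path):
use AI::MXNet qw(mx);
## slurp the raw bytes of the image file
my $buf = do { open my $fh, '<:raw', 'input.jpg' or die $!; local $/; <$fh> };
## decode to an RGB uint8 NDArray of shape [height, width, 3]
my $img = AI::MXNet::Image->imdecode($buf);
print join('x', @{ $img->shape }), "\n";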
=head2 scale_down
lib/AI/MXNet/KVStore.pm view on Meta::CPAN
sub DEMOLISH
{
check_call(AI::MXNetCAPI::KVStoreFree(shift->handle));
}
=head2 init
Initialize a single key or a sequence of key-value pairs in the store.
Each key must be initialized before it can be pushed or pulled.
Only worker 0's (rank == 0) data are used.
This function returns after the data have been initialized successfully.
Parameters
----------
key : str or an array ref of str
The keys.
value : NDArray or an array ref of NDArray objects
The values.
Examples
--------
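A minimal sketch of initializing and then pulling a single key, assuming a local store (the key '3' and the shape are illustrative):
>>> my $kv = mx->kv->create('local');
>>> my $shape = [2, 3];
>>> $kv->init('3', mx->nd->ones($shape) * 2);
>>> my $out = mx->nd->zeros($shape);
>>> $kv->pull('3', out => $out);
>>> print $out->aspdl; ## all twos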
lib/AI/MXNet/Module/Base.pm view on Meta::CPAN
confess($msg);
}
else
{
AI::MXNet::Logging->warning($msg);
}
}
}
}
# Check that input names match the input data descriptors
method _check_names_match(
ArrayRef[Str] $data_names,
ArrayRef[NameShapeOrDataDesc] $data_shapes,
Str $name,
Bool $throw
)
{
return if (not @$data_shapes and @$data_names == 1 and $data_names->[0] eq 'softmax_label');
my @actual = map { @{$_}[0] } @{ $data_shapes };
if("@$data_names" ne "@actual")
{
my $msg = sprintf(
"Data provided by %s_shapes don't match names specified by %s_names (%s vs. %s)",
$name, $name, "@$data_shapes", "@$data_names"
);
if($throw)
{
confess($msg);
}
else
{
AI::MXNet::Logging->warning($msg);
}
}
}
method _parse_data_desc(
ArrayRef[Str] $data_names,
Maybe[ArrayRef[Str]] $label_names,
ArrayRef[NameShapeOrDataDesc] $data_shapes,
Maybe[ArrayRef[NameShapeOrDataDesc]] $label_shapes
)
{
$data_shapes = [map { blessed $_ ? $_ : AI::MXNet::DataDesc->new(@$_) } @$data_shapes];
$self->_check_names_match($data_names, $data_shapes, 'data', 1);
if($label_shapes)
{
$label_shapes = [map { blessed $_ ? $_ : AI::MXNet::DataDesc->new(@$_) } @$label_shapes];
$self->_check_names_match($label_names, $label_shapes, 'label', 0);
}
else
{
$self->_check_names_match($label_names, [], 'label', 0);
}
return ($data_shapes, $label_shapes);
}
=head1 DESCRIPTION
The base class of a module. A module represents a computation component. The design
purpose of a module is to abstract a computation "machine" that one can run forward,
backward, update parameters, etc. We aim to make the APIs easy to use, especially in the
case when we need to use the imperative API to work with multiple modules (e.g. stochastic
depth networks).
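As an illustrative sketch of that imperative workflow (the symbol, shapes and names below are ours, not part of this module):
use AI::MXNet qw(mx);
## a toy symbol: a single fully connected layer
my $sym = mx->sym->FullyConnected(
data => mx->sym->Variable('data'),
num_hidden => 10,
);
my $mod = mx->mod->Module(symbol => $sym, label_names => []);
## allocate memory for the given input shape, then initialize parameters
$mod->bind(data_shapes => [['data', [32, 100]]], for_training => 0);
$mod->init_params;
## one forward pass over a batch of ones
$mod->forward(AI::MXNet::DataBatch->new(data => [mx->nd->ones([32, 100])]));
print "@{ $mod->get_outputs->[0]->shape }\n"; ## 32 10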
lib/AI/MXNet/RNN/Cell.pm view on Meta::CPAN
Str :$input_prefix='',
Str :$layout='NTC',
Maybe[Bool] :$merge_outputs=
)
{
$self->reset;
my $axis = index($layout, 'T');
if(not defined $inputs)
{
$inputs = [
map { AI::MXNet::Symbol->Variable("${input_prefix}t${_}_data") } (0..$length-1)
];
}
elsif(blessed($inputs))
{
assert(
(@{ $inputs->list_outputs() } == 1),
"unroll doesn't allow grouped symbol as input. Please "
."convert to list first or let unroll handle slicing"
);
$inputs = AI::MXNet::Symbol->SliceChannel(
lib/AI/MXNet/RecordIO.pm view on Meta::CPAN
package AI::MXNet::RecordIO;
use strict;
use warnings;
use AI::MXNet::Function::Parameters;
use AI::MXNet::Types;
use AI::MXNet::Base;
use Mouse;
=head1 NAME
AI::MXNet::RecordIO - Read/write RecordIO format data
=cut
=head2 new
Parameters
----------
uri : Str
uri path to recordIO file.
flag: Str
"r" for reading or "w" for writing.
lib/AI/MXNet/Symbol.pm view on Meta::CPAN
The order is in the same order as list_outputs()
aux_types : array ref of Dtype or undef
List of types of auxiliary states.
The order is in the same order as list_auxiliary_states()
=cut
method infer_type(Str|Undef @args)
{
my ($positional_arguments, $kwargs, $kwargs_order) = _parse_arguments("Dtype", @args);
my $sdata = [];
my $keys = [];
if(@$positional_arguments)
{
@{ $sdata } = map { defined($_) ? DTYPE_STR_TO_MX->{ $_ } : -1 } @{ $positional_arguments };
}
else
{
@{ $keys } = @{ $kwargs_order };
@{ $sdata } = map { DTYPE_STR_TO_MX->{ $_ } } @{ $kwargs }{ @{ $kwargs_order } };
}
my ($arg_type, $out_type, $aux_type, $complete) = check_call(AI::MXNetCAPI::SymbolInferType(
$self->handle,
scalar(@{ $sdata }),
$keys,
$sdata
)
);
if($complete)
{
return (
[ map { DTYPE_MX_TO_STR->{ $_ } } @{ $arg_type }],
[ map { DTYPE_MX_TO_STR->{ $_ } } @{ $out_type }],
[ map { DTYPE_MX_TO_STR->{ $_ } } @{ $aux_type }]
);
}
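For example, pinning the input dtype and letting the rest be inferred (a minimal sketch):
use AI::MXNet qw(mx);
my $fc = mx->sym->FullyConnected(
data => mx->sym->Variable('data'),
num_hidden => 10,
);
## infer all argument and output dtypes from the dtype of 'data'
my ($arg_types, $out_types, $aux_types) = $fc->infer_type(data => 'float32');
print "@$arg_types\n"; ## float32 float32 float32 (data, weight, bias)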
t/test_conv.t view on Meta::CPAN
use AI::MXNet::TestUtils qw(GetMNIST_ubyte);
use Test::More tests => 1;
## speed up the tests when gpu present
my $gpu_present = (`perl -e 'use AI::MXNet qw(mx); print mx->nd->ones([1], ctx => mx->gpu(0))->asscalar' 2>/dev/null` eq '1');
# symbol net
my $batch_size = 100;
### model
my $data = mx->symbol->Variable('data');
my $conv1= mx->symbol->Convolution(data => $data, name => 'conv1', num_filter => 32, kernel => [3,3], stride => [2,2]);
my $bn1 = mx->symbol->BatchNorm(data => $conv1, name => "bn1");
my $act1 = mx->symbol->Activation(data => $bn1, name => 'relu1', act_type => "relu");
my $mp1 = mx->symbol->Pooling(data => $act1, name => 'mp1', kernel => [2,2], stride =>[2,2], pool_type=>'max');
my $conv2= mx->symbol->Convolution(data => $mp1, name => 'conv2', num_filter => 32, kernel=>[3,3], stride=>[2,2]);
my $bn2 = mx->symbol->BatchNorm(data => $conv2, name=>"bn2");
my $act2 = mx->symbol->Activation(data => $bn2, name=>'relu2', act_type=>"relu");
my $mp2 = mx->symbol->Pooling(data => $act2, name => 'mp2', kernel=>[2,2], stride=>[2,2], pool_type=>'max');
my $fl = mx->symbol->Flatten(data => $mp2, name=>"flatten");
my $fc1 = mx->symbol->FullyConnected(data => $fl, name=>"fc1", num_hidden=>30);
my $act3 = mx->symbol->Activation(data => $fc1, name=>'relu3', act_type=>"relu");
my $fc2 = mx->symbol->FullyConnected(data => $act3, name=>'fc2', num_hidden=>10);
my $softmax = mx->symbol->SoftmaxOutput(data => $fc2, name => 'softmax');
# check data
GetMNIST_ubyte();
my $train_dataiter = mx->io->MNISTIter({
image=>"data/train-images-idx3-ubyte",
label=>"data/train-labels-idx1-ubyte",
data_shape=>[1, 28, 28],
batch_size=>$batch_size, shuffle=>1, flat=>0, silent=>0, seed=>10});
my $val_dataiter = mx->io->MNISTIter({
image=>"data/t10k-images-idx3-ubyte",
label=>"data/t10k-labels-idx1-ubyte",
data_shape=>[1, 28, 28],
batch_size=>$batch_size, shuffle=>1, flat=>0, silent=>0});
my $n_epoch = 1;
my $mod = mx->mod->new(symbol => $softmax, ($gpu_present ? (context => mx->gpu(0)) : ()));
$mod->fit(
$train_dataiter,
eval_data => $val_dataiter,
optimizer_params=>{learning_rate=>0.01, momentum=> 0.9},
num_epoch=>$n_epoch
);
my $res = $mod->score($val_dataiter, mx->metric->create('acc'));
ok($res->{accuracy} > 0.8);
t/test_infer_shape.t view on Meta::CPAN
{
is_deeply($arg_shape_dict{$k}, $v);
}
}
sub test_mlp2_infer_shape
{
# Build MLP
my $out = mlp2();
# infer shape
my $data_shape = [100, 100];
my($arg_shapes, $out_shapes, $aux_shapes) = $out->infer_shape(data=>$data_shape);
ok(@$out_shapes == 1);
is_deeply($out_shapes->[0], [100, 10]);
my %true_shapes = (
fc2_bias => [10],
fc2_weight => [10, 1000],
fc1_bias => [1000],
fc1_weight => [1000,100]
);
_test_shapes($out, $arg_shapes, %true_shapes);
}
sub test_mlp2_infer_error
{
# Test shape inconsistent case
my $out = mlp2();
my $weight_shape = [1, 100];
my $data_shape = [100, 100];
eval { $out->infer_shape(data=>$data_shape, fc1_weight=>$weight_shape) };
like($@, qr/Shape inconsistent/);
}
sub test_backward_infer
{
my $w = mx->sym->Variable("weight");
my $wshift = mx->sym->Variable("wshift", shape=>[1]);
my $data = mx->sym->Variable("data");
# broadcast add here; the shape cannot be deduced correctly on its own
my $wt = mx->sym->broadcast_add($w, $wshift);
# shape constraint, this is what enables backward shape inference
$wt = mx->sym->_identity_with_attr_like_rhs($wt, $w);
my $net = mx->sym->FullyConnected(data=>$data, weight=>$wt, num_hidden=>11, no_bias=>1);
my $data_shape = [7, 100];
my ($arg_shapes, $out_shapes, $aux_shapes) = $net->infer_shape(data=>$data_shape);
_test_shapes($net, $arg_shapes, weight=>[11,100]);
}
sub test_incomplete_infer_elewise
{
my $a = mx->sym->Variable('a', shape=>[0, 10]);
my $b = mx->sym->Variable('b', shape=>[12, 0]);
my $c = $a + $b;
my ($arg_shapes) = $c->infer_shape();
_test_shapes($c, $arg_shapes, a=>[12,10], b=>[12,10]);
}
sub test_incomplete_infer_mlp
{
my $a = mx->sym->Variable('a', shape=>[0, 10]);
my $b = mx->sym->FullyConnected(data=>$a, num_hidden=>21);
my $c = mx->sym->Variable('c', shape=>[5, 0]);
my $d = $b + $c;
my ($arg_shapes) = $d->infer_shape();
_test_shapes($d, $arg_shapes, a=>[5,10], c=>[5,21]);
}
sub test_incomplete_infer_slicechannel
{
my $a = mx->sym->Variable('a', shape=>[0, 10]);
my $b = mx->sym->SliceChannel(data=>$a, num_outputs=>10, axis=>1, squeeze_axis=>1);
my $c = mx->sym->Variable('c', shape=>[5]);
my $d = @{$b}[1] + $c;
my ($arg_shapes) = $d->infer_shape();
_test_shapes($d, $arg_shapes, a=>[5,10]);
$a = mx->sym->Variable('a', shape=>[0, 15, 0]);
$b = mx->sym->SliceChannel(data=>$a, num_outputs=>3, squeeze_axis=>0);
$c = mx->sym->Variable('c', shape=>[3, 5, 2]);
$d = @{$b}[1] + $c;
($arg_shapes) = $d->infer_shape();
_test_shapes($d, $arg_shapes, a=>[3,15,2]);
}
sub test_incomplete_infer_convolution
{
my $a = mx->sym->Variable('a', shape=>[0, 10, 0, 0]);
my $b = mx->sym->Convolution(data=>$a, num_filter=>21, kernel=>[3, 3], dilate=>[1, 1], pad=>[1, 1]);
my $c = mx->sym->Variable('c', shape=>[5, 21, 32, 32]);
my $d = $b + $c;
my ($arg_shapes) = $d->infer_shape();
_test_shapes($d, $arg_shapes, a=>[5, 10, 32, 32]);
}
sub test_incomplete_infer_concat
{
my $a = mx->sym->Variable('a', shape=>[0, 10]);
my $b = mx->sym->Variable('b', shape=>[0, 5]);
t/test_init.t view on Meta::CPAN
use strict;
use warnings;
use Test::More tests => 4;
use AI::MXNet qw(mx);
sub test_default_init
{
my $data = mx->sym->Variable('data');
my $sym = mx->sym->LeakyReLU(data => $data, act_type => 'prelu');
my $mod = mx->mod->Module($sym);
$mod->bind(data_shapes=>[['data', [10,10]]]);
$mod->init_params;
ok((((values %{ ($mod->get_params)[0] }))[0]->aspdl == 0.25)->all);
}
sub test_variable_init
{
my $data = mx->sym->Variable('data');
my $gamma = mx->sym->Variable('gamma', init => mx->init->One());
my $sym = mx->sym->LeakyReLU(data => $data, gamma => $gamma, act_type => 'prelu');
my $mod = mx->mod->Module($sym);
$mod->bind(data_shapes=>[['data', [10,10]]]);
$mod->init_params();
ok((((values %{ ($mod->get_params)[0] }))[0]->aspdl == 1)->all);
}
sub test_aux_init
{
my $data = mx->sym->Variable('data');
my $sym = mx->sym->BatchNorm(data => $data, name => 'bn');
my $mod = mx->mod->Module($sym);
$mod->bind(data_shapes=>[['data', [10, 10, 3, 3]]]);
$mod->init_params();
ok((($mod->get_params)[1]->{bn_moving_var}->aspdl == 1)->all);
ok((($mod->get_params)[1]->{bn_moving_mean}->aspdl == 0)->all);
}
test_default_init();
test_variable_init();
test_aux_init();
t/test_io.t view on Meta::CPAN
use AI::MXNet::TestUtils qw(same reldiff GetMNIST_ubyte GetCifar10);
use PDL;
use PDL::Types;
use PDL::NiceSlice;
$|++;
sub test_Cifar10Rec()
{
GetCifar10();
my $dataiter = mx->io->ImageRecordIter({
path_imgrec => "data/cifar/train.rec",
mean_img => "data/cifar/cifar10_mean.bin",
rand_crop => 0,
rand_mirror => 0,
shuffle => 0,
data_shape => [3,28,28],
batch_size => 100,
preprocess_threads => 4,
prefetch_buffer => 1
});
my @labelcount;
my $batchcount = 0;
while(my $batch = <$dataiter>)
{
my $nplabel = $batch->label->[0];
for my $i (0..$nplabel->shape->[0]-1)
{
$labelcount[int($nplabel->at($i)->asscalar)] += 1;
}
}
for my $i (0..9)
{
ok($labelcount[$i] == 5000);
}
}
sub test_NDArrayIter()
{
my $datas = ones(PDL::Type->new(6), 2, 2, 1000);
my $labels = ones(PDL::Type->new(6), 1, 1000);
for my $i (0..999)
{
$datas(:,:,$i) .= $i / 100;
$labels(:,$i) .= $i / 100;
}
my $dataiter = mx->io->NDArrayIter(
data => $datas,
label => $labels,
batch_size => 128,
shuffle => 1,
last_batch_handle => 'pad'
);
my $batchidx = 0;
while(<$dataiter>)
{
$batchidx += 1;
}
is($batchidx, 8);
$dataiter = mx->io->NDArrayIter(
data => $datas,
label => $labels,
batch_size => 128,
shuffle => 0,
last_batch_handle => 'pad'
);
$batchidx = 0;
my @labelcount;
my $i = 0;
for my $batch (@{ $dataiter })
{
my $label = $batch->label->[0];
my $flabel = $label->aspdl->flat;
ok($batch->data->[0]->aspdl->slice(0,0,'X')->flat->at(0) == $flabel->at(0));
for my $i (0..$label->shape->[0]-1)
{
$labelcount[$flabel->at($i)] += 1;
}
}
for my $i (0..9)
{
if($i == 0)
{
ok($labelcount[$i] == 124);