lib/AI/MXNet/Gluon/ModelZoo/Vision/AlexNet.pm
classes : Int, default 1000
Number of classes for the output layer.
=cut
has 'classes' => (is => 'ro', isa => 'Int', default => 1000);
method python_constructor_arguments() { ['classes'] }
sub BUILD
{
my $self = shift;
$self->name_scope(sub {
$self->features(nn->HybridSequential(prefix=>''));
$self->features->name_scope(sub {
$self->features->add(nn->Conv2D(64, kernel_size=>11, strides=>4,
padding=>2, activation=>'relu'));
$self->features->add(nn->MaxPool2D(pool_size=>3, strides=>2));
$self->features->add(nn->Conv2D(192, kernel_size=>5, padding=>2,
activation=>'relu'));
$self->features->add(nn->MaxPool2D(pool_size=>3, strides=>2));
$self->features->add(nn->Conv2D(384, kernel_size=>3, padding=>1,
activation=>'relu'));
$self->features->add(nn->Conv2D(256, kernel_size=>3, padding=>1,
activation=>'relu'));
$self->features->add(nn->Conv2D(256, kernel_size=>3, padding=>1,
activation=>'relu'));
$self->features->add(nn->MaxPool2D(pool_size=>3, strides=>2));
$self->features->add(nn->Flatten());
$self->features->add(nn->Dense(4096, activation=>'relu'));
$self->features->add(nn->Dropout(0.5));
$self->features->add(nn->Dense(4096, activation=>'relu'));
$self->features->add(nn->Dropout(0.5));
});
$self->output(nn->Dense($self->classes));
});
}
method hybrid_forward(GluonClass $F, GluonInput $x)
{
$x = $self->features->($x);
$x = $self->output->($x);
return $x;
}
package AI::MXNet::Gluon::ModelZoo::Vision;
=head2 alexnet
AlexNet model from the "One weird trick..." <https://arxiv.org/abs/1404.5997> paper.
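A minimal usage sketch (the get_model helper is exported by AI::MXNet::Gluon::ModelZoo; the 'alexnet' model name and the pretrained option mirror the distribution's synopsis and should be treated as assumptions here):

    ## hedged sketch: option names are assumptions, not shown in this excerpt
    use AI::MXNet qw(mx);
    use AI::MXNet::Gluon::ModelZoo qw(get_model);
    my $alexnet = get_model('alexnet', pretrained => 1);        # downloads ImageNet weights
    my $scores  = $alexnet->(mx->nd->ones([1, 3, 224, 224]));   # 1 x 1000 class scores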
lib/AI/MXNet/Gluon/ModelZoo/Vision/DenseNet.pm
for(1..$num_layers)
{
$out->add(_make_dense_layer($growth_rate, $bn_size, $dropout));
}
});
return $out;
}
func _make_dense_layer($growth_rate, $bn_size, $dropout)
{
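# DenseNet-BC composite layer: BN -> ReLU -> 1x1 bottleneck conv (bn_size * growth_rate channels),
# then BN -> ReLU -> 3x3 conv producing growth_rate new feature maps; the result is
# concatenated with the layer input via the HybridConcurrent/Identity pair below.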
my $new_features = nn->HybridSequential(prefix=>'');
$new_features->add(nn->BatchNorm());
$new_features->add(nn->Activation('relu'));
$new_features->add(nn->Conv2D($bn_size * $growth_rate, kernel_size=>1, use_bias=>0));
$new_features->add(nn->BatchNorm());
$new_features->add(nn->Activation('relu'));
$new_features->add(nn->Conv2D($growth_rate, kernel_size=>3, padding=>1, use_bias=>0));
if($dropout)
{
$new_features->add(nn->Dropout($dropout));
}
my $out = nn->HybridConcurrent(axis=>1, prefix=>'');
$out->add(nn->Identity());
$out->add($new_features);
return $out;
}
func _make_transition($num_output_features)
{
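# DenseNet transition block: BN -> ReLU -> 1x1 conv to reduce the channel count,
# followed by 2x2 average pooling that halves the spatial resolution.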
my $out = nn->HybridSequential(prefix=>'');
$out->add(nn->BatchNorm());
$out->add(nn->Activation('relu'));
$out->add(nn->Conv2D($num_output_features, kernel_size=>1, use_bias=>0));
$out->add(nn->AvgPool2D(pool_size=>2, strides=>2));
return $out;
}
=head1 NAME
AI::MXNet::Gluon::ModelZoo::Vision::DenseNet - Densenet-BC model from the "Densely Connected Convolutional Networks"
=cut
=head1 DESCRIPTION
Densenet-BC model from the "Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf> paper.
Parameters
----------
num_init_features : Int
Number of filters to learn in the first convolution layer.
growth_rate : Int
Number of filters to add each layer (`k` in the paper).
block_config : array ref of Int
List of integers for numbers of layers in each pooling block.
bn_size : Int, default 4
Multiplicative factor for the number of bottleneck layers.
(i.e. bn_size * k features in the bottleneck layer)
dropout : Num, default 0
Rate of dropout after each dense layer.
classes : Int, default 1000
Number of classification classes.
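For example, the DenseNet-121 configuration listed in %densenet_spec below maps onto
these parameters as follows (a sketch mirroring the positional constructor call in get_densenet):

    my $densenet121 = AI::MXNet::Gluon::ModelZoo::Vision::DenseNet->new(
        64, 32, [6, 12, 24, 16],  # num_init_features, growth_rate, block_config
        4, 0, 1000                # bn_size, dropout, classes
    );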
=cut
has [qw/num_init_features
growth_rate/] => (is => 'ro', isa => 'Int', required => 1);
has 'block_config' => (is => 'ro', isa => 'ArrayRef[Int]', required => 1);
has 'bn_size' => (is => 'ro', isa => 'Int', default => 4);
has 'dropout' => (is => 'ro', isa => 'Num', default => 0);
has 'classes' => (is => 'ro', isa => 'Int', default => 1000);
method python_constructor_arguments(){ [qw/num_init_features growth_rate block_config bn_size dropout classes/] }
sub BUILD
{
my $self = shift;
$self->name_scope(sub {
$self->features(nn->HybridSequential(prefix=>''));
$self->features->add(
nn->Conv2D(
$self->num_init_features, kernel_size=>7,
strides=>2, padding=>3, use_bias=>0
)
);
$self->features->add(nn->BatchNorm());
$self->features->add(nn->Activation('relu'));
$self->features->add(nn->MaxPool2D(pool_size=>3, strides=>2, padding=>1));
# Add dense blocks
my $num_features = $self->num_init_features;
for(enumerate($self->block_config))
{
my ($i, $num_layers) = @$_;
$self->features->add(_make_dense_block($num_layers, $self->bn_size, $self->growth_rate, $self->dropout, $i+1));
$num_features += $num_layers * $self->growth_rate;
if($i != @{ $self->block_config } - 1)
{
$self->features->add(_make_transition(int($num_features/2)));
$num_features = int($num_features/2);
}
}
$self->features->add(nn->BatchNorm());
$self->features->add(nn->Activation('relu'));
$self->features->add(nn->AvgPool2D(pool_size=>7));
$self->features->add(nn->Flatten());
$self->output(nn->Dense($self->classes));
});
}
method hybrid_forward(GluonClass $F, GluonInput $x)
{
$x = $self->features->($x);
$x = $self->output->($x);
return $x;
}
package AI::MXNet::Gluon::ModelZoo::Vision;
my %densenet_spec = (
121 => [64, 32, [6, 12, 24, 16]],
161 => [96, 48, [6, 12, 36, 24]],
169 => [64, 32, [6, 12, 32, 32]],
lib/AI/MXNet/Gluon/ModelZoo/Vision/DenseNet.pm
=cut
method get_densenet(
Int $num_layers, Bool :$pretrained=0, :$ctx=AI::MXNet::Context->cpu(),
:$root='~/.mxnet/models',
Int :$bn_size=4,
Num :$dropout=0,
Int :$classes=1000
)
{
my ($num_init_features, $growth_rate, $block_config) = @{ $densenet_spec{$num_layers} };
my $net = AI::MXNet::Gluon::ModelZoo::Vision::DenseNet->new(
$num_init_features, $growth_rate, $block_config,
$bn_size, $dropout, $classes
);
if($pretrained)
{
$net->load_parameters(
AI::MXNet::Gluon::ModelZoo::ModelStore->get_model_file(
"densenet$num_layers",
root=>$root
),
ctx=>$ctx
lib/AI/MXNet/Gluon/ModelZoo/Vision/Inception.pm
if(defined $value)
{
$kwargs{ $setting_names[$i] } = $value;
}
}
$out->add(_make_basic_conv(%kwargs));
}
return $out;
}
func _make_A($pool_features, $prefix)
{
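# Inception-A module: four parallel branches (1x1; 1x1 -> 5x5; 1x1 -> 3x3 -> 3x3;
# avg-pool -> 1x1) whose outputs are concatenated along the channel axis.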
my $out = nn->HybridConcurrent(axis=>1, prefix=>$prefix);
$out->name_scope(sub {
$out->add(_make_branch('', [64, 1, undef, undef]));
$out->add(_make_branch(
'',
[48, 1, undef, undef],
[64, 5, undef, 2]
));
$out->add(_make_branch(
'',
[64, 1, undef, undef],
[96, 3, undef, 1],
[96, 3, undef, 1]
));
$out->add(_make_branch('avg', [$pool_features, 1, undef, undef]));
});
return $out;
}
func _make_B($prefix)
{
my $out = nn->HybridConcurrent(axis=>1, prefix=>$prefix);
$out->name_scope(sub {
$out->add(_make_branch('', [384, 3, 2, undef]));
$out->add(_make_branch(
lib/AI/MXNet/Gluon/ModelZoo/Vision/Inception.pm
Number of classification classes.
=cut
has 'classes' => (is => 'ro', isa => 'Int', default => 1000);
method python_constructor_arguments(){ ['classes'] }
sub BUILD
{
my $self = shift;
$self->name_scope(sub {
$self->features(nn->HybridSequential(prefix=>''));
$self->features->add(_make_basic_conv(channels=>32, kernel_size=>3, strides=>2));
$self->features->add(_make_basic_conv(channels=>32, kernel_size=>3));
$self->features->add(_make_basic_conv(channels=>64, kernel_size=>3, padding=>1));
$self->features->add(nn->MaxPool2D(pool_size=>3, strides=>2));
$self->features->add(_make_basic_conv(channels=>80, kernel_size=>1));
$self->features->add(_make_basic_conv(channels=>192, kernel_size=>3));
$self->features->add(nn->MaxPool2D(pool_size=>3, strides=>2));
$self->features->add(_make_A(32, 'A1_'));
$self->features->add(_make_A(64, 'A2_'));
$self->features->add(_make_A(64, 'A3_'));
$self->features->add(_make_B('B_'));
$self->features->add(_make_C(128, 'C1_'));
$self->features->add(_make_C(160, 'C2_'));
$self->features->add(_make_C(160, 'C3_'));
$self->features->add(_make_C(192, 'C4_'));
$self->features->add(_make_D('D_'));
$self->features->add(_make_E('E1_'));
$self->features->add(_make_E('E2_'));
$self->features->add(nn->AvgPool2D(pool_size=>8));
$self->features->add(nn->Dropout(0.5));
$self->output(nn->Dense($self->classes));
});
}
method hybrid_forward(GluonClass $F, GluonInput $x)
{
$x = $self->features->($x);
$x = $self->output->($x);
return $x;
}
package AI::MXNet::Gluon::ModelZoo::Vision;
=head2 inception_v3
Inception v3 model from
"Rethinking the Inception Architecture for Computer Vision"
lib/AI/MXNet/Gluon/ModelZoo/Vision/MobileNet.pm
{
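# depthwise-separable convolution: a 3x3 depthwise conv (num_group equals the channel count,
# so each channel is filtered independently) followed by a pointwise conv that mixes channels
# (_add_conv's default kernel size is assumed to be 1 here).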
_add_conv($out, channels=>$dw_channels, kernel=>3, stride=>$stride,
pad=>1, num_group=>$dw_channels, relu6=>$relu6);
_add_conv($out, channels=>$channels, relu6=>$relu6);
}
sub BUILD
{
my $self = shift;
$self->name_scope(sub {
$self->features(nn->HybridSequential(prefix=>''));
$self->features->name_scope(sub {
_add_conv($self->features, channels=>int(32 * $self->multiplier), kernel=>3, pad=>1, stride=>2);
my $dw_channels = [map { int($_ * $self->multiplier) } (32, 64, (128)x2, (256)x2, (512)x6, 1024)];
my $channels = [map { int($_ * $self->multiplier) } (64, (128)x2, (256)x2, (512)x6, (1024)x2)];
my $strides = [(1, 2)x3, (1)x5, 2, 1];
for(zip($dw_channels, $channels, $strides))
{
my ($dwc, $c, $s) = @$_;
_add_conv_dw($self->features, dw_channels=>$dwc, channels=>$c, stride=>$s);
}
$self->features->add(nn->GlobalAvgPool2D());
$self->features->add(nn->Flatten());
});
$self->output(nn->Dense($self->classes));
});
}
method hybrid_forward(GluonClass $F, GluonInput $x)
{
$x = $self->features->($x);
$x = $self->output->($x);
return $x;
}
package AI::MXNet::Gluon::ModelZoo::Vision::MobileNetV2;
use AI::MXNet::Gluon::Mouse;
use AI::MXNet::Base;
extends 'AI::MXNet::Gluon::HybridBlock';
has 'multiplier' => (is => 'ro', isa => 'Num', default => 1);
has 'classes' => (is => 'ro', isa => 'Int', default => 1000);
lib/AI/MXNet/Gluon/ModelZoo/Vision/MobileNet.pm
if($active)
{
$out->add($relu6 ? AI::MXNet::Gluon::ModelZoo::Vision::MobileNet::RELU6->new : nn->Activation('relu'));
}
}
sub BUILD
{
my $self = shift;
$self->name_scope(sub {
$self->features(nn->HybridSequential(prefix=>'features_'));
$self->features->name_scope(sub {
_add_conv(
$self->features, int(32 * $self->multiplier), kernel=>3,
stride=>2, pad=>1, relu6=>1
);
my $in_channels_group = [map { int($_ * $self->multiplier) } (32, 16, (24)x2, (32)x3, (64)x4, (96)x3, (160)x3)];
my $channels_group = [map { int($_ * $self->multiplier) } (16, (24)x2, (32)x3, (64)x4, (96)x3, (160)x3, 320)];
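# expansion factor t of each LinearBottleneck: 1 for the first block, 6 for the remaining 16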
my $ts = [1, (6)x16];
my $strides = [(1, 2)x2, 1, 1, 2, (1)x6, 2, (1)x3];
for(zip($in_channels_group, $channels_group, $ts, $strides))
{
my ($in_c, $c, $t, $s) = @$_;
$self->features->add(
AI::MXNet::Gluon::ModelZoo::Vision::MobileNet::LinearBottleneck->new(
in_channels=>$in_c, channels=>$c,
t=>$t, stride=>$s
)
);
}
my $last_channels = $self->multiplier > 1 ? int(1280 * $self->multiplier) : 1280;
_add_conv($self->features, $last_channels, relu6=>1);
$self->features->add(nn->GlobalAvgPool2D());
});
$self->output(nn->HybridSequential(prefix=>'output_'));
$self->output->name_scope(sub {
$self->output->add(
nn->Conv2D($self->classes, 1, use_bias=>0, prefix=>'pred_'),
nn->Flatten()
);
});
});
}
method hybrid_forward(GluonClass $F, GluonInput $x)
{
$x = $self->features->($x);
$x = $self->output->($x);
return $x;
}
package AI::MXNet::Gluon::ModelZoo::Vision;
=head2 get_mobilenet
MobileNet model from the
"MobileNets: Efficient Convolutional Neural Networks for Mobile Vision Applications"
lib/AI/MXNet/Gluon/ModelZoo/Vision/ResNet.pm
$channels, kernel_size=>3, strides=>$stride, padding=>1,
use_bias=>0, in_channels=>$in_channels
);
}
sub BUILD
{
my $self = shift;
assert(@{ $self->layers } == (@{ $self->channels } - 1));
$self->name_scope(sub {
$self->features(nn->HybridSequential(prefix=>''));
if($self->thumbnail)
{
$self->features->add(_conv3x3($self->channels->[0], 1, 0));
}
else
{
$self->features->add(nn->Conv2D($self->channels->[0], 7, 2, 3, use_bias=>0));
$self->features->add(nn->BatchNorm());
$self->features->add(nn->Activation('relu'));
$self->features->add(nn->MaxPool2D(3, 2, 1));
}
for(enumerate($self->layers))
{
my ($i, $num_layer) = @$_;
my $stride = $i == 0 ? 1 : 2;
$self->features->add(
$self->_make_layer(
$self->block, $num_layer, $self->channels->[$i+1],
$stride, $i+1, in_channels=>$self->channels->[$i]
)
);
}
$self->features->add(nn->GlobalAvgPool2D());
$self->output(nn->Dense($self->classes, in_units=>$self->channels->[-1]));
});
}
method _make_layer($block, $layers, $channels, $stride, $stage_index, :$in_channels=0)
{
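# one residual stage: the first block applies the given stride (and, in the elided call below,
# a downsample shortcut when the shape changes); the remaining $layers-1 blocks keep stride 1
# and the same channel count.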
my $layer = nn->HybridSequential(prefix=>"stage${stage_index}_");
$layer->name_scope(sub {
$layer->add(
$block->new(
lib/AI/MXNet/Gluon/ModelZoo/Vision/ResNet.pm
for(1..$layers-1)
{
$layer->add($block->new($channels, 1, 0, in_channels=>$channels, prefix=>''));
}
});
return $layer;
}
method hybrid_forward(GluonClass $F, GluonInput $x)
{
$x = $self->features->($x);
$x = $self->output->($x);
return $x;
}
package AI::MXNet::Gluon::ModelZoo::Vision::ResNet::V2;
use AI::MXNet::Gluon::Mouse;
extends 'AI::MXNet::Gluon::HybridBlock';
use AI::MXNet::Base;
lib/AI/MXNet/Gluon/ModelZoo/Vision/ResNet.pm
$channels, kernel_size=>3, strides=>$stride, padding=>1,
use_bias=>0, in_channels=>$in_channels
);
}
sub BUILD
{
my $self = shift;
assert(@{ $self->layers } == (@{ $self->channels } - 1));
$self->name_scope(sub {
$self->features(nn->HybridSequential(prefix=>''));
$self->features->add(nn->BatchNorm(scale=>0, center=>0));
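# BatchNorm with scale and center disabled acts as a parameter-free normalisation of the raw input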
if($self->thumbnail)
{
$self->features->add(_conv3x3($self->channels->[0], 1, 0));
}
else
{
$self->features->add(nn->Conv2D($self->channels->[0], 7, 2, 3, use_bias=>0));
$self->features->add(nn->BatchNorm());
$self->features->add(nn->Activation('relu'));
$self->features->add(nn->MaxPool2D(3, 2, 1));
}
my $in_channels = $self->channels->[0];
for(enumerate($self->layers))
{
my ($i, $num_layer) = @$_;
my $stride = $i == 0 ? 1 : 2;
$self->features->add(
$self->_make_layer(
$self->block, $num_layer, $self->channels->[$i+1],
$stride, $i+1, in_channels=>$in_channels
)
);
$in_channels = $self->channels->[$i+1];
}
$self->features->add(nn->BatchNorm());
$self->features->add(nn->Activation('relu'));
$self->features->add(nn->GlobalAvgPool2D());
$self->features->add(nn->Flatten());
$self->output(nn->Dense($self->classes, in_units=>$in_channels));
});
}
method _make_layer($block, $layers, $channels, $stride, $stage_index, :$in_channels=0)
{
my $layer = nn->HybridSequential(prefix=>"stage${stage_index}_");
$layer->name_scope(sub {
$layer->add(
$block->new(
lib/AI/MXNet/Gluon/ModelZoo/Vision/ResNet.pm
for(1..$layers-1)
{
$layer->add($block->new($channels, 1, 0, in_channels=>$channels, prefix=>''));
}
});
return $layer;
}
method hybrid_forward(GluonClass $F, GluonInput $x)
{
$x = $self->features->($x);
$x = $self->output->($x);
return $x;
}
package AI::MXNet::Gluon::ModelZoo::Vision;
# Specification
my %resnet_spec = (
18 => ['basic_block', [2, 2, 2, 2], [64, 64, 128, 256, 512]],
34 => ['basic_block', [3, 4, 6, 3], [64, 64, 128, 256, 512]],
lib/AI/MXNet/Gluon/ModelZoo/Vision/SqueezeNet.pm
=cut
has 'version' => (is => 'ro', isa => enum([qw[1.0 1.1]]), required => 1);
has 'classes' => (is => 'ro', isa => 'Int', default => 1000);
method python_constructor_arguments() { [qw/version classes/] }
sub BUILD
{
my $self = shift;
$self->name_scope(sub {
$self->features(nn->HybridSequential(prefix=>''));
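# version 1.0 opens with a 7x7, 96-channel conv; version 1.1 opens with a 3x3, 64-channel conv
# and places the pooling layers earlier (see the two branches below)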
if($self->version eq '1.0')
{
$self->features->add(nn->Conv2D(96, kernel_size=>7, strides=>2));
$self->features->add(nn->Activation('relu'));
$self->features->add(nn->MaxPool2D(pool_size=>3, strides=>2, ceil_mode=>1));
$self->features->add(_make_fire(16, 64, 64));
$self->features->add(_make_fire(16, 64, 64));
$self->features->add(_make_fire(32, 128, 128));
$self->features->add(nn->MaxPool2D(pool_size=>3, strides=>2, ceil_mode=>1));
$self->features->add(_make_fire(32, 128, 128));
$self->features->add(_make_fire(48, 192, 192));
$self->features->add(_make_fire(48, 192, 192));
$self->features->add(_make_fire(64, 256, 256));
$self->features->add(nn->MaxPool2D(pool_size=>3, strides=>2, ceil_mode=>1));
$self->features->add(_make_fire(64, 256, 256));
}
else
{
$self->features->add(nn->Conv2D(64, kernel_size=>3, strides=>2));
$self->features->add(nn->Activation('relu'));
$self->features->add(nn->MaxPool2D(pool_size=>3, strides=>2, ceil_mode=>1));
$self->features->add(_make_fire(16, 64, 64));
$self->features->add(_make_fire(16, 64, 64));
$self->features->add(nn->MaxPool2D(pool_size=>3, strides=>2, ceil_mode=>1));
$self->features->add(_make_fire(32, 128, 128));
$self->features->add(_make_fire(32, 128, 128));
$self->features->add(nn->MaxPool2D(pool_size=>3, strides=>2, ceil_mode=>1));
$self->features->add(_make_fire(48, 192, 192));
$self->features->add(_make_fire(48, 192, 192));
$self->features->add(_make_fire(64, 256, 256));
$self->features->add(_make_fire(64, 256, 256));
}
$self->features->add(nn->Dropout(0.5));
$self->output(nn->HybridSequential(prefix=>''));
$self->output->add(nn->Conv2D($self->classes, kernel_size=>1));
$self->output->add(nn->Activation('relu'));
$self->output->add(nn->AvgPool2D(13));
$self->output->add(nn->Flatten());
});
}
method hybrid_forward(GluonClass $F, GluonInput $x)
{
$x = $self->features->($x);
$x = $self->output->($x);
return $x;
}
package AI::MXNet::Gluon::ModelZoo::Vision;
=head2 get_squeezenet
SqueezeNet model from the "SqueezeNet: AlexNet-level accuracy with 50x fewer parameters
lib/AI/MXNet/Gluon/ModelZoo/Vision/VGG.pm
=cut
=head1 DESCRIPTION
VGG model from the "Very Deep Convolutional Networks for Large-Scale Image Recognition"
<https://arxiv.org/abs/1409.1556> paper.
Parameters
----------
layers : array ref of Int
Numbers of layers in each feature block.
filters : array ref of Int
Numbers of filters in each feature block. The list length should match that of layers.
classes : Int, default 1000
Number of classification classes.
batch_norm : Bool, default 0
Use batch normalization.
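For example, the VGG-11 configuration from %vgg_spec at the bottom of this file maps onto
these parameters as follows (a sketch; it assumes the same positional-argument convention
used by the DenseNet constructor call above, with the order given by python_constructor_arguments):

    my $vgg11 = AI::MXNet::Gluon::ModelZoo::Vision::VGG->new(
        [1, 1, 2, 2, 2],           # layers per feature block
        [64, 128, 256, 512, 512],  # filters per feature block
        1000,                      # classes
        0                          # batch_norm
    );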
=cut
method python_constructor_arguments() { [qw/layers filters classes batch_norm/] }
has ['layers',
'filters'] => (is => 'ro', isa => 'ArrayRef[Int]', required => 1);
has 'classes' => (is => 'ro', isa => 'Int', default => 1000);
has 'batch_norm' => (is => 'ro', isa => 'Bool', default => 0);
sub BUILD
{
my $self = shift;
assert(@{ $self->layers } == @{ $self->filters });
$self->name_scope(sub {
$self->features($self->_make_features());
$self->features->add(nn->Dense(4096, activation=>'relu',
weight_initializer=>'normal',
bias_initializer=>'zeros'));
$self->features->add(nn->Dropout(rate=>0.5));
$self->features->add(nn->Dense(4096, activation=>'relu',
weight_initializer=>'normal',
bias_initializer=>'zeros'));
$self->features->add(nn->Dropout(rate=>0.5));
$self->output(nn->Dense($self->classes,
weight_initializer=>'normal',
bias_initializer=>'zeros'));
});
}
method _make_features()
{
my $featurizer = nn->HybridSequential(prefix=>'');
for(enumerate($self->layers))
{
my ($i, $num) = @$_;
for(0..$num-1)
{
$featurizer->add(
nn->Conv2D(
$self->filters->[$i], kernel_size => 3, padding => 1,
lib/AI/MXNet/Gluon/ModelZoo/Vision/VGG.pm
}
$featurizer->add(nn->Activation('relu'));
}
$featurizer->add(nn->MaxPool2D(strides=>2));
}
return $featurizer;
}
method hybrid_forward(GluonClass $F, GluonInput $x)
{
$x = $self->features->($x);
$x = $self->output->($x);
return $x;
}
package AI::MXNet::Gluon::ModelZoo::Vision;
# Specification
my %vgg_spec = (
11 => [[1, 1, 2, 2, 2], [64, 128, 256, 512, 512]],
13 => [[2, 2, 2, 2, 2], [64, 128, 256, 512, 512]],