AI-MXNet
view release on metacpan or search on metacpan
lib/AI/MXNet/Executor.pm view on Meta::CPAN
=head2 aux_dict
Get a hash ref representation of the auxiliary states arrays.
Returns
-------
aux_dict : HashRef[AI::MXNet::NDArray]
Hash ref mapping each auxiliary state name to its NDArray.
=cut
method aux_dict()
{
    # Lazily build and cache the name => NDArray mapping for the
    # auxiliary states; subsequent calls return the cached hash ref.
    unless(defined $self->_aux_dict)
    {
        my $state_names  = $self->_symbol->list_auxiliary_states();
        my $state_arrays = $self->aux_arrays();
        $self->_aux_dict(_get_dict($state_names, $state_arrays));
    }
    return $self->_aux_dict;
}
=head2 output_dict
Get a hash ref representation of the output arrays.
Returns
-------
output_dict : HashRef[AI::MXNet::NDArray]
Hash ref mapping each output name to its NDArray.
=cut
method output_dict()
{
    # Lazily build and cache the name => NDArray mapping for the
    # outputs; subsequent calls return the cached hash ref.
    unless(defined $self->_output_dict)
    {
        my $output_names  = $self->_symbol->list_outputs();
        my $output_arrays = $self->outputs;
        $self->_output_dict(_get_dict($output_names, $output_arrays));
    }
    return $self->_output_dict;
}
=head2 copy_params_from
Copy parameters from arg_params, aux_params into the executor's internal array.
Parameters
----------
arg_params : HashRef[AI::MXNet::NDArray]
Parameters, hash ref of name to NDArray of arguments
aux_params : Maybe[HashRef[AI::MXNet::NDArray]], optional
Parameters, hash ref of name to NDArray of auxiliary states.
allow_extra_params : boolean, optional
Whether to allow extra parameters that are not needed by symbol
If this is true, no error will be thrown when arg_params or aux_params
contain extra parameters that are not needed by the executor.
=cut
method copy_params_from(
    HashRef[AI::MXNet::NDArray]        $arg_params,
    Maybe[HashRef[AI::MXNet::NDArray]] $aux_params=,
    Maybe[Bool]                        $allow_extra_params=
)
{
    # Copy each named argument parameter into the executor's own argument
    # array, casting to the destination dtype so mixed-precision sources
    # still land correctly.
    my %arg_dict = %{ $self->arg_dict };
    while (my ($name, $array) = each %{ $arg_params })
    {
        if(exists $arg_dict{ $name })
        {
            my $dst = $arg_dict{ $name };
            $array->astype($dst->dtype)->copyto($dst);
        }
        elsif(not $allow_extra_params)
        {
            confess("Found name \"$name\" that is not in the arguments");
        }
    }
    # Auxiliary states are optional; when supplied, copy them the same way.
    if(defined $aux_params)
    {
        my %aux_dict = %{ $self->aux_dict };
        while (my ($name, $array) = each %{ $aux_params })
        {
            if(exists $aux_dict{ $name })
            {
                my $dst = $aux_dict{ $name };
                $array->astype($dst->dtype)->copyto($dst);
            }
            elsif(not $allow_extra_params)
            {
                # Bug fix: this branch validates auxiliary states, not
                # arguments — the previous message was a copy-paste of the
                # arg-params branch and misreported which set was checked.
                confess("Found name \"$name\" that is not in the auxiliary states");
            }
        }
    }
}
=head2 reshape
Returns new executor with the same symbol and shared memory,
but different input/output shapes.
For runtime reshaping, variable length sequences, etc.
The returned executor shares state with the current one,
and cannot be used in parallel with it.
Parameters
----------
$kwargs : HashRef[Shape]
new shape for arguments.
:$partial_shaping : bool
Whether to allow changing the shape of unspecified arguments.
:$allow_up_sizing : bool
Whether to allow allocating new ndarrays that are larger than the originals.
Returns
-------
$exec : AI::MXNet::Executor
lib/AI/MXNet/Executor.pm view on Meta::CPAN
# NOTE(review): the `method reshape(...)` signature line is not visible in this
# chunk (it is cut off by the page boundary at the line above); $kwargs,
# $partial_shaping and $allow_up_sizing used below are presumably its
# parameters — confirm against the full file.
# Infer all argument/auxiliary shapes from the new shapes given in $kwargs.
my ($arg_shapes, undef, $aux_shapes) = $self->_symbol->infer_shape(%{ $kwargs });
confess("Insufficient argument shapes provided.")
unless defined $arg_shapes;
my %new_arg_dict;
my %new_grad_dict;
my $i = 0;
# Walk the arguments in symbol order; $i indexes the parallel shape/array lists.
for my $name (@{ $self->_symbol->list_arguments() })
{
my $new_shape = $arg_shapes->[$i];
my $arr = $self->arg_arrays->[$i];
my $darr;
# Gradient array exists only when gradients were requested at bind time.
if(@{ $self->grad_arrays })
{
$darr = $self->grad_arrays->[$i];
}
# Allow the change if partial shaping is on, the caller explicitly
# specified this argument, or the shape is actually unchanged.
if(
$partial_shaping
or
exists $kwargs->{ $name }
or
join(',', @{ $new_shape }) eq join(',', @{ $arr->shape })
)
{
# Growing beyond the original buffer requires a fresh allocation,
# which is only permitted when $allow_up_sizing is set.
if(AI::MXNet::NDArray->size($new_shape) > $arr->size)
{
confess(
"New shape of arg:$name larger than original. "
."First making a big executor and then down sizing it "
."is more efficient than the reverse."
."If you really want to up size, set \$allow_up_sizing=1 "
."to enable allocation of new arrays."
) unless $allow_up_sizing;
$new_arg_dict{ $name } = AI::MXNet::NDArray->empty(
$new_shape,
ctx => $arr->context,
dtype => $arr->dtype
);
if(defined $darr)
{
$new_grad_dict{ $name } = AI::MXNet::NDArray->empty(
$new_shape,
ctx => $darr->context,
dtype => $arr->dtype
);
}
}
else
{
# Same or smaller size: reshape shares the existing memory.
$new_arg_dict{ $name } = $arr->reshape($new_shape);
if(defined $darr)
{
$new_grad_dict{ $name } = $darr->reshape($new_shape);
}
}
}
else
{
confess(
"Shape of unspecified array arg:$name changed. "
."This can cause the new executor to not share parameters "
."with the old one. Please check for error in network."
."If this is intended, set partial_shaping=True to suppress this warning."
);
}
$i++;
}
# Repeat the same procedure for the auxiliary states (no gradients here).
my %new_aux_dict;
$i = 0;
for my $name (@{ $self->_symbol->list_auxiliary_states() })
{
my $new_shape = $aux_shapes->[$i];
my $arr = $self->aux_arrays->[$i];
if($partial_shaping or join(',', @{ $new_shape }) eq join (',', @{ $arr->shape }))
{
if(AI::MXNet::NDArray->size($new_shape) > $arr->size)
{
confess(
"New shape of arg:$name larger than original. "
."First making a big executor and then down sizing it "
."is more efficient than the reverse."
."If you really want to up size, set \$allow_up_sizing=1 "
."to enable allocation of new arrays."
) unless $allow_up_sizing;
$new_aux_dict{ $name } = AI::MXNet::NDArray->empty(
$new_shape,
ctx => $arr->context,
dtype => $arr->dtype
);
}
else
{
$new_aux_dict{ $name } = $arr->reshape($new_shape);
}
}
else
{
confess(
"Shape of unspecified array aux:$name changed. "
."This can cause the new executor to not share parameters "
."with the old one. Please check for error in network."
."If this is intended, set partial_shaping=True to suppress this warning."
);
}
$i++;
}
# Bind a new executor over the (mostly shared) arrays; shared_exec => $self
# lets it reuse this executor's memory, so the two must not run in parallel.
return $self->_symbol->bind(
ctx => $self->_ctx,
args => \%new_arg_dict,
args_grad => \%new_grad_dict,
grad_req => $self->_grad_req,
aux_states => \%new_aux_dict,
group2ctx => $self->_group2ctx,
shared_exec => $self
);
}
=head2 debug_str
A debug string about the internal execution plan.
Returns
-------
debug_str : string
Debug string of the executor.
=cut
method debug_str()
{
    # Ask the MXNet C API to render this executor's execution plan;
    # check_call raises on a non-zero status code, and scalar context
    # yields the rendered string.
    my $plan = scalar(check_call(AI::MXNetCAPI::ExecutorPrint($self->handle)));
    return $plan;
}
1;
( run in 0.988 second using v1.01-cache-2.11-cpan-39bf76dae61 )