AI-MXNet

 View release on MetaCPAN or search on MetaCPAN

t/test_module.t  view on Meta::CPAN

    $mod->backward([mx->nd->ones($dshape)]);
    $mod->update();
    is_deeply($mod->get_outputs()->[0]->shape, $dshape);
    ok((($mod->get_params())[0]{fc_bias}->aspdl == -1)->all);

    $dshape = [14, 20];
    $mod->reshape(data_shapes=>[['data', $dshape]]);
    $mod->forward(
        mx->io->DataBatch(
            data=>[mx->nd->ones($dshape)]
        ),
        is_train => 1
    );
    $mod->backward([mx->nd->ones($dshape)]);
    $mod->update();
    is_deeply($mod->get_outputs()->[0]->shape, $dshape);
    ok((($mod->get_params())[0]{fc_bias}->aspdl == -3)->all);
}


sub test_module_states
{
    # Stack two LSTM layers and unroll the network over 10 time steps.
    my $cell_stack = mx->rnn->SequentialRNNCell();
    $cell_stack->add(mx->rnn->LSTMCell(num_hidden=>20, prefix=>"lstm_l${_}_")) for 0, 1;
    my $init_states = $cell_stack->begin_state(func=>mx->sym->can('Variable'));
    my (undef, $state_syms) = $cell_stack->unroll(
        10,
        begin_state => $init_states,
        inputs      => mx->sym->Variable('data')
    );

    # Bind an inference-only module whose outputs are the final RNN states,
    # running on two CPU contexts so outputs exist per-device and merged.
    my $names  = [map { $_->name } @$init_states];
    my $module = mx->mod->Module(
        mx->sym->Group($state_syms),
        context     => [mx->cpu(0), mx->cpu(1)],
        state_names => $names
    );
    $module->bind(data_shapes=>[['data', [5, 10]]], for_training=>0);
    $module->init_params();
    my $batch = mx->io->DataBatch(data=>[mx->nd->zeros([5, 10])], label=>[]);

    # First pass: all states explicitly set to 1.
    $module->set_states(value=>1);
    $module->forward($batch);
    my $merged    = $module->get_outputs(0);
    my $first_run = $module->get_outputs(1);

    # Second pass: feed the first pass's outputs back in as the states.
    $module->set_states(states=>$merged);
    $module->forward($batch);
    my $second_run = $module->get_outputs(1);

    # The two runs started from different states, so every output pair
    # must differ beyond the 1e-3 tolerance.
    zip(sub {
        my ($a, $b) = @_;
        ok(not almost_equal($a->aspdl, $b->aspdl, 1e-3));
    }, $first_run, $second_run);
}

sub test_module_switch_bucket
{
    my $vocab_dim     = 5000;
    my $num_hidden    = 100;
    my $num_embedding = 100;
    my $num_layer     = 2;
    my $default_key   = 10;
    my $test_key      = 5;
    my $batch_size    = 32;
    my $contexts      = [mx->cpu(0)];
    my $initializer   = mx->init->Xavier(factor_type=>"in", magnitude=>2.34);

    # Symbol generator: builds an LSTM language-model graph for the
    # requested sequence length (the bucket key).
    my $gen_sym = sub {
        my ($seq_len) = @_;
        my $data  = mx->sym->Variable('data');
        my $label = mx->sym->Variable('softmax_label');
        my $embed = mx->sym->Embedding(
            data       => $data,
            input_dim  => $vocab_dim,
            output_dim => $num_embedding,
            name       => 'embed'
        );
        my $stack = mx->rnn->SequentialRNNCell();
        $stack->add(mx->rnn->LSTMCell(num_hidden=>$num_hidden, prefix=>"lstm_l${_}_"))
            for 0 .. $num_layer - 1;
        # unroll is called in list context; only the merged outputs are needed.
        my ($outputs) = $stack->unroll($seq_len, inputs=>$embed, merge_outputs=>1);

        my $pred = mx->sym->Reshape($outputs, shape=>[-1, $num_hidden]);
        $pred  = mx->sym->FullyConnected(data=>$pred, num_hidden=>$vocab_dim, name=>'pred');
        $label = mx->sym->Reshape($label, shape=>[-1]);
        $pred  = mx->sym->SoftmaxOutput(data=>$pred, label=>$label, name=>'softmax');

        return ($pred, ['data'], ['softmax_label']);
    };

    # Helper: create, bind, and initialize a BucketingModule for a key.
    my $make_bucketing_module = sub {
        my ($key) = @_;
        my $model = mx->mod->BucketingModule(
            sym_gen            => $gen_sym,
            default_bucket_key => $key,
            context            => $contexts
        );
        $model->bind(
            data_shapes  => [['data', [$batch_size, $key]]],
            label_shapes => [['softmax_label', [$batch_size, $key]]]
        );
        $model->init_params(initializer=>$initializer);
        return $model;
    };

    # Build the module on the default bucket key, then switch to the test key.
    my $bucketing_model = $make_bucketing_module->($default_key);
    $bucketing_model->switch_bucket(
        bucket_key   => $test_key,
        data_shapes  => [['data', [$batch_size, $test_key]]],
        label_shapes => [['softmax_label', [$batch_size, $test_key]]]
    );

    # Forget the cached executor for the test key so the second switch
    # has to regenerate the bucket from scratch.
    delete $bucketing_model->_buckets->{$test_key};

    $bucketing_model->switch_bucket(
        bucket_key   => $test_key,
        data_shapes  => [['data', [$batch_size, $test_key]]],
        label_shapes => [['softmax_label', [$batch_size, $test_key]]]
    );
}

sub test_monitor
{
    mx->random->seed(11);
    my $data = mx->nd->array([[0.05, .10]]);
    my $label = mx->nd->array([[.01, 0.99]]);
    my $train_data = mx->io->NDArrayIter($data, label => $label, batch_size=>1);

    # symbols
    my $x = mx->symbol->Variable('data');
    $x = mx->symbol->FullyConnected(name=>'fc_0', data=>$x, num_hidden=>2);
    $x = mx->symbol->Activation(name=>"act_0", data=>$x, act_type=>'sigmoid');
    $x = mx->symbol->FullyConnected(name=>'fc_1', data=>$x, num_hidden=>2);
    $x = mx->symbol->Activation(name=>"act_1", data=>$x, act_type=>'sigmoid');
    $x = mx->symbol->LinearRegressionOutput(data=>$x, name=>'softmax', grad_scale=>2);

    # create monitor
    my $mean_abs = sub { my ($x) = @_;
        return $x->abs->sum/$x->size;
    };
    my $mon = mx->mon->Monitor(1, stat_func=>$mean_abs, pattern=>'.*', sort=>1);

    # create module
    my $mod = mx->mod->Module($x, context=>[mx->cpu()]);
    $mod->bind(data_shapes=>$train_data->provide_data, label_shapes=>$train_data->provide_label,
                    for_training=>1);
    $mod->install_monitor($mon);
    my $arg_params = {fc_0_weight => mx->nd->array([[.15, .20], [.25, .30]]),
                  fc_0_bias  => mx->nd->array([.35, .35]),
                  fc_1_weight => mx->nd->array([[.40, .45], [.50, .55]]),
                  fc_1_bias  => mx->nd->array([.60, .60])};
    $mod->init_params(arg_params=>$arg_params);

    my $data_batch = <$train_data>;
    $mon->tic();
    $mod->forward_backward($data_batch);
    my $res = $mon->toc();
    my $keys = ['act_0', 'act_1', 'data', 'fc_0', 'fc_1', 'softmax'];
    my $mon_result_counts = [0, 0, 0, 0, 0, 0];
    ok(@$res == 21);
    for my $r (@$res)
    {
        my ($n, $k, $v) = @$r;
        enumerate(sub {
            my ($idx, $key) = @_;



( run in 0.802 second using v1.01-cache-2.11-cpan-39bf76dae61 )