AI-TensorFlow-Libtensorflow



lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubCenterNetObjDetect.pod

                      $subset{detection_class_labels}[$idx],
                      100*$subset{detection_scores}->at($idx,0) ) =>
                  at => $label_xy, 'left',
                  offset => 'character 0,-0.25',
                  qq{font ",12" boxed front tc rgb "#ffffff"} ], ],
          )
      } 0..$subset{detection_boxes}->dim(1)-1
  );
  
  $gp->plot(
      topcmds => q{set style textbox opaque fc "#505050f0" noborder},
      square => 1,
      yrange => [$pdl_images[0]->dim(2),0],
      with => 'image', $pdl_images[0],
  );
  
  $gp->close;
  
  IPerl->png( bytestream => path($plot_output_path)->slurp_raw ) if IN_IPERL;

=head1 RESOURCE USAGE

lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubEnformerGeneExprPredModel.pod

          AI::TensorFlow::Libtensorflow::Output->New({
              oper => $graph->OperationByName('StatefulPartitionedCall'),
              index => 1,
      }),
  );
  
  p %puts;

B<STREAM (STDERR)>:

  {
      inputs_args_0   AI::TensorFlow::Libtensorflow::Output {
          index   0,
          oper    AI::TensorFlow::Libtensorflow::Operation {
              Name         "serving_default_args_0",
              NumInputs    0,
              NumOutputs   1,
              OpType       "Placeholder"
          }
      },
      outputs_human   AI::TensorFlow::Libtensorflow::Output {
          index   0,
          oper    AI::TensorFlow::Libtensorflow::Operation {
              Name         "StatefulPartitionedCall",
              NumInputs    274,
              NumOutputs   2,
              OpType       "StatefulPartitionedCall"
          }
      },
      outputs_mouse   AI::TensorFlow::Libtensorflow::Output {
          index   1,
          oper    AI::TensorFlow::Libtensorflow::Operation {
              Name         "StatefulPartitionedCall",
              NumInputs    274,
              NumOutputs   2,
              OpType       "StatefulPartitionedCall"
          }
      }
  }

We need a helper to simplify running the session and getting just the predictions that we want.
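
The excerpt that follows shows the start of the notebook's helper but is cut off inside the C<Run> call, so a hedged sketch of the complete shape is given here first. It assumes that C<Run> follows the C<TF_SessionRun> argument order and that a C<$s> status object and the C<%puts> hash were created earlier in the notebook; treat it as illustrative rather than the notebook's exact code.

  my $predict_on_batch = sub {
      my ($session, $t) = @_;
      my @outputs_t;

      $session->Run(
          undef,                                    # no RunOptions
          [ $puts{inputs_args_0} ],  [ $t ],        # input ports and input TFTensors
          [ $puts{outputs_human} ],  \@outputs_t,   # output port; results land here
          undef,                                    # no target operations
          undef,                                    # no RunMetadata
          $s,                                       # status object (assumed from earlier)
      );

      $outputs_t[0];                                # TFTensor with the human-head predictions
  };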

  my $predict_on_batch = sub {
      my ($session, $t) = @_;
      my @outputs_t;
  
      $session->Run(
          undef,

lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubEnformerGeneExprPredModel.pod

  
  p $predictions;
  
  $t->mark('END');
  $t->report();

B<STREAM (STDERR)>:

  AI::TensorFlow::Libtensorflow::Tensor {
      Type            FLOAT
      Dims            [ 1 896 5313 ]
      NumDims         3
      ElementCount    4760448
  }

  Devel::Timer Report -- Total time: 14.5641 secs
  Interval  Time     Percent
  ----------------------------------------------
  01 -> 02  14.5634  100.00%  prediction of sequence -> End of prediction of sequence
  02 -> 03  0.0007     0.00%  End of prediction of sequence -> END
  00 -> 01  0.0000     0.00%  INIT -> prediction of sequence

lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubEnformerGeneExprPredModel.pod

  $gp->end_multi;
  
  $gp->close;
  
  if( IN_IPERL ) {
      IPerl->png( bytestream => path($plot_output_path)->slurp_raw );
  }

B<DISPLAY>:

[Inline PNG plot output omitted from this excerpt.]

=head2 Parts of the original notebook that fall outside the scope

In the original notebook, there are several more steps that have not been ported here:

=over

=item 1.

"Compute contribution scores":

lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod

      $pdl;
  }

The following is just a small helper to generate an HTML C<<< <table> >>> for output in C<IPerl>.

  use HTML::Tiny;
  
  sub my_table {
      my ($data, $cb) = @_;
      my $h = HTML::Tiny->new;
      $h->table( { style => 'width: 100%' },
          [
              $h->tr(
                  map {
                      [
                          $h->td( $cb->($_, $h) )
                      ]
                  } @$data
              )
          ]
      )

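As a quick illustration (not taken from the notebook), the helper can be handed an arrayref of rows plus a callback that formats each row; the two Wikimedia URLs below come from the notebook's own test-image list.

  my @rows = (
      [ tiger  => 'https://upload.wikimedia.org/wikipedia/commons/b/b0/Bengal_tiger_%28Panthera_tigris_tigris%29_female_3_crop.jpg' ],
      [ turtle => 'https://upload.wikimedia.org/wikipedia/commons/8/80/Turtle_golfina_escobilla_oaxaca_mexico_claudio_giovenzana_2010.jpg' ],
  );
  my $html = my_table( \@rows, sub {
      my ($row, $h) = @_;
      $h->a( { href => $row->[1] }, $row->[0] );   # each row becomes one linked cell
  });
  say $html;   # under IPerl the HTML string would be rendered rather than printed
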
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod

  say "Input: " , $outputs{in}[0];
  say "Output: ", $outputs{out}[0];

B<STREAM (STDOUT)>:

  Input: serving_default_inputs:0
  Output: StatefulPartitionedCall:0

B<STREAM (STDERR)>:

  {
      in    [
          [0] AI::TensorFlow::Libtensorflow::Output {
              index   0,
              oper    AI::TensorFlow::Libtensorflow::Operation {
                  Name         "serving_default_inputs",
                  NumInputs    0,
                  NumOutputs   1,
                  OpType       "Placeholder"
              }
          }
      ],
      out   [
          [0] AI::TensorFlow::Libtensorflow::Output {
              index   0,
              oper    AI::TensorFlow::Libtensorflow::Operation {
                  Name         "StatefulPartitionedCall",
                  NumInputs    263,
                  NumOutputs   1,
                  OpType       "StatefulPartitionedCall"
              }
          }
      ]
  }

B<RESULT>:

  1

Now we can get the following testing images from Wikimedia.

  my %images_for_test_to_uri = (
      "tiger" => "https://upload.wikimedia.org/wikipedia/commons/b/b0/Bengal_tiger_%28Panthera_tigris_tigris%29_female_3_crop.jpg",

lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod

                          width => '50%',
                      })
                  ),
              )
          })
      );
  }

B<DISPLAY>:

[HTML table of the labeled test images (with links to the Wikimedia originals) omitted from this excerpt.]

=head2 Download the test images and transform them into suitable input data

We now fetch these images and prepare them to be in the format needed by the model, using C<Imager> to resize and add padding. Then we turn the C<Imager> data into a C<PDL> ndarray. Since the C<Imager> data is stored as 32-bits with 4 channels in the order ...

We then take all the PDL ndarrays and concatenate them. Again, note that the dimension lists for the PDL ndarray and the TFTensor are reversed.
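
As an aside, the batching step and the dimension reversal look like the following sketch (not code from the notebook; C<@image_pdls> stands in for the per-image ndarrays built below):

  use PDL;

  # Each image is a float PDL of shape [3 224 224] (channel, width, height).
  # cat() stacks them along a new trailing batch dimension.
  my $batched = cat(@image_pdls);      # PDL shape: [3 224 224 N]
  say join ' ', $batched->dims;
  # A TFTensor built from this PDL reports its dims in reverse: [ N 224 224 3 ].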

  sub imager_paste_center_pad {
      my ($inner, $padded_sz, @rest) = @_;
  

lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod

  Padded to [ 224 x 224 ]
  Downloaded https://upload.wikimedia.org/wikipedia/commons/b/b0/Bengal_tiger_%28Panthera_tigris_tigris%29_female_3_crop.jpg
  Rescaled image from [ 4500 x 3000 ] to [ 224 x 149 ]
  Padded to [ 224 x 224 ]
  Downloaded https://upload.wikimedia.org/wikipedia/commons/8/80/Turtle_golfina_escobilla_oaxaca_mexico_claudio_giovenzana_2010.jpg
  Rescaled image from [ 2000 x 1329 ] to [ 224 x 149 ]
  Padded to [ 224 x 224 ]

B<STREAM (STDERR)>:

  PDL {
      Data     : too long to print
      Type     : float
      Shape    : [3 224 224 12]
      Nelem    : 1806336
      Min      : 0
      Max      : 1
      Badflag  : No
      Has Bads : No
  }
  AI::TensorFlow::Libtensorflow::Tensor {
      Type            FLOAT
      Dims            [ 12 224 224 3 ]
      NumDims         4
      ElementCount    1806336
  }

=head2 Run the model for inference

We can use the C<Run> method to run the session and get the output C<TFTensor>.

First, we send a single random input to warm up the model.
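
The warm-up input itself can be built as a random C<float> PDL in the model's input shape. This is a sketch that assumes C<PDL::GSL::RNG>; the RNG type and variable names are illustrative:

  use PDL;
  use PDL::GSL::RNG;

  my $rng = PDL::GSL::RNG->new('mt19937');
  # One random image: PDL dims [3 224 224 1] correspond to TFTensor dims [1 224 224 3].
  my $warmup_input = zeroes( float, 3, 224, 224, 1 );
  $rng->get_uniform($warmup_input);    # fill in place with uniform random values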

  my $RunSession = sub {
      my ($session, $t) = @_;

lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod

  $rng->get_uniform($warmup_input);
  
  p $RunSession->($session, FloatPDLTOTFTensor($warmup_input));

B<STREAM (STDOUT)>:

  Warming up the model

B<STREAM (STDERR)>:

  AI::TensorFlow::Libtensorflow::Tensor {
      Type            FLOAT
      Dims            [ 1 1001 ]
      NumDims         2
      ElementCount    1001
  }

Then we send the batched image data. The returned scores need to be normalised using the L<softmax function|https://en.wikipedia.org/wiki/Softmax_function> with the following formula (taken from Wikipedia):

$$ \sigma(\mathbf{z})_{i} = \frac{e^{z_{i}}}{\sum_{j=1}^{K} e^{z_{j}}} \ \ \text{ for } i = 1, \dotsc, K \text{ and } \mathbf{z} = (z_{1}, \dotsc, z_{K}) \in \mathbb{R}^{K}. $$

  my $output_pdl_batched = FloatTFTensorToPDL($RunSession->($session, $t));
  my $softmax = sub { ( map $_/sumover($_)->dummy(0), exp($_[0]) )[0] };
  my $probabilities_batched = $softmax->($output_pdl_batched);
  p $probabilities_batched;

B<STREAM (STDERR)>:

=for html <span style="display:inline-block;margin-left:1em;"><pre style="display: block"><code><span style="color: #cc66cc;">PDL</span><span style="color: #33ccff;"> {</span><span style="">
    </span><span style="color: #6666cc;">Data    </span><span style=""> : </span><span style="color: #669933;">too long to print</span><span style="">
    </span><span style="color: #6666cc;">Type    </span><span style=""> : </span><span style="color: #cc66cc;">float</span><span style="">
    </span><span style="color: #6666cc;">Shape   </span><span style=""> : </span><span style="color: #33ccff;">[</span><span style="color: #9999cc;">1001 12</span><span style="color: #33ccff;">]</span><span style="">
    </span><span style="color: #6666cc;">Nelem   </span><span style=""> : </span><span style="color: #dd6;">12012</span><span style="">
    </span><span style="color: #6666cc;">Min     </span><span style=""> : </span><span style="color: #f66;">2.73727380317723e-07</span><span style="">
    </span><span style="color: #6666cc;">Max     </span><span style=""> : </span><span style="color: #99f;">0.980696022510529</span><span style="">
    </span><span style="color: #6666cc;">Badflag </span><span style=""> : </span><span style="color: #2c2;">No</span><span style="">
    </span><span style="color: #6666cc;">Has Bads</span><span style=""> : </span><span style="color: #2c2;">No</span><span style="">
</span><span style="color: #33ccff;">}</span><span style="">
</span></code></pre></span>
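
As a quick sanity check (not part of the notebook), the C<$softmax> closure defined above can be applied to a small vector to confirm that the result sums to 1:

  use PDL;

  my $z = pdl( 1, 2, 3 );
  my $p = $softmax->($z);      # exp() then normalise over the first dimension
  say $p;                      # approximately [0.09 0.2447 0.6652]
  say $p->sumover;             # 1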

=head2 Results summary

Then select the top 5 of those and find their class labels.

  my $N = 5; # number to select
  
  my $top_batched = $probabilities_batched->qsorti->slice([-1, -$N]);
  

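The C<qsorti>/C<slice> idiom above selects the indices of the C<$N> largest probabilities in descending order. A toy example (not from the notebook):

  use PDL;

  my $p    = pdl( 0.1, 0.5, 0.2, 0.15, 0.05 );
  my $top2 = $p->qsorti->slice( [ -1, -2 ] );   # last two of the ascending sort, reversed
  say $top2;                                    # [1 2]: indices of the two largest values
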
lib/AI/TensorFlow/Libtensorflow/Manual/Notebook/InferenceUsingTFHubMobileNetV2Model.pod

                      $probabilities_batched->at($label_index,$batch_idx),
              ) ];
          }
          say generate_table( rows => [ $header, @rows ], header_row => 1 );
          print "\n";
      }
  }

B<DISPLAY>:

[HTML table of the test images omitted from this excerpt.]

  my $p_approx_batched = $probabilities_batched->sumover->approx(1, 1e-5);
  p $p_approx_batched;
  say "All probabilities sum up to approximately 1" if $p_approx_batched->all->sclr;

B<STREAM (STDOUT)>:

  All probabilities sum up to approximately 1

B<STREAM (STDERR)>:

=for html <span style="display:inline-block;margin-left:1em;"><pre style="display: block"><code><span style="color: #cc66cc;">PDL</span><span style="color: #33ccff;"> {</span><span style="">
    </span><span style="color: #6666cc;">Data    </span><span style=""> : </span><span style="color: #33ccff;">[</span><span style="color: #ff6633;">1 1 1 1 1 1 1 1 1 1 1 1</span><span style="color: #33ccff;">]</span><span style="">
    </span><span style="color: #6666cc;">Type    </span><span style=""> : </span><span style="color: #cc66cc;">double</span><span style="">
    </span><span style="color: #6666cc;">Shape   </span><span style=""> : </span><span style="color: #33ccff;">[</span><span style="color: #9999cc;">12</span><span style="color: #33ccff;">]</span><span style="">
    </span><span style="color: #6666cc;">Nelem   </span><span style=""> : </span><span style="color: #dd6;">12</span><span style="">
    </span><span style="color: #6666cc;">Min     </span><span style=""> : </span><span style="color: #f66;">1</span><span style="">
    </span><span style="color: #6666cc;">Max     </span><span style=""> : </span><span style="color: #99f;">1</span><span style="">
    </span><span style="color: #6666cc;">Badflag </span><span style=""> : </span><span style="color: #2c2;">No</span><span style="">
    </span><span style="color: #6666cc;">Has Bads</span><span style=""> : </span><span style="color: #2c2;">No</span><span style="">
</span><span style="color: #33ccff;">}</span><span style="">
</span></code></pre></span>

B<RESULT>:

  1

=head1 RESOURCE USAGE

  use Filesys::DiskUsage qw/du/;
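
The excerpt ends here. A hedged sketch of how C<du> could report the size of the downloaded model files follows; C<$model_base> is a placeholder for wherever the model was saved.

  use Filesys::DiskUsage qw(du);

  # 'human-readable' makes du() return e.g. "1.2M" rather than a byte count.
  my $total = du( { 'human-readable' => 1 }, $model_base );
  say "Disk space usage: $total";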
  


