Algorithm-TimelinePacking

 view release on metacpan or search on metacpan

README.md  view on Meta::CPAN

    my ($lines, $latest) = $packer->arrange_slices(\@jobs);
    # Feed $lines to D3.js or other visualization library

# ATTRIBUTES

## space

    my $packer = Algorithm::TimelinePacking->new(space => 10);

Minimum space (in the same units as your timestamps) required between
consecutive intervals on the same line. Default: 0.

## width

    my $packer = Algorithm::TimelinePacking->new(width => 800);

If set, all intervals will be scaled to fit within this width. The scaling
preserves relative positions and durations. Default: undef (no scaling).

# METHODS

README.md  view on Meta::CPAN

start and end timestamps. Additional elements (metadata) are preserved.

    [start, end, ...optional_metadata]

The slices are modified in place (normalized to start at 0, optionally scaled).

# ALGORITHM

The packing uses a greedy first-fit approach:

1. Sort intervals by start time (secondary sort by end time)
2. Normalize all timestamps to start at 0
3. Optionally scale to fit within specified width
4. For each interval, place it on the first line where it fits
5. If no line has room, create a new line
6. Shuffle the final line order for visual balance

Time complexity: O(n² log n) where n is the number of intervals.

# AUTHOR

cpanfile.snapshot  view on Meta::CPAN

      Type::Tie::ARRAY 2.010001
      Type::Tie::BASE 2.010001
      Type::Tie::HASH 2.010001
      Type::Tie::SCALAR 2.010001
      Type::Tiny 2.010001
      Type::Tiny::Bitfield 2.010001
      Type::Tiny::Class 2.010001
      Type::Tiny::ConstrainedObject 2.010001
      Type::Tiny::Duck 2.010001
      Type::Tiny::Enum 2.010001
      Type::Tiny::Intersection 2.010001
      Type::Tiny::Role 2.010001
      Type::Tiny::Union 2.010001
      Type::Utils 2.010001
      Types::Common 2.010001
      Types::Common::Numeric 2.010001
      Types::Common::String 2.010001
      Types::Standard 2.010001
      Types::Standard::ArrayRef 2.010001
      Types::Standard::CycleTuple 2.010001
      Types::Standard::Dict 2.010001

examples/hadoop-jobs.html  view on Meta::CPAN

// INPUT: Raw job data (sent to server)
// Each job: [start_epoch, end_epoch, job_id, user, map_tasks, reduce_tasks]
// =============================================================================
var INPUT = [[1700002680,1700002812,"job_1","alice",2121,41],[1700003083,1700003262,"job_2","dave",3464,418],[1700001666,1700001869,"job_3","dave",139,385],[1700002164,1700002466,"job_4","dave",2689,249],[1700001575,1700001709,"job_5","grace",311,430...

// =============================================================================
// OUTPUT: Pre-computed by Algorithm::TimelinePacking (returned from server)
// Times normalized to start at 0, arranged into non-overlapping lines
// =============================================================================
var EARLIEST = 1700000030;  // Original start timestamp
var LATEST = 3753;          // Latest normalized timestamp (equals total duration in seconds, since times start at 0)

var lines = [
    [[756,1039,"job_22","grace",4762,301],[1321,1388,"job_54","eve",845,203],[1545,1679,"job_5","grace",311,430],[1697,2001,"job_37","alice",569,497],[2180,2456,"job_17","eve",1272,308],[2650,2782,"job_1","alice",2121,41],[3329,3400,"job_10","dave",2...
    [[58,124,"job_26","carol",436,368],[340,652,"job_18","alice",2725,308],[678,951,"job_30","grace",4530,24],[1263,1408,"job_74","alice",104,177],[1580,1782,"job_72","carol",2026,190],[1786,2009,"job_44","alice",4672,3],[2217,2391,"job_67","carol",3...
    [[739,795,"job_9","dave",2633,433],[1056,1304,"job_7","alice",1379,145],[1432,1608,"job_14","dave",1167,420],[1660,1934,"job_65","grace",3824,91],[2097,2237,"job_25","dave",1575,228],[2444,2559,"job_13","bob",62,310],[2895,3093,"job_66","eve",427...
    [[721,811,"job_24","carol",3581,26],[1112,1400,"job_71","frank",298,144],[1575,1824,"job_28","frank",4537,188],[1906,1956,"job_12","dave",4074,472],[2152,2271,"job_35","frank",433,126],[2500,2714,"job_32","bob",195,14],[3196,3512,"job_33","carol"...
    [[0,192,"job_29","grace",1676,412],[421,511,"job_73","carol",4038,472],[549,800,"job_61","frank",3926,221],[1103,1250,"job_53","bob",4525,414],[1375,1590,"job_59","alice",94,96],[1652,1936,"job_75","dave",88,402],[2112,2288,"job_43","bob",2062,19...
    [[686,892,"job_62","grace",2930,46],[1214,1429,"job_15","bob",2896,78],[1587,1672,"job_50","frank",3526,305],[1694,1878,"job_55","dave",843,405],[2006,2292,"job_8","bob",4018,409],[2568,2806,"job_48","dave",4399,338],[3469,3753,"job_6","bob",1038...
    [[498,762,"job_46","grace",3417,192],[856,1164,"job_16","bob",4105,387],[1363,1510,"job_27","carol",41,471],[1636,1839,"job_3","dave",139,385],[1995,2174,"job_39","carol",2997,7],[2394,2456,"job_49","eve",3621,74],[2802,3079,"job_70","carol",2943...
    [[1786,2038,"job_23","eve",661,472],[2263,2576,"job_51","alice",4691,231],[2939,3216,"job_38","dave",2502,492]],

lib/Algorithm/TimelinePacking.pm  view on Meta::CPAN

use strict;
use warnings;

use Moo;
use Types::Standard qw(Int Maybe);
use POSIX qw(floor);
use List::Util qw(max shuffle);

our $VERSION = '0.01';

# Minimum gap required between two consecutive intervals placed on the
# same line, expressed in the same units as the input timestamps (most
# frequently pixels once a C<width> scaling has been applied).
# Constrained to a non-negative integer; defaults to 0 (intervals may
# touch but not overlap).
has space => (
    is      => 'rw',
    isa     => Int->where('$_ >= 0'),
    default => 0,
);

# convert all epochs to end at this maximum. undef means no constraint
has width => (
    is      => 'rw',

lib/Algorithm/TimelinePacking.pm  view on Meta::CPAN

    my ($lines, $latest) = $packer->arrange_slices(\@jobs);
    # Feed $lines to D3.js or other visualization library

=head1 ATTRIBUTES

=head2 space

    my $packer = Algorithm::TimelinePacking->new(space => 10);

Minimum space (in the same units as your timestamps) required between
consecutive intervals on the same line. Default: 0.

=head2 width

    my $packer = Algorithm::TimelinePacking->new(width => 800);

If set, all intervals will be scaled to fit within this width. The scaling
preserves relative positions and durations. Default: undef (no scaling).

=head1 METHODS

lib/Algorithm/TimelinePacking.pm  view on Meta::CPAN

The slices are modified in place (normalized to start at 0, optionally scaled).

=head1 ALGORITHM

The packing uses a greedy first-fit approach:

=over 4

=item 1.

Sort intervals by start time (secondary sort by end time)

=item 2.

Normalize all timestamps to start at 0

=item 3.

Optionally scale to fit within specified width

=item 4.



( run in 1.933 second using v1.01-cache-2.11-cpan-39bf76dae61 )