Algorithm-MinPerfHashTwoLevel


MinPerfHashTwoLevel.xs

normalize_with_flags(pTHX_ SV *sv, SV *normalized_sv, SV *is_utf8_sv, int downgrade) {
    STRLEN len;
    if (SvROK(sv)) {
        croak("Error: Not expecting a reference value in source hash");
    }
    sv_setsv(normalized_sv,sv);
    if (SvOK(sv)) {
        STRLEN pv_len;
        char *pv= SvPV(sv,pv_len);
        if (pv_len > 0xFFFF)
            croak("Error: String in source hash is too long to store, max length is %u got length %lu", 0xFFFF, pv_len);
        if (SvUTF8(sv)) {
            if (downgrade)
                sv_utf8_downgrade(normalized_sv,1);
            if (SvUTF8(normalized_sv)) {
                SvUTF8_off(normalized_sv);
                sv_setiv(is_utf8_sv,1);
            } else {
                sv_setiv(is_utf8_sv,2);
            }
        }

MinPerfHashTwoLevel.xs

I32
_compare(pTHX_ SV *a, SV *b) {
    dMY_CXT;
    HE *a_he= hv_fetch_ent_with_keysv((HV*)SvRV(a),MPH_KEYSV_KEY_NORMALIZED,0);
    HE *b_he= hv_fetch_ent_with_keysv((HV*)SvRV(b),MPH_KEYSV_KEY_NORMALIZED,0);

    return sv_cmp(HeVAL(a_he),HeVAL(b_he));
}

U32
normalize_source_hash(pTHX_ HV *source_hv, AV *keys_av, U32 compute_flags, SV *buf_length_sv, char *state_pv) {
    dMY_CXT;
    HE *he;
    U32 buf_length= 0;
    U32 ctr;

    hv_iterinit(source_hv);
    while ((he= hv_iternext(source_hv))) {
        SV *val_sv= HeVAL(he);
        SV *val_normalized_sv;
        SV *val_is_utf8_sv;

        SV *key_sv;
        SV *key_normalized_sv;

MinPerfHashTwoLevel.xs

        hv_ksplit(hv,15);
        hv_store_ent_with_keysv(hv,MPH_KEYSV_KEY,            key_sv);
        hv_store_ent_with_keysv(hv,MPH_KEYSV_KEY_NORMALIZED, key_normalized_sv);
        hv_store_ent_with_keysv(hv,MPH_KEYSV_KEY_IS_UTF8,    key_is_utf8_sv);
        hv_store_ent_with_keysv(hv,MPH_KEYSV_VAL,            SvREFCNT_inc_simple_NN(val_sv));
        hv_store_ent_with_keysv(hv,MPH_KEYSV_VAL_NORMALIZED, val_normalized_sv);
        hv_store_ent_with_keysv(hv,MPH_KEYSV_VAL_IS_UTF8,    val_is_utf8_sv);
        /* install everything into the keys_av just in case normalize_with_flags() dies */
        av_push(keys_av,newRV_noinc((SV*)hv));

        buf_length += normalize_with_flags(aTHX_ key_sv, key_normalized_sv, key_is_utf8_sv, 1);
        buf_length += normalize_with_flags(aTHX_ val_sv, val_normalized_sv, val_is_utf8_sv, 0);

        key_pv= (U8 *)SvPV(key_normalized_sv,key_len);
        h0= mph_hash_with_state(state_pv,key_pv,key_len);

        hv_store_ent_with_keysv(hv,MPH_KEYSV_H0,             newSVuv(h0));
    }
    if (buf_length_sv)
        sv_setuv(buf_length_sv, buf_length);

    /* we now know how many keys there are, and what the max_xor_val should be */
    return av_top_index(keys_av)+1;
}

void
find_first_level_collisions(pTHX_ U32 bucket_count, AV *keys_av, AV *keybuckets_av, AV *h2_packed_av) {
    dMY_CXT;
    U32 i;
    for (i=0; i<bucket_count;i++) {

MinPerfHashTwoLevel.xs

            SvROK_on(*got_psv);
        } else {
            av= (AV *)SvRV(*got_psv);
        }

        av_push(av,newRV_inc((SV*)hv));
    }
}

AV *
idx_by_length(pTHX_ AV *keybuckets_av) {
    U32 i;
    U32 keybuckets_count= av_top_index(keybuckets_av) + 1;
    AV *by_length_av= (AV*)sv_2mortal((SV*)newAV());
    for( i = 0 ; i < keybuckets_count ; i++ ) {
        SV **got= av_fetch(keybuckets_av,i,0);
        AV *keys_av;
        SV *keys_ref;
        AV *target_av;
        IV len;
        if (!got) continue;
        keys_av= (AV *)SvRV(*got);
        len= av_top_index(keys_av) + 1;
        if (len<1) continue;

        got= av_fetch(by_length_av,len,1);
        if (SvPOK(*got)) {
            sv_catpvn(*got,(char *)&i,4);
        } else {
            sv_setpvn(*got,(char *)&i,4);
        }
    }
    return by_length_av;
}

void set_xor_val_in_buckets(pTHX_ U32 xor_val, AV *buckets_av, U32 idx1, U32 *idx_start, char *is_used, AV *keys_in_bucket_av) {
    dMY_CXT;
    U32 *idx2;
    HV *idx1_hv;
    U32 i;
    U32 keys_in_bucket_count= av_top_index(keys_in_bucket_av) + 1;

    SV **buckets_rvp= av_fetch(buckets_av, idx1, 1);

MinPerfHashTwoLevel.xs

        if (!got)
            croak("panic: no keybuckets_av for idx %u",idx1);
        keys_in_bucket_av= (AV *)SvRV(*got);
        *idx2_start= singleton_pos;
        set_xor_val_in_buckets(aTHX_ xor_val, buckets_av, idx1, idx2_start, is_used, keys_in_bucket_av);
    }
    return 0;
}

U32
solve_collisions_by_length(pTHX_ U32 bucket_count, U32 max_xor_val, AV *by_length_av, AV *h2_packed_av, AV *keybuckets_av, U32 variant, AV *buckets_av) {
    U32 bad_idx= 0;
    I32 singleton_pos= 0;
    IV len_idx;
    char *is_used;
    U32 *idx2_start;

    /* this is used to quickly tell if we have used a particular bucket yet */
    Newxz(is_used,bucket_count,char);
    SAVEFREEPV(is_used);

    /* used to keep track of the indexes that a set of keys map into -
     * allocated as a flat array of U32, one slot per length grouping */
    Newxz(idx2_start, av_top_index(by_length_av)+1, U32);
    SAVEFREEPV(idx2_start);

    /* now loop through and process the keysets from most collisions to least */
    for (len_idx= av_top_index(by_length_av); len_idx > 0 && !bad_idx; len_idx--) {
        SV **idx1_packed_sv= av_fetch(by_length_av, len_idx, 0);
        /* deal with the possibility that there are gaps in the length grouping,
         * for instance we might have some 13-way collisions and some 11-way collisions
         * without any 12-way collisions. (This should be rare, but is possible.) */
        if (!idx1_packed_sv || !SvPOK(*idx1_packed_sv))
            continue;

        if (len_idx == 1) {
            bad_idx= place_singletons(aTHX_ bucket_count, *idx1_packed_sv, keybuckets_av,
                is_used, idx2_start, buckets_av);
        } else {
            bad_idx= solve_collisions(aTHX_ bucket_count, max_xor_val, *idx1_packed_sv, h2_packed_av, keybuckets_av,

MinPerfHashTwoLevel.xs


    IV len_idx;

    U32 bucket_count;
    U32 max_xor_val;
    U32 i;

    U32 variant;
    U32 compute_flags;

    SV* buf_length_sv;

    HV* source_hv;

    AV *buckets_av;
    AV *keys_av;
    AV *by_length_av;
    AV *keybuckets_av;
    AV *h2_packed_av;

    RETVAL = 0;

    /**** extract the various reference data we need from $self */

    he= hv_fetch_ent_with_keysv(self_hv,MPH_KEYSV_VARIANT,0);
    if (he) {
        variant= SvUV(HeVAL(he));

MinPerfHashTwoLevel.xs

        state_pv= (U8 *)SvPV(state_sv,state_len);
        if (state_len != MPH_STATE_BYTES) {
            croak("Error: state vector must be at exactly %d bytes",(int)MPH_SEED_BYTES);
        }
    } else {
        croak("panic: no state in self?");
    }

    he= hv_fetch_ent_with_keysv(self_hv,MPH_KEYSV_BUF_LENGTH,1);
    if (he) {
        buf_length_sv= HeVAL(he);
    } else {
        croak("panic: out of memory in lvalue fetch for 'buf_length' in self");
    }

    he= hv_fetch_ent_with_keysv(self_hv,MPH_KEYSV_SOURCE_HASH,0);
    if (he) {
        source_hv= (HV*)SvRV(HeVAL(he));
    } else {
        croak("panic: no source_hash in self");
    }

    he= hv_fetch_ent_with_keysv(self_hv,MPH_KEYSV_BUCKETS,1);

MinPerfHashTwoLevel.xs

        }
        buckets_av= newAV();
        SvRV_set(rv,(SV*)buckets_av);
        SvROK_on(rv);
    } else {
        croak("panic: out of memory in lvalue fetch for 'buckets' in self");
    }

    /**** build an array of hashes in keys_av based on the normalized contents of source_hv */
    keys_av= (AV *)sv_2mortal((SV*)newAV());
    bucket_count= normalize_source_hash(aTHX_ source_hv, keys_av, compute_flags, buf_length_sv, state_pv);
    max_xor_val= INT32_MAX;

    /* if the caller wants deterministic results we sort the keys_av
     * before we compute collisions - depending on the order we process
     * the keys we might resolve the collisions differently */
    if (compute_flags & MPH_F_DETERMINISTIC)
        sortsv(AvARRAY(keys_av),bucket_count,_compare);

    /**** find the collisions from the data we just computed, build an AoAoH and AoS of the
     **** collision data */
    keybuckets_av= (AV*)sv_2mortal((SV*)newAV()); /* AoAoH - hashes from keys_av */
    h2_packed_av= (AV*)sv_2mortal((SV*)newAV());  /* AoS - packed h1 */
    find_first_level_collisions(aTHX_ bucket_count, keys_av, keybuckets_av, h2_packed_av);

    /* Sort the buckets by size by constructing an AoS, with the outer array indexed by length,
     * and the inner string being the list of items of that length. (Thus the contents of index
     * 0 is empty/undef.)
     * The end result is that we can process the collisions from the bucket with the most keys
     * to the bucket with the least in O(N) and not O(N log2 N).
     *
     * The length of the array (av_top_index+1) reflects the number of items in the bucket
     * with the most collisions - we use this later to size some of our data structures.
     */
    by_length_av= idx_by_length(aTHX_ keybuckets_av);
        
    RETVAL= solve_collisions_by_length(aTHX_ bucket_count, max_xor_val, by_length_av, h2_packed_av, keybuckets_av, 
        variant, buckets_av);
}
    OUTPUT:
        RETVAL



MODULE = Algorithm::MinPerfHashTwoLevel		PACKAGE = Tie::Hash::MinPerfHashTwoLevel::OnDisk

SV *
packed_xs(variant,buf_length_sv,state_sv,comment_sv,flags,buckets_av)
        U32 variant
        SV* buf_length_sv
        SV* state_sv
        SV* comment_sv
        AV *buckets_av
        U32 flags
    PREINIT:
        dMY_CXT;
    PROTOTYPE: $$$$$\@
    CODE:
{
    U32 buf_length= SvUV(buf_length_sv);
    U32 bucket_count= av_top_index(buckets_av) + 1;
    U32 header_rlen= _roundup(sizeof(struct mph_header),16);
    STRLEN state_len;
    char *state_pv= SvPV(state_sv, state_len);
    
    U32 alignment= sizeof(U64);
    U32 state_rlen= _roundup(state_len,alignment);
    U32 table_rlen= _roundup(sizeof(struct mph_bucket) * bucket_count,alignment);
    U32 key_flags_rlen= _roundup((bucket_count * 2 + 7 ) / 8,alignment);
    U32 val_flags_rlen= _roundup((bucket_count + 7) / 8,alignment);
    U32 str_rlen= _roundup( buf_length
                            + 2
                            + ( SvOK(comment_sv) ? sv_len(comment_sv) + 1 : 1 )
                            + ( 2 + 8 ),
                            alignment );

    U32 total_size;
    HV *str_ofs_hv= (HV *)sv_2mortal((SV*)newHV());
    SV *sv_buf;
    char *start;
    struct mph_header *head;

lib/Algorithm/MinPerfHashTwoLevel.pm

    my $source= $self->get_state();
    return $self->_seed(substr($source,0,STADTX_HASH_SEED_BYTES) ^ substr($source,STADTX_HASH_SEED_BYTES));
}


sub _seed {
    my $self= shift;
    if (@_) {
        my $seed= shift;
        Carp::confess(sprintf "Seed should be undef, or a string exactly %d bytes long, not %d bytes",
            STADTX_HASH_SEED_BYTES,length($seed))
            if defined($seed) and length($seed) != STADTX_HASH_SEED_BYTES;
        $self->{seed}= $seed;
        delete $self->{state};
    }
    if ( !defined $self->{seed} ) {
                       #1234567812345678
        $self->{seed}= "MinPerfHash2Levl";
        delete $self->{state};
    }
    return $self->{seed};
}

lib/Tie/Hash/MinPerfHashTwoLevel/OnDisk.pm


    my $seed= $opts{seed};
    my $hasher= Algorithm::MinPerfHashTwoLevel->new(
        debug => $debug,
        seed => (ref $seed ? $$seed : $seed),
        variant => $variant,
        compute_flags => $compute_flags,
        max_tries => $opts{max_tries},
    );
    my $buckets= $hasher->compute($source_hash);
    my $buf_length= $hasher->{buf_length};
    my $state= $hasher->{state};
    my $buf= packed_xs($variant, $buf_length, $state, $comment, $compute_flags, @$buckets);
    $$seed= $hasher->get_seed if ref $seed;

    my $tmp_file= "$ofile.$$";
    open my $ofh, ">", $tmp_file
        or die "Failed to open '$tmp_file' for output: $!";
    print $ofh $buf
        or die "failed to print to '$tmp_file': $!";
    close $ofh
        or die "failed to close '$tmp_file': $!";
    rename $tmp_file, $ofile

lib/Tie/Hash/MinPerfHashTwoLevel/OnDisk.pm

    U32 table_ofs       -> offset in file where bucket table starts
    U32 key_flags_ofs   -> offset in file where key flags are located
    U32 val_flags_ofs   -> offset in file where val flags are located
    U32 str_buf_ofs     -> offset in file where strings are located
    U64 general_flags   -> flags used for this header
    U64 reserved        -> reserved field.

All "_ofs" values in the header are a multiple of 8, and the relevant sections
maybe be null padded to ensure this is so.

The string buffer contains the comment at str_buf_ofs+1; its length can be found
with strlen(). The comment may NOT contain nulls and will be null terminated. All
other strings in the table are NOT null padded; the length data stored in the
bucket records should be used to determine the length of the keys and values.
The last 8 bytes of the file contain a hash checksum of the rest of the file.
This value is itself 8 byte aligned.
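
As a rough illustration of the layout described above (this is a hypothetical
sketch, not code from the distribution: it assumes the whole image has been
slurped into $data, that $str_buf_ofs has already been unpacked from the
header, and that the fields are little-endian), the comment and the trailing
checksum could be picked out like this:

    # hypothetical sketch only - byte order and 64-bit unpack support assumed
    my $comment  = unpack "Z*", substr($data, $str_buf_ofs + 1);      # comment is null terminated
    my $checksum = unpack "Q<", substr($data, length($data) - 8, 8);  # last 8 bytes of the file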

Buckets:

   U32 xor_val      -> the xor_val for this bucket's h1 lookups (0 means none)
                       for variant 1 and later this may also be treated as a signed
                       integer, with negative values representing the index of
                       the bucket which contains the correct key (-index-1).
   U32 key_ofs      -> offset from str_buf_ofs to find this key (nonzero always)
   U32 val_ofs      -> offset from str_buf_ofs to find this value (0 means undef)
   U16 key_len      -> length of key
   U16 val_len      -> length of value
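
Going by the field sizes above, each bucket record occupies 16 bytes (three
U32 values followed by two U16 values). As a hypothetical sketch (not module
code), bucket $i could be decoded from a raw image in $data, assuming
little-endian byte order, no padding between fields, and that $table_ofs and
$str_buf_ofs have already been read from the header:

    # hypothetical sketch only
    my $rec = substr($data, $table_ofs + 16 * $i, 16);
    my ($xor_val, $key_ofs, $val_ofs, $key_len, $val_len) = unpack "V3 v2", $rec;
    my $key = substr($data, $str_buf_ofs + $key_ofs, $key_len);
    my $val = $val_ofs ? substr($data, $str_buf_ofs + $val_ofs, $val_len)
                       : undef;    # a val_ofs of 0 means the value is undef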

The hash function used is SipHash-1-3, which uses a 16 byte seed to produce
a 32 byte state vector used for hashing. The file contains the state vector
required for hashing and does not include the original seed.

=head2 EXPORT

None by default.
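
The helper functions used by the bundled tests can be imported by name. A
minimal usage sketch follows; the file path is made up, and it assumes
mph2l_tied_hashref returns a reference to a hash tied to the on-disk image,
as its name suggests:

    use Tie::Hash::MinPerfHashTwoLevel::OnDisk qw(mph2l_make_file mph2l_tied_hashref);

    # build an image from a plain hash, then mount it for read-only access
    mph2l_make_file("/tmp/example.mph2l", source_hash => { apple => 1, pear => 2 }, canonical => 1);
    my $href = mph2l_tied_hashref("/tmp/example.mph2l", validate => 1);
    print $href->{apple}, "\n";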

=head1 SEE ALSO

mph2l.h

    MPH_INIT_KEYSV(MPH_KEYSV_KEY_NORMALIZED,"key_normalized");  \
    MPH_INIT_KEYSV(MPH_KEYSV_KEY_IS_UTF8,"key_is_utf8");        \
    MPH_INIT_KEYSV(MPH_KEYSV_VAL,"val");                        \
    MPH_INIT_KEYSV(MPH_KEYSV_VAL_NORMALIZED,"val_normalized");  \
    MPH_INIT_KEYSV(MPH_KEYSV_VAL_IS_UTF8,"val_is_utf8");        \
                                                                \
    MPH_INIT_KEYSV(MPH_KEYSV_VARIANT,"variant");                \
    MPH_INIT_KEYSV(MPH_KEYSV_COMPUTE_FLAGS,"compute_flags");    \
    MPH_INIT_KEYSV(MPH_KEYSV_STATE,"state");                    \
    MPH_INIT_KEYSV(MPH_KEYSV_SOURCE_HASH,"source_hash");        \
    MPH_INIT_KEYSV(MPH_KEYSV_BUF_LENGTH,"buf_length");          \
    MPH_INIT_KEYSV(MPH_KEYSV_BUCKETS,"buckets");                \
    MPH_INIT_KEYSV(MPH_KEYSV_MOUNT,"mount");                    \
} STMT_END

#define MPH_F_FILTER_UNDEF          (1<<0)
#define MPH_F_DETERMINISTIC         (1<<1)
#define MPH_F_NO_DEDUPE             (1<<2)
#define MPH_F_VALIDATE              (1<<3)

#define MPH_MOUNT_ERROR_OPEN_FAILED     (-1)

ppport.h

reg_named_buff_all||5.009005|
reg_named_buff_exists||5.009005|
reg_named_buff_fetch||5.009005|
reg_named_buff_firstkey||5.009005|
reg_named_buff_iter|||
reg_named_buff_nextkey||5.009005|
reg_named_buff_scalar||5.009005|
reg_named_buff|||
reg_node|||
reg_numbered_buff_fetch|||
reg_numbered_buff_length|||
reg_numbered_buff_store|||
reg_qr_package|||
reg_scan_name|||
reg_skipcomment|||n
reg_temp_copy|||
reganode|||
regatom|||
regbranch|||
regclass|||
regcp_restore|||

ppport.h

upg_version||5.009005|
usage|||
utf16_textfilter|||
utf16_to_utf8_reversed||5.006001|
utf16_to_utf8||5.006001|
utf8_distance||5.006000|
utf8_hop_back|||n
utf8_hop_forward|||n
utf8_hop_safe|||n
utf8_hop||5.006000|n
utf8_length||5.007001|
utf8_mg_len_cache_update|||
utf8_mg_pos_cache_update|||
utf8_to_uvchr_buf|5.015009|5.015009|p
utf8_to_uvchr|||p
utf8n_to_uvchr_error|||n
utf8n_to_uvchr||5.007001|n
utf8n_to_uvuni||5.007001|
utilize|||
uvchr_to_utf8_flags||5.007003|
uvchr_to_utf8||5.007001|

ppport.h

yyparse|||
yyquit|||
yyunlex|||
yywarn|||
);

if (exists $opt{'list-unsupported'}) {
  my $f;
  for $f (sort { lc $a cmp lc $b } keys %API) {
    next unless $API{$f}{todo};
    print "$f ", '.'x(40-length($f)), " ", format_version($API{$f}{todo}), "\n";
  }
  exit 0;
}

# Scan for possible replacement candidates

my(%replace, %need, %hints, %warnings, %depends);
my $replace = 0;
my($hint, $define, $function);

ppport.h

#endif

#ifndef av_top_index
#  define av_top_index                   AvFILL
#endif
#ifndef ERRSV
#  define ERRSV                          get_sv("@",FALSE)
#endif

/* Hint: gv_stashpvn
 * This function's backport doesn't support the length parameter, but
 * rather ignores it. Portability can only be ensured if the length
 * parameter is used for speed reasons, but the length can always be
 * correctly computed from the string argument.
 */
#ifndef gv_stashpvn
#  define gv_stashpvn(str,len,create)    gv_stashpv(str,create)
#endif

/* Replace: 1 */
#ifndef get_cv
#  define get_cv                         perl_get_cv
#endif

ppport.h

        else {
            ret = _ppport_utf8_to_uvchr_buf_callee(
                                            s, curlen, retlen, UTF8_ALLOW_ANY);
            /* Override with the REPLACEMENT character, as that is what the
             * modern version of this function returns */
            ret = UNICODE_REPLACEMENT;

#           if (PERL_BCDVERSION < 0x5016000)

            /* Versions earlier than this don't necessarily return the proper
             * length.  It should not extend past the end of string, nor past
             * what the first byte indicates the length is, nor past the
             * continuation characters */
            if (retlen && *retlen >= 0) {
                *retlen = _ppport_MIN(*retlen, curlen);
                *retlen = _ppport_MIN(*retlen, UTF8SKIP(s));
                unsigned int i = 1;
                do {
                    if (s[i] < 0x80 || s[i] > 0xBF) {
                        *retlen = i;
                        break;
                    }

ppport.h


#if defined(NEED_my_strlcat) || defined(NEED_my_strlcat_GLOBAL)

#define my_strlcat DPPP_(my_my_strlcat)
#define Perl_my_strlcat DPPP_(my_my_strlcat)


Size_t
DPPP_(my_my_strlcat)(char *dst, const char *src, Size_t size)
{
    Size_t used, length, copy;

    used = strlen(dst);
    length = strlen(src);
    if (size > 0 && used < size - 1) {
        copy = (length >= size - used) ? size - used - 1 : length;
        memcpy(dst + used, src, copy);
        dst[used + copy] = '\0';
    }
    return used + length;
}
#endif
#endif

#if !defined(my_strlcpy)
#if defined(NEED_my_strlcpy)
static Size_t DPPP_(my_my_strlcpy)(char * dst, const char * src, Size_t size);
static
#else
extern Size_t DPPP_(my_my_strlcpy)(char * dst, const char * src, Size_t size);

ppport.h


#if defined(NEED_my_strlcpy) || defined(NEED_my_strlcpy_GLOBAL)

#define my_strlcpy DPPP_(my_my_strlcpy)
#define Perl_my_strlcpy DPPP_(my_my_strlcpy)


Size_t
DPPP_(my_my_strlcpy)(char *dst, const char *src, Size_t size)
{
    Size_t length, copy;

    length = strlen(src);
    if (size > 0) {
        copy = (length >= size) ? size - 1 : length;
        memcpy(dst, src, copy);
        dst[copy] = '\0';
    }
    return length;
}

#endif
#endif
#ifndef PERL_PV_ESCAPE_QUOTE
#  define PERL_PV_ESCAPE_QUOTE           0x0001
#endif

#ifndef PERL_PV_PRETTY_QUOTE
#  define PERL_PV_PRETTY_QUOTE           PERL_PV_ESCAPE_QUOTE

t/Corruption.t


use Tie::Hash::MinPerfHashTwoLevel::OnDisk qw(mph2l_tied_hashref mph2l_make_file);

# trying this with variants before 3 will typically result in failed tests at offsets 8-24,
# that is, we fail to detect that the file has been corrupted. :-(
mph2l_make_file("$tmpdir/test_000.mph2l",source_hash=>{1..10},canonical=>1);
open my $fh,"<", "$tmpdir/test_000.mph2l"
    or die "failed to open '$tmpdir/test_000.mph2l' for read: $!";
my $data= do { local $/; <$fh> };
close $fh;
$data = "" unless defined $data;
ok($data,sprintf "got data as expected (length: %d)",length($data));
for my $pos (0..length($data)-1) {
    my $chr= substr($data,$pos,1);
    substr( $data, $pos, 1, chr( ord($chr) ^ ( 1 << rand(8) ) ) );
    my $fn= sprintf "$tmpdir/test_%03d.mph2l", $pos+1;
    open my $ofh, ">", $fn or die "failed to open '$fn' for write: $!";
    print $ofh $data;
    close $ofh;
    substr($data,$pos,1,$chr);
}
ok(1,"constructed files ok");
for my $pos (0 .. length($data)) {
    my $fn= sprintf "$tmpdir/test_%03d.mph2l", $pos;
    my $got= eval { mph2l_tied_hashref($fn,validate=>1); 1 };
    my $error= $got ? "" : "Error: $@";
    if ($pos) {
        ok( !$got, sprintf "munging offset %d is noticed", $pos-1 );
        ok( $error=~/Error: Failed to mount/, sprintf "munging offset %d produces an error of sorts", $pos-1 );
    } else {
        ok( $got, "loaded base image ok" );
        ok ( !$error, "No error loading base image");
    }

t/OnDisk.pl

    return $data;
}

sub files_eq {
    my ($lfile,$rfile)= @_;
    my $left= slurp($lfile);
    my $right= slurp($rfile);
    my $ret= (defined($left) == defined($right) and defined($right) and $left eq $right);
    if (!$ret) {
        diag (sprintf "'%s' is %s bytes, '%s' is %s bytes",
            $lfile => length($left)//'undef', $rfile => length($right)//'undef');
        require Data::Dumper;
        diag Data::Dumper::qquote($left),"\n";
        diag Data::Dumper::qquote($right),"\n";

    }
    return $ret;
}

my $class= 'Tie::Hash::MinPerfHashTwoLevel::OnDisk';
plan tests => 2 + 1830 * (defined($ENV{VARIANT}) ? 1 : MAX_VARIANT - MIN_VARIANT + 1);


