X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=blobdiff_plain;f=dump.c;h=9d2595f6a92e0d403fb5a5704e99e79b2a047b53;hb=b32b0a5d97553a28be45508a0e60a97b2bf53203;hp=9d2a01729fa784e13c85ee4592a907f534ac6231;hpb=2ef28da1578e18cf36b9a30b71ac471521d2b507;p=p5sagit%2Fp5-mst-13.2.git

diff --git a/dump.c b/dump.c
index 9d2a017..9d2595f 100644
--- a/dump.c
+++ b/dump.c
@@ -195,7 +195,7 @@ Perl_sv_peek(pTHX_ SV *sv)
 	    unref++;
 	}
 	else if (DEBUG_R_TEST && SvREFCNT(sv) > 1) {
-	    Perl_sv_catpvf(aTHX_ t, "<%u>", SvREFCNT(sv));
+	    Perl_sv_catpvf(aTHX_ t, "<%"UVuf">", (UV)SvREFCNT(sv));
 	}
@@ -1044,15 +1044,24 @@ Perl_do_sv_dump(pTHX_ I32 level, PerlIO *file, SV *sv, I32 nest, I32 maxnest, bo
 		    }
 		}
 		PerlIO_putc(file, ')');
-		/* Now calculate quality wrt theoretical value */
+		/* The "quality" of a hash is defined as the total number of
+		   comparisons needed to access every element once, relative
+		   to the expected number needed for a random hash.
+
+		   The total number of comparisons is equal to the sum of
+		   the squares of the number of entries in each bucket.
+		   For a random hash of n keys into k buckets, the expected
+		   value is
+				n + n(n-1)/2k
+		*/
+
 		for (i = max; i > 0; i--) {	/* Precision: count down. */
 		    sum += freq[i] * i * i;
 		}
 		while ((keys = keys >> 1))
 		    pow2 = pow2 << 1;
-		/* Approximate by Poisson distribution */
 		theoret = HvKEYS(sv);
-		theoret += theoret * theoret/pow2;
+		theoret += theoret * (theoret-1)/pow2;
 		PerlIO_putc(file, '\n');
 		Perl_dump_indent(aTHX_ level, file, "  hash quality = %.1"NVff"%%", theoret/sum*100);
 	    }
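
For context, here is a minimal, self-contained C sketch of the "hash quality" metric that the new comment in the second hunk describes. It is not part of dump.c: the freq[] histogram, the key count and the bucket count are invented example values, and the expected cost n + n(n-1)/2k is computed directly from the assumed bucket count rather than through the pow2 shift approximation that Perl_do_sv_dump() uses.

/* Standalone sketch, not part of dump.c: illustrates the hash quality
 * metric described in the patch.  All values below are invented. */
#include <stdio.h>

int main(void)
{
    /* freq[i] = number of buckets holding exactly i entries (example data:
     * 5 buckets with one key, 3 with two, 1 with three). */
    const unsigned freq[] = { 0, 5, 3, 1 };
    const unsigned max = 3;   /* longest chain in the example                */
    const unsigned k   = 16;  /* assumed number of buckets                   */
    unsigned n = 0;           /* total number of keys                        */
    double sum = 0.0;         /* observed cost: sum of squared chain lengths */
    double theoret;           /* expected cost for a random hash             */
    unsigned i;

    for (i = 1; i <= max; i++)
        n += freq[i] * i;

    /* Same accumulation as the patched code: freq[i] * i * i. */
    for (i = max; i > 0; i--)
        sum += (double)freq[i] * i * i;

    /* Expected comparisons for a random hash of n keys into k buckets:
     * n + n(n-1)/2k.  dump.c derives an approximate power-of-two divisor
     * from the key count (the pow2 shift loop in the diff); here 2k is
     * computed exactly for clarity. */
    theoret = (double)n + (double)n * (double)(n - 1) / (2.0 * k);

    /* 100% means "as good as a random hash"; lower means more clustering. */
    printf("%u keys in %u buckets: hash quality = %.1f%%\n",
           n, k, theoret / sum * 100.0);
    return 0;
}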