5 * Refactor this to split out D:M code from Devel::Size code.
7 * Start migrating Devel::Size's Size.xs towards the new code.
9 * ADD_PRE_ATTR for index should check if the ptr is new first. Currently we're
10 * generating lots of ADD_PRE_ATTR's for SVs that we've already seen via other paths.
11 * That's wasteful and likely to cause subtle bugs.
13 * Give HE's their own node so keys and values can be tied together
17 #undef NDEBUG /* XXX */
20 #define PERL_NO_GET_CONTEXT
27 #include "refcounted_he.h"
29 /* Not yet in ppport.h */
31 # define CvISXSUB(cv) (CvXSUB(cv) ? TRUE : FALSE)
34 # define SvRV_const(rv) SvRV(rv)
37 # define SvOOK_offset(sv, len) STMT_START { len = SvIVX(sv); } STMT_END
40 # define SvIsCOW(sv) ((SvFLAGS(sv) & (SVf_FAKE | SVf_READONLY)) == \
41 (SVf_FAKE | SVf_READONLY))
43 #ifndef SvIsCOW_shared_hash
44 # define SvIsCOW_shared_hash(sv) (SvIsCOW(sv) && SvLEN(sv) == 0)
46 #ifndef SvSHARED_HEK_FROM_PV
47 # define SvSHARED_HEK_FROM_PV(pvx) \
48 ((struct hek*)(pvx - STRUCT_OFFSET(struct hek, hek_key)))
52 # define PL_opargs opargs
53 # define PL_op_name op_name
57 /* "structured exception" handling is a Microsoft extension to C and C++.
58 It's *not* C++ exception handling - C++ exception handling can't capture
59 SEGVs and suchlike, whereas this can. There's no known analogous
60 functionality on other platforms. */
62 # define TRY_TO_CATCH_SEGV __try
63 # define CAUGHT_EXCEPTION __except(EXCEPTION_EXECUTE_HANDLER)
65 # define TRY_TO_CATCH_SEGV if(1)
66 # define CAUGHT_EXCEPTION else
70 # define __attribute__(x)
73 #if 0 && defined(DEBUGGING)
74 #define dbg_printf(x) printf x
79 #define TAG /* printf( "# %s(%d)\n", __FILE__, __LINE__ ) */
82 /* The idea is to have a tree structure to store 1 bit per possible pointer
83 address. The lowest 16 bits are stored in a block of 8192 bytes.
84 The blocks are in a 256-way tree, indexed by the rest of the pointer.
85 This can cope with 32 and 64 bit pointers, and any address space layout,
86 without excessive memory needs. The assumption is that your CPU cache
87 works :-) (And that we're not going to bust it) */
90 #define LEAF_BITS (16 - BYTE_BITS)
91 #define LEAF_MASK 0x1FFF
93 typedef struct npath_node_st npath_node_t;
94 struct npath_node_st {
109 /* My hunch (not measured) is that for most architectures pointers will
110 start with 0 bits, hence the start of this array will be hot, and the
111 end unused. So put the flags next to the hot end. */
113 int min_recurse_threshold;
114 /* callback hooks and data */
115 int (*add_attr_cb)(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *name, UV value);
116 void (*free_state_cb)(pTHX_ struct state *st);
117 void *state_cb_data; /* free'd by free_state() after free_state_cb() call */
118 /* this stuff will be moved to state_cb_data later */
120 FILE *node_stream_fh;
121 char *node_stream_name;
124 #define ADD_SIZE(st, leafname, bytes) (NPathAddSizeCb(st, leafname, bytes) (st)->total_size += (bytes))
126 #define PATH_TRACKING
129 #define pPATH npath_node_t *NPathArg
131 /* A subtle point here is that dNPathNodes and NPathPushNode leave NP pointing
132 * to the next unused slot (though with prev already filled in)
133 * whereas NPathLink leaves NP unchanged, it just fills in the slot NP points
134 * to and passes that NP value to the function being called.
135 * seqn==0 indicates the node is new (hasn't been output yet)
137 #define dNPathNodes(nodes, prev_np) \
138 npath_node_t name_path_nodes[nodes+1]; /* +1 for NPathLink */ \
139 npath_node_t *NP = &name_path_nodes[0]; \
140 NP->seqn = NP->type = 0; NP->id = Nullch; /* safety/debug */ \
142 #define NPathPushNode(nodeid, nodetype) \
144 NP->type = nodetype; \
146 if(0)fprintf(stderr,"NPathPushNode (%p <-) %p <- [%d %s]\n", NP->prev, NP, nodetype,(char*)nodeid);\
148 NP->id = Nullch; /* safety/debug */ \
151 #define NPathSetNode(nodeid, nodetype) \
152 (NP-1)->id = nodeid; \
153 (NP-1)->type = nodetype; \
154 if(0)fprintf(stderr,"NPathSetNode (%p <-) %p <- [%d %s]\n", (NP-1)->prev, (NP-1), nodetype,(char*)nodeid);\
156 #define NPathPopNode \
159 /* dNPathUseParent points NP directly at the parents' name_path_nodes array
160 * So the function can only safely call ADD_*() but not NPathLink, unless the
161 * caller has spare nodes in its name_path_nodes.
163 #define dNPathUseParent(prev_np) npath_node_t *NP = (((prev_np+1)->prev = prev_np), prev_np+1)
165 #define NPtype_NAME 0x01
166 #define NPtype_LINK 0x02
167 #define NPtype_SV 0x03
168 #define NPtype_MAGIC 0x04
169 #define NPtype_OP 0x05
171 /* XXX these should probably be generalized into flag bits */
172 #define NPattr_LEAFSIZE 0x00
173 #define NPattr_NAME 0x01
174 #define NPattr_PADFAKE 0x02
175 #define NPattr_PADNAME 0x03
176 #define NPattr_PADTMP 0x04
177 #define NPattr_NOTE 0x05
178 #define NPattr_PRE_ATTR 0x06
180 #define _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, np) (st->add_attr_cb && st->add_attr_cb(aTHX_ st, np, attr_type, attr_name, attr_value))
181 #define ADD_ATTR(st, attr_type, attr_name, attr_value) _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, NP-1)
182 #define ADD_PRE_ATTR(st, attr_type, attr_name, attr_value) (assert(!attr_type), _ADD_ATTR_NP(st, NPattr_PRE_ATTR, attr_name, attr_value, NP-1))
184 #define _NPathLink(np, nid, ntype) (((np)->id=nid), ((np)->type=ntype), ((np)->seqn=0))
185 #define NPathLink(nid) (_NPathLink(NP, nid, NPtype_LINK), NP)
186 /* add a link and a name node to the path - a special case for op_size */
187 #define NPathLinkAndNode(nid, nid2) (_NPathLink(NP, nid, NPtype_LINK), _NPathLink(NP+1, nid2, NPtype_NAME), ((NP+1)->prev=NP), (NP+1))
188 #define NPathOpLink (NPathArg)
189 #define NPathAddSizeCb(st, name, bytes) (st->add_attr_cb && st->add_attr_cb(aTHX_ st, NP-1, NPattr_LEAFSIZE, (name), (bytes))),
193 #define NPathAddSizeCb(st, name, bytes)
194 #define pPATH void *npath_dummy /* XXX ideally remove */
195 #define dNPathNodes(nodes, prev_np) dNOOP
196 #define NPathLink(nodeid, nodetype) NULL
197 #define NPathOpLink NULL
198 #define ADD_ATTR(st, attr_type, attr_name, attr_value) NOOP
200 #endif /* PATH_TRACKING */
207 static const char *svtypenames[SVt_LAST] = {
209 "NULL", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVBM", "PVLV", "PVAV", "PVHV", "PVCV", "PVGV", "PVFM", "PVIO",
210 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
211 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
212 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
213 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
214 #elif PERL_VERSION < 13
215 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
217 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
/* Print a short human-readable label for one name-path node to fp,
 * dispatching on npath_node->type (SV, OP, MAGIC, or a plain string id). */
222 np_print_node_name(pTHX_ FILE *fp, npath_node_t *npath_node)
224 char buf[1024]; /* XXX */
226 switch (npath_node->type) {
227 case NPtype_SV: { /* id is pointer to the SV sv_size was called on */
228 const SV *sv = (SV*)npath_node->id;
229 int type = SvTYPE(sv);
/* SVt_IV with ROK set is shown as "RV" since the RV type was folded into IV */
230 char *typename = (type == SVt_IV && SvROK(sv)) ? "RV" : svtypenames[type];
231 fprintf(fp, "SV(%s)", typename);
232 switch(type) { /* add some useful details */
233 case SVt_PVAV: fprintf(fp, " fill=%d/%ld", av_len((AV*)sv), AvMAX((AV*)sv)); break;
234 case SVt_PVHV: fprintf(fp, " fill=%ld/%ld", HvFILL((HV*)sv), HvMAX((HV*)sv)); break;
238 case NPtype_OP: { /* id is pointer to the OP op_size was called on */
239 const OP *op = (OP*)npath_node->id;
240 fprintf(fp, "OP(%s)", OP_NAME(op));
243 case NPtype_MAGIC: { /* id is pointer to the MAGIC struct */
244 MAGIC *magic_pointer = (MAGIC*)npath_node->id;
245 /* XXX it would be nice if we could reuse mg_names.c [sigh] */
246 fprintf(fp, "MAGIC(%c)", magic_pointer->mg_type ? magic_pointer->mg_type : '0');
250 fprintf(fp, "%s", npath_node->id);
253 fprintf(fp, "%s", npath_node->id);
255 default: /* assume id is a string pointer */
256 fprintf(fp, "UNKNOWN(%d,%p)", npath_node->type, npath_node->id);
/* Emit the ": " indentation prefix for a node at the given depth when
 * dumping the name-path tree to stderr. */
263 np_dump_indent(int depth) {
265 fprintf(stderr, ": ");
/* Walk up the name-path chain from npath_node to the root, visiting (via cb)
 * every node that hasn't been output yet (seqn==0).  Ancestors are emitted
 * first (by recursing on ->prev), each node gets its depth and a fresh
 * sequence number.  If cb asks for the node to be ignored, its sequence
 * number is reclaimed and the deeper node is re-linked past it. */
269 np_walk_new_nodes(pTHX_ struct state *st,
270 npath_node_t *npath_node,
271 npath_node_t *npath_node_deeper,
272 int (*cb)(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper))
274 if (npath_node->seqn) /* node already output */
277 if (npath_node->prev) {
278 np_walk_new_nodes(aTHX_ st, npath_node->prev, npath_node, cb); /* recurse */
279 npath_node->depth = npath_node->prev->depth + 1;
281 else npath_node->depth = 0;
282 npath_node->seqn = ++st->seqn;
285 if (cb(aTHX_ st, npath_node, npath_node_deeper)) {
286 /* ignore this node */
287 assert(npath_node->prev);
288 assert(npath_node->depth);
289 assert(npath_node_deeper);
/* give back the sequence number and splice this node out of the chain */
291 npath_node->seqn = --st->seqn;
292 npath_node_deeper->prev = npath_node->prev;
/* np_walk_new_nodes() callback: dump one node to stderr as an indented line
 * showing its name, sequence number and depth. */
300 np_dump_formatted_node(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
/* disabled: would suppress LINK nodes from the dump */
301 if (0 && npath_node->type == NPtype_LINK)
303 np_dump_indent(npath_node->depth);
304 np_print_node_name(aTHX_ stderr, npath_node);
305 if (npath_node->type == NPtype_LINK)
306 fprintf(stderr, "->"); /* cosmetic */
307 fprintf(stderr, "\t\t[#%ld @%u] ", npath_node->seqn, npath_node->depth);
308 fprintf(stderr, "\n");
/* add_attr_cb implementation for the stderr "dump" output mode: first flush
 * any not-yet-printed ancestor nodes, then print this attribute (leaf size,
 * name, note, pad info, ...) indented one level below the node. */
313 np_dump_node_path_info(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
315 if (attr_type == NPattr_LEAFSIZE && !attr_value)
316 return 0; /* ignore zero sized leaf items */
317 np_walk_new_nodes(aTHX_ st, npath_node, NULL, np_dump_formatted_node);
318 np_dump_indent(npath_node->depth+1);
320 case NPattr_LEAFSIZE:
/* "=" shows the running total including this leaf */
321 fprintf(stderr, "+%ld %s =%ld", attr_value, attr_name, attr_value+st->total_size);
324 fprintf(stderr, "~NAMED('%s') %lu", attr_name, attr_value);
327 fprintf(stderr, "~note %s %lu", attr_name, attr_value);
332 fprintf(stderr, "~pad%lu %s %lu", attr_type, attr_name, attr_value);
335 fprintf(stderr, "~??? %s %lu", attr_name, attr_value);
338 fprintf(stderr, "\n");
/* np_walk_new_nodes() callback for the machine-readable stream output mode:
 * write one "-type seqn depth name" record to the node stream. */
343 np_stream_formatted_node(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
344 fprintf(st->node_stream_fh, "-%u %lu %u ",
345 npath_node->type, npath_node->seqn, (unsigned)npath_node->depth
347 np_print_node_name(aTHX_ st->node_stream_fh, npath_node);
348 fprintf(st->node_stream_fh, "\n");
/* add_attr_cb implementation for the stream output mode: flush any new
 * ancestor nodes, then write either an attribute record ("type seqn ...")
 * or a leaf-size record ("L seqn size name"). */
353 np_stream_node_path_info(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
355 if (!attr_type && !attr_value)
356 return 0; /* ignore zero sized leaf items */
357 np_walk_new_nodes(aTHX_ st, npath_node, NULL, np_stream_formatted_node);
358 if (attr_type) { /* Attribute type, name and value */
359 fprintf(st->node_stream_fh, "%lu %lu ", attr_type, npath_node->seqn);
361 else { /* Leaf name and memory size */
362 fprintf(st->node_stream_fh, "L %lu ", npath_node->seqn);
364 fprintf(st->node_stream_fh, "%lu %s\n", attr_value, attr_name);
369 #endif /* PATH_TRACKING */
373 Checks to see if thing is in the bitstring.
374 Returns true or false, and
375 notes thing in the segmented bitstring.
/* Record pointer p in the segmented tracking bitmap (see the tree-structure
 * comment above).  Returns FALSE for NULL or an already-seen pointer; on a
 * first sighting the bit is set so subsequent calls return FALSE.
 * Also dereferences one byte of *p so that an invalid pointer faults here,
 * presumably to be caught by the surrounding TRY_TO_CATCH_SEGV handling --
 * TODO confirm against the elided handler code. */
378 check_new(struct state *st, const void *const p) {
379 unsigned int bits = 8 * sizeof(void*);
380 const size_t raw_p = PTR2nat(p);
381 /* This effectively rotates the value right by the number of low always-0
382 bits in an aligned pointer. The assumption is that most (if not all)
383 pointers are aligned, and these will be in the same chain of nodes
384 (and hence hot in the cache) but we can still deal with any unaligned
386 const size_t cooked_p
387 = (raw_p >> ALIGN_BITS) | (raw_p << (bits - ALIGN_BITS));
388 const U8 this_bit = 1 << (cooked_p & 0x7);
392 void **tv_p = (void **) (st->tracking);
394 if (NULL == p) return FALSE;
/* probe the pointee; a bad pointer is reported below if dangle_whine is set */
396 const char c = *(const char *)p;
399 if (st->dangle_whine)
400 warn( "Devel::Size: Encountered invalid pointer: %p\n", p );
406 /* bits now 24 (32 bit pointers) or 56 (64 bit pointers) */
408 /* First level is always present. */
410 i = (unsigned int)((cooked_p >> bits) & 0xFF);
412 Newxz(tv_p[i], 256, void *);
413 tv_p = (void **)(tv_p[i]);
415 } while (bits > LEAF_BITS + BYTE_BITS);
416 /* bits now 16 always */
417 #if !defined(MULTIPLICITY) || PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8)
418 /* 5.8.8 and earlier have an assert() macro that uses Perl_croak, hence needs
419 a my_perl under multiplicity */
422 leaf_p = (U8 **)tv_p;
423 i = (unsigned int)((cooked_p >> bits) & 0xFF);
425 Newxz(leaf_p[i], 1 << LEAF_BITS, U8);
430 i = (unsigned int)((cooked_p >> BYTE_BITS) & LEAF_MASK);
432 if(leaf[i] & this_bit)
/* Recursively free one level of the pointer-tracking tree built by
 * check_new(), descending into each populated child slot. */
440 free_tracking_at(void **tv, int level)
448 free_tracking_at((void **) tv[i], level);
/* Tear down a struct state: run the optional free_state_cb hook, free the
 * callback's private data, then free the whole pointer-tracking tree. */
462 free_state(pTHX_ struct state *st)
464 const int top_level = (sizeof(void *) * 8 - LEAF_BITS - BYTE_BITS) / 8;
465 if (st->free_state_cb)
466 st->free_state_cb(aTHX_ st);
467 if (st->state_cb_data)
468 Safefree(st->state_cb_data);
469 free_tracking_at((void **)st->tracking, top_level);
473 /* For now, this is somewhat a compatibility bodge until the plan comes
474 together for fine grained recursion control. total_size() would recurse into
475 hash and array members, whereas sv_size() would not. However, sv_size() is
476 called with CvSTASH() of a CV, which means that if it (also) starts to
477 recurse fully, then the size of any CV now becomes the size of the entire
478 symbol table reachable from it, and potentially the entire symbol table, if
479 any subroutine makes a reference to a global (such as %SIG). The historical
480 implementation of total_size() didn't report "everything", and changing the
481 only available size to "everything" doesn't feel at all useful. */
483 #define NO_RECURSION 0
484 #define SOME_RECURSION 1
485 #define TOTAL_SIZE_RECURSION 2
487 static void sv_size(pTHX_ struct state *, pPATH, const SV *const, const int recurse);
503 , OPc_CONDOP /* 12 */
/* Determine which opclass (OPc_*) describes the concrete struct behind an
 * OP, so op_size_class() knows how many bytes to add and which members to
 * follow.  Special-cases a few op types, then dispatches on the
 * OA_CLASS_MASK bits of PL_opargs; falls through to a warning + BASEOP. */
512 cc_opclass(const OP * const o)
518 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
520 if (o->op_type == OP_SASSIGN)
521 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
524 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_AELEMFAST)
528 if ((o->op_type == OP_TRANS)) {
532 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
564 #ifdef OA_PVOP_OR_SVOP
565 case OA_PVOP_OR_SVOP: TAG;
567 * Character translations (tr///) are usually a PVOP, keeping a
568 * pointer to a table of shorts used to look up translations.
569 * Under utf8, however, a simple table isn't practical; instead,
570 * the OP is an SVOP, and the SV is a reference to a swash
571 * (i.e., an RV pointing to an HV).
573 return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF))
574 ? OPc_SVOP : OPc_PVOP;
583 case OA_BASEOP_OR_UNOP: TAG;
585 * UNI(OP_foo) in toke.c returns token UNI or FUNC1 depending on
586 * whether parens were seen. perly.y uses OPf_SPECIAL to
587 * signal whether a BASEOP had empty parens or none.
588 * Some other UNOPs are created later, though, so the best
589 * test is OPf_KIDS, which is set in newUNOP.
591 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
593 case OA_FILESTATOP: TAG;
595 * The file stat OPs are created via UNI(OP_foo) in toke.c but use
596 * the OPf_REF flag to distinguish between OP types instead of the
597 * usual OPf_SPECIAL flag. As usual, if OPf_KIDS is set, then we
598 * return OPc_UNOP so that walkoptree can find our children. If
599 * OPf_KIDS is not set then we check OPf_REF. Without OPf_REF set
600 * (no argument to the operator) it's an OP; with OPf_REF set it's
601 * an SVOP (and op_sv is the GV for the filehandle argument).
603 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
605 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
607 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
609 case OA_LOOPEXOP: TAG;
611 * next, last, redo, dump and goto use OPf_SPECIAL to indicate that a
612 * label was omitted (in which case it's a BASEOP) or else a term was
613 * seen. In this last case, all except goto are definitely PVOP but
614 * goto is either a PVOP (with an ordinary constant label), an UNOP
615 * with OPf_STACKED (with a non-constant non-sub) or an UNOP for
616 * OP_REFGEN (with goto &sub) in which case OPf_STACKED also seems to
619 if (o->op_flags & OPf_STACKED)
621 else if (o->op_flags & OPf_SPECIAL)
631 warn("Devel::Size: Can't determine class of operator %s, assuming BASEOP\n",
632 PL_op_name[o->op_type]);
638 /* Figure out how much magic is attached to the SV and return the
/* Walk the MAGIC chain of an SvMAGICAL SV, adding the size of each MAGIC
 * struct plus whatever mg_obj/mg_ptr point at (SVs, utf8 caches, raw
 * buffers).  Stops at the first MAGIC already seen (or NULL). */
641 magic_size(pTHX_ const SV * const thing, struct state *st, pPATH) {
642 dNPathNodes(1, NPathArg);
643 MAGIC *magic_pointer = SvMAGIC(thing); /* caller ensures thing is SvMAGICAL */
645 /* push a dummy node for NPathSetNode to update inside the while loop */
646 NPathPushNode("dummy", NPtype_NAME);
648 /* Have we seen the magic pointer? (NULL has always been seen before) */
649 while (check_new(st, magic_pointer)) {
651 NPathSetNode(magic_pointer, NPtype_MAGIC);
653 ADD_SIZE(st, "mg", sizeof(MAGIC));
654 /* magic vtables aren't freed when magic is freed, so don't count them.
655 (They are static structures. Anything that assumes otherwise is buggy.)
660 /* XXX only chase mg_obj if mg->mg_flags & MGf_REFCOUNTED ? */
661 sv_size(aTHX_ st, NPathLink("mg_obj"), magic_pointer->mg_obj, TOTAL_SIZE_RECURSION);
/* HEf_SVKEY means mg_ptr is actually an SV*, not a char buffer */
662 if (magic_pointer->mg_len == HEf_SVKEY) {
663 sv_size(aTHX_ st, NPathLink("mg_ptr"), (SV *)magic_pointer->mg_ptr, TOTAL_SIZE_RECURSION);
665 #if defined(PERL_MAGIC_utf8) && defined (PERL_MAGIC_UTF8_CACHESIZE)
666 else if (magic_pointer->mg_type == PERL_MAGIC_utf8) {
667 if (check_new(st, magic_pointer->mg_ptr)) {
668 ADD_SIZE(st, "PERL_MAGIC_utf8", PERL_MAGIC_UTF8_CACHESIZE * 2 * sizeof(STRLEN));
672 /* XXX also handle mg->mg_type == PERL_MAGIC_utf8 ? */
673 else if (magic_pointer->mg_len > 0) {
674 if(0)do_magic_dump(0, Perl_debug_log, magic_pointer, 0, 0, FALSE, 0);
675 if (check_new(st, magic_pointer->mg_ptr)) {
676 ADD_SIZE(st, "mg_len", magic_pointer->mg_len);
680 /* Get the next in the chain */
681 magic_pointer = magic_pointer->mg_moremagic;
684 if (st->dangle_whine)
685 warn( "Devel::Size: Encountered bad magic at: %p\n", magic_pointer );
690 #define check_new_and_strlen(st, p, ppath) S_check_new_and_strlen(aTHX_ st, p, ppath)
/* If the char pointer p hasn't been counted before, add the length of its
 * NUL-terminated string (including the terminator) under a node named after
 * the caller's path-node id. */
692 S_check_new_and_strlen(pTHX_ struct state *st, const char *const p, pPATH) {
693 dNPathNodes(1, NPathArg->prev);
694 if(check_new(st, p)) {
695 NPathPushNode(NPathArg->id, NPtype_NAME);
696 ADD_SIZE(st, NPathArg->id, 1 + strlen(p));
/* Add the size of a compiled regexp the first time it is seen.  The regexp
 * struct layout changed at 5.11, hence the two branches; sizes are known to
 * be approximate (see the carp below). */
701 regex_size(pTHX_ const REGEXP * const baseregex, struct state *st, pPATH) {
702 dNPathNodes(1, NPathArg);
703 if(!check_new(st, baseregex))
705 NPathPushNode("regex_size", NPtype_NAME);
706 ADD_SIZE(st, "REGEXP", sizeof(REGEXP));
707 #if (PERL_VERSION < 11)
708 /* Note the size of the paren offset thing */
709 ADD_SIZE(st, "nparens", sizeof(I32) * baseregex->nparens * 2);
710 ADD_SIZE(st, "precomp", strlen(baseregex->precomp));
712 ADD_SIZE(st, "regexp", sizeof(struct regexp));
713 ADD_SIZE(st, "nparens", sizeof(I32) * SvANY(baseregex)->nparens * 2);
714 /*ADD_SIZE(st, strlen(SvANY(baseregex)->subbeg));*/
716 if (st->go_yell && !st->regex_whine) {
717 carp("Devel::Size: Calculated sizes for compiled regexes are incompatible, and probably always will be");
/* Add the size of a hash-entry key (HEK) the first time it is seen; the
 * per-key overhead differs by perl version (struct he vs shared_he prefix).
 * The `shared` flag presumably selects the shared-storage accounting --
 * the guarding condition is not visible here, verify in full source. */
723 hek_size(pTHX_ struct state *st, HEK *hek, U32 shared, pPATH)
725 dNPathNodes(1, NPathArg);
727 /* Hash keys can be shared. Have we seen this before? */
728 if (!check_new(st, hek))
730 NPathPushNode("hek", NPtype_NAME);
731 ADD_SIZE(st, "hek_len", HEK_BASESIZE + hek->hek_len
733 + 1 /* No hash key flags prior to 5.8.0 */
739 #if PERL_VERSION < 10
740 ADD_SIZE(st, "he", sizeof(struct he));
742 ADD_SIZE(st, "shared_he", STRUCT_OFFSET(struct shared_he, shared_he_hek));
/* Add the size of a refcounted_he chain (cop hints hash entries), following
 * the ->refcounted_he_next links recursively. */
748 refcounted_he_size(pTHX_ struct state *st, struct refcounted_he *he, pPATH)
750 dNPathNodes(1, NPathArg);
751 if (!check_new(st, he))
753 NPathPushNode("refcounted_he_size", NPtype_NAME);
754 ADD_SIZE(st, "refcounted_he", sizeof(struct refcounted_he));
/* NOTE(review): NPtype_NAME as the size argument looks like a typo --
 * expected a byte count for the entry's data here; verify intended value. */
757 ADD_SIZE(st, "refcounted_he_data", NPtype_NAME);
759 hek_size(aTHX_ st, he->refcounted_he_hek, 0, NPathLink("refcounted_he_hek"));
762 if (he->refcounted_he_next)
763 refcounted_he_size(aTHX_ st, he->refcounted_he_next, NPathLink("refcounted_he_next"));
766 static void op_size_class(pTHX_ const OP * const baseop, opclass op_class, struct state *st, pPATH);
/* Measure an OP: classify it with cc_opclass() and delegate to
 * op_size_class() for the class-specific accounting. */
769 op_size(pTHX_ const OP * const baseop, struct state *st, pPATH)
771 op_size_class(aTHX_ baseop, cc_opclass(baseop), st, NPathArg);
/* Add the size of one OP of a known opclass, then recurse into every OP and
 * SV it references (op_next chain, children, regexps, COP strings/hashes). */
775 op_size_class(pTHX_ const OP * const baseop, opclass op_class, struct state *st, pPATH)
777 /* op_size recurses to follow the chain of opcodes. For the node path we
778 * don't want the chain to be 'nested' in the path so we use dNPathUseParent().
779 * Also, to avoid a link-to-a-link the caller should use NPathLinkAndNode()
780 * instead of NPathLink().
782 dNPathUseParent(NPathArg);
786 if(!check_new(st, baseop))
789 op_size(aTHX_ baseop->op_next, st, NPathOpLink);
792 case OPc_BASEOP: TAG;
793 ADD_SIZE(st, "op", sizeof(struct op));
796 ADD_SIZE(st, "unop", sizeof(struct unop));
797 op_size(aTHX_ ((UNOP *)baseop)->op_first, st, NPathOpLink);
800 ADD_SIZE(st, "binop", sizeof(struct binop));
801 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
802 op_size(aTHX_ ((BINOP *)baseop)->op_last, st, NPathOpLink);
805 ADD_SIZE(st, "logop", sizeof(struct logop));
806 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
807 op_size(aTHX_ ((LOGOP *)baseop)->op_other, st, NPathOpLink);
810 case OPc_CONDOP: TAG;
811 ADD_SIZE(st, "condop", sizeof(struct condop));
812 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
813 op_size(aTHX_ ((CONDOP *)baseop)->op_true, st, NPathOpLink);
814 op_size(aTHX_ ((CONDOP *)baseop)->op_false, st, NPathOpLink);
817 case OPc_LISTOP: TAG;
818 ADD_SIZE(st, "listop", sizeof(struct listop));
819 op_size(aTHX_ ((LISTOP *)baseop)->op_first, st, NPathOpLink);
820 op_size(aTHX_ ((LISTOP *)baseop)->op_last, st, NPathOpLink);
823 ADD_SIZE(st, "pmop", sizeof(struct pmop));
824 op_size(aTHX_ ((PMOP *)baseop)->op_first, st, NPathOpLink);
825 op_size(aTHX_ ((PMOP *)baseop)->op_last, st, NPathOpLink);
826 #if PERL_VERSION < 9 || (PERL_VERSION == 9 && PERL_SUBVERSION < 5)
827 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplroot, st, NPathOpLink);
828 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplstart, st, NPathOpLink);
830 /* This is defined away in perl 5.8.x, but it is in there for
833 regex_size(aTHX_ PM_GETRE((PMOP *)baseop), st, NPathLink("PM_GETRE"));
835 regex_size(aTHX_ ((PMOP *)baseop)->op_pmregexp, st, NPathLink("op_pmregexp"));
839 ADD_SIZE(st, "svop", sizeof(struct svop));
840 if (!(baseop->op_type == OP_AELEMFAST
841 && baseop->op_flags & OPf_SPECIAL)) {
842 /* not an OP_PADAV replacement */
843 sv_size(aTHX_ st, NPathLink("SVOP"), ((SVOP *)baseop)->op_sv, SOME_RECURSION);
848 ADD_SIZE(st, "padop", sizeof(struct padop));
853 ADD_SIZE(st, "gvop", sizeof(struct gvop));
854 sv_size(aTHX_ st, NPathLink("GVOP"), ((GVOP *)baseop)->op_gv, SOME_RECURSION);
858 check_new_and_strlen(st, ((PVOP *)baseop)->op_pv, NPathLink("op_pv"));
861 ADD_SIZE(st, "loop", sizeof(struct loop));
862 op_size(aTHX_ ((LOOP *)baseop)->op_first, st, NPathOpLink);
863 op_size(aTHX_ ((LOOP *)baseop)->op_last, st, NPathOpLink);
864 op_size(aTHX_ ((LOOP *)baseop)->op_redoop, st, NPathOpLink);
865 op_size(aTHX_ ((LOOP *)baseop)->op_nextop, st, NPathOpLink);
866 op_size(aTHX_ ((LOOP *)baseop)->op_lastop, st, NPathOpLink);
872 basecop = (COP *)baseop;
873 ADD_SIZE(st, "cop", sizeof(struct cop));
875 /* Change 33656 by nicholas@mouse-mill on 2008/04/07 11:29:51
876 Eliminate cop_label from struct cop by storing a label as the first
877 entry in the hints hash. Most statements don't have labels, so this
878 will save memory. Not sure how much.
879 The check below will incorrectly fail on bleadperls
880 before 5.11 @33656, but later than 5.10, producing slightly too
881 small memory sizes on these Perls. */
882 #if (PERL_VERSION < 11)
883 check_new_and_strlen(st, basecop->cop_label, NPathLink("cop_label"));
886 check_new_and_strlen(st, basecop->cop_file, NPathLink("cop_file"));
887 check_new_and_strlen(st, basecop->cop_stashpv, NPathLink("cop_stashpv"));
889 if (SvREFCNT(basecop->cop_stash) == 1) /* XXX hack? */
890 sv_size(aTHX_ st, NPathLink("cop_stash"), (SV *)basecop->cop_stash, SOME_RECURSION);
891 sv_size(aTHX_ st, NPathLink("cop_filegv"), (SV *)basecop->cop_filegv, SOME_RECURSION);
894 hh = CopHINTHASH_get(basecop);
895 refcounted_he_size(aTHX_ st, hh, NPathLink("cop_hints_hash"));
903 if (st->dangle_whine)
904 warn( "Devel::Size: Encountered dangling pointer in opcode at: %p\n", baseop );
908 #if PERL_VERSION < 8 || PERL_SUBVERSION < 9
913 # define MAYBE_PURIFY(normal, pure) (pure)
914 # define MAYBE_OFFSET(struct_name, member) 0
916 # define MAYBE_PURIFY(normal, pure) (normal)
917 # define MAYBE_OFFSET(struct_name, member) STRUCT_OFFSET(struct_name, member)
/* Size of the "body" struct for each SV type, indexed by SvTYPE().
 * One table variant per perl version range (the body layouts changed at
 * 5.10.0, 5.10.1 and 5.13); MAYBE_PURIFY/MAYBE_OFFSET (defined above)
 * account for purify builds vs the allocated-prefix body optimisation. */
920 const U8 body_sizes[SVt_LAST] = {
923 MAYBE_PURIFY(sizeof(IV), sizeof(XPVIV)), /* SVt_IV */
924 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
925 sizeof(XRV), /* SVt_RV */
926 sizeof(XPV), /* SVt_PV */
927 sizeof(XPVIV), /* SVt_PVIV */
928 sizeof(XPVNV), /* SVt_PVNV */
929 sizeof(XPVMG), /* SVt_PVMG */
930 sizeof(XPVBM), /* SVt_PVBM */
931 sizeof(XPVLV), /* SVt_PVLV */
932 sizeof(XPVAV), /* SVt_PVAV */
933 sizeof(XPVHV), /* SVt_PVHV */
934 sizeof(XPVCV), /* SVt_PVCV */
935 sizeof(XPVGV), /* SVt_PVGV */
936 sizeof(XPVFM), /* SVt_PVFM */
937 sizeof(XPVIO) /* SVt_PVIO */
938 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
942 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
944 MAYBE_PURIFY(sizeof(xpv_allocated), sizeof(XPV)), /* SVt_PV */
945 MAYBE_PURIFY(sizeof(xpviv_allocated), sizeof(XPVIV)),/* SVt_PVIV */
946 sizeof(XPVNV), /* SVt_PVNV */
947 sizeof(XPVMG), /* SVt_PVMG */
948 sizeof(XPVGV), /* SVt_PVGV */
949 sizeof(XPVLV), /* SVt_PVLV */
950 MAYBE_PURIFY(sizeof(xpvav_allocated), sizeof(XPVAV)),/* SVt_PVAV */
951 MAYBE_PURIFY(sizeof(xpvhv_allocated), sizeof(XPVHV)),/* SVt_PVHV */
952 MAYBE_PURIFY(sizeof(xpvcv_allocated), sizeof(XPVCV)),/* SVt_PVCV */
953 MAYBE_PURIFY(sizeof(xpvfm_allocated), sizeof(XPVFM)),/* SVt_PVFM */
954 sizeof(XPVIO), /* SVt_PVIO */
955 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
959 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
961 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
962 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
963 sizeof(XPVNV), /* SVt_PVNV */
964 sizeof(XPVMG), /* SVt_PVMG */
965 sizeof(XPVGV), /* SVt_PVGV */
966 sizeof(XPVLV), /* SVt_PVLV */
967 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
968 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
969 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
970 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
971 sizeof(XPVIO) /* SVt_PVIO */
972 #elif PERL_VERSION < 13
976 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
977 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
978 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
979 sizeof(XPVNV), /* SVt_PVNV */
980 sizeof(XPVMG), /* SVt_PVMG */
981 sizeof(regexp) - MAYBE_OFFSET(regexp, xpv_cur), /* SVt_REGEXP */
982 sizeof(XPVGV), /* SVt_PVGV */
983 sizeof(XPVLV), /* SVt_PVLV */
984 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
985 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
986 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
987 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
988 sizeof(XPVIO) /* SVt_PVIO */
993 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
994 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
995 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
996 sizeof(XPVNV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVNV */
997 sizeof(XPVMG), /* SVt_PVMG */
998 sizeof(regexp), /* SVt_REGEXP */
999 sizeof(XPVGV), /* SVt_PVGV */
1000 sizeof(XPVLV), /* SVt_PVLV */
1001 sizeof(XPVAV), /* SVt_PVAV */
1002 sizeof(XPVHV), /* SVt_PVHV */
1003 sizeof(XPVCV), /* SVt_PVCV */
1004 sizeof(XPVFM), /* SVt_PVFM */
1005 sizeof(XPVIO) /* SVt_PVIO */
1010 /* based on Perl_do_dump_pad() - wraps sv_size and adds ADD_ATTR calls for the pad names */
/* Walk a CV's PADLIST: for every pad slot emit an attribute describing the
 * pad name (fake/outer lexical, real lexical, or PADTMP), then size the
 * padlist AV itself via sv_size().  (See Perl_do_dump_pad for the model.) */
1012 padlist_size(pTHX_ struct state *const st, pPATH, PADLIST *padlist,
1015 dNPathUseParent(NPathArg);
/* NOTE(review): the "0 &&" disables the seen-before check here, so the
 * padlist is always walked -- presumably dedup happens in sv_size below;
 * confirm this is intentional. */
1022 if( 0 && !check_new(st, padlist))
1025 pad_name = MUTABLE_AV(*av_fetch(MUTABLE_AV(padlist), 0, FALSE));
1026 pname = AvARRAY(pad_name);
1028 for (ix = 1; ix <= AvFILLp(pad_name); ix++) {
1029 const SV *namesv = pname[ix];
1030 if (namesv && namesv == &PL_sv_undef) {
1034 /* SvFAKE: On a pad name SV, that slot in the frame AV is a REFCNT'ed reference to a lexical from "outside" */
1036 ADD_ATTR(st, NPattr_PADFAKE, SvPVX_const(namesv), ix);
1038 ADD_ATTR(st, NPattr_PADNAME, SvPVX_const(namesv), ix);
1041 ADD_ATTR(st, NPattr_PADTMP, "SVs_PADTMP", ix);
1045 sv_size(aTHX_ st, NPathArg, (SV*)padlist, recurse);
1050 sv_size(pTHX_ struct state *const st, pPATH, const SV * const orig_thing,
1051 const int recurse) {
1052 const SV *thing = orig_thing;
1053 dNPathNodes(3, NPathArg);
1056 if(!check_new(st, orig_thing))
1059 type = SvTYPE(thing);
1060 if (type > SVt_LAST) {
1061 warn("Devel::Size: Unknown variable type: %d encountered\n", type);
1064 NPathPushNode(thing, NPtype_SV);
1065 ADD_SIZE(st, "sv_head", sizeof(SV));
1066 ADD_SIZE(st, "sv_body", body_sizes[type]);
1069 #if (PERL_VERSION < 11)
1070 /* Is it a reference? */
1075 if(recurse && SvROK(thing)) /* XXX maybe don't follow weakrefs */
1076 sv_size(aTHX_ st, (SvWEAKREF(thing) ? NPathLink("weakRV") : NPathLink("RV")), SvRV_const(thing), recurse);
1080 /* Is there anything in the array? */
1081 if (AvMAX(thing) != -1) {
1082 /* an array with 10 slots has AvMax() set to 9 - te 2007-04-22 */
1083 ADD_SIZE(st, "av_max", sizeof(SV *) * (AvMAX(thing) + 1));
1084 dbg_printf(("total_size: %li AvMAX: %li av_len: $i\n", st->total_size, AvMAX(thing), av_len((AV*)thing)));
1086 if (recurse >= st->min_recurse_threshold) {
1087 SSize_t i = AvFILLp(thing) + 1;
1090 ADD_PRE_ATTR(st, 0, "index", i);
1091 sv_size(aTHX_ st, NPathLink("AVelem"), AvARRAY(thing)[i], recurse);
1095 /* Add in the bits on the other side of the beginning */
1097 dbg_printf(("total_size %li, sizeof(SV *) %li, AvARRAY(thing) %li, AvALLOC(thing)%li , sizeof(ptr) %li \n",
1098 st->total_size, sizeof(SV*), AvARRAY(thing), AvALLOC(thing), sizeof( thing )));
1100 /* under Perl 5.8.8 64bit threading, AvARRAY(thing) was a pointer while AvALLOC was 0,
1101 resulting in grossly overstated sized for arrays. Technically, this shouldn't happen... */
1102 if (AvALLOC(thing) != 0) {
1103 ADD_SIZE(st, "AvALLOC", (sizeof(SV *) * (AvARRAY(thing) - AvALLOC(thing))));
1105 #if (PERL_VERSION < 9)
1106 /* Is there something hanging off the arylen element?
1107 Post 5.9.something this is stored in magic, so will be found there,
1108 and Perl_av_arylen_p() takes a non-const AV*, hence compilers rightly
1109 complain about AvARYLEN() passing thing to it. */
1110 sv_size(aTHX_ st, NPathLink("ARYLEN"), AvARYLEN(thing), recurse);
/* ---- sv_size() fragment: SVt_PVHV (hash) body accounting ---- */
1115 /* Now the array of buckets */
1116 ADD_SIZE(st, "hv_max", (sizeof(HE *) * (HvMAX(thing) + 1)));
1117 if (HvENAME(thing)) {
1118 ADD_ATTR(st, NPattr_NAME, HvENAME(thing), 0);
1120 /* Now walk the bucket chain */
1121 if (HvARRAY(thing)) {
1124 for (cur_bucket = 0; cur_bucket <= HvMAX(thing); cur_bucket++) {
1125 cur_entry = *(HvARRAY(thing) + cur_bucket);
1127 /* XXX a HE should probably be a node so the keys and values are seen as pairs */
1128 ADD_SIZE(st, "he", sizeof(HE));
1129 hek_size(aTHX_ st, cur_entry->hent_hek, HvSHAREKEYS(thing), NPathLink("hent_hek"));
1130 if (recurse >= st->min_recurse_threshold) {
/* The shared string table is special: its HeVAL is a reference count,
 * not an SV*, so it must never be sized as an SV. */
1131 if (orig_thing == (SV*)PL_strtab) {
1132 /* For PL_strtab the HeVAL is used as a refcnt */
1133 ADD_SIZE(st, "shared_hek", HeKLEN(cur_entry));
1136 /* I've seen a PL_strtab HeVAL == 0xC and 0x40C etc
1137 * just running perl -Mblib -Mstrict -MDevel::Size=:all -MCarp -e 'warn perl_size()'
1138 * but it seemed like a corruption - it would come and go with irrelevant code changes.
1139 * so we protect against that here, but I'd like to know the cause.
1141 if (PTR2UV(HeVAL(cur_entry)) > 0xFFF)
1142 sv_size(aTHX_ st, NPathLink("HeVAL"), HeVAL(cur_entry), recurse);
1143 else warn("skipped suspect HeVAL %p", HeVAL(cur_entry));
1146 cur_entry = cur_entry->hent_next;
1152 /* This direct access is arguably "naughty": */
1153 struct mro_meta *meta = HvAUX(thing)->xhv_mro_meta;
/* NOTE(review): this condition is also true for e.g. 5.10.9 because of
 * the "|| PERL_SUBVERSION > 8" arm; presumably it means 5.13.9+, where
 * xhv_name_u/xhv_name_count appeared - TODO confirm intended versions. */
1154 #if PERL_VERSION > 13 || PERL_SUBVERSION > 8
1156 I32 count = HvAUX(thing)->xhv_name_count;
1159 HEK **names = HvAUX(thing)->xhv_name_u.xhvnameu_names;
1163 hek_size(aTHX_ st, names[count], 1, NPathLink("HvAUXelem"));
1168 hek_size(aTHX_ st, HvNAME_HEK(thing), 1, NPathLink("HvNAME_HEK"));
1171 ADD_SIZE(st, "xpvhv_aux", sizeof(struct xpvhv_aux));
/* Account for the method-resolution-order caches hanging off a stash. */
1173 ADD_SIZE(st, "mro_meta", sizeof(struct mro_meta));
1174 sv_size(aTHX_ st, NPathLink("mro_nextmethod"), (SV *)meta->mro_nextmethod, TOTAL_SIZE_RECURSION);
1175 #if PERL_VERSION > 10 || (PERL_VERSION == 10 && PERL_SUBVERSION > 0)
1176 sv_size(aTHX_ st, NPathLink("isa"), (SV *)meta->isa, TOTAL_SIZE_RECURSION);
1178 #if PERL_VERSION > 10
1179 sv_size(aTHX_ st, NPathLink("mro_linear_all"), (SV *)meta->mro_linear_all, TOTAL_SIZE_RECURSION);
1180 sv_size(aTHX_ st, NPathLink("mro_linear_current"), meta->mro_linear_current, TOTAL_SIZE_RECURSION);
1182 sv_size(aTHX_ st, NPathLink("mro_linear_dfs"), (SV *)meta->mro_linear_dfs, TOTAL_SIZE_RECURSION);
1183 sv_size(aTHX_ st, NPathLink("mro_linear_c3"), (SV *)meta->mro_linear_c3, TOTAL_SIZE_RECURSION);
1188 check_new_and_strlen(st, HvNAME_get(thing), NPathLink("HvNAME"));
/* ---- sv_size() fragment: SVt_PVFM (format) accounting ---- */
1194 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1195 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
/* Warn (at most once per state, via fm_whine) that FM sizing is partial. */
1197 if (st->go_yell && !st->fm_whine) {
1198 carp("Devel::Size: Calculated sizes for FMs are incomplete");
/* ---- sv_size() fragment: SVt_PVCV (code) accounting ---- */
1204 /* not CvSTASH, per https://rt.cpan.org/Ticket/Display.html?id=79366 */
1205 ADD_ATTR(st, NPattr_NAME, CvGV(thing) ? GvNAME(CvGV(thing)) : "UNDEFINED", 0);
1206 sv_size(aTHX_ st, NPathLink("CvGV"), (SV *)CvGV(thing), SOME_RECURSION);
1207 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1208 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
/* XSUBs have no op tree; a constant sub's value is reached via cv_const_sv. */
1209 if (CvISXSUB(thing)) {
1210 sv_size(aTHX_ st, NPathLink("cv_const_sv"), cv_const_sv((CV *)thing), recurse);
1212 if(1)op_size(aTHX_ CvSTART(thing), st, NPathLinkAndNode("CvSTART", "OPs")); /* XXX ? */
1213 op_size(aTHX_ CvROOT(thing), st, NPathLinkAndNode("CvROOT", "OPs"));
/* ---- sv_size() fragment: SVt_PVIO accounting ---- */
1218 /* Some embedded char pointers */
1219 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_top_name, NPathLink("xio_top_name"));
1220 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_fmt_name, NPathLink("xio_fmt_name"));
1221 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_bottom_name, NPathLink("xio_bottom_name"));
1222 /* Throw the GVs on the list to be walked if they're not-null */
1223 sv_size(aTHX_ st, NPathLink("xio_top_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_top_gv, recurse);
1224 sv_size(aTHX_ st, NPathLink("xio_bottom_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_bottom_gv, recurse);
1225 sv_size(aTHX_ st, NPathLink("xio_fmt_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_fmt_gv, recurse);
1227 /* Only go trotting through the IO structures if they're really
1228 trottable. If USE_PERLIO is defined we can do this. If
1229 not... we can't, so we don't even try */
1231 /* Dig into xio_ifp and xio_ofp here */
1232 warn("Devel::Size: Can't size up perlio layers yet\n");
/* ---- sv_size() fragment: SVt_PVGV and string-body accounting ---- */
1237 #if (PERL_VERSION < 9)
1242 if(isGV_with_GP(thing)) {
/* The glob's name: a shared HEK on the perls that take this branch. */
1244 hek_size(aTHX_ st, GvNAME_HEK(thing), 1, NPathLink("GvNAME_HEK"));
1246 ADD_SIZE(st, "GvNAMELEN", GvNAMELEN(thing));
1248 ADD_ATTR(st, NPattr_NAME, GvNAME_get(thing), 0);
1250 hek_size(aTHX_ st, GvFILE_HEK(thing), 1, NPathLink("GvFILE_HEK"));
1251 #elif defined(GvFILE)
1252 # if !defined(USE_ITHREADS) || (PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8))
1253 /* With ithreads, before 5.8.9, this can end up pointing to freed memory
1254 if the GV was created in an eval, as GvFILE() points to CopFILE(),
1255 and the relevant COP has been freed on scope cleanup after the eval.
1256 5.8.9 adds a binary compatible fudge that catches the vast majority
1257 of cases. 5.9.something added a proper fix, by converting the GP to
1258 use a shared hash key (properly reference counted), instead of a
1259 char * (owned by who knows? possibly no-one now) */
1260 check_new_and_strlen(st, GvFILE(thing), NPathLink("GvFILE"));
1263 /* Is there something hanging off the glob? */
/* Size the GP once (check_new) and then each of its slots as SVs. */
1264 if (check_new(st, GvGP(thing))) {
1265 ADD_SIZE(st, "GP", sizeof(GP));
1266 sv_size(aTHX_ st, NPathLink("gp_sv"), (SV *)(GvGP(thing)->gp_sv), recurse);
1267 sv_size(aTHX_ st, NPathLink("gp_av"), (SV *)(GvGP(thing)->gp_av), recurse);
1268 sv_size(aTHX_ st, NPathLink("gp_hv"), (SV *)(GvGP(thing)->gp_hv), recurse);
1269 sv_size(aTHX_ st, NPathLink("gp_cv"), (SV *)(GvGP(thing)->gp_cv), recurse);
1270 sv_size(aTHX_ st, NPathLink("gp_egv"), (SV *)(GvGP(thing)->gp_egv), recurse);
1271 sv_size(aTHX_ st, NPathLink("gp_form"), (SV *)(GvGP(thing)->gp_form), recurse);
1273 #if (PERL_VERSION >= 9)
1277 #if PERL_VERSION <= 8
/* A reference sizes its referent; a copy-on-write shared-hash scalar
 * sizes the shared HEK instead of owning a string buffer. */
1285 if(recurse && SvROK(thing))
1286 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
1287 else if (SvIsCOW_shared_hash(thing))
1288 hek_size(aTHX_ st, SvSHARED_HEK_FROM_PV(SvPVX(thing)), 1, NPathLink("SvSHARED_HEK_FROM_PV"));
1290 ADD_SIZE(st, "SvLEN", SvLEN(thing));
/* SvOOK: the string start was shifted; count the bytes hidden before SvPVX. */
1294 SvOOK_offset(thing, len);
1295 ADD_SIZE(st, "SvOOK", len);
/* ---- sv_size() fragment: magic and stash accounting for SVt_PVMG+ ---- */
1301 if (type >= SVt_PVMG) {
1302 if (SvMAGICAL(thing))
1303 magic_size(aTHX_ thing, st, NPathLink("MG"));
/* "our" pad variables keep a pointer to their declaring stash. */
1304 if (SvPAD_OUR(thing) && SvOURSTASH(thing))
1305 sv_size(aTHX_ st, NPathLink("SvOURSTASH"), (SV *)SvOURSTASH(thing), SOME_RECURSION);
1307 sv_size(aTHX_ st, NPathLink("SvSTASH"), (SV *)SvSTASH(thing), SOME_RECURSION);
/* Tear down the node stream opened by new_state(): pclose() a piped
 * stream (name beginning with '|'), fclose() a plain file, warning
 * rather than croaking on failure since this runs during cleanup. */
1314 free_memnode_state(pTHX_ struct state *st)
1316 if (st->node_stream_fh && st->node_stream_name && *st->node_stream_name) {
1317 if (*st->node_stream_name == '|') {
1318 if (pclose(st->node_stream_fh))
1319 warn("%s exited with an error status\n", st->node_stream_name);
1322 if (fclose(st->node_stream_fh))
1323 warn("Error closing %s: %s\n", st->node_stream_name, strerror(errno));
/* Allocate and zero a fresh sizing state. Honours the control variables
 * $Devel::Size::warn and $Devel::Size::dangle, pre-marks the immortal
 * SVs as seen so they are never counted, and (under PATH_TRACKING)
 * wires up the node stream named by the PERL_DMEM environment variable. */
1328 static struct state *
1334 Newxz(st, 1, struct state);
1336 st->min_recurse_threshold = TOTAL_SIZE_RECURSION;
1337 if (NULL != (warn_flag = perl_get_sv("Devel::Size::warn", FALSE))) {
1338 st->dangle_whine = st->go_yell = SvIV(warn_flag) ? TRUE : FALSE;
1340 if (NULL != (warn_flag = perl_get_sv("Devel::Size::dangle", FALSE))) {
1341 st->dangle_whine = SvIV(warn_flag) ? TRUE : FALSE;
/* Mark the immortals as already seen so they never inflate a total. */
1343 check_new(st, &PL_sv_undef);
1344 check_new(st, &PL_sv_no);
1345 check_new(st, &PL_sv_yes);
1346 #if PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 0)
1347 check_new(st, &PL_sv_placeholder);
1350 #ifdef PATH_TRACKING
1351 /* XXX quick hack */
/* PERL_DMEM, if set and non-empty, names the node stream: a leading
 * '|' pipes to a command, anything else is opened as a file; otherwise
 * the non-streaming dump callback is used. */
1352 st->node_stream_name = getenv("PERL_DMEM");
1353 if (st->node_stream_name) {
1354 if (*st->node_stream_name) {
1355 if (*st->node_stream_name == '|')
1356 st->node_stream_fh = popen(st->node_stream_name+1, "w");
1358 st->node_stream_fh = fopen(st->node_stream_name, "wb");
1359 if (!st->node_stream_fh)
1360 croak("Can't open '%s' for writing: %s", st->node_stream_name, strerror(errno));
1361 setlinebuf(st->node_stream_fh); /* XXX temporary for debugging */
1362 st->add_attr_cb = np_stream_node_path_info;
1365 st->add_attr_cb = np_dump_node_path_info;
1367 st->free_state_cb = free_memnode_state;
1373 /* XXX based on S_visit() in sv.c */
/* Sweep every SV in the arenas and size any the earlier traversal
 * missed; once traversal is complete, anything found here is a leak. */
1375 unseen_sv_size(pTHX_ struct state *st, pPATH)
1380 dNPathNodes(1, NPathArg);
1382 NPathPushNode("unseen", NPtype_NAME);
1384 /* by this point we should have visited all the SVs
1385 * so now we'll run through all the SVs via the arenas
1386 * in order to find any that we've missed for some reason.
1387 * Once the rest of the code is finding all the SVs then any
1388 * found here will be leaks.
/* As in S_visit(): an arena head's SvANY chains to the next arena and
 * its SvREFCNT holds the arena's slot count. */
1390 for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
1391 const SV * const svend = &sva[SvREFCNT(sva)];
1393 for (sv = sva + 1; sv < svend; ++sv) {
1394 if (SvTYPE(sv) != (svtype)SVTYPEMASK && SvREFCNT(sv)) {
1395 sv_size(aTHX_ st, NPathLink("arena"), sv, TOTAL_SIZE_RECURSION);
/* Freed slots should already be on the seen-list (marked when walking
 * the free chains); reaching here means bookkeeping went wrong. */
1397 else if (check_new(st, sv)) { /* sanity check */
1399 warn("unseen_sv_size encountered freed SV unexpectedly"); /* XXX warn uses an SV, I think */
/* Size the whole interpreter: everything reachable from %main:: via
 * PL_defstash, then the other main interpreter variables, then the
 * unused SV-head arena slots, and finally a sweep for stragglers. */
1406 perl_size(pTHX_ struct state *const st, pPATH)
1408 dNPathNodes(3, NPathArg);
1410 /* if(!check_new(st, interp)) return; */
1411 NPathPushNode("perl", NPtype_NAME);
1417 * unknown <== = O/S Heap size - perl - free_malloc_space
1419 /* start with PL_defstash to get everything reachable from \%main:: */
1420 sv_size(aTHX_ st, NPathLink("PL_defstash"), (SV*)PL_defstash, TOTAL_SIZE_RECURSION);
1422 NPathPushNode("others", NPtype_NAME); /* group these (typically much smaller) items */
1423 sv_size(aTHX_ st, NPathLink("PL_defgv"), (SV*)PL_defgv, TOTAL_SIZE_RECURSION);
1424 sv_size(aTHX_ st, NPathLink("PL_incgv"), (SV*)PL_incgv, TOTAL_SIZE_RECURSION);
1425 sv_size(aTHX_ st, NPathLink("PL_rs"), (SV*)PL_rs, TOTAL_SIZE_RECURSION);
1426 sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1427 sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1428 sv_size(aTHX_ st, NPathLink("PL_errors"), (SV*)PL_errors, TOTAL_SIZE_RECURSION);
1429 sv_size(aTHX_ st, NPathLink("PL_stashcache"), (SV*)PL_stashcache, TOTAL_SIZE_RECURSION);
1430 sv_size(aTHX_ st, NPathLink("PL_patchlevel"), (SV*)PL_patchlevel, TOTAL_SIZE_RECURSION);
1431 sv_size(aTHX_ st, NPathLink("PL_apiversion"), (SV*)PL_apiversion, TOTAL_SIZE_RECURSION);
1432 sv_size(aTHX_ st, NPathLink("PL_registered_mros"), (SV*)PL_registered_mros, TOTAL_SIZE_RECURSION);
1434 sv_size(aTHX_ st, NPathLink("PL_regex_padav"), (SV*)PL_regex_padav, TOTAL_SIZE_RECURSION);
1436 sv_size(aTHX_ st, NPathLink("PL_warnhook"), (SV*)PL_warnhook, TOTAL_SIZE_RECURSION);
1437 sv_size(aTHX_ st, NPathLink("PL_diehook"), (SV*)PL_diehook, TOTAL_SIZE_RECURSION);
1438 sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1439 sv_size(aTHX_ st, NPathLink("PL_main_cv"), (SV*)PL_main_cv, TOTAL_SIZE_RECURSION);
1440 sv_size(aTHX_ st, NPathLink("PL_main_root"), (SV*)PL_main_root, TOTAL_SIZE_RECURSION);
1441 sv_size(aTHX_ st, NPathLink("PL_main_start"), (SV*)PL_main_start, TOTAL_SIZE_RECURSION);
1442 /* TODO PL_pidstatus */
1443 /* TODO PL_stashpad */
1444 op_size(aTHX_ (OP *)&PL_compiling, st, NPathLink("PL_compiling"));
1446 /* TODO stacks: cur, main, tmps, mark, scope, save */
1447 /* TODO PL_exitlist */
1448 /* TODO PL_reentrant_buffers etc */
1450 /* TODO PerlIO? PL_known_layers PL_def_layerlist PL_perlio_fd_refcnt etc */
1452 /* TODO anything missed? */
1454 /* --- by this point we should have seen all reachable SVs --- */
1456 /* in theory we shouldn't have any elements in PL_strtab that haven't been seen yet */
1457 sv_size(aTHX_ st, NPathLink("PL_strtab-unseen"), (SV*)PL_strtab, TOTAL_SIZE_RECURSION);
1459 /* unused space in sv head arenas */
1463 # define SvARENA_CHAIN(sv) SvANY(sv) /* XXX breaks encapsulation*/
/* Walk the free-SV-head chain, marking each head as seen and counting it. */
1464 while ((p = MUTABLE_SV(SvARENA_CHAIN(p)))) {
1465 if (!check_new(st, p)) /* sanity check */
1466 warn("Free'd SV head unexpectedly already seen");
1469 NPathPushNode("unused_sv_heads", NPtype_NAME);
1470 ADD_SIZE(st, "sv", free_heads * sizeof(SV));
1473 /* XXX iterate over bodies_by_type and crawl the free chains for each */
1475 /* iterate over all SVs to find any we've not accounted for yet */
1476 /* once the code above is visiting all SVs, any found here have been leaked */
1477 unseen_sv_size(aTHX_ st, NPathLink("unaccounted"));
1481 MODULE = Devel::Memory PACKAGE = Devel::Memory
1489 total_size = TOTAL_SIZE_RECURSION
1492 SV *thing = orig_thing;
1493 struct state *st = new_state(aTHX);
1495 /* If they passed us a reference then dereference it. This is the
1496 only way we can check the sizes of arrays and hashes */
1498 thing = SvRV(thing);
/* ix carries the per-alias recursion mode (see the ALIAS entry above),
 * so size() and total_size() share this one implementation. */
1501 sv_size(aTHX_ st, NULL, thing, ix);
1502 RETVAL = st->total_size;
1503 free_state(aTHX_ st);
1512 /* just the current perl interpreter */
1513 struct state *st = new_state(aTHX);
1514 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1515 perl_size(aTHX_ st, NULL);
1516 RETVAL = st->total_size;
1517 free_state(aTHX_ st);
/* heap_size(): the interpreter plus malloc bookkeeping, reported in the
 * context of the total heap as described by mstats() (available where
 * malloc/malloc.h is - i.e. OS X; see the #ifdef below). */
1526 /* the current perl interpreter plus malloc, in the context of total heap size */
1527 # ifdef _MALLOC_MALLOC_H_ /* OSX. Not sure where else mstats is available */
1531 /* some systems have the SVID2/XPG mallinfo structure and function */
/* Snapshot the allocator stats before we allocate our own state. */
1532 struct mstats ms = mstats(); /* mstats() first */
1534 struct state *st = new_state(aTHX);
1535 dNPathNodes(1, NULL);
1536 NPathPushNode("heap", NPtype_NAME);
1538 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1540 perl_size(aTHX_ st, NPathLink("perl_interp"));
1542 NPathSetNode("free_malloc_space", NPtype_NAME);
1543 ADD_SIZE(st, "bytes_free", ms.bytes_free);
1544 ADD_ATTR(st, NPattr_NOTE, "bytes_total", ms.bytes_total);
1545 ADD_ATTR(st, NPattr_NOTE, "bytes_used", ms.bytes_used);
1546 ADD_ATTR(st, NPattr_NOTE, "chunks_used", ms.chunks_used);
1547 ADD_ATTR(st, NPattr_NOTE, "chunks_free", ms.chunks_free);
1548 /* TODO get heap size from OS and add a node: unknown = heapsize - perl - ms.bytes_free */
1549 /* for now we use bytes_total as an approximation */
1550 NPathSetNode("unknown", NPtype_NAME);
1551 ADD_SIZE(st, "unknown", ms.bytes_total - st->total_size);
1556 RETVAL = st->total_size;
1557 free_state(aTHX_ st);