5 * Refactor this to split out D:M code from Devel::Size code.
7 * Start migrating Devel::Size's Size.xs towards the new code.
9 * ADD_PRE_ATTR for index should check if the ptr is new first. Currently we're
10 * generating lots of ADD_PRE_ATTR's for SVs that we've already seen via other paths.
11 * That's wasteful and likely to cause subtle bugs.
13 * Give HE's their own node so keys and values can be tied together
17 #undef NDEBUG /* XXX */
20 #define PERL_NO_GET_CONTEXT
27 #include "refcounted_he.h"
29 /* Not yet in ppport.h */
31 # define CvISXSUB(cv) (CvXSUB(cv) ? TRUE : FALSE)
34 # define SvRV_const(rv) SvRV(rv)
37 # define SvOOK_offset(sv, len) STMT_START { len = SvIVX(sv); } STMT_END
40 # define SvIsCOW(sv) ((SvFLAGS(sv) & (SVf_FAKE | SVf_READONLY)) == \
41 (SVf_FAKE | SVf_READONLY))
43 #ifndef SvIsCOW_shared_hash
44 # define SvIsCOW_shared_hash(sv) (SvIsCOW(sv) && SvLEN(sv) == 0)
46 #ifndef SvSHARED_HEK_FROM_PV
47 # define SvSHARED_HEK_FROM_PV(pvx) \
48 ((struct hek*)(pvx - STRUCT_OFFSET(struct hek, hek_key)))
52 # define PL_opargs opargs
53 # define PL_op_name op_name
57 /* "structured exception" handling is a Microsoft extension to C and C++.
58 It's *not* C++ exception handling - C++ exception handling can't capture
59 SEGVs and suchlike, whereas this can. There's no known analogous
60 functionality on other platforms. */
62 # define TRY_TO_CATCH_SEGV __try
63 # define CAUGHT_EXCEPTION __except(EXCEPTION_EXECUTE_HANDLER)
65 # define TRY_TO_CATCH_SEGV if(1)
66 # define CAUGHT_EXCEPTION else
70 # define __attribute__(x)
73 #if 0 && defined(DEBUGGING)
74 #define dbg_printf(x) printf x
79 #define TAG /* printf( "# %s(%d)\n", __FILE__, __LINE__ ) */
82 /* The idea is to have a tree structure to store 1 bit per possible pointer
83 address. The lowest 16 bits are stored in a block of 8192 bytes.
84 The blocks are in a 256-way tree, indexed by the rest of the pointer.
85 This can cope with 32 and 64 bit pointers, and any address space layout,
86 without excessive memory needs. The assumption is that your CPU cache
87 works :-) (And that we're not going to bust it) */
90 #define LEAF_BITS (16 - BYTE_BITS)
91 #define LEAF_MASK 0x1FFF
93 typedef struct npath_node_st npath_node_t;
94 struct npath_node_st {
109 /* My hunch (not measured) is that for most architectures pointers will
110 start with 0 bits, hence the start of this array will be hot, and the
111 end unused. So put the flags next to the hot end. */
113 int min_recurse_threshold;
114 /* callback hooks and data */
115 void (*add_attr_cb)(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *name, UV value);
116 void (*free_state_cb)(pTHX_ struct state *st);
117 void *state_cb_data; /* free'd by free_state() after free_state_cb() call */
118 /* this stuff will be moved to state_cb_data later */
120 FILE *node_stream_fh;
121 char *node_stream_name;
124 #define ADD_SIZE(st, leafname, bytes) \
126 NPathAddSizeCb(st, leafname, bytes); \
127 (st)->total_size += (bytes); \
131 #define PATH_TRACKING
134 #define pPATH npath_node_t *NPathArg
136 /* A subtle point here is that dNPathNodes and NPathPushNode leave NP pointing
137 * to the next unused slot (though with prev already filled in)
138 * whereas NPathLink leaves NP unchanged, it just fills in the slot NP points
139 * to and passes that NP value to the function being called.
140 * seqn==0 indicates the node is new (hasn't been output yet)
142 #define dNPathNodes(nodes, prev_np) \
143 npath_node_t name_path_nodes[nodes+1]; /* +1 for NPathLink */ \
144 npath_node_t *NP = &name_path_nodes[0]; \
145 NP->seqn = NP->type = 0; NP->id = Nullch; /* safety/debug */ \
147 #define NPathPushNode(nodeid, nodetype) \
149 NP->type = nodetype; \
151 if(0)fprintf(stderr,"NPathPushNode (%p <-) %p <- [%d %s]\n", NP->prev, NP, nodetype,(char*)nodeid);\
153 NP->id = Nullch; /* safety/debug */ \
156 #define NPathSetNode(nodeid, nodetype) \
157 (NP-1)->id = nodeid; \
158 (NP-1)->type = nodetype; \
159 if(0)fprintf(stderr,"NPathSetNode (%p <-) %p <- [%d %s]\n", (NP-1)->prev, (NP-1), nodetype,(char*)nodeid);\
161 #define NPathPopNode \
164 /* dNPathUseParent points NP directly the the parents' name_path_nodes array
165 * So the function can only safely call ADD_*() but not NPathLink, unless the
166 * caller has spare nodes in its name_path_nodes.
168 #define dNPathUseParent(prev_np) npath_node_t *NP = (((prev_np+1)->prev = prev_np), prev_np+1)
170 #define NPtype_NAME 0x01
171 #define NPtype_LINK 0x02
172 #define NPtype_SV 0x03
173 #define NPtype_MAGIC 0x04
174 #define NPtype_OP 0x05
176 /* XXX these should probably be generalized into flag bits */
177 #define NPattr_LEAFSIZE 0x00
178 #define NPattr_NAME 0x01
179 #define NPattr_PADFAKE 0x02
180 #define NPattr_PADNAME 0x03
181 #define NPattr_PADTMP 0x04
182 #define NPattr_NOTE 0x05
183 #define NPattr_PRE_ATTR 0x06
185 #define _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, np) \
187 if (st->add_attr_cb) { \
188 st->add_attr_cb(aTHX_ st, np, attr_type, attr_name, attr_value); \
192 #define ADD_ATTR(st, attr_type, attr_name, attr_value) _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, NP-1)
193 #define ADD_PRE_ATTR(st, attr_type, attr_name, attr_value) \
195 assert(!attr_type); \
196 _ADD_ATTR_NP(st, NPattr_PRE_ATTR, attr_name, attr_value, NP-1); \
199 #define _NPathLink(np, nid, ntype) (((np)->id=nid), ((np)->type=ntype), ((np)->seqn=0))
200 #define NPathLink(nid) (_NPathLink(NP, nid, NPtype_LINK), NP)
201 /* add a link and a name node to the path - a special case for op_size */
202 #define NPathLinkAndNode(nid, nid2) (_NPathLink(NP, nid, NPtype_LINK), _NPathLink(NP+1, nid2, NPtype_NAME), ((NP+1)->prev=NP), (NP+1))
203 #define NPathOpLink (NPathArg)
204 #define NPathAddSizeCb(st, name, bytes) \
206 if (st->add_attr_cb) { \
207 st->add_attr_cb(aTHX_ st, NP-1, NPattr_LEAFSIZE, (name), (bytes)); \
213 #define NPathAddSizeCb(st, name, bytes)
214 #define pPATH void *npath_dummy /* XXX ideally remove */
215 #define dNPathNodes(nodes, prev_np) dNOOP
216 #define NPathLink(nodeid, nodetype) NULL
217 #define NPathOpLink NULL
218 #define ADD_ATTR(st, attr_type, attr_name, attr_value) NOOP
220 #endif /* PATH_TRACKING */
227 static const char *svtypenames[SVt_LAST] = {
229 "NULL", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVBM", "PVLV", "PVAV", "PVHV", "PVCV", "PVGV", "PVFM", "PVIO",
230 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
231 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
232 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
233 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
234 #elif PERL_VERSION < 13
235 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
237 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
242 np_print_node_name(pTHX_ FILE *fp, npath_node_t *npath_node)
244 switch (npath_node->type) {
245 case NPtype_SV: { /* id is pointer to the SV sv_size was called on */
246 const SV *sv = (SV*)npath_node->id;
247 int type = SvTYPE(sv);
248 const char *typename = (type == SVt_IV && SvROK(sv)) ? "RV" : svtypenames[type];
249 fprintf(fp, "SV(%s)", typename);
250 switch(type) { /* add some useful details */
251 case SVt_PVAV: fprintf(fp, " fill=%d/%ld", av_len((AV*)sv), AvMAX((AV*)sv)); break;
252 case SVt_PVHV: fprintf(fp, " fill=%ld/%ld", HvFILL((HV*)sv), HvMAX((HV*)sv)); break;
256 case NPtype_OP: { /* id is pointer to the OP op_size was called on */
257 const OP *op = (OP*)npath_node->id;
258 fprintf(fp, "OP(%s)", OP_NAME(op));
261 case NPtype_MAGIC: { /* id is pointer to the MAGIC struct */
262 MAGIC *magic_pointer = (MAGIC*)npath_node->id;
263 /* XXX it would be nice if we could reuse mg_names.c [sigh] */
264 fprintf(fp, "MAGIC(%c)", magic_pointer->mg_type ? magic_pointer->mg_type : '0');
268 fprintf(fp, "%s", (const char *)npath_node->id);
271 fprintf(fp, "%s", (const char *)npath_node->id);
273 default: /* assume id is a string pointer */
274 fprintf(fp, "UNKNOWN(%d,%p)", npath_node->type, npath_node->id);
281 np_dump_indent(int depth) {
283 fprintf(stderr, ": ");
287 np_walk_new_nodes(pTHX_ struct state *st,
288 npath_node_t *npath_node,
289 npath_node_t *npath_node_deeper,
290 int (*cb)(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper))
292 if (npath_node->seqn) /* node already output */
295 if (npath_node->prev) {
296 np_walk_new_nodes(aTHX_ st, npath_node->prev, npath_node, cb); /* recurse */
297 npath_node->depth = npath_node->prev->depth + 1;
299 else npath_node->depth = 0;
300 npath_node->seqn = ++st->seqn;
303 if (cb(aTHX_ st, npath_node, npath_node_deeper)) {
304 /* ignore this node */
305 assert(npath_node->prev);
306 assert(npath_node->depth);
307 assert(npath_node_deeper);
309 npath_node->seqn = --st->seqn;
310 npath_node_deeper->prev = npath_node->prev;
318 np_dump_formatted_node(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
320 PERL_UNUSED_ARG(npath_node_deeper);
321 if (0 && npath_node->type == NPtype_LINK)
323 np_dump_indent(npath_node->depth);
324 np_print_node_name(aTHX_ stderr, npath_node);
325 if (npath_node->type == NPtype_LINK)
326 fprintf(stderr, "->"); /* cosmetic */
327 fprintf(stderr, "\t\t[#%ld @%u] ", npath_node->seqn, npath_node->depth);
328 fprintf(stderr, "\n");
333 np_dump_node_path_info(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
335 if (attr_type == NPattr_LEAFSIZE && !attr_value)
336 return; /* ignore zero sized leaf items */
337 np_walk_new_nodes(aTHX_ st, npath_node, NULL, np_dump_formatted_node);
338 np_dump_indent(npath_node->depth+1);
340 case NPattr_LEAFSIZE:
341 fprintf(stderr, "+%ld %s =%ld", attr_value, attr_name, attr_value+st->total_size);
344 fprintf(stderr, "~NAMED('%s') %lu", attr_name, attr_value);
347 fprintf(stderr, "~note %s %lu", attr_name, attr_value);
352 fprintf(stderr, "~pad%lu %s %lu", attr_type, attr_name, attr_value);
355 fprintf(stderr, "~??? %s %lu", attr_name, attr_value);
358 fprintf(stderr, "\n");
362 np_stream_formatted_node(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
363 PERL_UNUSED_ARG(npath_node_deeper);
364 fprintf(st->node_stream_fh, "-%u %lu %u ",
365 npath_node->type, npath_node->seqn, (unsigned)npath_node->depth
367 np_print_node_name(aTHX_ st->node_stream_fh, npath_node);
368 fprintf(st->node_stream_fh, "\n");
373 np_stream_node_path_info(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
375 if (!attr_type && !attr_value)
376 return; /* ignore zero sized leaf items */
377 np_walk_new_nodes(aTHX_ st, npath_node, NULL, np_stream_formatted_node);
378 if (attr_type) { /* Attribute type, name and value */
379 fprintf(st->node_stream_fh, "%lu %lu ", attr_type, npath_node->seqn);
381 else { /* Leaf name and memory size */
382 fprintf(st->node_stream_fh, "L %lu ", npath_node->seqn);
384 fprintf(st->node_stream_fh, "%lu %s\n", attr_value, attr_name);
388 #endif /* PATH_TRACKING */
392 Checks to see if thing is in the bitstring.
393 Returns true or false, and
394 notes thing in the segmented bitstring.
397 check_new(struct state *st, const void *const p) {
398 unsigned int bits = 8 * sizeof(void*);
399 const size_t raw_p = PTR2nat(p);
400 /* This effectively rotates the value right by the number of low always-0
401 bits in an aligned pointer. The assmption is that most (if not all)
402 pointers are aligned, and these will be in the same chain of nodes
403 (and hence hot in the cache) but we can still deal with any unaligned
405 const size_t cooked_p
406 = (raw_p >> ALIGN_BITS) | (raw_p << (bits - ALIGN_BITS));
407 const U8 this_bit = 1 << (cooked_p & 0x7);
411 void **tv_p = (void **) (st->tracking);
413 if (NULL == p) return FALSE;
415 const char c = *(const char *)p;
419 if (st->dangle_whine)
420 warn( "Devel::Size: Encountered invalid pointer: %p\n", p );
426 /* bits now 24 (32 bit pointers) or 56 (64 bit pointers) */
428 /* First level is always present. */
430 i = (unsigned int)((cooked_p >> bits) & 0xFF);
432 Newxz(tv_p[i], 256, void *);
433 tv_p = (void **)(tv_p[i]);
435 } while (bits > LEAF_BITS + BYTE_BITS);
436 /* bits now 16 always */
437 #if !defined(MULTIPLICITY) || PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8)
438 /* 5.8.8 and early have an assert() macro that uses Perl_croak, hence needs
439 a my_perl under multiplicity */
442 leaf_p = (U8 **)tv_p;
443 i = (unsigned int)((cooked_p >> bits) & 0xFF);
445 Newxz(leaf_p[i], 1 << LEAF_BITS, U8);
450 i = (unsigned int)((cooked_p >> BYTE_BITS) & LEAF_MASK);
452 if(leaf[i] & this_bit)
460 free_tracking_at(void **tv, int level)
468 free_tracking_at((void **) tv[i], level);
482 free_state(pTHX_ struct state *st)
484 const int top_level = (sizeof(void *) * 8 - LEAF_BITS - BYTE_BITS) / 8;
485 if (st->free_state_cb)
486 st->free_state_cb(aTHX_ st);
487 if (st->state_cb_data)
488 Safefree(st->state_cb_data);
489 free_tracking_at((void **)st->tracking, top_level);
493 /* For now, this is somewhat a compatibility bodge until the plan comes
494 together for fine grained recursion control. total_size() would recurse into
495 hash and array members, whereas sv_size() would not. However, sv_size() is
496 called with CvSTASH() of a CV, which means that if it (also) starts to
497 recurse fully, then the size of any CV now becomes the size of the entire
498 symbol table reachable from it, and potentially the entire symbol table, if
499 any subroutine makes a reference to a global (such as %SIG). The historical
500 implementation of total_size() didn't report "everything", and changing the
501 only available size to "everything" doesn't feel at all useful. */
503 #define NO_RECURSION 0
504 #define SOME_RECURSION 1
505 #define TOTAL_SIZE_RECURSION 2
507 static void sv_size(pTHX_ struct state *, pPATH, const SV *const, const int recurse);
523 , OPc_CONDOP /* 12 */
532 cc_opclass(const OP * const o)
538 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
540 if (o->op_type == OP_SASSIGN)
541 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
544 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_AELEMFAST)
548 if ((o->op_type == OP_TRANS)) {
552 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
584 #ifdef OA_PVOP_OR_SVOP
585 case OA_PVOP_OR_SVOP: TAG;
587 * Character translations (tr///) are usually a PVOP, keeping a
588 * pointer to a table of shorts used to look up translations.
589 * Under utf8, however, a simple table isn't practical; instead,
590 * the OP is an SVOP, and the SV is a reference to a swash
591 * (i.e., an RV pointing to an HV).
593 return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF))
594 ? OPc_SVOP : OPc_PVOP;
603 case OA_BASEOP_OR_UNOP: TAG;
605 * UNI(OP_foo) in toke.c returns token UNI or FUNC1 depending on
606 * whether parens were seen. perly.y uses OPf_SPECIAL to
607 * signal whether a BASEOP had empty parens or none.
608 * Some other UNOPs are created later, though, so the best
609 * test is OPf_KIDS, which is set in newUNOP.
611 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
613 case OA_FILESTATOP: TAG;
615 * The file stat OPs are created via UNI(OP_foo) in toke.c but use
616 * the OPf_REF flag to distinguish between OP types instead of the
617 * usual OPf_SPECIAL flag. As usual, if OPf_KIDS is set, then we
618 * return OPc_UNOP so that walkoptree can find our children. If
619 * OPf_KIDS is not set then we check OPf_REF. Without OPf_REF set
620 * (no argument to the operator) it's an OP; with OPf_REF set it's
621 * an SVOP (and op_sv is the GV for the filehandle argument).
623 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
625 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
627 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
629 case OA_LOOPEXOP: TAG;
631 * next, last, redo, dump and goto use OPf_SPECIAL to indicate that a
632 * label was omitted (in which case it's a BASEOP) or else a term was
633 * seen. In this last case, all except goto are definitely PVOP but
634 * goto is either a PVOP (with an ordinary constant label), an UNOP
635 * with OPf_STACKED (with a non-constant non-sub) or an UNOP for
636 * OP_REFGEN (with goto &sub) in which case OPf_STACKED also seems to
639 if (o->op_flags & OPf_STACKED)
641 else if (o->op_flags & OPf_SPECIAL)
651 warn("Devel::Size: Can't determine class of operator %s, assuming BASEOP\n",
652 PL_op_name[o->op_type]);
658 /* Figure out how much magic is attached to the SV and return the
661 magic_size(pTHX_ const SV * const thing, struct state *st, pPATH) {
662 dNPathNodes(1, NPathArg);
663 MAGIC *magic_pointer = SvMAGIC(thing); /* caller ensures thing is SvMAGICAL */
665 /* push a dummy node for NPathSetNode to update inside the while loop */
666 NPathPushNode("dummy", NPtype_NAME);
668 /* Have we seen the magic pointer? (NULL has always been seen before) */
669 while (check_new(st, magic_pointer)) {
671 NPathSetNode(magic_pointer, NPtype_MAGIC);
673 ADD_SIZE(st, "mg", sizeof(MAGIC));
674 /* magic vtables aren't freed when magic is freed, so don't count them.
675 (They are static structures. Anything that assumes otherwise is buggy.)
680 /* XXX only chase mg_obj if mg->mg_flags & MGf_REFCOUNTED ? */
681 sv_size(aTHX_ st, NPathLink("mg_obj"), magic_pointer->mg_obj, TOTAL_SIZE_RECURSION);
682 if (magic_pointer->mg_len == HEf_SVKEY) {
683 sv_size(aTHX_ st, NPathLink("mg_ptr"), (SV *)magic_pointer->mg_ptr, TOTAL_SIZE_RECURSION);
685 #if defined(PERL_MAGIC_utf8) && defined (PERL_MAGIC_UTF8_CACHESIZE)
686 else if (magic_pointer->mg_type == PERL_MAGIC_utf8) {
687 if (check_new(st, magic_pointer->mg_ptr)) {
688 ADD_SIZE(st, "PERL_MAGIC_utf8", PERL_MAGIC_UTF8_CACHESIZE * 2 * sizeof(STRLEN));
692 /* XXX also handle mg->mg_type == PERL_MAGIC_utf8 ? */
693 else if (magic_pointer->mg_len > 0) {
694 if(0)do_magic_dump(0, Perl_debug_log, magic_pointer, 0, 0, FALSE, 0);
695 if (check_new(st, magic_pointer->mg_ptr)) {
696 ADD_SIZE(st, "mg_len", magic_pointer->mg_len);
700 /* Get the next in the chain */
701 magic_pointer = magic_pointer->mg_moremagic;
704 if (st->dangle_whine)
705 warn( "Devel::Size: Encountered bad magic at: %p\n", magic_pointer );
710 #define check_new_and_strlen(st, p, ppath) S_check_new_and_strlen(aTHX_ st, p, ppath)
712 S_check_new_and_strlen(pTHX_ struct state *st, const char *const p, pPATH) {
713 dNPathNodes(1, NPathArg->prev);
714 if(check_new(st, p)) {
715 NPathPushNode(NPathArg->id, NPtype_NAME);
716 ADD_SIZE(st, NPathArg->id, 1 + strlen(p));
721 regex_size(pTHX_ const REGEXP * const baseregex, struct state *st, pPATH) {
722 dNPathNodes(1, NPathArg);
723 if(!check_new(st, baseregex))
725 NPathPushNode("regex_size", NPtype_NAME);
726 ADD_SIZE(st, "REGEXP", sizeof(REGEXP));
727 #if (PERL_VERSION < 11)
728 /* Note the size of the paren offset thing */
729 ADD_SIZE(st, "nparens", sizeof(I32) * baseregex->nparens * 2);
730 ADD_SIZE(st, "precomp", strlen(baseregex->precomp));
732 ADD_SIZE(st, "regexp", sizeof(struct regexp));
733 ADD_SIZE(st, "nparens", sizeof(I32) * SvANY(baseregex)->nparens * 2);
734 /*ADD_SIZE(st, strlen(SvANY(baseregex)->subbeg));*/
736 if (st->go_yell && !st->regex_whine) {
737 carp("Devel::Size: Calculated sizes for compiled regexes are incompatible, and probably always will be");
743 hek_size(pTHX_ struct state *st, HEK *hek, U32 shared, pPATH)
745 dNPathNodes(1, NPathArg);
747 /* Hash keys can be shared. Have we seen this before? */
748 if (!check_new(st, hek))
750 NPathPushNode("hek", NPtype_NAME);
751 ADD_SIZE(st, "hek_len", HEK_BASESIZE + hek->hek_len
753 + 1 /* No hash key flags prior to 5.8.0 */
759 #if PERL_VERSION < 10
760 ADD_SIZE(st, "he", sizeof(struct he));
762 ADD_SIZE(st, "shared_he", STRUCT_OFFSET(struct shared_he, shared_he_hek));
768 refcounted_he_size(pTHX_ struct state *st, struct refcounted_he *he, pPATH)
770 dNPathNodes(1, NPathArg);
771 if (!check_new(st, he))
773 NPathPushNode("refcounted_he_size", NPtype_NAME);
774 ADD_SIZE(st, "refcounted_he", sizeof(struct refcounted_he));
777 ADD_SIZE(st, "refcounted_he_data", NPtype_NAME);
779 hek_size(aTHX_ st, he->refcounted_he_hek, 0, NPathLink("refcounted_he_hek"));
782 if (he->refcounted_he_next)
783 refcounted_he_size(aTHX_ st, he->refcounted_he_next, NPathLink("refcounted_he_next"));
786 static void op_size_class(pTHX_ const OP * const baseop, opclass op_class, bool skip_op_struct, struct state *st, pPATH);
789 op_size(pTHX_ const OP * const baseop, struct state *st, pPATH)
791 op_size_class(aTHX_ baseop, cc_opclass(baseop), 0, st, NPathArg);
795 op_size_class(pTHX_ const OP * const baseop, opclass op_class, bool skip_op_struct, struct state *st, pPATH)
797 /* op_size recurses to follow the chain of opcodes. For the node path we
798 * don't want the chain to be 'nested' in the path so we use dNPathUseParent().
799 * Also, to avoid a link-to-a-link the caller should use NPathLinkAndNode()
800 * instead of NPathLink().
802 dNPathUseParent(NPathArg);
806 if(!check_new(st, baseop))
809 op_size(aTHX_ baseop->op_next, st, NPathOpLink);
811 madprop_size(aTHX_ st, NPathOpLink, baseop->op_madprop);
815 case OPc_BASEOP: TAG;
817 ADD_SIZE(st, "op", sizeof(struct op));
821 ADD_SIZE(st, "unop", sizeof(struct unop));
822 op_size(aTHX_ ((UNOP *)baseop)->op_first, st, NPathOpLink);
826 ADD_SIZE(st, "binop", sizeof(struct binop));
827 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
828 op_size(aTHX_ ((BINOP *)baseop)->op_last, st, NPathOpLink);
832 ADD_SIZE(st, "logop", sizeof(struct logop));
833 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
834 op_size(aTHX_ ((LOGOP *)baseop)->op_other, st, NPathOpLink);
837 case OPc_CONDOP: TAG;
839 ADD_SIZE(st, "condop", sizeof(struct condop));
840 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
841 op_size(aTHX_ ((CONDOP *)baseop)->op_true, st, NPathOpLink);
842 op_size(aTHX_ ((CONDOP *)baseop)->op_false, st, NPathOpLink);
845 case OPc_LISTOP: TAG;
847 ADD_SIZE(st, "listop", sizeof(struct listop));
848 op_size(aTHX_ ((LISTOP *)baseop)->op_first, st, NPathOpLink);
849 op_size(aTHX_ ((LISTOP *)baseop)->op_last, st, NPathOpLink);
853 ADD_SIZE(st, "pmop", sizeof(struct pmop));
854 op_size(aTHX_ ((PMOP *)baseop)->op_first, st, NPathOpLink);
855 op_size(aTHX_ ((PMOP *)baseop)->op_last, st, NPathOpLink);
856 #if PERL_VERSION < 9 || (PERL_VERSION == 9 && PERL_SUBVERSION < 5)
857 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplroot, st, NPathOpLink);
858 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplstart, st, NPathOpLink);
860 /* This is defined away in perl 5.8.x, but it is in there for
863 regex_size(aTHX_ PM_GETRE((PMOP *)baseop), st, NPathLink("PM_GETRE"));
865 regex_size(aTHX_ ((PMOP *)baseop)->op_pmregexp, st, NPathLink("op_pmregexp"));
870 ADD_SIZE(st, "svop", sizeof(struct svop));
871 if (!(baseop->op_type == OP_AELEMFAST
872 && baseop->op_flags & OPf_SPECIAL)) {
873 /* not an OP_PADAV replacement */
874 sv_size(aTHX_ st, NPathLink("SVOP"), ((SVOP *)baseop)->op_sv, SOME_RECURSION);
880 ADD_SIZE(st, "padop", sizeof(struct padop));
886 ADD_SIZE(st, "gvop", sizeof(struct gvop));
887 sv_size(aTHX_ st, NPathLink("GVOP"), ((GVOP *)baseop)->op_gv, SOME_RECURSION);
891 check_new_and_strlen(st, ((PVOP *)baseop)->op_pv, NPathLink("op_pv"));
895 ADD_SIZE(st, "loop", sizeof(struct loop));
896 op_size(aTHX_ ((LOOP *)baseop)->op_first, st, NPathOpLink);
897 op_size(aTHX_ ((LOOP *)baseop)->op_last, st, NPathOpLink);
898 op_size(aTHX_ ((LOOP *)baseop)->op_redoop, st, NPathOpLink);
899 op_size(aTHX_ ((LOOP *)baseop)->op_nextop, st, NPathOpLink);
900 op_size(aTHX_ ((LOOP *)baseop)->op_lastop, st, NPathOpLink);
906 basecop = (COP *)baseop;
908 ADD_SIZE(st, "cop", sizeof(struct cop));
910 /* Change 33656 by nicholas@mouse-mill on 2008/04/07 11:29:51
911 Eliminate cop_label from struct cop by storing a label as the first
912 entry in the hints hash. Most statements don't have labels, so this
913 will save memory. Not sure how much.
914 The check below will be incorrect fail on bleadperls
915 before 5.11 @33656, but later than 5.10, producing slightly too
916 small memory sizes on these Perls. */
917 #if (PERL_VERSION < 11)
918 check_new_and_strlen(st, basecop->cop_label, NPathLink("cop_label"));
921 check_new_and_strlen(st, basecop->cop_file, NPathLink("cop_file"));
922 check_new_and_strlen(st, basecop->cop_stashpv, NPathLink("cop_stashpv"));
924 if (SvREFCNT(basecop->cop_stash) == 1) /* XXX hack? */
925 sv_size(aTHX_ st, NPathLink("cop_stash"), (SV *)basecop->cop_stash, SOME_RECURSION);
926 sv_size(aTHX_ st, NPathLink("cop_filegv"), (SV *)basecop->cop_filegv, SOME_RECURSION);
929 hh = CopHINTHASH_get(basecop);
930 refcounted_he_size(aTHX_ st, hh, NPathLink("cop_hints_hash"));
938 if (st->dangle_whine)
939 warn( "Devel::Size: Encountered dangling pointer in opcode at: %p\n", baseop );
943 #if PERL_VERSION < 8 || PERL_SUBVERSION < 9
948 # define MAYBE_PURIFY(normal, pure) (pure)
949 # define MAYBE_OFFSET(struct_name, member) 0
951 # define MAYBE_PURIFY(normal, pure) (normal)
952 # define MAYBE_OFFSET(struct_name, member) STRUCT_OFFSET(struct_name, member)
955 const U8 body_sizes[SVt_LAST] = {
958 MAYBE_PURIFY(sizeof(IV), sizeof(XPVIV)), /* SVt_IV */
959 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
960 sizeof(XRV), /* SVt_RV */
961 sizeof(XPV), /* SVt_PV */
962 sizeof(XPVIV), /* SVt_PVIV */
963 sizeof(XPVNV), /* SVt_PVNV */
964 sizeof(XPVMG), /* SVt_PVMG */
965 sizeof(XPVBM), /* SVt_PVBM */
966 sizeof(XPVLV), /* SVt_PVLV */
967 sizeof(XPVAV), /* SVt_PVAV */
968 sizeof(XPVHV), /* SVt_PVHV */
969 sizeof(XPVCV), /* SVt_PVCV */
970 sizeof(XPVGV), /* SVt_PVGV */
971 sizeof(XPVFM), /* SVt_PVFM */
972 sizeof(XPVIO) /* SVt_PVIO */
973 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
977 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
979 MAYBE_PURIFY(sizeof(xpv_allocated), sizeof(XPV)), /* SVt_PV */
980 MAYBE_PURIFY(sizeof(xpviv_allocated), sizeof(XPVIV)),/* SVt_PVIV */
981 sizeof(XPVNV), /* SVt_PVNV */
982 sizeof(XPVMG), /* SVt_PVMG */
983 sizeof(XPVGV), /* SVt_PVGV */
984 sizeof(XPVLV), /* SVt_PVLV */
985 MAYBE_PURIFY(sizeof(xpvav_allocated), sizeof(XPVAV)),/* SVt_PVAV */
986 MAYBE_PURIFY(sizeof(xpvhv_allocated), sizeof(XPVHV)),/* SVt_PVHV */
987 MAYBE_PURIFY(sizeof(xpvcv_allocated), sizeof(XPVCV)),/* SVt_PVCV */
988 MAYBE_PURIFY(sizeof(xpvfm_allocated), sizeof(XPVFM)),/* SVt_PVFM */
989 sizeof(XPVIO), /* SVt_PVIO */
990 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
994 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
996 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
997 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
998 sizeof(XPVNV), /* SVt_PVNV */
999 sizeof(XPVMG), /* SVt_PVMG */
1000 sizeof(XPVGV), /* SVt_PVGV */
1001 sizeof(XPVLV), /* SVt_PVLV */
1002 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
1003 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
1004 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
1005 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
1006 sizeof(XPVIO) /* SVt_PVIO */
1007 #elif PERL_VERSION < 13
1011 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
1012 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
1013 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
1014 sizeof(XPVNV), /* SVt_PVNV */
1015 sizeof(XPVMG), /* SVt_PVMG */
1016 sizeof(regexp) - MAYBE_OFFSET(regexp, xpv_cur), /* SVt_REGEXP */
1017 sizeof(XPVGV), /* SVt_PVGV */
1018 sizeof(XPVLV), /* SVt_PVLV */
1019 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
1020 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
1021 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
1022 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
1023 sizeof(XPVIO) /* SVt_PVIO */
1028 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
1029 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
1030 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
1031 sizeof(XPVNV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVNV */
1032 sizeof(XPVMG), /* SVt_PVMG */
1033 sizeof(regexp), /* SVt_REGEXP */
1034 sizeof(XPVGV), /* SVt_PVGV */
1035 sizeof(XPVLV), /* SVt_PVLV */
1036 sizeof(XPVAV), /* SVt_PVAV */
1037 sizeof(XPVHV), /* SVt_PVHV */
1038 sizeof(XPVCV), /* SVt_PVCV */
1039 sizeof(XPVFM), /* SVt_PVFM */
1040 sizeof(XPVIO) /* SVt_PVIO */
1045 /* based on Perl_do_dump_pad() - wraps sv_size and adds ADD_ATTR calls for the pad names */
1047 padlist_size(pTHX_ struct state *const st, pPATH, PADLIST *padlist,
1050 dNPathUseParent(NPathArg);
1057 if( 0 && !check_new(st, padlist))
1060 pad_name = MUTABLE_AV(*av_fetch(MUTABLE_AV(padlist), 0, FALSE));
1061 pname = AvARRAY(pad_name);
1063 for (ix = 1; ix <= AvFILLp(pad_name); ix++) {
1064 const SV *namesv = pname[ix];
1065 if (namesv && namesv == &PL_sv_undef) {
1069 /* SvFAKE: On a pad name SV, that slot in the frame AV is a REFCNT'ed reference to a lexical from "outside" */
1071 ADD_ATTR(st, NPattr_PADFAKE, SvPVX_const(namesv), ix);
1073 ADD_ATTR(st, NPattr_PADNAME, SvPVX_const(namesv), ix);
1076 ADD_ATTR(st, NPattr_PADTMP, "SVs_PADTMP", ix);
1080 sv_size(aTHX_ st, NPathArg, (SV*)padlist, recurse);
1085 sv_size(pTHX_ struct state *const st, pPATH, const SV * const orig_thing,
1086 const int recurse) {
1087 const SV *thing = orig_thing;
1088 dNPathNodes(3, NPathArg);
1091 if(!check_new(st, orig_thing))
1094 type = SvTYPE(thing);
1095 if (type > SVt_LAST) {
1096 warn("Devel::Size: Unknown variable type: %d encountered\n", type);
1099 NPathPushNode(thing, NPtype_SV);
1100 ADD_SIZE(st, "sv_head", sizeof(SV));
1101 ADD_SIZE(st, "sv_body", body_sizes[type]);
1104 #if (PERL_VERSION < 11)
1105 /* Is it a reference? */
1110 if(recurse && SvROK(thing)) /* XXX maybe don't follow weakrefs */
1111 sv_size(aTHX_ st, (SvWEAKREF(thing) ? NPathLink("weakRV") : NPathLink("RV")), SvRV_const(thing), recurse);
1115 /* Is there anything in the array? */
1116 if (AvMAX(thing) != -1) {
1117 /* an array with 10 slots has AvMax() set to 9 - te 2007-04-22 */
1118 ADD_SIZE(st, "av_max", sizeof(SV *) * (AvMAX(thing) + 1));
1119 dbg_printf(("total_size: %li AvMAX: %li av_len: $i\n", st->total_size, AvMAX(thing), av_len((AV*)thing)));
1121 if (recurse >= st->min_recurse_threshold) {
1122 SSize_t i = AvFILLp(thing) + 1;
1125 ADD_PRE_ATTR(st, 0, "index", i);
1126 sv_size(aTHX_ st, NPathLink("AVelem"), AvARRAY(thing)[i], recurse);
1130 /* Add in the bits on the other side of the beginning */
1132 dbg_printf(("total_size %li, sizeof(SV *) %li, AvARRAY(thing) %li, AvALLOC(thing)%li , sizeof(ptr) %li \n",
1133 st->total_size, sizeof(SV*), AvARRAY(thing), AvALLOC(thing), sizeof( thing )));
1135 /* under Perl 5.8.8 64bit threading, AvARRAY(thing) was a pointer while AvALLOC was 0,
1136 resulting in grossly overstated sized for arrays. Technically, this shouldn't happen... */
1137 if (AvALLOC(thing) != 0) {
1138 ADD_SIZE(st, "AvALLOC", (sizeof(SV *) * (AvARRAY(thing) - AvALLOC(thing))));
1140 #if (PERL_VERSION < 9)
1141 /* Is there something hanging off the arylen element?
1142 Post 5.9.something this is stored in magic, so will be found there,
1143 and Perl_av_arylen_p() takes a non-const AV*, hence compilers rightly
1144 complain about AvARYLEN() passing thing to it. */
1145 sv_size(aTHX_ st, NPathLink("ARYLEN"), AvARYLEN(thing), recurse);
1150 /* Now the array of buckets */
1151 ADD_SIZE(st, "hv_max", (sizeof(HE *) * (HvMAX(thing) + 1)));
1152 if (HvENAME(thing)) {
1153 ADD_ATTR(st, NPattr_NAME, HvENAME(thing), 0);
1155 /* Now walk the bucket chain */
1156 if (HvARRAY(thing)) {
1159 for (cur_bucket = 0; cur_bucket <= HvMAX(thing); cur_bucket++) {
1160 cur_entry = *(HvARRAY(thing) + cur_bucket);
1162 /* XXX a HE should probably be a node so the keys and values are seen as pairs */
1163 ADD_SIZE(st, "he", sizeof(HE));
1164 hek_size(aTHX_ st, cur_entry->hent_hek, HvSHAREKEYS(thing), NPathLink("hent_hek"));
1165 if (recurse >= st->min_recurse_threshold) {
1166 if (orig_thing == (SV*)PL_strtab) {
1167 /* For PL_strtab the HeVAL is used as a refcnt */
1168 ADD_SIZE(st, "shared_hek", HeKLEN(cur_entry));
1171 /* I've seen a PL_strtab HeVAL == 0xC and 0x40C etc
1172 * just running perl -Mblib -Mstrict -MDevel::Size=:all -MCarp -e 'warn perl_size()'
1173 * but it seemed like a corruption - it would come and go with irrelevant code changes.
1174 * so we protect against that here, but I'd like to know the cause.
1176 if (PTR2UV(HeVAL(cur_entry)) > 0xFFF)
1177 sv_size(aTHX_ st, NPathLink("HeVAL"), HeVAL(cur_entry), recurse);
1178 else warn("skipped suspect HeVAL %p", HeVAL(cur_entry));
1181 cur_entry = cur_entry->hent_next;
1187 /* This direct access is arguably "naughty": */
1188 struct mro_meta *meta = HvAUX(thing)->xhv_mro_meta;
1189 #if PERL_VERSION > 13 || PERL_SUBVERSION > 8
1191 I32 count = HvAUX(thing)->xhv_name_count;
1194 HEK **names = HvAUX(thing)->xhv_name_u.xhvnameu_names;
1198 hek_size(aTHX_ st, names[count], 1, NPathLink("HvAUXelem"));
1203 hek_size(aTHX_ st, HvNAME_HEK(thing), 1, NPathLink("HvNAME_HEK"));
1206 ADD_SIZE(st, "xpvhv_aux", sizeof(struct xpvhv_aux));
1208 ADD_SIZE(st, "mro_meta", sizeof(struct mro_meta));
1209 sv_size(aTHX_ st, NPathLink("mro_nextmethod"), (SV *)meta->mro_nextmethod, TOTAL_SIZE_RECURSION);
1210 #if PERL_VERSION > 10 || (PERL_VERSION == 10 && PERL_SUBVERSION > 0)
1211 sv_size(aTHX_ st, NPathLink("isa"), (SV *)meta->isa, TOTAL_SIZE_RECURSION);
1213 #if PERL_VERSION > 10
1214 sv_size(aTHX_ st, NPathLink("mro_linear_all"), (SV *)meta->mro_linear_all, TOTAL_SIZE_RECURSION);
1215 sv_size(aTHX_ st, NPathLink("mro_linear_current"), meta->mro_linear_current, TOTAL_SIZE_RECURSION);
1217 sv_size(aTHX_ st, NPathLink("mro_linear_dfs"), (SV *)meta->mro_linear_dfs, TOTAL_SIZE_RECURSION);
1218 sv_size(aTHX_ st, NPathLink("mro_linear_c3"), (SV *)meta->mro_linear_c3, TOTAL_SIZE_RECURSION);
1223 check_new_and_strlen(st, HvNAME_get(thing), NPathLink("HvNAME"));
1229 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1230 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1232 if (st->go_yell && !st->fm_whine) {
1233 carp("Devel::Size: Calculated sizes for FMs are incomplete");
1239 /* not CvSTASH, per https://rt.cpan.org/Ticket/Display.html?id=79366 */
1240 ADD_ATTR(st, NPattr_NAME, CvGV(thing) ? GvNAME(CvGV(thing)) : "UNDEFINED", 0);
1241 sv_size(aTHX_ st, NPathLink("CvGV"), (SV *)CvGV(thing), SOME_RECURSION);
1242 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1243 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1244 if (CvISXSUB(thing)) {
1245 sv_size(aTHX_ st, NPathLink("cv_const_sv"), cv_const_sv((CV *)thing), recurse);
1247 if(1)op_size(aTHX_ CvSTART(thing), st, NPathLinkAndNode("CvSTART", "OPs")); /* XXX ? */
1248 op_size(aTHX_ CvROOT(thing), st, NPathLinkAndNode("CvROOT", "OPs"));
1253 /* Some embedded char pointers */
1254 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_top_name, NPathLink("xio_top_name"));
1255 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_fmt_name, NPathLink("xio_fmt_name"));
1256 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_bottom_name, NPathLink("xio_bottom_name"));
1257 /* Throw the GVs on the list to be walked if they're not-null */
1258 sv_size(aTHX_ st, NPathLink("xio_top_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_top_gv, recurse);
1259 sv_size(aTHX_ st, NPathLink("xio_bottom_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_bottom_gv, recurse);
1260 sv_size(aTHX_ st, NPathLink("xio_fmt_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_fmt_gv, recurse);
1262 /* Only go trotting through the IO structures if they're really
1263 trottable. If USE_PERLIO is defined we can do this. If
1264 not... we can't, so we don't even try */
1266 /* Dig into xio_ifp and xio_ofp here */
1267 warn("Devel::Size: Can't size up perlio layers yet\n");
1272 #if (PERL_VERSION < 9)
1277 if(isGV_with_GP(thing)) {
1279 hek_size(aTHX_ st, GvNAME_HEK(thing), 1, NPathLink("GvNAME_HEK"));
1281 ADD_SIZE(st, "GvNAMELEN", GvNAMELEN(thing));
1283 ADD_ATTR(st, NPattr_NAME, GvNAME_get(thing), 0);
1285 hek_size(aTHX_ st, GvFILE_HEK(thing), 1, NPathLink("GvFILE_HEK"));
1286 #elif defined(GvFILE)
1287 # if !defined(USE_ITHREADS) || (PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8))
1288 /* With ithreads, before 5.8.9, this can end up pointing to freed memory
1289 if the GV was created in an eval, as GvFILE() points to CopFILE(),
1290 and the relevant COP has been freed on scope cleanup after the eval.
1291 5.8.9 adds a binary compatible fudge that catches the vast majority
1292 of cases. 5.9.something added a proper fix, by converting the GP to
1293 use a shared hash key (properly reference counted), instead of a
1294 char * (owned by who knows? possibly no-one now) */
1295 check_new_and_strlen(st, GvFILE(thing), NPathLink("GvFILE"));
1298 /* Is there something hanging off the glob? */
1299 if (check_new(st, GvGP(thing))) {
1300 ADD_SIZE(st, "GP", sizeof(GP));
1301 sv_size(aTHX_ st, NPathLink("gp_sv"), (SV *)(GvGP(thing)->gp_sv), recurse);
1302 sv_size(aTHX_ st, NPathLink("gp_av"), (SV *)(GvGP(thing)->gp_av), recurse);
1303 sv_size(aTHX_ st, NPathLink("gp_hv"), (SV *)(GvGP(thing)->gp_hv), recurse);
1304 sv_size(aTHX_ st, NPathLink("gp_cv"), (SV *)(GvGP(thing)->gp_cv), recurse);
1305 sv_size(aTHX_ st, NPathLink("gp_egv"), (SV *)(GvGP(thing)->gp_egv), recurse);
1306 sv_size(aTHX_ st, NPathLink("gp_form"), (SV *)(GvGP(thing)->gp_form), recurse);
1308 #if (PERL_VERSION >= 9)
1312 #if PERL_VERSION <= 8
1320 if(recurse && SvROK(thing))
1321 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
1322 else if (SvIsCOW_shared_hash(thing))
1323 hek_size(aTHX_ st, SvSHARED_HEK_FROM_PV(SvPVX(thing)), 1, NPathLink("SvSHARED_HEK_FROM_PV"));
1325 ADD_SIZE(st, "SvLEN", SvLEN(thing));
1329 SvOOK_offset(thing, len);
1330 ADD_SIZE(st, "SvOOK", len);
1336 if (type >= SVt_PVMG) {
1337 if (SvMAGICAL(thing))
1338 magic_size(aTHX_ thing, st, NPathLink("MG"));
1339 if (SvPAD_OUR(thing) && SvOURSTASH(thing))
1340 sv_size(aTHX_ st, NPathLink("SvOURSTASH"), (SV *)SvOURSTASH(thing), SOME_RECURSION);
1342 sv_size(aTHX_ st, NPathLink("SvSTASH"), (SV *)SvSTASH(thing), SOME_RECURSION);
1349 free_memnode_state(pTHX_ struct state *st)
1351 PERL_UNUSED_ARG(aTHX);
1352 if (st->node_stream_fh && st->node_stream_name && *st->node_stream_name) {
1353 if (*st->node_stream_name == '|') {
1354 if (pclose(st->node_stream_fh))
1355 warn("%s exited with an error status\n", st->node_stream_name);
1358 if (fclose(st->node_stream_fh))
1359 warn("Error closing %s: %s\n", st->node_stream_name, strerror(errno));
1364 static struct state *
1370 Newxz(st, 1, struct state);
1372 st->min_recurse_threshold = TOTAL_SIZE_RECURSION;
1373 if (NULL != (warn_flag = perl_get_sv("Devel::Size::warn", FALSE))) {
1374 st->dangle_whine = st->go_yell = SvIV(warn_flag) ? TRUE : FALSE;
1376 if (NULL != (warn_flag = perl_get_sv("Devel::Size::dangle", FALSE))) {
1377 st->dangle_whine = SvIV(warn_flag) ? TRUE : FALSE;
1379 check_new(st, &PL_sv_undef);
1380 check_new(st, &PL_sv_no);
1381 check_new(st, &PL_sv_yes);
1382 #if PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 0)
1383 check_new(st, &PL_sv_placeholder);
1386 #ifdef PATH_TRACKING
1387 /* XXX quick hack */
1388 st->node_stream_name = getenv("PERL_DMEM");
1389 if (st->node_stream_name) {
1390 if (*st->node_stream_name) {
1391 if (*st->node_stream_name == '|')
1392 st->node_stream_fh = popen(st->node_stream_name+1, "w");
1394 st->node_stream_fh = fopen(st->node_stream_name, "wb");
1395 if (!st->node_stream_fh)
1396 croak("Can't open '%s' for writing: %s", st->node_stream_name, strerror(errno));
1397 setlinebuf(st->node_stream_fh); /* XXX temporary for debugging */
1398 st->add_attr_cb = np_stream_node_path_info;
1401 st->add_attr_cb = np_dump_node_path_info;
1403 st->free_state_cb = free_memnode_state;
1409 /* XXX based on S_visit() in sv.c */
1411 unseen_sv_size(pTHX_ struct state *st, pPATH)
1415 dNPathNodes(1, NPathArg);
1417 NPathPushNode("unseen", NPtype_NAME);
1419 /* by this point we should have visited all the SVs
1420 * so now we'll run through all the SVs via the arenas
1421 * in order to find any thet we've missed for some reason.
1422 * Once the rest of the code is finding all the SVs then any
1423 * found here will be leaks.
1425 for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
1426 const SV * const svend = &sva[SvREFCNT(sva)];
1428 for (sv = sva + 1; sv < svend; ++sv) {
1429 if (SvTYPE(sv) != (svtype)SVTYPEMASK && SvREFCNT(sv)) {
1430 sv_size(aTHX_ st, NPathLink("arena"), sv, TOTAL_SIZE_RECURSION);
1432 else if (check_new(st, sv)) { /* sanity check */
1434 warn("unseen_sv_size encountered freed SV unexpectedly"); /* XXX warn uses an SV, I think */
1442 madprop_size(pTHX_ struct state *const st, pPath, MADPROP *prop)
1444 dPathNodes(2, NPathArg);
1445 if (!check_new(st, prop))
1447 NPathPushNode("madprop_size", NPtype_NAME);
1448 ADD_SIZE(st, "MADPROP", sizeof(MADPROP));
1450 NPathPushNode("val");
1451 ADD_SIZE(st, "val", prop->mad_val);
1453 madprop_size(aTHX_ st, NPathLink("mad_next"), prop->mad_next);
1458 parser_size(pTHX_ struct state *const st, pPATH, yy_parser *parser)
1460 dNPathNodes(2, NPathArg);
1461 if (!check_new(st, parser))
1463 NPathPushNode("parser_size", NPtype_NAME);
1464 ADD_SIZE(st, "yy_parser", sizeof(yy_parser));
1466 NPathPushNode("stack", NPtype_NAME);
1468 //warn("total: %u", parser->stack_size);
1469 //warn("foo: %u", parser->ps - parser->stack);
1470 ADD_SIZE(st, "stack_frames", parser->stack_size * sizeof(yy_stack_frame));
1471 for (ps = parser->stack; ps <= parser->ps; ps++) {
1472 ADD_PRE_ATTR(st, 0, "frame", ps - parser->ps);
1473 sv_size(aTHX_ st, NPathLink("compcv"), (SV*)ps->compcv, TOTAL_SIZE_RECURSION);
1477 sv_size(aTHX_ st, NPathLink("lex_repl"), (SV*)parser->lex_repl, TOTAL_SIZE_RECURSION);
1478 sv_size(aTHX_ st, NPathLink("lex_stuff"), (SV*)parser->lex_stuff, TOTAL_SIZE_RECURSION);
1479 sv_size(aTHX_ st, NPathLink("linestr"), (SV*)parser->linestr, TOTAL_SIZE_RECURSION);
1480 sv_size(aTHX_ st, NPathLink("in_my_stash"), (SV*)parser->in_my_stash, TOTAL_SIZE_RECURSION);
1481 //sv_size(aTHX_ st, NPathLink("rsfp"), parser->rsfp, TOTAL_SIZE_RECURSION);
1482 sv_size(aTHX_ st, NPathLink("rsfp_filters"), (SV*)parser->rsfp_filters, TOTAL_SIZE_RECURSION);
1484 sv_size(aTHX_ st, NPathLink("endwhite"), parser->endwhite, TOTAL_SIZE_RECURSION);
1485 sv_size(aTHX_ st, NPathLink("nextwhite"), parser->nextwhite, TOTAL_SIZE_RECURSION);
1486 sv_size(aTHX_ st, NPathLink("skipwhite"), parser->skipwhite, TOTAL_SIZE_RECURSION);
1487 sv_size(aTHX_ st, NPathLink("thisclose"), parser->thisclose, TOTAL_SIZE_RECURSION);
1488 madprop_size(aTHX_ st, NPathLink("thismad"), parser->thismad);
1489 sv_size(aTHX_ st, NPathLink("thisopen"), parser->thisopen, TOTAL_SIZE_RECURSION);
1490 sv_size(aTHX_ st, NPathLink("thisstuff"), parser->thisstuff, TOTAL_SIZE_RECURSION);
1491 sv_size(aTHX_ st, NPathLink("thistoken"), parser->thistoken, TOTAL_SIZE_RECURSION);
1492 sv_size(aTHX_ st, NPathLink("thiswhite"), parser->thiswhite, TOTAL_SIZE_RECURSION);
1494 op_size_class(aTHX_ (OP*)parser->saved_curcop, OPc_COP, 0,
1495 st, NPathLink("saved_curcop"));
1497 if (parser->old_parser)
1498 parser_size(aTHX_ st, NPathLink("old_parser"), parser->old_parser);
1502 perl_size(pTHX_ struct state *const st, pPATH)
1504 dNPathNodes(3, NPathArg);
1506 /* if(!check_new(st, interp)) return; */
1507 NPathPushNode("perl", NPtype_NAME);
1508 #if defined(MULTIPLICITY)
1509 ADD_SIZE(st, "PerlInterpreter", sizeof(PerlInterpreter));
1515 * unknown <== = O/S Heap size - perl - free_malloc_space
1517 /* start with PL_defstash to get everything reachable from \%main:: */
1518 sv_size(aTHX_ st, NPathLink("PL_defstash"), (SV*)PL_defstash, TOTAL_SIZE_RECURSION);
1520 NPathPushNode("others", NPtype_NAME); /* group these (typically much smaller) items */
1521 sv_size(aTHX_ st, NPathLink("PL_defgv"), (SV*)PL_defgv, TOTAL_SIZE_RECURSION);
1522 sv_size(aTHX_ st, NPathLink("PL_incgv"), (SV*)PL_incgv, TOTAL_SIZE_RECURSION);
1523 sv_size(aTHX_ st, NPathLink("PL_rs"), (SV*)PL_rs, TOTAL_SIZE_RECURSION);
1524 sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1525 sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1526 sv_size(aTHX_ st, NPathLink("PL_errors"), (SV*)PL_errors, TOTAL_SIZE_RECURSION);
1527 sv_size(aTHX_ st, NPathLink("PL_stashcache"), (SV*)PL_stashcache, TOTAL_SIZE_RECURSION);
1528 sv_size(aTHX_ st, NPathLink("PL_patchlevel"), (SV*)PL_patchlevel, TOTAL_SIZE_RECURSION);
1529 sv_size(aTHX_ st, NPathLink("PL_apiversion"), (SV*)PL_apiversion, TOTAL_SIZE_RECURSION);
1530 sv_size(aTHX_ st, NPathLink("PL_registered_mros"), (SV*)PL_registered_mros, TOTAL_SIZE_RECURSION);
1532 sv_size(aTHX_ st, NPathLink("PL_regex_padav"), (SV*)PL_regex_padav, TOTAL_SIZE_RECURSION);
1534 sv_size(aTHX_ st, NPathLink("PL_warnhook"), (SV*)PL_warnhook, TOTAL_SIZE_RECURSION);
1535 sv_size(aTHX_ st, NPathLink("PL_diehook"), (SV*)PL_diehook, TOTAL_SIZE_RECURSION);
1536 sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1537 sv_size(aTHX_ st, NPathLink("PL_main_cv"), (SV*)PL_main_cv, TOTAL_SIZE_RECURSION);
1538 sv_size(aTHX_ st, NPathLink("PL_main_root"), (SV*)PL_main_root, TOTAL_SIZE_RECURSION);
1539 sv_size(aTHX_ st, NPathLink("PL_main_start"), (SV*)PL_main_start, TOTAL_SIZE_RECURSION);
1540 sv_size(aTHX_ st, NPathLink("PL_envgv"), (SV*)PL_envgv, TOTAL_SIZE_RECURSION);
1541 sv_size(aTHX_ st, NPathLink("PL_hintgv"), (SV*)PL_hintgv, TOTAL_SIZE_RECURSION);
1542 sv_size(aTHX_ st, NPathLink("PL_e_script"), (SV*)PL_e_script, TOTAL_SIZE_RECURSION);
1543 sv_size(aTHX_ st, NPathLink("PL_encoding"), (SV*)PL_encoding, TOTAL_SIZE_RECURSION);
1544 sv_size(aTHX_ st, NPathLink("PL_ofsgv"), (SV*)PL_ofsgv, TOTAL_SIZE_RECURSION);
1545 sv_size(aTHX_ st, NPathLink("PL_argvout_stack"), (SV*)PL_argvout_stack, TOTAL_SIZE_RECURSION);
1546 sv_size(aTHX_ st, NPathLink("PL_beginav"), (SV*)PL_beginav, TOTAL_SIZE_RECURSION);
1547 sv_size(aTHX_ st, NPathLink("PL_beginav_save"), (SV*)PL_beginav_save, TOTAL_SIZE_RECURSION);
1548 sv_size(aTHX_ st, NPathLink("PL_checkav_save"), (SV*)PL_checkav_save, TOTAL_SIZE_RECURSION);
1549 sv_size(aTHX_ st, NPathLink("PL_unitcheckav"), (SV*)PL_unitcheckav, TOTAL_SIZE_RECURSION);
1550 sv_size(aTHX_ st, NPathLink("PL_unitcheckav_save"), (SV*)PL_unitcheckav_save, TOTAL_SIZE_RECURSION);
1551 sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1552 sv_size(aTHX_ st, NPathLink("PL_checkav"), (SV*)PL_checkav, TOTAL_SIZE_RECURSION);
1553 sv_size(aTHX_ st, NPathLink("PL_initav"), (SV*)PL_initav, TOTAL_SIZE_RECURSION);
1554 sv_size(aTHX_ st, NPathLink("PL_isarev"), (SV*)PL_isarev, TOTAL_SIZE_RECURSION);
1555 sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1556 sv_size(aTHX_ st, NPathLink("PL_preambleav"), (SV*)PL_preambleav, TOTAL_SIZE_RECURSION);
1557 sv_size(aTHX_ st, NPathLink("PL_ors_sv"), (SV*)PL_ors_sv, TOTAL_SIZE_RECURSION);
1558 sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1559 sv_size(aTHX_ st, NPathLink("PL_custom_op_names"), (SV*)PL_custom_op_names, TOTAL_SIZE_RECURSION);
1560 sv_size(aTHX_ st, NPathLink("PL_custom_op_descs"), (SV*)PL_custom_op_descs, TOTAL_SIZE_RECURSION);
1561 sv_size(aTHX_ st, NPathLink("PL_custom_ops"), (SV*)PL_custom_ops, TOTAL_SIZE_RECURSION);
1562 sv_size(aTHX_ st, NPathLink("PL_compcv"), (SV*)PL_compcv, TOTAL_SIZE_RECURSION);
1563 sv_size(aTHX_ st, NPathLink("PL_DBcv"), (SV*)PL_DBcv, TOTAL_SIZE_RECURSION);
1564 #ifdef PERL_USES_PL_PIDSTATUS
1565 sv_size(aTHX_ st, NPathLink("PL_pidstatus"), (SV*)PL_pidstatus, TOTAL_SIZE_RECURSION);
1567 sv_size(aTHX_ st, NPathLink("PL_subname"), (SV*)PL_subname, TOTAL_SIZE_RECURSION);
1568 #ifdef USE_LOCALE_NUMERIC
1569 sv_size(aTHX_ st, NPathLink("PL_numeric_radix_sv"), (SV*)PL_numeric_radix_sv, TOTAL_SIZE_RECURSION);
1570 check_new_and_strlen(st, PL_numeric_name, NPathLink("PL_numeric_name"));
1572 #ifdef USE_LOCALE_COLLATE
1573 check_new_and_strlen(st, PL_collation_name, NPathLink("PL_collation_name"));
1575 check_new_and_strlen(st, PL_origfilename, NPathLink("PL_origfilename"));
1576 check_new_and_strlen(st, PL_inplace, NPathLink("PL_inplace"));
1577 check_new_and_strlen(st, PL_osname, NPathLink("PL_osname"));
1578 if (PL_op_mask && check_new(st, PL_op_mask))
1579 ADD_SIZE(st, "PL_op_mask", PL_maxo);
1580 if (PL_exitlistlen && check_new(st, PL_exitlist))
1581 ADD_SIZE(st, "PL_exitlist", (PL_exitlistlen * sizeof(PerlExitListEntry *))
1582 + (PL_exitlistlen * sizeof(PerlExitListEntry)));
1583 if (PL_my_cxt_size && check_new(st, PL_my_cxt_list)) {
1584 ADD_SIZE(st, "PL_my_cxt_list", (PL_my_cxt_size * sizeof(void *)));
1585 #ifdef PERL_GLOBAL_STRUCT_PRIVATE
1586 ADD_SIZE(st, "PL_my_cxt_keys", (PL_my_cxt_size * sizeof(char *)));
1589 /* TODO PL_stashpad */
1590 op_size_class(aTHX_ (OP*)&PL_compiling, OPc_COP, 1, st, NPathLink("PL_compiling"));
1591 op_size_class(aTHX_ (OP*)PL_curcopdb, OPc_COP, 0, st, NPathLink("PL_curcopdb"));
1593 parser_size(aTHX_ st, NPathLink("PL_parser"), PL_parser);
1594 /* TODO stacks: cur, main, tmps, mark, scope, save */
1595 /* TODO PL_exitlist */
1596 /* TODO PL_reentrant_buffers etc */
1598 /* TODO PerlIO? PL_known_layers PL_def_layerlist PL_perlio_fd_refcnt etc */
1600 /* TODO anything missed? */
1602 /* --- by this point we should have seen all reachable SVs --- */
1604 /* in theory we shouldn't have any elements in PL_strtab that haven't been seen yet */
1605 sv_size(aTHX_ st, NPathLink("PL_strtab-unseen"), (SV*)PL_strtab, TOTAL_SIZE_RECURSION);
1607 /* unused space in sv head arenas */
1611 # define SvARENA_CHAIN(sv) SvANY(sv) /* XXX breaks encapsulation*/
1612 while ((p = MUTABLE_SV(SvARENA_CHAIN(p)))) {
1613 if (!check_new(st, p)) /* sanity check */
1614 warn("Free'd SV head unexpectedly already seen");
1617 NPathPushNode("unused_sv_heads", NPtype_NAME);
1618 ADD_SIZE(st, "sv", free_heads * sizeof(SV));
1621 /* XXX iterate over bodies_by_type and crawl the free chains for each */
1623 /* iterate over all SVs to find any we've not accounted for yet */
1624 /* once the code above is visiting all SVs, any found here have been leaked */
1625 unseen_sv_size(aTHX_ st, NPathLink("unaccounted"));
1629 MODULE = Devel::Memory PACKAGE = Devel::Memory
1637 total_size = TOTAL_SIZE_RECURSION
1640 SV *thing = orig_thing;
1641 struct state *st = new_state(aTHX);
1643 /* If they passed us a reference then dereference it. This is the
1644 only way we can check the sizes of arrays and hashes */
1646 thing = SvRV(thing);
1649 sv_size(aTHX_ st, NULL, thing, ix);
1650 RETVAL = st->total_size;
1651 free_state(aTHX_ st);
1660 /* just the current perl interpreter */
1661 struct state *st = new_state(aTHX);
1662 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1663 perl_size(aTHX_ st, NULL);
1664 RETVAL = st->total_size;
1665 free_state(aTHX_ st);
1674 /* the current perl interpreter plus malloc, in the context of total heap size */
1675 # ifdef _MALLOC_MALLOC_H_ /* OSX. Not sure where else mstats is available */
1679 /* some systems have the SVID2/XPG mallinfo structure and function */
1680 struct mstats ms = mstats(); /* mstats() first */
1682 struct state *st = new_state(aTHX);
1683 dNPathNodes(1, NULL);
1684 NPathPushNode("heap", NPtype_NAME);
1686 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1688 perl_size(aTHX_ st, NPathLink("perl_interp"));
1690 NPathSetNode("free_malloc_space", NPtype_NAME);
1691 ADD_SIZE(st, "bytes_free", ms.bytes_free);
1692 ADD_ATTR(st, NPattr_NOTE, "bytes_total", ms.bytes_total);
1693 ADD_ATTR(st, NPattr_NOTE, "bytes_used", ms.bytes_used);
1694 ADD_ATTR(st, NPattr_NOTE, "chunks_used", ms.chunks_used);
1695 ADD_ATTR(st, NPattr_NOTE, "chunks_free", ms.chunks_free);
1696 /* TODO get heap size from OS and add a node: unknown = heapsize - perl - ms.bytes_free */
1697 /* for now we use bytes_total as an approximation */
1698 NPathSetNode("unknown", NPtype_NAME);
1699 ADD_SIZE(st, "unknown", ms.bytes_total - st->total_size);
1704 RETVAL = st->total_size;
1705 free_state(aTHX_ st);