5 * Refactor this to split out D:M code from Devel::Size code.
7 * Start migrating Devel::Size's Size.xs towards the new code.
9 * ADD_PRE_ATTR for index should check if the ptr is new first. Currently we're
10 * generating lots of ADD_PRE_ATTR's for SVs that we've already seen via other paths.
11 * That's wasteful and likely to cause subtle bugs.
13 * Give HE's their own node so keys and values can be tied together
/* NOTE(review): compatibility shims for older perls. Each definition below
 * backfills an API that newer perl cores provide natively; the surrounding
 * #ifndef/#endif guard lines are mostly outside this sampled view. */
17 #undef NDEBUG /* XXX */
20 #define PERL_NO_GET_CONTEXT
27 /* Not yet in ppport.h */
29 # define CvISXSUB(cv) (CvXSUB(cv) ? TRUE : FALSE)
32 # define SvRV_const(rv) SvRV(rv)
/* Fallback: presumably on pre-SvOOK_offset perls the offset lived in SvIVX
 * — TODO confirm against ppport.h */
35 # define SvOOK_offset(sv, len) STMT_START { len = SvIVX(sv); } STMT_END
/* COW scalars are flagged FAKE+READONLY on perls lacking SvIsCOW */
38 # define SvIsCOW(sv) ((SvFLAGS(sv) & (SVf_FAKE | SVf_READONLY)) == \
39 (SVf_FAKE | SVf_READONLY))
41 #ifndef SvIsCOW_shared_hash
/* A COW shared-hash-key scalar owns no private buffer, hence SvLEN == 0 */
42 # define SvIsCOW_shared_hash(sv) (SvIsCOW(sv) && SvLEN(sv) == 0)
44 #ifndef SvSHARED_HEK_FROM_PV
/* Recover the HEK header sitting immediately before a shared PV buffer */
45 # define SvSHARED_HEK_FROM_PV(pvx) \
46 ((struct hek*)(pvx - STRUCT_OFFSET(struct hek, hek_key)))
/* Pre-PL_* naming era: map the modern global names onto the old ones */
50 # define PL_opargs opargs
51 # define PL_op_name op_name
55 /* "structured exception" handling is a Microsoft extension to C and C++.
56 It's *not* C++ exception handling - C++ exception handling can't capture
57 SEGVs and suchlike, whereas this can. There's no known analogous
58 functionality on other platforms. */
60 # define TRY_TO_CATCH_SEGV __try
61 # define CAUGHT_EXCEPTION __except(EXCEPTION_EXECUTE_HANDLER)
/* Non-Windows fallback: the "try" body always runs, the handler never does */
63 # define TRY_TO_CATCH_SEGV if(1)
64 # define CAUGHT_EXCEPTION else
/* Compilers without GCC-style attributes: expand __attribute__ to nothing */
68 # define __attribute__(x)
/* Debug printf plumbing — compiled out by default (note the "0 &&") */
71 #if 0 && defined(DEBUGGING)
72 #define dbg_printf(x) printf x
/* TAG: optional file/line tracer, currently disabled */
77 #define TAG /* printf( "# %s(%d)\n", __FILE__, __LINE__ ) */
80 /* The idea is to have a tree structure to store 1 bit per possible pointer
81 address. The lowest 16 bits are stored in a block of 8192 bytes.
82 The blocks are in a 256-way tree, indexed by the rest of the pointer.
83 This can cope with 32 and 64 bit pointers, and any address space layout,
84 without excessive memory needs. The assumption is that your CPU cache
85 works :-) (And that we're not going to bust it) */
/* LEAF_BITS: address bits resolved by indexing into a leaf's byte array
 * (16 low bits minus the BYTE_BITS packed into each byte). */
88 #define LEAF_BITS (16 - BYTE_BITS)
/* Mask for a byte index within one leaf block: 2^13 - 1 = 0x1FFF. */
89 #define LEAF_MASK 0x1FFF
/* A node in the "name path": the chain of links that records how the
 * current SV/OP/magic was reached from the root of the traversal.
 * (Member declarations are not all visible in this sampled view.) */
91 typedef struct npath_node_st npath_node_t;
92 struct npath_node_st {
107 /* My hunch (not measured) is that for most architectures pointers will
108 start with 0 bits, hence the start of this array will be hot, and the
109 end unused. So put the flags next to the hot end. */
/* NOTE(review): the fields below belong to struct state — its opening
 * brace lies outside this sampled view. */
111 int min_recurse_threshold;
112 /* callback hooks and data */
113 int (*add_attr_cb)(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *name, UV value);
114 void (*free_state_cb)(pTHX_ struct state *st);
115 void *state_cb_data; /* free'd by free_state() after free_state_cb() call */
116 /* this stuff will be moved to state_cb_data later */
118 FILE *node_stream_fh;
119 char *node_stream_name;
/* ---- name-path tracking machinery ----
 * These macros maintain a stack of npath_node_t entries on the C stack so
 * size callbacks can report *where* in the data structure each byte was
 * found. (Sampled view: some macro continuation lines are not visible.) */
122 #define ADD_SIZE(st, leafname, bytes) (NPathAddSizeCb(st, leafname, bytes) (st)->total_size += (bytes))
124 #define PATH_TRACKING
/* pPATH: the extra parameter every *_size() function takes for its path */
127 #define pPATH npath_node_t *NPathArg
129 /* A subtle point here is that dNPathNodes and NPathPushNode leave NP pointing
130 * to the next unused slot (though with prev already filled in)
131 * whereas NPathLink leaves NP unchanged, it just fills in the slot NP points
132 * to and passes that NP value to the function being called.
133 * seqn==0 indicates the node is new (hasn't been output yet)
135 #define dNPathNodes(nodes, prev_np) \
136 npath_node_t name_path_nodes[nodes+1]; /* +1 for NPathLink */ \
137 npath_node_t *NP = &name_path_nodes[0]; \
138 NP->seqn = NP->type = 0; NP->id = Nullch; /* safety/debug */ \
140 #define NPathPushNode(nodeid, nodetype) \
142 NP->type = nodetype; \
144 if(0)fprintf(stderr,"NPathPushNode (%p <-) %p <- [%d %s]\n", NP->prev, NP, nodetype,(char*)nodeid);\
146 NP->id = Nullch; /* safety/debug */ \
149 #define NPathSetNode(nodeid, nodetype) \
150 (NP-1)->id = nodeid; \
151 (NP-1)->type = nodetype; \
152 if(0)fprintf(stderr,"NPathSetNode (%p <-) %p <- [%d %s]\n", (NP-1)->prev, (NP-1), nodetype,(char*)nodeid);\
154 #define NPathPopNode \
157 /* dNPathUseParent points NP directly at the parents' name_path_nodes array
158 * So the function can only safely call ADD_*() but not NPathLink, unless the
159 * caller has spare nodes in its name_path_nodes.
161 #define dNPathUseParent(prev_np) npath_node_t *NP = (((prev_np+1)->prev = prev_np), prev_np+1)
/* Node types: what kind of thing a path node's id identifies */
163 #define NPtype_NAME 0x01
164 #define NPtype_LINK 0x02
165 #define NPtype_SV 0x03
166 #define NPtype_MAGIC 0x04
167 #define NPtype_OP 0x05
169 /* XXX these should probably be generalized into flag bits */
170 #define NPattr_LEAFSIZE 0x00
171 #define NPattr_NAME 0x01
172 #define NPattr_PADFAKE 0x02
173 #define NPattr_PADNAME 0x03
174 #define NPattr_PADTMP 0x04
175 #define NPattr_NOTE 0x05
176 #define NPattr_PRE_ATTR 0x06
/* Invoke the user's attribute callback, if one is installed (short-circuit
 * via && means no callback -> no work) */
178 #define _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, np) (st->add_attr_cb && st->add_attr_cb(aTHX_ st, np, attr_type, attr_name, attr_value))
179 #define ADD_ATTR(st, attr_type, attr_name, attr_value) _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, NP-1)
180 #define ADD_PRE_ATTR(st, attr_type, attr_name, attr_value) (assert(!attr_type), _ADD_ATTR_NP(st, NPattr_PRE_ATTR, attr_name, attr_value, NP-1))
/* Fill in the slot NP points at and hand it on, leaving NP unchanged */
182 #define _NPathLink(np, nid, ntype) (((np)->id=nid), ((np)->type=ntype), ((np)->seqn=0))
183 #define NPathLink(nid) (_NPathLink(NP, nid, NPtype_LINK), NP)
184 /* add a link and a name node to the path - a special case for op_size */
185 #define NPathLinkAndNode(nid, nid2) (_NPathLink(NP, nid, NPtype_LINK), _NPathLink(NP+1, nid2, NPtype_NAME), ((NP+1)->prev=NP), (NP+1))
186 #define NPathOpLink (NPathArg)
187 #define NPathAddSizeCb(st, name, bytes) (st->add_attr_cb && st->add_attr_cb(aTHX_ st, NP-1, NPattr_LEAFSIZE, (name), (bytes))),
/* Non-tracking build: all path machinery compiles away to no-ops */
191 #define NPathAddSizeCb(st, name, bytes)
192 #define pPATH void *npath_dummy /* XXX ideally remove */
193 #define dNPathNodes(nodes, prev_np) dNOOP
194 #define NPathLink(nodeid, nodetype) NULL
195 #define NPathOpLink NULL
196 #define ADD_ATTR(st, attr_type, attr_name, attr_value) NOOP
198 #endif /* PATH_TRACKING */
/* Human-readable names for SV body types, indexed by SvTYPE().
 * The table layout tracks the perl version being compiled against, since
 * the SVt_* ordering changed across 5.8/5.10/5.12/5.14. */
205 static const char *svtypenames[SVt_LAST] = {
207 "NULL", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVBM", "PVLV", "PVAV", "PVHV", "PVCV", "PVGV", "PVFM", "PVIO",
208 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
209 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
210 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
211 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
212 #elif PERL_VERSION < 13
213 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
215 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
/* Print a short human-readable identification of one path node to fp.
 * How npath_node->id is interpreted depends on npath_node->type.
 * (Sampled view: some closing braces/case labels are not visible.) */
220 np_print_node_name(pTHX_ FILE *fp, npath_node_t *npath_node)
222 char buf[1024]; /* XXX */
224 switch (npath_node->type) {
225 case NPtype_SV: { /* id is pointer to the SV sv_size was called on */
226 const SV *sv = (SV*)npath_node->id;
227 int type = SvTYPE(sv);
228 char *typename = (type == SVt_IV && SvROK(sv)) ? "RV" : svtypenames[type];
229 fprintf(fp, "SV(%s)", typename);
230 switch(type) { /* add some useful details */
231 case SVt_PVAV: fprintf(fp, " fill=%d/%ld", av_len((AV*)sv), AvMAX((AV*)sv)); break;
232 case SVt_PVHV: fprintf(fp, " fill=%ld/%ld", HvFILL((HV*)sv), HvMAX((HV*)sv)); break;
236 case NPtype_OP: { /* id is pointer to the OP op_size was called on */
237 const OP *op = (OP*)npath_node->id;
238 fprintf(fp, "OP(%s)", OP_NAME(op));
241 case NPtype_MAGIC: { /* id is pointer to the MAGIC struct */
242 MAGIC *magic_pointer = (MAGIC*)npath_node->id;
243 /* XXX it would be nice if we could reuse mg_names.c [sigh] */
244 fprintf(fp, "MAGIC(%c)", magic_pointer->mg_type ? magic_pointer->mg_type : '0');
/* NOTE(review): the two cases below presumably print NPtype_LINK and
 * NPtype_NAME ids as plain strings — case labels not visible here. */
248 fprintf(fp, "%s", npath_node->id);
251 fprintf(fp, "%s", npath_node->id);
253 default: /* assume id is a string pointer */
254 fprintf(fp, "UNKNOWN(%d,%p)", npath_node->type, npath_node->id);
/* Emit dump-output indentation — presumably one ": " per level of depth;
 * the loop over depth is not visible in this sampled view. */
261 np_dump_indent(int depth) {
263 fprintf(stderr, ": ");
/* Walk from the path root down to npath_node (by recursing up the prev
 * chain first), assigning depth and a fresh output sequence number to each
 * node not yet output (seqn==0), and invoking cb once per such node.
 * If cb asks for a node to be ignored, its seqn is handed back and the
 * deeper node is re-parented past it. */
267 np_walk_new_nodes(pTHX_ struct state *st,
268 npath_node_t *npath_node,
269 npath_node_t *npath_node_deeper,
270 int (*cb)(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper))
272 if (npath_node->seqn) /* node already output */
275 if (npath_node->prev) {
276 np_walk_new_nodes(aTHX_ st, npath_node->prev, npath_node, cb); /* recurse */
277 npath_node->depth = npath_node->prev->depth + 1;
279 else npath_node->depth = 0;
/* allocate the next output sequence number to this node */
280 npath_node->seqn = ++st->seqn;
283 if (cb(aTHX_ st, npath_node, npath_node_deeper)) {
284 /* ignore this node */
285 assert(npath_node->prev);
286 assert(npath_node->depth);
287 assert(npath_node_deeper);
/* give back the sequence number and splice this node out of the chain */
289 npath_node->seqn = --st->seqn;
290 npath_node_deeper->prev = npath_node->prev;
/* np_walk_new_nodes() callback for the stderr "dump" output style:
 * indent by depth, print the node name, and append seqn/depth info. */
298 np_dump_formatted_node(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
299 if (0 && npath_node->type == NPtype_LINK)
301 np_dump_indent(npath_node->depth);
302 np_print_node_name(aTHX_ stderr, npath_node);
303 if (npath_node->type == NPtype_LINK)
304 fprintf(stderr, "->"); /* cosmetic */
305 fprintf(stderr, "\t\t[#%ld @%u] ", npath_node->seqn, npath_node->depth);
306 fprintf(stderr, "\n");
/* add_attr_cb implementation for the stderr "dump" output style: print any
 * not-yet-seen path nodes, then the attribute itself, formatted by type.
 * Returns 0 for zero-sized leaves so they are skipped.
 * (Sampled view: the switch(attr_type) header and some case labels are
 * not visible.) */
311 np_dump_node_path_info(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
313 if (attr_type == NPattr_LEAFSIZE && !attr_value)
314 return 0; /* ignore zero sized leaf items */
315 np_walk_new_nodes(aTHX_ st, npath_node, NULL, np_dump_formatted_node);
316 np_dump_indent(npath_node->depth+1);
318 case NPattr_LEAFSIZE:
319 fprintf(stderr, "+%ld %s =%ld", attr_value, attr_name, attr_value+st->total_size);
322 fprintf(stderr, "~NAMED('%s') %lu", attr_name, attr_value);
325 fprintf(stderr, "~note %s %lu", attr_name, attr_value);
330 fprintf(stderr, "~pad%lu %s %lu", attr_type, attr_name, attr_value);
333 fprintf(stderr, "~??? %s %lu", attr_name, attr_value);
336 fprintf(stderr, "\n");
/* np_walk_new_nodes() callback for the machine-readable stream output:
 * one "-type seqn depth name" line per new node on node_stream_fh. */
341 np_stream_formatted_node(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
342 fprintf(st->node_stream_fh, "-%u %lu %u ",
343 npath_node->type, npath_node->seqn, (unsigned)npath_node->depth
345 np_print_node_name(aTHX_ st->node_stream_fh, npath_node);
346 fprintf(st->node_stream_fh, "\n");
/* add_attr_cb implementation for the machine-readable stream output:
 * emit any new path nodes first, then either an attribute record
 * ("type seqn ...") or a leaf-size record ("L seqn bytes name"). */
351 np_stream_node_path_info(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
353 if (!attr_type && !attr_value)
354 return 0; /* ignore zero sized leaf items */
355 np_walk_new_nodes(aTHX_ st, npath_node, NULL, np_stream_formatted_node);
356 if (attr_type) { /* Attribute type, name and value */
357 fprintf(st->node_stream_fh, "%lu %lu ", attr_type, npath_node->seqn);
359 else { /* Leaf name and memory size */
360 fprintf(st->node_stream_fh, "L %lu ", npath_node->seqn);
362 fprintf(st->node_stream_fh, "%lu %s\n", attr_value, attr_name);
367 #endif /* PATH_TRACKING */
/* check_new: the core "have we seen this pointer before?" test.
 * Returns TRUE if p is new (and records it in the segmented bitstring),
 * FALSE if NULL or already seen. Also probes *p so that an invalid
 * pointer can be caught by the SEH machinery and warned about.
 * (Sampled view: the do/while tree-descent braces and the epilogue that
 * sets the leaf bit are not all visible.) */
371 Checks to see if thing is in the bitstring.
372 Returns true or false, and
373 notes thing in the segmented bitstring.
376 check_new(struct state *st, const void *const p) {
377 unsigned int bits = 8 * sizeof(void*);
378 const size_t raw_p = PTR2nat(p);
379 /* This effectively rotates the value right by the number of low always-0
380 bits in an aligned pointer. The assumption is that most (if not all)
381 pointers are aligned, and these will be in the same chain of nodes
382 (and hence hot in the cache) but we can still deal with any unaligned
384 const size_t cooked_p
385 = (raw_p >> ALIGN_BITS) | (raw_p << (bits - ALIGN_BITS));
386 const U8 this_bit = 1 << (cooked_p & 0x7);
390 void **tv_p = (void **) (st->tracking);
392 if (NULL == p) return FALSE;
/* Deliberate read through p: faults here are caught by CAUGHT_EXCEPTION */
394 const char c = *(const char *)p;
397 if (st->dangle_whine)
398 warn( "Devel::Size: Encountered invalid pointer: %p\n", p );
404 /* bits now 24 (32 bit pointers) or 56 (64 bit pointers) */
406 /* First level is always present. */
408 i = (unsigned int)((cooked_p >> bits) & 0xFF);
/* allocate the next 256-way interior level on first touch */
410 Newxz(tv_p[i], 256, void *);
411 tv_p = (void **)(tv_p[i]);
413 } while (bits > LEAF_BITS + BYTE_BITS);
414 /* bits now 16 always */
415 #if !defined(MULTIPLICITY) || PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8)
416 /* 5.8.8 and early have an assert() macro that uses Perl_croak, hence needs
417 a my_perl under multiplicity */
420 leaf_p = (U8 **)tv_p;
421 i = (unsigned int)((cooked_p >> bits) & 0xFF);
/* allocate the 8KB leaf bitmap on first touch */
423 Newxz(leaf_p[i], 1 << LEAF_BITS, U8);
428 i = (unsigned int)((cooked_p >> BYTE_BITS) & LEAF_MASK);
430 if(leaf[i] & this_bit)
/* Recursively free one level of the pointer-tracking tree.
 * (Sampled view: the loop over the 256 children is not visible.) */
438 free_tracking_at(void **tv, int level)
446 free_tracking_at((void **) tv[i], level);
/* Tear down a struct state: run the user's free callback first, then
 * release the callback data and the whole tracking tree. */
460 free_state(pTHX_ struct state *st)
462 const int top_level = (sizeof(void *) * 8 - LEAF_BITS - BYTE_BITS) / 8;
463 if (st->free_state_cb)
464 st->free_state_cb(aTHX_ st);
465 if (st->state_cb_data)
466 Safefree(st->state_cb_data);
467 free_tracking_at((void **)st->tracking, top_level);
471 /* For now, this is somewhat a compatibility bodge until the plan comes
472 together for fine grained recursion control. total_size() would recurse into
473 hash and array members, whereas sv_size() would not. However, sv_size() is
474 called with CvSTASH() of a CV, which means that if it (also) starts to
475 recurse fully, then the size of any CV now becomes the size of the entire
476 symbol table reachable from it, and potentially the entire symbol table, if
477 any subroutine makes a reference to a global (such as %SIG). The historical
478 implementation of total_size() didn't report "everything", and changing the
479 only available size to "everything" doesn't feel at all useful. */
/* Recursion levels passed as the 'recurse' argument throughout this file */
481 #define NO_RECURSION 0
482 #define SOME_RECURSION 1
483 #define TOTAL_SIZE_RECURSION 2
/* Forward declaration: sv_size is mutually recursive with the other
 * *_size helpers below */
485 static void sv_size(pTHX_ struct state *, pPATH, const SV *const, const int recurse);
/* NOTE(review): the line below is a fragment of the OPc_* opclass enum
 * whose opening lies outside this sampled view. */
501 , OPc_CONDOP /* 12 */
/* Classify an OP so we know which op struct (and hence size) it uses.
 * Looks derived from B.xs's cc_opclass — TODO confirm.
 * (Sampled view: several case labels and return statements are missing.) */
510 cc_opclass(const OP * const o)
516 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
518 if (o->op_type == OP_SASSIGN)
519 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
522 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_AELEMFAST)
526 if ((o->op_type == OP_TRANS)) {
/* General case: consult the class bits in the opargs table */
530 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
562 #ifdef OA_PVOP_OR_SVOP
563 case OA_PVOP_OR_SVOP: TAG;
565 * Character translations (tr///) are usually a PVOP, keeping a
566 * pointer to a table of shorts used to look up translations.
567 * Under utf8, however, a simple table isn't practical; instead,
568 * the OP is an SVOP, and the SV is a reference to a swash
569 * (i.e., an RV pointing to an HV).
571 return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF))
572 ? OPc_SVOP : OPc_PVOP;
581 case OA_BASEOP_OR_UNOP: TAG;
583 * UNI(OP_foo) in toke.c returns token UNI or FUNC1 depending on
584 * whether parens were seen. perly.y uses OPf_SPECIAL to
585 * signal whether a BASEOP had empty parens or none.
586 * Some other UNOPs are created later, though, so the best
587 * test is OPf_KIDS, which is set in newUNOP.
589 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
591 case OA_FILESTATOP: TAG;
593 * The file stat OPs are created via UNI(OP_foo) in toke.c but use
594 * the OPf_REF flag to distinguish between OP types instead of the
595 * usual OPf_SPECIAL flag. As usual, if OPf_KIDS is set, then we
596 * return OPc_UNOP so that walkoptree can find our children. If
597 * OPf_KIDS is not set then we check OPf_REF. Without OPf_REF set
598 * (no argument to the operator) it's an OP; with OPf_REF set it's
599 * an SVOP (and op_sv is the GV for the filehandle argument).
601 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
603 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
605 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
607 case OA_LOOPEXOP: TAG;
609 * next, last, redo, dump and goto use OPf_SPECIAL to indicate that a
610 * label was omitted (in which case it's a BASEOP) or else a term was
611 * seen. In this last case, all except goto are definitely PVOP but
612 * goto is either a PVOP (with an ordinary constant label), an UNOP
613 * with OPf_STACKED (with a non-constant non-sub) or an UNOP for
614 * OP_REFGEN (with goto &sub) in which case OPf_STACKED also seems to
617 if (o->op_flags & OPf_STACKED)
619 else if (o->op_flags & OPf_SPECIAL)
/* Fallthrough/default: unknown class — warn and treat as BASEOP */
629 warn("Devel::Size: Can't determine class of operator %s, assuming BASEOP\n",
630 PL_op_name[o->op_type]);
/* Walk the SV's chain of MAGIC structs, adding the size of each MAGIC,
 * its mg_obj / mg_ptr payloads, and any utf8-cache data. Vtables are
 * deliberately not counted (static structures). A non-SvMAGICAL SV with
 * magic attached triggers a warning and is skipped.
 * (Sampled view: some braces and the loop epilogue are not visible.) */
636 /* Figure out how much magic is attached to the SV and return the
639 magic_size(pTHX_ const SV * const thing, struct state *st, pPATH) {
640 dNPathNodes(1, NPathArg);
641 MAGIC *magic_pointer = SvMAGIC(thing);
646 if (!SvMAGICAL(thing)) {
648 warn("Ignoring suspect magic on this SV\n");
654 /* push a dummy node for NPathSetNode to update inside the while loop */
655 NPathPushNode("dummy", NPtype_NAME);
657 /* Have we seen the magic pointer? (NULL has always been seen before) */
658 while (check_new(st, magic_pointer)) {
660 NPathSetNode(magic_pointer, NPtype_MAGIC);
662 ADD_SIZE(st, "mg", sizeof(MAGIC));
663 /* magic vtables aren't freed when magic is freed, so don't count them.
664 (They are static structures. Anything that assumes otherwise is buggy.)
669 /* XXX only chase mg_obj if mg->mg_flags & MGf_REFCOUNTED ? */
670 sv_size(aTHX_ st, NPathLink("mg_obj"), magic_pointer->mg_obj, TOTAL_SIZE_RECURSION);
671 if (magic_pointer->mg_len == HEf_SVKEY) {
672 sv_size(aTHX_ st, NPathLink("mg_ptr"), (SV *)magic_pointer->mg_ptr, TOTAL_SIZE_RECURSION);
674 #if defined(PERL_MAGIC_utf8) && defined (PERL_MAGIC_UTF8_CACHESIZE)
675 else if (magic_pointer->mg_type == PERL_MAGIC_utf8) {
676 if (check_new(st, magic_pointer->mg_ptr)) {
677 ADD_SIZE(st, "PERL_MAGIC_utf8", PERL_MAGIC_UTF8_CACHESIZE * 2 * sizeof(STRLEN));
681 /* XXX also handle mg->mg_type == PERL_MAGIC_utf8 ? */
682 else if (magic_pointer->mg_len > 0) {
683 if(0)do_magic_dump(0, Perl_debug_log, magic_pointer, 0, 0, FALSE, 0);
684 if (check_new(st, magic_pointer->mg_ptr)) {
685 ADD_SIZE(st, "mg_len", magic_pointer->mg_len);
689 /* Get the next in the chain */
690 magic_pointer = magic_pointer->mg_moremagic;
693 if (st->dangle_whine)
694 warn( "Devel::Size: Encountered bad magic at: %p\n", magic_pointer );
/* If the C string p is new, count its bytes (including the NUL) under the
 * caller's path node id. */
699 #define check_new_and_strlen(st, p, ppath) S_check_new_and_strlen(aTHX_ st, p, ppath)
701 S_check_new_and_strlen(pTHX_ struct state *st, const char *const p, pPATH) {
702 dNPathNodes(1, NPathArg->prev);
703 if(check_new(st, p)) {
704 NPathPushNode(NPathArg->id, NPtype_NAME);
705 ADD_SIZE(st, NPathArg->id, 1 + strlen(p));
/* Approximate the memory used by a compiled regexp. Sizing compiled
 * regexes accurately across perl versions is known to be unreliable, and
 * the one-time carp below says so. */
710 regex_size(pTHX_ const REGEXP * const baseregex, struct state *st, pPATH) {
711 dNPathNodes(1, NPathArg);
712 if(!check_new(st, baseregex))
714 NPathPushNode("regex_size", NPtype_NAME);
715 ADD_SIZE(st, "REGEXP", sizeof(REGEXP));
716 #if (PERL_VERSION < 11)
717 /* Note the size of the paren offset thing */
718 ADD_SIZE(st, "nparens", sizeof(I32) * baseregex->nparens * 2);
719 ADD_SIZE(st, "precomp", strlen(baseregex->precomp));
/* 5.12+: the regexp body hangs off SvANY */
721 ADD_SIZE(st, "regexp", sizeof(struct regexp));
722 ADD_SIZE(st, "nparens", sizeof(I32) * SvANY(baseregex)->nparens * 2);
723 /*ADD_SIZE(st, strlen(SvANY(baseregex)->subbeg));*/
725 if (st->go_yell && !st->regex_whine) {
726 carp("Devel::Size: Calculated sizes for compiled regexes are incompatible, and probably always will be");
/* Size an OP tree: add the struct for this op's class (per cc_opclass),
 * then recurse along op_next and into each class's child pointers.
 * (Sampled view: several case labels, breaks and braces are missing.) */
732 op_size(pTHX_ const OP * const baseop, struct state *st, pPATH)
734 /* op_size recurses to follow the chain of opcodes. For the node path we
735 * don't want the chain to be 'nested' in the path so we use dNPathUseParent().
736 * Also, to avoid a link-to-a-link the caller should use NPathLinkAndNode()
737 * instead of NPathLink().
739 dNPathUseParent(NPathArg);
743 if(!check_new(st, baseop))
746 op_size(aTHX_ baseop->op_next, st, NPathOpLink);
748 switch (cc_opclass(baseop)) {
749 case OPc_BASEOP: TAG;
750 ADD_SIZE(st, "op", sizeof(struct op));
753 ADD_SIZE(st, "unop", sizeof(struct unop));
754 op_size(aTHX_ ((UNOP *)baseop)->op_first, st, NPathOpLink);
757 ADD_SIZE(st, "binop", sizeof(struct binop));
758 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
759 op_size(aTHX_ ((BINOP *)baseop)->op_last, st, NPathOpLink);
762 ADD_SIZE(st, "logop", sizeof(struct logop));
763 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
764 op_size(aTHX_ ((LOGOP *)baseop)->op_other, st, NPathOpLink);
767 case OPc_CONDOP: TAG;
768 ADD_SIZE(st, "condop", sizeof(struct condop));
769 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
770 op_size(aTHX_ ((CONDOP *)baseop)->op_true, st, NPathOpLink);
771 op_size(aTHX_ ((CONDOP *)baseop)->op_false, st, NPathOpLink);
774 case OPc_LISTOP: TAG;
775 ADD_SIZE(st, "listop", sizeof(struct listop));
776 op_size(aTHX_ ((LISTOP *)baseop)->op_first, st, NPathOpLink);
777 op_size(aTHX_ ((LISTOP *)baseop)->op_last, st, NPathOpLink);
780 ADD_SIZE(st, "pmop", sizeof(struct pmop));
781 op_size(aTHX_ ((PMOP *)baseop)->op_first, st, NPathOpLink);
782 op_size(aTHX_ ((PMOP *)baseop)->op_last, st, NPathOpLink);
783 #if PERL_VERSION < 9 || (PERL_VERSION == 9 && PERL_SUBVERSION < 5)
784 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplroot, st, NPathOpLink);
785 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplstart, st, NPathOpLink);
787 /* This is defined away in perl 5.8.x, but it is in there for
790 regex_size(aTHX_ PM_GETRE((PMOP *)baseop), st, NPathLink("PM_GETRE"));
792 regex_size(aTHX_ ((PMOP *)baseop)->op_pmregexp, st, NPathLink("op_pmregexp"));
796 ADD_SIZE(st, "svop", sizeof(struct svop));
797 if (!(baseop->op_type == OP_AELEMFAST
798 && baseop->op_flags & OPf_SPECIAL)) {
799 /* not an OP_PADAV replacement */
800 sv_size(aTHX_ st, NPathLink("SVOP"), ((SVOP *)baseop)->op_sv, SOME_RECURSION);
805 ADD_SIZE(st, "padop", sizeof(struct padop));
810 ADD_SIZE(st, "gvop", sizeof(struct gvop));
811 sv_size(aTHX_ st, NPathLink("GVOP"), ((GVOP *)baseop)->op_gv, SOME_RECURSION);
815 check_new_and_strlen(st, ((PVOP *)baseop)->op_pv, NPathLink("op_pv"));
818 ADD_SIZE(st, "loop", sizeof(struct loop));
819 op_size(aTHX_ ((LOOP *)baseop)->op_first, st, NPathOpLink);
820 op_size(aTHX_ ((LOOP *)baseop)->op_last, st, NPathOpLink);
821 op_size(aTHX_ ((LOOP *)baseop)->op_redoop, st, NPathOpLink);
822 op_size(aTHX_ ((LOOP *)baseop)->op_nextop, st, NPathOpLink);
823 op_size(aTHX_ ((LOOP *)baseop)->op_lastop, st, NPathOpLink);
828 basecop = (COP *)baseop;
829 ADD_SIZE(st, "cop", sizeof(struct cop));
831 /* Change 33656 by nicholas@mouse-mill on 2008/04/07 11:29:51
832 Eliminate cop_label from struct cop by storing a label as the first
833 entry in the hints hash. Most statements don't have labels, so this
834 will save memory. Not sure how much.
835 The check below will incorrectly fail on bleadperls
836 before 5.11 @33656, but later than 5.10, producing slightly too
837 small memory sizes on these Perls. */
838 #if (PERL_VERSION < 11)
839 check_new_and_strlen(st, basecop->cop_label, NPathLink("cop_label"));
842 check_new_and_strlen(st, basecop->cop_file, NPathLink("cop_file"));
843 check_new_and_strlen(st, basecop->cop_stashpv, NPathLink("cop_stashpv"));
845 if (SvREFCNT(basecop->cop_stash) == 1) /* XXX hack? */
846 sv_size(aTHX_ st, NPathLink("cop_stash"), (SV *)basecop->cop_stash, SOME_RECURSION);
847 sv_size(aTHX_ st, NPathLink("cop_filegv"), (SV *)basecop->cop_filegv, SOME_RECURSION);
/* Dangling-op handler: presumably the CAUGHT_EXCEPTION arm — TODO confirm */
857 if (st->dangle_whine)
858 warn( "Devel::Size: Encountered dangling pointer in opcode at: %p\n", baseop );
/* Size a hash-entry key (HEK). Keys may be shared across hashes, so only
 * the first sighting counts; shared keys also carry shared_he overhead on
 * 5.10+, a plain struct he before that. */
863 hek_size(pTHX_ struct state *st, HEK *hek, U32 shared, pPATH)
865 dNPathNodes(1, NPathArg);
867 /* Hash keys can be shared. Have we seen this before? */
868 if (!check_new(st, hek))
870 NPathPushNode("hek", NPtype_NAME);
871 ADD_SIZE(st, "hek_len", HEK_BASESIZE + hek->hek_len
873 + 1 /* No hash key flags prior to 5.8.0 */
879 #if PERL_VERSION < 10
880 ADD_SIZE(st, "he", sizeof(struct he));
882 ADD_SIZE(st, "shared_he", STRUCT_OFFSET(struct shared_he, shared_he_hek));
/* Per-SvTYPE body sizes, indexed by SvTYPE(). One table per perl-version
 * era, since body struct layouts and the SVt_* ordering changed across
 * 5.8/5.10.0/5.10.1/5.12/5.14. MAYBE_PURIFY/MAYBE_OFFSET account for
 * perl's body-arena trick of allocating truncated bodies (disabled under
 * Purify builds, where full structs are used). */
888 #if PERL_VERSION < 8 || PERL_SUBVERSION < 9
893 # define MAYBE_PURIFY(normal, pure) (pure)
894 # define MAYBE_OFFSET(struct_name, member) 0
896 # define MAYBE_PURIFY(normal, pure) (normal)
897 # define MAYBE_OFFSET(struct_name, member) STRUCT_OFFSET(struct_name, member)
900 const U8 body_sizes[SVt_LAST] = {
903 MAYBE_PURIFY(sizeof(IV), sizeof(XPVIV)), /* SVt_IV */
904 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
905 sizeof(XRV), /* SVt_RV */
906 sizeof(XPV), /* SVt_PV */
907 sizeof(XPVIV), /* SVt_PVIV */
908 sizeof(XPVNV), /* SVt_PVNV */
909 sizeof(XPVMG), /* SVt_PVMG */
910 sizeof(XPVBM), /* SVt_PVBM */
911 sizeof(XPVLV), /* SVt_PVLV */
912 sizeof(XPVAV), /* SVt_PVAV */
913 sizeof(XPVHV), /* SVt_PVHV */
914 sizeof(XPVCV), /* SVt_PVCV */
915 sizeof(XPVGV), /* SVt_PVGV */
916 sizeof(XPVFM), /* SVt_PVFM */
917 sizeof(XPVIO) /* SVt_PVIO */
918 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
922 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
924 MAYBE_PURIFY(sizeof(xpv_allocated), sizeof(XPV)), /* SVt_PV */
925 MAYBE_PURIFY(sizeof(xpviv_allocated), sizeof(XPVIV)),/* SVt_PVIV */
926 sizeof(XPVNV), /* SVt_PVNV */
927 sizeof(XPVMG), /* SVt_PVMG */
928 sizeof(XPVGV), /* SVt_PVGV */
929 sizeof(XPVLV), /* SVt_PVLV */
930 MAYBE_PURIFY(sizeof(xpvav_allocated), sizeof(XPVAV)),/* SVt_PVAV */
931 MAYBE_PURIFY(sizeof(xpvhv_allocated), sizeof(XPVHV)),/* SVt_PVHV */
932 MAYBE_PURIFY(sizeof(xpvcv_allocated), sizeof(XPVCV)),/* SVt_PVCV */
933 MAYBE_PURIFY(sizeof(xpvfm_allocated), sizeof(XPVFM)),/* SVt_PVFM */
934 sizeof(XPVIO), /* SVt_PVIO */
935 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
939 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
941 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
942 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
943 sizeof(XPVNV), /* SVt_PVNV */
944 sizeof(XPVMG), /* SVt_PVMG */
945 sizeof(XPVGV), /* SVt_PVGV */
946 sizeof(XPVLV), /* SVt_PVLV */
947 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
948 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
949 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
950 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
951 sizeof(XPVIO) /* SVt_PVIO */
952 #elif PERL_VERSION < 13
956 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
957 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
958 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
959 sizeof(XPVNV), /* SVt_PVNV */
960 sizeof(XPVMG), /* SVt_PVMG */
961 sizeof(regexp) - MAYBE_OFFSET(regexp, xpv_cur), /* SVt_REGEXP */
962 sizeof(XPVGV), /* SVt_PVGV */
963 sizeof(XPVLV), /* SVt_PVLV */
964 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
965 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
966 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
967 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
968 sizeof(XPVIO) /* SVt_PVIO */
973 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
974 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
975 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
976 sizeof(XPVNV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVNV */
977 sizeof(XPVMG), /* SVt_PVMG */
978 sizeof(regexp), /* SVt_REGEXP */
979 sizeof(XPVGV), /* SVt_PVGV */
980 sizeof(XPVLV), /* SVt_PVLV */
981 sizeof(XPVAV), /* SVt_PVAV */
982 sizeof(XPVHV), /* SVt_PVHV */
983 sizeof(XPVCV), /* SVt_PVCV */
984 sizeof(XPVFM), /* SVt_PVFM */
985 sizeof(XPVIO) /* SVt_PVIO */
990 /* based on Perl_do_dump_pad() - wraps sv_size and adds ADD_ATTR calls for the pad names */
992 padlist_size(pTHX_ struct state *const st, pPATH, PADLIST *padlist,
995 dNPathUseParent(NPathArg);
/* NOTE(review): this check_new is disabled ("0 &&") — padlist dedup is
 * apparently handled elsewhere, or intentionally skipped. TODO confirm. */
1002 if( 0 && !check_new(st, padlist))
/* Slot 0 of a padlist AV holds the pad-name AV */
1005 pad_name = MUTABLE_AV(*av_fetch(MUTABLE_AV(padlist), 0, FALSE));
1006 pname = AvARRAY(pad_name);
/* Emit one attribute per named/fake/temp pad slot (slot 0 is the names) */
1008 for (ix = 1; ix <= AvFILLp(pad_name); ix++) {
1009 const SV *namesv = pname[ix];
1010 if (namesv && namesv == &PL_sv_undef) {
1014 /* SvFAKE: On a pad name SV, that slot in the frame AV is a REFCNT'ed reference to a lexical from "outside" */
1016 ADD_ATTR(st, NPattr_PADFAKE, SvPVX_const(namesv), ix);
1018 ADD_ATTR(st, NPattr_PADNAME, SvPVX_const(namesv), ix);
1021 ADD_ATTR(st, NPattr_PADTMP, "SVs_PADTMP", ix);
/* Finally, size the padlist AV itself (and its contents, per recurse) */
1025 sv_size(aTHX_ st, NPathArg, (SV*)padlist, recurse);
1030 sv_size(pTHX_ struct state *const st, pPATH, const SV * const orig_thing,
1031 const int recurse) {
1032 const SV *thing = orig_thing;
1033 dNPathNodes(3, NPathArg);
1036 if(!check_new(st, orig_thing))
1039 type = SvTYPE(thing);
1040 if (type > SVt_LAST) {
1041 warn("Devel::Size: Unknown variable type: %d encountered\n", type);
1044 NPathPushNode(thing, NPtype_SV);
1045 ADD_SIZE(st, "sv_head", sizeof(SV));
1046 ADD_SIZE(st, "sv_body", body_sizes[type]);
1049 #if (PERL_VERSION < 11)
1050 /* Is it a reference? */
1055 if(recurse && SvROK(thing)) /* XXX maybe don't follow weakrefs */
1056 sv_size(aTHX_ st, (SvWEAKREF(thing) ? NPathLink("weakRV") : NPathLink("RV")), SvRV_const(thing), recurse);
1060 /* Is there anything in the array? */
1061 if (AvMAX(thing) != -1) {
1062 /* an array with 10 slots has AvMax() set to 9 - te 2007-04-22 */
1063 ADD_SIZE(st, "av_max", sizeof(SV *) * (AvMAX(thing) + 1));
1064 dbg_printf(("total_size: %li AvMAX: %li av_len: $i\n", st->total_size, AvMAX(thing), av_len((AV*)thing)));
1066 if (recurse >= st->min_recurse_threshold) {
1067 SSize_t i = AvFILLp(thing) + 1;
1070 ADD_PRE_ATTR(st, 0, "index", i);
1071 sv_size(aTHX_ st, NPathLink("AVelem"), AvARRAY(thing)[i], recurse);
1075 /* Add in the bits on the other side of the beginning */
1077 dbg_printf(("total_size %li, sizeof(SV *) %li, AvARRAY(thing) %li, AvALLOC(thing)%li , sizeof(ptr) %li \n",
1078 st->total_size, sizeof(SV*), AvARRAY(thing), AvALLOC(thing), sizeof( thing )));
1080 /* under Perl 5.8.8 64bit threading, AvARRAY(thing) was a pointer while AvALLOC was 0,
1081 resulting in grossly overstated sized for arrays. Technically, this shouldn't happen... */
1082 if (AvALLOC(thing) != 0) {
1083 ADD_SIZE(st, "AvALLOC", (sizeof(SV *) * (AvARRAY(thing) - AvALLOC(thing))));
1085 #if (PERL_VERSION < 9)
1086 /* Is there something hanging off the arylen element?
1087 Post 5.9.something this is stored in magic, so will be found there,
1088 and Perl_av_arylen_p() takes a non-const AV*, hence compilers rightly
1089 complain about AvARYLEN() passing thing to it. */
1090 sv_size(aTHX_ st, NPathLink("ARYLEN"), AvARYLEN(thing), recurse);
1095 /* Now the array of buckets */
1096 ADD_SIZE(st, "hv_max", (sizeof(HE *) * (HvMAX(thing) + 1)));
1097 if (HvENAME(thing)) {
1098 ADD_ATTR(st, NPattr_NAME, HvENAME(thing), 0);
1100 /* Now walk the bucket chain */
1101 if (HvARRAY(thing)) {
1104 for (cur_bucket = 0; cur_bucket <= HvMAX(thing); cur_bucket++) {
1105 cur_entry = *(HvARRAY(thing) + cur_bucket);
1107 /* XXX a HE should probably be a node so the keys and values are seen as pairs */
1108 ADD_SIZE(st, "he", sizeof(HE));
1109 hek_size(aTHX_ st, cur_entry->hent_hek, HvSHAREKEYS(thing), NPathLink("hent_hek"));
1110 if (recurse >= st->min_recurse_threshold) {
1111 if (orig_thing == (SV*)PL_strtab) {
1112 /* For PL_strtab the HeVAL is used as a refcnt */
1113 ADD_SIZE(st, "shared_hek", HeKLEN(cur_entry));
1116 /* I've seen a PL_strtab HeVAL == 0xC and 0x40C etc
1117 * just running perl -Mblib -Mstrict -MDevel::Size=:all -MCarp -e 'warn perl_size()'
1118 * but it seemed like a corruption - it would change come and go with irrelevant code changes.
1119 * so we protect against that here, but I'd like to know the cause.
1121 if (PTR2UV(HeVAL(cur_entry)) > 0xFFF)
1122 sv_size(aTHX_ st, NPathLink("HeVAL"), HeVAL(cur_entry), recurse);
1123 else warn("skipped suspect HeVAL %p", HeVAL(cur_entry));
1126 cur_entry = cur_entry->hent_next;
1132 /* This direct access is arguably "naughty": */
1133 struct mro_meta *meta = HvAUX(thing)->xhv_mro_meta;
1134 #if PERL_VERSION > 13 || PERL_SUBVERSION > 8
1136 I32 count = HvAUX(thing)->xhv_name_count;
1139 HEK **names = HvAUX(thing)->xhv_name_u.xhvnameu_names;
1143 hek_size(aTHX_ st, names[count], 1, NPathLink("HvAUXelem"));
1148 hek_size(aTHX_ st, HvNAME_HEK(thing), 1, NPathLink("HvNAME_HEK"));
1151 ADD_SIZE(st, "xpvhv_aux", sizeof(struct xpvhv_aux));
1153 ADD_SIZE(st, "mro_meta", sizeof(struct mro_meta));
1154 sv_size(aTHX_ st, NPathLink("mro_nextmethod"), (SV *)meta->mro_nextmethod, TOTAL_SIZE_RECURSION);
1155 #if PERL_VERSION > 10 || (PERL_VERSION == 10 && PERL_SUBVERSION > 0)
1156 sv_size(aTHX_ st, NPathLink("isa"), (SV *)meta->isa, TOTAL_SIZE_RECURSION);
1158 #if PERL_VERSION > 10
1159 sv_size(aTHX_ st, NPathLink("mro_linear_all"), (SV *)meta->mro_linear_all, TOTAL_SIZE_RECURSION);
1160 sv_size(aTHX_ st, NPathLink("mro_linear_current"), meta->mro_linear_current, TOTAL_SIZE_RECURSION);
1162 sv_size(aTHX_ st, NPathLink("mro_linear_dfs"), (SV *)meta->mro_linear_dfs, TOTAL_SIZE_RECURSION);
1163 sv_size(aTHX_ st, NPathLink("mro_linear_c3"), (SV *)meta->mro_linear_c3, TOTAL_SIZE_RECURSION);
1168 check_new_and_strlen(st, HvNAME_get(thing), NPathLink("HvNAME"));
1174 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1175 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1177 if (st->go_yell && !st->fm_whine) {
1178 carp("Devel::Size: Calculated sizes for FMs are incomplete");
1184 /* not CvSTASH, per https://rt.cpan.org/Ticket/Display.html?id=79366 */
1185 ADD_ATTR(st, NPattr_NAME, CvGV(thing) ? GvNAME(CvGV(thing)) : "UNDEFINED", 0);
1186 sv_size(aTHX_ st, NPathLink("SvSTASH"), (SV *)SvSTASH(thing), SOME_RECURSION);
1187 sv_size(aTHX_ st, NPathLink("CvGV"), (SV *)CvGV(thing), SOME_RECURSION);
1188 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1189 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1190 if (CvISXSUB(thing)) {
1191 sv_size(aTHX_ st, NPathLink("cv_const_sv"), cv_const_sv((CV *)thing), recurse);
1193 if(1)op_size(aTHX_ CvSTART(thing), st, NPathLinkAndNode("CvSTART", "OPs")); /* XXX ? */
1194 op_size(aTHX_ CvROOT(thing), st, NPathLinkAndNode("CvROOT", "OPs"));
1199 /* Some embedded char pointers */
1200 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_top_name, NPathLink("xio_top_name"));
1201 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_fmt_name, NPathLink("xio_fmt_name"));
1202 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_bottom_name, NPathLink("xio_bottom_name"));
1203 /* Throw the GVs on the list to be walked if they're not-null */
1204 sv_size(aTHX_ st, NPathLink("xio_top_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_top_gv, recurse);
1205 sv_size(aTHX_ st, NPathLink("xio_bottom_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_bottom_gv, recurse);
1206 sv_size(aTHX_ st, NPathLink("xio_fmt_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_fmt_gv, recurse);
1208 /* Only go trotting through the IO structures if they're really
1209 trottable. If USE_PERLIO is defined we can do this. If
1210 not... we can't, so we don't even try */
1212 /* Dig into xio_ifp and xio_ofp here */
1213 warn("Devel::Size: Can't size up perlio layers yet\n");
1218 #if (PERL_VERSION < 9)
1223 if(isGV_with_GP(thing)) {
1225 hek_size(aTHX_ st, GvNAME_HEK(thing), 1, NPathLink("GvNAME_HEK"));
1227 ADD_SIZE(st, "GvNAMELEN", GvNAMELEN(thing));
1229 ADD_ATTR(st, NPattr_NAME, GvNAME_get(thing), 0);
1231 hek_size(aTHX_ st, GvFILE_HEK(thing), 1, NPathLink("GvFILE_HEK"));
1232 #elif defined(GvFILE)
1233 # if !defined(USE_ITHREADS) || (PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8))
1234 /* With ithreads, before 5.8.9, this can end up pointing to freed memory
1235 if the GV was created in an eval, as GvFILE() points to CopFILE(),
1236 and the relevant COP has been freed on scope cleanup after the eval.
1237 5.8.9 adds a binary compatible fudge that catches the vast majority
1238 of cases. 5.9.something added a proper fix, by converting the GP to
1239 use a shared hash key (properly reference counted), instead of a
1240 char * (owned by who knows? possibly no-one now) */
1241 check_new_and_strlen(st, GvFILE(thing), NPathLink("GvFILE"));
1244 /* Is there something hanging off the glob? */
1245 if (check_new(st, GvGP(thing))) {
1246 ADD_SIZE(st, "GP", sizeof(GP));
1247 sv_size(aTHX_ st, NPathLink("gp_sv"), (SV *)(GvGP(thing)->gp_sv), recurse);
1248 sv_size(aTHX_ st, NPathLink("gp_av"), (SV *)(GvGP(thing)->gp_av), recurse);
1249 sv_size(aTHX_ st, NPathLink("gp_hv"), (SV *)(GvGP(thing)->gp_hv), recurse);
1250 sv_size(aTHX_ st, NPathLink("gp_cv"), (SV *)(GvGP(thing)->gp_cv), recurse);
1251 sv_size(aTHX_ st, NPathLink("gp_egv"), (SV *)(GvGP(thing)->gp_egv), recurse);
1252 sv_size(aTHX_ st, NPathLink("gp_form"), (SV *)(GvGP(thing)->gp_form), recurse);
1254 #if (PERL_VERSION >= 9)
1258 #if PERL_VERSION <= 8
1266 if(recurse && SvROK(thing))
1267 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
1268 else if (SvIsCOW_shared_hash(thing))
1269 hek_size(aTHX_ st, SvSHARED_HEK_FROM_PV(SvPVX(thing)), 1, NPathLink("SvSHARED_HEK_FROM_PV"));
1271 ADD_SIZE(st, "SvLEN", SvLEN(thing));
1275 SvOOK_offset(thing, len);
1276 ADD_SIZE(st, "SvOOK", len);
1282 if (type >= SVt_PVMG) {
1283 magic_size(aTHX_ thing, st, NPathLink("MG"));
/* free_memnode_state: cleanup callback installed as st->free_state_cb by
 * new_state(). Closes the node-path output stream, if one was opened:
 * a name beginning with '|' was opened with popen() and must be closed
 * with pclose(); anything else was fopen()ed and gets fclose().
 * A failure in either close is reported with warn(), not treated as fatal. */
1290 free_memnode_state(pTHX_ struct state *st)
1292     if (st->node_stream_fh && st->node_stream_name && *st->node_stream_name) {
1293         if (*st->node_stream_name == '|') {
/* pclose() returns the child's wait status; non-zero means the pipe command failed */
1294             if (pclose(st->node_stream_fh))
1295                 warn("%s exited with an error status\n", st->node_stream_name);
1298             if (fclose(st->node_stream_fh))
1299                 warn("Error closing %s: %s\n", st->node_stream_name, strerror(errno));
/* new_state: allocate and initialise a fresh traversal-state struct.
 * Reads $Devel::Size::warn and $Devel::Size::dangle to configure the
 * whine flags, pre-marks the immortal SVs as already seen so they are
 * never sized, and (under PATH_TRACKING) wires up node-path output
 * controlled by the PERL_DMEM environment variable. */
1304 static struct state *
/* Newxz zero-fills, so all flags/counters start at 0/NULL */
1310     Newxz(st, 1, struct state);
/* default recursion threshold; callers may lower it to NO_RECURSION
 * to force recursion everywhere (see the perl_size XSUB below) */
1312     st->min_recurse_threshold = TOTAL_SIZE_RECURSION;
/* $Devel::Size::warn turns on both the general and the dangling-pointer warnings */
1313     if (NULL != (warn_flag = perl_get_sv("Devel::Size::warn", FALSE))) {
1314         st->dangle_whine = st->go_yell = SvIV(warn_flag) ? TRUE : FALSE;
1316     if (NULL != (warn_flag = perl_get_sv("Devel::Size::dangle", FALSE))) {
1317         st->dangle_whine = SvIV(warn_flag) ? TRUE : FALSE;
/* mark the shared immortal SVs as seen up-front so they are not
 * attributed to whichever data structure happens to reach them first */
1319     check_new(st, &PL_sv_undef);
1320     check_new(st, &PL_sv_no);
1321     check_new(st, &PL_sv_yes);
1322 #if PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 0)
1323     check_new(st, &PL_sv_placeholder);
1326 #ifdef PATH_TRACKING
1327     /* XXX quick hack */
/* PERL_DMEM="|cmd" streams node-path records to cmd via a pipe;
 * PERL_DMEM="file" writes them to that file; unset/empty falls back
 * to the plain dump callback below */
1328     st->node_stream_name = getenv("PERL_DMEM");
1329     if (st->node_stream_name) {
1330         if (*st->node_stream_name) {
1331             if (*st->node_stream_name == '|')
1332                 st->node_stream_fh = popen(st->node_stream_name+1, "w");
1334                 st->node_stream_fh = fopen(st->node_stream_name, "wb");
1335             if (!st->node_stream_fh)
1336                 croak("Can't open '%s' for writing: %s", st->node_stream_name, strerror(errno));
1337             setlinebuf(st->node_stream_fh); /* XXX temporary for debugging */
1338             st->add_attr_cb = np_stream_node_path_info;
1341         st->add_attr_cb = np_dump_node_path_info;
/* ensure the stream opened above is closed when the state is freed */
1343     st->free_state_cb = free_memnode_state;
1349 /* XXX based on S_visit() in sv.c */
/* unseen_sv_size: sweep every SV arena and size any live SV the main
 * traversal missed, recording them under an "unseen" node. Once the
 * rest of the code reliably visits all SVs, anything found here is a leak. */
1351 unseen_sv_size(pTHX_ struct state *st, pPATH)
1356     dNPathNodes(1, NPathArg);
1358     NPathPushNode("unseen", NPtype_NAME);
1360     /* by this point we should have visited all the SVs
1361      * so now we'll run through all the SVs via the arenas
1362      * in order to find any that we've missed for some reason.
1363      * Once the rest of the code is finding all the SVs then any
1364      * found here will be leaks.
 */
/* walk the arena chain; the arena-root SV's SvANY links to the next arena */
1366     for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
/* the arena root stores the arena's SV count in its refcount slot */
1367         const SV * const svend = &sva[SvREFCNT(sva)];
1369         for (sv = sva + 1; sv < svend; ++sv) {
/* SVTYPEMASK type marks a freed SV head; skip those */
1370             if (SvTYPE(sv) != (svtype)SVTYPEMASK && SvREFCNT(sv)) {
1371                 sv_size(aTHX_ st, NPathLink("arena"), sv, TOTAL_SIZE_RECURSION);
/* freed heads should already have been seen by perl_size's free-list walk */
1373             else if (check_new(st, sv)) { /* sanity check */
1375                 warn("unseen_sv_size encountered freed SV unexpectedly"); /* XXX warn uses an SV, I think */
/* perl_size: account for everything reachable from the current interpreter.
 * Strategy: size %main:: (PL_defstash) with full recursion, then a grab-bag
 * of interpreter-global SVs under an "others" node, then PL_strtab, the
 * free SV-head arenas, and finally sweep the arenas for anything missed
 * (unseen_sv_size). Totals accumulate into st->total_size via ADD_SIZE. */
1382 perl_size(pTHX_ struct state *const st, pPATH)
1384     dNPathNodes(3, NPathArg);
1386     /* if(!check_new(st, interp)) return; */
1387     NPathPushNode("perl", NPtype_NAME);
1393 * unknown <== = O/S Heap size - perl - free_malloc_space
1395     /* start with PL_defstash to get everything reachable from \%main:: */
1396     sv_size(aTHX_ st, NPathLink("PL_defstash"), (SV*)PL_defstash, TOTAL_SIZE_RECURSION);
1398     NPathPushNode("others", NPtype_NAME); /* group these (typically much smaller) items */
1399     sv_size(aTHX_ st, NPathLink("PL_defgv"), (SV*)PL_defgv, TOTAL_SIZE_RECURSION);
1400     sv_size(aTHX_ st, NPathLink("PL_incgv"), (SV*)PL_incgv, TOTAL_SIZE_RECURSION);
1401     sv_size(aTHX_ st, NPathLink("PL_rs"), (SV*)PL_rs, TOTAL_SIZE_RECURSION);
1402     sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1403     sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1404     sv_size(aTHX_ st, NPathLink("PL_errors"), (SV*)PL_errors, TOTAL_SIZE_RECURSION);
1405     sv_size(aTHX_ st, NPathLink("PL_stashcache"), (SV*)PL_stashcache, TOTAL_SIZE_RECURSION);
1406     sv_size(aTHX_ st, NPathLink("PL_patchlevel"), (SV*)PL_patchlevel, TOTAL_SIZE_RECURSION);
1407     sv_size(aTHX_ st, NPathLink("PL_apiversion"), (SV*)PL_apiversion, TOTAL_SIZE_RECURSION);
1408     sv_size(aTHX_ st, NPathLink("PL_registered_mros"), (SV*)PL_registered_mros, TOTAL_SIZE_RECURSION);
1410     sv_size(aTHX_ st, NPathLink("PL_regex_padav"), (SV*)PL_regex_padav, TOTAL_SIZE_RECURSION);
1412     sv_size(aTHX_ st, NPathLink("PL_warnhook"), (SV*)PL_warnhook, TOTAL_SIZE_RECURSION);
1413     sv_size(aTHX_ st, NPathLink("PL_diehook"), (SV*)PL_diehook, TOTAL_SIZE_RECURSION);
1414     sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1415     sv_size(aTHX_ st, NPathLink("PL_main_cv"), (SV*)PL_main_cv, TOTAL_SIZE_RECURSION);
1416     sv_size(aTHX_ st, NPathLink("PL_main_root"), (SV*)PL_main_root, TOTAL_SIZE_RECURSION);
1417     sv_size(aTHX_ st, NPathLink("PL_main_start"), (SV*)PL_main_start, TOTAL_SIZE_RECURSION);
1418     /* TODO PL_pidstatus */
1419     /* TODO PL_stashpad */
1420     /* TODO PL_compiling? COP */
1422     /* TODO stacks: cur, main, tmps, mark, scope, save */
1423     /* TODO PL_exitlist */
1424     /* TODO PL_reentrant_buffers etc */
1426     /* TODO PerlIO? PL_known_layers PL_def_layerlist PL_perlio_fd_refcnt etc */
1428     /* TODO anything missed? */
1430     /* --- by this point we should have seen all reachable SVs --- */
1432     /* in theory we shouldn't have any elements in PL_strtab that haven't been seen yet */
1433     sv_size(aTHX_ st, NPathLink("PL_strtab-unseen"), (SV*)PL_strtab, TOTAL_SIZE_RECURSION);
1435     /* unused space in sv head arenas */
1439 #  define SvARENA_CHAIN(sv) SvANY(sv) /* XXX breaks encapsulation*/
/* walk the free list of SV heads; each head should not have been seen
 * yet, since live traversal only visits SVs with a real type/refcount */
1440         while ((p = MUTABLE_SV(SvARENA_CHAIN(p)))) {
1441             if (!check_new(st, p)) /* sanity check */
1442                 warn("Free'd SV head unexpectedly already seen");
1445     NPathPushNode("unused_sv_heads", NPtype_NAME);
/* free_heads is accumulated while walking the chain above (elided here) */
1446     ADD_SIZE(st, "sv", free_heads * sizeof(SV));
1449     /* XXX iterate over bodies_by_type and crawl the free chains for each */
1451     /* iterate over all SVs to find any we've not accounted for yet */
1452     /* once the code above is visiting all SVs, any found here have been leaked */
1453     unseen_sv_size(aTHX_ st, NPathLink("unaccounted"));
/* XS glue: Perl-visible entry points for Devel::Memory. The first XSUB
 * (size/total_size, declaration elided here) sizes a single SV; its ALIAS
 * index `ix` selects the recursion mode passed to sv_size(). */
1457 MODULE = Devel::Memory          PACKAGE = Devel::Memory
1465         total_size = TOTAL_SIZE_RECURSION
1468     SV *thing = orig_thing;
1469     struct state *st = new_state(aTHX);
1471     /* If they passed us a reference then dereference it. This is the
1472        only way we can check the sizes of arrays and hashes */
1474         thing = SvRV(thing);
/* ix is the ALIAS index: 0 (size) vs TOTAL_SIZE_RECURSION (total_size) */
1477     sv_size(aTHX_ st, NULL, thing, ix);
1478     RETVAL = st->total_size;
/* free_state also runs st->free_state_cb (closes any node stream) */
1479     free_state(aTHX_ st);
1488     /* just the current perl interpreter */
1489     struct state *st = new_state(aTHX);
1490     st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1491     perl_size(aTHX_ st, NULL);
1492     RETVAL = st->total_size;
1493     free_state(aTHX_ st);
1502 /* the current perl interpreter plus malloc, in the context of total heap size */
1503 # ifdef _MALLOC_MALLOC_H_ /* OSX. Not sure where else mstats is available */
1507 /* some systems have the SVID2/XPG mallinfo structure and function */
1508 struct mstats ms = mstats(); /* mstats() first */
1510 struct state *st = new_state(aTHX);
1511 dNPathNodes(1, NULL);
1512 NPathPushNode("heap", NPtype_NAME);
1514 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1516 perl_size(aTHX_ st, NPathLink("perl_interp"));
1518 NPathSetNode("free_malloc_space", NPtype_NAME);
1519 ADD_SIZE(st, "bytes_free", ms.bytes_free);
1520 ADD_ATTR(st, NPattr_NOTE, "bytes_total", ms.bytes_total);
1521 ADD_ATTR(st, NPattr_NOTE, "bytes_used", ms.bytes_used);
1522 ADD_ATTR(st, NPattr_NOTE, "chunks_used", ms.chunks_used);
1523 ADD_ATTR(st, NPattr_NOTE, "chunks_free", ms.chunks_free);
1524 /* TODO get heap size from OS and add a node: unknown = heapsize - perl - ms.bytes_free */
1525 /* for now we use bytes_total as an approximation */
1526 NPathSetNode("unknown", NPtype_NAME);
1527 ADD_SIZE(st, "unknown", ms.bytes_total - st->total_size);
1532 RETVAL = st->total_size;
1533 free_state(aTHX_ st);