5 * Refactor this to split out D:M code from Devel::Size code.
7 * Start migrating Devel::Size's Size.xs towards the new code.
9 * ADD_PRE_ATTR for index should check if the ptr is new first. Currently we're
10 * generating lots of ADD_PRE_ATTR's for SVs that we've already seen via other paths.
11 * That's wasteful and likely to cause subtle bugs.
13 * Give HE's their own node so keys and values can be tied together
17 #undef NDEBUG /* XXX */
20 #define PERL_NO_GET_CONTEXT
27 /* Not yet in ppport.h */
29 # define CvISXSUB(cv) (CvXSUB(cv) ? TRUE : FALSE)
32 # define SvRV_const(rv) SvRV(rv)
35 # define SvOOK_offset(sv, len) STMT_START { len = SvIVX(sv); } STMT_END
38 # define SvIsCOW(sv) ((SvFLAGS(sv) & (SVf_FAKE | SVf_READONLY)) == \
39 (SVf_FAKE | SVf_READONLY))
41 #ifndef SvIsCOW_shared_hash
42 # define SvIsCOW_shared_hash(sv) (SvIsCOW(sv) && SvLEN(sv) == 0)
44 #ifndef SvSHARED_HEK_FROM_PV
45 # define SvSHARED_HEK_FROM_PV(pvx) \
46 ((struct hek*)(pvx - STRUCT_OFFSET(struct hek, hek_key)))
50 # define PL_opargs opargs
51 # define PL_op_name op_name
55 /* "structured exception" handling is a Microsoft extension to C and C++.
56 It's *not* C++ exception handling - C++ exception handling can't capture
57    SEGVs and suchlike, whereas this can. There's no known analogous
58 functionality on other platforms. */
60 # define TRY_TO_CATCH_SEGV __try
61 # define CAUGHT_EXCEPTION __except(EXCEPTION_EXECUTE_HANDLER)
63 # define TRY_TO_CATCH_SEGV if(1)
64 # define CAUGHT_EXCEPTION else
68 # define __attribute__(x)
71 #if 0 && defined(DEBUGGING)
72 #define dbg_printf(x) printf x
77 #define TAG /* printf( "# %s(%d)\n", __FILE__, __LINE__ ) */
80 /* The idea is to have a tree structure to store 1 bit per possible pointer
81    address. The lowest 16 bits are stored in a block of 8192 bytes.
82    The blocks are in a 256-way tree, indexed by the rest of the pointer.
83 This can cope with 32 and 64 bit pointers, and any address space layout,
84 without excessive memory needs. The assumption is that your CPU cache
85 works :-) (And that we're not going to bust it) */
88 #define LEAF_BITS (16 - BYTE_BITS)
89 #define LEAF_MASK 0x1FFF
91 typedef struct npath_node_st npath_node_t;
92 struct npath_node_st {
107 /* My hunch (not measured) is that for most architectures pointers will
108 start with 0 bits, hence the start of this array will be hot, and the
109 end unused. So put the flags next to the hot end. */
111 int min_recurse_threshold;
112 /* callback hooks and data */
113 int (*add_attr_cb)(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *name, UV value);
114 void (*free_state_cb)(pTHX_ struct state *st);
115 void *state_cb_data; /* free'd by free_state() after free_state_cb() call */
116 /* this stuff will be moved to state_cb_data later */
118 FILE *node_stream_fh;
119 char *node_stream_name;
122 #define ADD_SIZE(st, leafname, bytes) (NPathAddSizeCb(st, leafname, bytes) (st)->total_size += (bytes))
124 #define PATH_TRACKING
127 #define pPATH npath_node_t *NPathArg
129 /* A subtle point here is that dNPathNodes and NPathPushNode leave NP pointing
130 * to the next unused slot (though with prev already filled in)
131 * whereas NPathLink leaves NP unchanged, it just fills in the slot NP points
132 * to and passes that NP value to the function being called.
133 * seqn==0 indicates the node is new (hasn't been output yet)
135 #define dNPathNodes(nodes, prev_np) \
136 npath_node_t name_path_nodes[nodes+1]; /* +1 for NPathLink */ \
137 npath_node_t *NP = &name_path_nodes[0]; \
138 NP->seqn = NP->type = 0; NP->id = Nullch; /* safety/debug */ \
140 #define NPathPushNode(nodeid, nodetype) \
142 NP->type = nodetype; \
144 if(0)fprintf(stderr,"NPathPushNode (%p <-) %p <- [%d %s]\n", NP->prev, NP, nodetype,(char*)nodeid);\
146 NP->id = Nullch; /* safety/debug */ \
149 #define NPathSetNode(nodeid, nodetype) \
150 (NP-1)->id = nodeid; \
151 (NP-1)->type = nodetype; \
152 if(0)fprintf(stderr,"NPathSetNode (%p <-) %p <- [%d %s]\n", (NP-1)->prev, (NP-1), nodetype,(char*)nodeid);\
154 #define NPathPopNode \
157 /* dNPathUseParent points NP directly at the parent's name_path_nodes array
158 * So the function can only safely call ADD_*() but not NPathLink, unless the
159 * caller has spare nodes in its name_path_nodes.
161 #define dNPathUseParent(prev_np) npath_node_t *NP = (((prev_np+1)->prev = prev_np), prev_np+1)
163 #define NPtype_NAME 0x01
164 #define NPtype_LINK 0x02
165 #define NPtype_SV 0x03
166 #define NPtype_MAGIC 0x04
167 #define NPtype_OP 0x05
169 /* XXX these should probably be generalized into flag bits */
170 #define NPattr_LEAFSIZE 0x00
171 #define NPattr_NAME 0x01
172 #define NPattr_PADFAKE 0x02
173 #define NPattr_PADNAME 0x03
174 #define NPattr_PADTMP 0x04
175 #define NPattr_NOTE 0x05
176 #define NPattr_PRE_ATTR 0x06
178 #define _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, np) (st->add_attr_cb && st->add_attr_cb(aTHX_ st, np, attr_type, attr_name, attr_value))
179 #define ADD_ATTR(st, attr_type, attr_name, attr_value) _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, NP-1)
180 #define ADD_PRE_ATTR(st, attr_type, attr_name, attr_value) (assert(!attr_type), _ADD_ATTR_NP(st, NPattr_PRE_ATTR, attr_name, attr_value, NP-1))
182 #define _NPathLink(np, nid, ntype) (((np)->id=nid), ((np)->type=ntype), ((np)->seqn=0))
183 #define NPathLink(nid) (_NPathLink(NP, nid, NPtype_LINK), NP)
184 /* add a link and a name node to the path - a special case for op_size */
185 #define NPathLinkAndNode(nid, nid2) (_NPathLink(NP, nid, NPtype_LINK), _NPathLink(NP+1, nid2, NPtype_NAME), ((NP+1)->prev=NP), (NP+1))
186 #define NPathOpLink (NPathArg)
187 #define NPathAddSizeCb(st, name, bytes) (st->add_attr_cb && st->add_attr_cb(aTHX_ st, NP-1, NPattr_LEAFSIZE, (name), (bytes))),
191 #define NPathAddSizeCb(st, name, bytes)
192 #define pPATH void *npath_dummy /* XXX ideally remove */
193 #define dNPathNodes(nodes, prev_np) dNOOP
194 #define NPathLink(nodeid, nodetype) NULL
195 #define NPathOpLink NULL
196 #define ADD_ATTR(st, attr_type, attr_name, attr_value) NOOP
198 #endif /* PATH_TRACKING */
205 static const char *svtypenames[SVt_LAST] = {
207 "NULL", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVBM", "PVLV", "PVAV", "PVHV", "PVCV", "PVGV", "PVFM", "PVIO",
208 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
209 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
210 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
211 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
212 #elif PERL_VERSION < 13
213 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
215 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
220 np_print_node_name(pTHX_ FILE *fp, npath_node_t *npath_node)
222 char buf[1024]; /* XXX */
224 switch (npath_node->type) {
225 case NPtype_SV: { /* id is pointer to the SV sv_size was called on */
226 const SV *sv = (SV*)npath_node->id;
227 int type = SvTYPE(sv);
228 char *typename = (type == SVt_IV && SvROK(sv)) ? "RV" : svtypenames[type];
229 fprintf(fp, "SV(%s)", typename);
230 switch(type) { /* add some useful details */
231 case SVt_PVAV: fprintf(fp, " fill=%d/%ld", av_len((AV*)sv), AvMAX((AV*)sv)); break;
232 case SVt_PVHV: fprintf(fp, " fill=%ld/%ld", HvFILL((HV*)sv), HvMAX((HV*)sv)); break;
236 case NPtype_OP: { /* id is pointer to the OP op_size was called on */
237 const OP *op = (OP*)npath_node->id;
238 fprintf(fp, "OP(%s)", OP_NAME(op));
241 case NPtype_MAGIC: { /* id is pointer to the MAGIC struct */
242 MAGIC *magic_pointer = (MAGIC*)npath_node->id;
243 /* XXX it would be nice if we could reuse mg_names.c [sigh] */
244 fprintf(fp, "MAGIC(%c)", magic_pointer->mg_type ? magic_pointer->mg_type : '0');
248 fprintf(fp, "%s", npath_node->id);
251 fprintf(fp, "%s", npath_node->id);
253 default: /* assume id is a string pointer */
254 fprintf(fp, "UNKNOWN(%d,%p)", npath_node->type, npath_node->id);
261 np_dump_indent(int depth) {
263 fprintf(stderr, ": ");
267 np_walk_new_nodes(pTHX_ struct state *st,
268 npath_node_t *npath_node,
269 npath_node_t *npath_node_deeper,
270 int (*cb)(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper))
272 if (npath_node->seqn) /* node already output */
275 if (npath_node->prev) {
276 np_walk_new_nodes(aTHX_ st, npath_node->prev, npath_node, cb); /* recurse */
277 npath_node->depth = npath_node->prev->depth + 1;
279 else npath_node->depth = 0;
280 npath_node->seqn = ++st->seqn;
283 if (cb(aTHX_ st, npath_node, npath_node_deeper)) {
284 /* ignore this node */
285 assert(npath_node->prev);
286 assert(npath_node->depth);
287 assert(npath_node_deeper);
289 npath_node->seqn = --st->seqn;
290 npath_node_deeper->prev = npath_node->prev;
298 np_dump_formatted_node(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
299 if (0 && npath_node->type == NPtype_LINK)
301 np_dump_indent(npath_node->depth);
302 np_print_node_name(aTHX_ stderr, npath_node);
303 if (npath_node->type == NPtype_LINK)
304 fprintf(stderr, "->"); /* cosmetic */
305 fprintf(stderr, "\t\t[#%ld @%u] ", npath_node->seqn, npath_node->depth);
306 fprintf(stderr, "\n");
311 np_dump_node_path_info(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
313 if (attr_type == NPattr_LEAFSIZE && !attr_value)
314 return 0; /* ignore zero sized leaf items */
315 np_walk_new_nodes(aTHX_ st, npath_node, NULL, np_dump_formatted_node);
316 np_dump_indent(npath_node->depth+1);
318 case NPattr_LEAFSIZE:
319 fprintf(stderr, "+%ld %s =%ld", attr_value, attr_name, attr_value+st->total_size);
322 fprintf(stderr, "~NAMED('%s') %lu", attr_name, attr_value);
325 fprintf(stderr, "~note %s %lu", attr_name, attr_value);
330 fprintf(stderr, "~pad%lu %s %lu", attr_type, attr_name, attr_value);
333 fprintf(stderr, "~??? %s %lu", attr_name, attr_value);
336 fprintf(stderr, "\n");
341 np_stream_formatted_node(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
342 fprintf(st->node_stream_fh, "-%u %lu %u ",
343 npath_node->type, npath_node->seqn, (unsigned)npath_node->depth
345 np_print_node_name(aTHX_ st->node_stream_fh, npath_node);
346 fprintf(st->node_stream_fh, "\n");
351 np_stream_node_path_info(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
353 if (!attr_type && !attr_value)
354 return 0; /* ignore zero sized leaf items */
355 np_walk_new_nodes(aTHX_ st, npath_node, NULL, np_stream_formatted_node);
356 if (attr_type) { /* Attribute type, name and value */
357 fprintf(st->node_stream_fh, "%lu %lu ", attr_type, npath_node->seqn);
359 else { /* Leaf name and memory size */
360 fprintf(st->node_stream_fh, "L %lu ", npath_node->seqn);
362 fprintf(st->node_stream_fh, "%lu %s\n", attr_value, attr_name);
367 #endif /* PATH_TRACKING */
371 Checks to see if thing is in the bitstring.
372 Returns true or false, and
373 notes thing in the segmented bitstring.
376 check_new(struct state *st, const void *const p) {
377 unsigned int bits = 8 * sizeof(void*);
378 const size_t raw_p = PTR2nat(p);
379 /* This effectively rotates the value right by the number of low always-0
380    bits in an aligned pointer. The assumption is that most (if not all)
381 pointers are aligned, and these will be in the same chain of nodes
382 (and hence hot in the cache) but we can still deal with any unaligned
384 const size_t cooked_p
385 = (raw_p >> ALIGN_BITS) | (raw_p << (bits - ALIGN_BITS));
386 const U8 this_bit = 1 << (cooked_p & 0x7);
390 void **tv_p = (void **) (st->tracking);
392 if (NULL == p) return FALSE;
394 const char c = *(const char *)p;
397 if (st->dangle_whine)
398 warn( "Devel::Size: Encountered invalid pointer: %p\n", p );
404 /* bits now 24 (32 bit pointers) or 56 (64 bit pointers) */
406 /* First level is always present. */
408 i = (unsigned int)((cooked_p >> bits) & 0xFF);
410 Newxz(tv_p[i], 256, void *);
411 tv_p = (void **)(tv_p[i]);
413 } while (bits > LEAF_BITS + BYTE_BITS);
414 /* bits now 16 always */
415 #if !defined(MULTIPLICITY) || PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8)
416 /* 5.8.8 and earlier have an assert() macro that uses Perl_croak, hence needs
417    a my_perl under multiplicity */
420 leaf_p = (U8 **)tv_p;
421 i = (unsigned int)((cooked_p >> bits) & 0xFF);
423 Newxz(leaf_p[i], 1 << LEAF_BITS, U8);
428 i = (unsigned int)((cooked_p >> BYTE_BITS) & LEAF_MASK);
430 if(leaf[i] & this_bit)
438 free_tracking_at(void **tv, int level)
446 free_tracking_at((void **) tv[i], level);
460 free_state(pTHX_ struct state *st)
462 const int top_level = (sizeof(void *) * 8 - LEAF_BITS - BYTE_BITS) / 8;
463 if (st->free_state_cb)
464 st->free_state_cb(aTHX_ st);
465 if (st->state_cb_data)
466 Safefree(st->state_cb_data);
467 free_tracking_at((void **)st->tracking, top_level);
471 /* For now, this is somewhat a compatibility bodge until the plan comes
472 together for fine grained recursion control. total_size() would recurse into
473 hash and array members, whereas sv_size() would not. However, sv_size() is
474 called with CvSTASH() of a CV, which means that if it (also) starts to
475 recurse fully, then the size of any CV now becomes the size of the entire
476 symbol table reachable from it, and potentially the entire symbol table, if
477 any subroutine makes a reference to a global (such as %SIG). The historical
478 implementation of total_size() didn't report "everything", and changing the
479 only available size to "everything" doesn't feel at all useful. */
481 #define NO_RECURSION 0
482 #define SOME_RECURSION 1
483 #define TOTAL_SIZE_RECURSION 2
485 static void sv_size(pTHX_ struct state *, pPATH, const SV *const, const int recurse);
501 , OPc_CONDOP /* 12 */
510 cc_opclass(const OP * const o)
516 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
518 if (o->op_type == OP_SASSIGN)
519 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
522 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_AELEMFAST)
526 if ((o->op_type == OP_TRANS)) {
530 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
562 #ifdef OA_PVOP_OR_SVOP
563 case OA_PVOP_OR_SVOP: TAG;
565 * Character translations (tr///) are usually a PVOP, keeping a
566 * pointer to a table of shorts used to look up translations.
567 * Under utf8, however, a simple table isn't practical; instead,
568 * the OP is an SVOP, and the SV is a reference to a swash
569 * (i.e., an RV pointing to an HV).
571 return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF))
572 ? OPc_SVOP : OPc_PVOP;
581 case OA_BASEOP_OR_UNOP: TAG;
583 * UNI(OP_foo) in toke.c returns token UNI or FUNC1 depending on
584 * whether parens were seen. perly.y uses OPf_SPECIAL to
585 * signal whether a BASEOP had empty parens or none.
586 * Some other UNOPs are created later, though, so the best
587 * test is OPf_KIDS, which is set in newUNOP.
589 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
591 case OA_FILESTATOP: TAG;
593 * The file stat OPs are created via UNI(OP_foo) in toke.c but use
594 * the OPf_REF flag to distinguish between OP types instead of the
595 * usual OPf_SPECIAL flag. As usual, if OPf_KIDS is set, then we
596 * return OPc_UNOP so that walkoptree can find our children. If
597 * OPf_KIDS is not set then we check OPf_REF. Without OPf_REF set
598 * (no argument to the operator) it's an OP; with OPf_REF set it's
599 * an SVOP (and op_sv is the GV for the filehandle argument).
601 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
603 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
605 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
607 case OA_LOOPEXOP: TAG;
609 * next, last, redo, dump and goto use OPf_SPECIAL to indicate that a
610 * label was omitted (in which case it's a BASEOP) or else a term was
611 * seen. In this last case, all except goto are definitely PVOP but
612 * goto is either a PVOP (with an ordinary constant label), an UNOP
613 * with OPf_STACKED (with a non-constant non-sub) or an UNOP for
614 * OP_REFGEN (with goto &sub) in which case OPf_STACKED also seems to
617 if (o->op_flags & OPf_STACKED)
619 else if (o->op_flags & OPf_SPECIAL)
629 warn("Devel::Size: Can't determine class of operator %s, assuming BASEOP\n",
630 PL_op_name[o->op_type]);
636 /* Figure out how much magic is attached to the SV and return the
639 magic_size(pTHX_ const SV * const thing, struct state *st, pPATH) {
640 dNPathNodes(1, NPathArg);
641 MAGIC *magic_pointer = SvMAGIC(thing); /* caller ensures thing is SvMAGICAL */
643 /* push a dummy node for NPathSetNode to update inside the while loop */
644 NPathPushNode("dummy", NPtype_NAME);
646 /* Have we seen the magic pointer? (NULL has always been seen before) */
647 while (check_new(st, magic_pointer)) {
649 NPathSetNode(magic_pointer, NPtype_MAGIC);
651 ADD_SIZE(st, "mg", sizeof(MAGIC));
652 /* magic vtables aren't freed when magic is freed, so don't count them.
653 (They are static structures. Anything that assumes otherwise is buggy.)
658 /* XXX only chase mg_obj if mg->mg_flags & MGf_REFCOUNTED ? */
659 sv_size(aTHX_ st, NPathLink("mg_obj"), magic_pointer->mg_obj, TOTAL_SIZE_RECURSION);
660 if (magic_pointer->mg_len == HEf_SVKEY) {
661 sv_size(aTHX_ st, NPathLink("mg_ptr"), (SV *)magic_pointer->mg_ptr, TOTAL_SIZE_RECURSION);
663 #if defined(PERL_MAGIC_utf8) && defined (PERL_MAGIC_UTF8_CACHESIZE)
664 else if (magic_pointer->mg_type == PERL_MAGIC_utf8) {
665 if (check_new(st, magic_pointer->mg_ptr)) {
666 ADD_SIZE(st, "PERL_MAGIC_utf8", PERL_MAGIC_UTF8_CACHESIZE * 2 * sizeof(STRLEN));
670 /* XXX also handle mg->mg_type == PERL_MAGIC_utf8 ? */
671 else if (magic_pointer->mg_len > 0) {
672 if(0)do_magic_dump(0, Perl_debug_log, magic_pointer, 0, 0, FALSE, 0);
673 if (check_new(st, magic_pointer->mg_ptr)) {
674 ADD_SIZE(st, "mg_len", magic_pointer->mg_len);
678 /* Get the next in the chain */
679 magic_pointer = magic_pointer->mg_moremagic;
682 if (st->dangle_whine)
683 warn( "Devel::Size: Encountered bad magic at: %p\n", magic_pointer );
688 #define check_new_and_strlen(st, p, ppath) S_check_new_and_strlen(aTHX_ st, p, ppath)
690 S_check_new_and_strlen(pTHX_ struct state *st, const char *const p, pPATH) {
691 dNPathNodes(1, NPathArg->prev);
692 if(check_new(st, p)) {
693 NPathPushNode(NPathArg->id, NPtype_NAME);
694 ADD_SIZE(st, NPathArg->id, 1 + strlen(p));
699 regex_size(pTHX_ const REGEXP * const baseregex, struct state *st, pPATH) {
700 dNPathNodes(1, NPathArg);
701 if(!check_new(st, baseregex))
703 NPathPushNode("regex_size", NPtype_NAME);
704 ADD_SIZE(st, "REGEXP", sizeof(REGEXP));
705 #if (PERL_VERSION < 11)
706 /* Note the size of the paren offset thing */
707 ADD_SIZE(st, "nparens", sizeof(I32) * baseregex->nparens * 2);
708 ADD_SIZE(st, "precomp", strlen(baseregex->precomp));
710 ADD_SIZE(st, "regexp", sizeof(struct regexp));
711 ADD_SIZE(st, "nparens", sizeof(I32) * SvANY(baseregex)->nparens * 2);
712 /*ADD_SIZE(st, strlen(SvANY(baseregex)->subbeg));*/
714 if (st->go_yell && !st->regex_whine) {
715 carp("Devel::Size: Calculated sizes for compiled regexes are incompatible, and probably always will be");
721 op_size(pTHX_ const OP * const baseop, struct state *st, pPATH)
723 /* op_size recurses to follow the chain of opcodes. For the node path we
724 * don't want the chain to be 'nested' in the path so we use dNPathUseParent().
725 * Also, to avoid a link-to-a-link the caller should use NPathLinkAndNode()
726 * instead of NPathLink().
728 dNPathUseParent(NPathArg);
732 if(!check_new(st, baseop))
735 op_size(aTHX_ baseop->op_next, st, NPathOpLink);
737 switch (cc_opclass(baseop)) {
738 case OPc_BASEOP: TAG;
739 ADD_SIZE(st, "op", sizeof(struct op));
742 ADD_SIZE(st, "unop", sizeof(struct unop));
743 op_size(aTHX_ ((UNOP *)baseop)->op_first, st, NPathOpLink);
746 ADD_SIZE(st, "binop", sizeof(struct binop));
747 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
748 op_size(aTHX_ ((BINOP *)baseop)->op_last, st, NPathOpLink);
751 ADD_SIZE(st, "logop", sizeof(struct logop));
752 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
753 op_size(aTHX_ ((LOGOP *)baseop)->op_other, st, NPathOpLink);
756 case OPc_CONDOP: TAG;
757 ADD_SIZE(st, "condop", sizeof(struct condop));
758 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
759 op_size(aTHX_ ((CONDOP *)baseop)->op_true, st, NPathOpLink);
760 op_size(aTHX_ ((CONDOP *)baseop)->op_false, st, NPathOpLink);
763 case OPc_LISTOP: TAG;
764 ADD_SIZE(st, "listop", sizeof(struct listop));
765 op_size(aTHX_ ((LISTOP *)baseop)->op_first, st, NPathOpLink);
766 op_size(aTHX_ ((LISTOP *)baseop)->op_last, st, NPathOpLink);
769 ADD_SIZE(st, "pmop", sizeof(struct pmop));
770 op_size(aTHX_ ((PMOP *)baseop)->op_first, st, NPathOpLink);
771 op_size(aTHX_ ((PMOP *)baseop)->op_last, st, NPathOpLink);
772 #if PERL_VERSION < 9 || (PERL_VERSION == 9 && PERL_SUBVERSION < 5)
773 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplroot, st, NPathOpLink);
774 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplstart, st, NPathOpLink);
776 /* This is defined away in perl 5.8.x, but it is in there for
779 regex_size(aTHX_ PM_GETRE((PMOP *)baseop), st, NPathLink("PM_GETRE"));
781 regex_size(aTHX_ ((PMOP *)baseop)->op_pmregexp, st, NPathLink("op_pmregexp"));
785 ADD_SIZE(st, "svop", sizeof(struct svop));
786 if (!(baseop->op_type == OP_AELEMFAST
787 && baseop->op_flags & OPf_SPECIAL)) {
788 /* not an OP_PADAV replacement */
789 sv_size(aTHX_ st, NPathLink("SVOP"), ((SVOP *)baseop)->op_sv, SOME_RECURSION);
794 ADD_SIZE(st, "padop", sizeof(struct padop));
799 ADD_SIZE(st, "gvop", sizeof(struct gvop));
800 sv_size(aTHX_ st, NPathLink("GVOP"), ((GVOP *)baseop)->op_gv, SOME_RECURSION);
804 check_new_and_strlen(st, ((PVOP *)baseop)->op_pv, NPathLink("op_pv"));
807 ADD_SIZE(st, "loop", sizeof(struct loop));
808 op_size(aTHX_ ((LOOP *)baseop)->op_first, st, NPathOpLink);
809 op_size(aTHX_ ((LOOP *)baseop)->op_last, st, NPathOpLink);
810 op_size(aTHX_ ((LOOP *)baseop)->op_redoop, st, NPathOpLink);
811 op_size(aTHX_ ((LOOP *)baseop)->op_nextop, st, NPathOpLink);
812 op_size(aTHX_ ((LOOP *)baseop)->op_lastop, st, NPathOpLink);
817 basecop = (COP *)baseop;
818 ADD_SIZE(st, "cop", sizeof(struct cop));
820 /* Change 33656 by nicholas@mouse-mill on 2008/04/07 11:29:51
821 Eliminate cop_label from struct cop by storing a label as the first
822 entry in the hints hash. Most statements don't have labels, so this
823 will save memory. Not sure how much.
824       The check below will incorrectly fail on bleadperls
825 before 5.11 @33656, but later than 5.10, producing slightly too
826 small memory sizes on these Perls. */
827 #if (PERL_VERSION < 11)
828 check_new_and_strlen(st, basecop->cop_label, NPathLink("cop_label"));
831 check_new_and_strlen(st, basecop->cop_file, NPathLink("cop_file"));
832 check_new_and_strlen(st, basecop->cop_stashpv, NPathLink("cop_stashpv"));
834 if (SvREFCNT(basecop->cop_stash) == 1) /* XXX hack? */
835 sv_size(aTHX_ st, NPathLink("cop_stash"), (SV *)basecop->cop_stash, SOME_RECURSION);
836 sv_size(aTHX_ st, NPathLink("cop_filegv"), (SV *)basecop->cop_filegv, SOME_RECURSION);
846 if (st->dangle_whine)
847 warn( "Devel::Size: Encountered dangling pointer in opcode at: %p\n", baseop );
852 hek_size(pTHX_ struct state *st, HEK *hek, U32 shared, pPATH)
854 dNPathNodes(1, NPathArg);
856 /* Hash keys can be shared. Have we seen this before? */
857 if (!check_new(st, hek))
859 NPathPushNode("hek", NPtype_NAME);
860 ADD_SIZE(st, "hek_len", HEK_BASESIZE + hek->hek_len
862 + 1 /* No hash key flags prior to 5.8.0 */
868 #if PERL_VERSION < 10
869 ADD_SIZE(st, "he", sizeof(struct he));
871 ADD_SIZE(st, "shared_he", STRUCT_OFFSET(struct shared_he, shared_he_hek));
877 #if PERL_VERSION < 8 || PERL_SUBVERSION < 9
882 # define MAYBE_PURIFY(normal, pure) (pure)
883 # define MAYBE_OFFSET(struct_name, member) 0
885 # define MAYBE_PURIFY(normal, pure) (normal)
886 # define MAYBE_OFFSET(struct_name, member) STRUCT_OFFSET(struct_name, member)
889 const U8 body_sizes[SVt_LAST] = {
892 MAYBE_PURIFY(sizeof(IV), sizeof(XPVIV)), /* SVt_IV */
893 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
894 sizeof(XRV), /* SVt_RV */
895 sizeof(XPV), /* SVt_PV */
896 sizeof(XPVIV), /* SVt_PVIV */
897 sizeof(XPVNV), /* SVt_PVNV */
898 sizeof(XPVMG), /* SVt_PVMG */
899 sizeof(XPVBM), /* SVt_PVBM */
900 sizeof(XPVLV), /* SVt_PVLV */
901 sizeof(XPVAV), /* SVt_PVAV */
902 sizeof(XPVHV), /* SVt_PVHV */
903 sizeof(XPVCV), /* SVt_PVCV */
904 sizeof(XPVGV), /* SVt_PVGV */
905 sizeof(XPVFM), /* SVt_PVFM */
906 sizeof(XPVIO) /* SVt_PVIO */
907 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
911 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
913 MAYBE_PURIFY(sizeof(xpv_allocated), sizeof(XPV)), /* SVt_PV */
914 MAYBE_PURIFY(sizeof(xpviv_allocated), sizeof(XPVIV)),/* SVt_PVIV */
915 sizeof(XPVNV), /* SVt_PVNV */
916 sizeof(XPVMG), /* SVt_PVMG */
917 sizeof(XPVGV), /* SVt_PVGV */
918 sizeof(XPVLV), /* SVt_PVLV */
919 MAYBE_PURIFY(sizeof(xpvav_allocated), sizeof(XPVAV)),/* SVt_PVAV */
920 MAYBE_PURIFY(sizeof(xpvhv_allocated), sizeof(XPVHV)),/* SVt_PVHV */
921 MAYBE_PURIFY(sizeof(xpvcv_allocated), sizeof(XPVCV)),/* SVt_PVCV */
922 MAYBE_PURIFY(sizeof(xpvfm_allocated), sizeof(XPVFM)),/* SVt_PVFM */
923 sizeof(XPVIO), /* SVt_PVIO */
924 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
928 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
930 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
931 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
932 sizeof(XPVNV), /* SVt_PVNV */
933 sizeof(XPVMG), /* SVt_PVMG */
934 sizeof(XPVGV), /* SVt_PVGV */
935 sizeof(XPVLV), /* SVt_PVLV */
936 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
937 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
938 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
939 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
940 sizeof(XPVIO) /* SVt_PVIO */
941 #elif PERL_VERSION < 13
945 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
946 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
947 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
948 sizeof(XPVNV), /* SVt_PVNV */
949 sizeof(XPVMG), /* SVt_PVMG */
950 sizeof(regexp) - MAYBE_OFFSET(regexp, xpv_cur), /* SVt_REGEXP */
951 sizeof(XPVGV), /* SVt_PVGV */
952 sizeof(XPVLV), /* SVt_PVLV */
953 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
954 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
955 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
956 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
957 sizeof(XPVIO) /* SVt_PVIO */
962 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
963 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
964 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
965 sizeof(XPVNV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVNV */
966 sizeof(XPVMG), /* SVt_PVMG */
967 sizeof(regexp), /* SVt_REGEXP */
968 sizeof(XPVGV), /* SVt_PVGV */
969 sizeof(XPVLV), /* SVt_PVLV */
970 sizeof(XPVAV), /* SVt_PVAV */
971 sizeof(XPVHV), /* SVt_PVHV */
972 sizeof(XPVCV), /* SVt_PVCV */
973 sizeof(XPVFM), /* SVt_PVFM */
974 sizeof(XPVIO) /* SVt_PVIO */
979 /* based on Perl_do_dump_pad() - wraps sv_size and adds ADD_ATTR calls for the pad names */
981 padlist_size(pTHX_ struct state *const st, pPATH, PADLIST *padlist,
984 dNPathUseParent(NPathArg);
991 if( 0 && !check_new(st, padlist))
994 pad_name = MUTABLE_AV(*av_fetch(MUTABLE_AV(padlist), 0, FALSE));
995 pname = AvARRAY(pad_name);
997 for (ix = 1; ix <= AvFILLp(pad_name); ix++) {
998 const SV *namesv = pname[ix];
999 if (namesv && namesv == &PL_sv_undef) {
1003 /* SvFAKE: On a pad name SV, that slot in the frame AV is a REFCNT'ed reference to a lexical from "outside" */
1005 ADD_ATTR(st, NPattr_PADFAKE, SvPVX_const(namesv), ix);
1007 ADD_ATTR(st, NPattr_PADNAME, SvPVX_const(namesv), ix);
1010 ADD_ATTR(st, NPattr_PADTMP, "SVs_PADTMP", ix);
1014 sv_size(aTHX_ st, NPathArg, (SV*)padlist, recurse);
1019 sv_size(pTHX_ struct state *const st, pPATH, const SV * const orig_thing,
1020 const int recurse) {
1021 const SV *thing = orig_thing;
1022 dNPathNodes(3, NPathArg);
1025 if(!check_new(st, orig_thing))
1028 type = SvTYPE(thing);
1029 if (type > SVt_LAST) {
1030 warn("Devel::Size: Unknown variable type: %d encountered\n", type);
1033 NPathPushNode(thing, NPtype_SV);
1034 ADD_SIZE(st, "sv_head", sizeof(SV));
1035 ADD_SIZE(st, "sv_body", body_sizes[type]);
1038 #if (PERL_VERSION < 11)
1039 /* Is it a reference? */
1044 if(recurse && SvROK(thing)) /* XXX maybe don't follow weakrefs */
1045 sv_size(aTHX_ st, (SvWEAKREF(thing) ? NPathLink("weakRV") : NPathLink("RV")), SvRV_const(thing), recurse);
1049 /* Is there anything in the array? */
1050 if (AvMAX(thing) != -1) {
1051 /* an array with 10 slots has AvMax() set to 9 - te 2007-04-22 */
1052 ADD_SIZE(st, "av_max", sizeof(SV *) * (AvMAX(thing) + 1));
1053 dbg_printf(("total_size: %li AvMAX: %li av_len: $i\n", st->total_size, AvMAX(thing), av_len((AV*)thing)));
1055 if (recurse >= st->min_recurse_threshold) {
1056 SSize_t i = AvFILLp(thing) + 1;
1059 ADD_PRE_ATTR(st, 0, "index", i);
1060 sv_size(aTHX_ st, NPathLink("AVelem"), AvARRAY(thing)[i], recurse);
1064 /* Add in the bits on the other side of the beginning */
1066 dbg_printf(("total_size %li, sizeof(SV *) %li, AvARRAY(thing) %li, AvALLOC(thing)%li , sizeof(ptr) %li \n",
1067 st->total_size, sizeof(SV*), AvARRAY(thing), AvALLOC(thing), sizeof( thing )));
1069 /* under Perl 5.8.8 64bit threading, AvARRAY(thing) was a pointer while AvALLOC was 0,
1070       resulting in grossly overstated sizes for arrays. Technically, this shouldn't happen... */
1071 if (AvALLOC(thing) != 0) {
1072 ADD_SIZE(st, "AvALLOC", (sizeof(SV *) * (AvARRAY(thing) - AvALLOC(thing))));
1074 #if (PERL_VERSION < 9)
1075 /* Is there something hanging off the arylen element?
1076 Post 5.9.something this is stored in magic, so will be found there,
1077 and Perl_av_arylen_p() takes a non-const AV*, hence compilers rightly
1078 complain about AvARYLEN() passing thing to it. */
1079 sv_size(aTHX_ st, NPathLink("ARYLEN"), AvARYLEN(thing), recurse);
1084 /* Now the array of buckets */
1085 ADD_SIZE(st, "hv_max", (sizeof(HE *) * (HvMAX(thing) + 1)));
1086 if (HvENAME(thing)) {
1087 ADD_ATTR(st, NPattr_NAME, HvENAME(thing), 0);
1089 /* Now walk the bucket chain */
1090 if (HvARRAY(thing)) {
1093 for (cur_bucket = 0; cur_bucket <= HvMAX(thing); cur_bucket++) {
1094 cur_entry = *(HvARRAY(thing) + cur_bucket);
1096 /* XXX a HE should probably be a node so the keys and values are seen as pairs */
1097 ADD_SIZE(st, "he", sizeof(HE));
1098 hek_size(aTHX_ st, cur_entry->hent_hek, HvSHAREKEYS(thing), NPathLink("hent_hek"));
1099 if (recurse >= st->min_recurse_threshold) {
1100 if (orig_thing == (SV*)PL_strtab) {
1101 /* For PL_strtab the HeVAL is used as a refcnt */
1102 ADD_SIZE(st, "shared_hek", HeKLEN(cur_entry));
1105 /* I've seen a PL_strtab HeVAL == 0xC and 0x40C etc
1106 * just running perl -Mblib -Mstrict -MDevel::Size=:all -MCarp -e 'warn perl_size()'
1107                     * but it seemed like a corruption - it would come and go with irrelevant code changes.
1108 * so we protect against that here, but I'd like to know the cause.
1110 if (PTR2UV(HeVAL(cur_entry)) > 0xFFF)
1111 sv_size(aTHX_ st, NPathLink("HeVAL"), HeVAL(cur_entry), recurse);
1112 else warn("skipped suspect HeVAL %p", HeVAL(cur_entry));
1115 cur_entry = cur_entry->hent_next;
1121 /* This direct access is arguably "naughty": */
1122 struct mro_meta *meta = HvAUX(thing)->xhv_mro_meta;
1123 #if PERL_VERSION > 13 || PERL_SUBVERSION > 8
1125 I32 count = HvAUX(thing)->xhv_name_count;
1128 HEK **names = HvAUX(thing)->xhv_name_u.xhvnameu_names;
1132 hek_size(aTHX_ st, names[count], 1, NPathLink("HvAUXelem"));
1137 hek_size(aTHX_ st, HvNAME_HEK(thing), 1, NPathLink("HvNAME_HEK"));
1140 ADD_SIZE(st, "xpvhv_aux", sizeof(struct xpvhv_aux));
1142 ADD_SIZE(st, "mro_meta", sizeof(struct mro_meta));
1143 sv_size(aTHX_ st, NPathLink("mro_nextmethod"), (SV *)meta->mro_nextmethod, TOTAL_SIZE_RECURSION);
1144 #if PERL_VERSION > 10 || (PERL_VERSION == 10 && PERL_SUBVERSION > 0)
1145 sv_size(aTHX_ st, NPathLink("isa"), (SV *)meta->isa, TOTAL_SIZE_RECURSION);
1147 #if PERL_VERSION > 10
1148 sv_size(aTHX_ st, NPathLink("mro_linear_all"), (SV *)meta->mro_linear_all, TOTAL_SIZE_RECURSION);
1149 sv_size(aTHX_ st, NPathLink("mro_linear_current"), meta->mro_linear_current, TOTAL_SIZE_RECURSION);
1151 sv_size(aTHX_ st, NPathLink("mro_linear_dfs"), (SV *)meta->mro_linear_dfs, TOTAL_SIZE_RECURSION);
1152 sv_size(aTHX_ st, NPathLink("mro_linear_c3"), (SV *)meta->mro_linear_c3, TOTAL_SIZE_RECURSION);
1157 check_new_and_strlen(st, HvNAME_get(thing), NPathLink("HvNAME"));
1163 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1164 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1166 if (st->go_yell && !st->fm_whine) {
1167 carp("Devel::Size: Calculated sizes for FMs are incomplete");
1173 /* not CvSTASH, per https://rt.cpan.org/Ticket/Display.html?id=79366 */
1174 ADD_ATTR(st, NPattr_NAME, CvGV(thing) ? GvNAME(CvGV(thing)) : "UNDEFINED", 0);
1175 sv_size(aTHX_ st, NPathLink("SvSTASH"), (SV *)SvSTASH(thing), SOME_RECURSION);
1176 sv_size(aTHX_ st, NPathLink("CvGV"), (SV *)CvGV(thing), SOME_RECURSION);
1177 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1178 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1179 if (CvISXSUB(thing)) {
1180 sv_size(aTHX_ st, NPathLink("cv_const_sv"), cv_const_sv((CV *)thing), recurse);
1182 if(1)op_size(aTHX_ CvSTART(thing), st, NPathLinkAndNode("CvSTART", "OPs")); /* XXX ? */
1183 op_size(aTHX_ CvROOT(thing), st, NPathLinkAndNode("CvROOT", "OPs"));
1188 /* Some embedded char pointers */
1189 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_top_name, NPathLink("xio_top_name"));
1190 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_fmt_name, NPathLink("xio_fmt_name"));
1191 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_bottom_name, NPathLink("xio_bottom_name"));
1192 /* Throw the GVs on the list to be walked if they're not-null */
1193 sv_size(aTHX_ st, NPathLink("xio_top_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_top_gv, recurse);
1194 sv_size(aTHX_ st, NPathLink("xio_bottom_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_bottom_gv, recurse);
1195 sv_size(aTHX_ st, NPathLink("xio_fmt_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_fmt_gv, recurse);
1197 /* Only go trotting through the IO structures if they're really
1198 trottable. If USE_PERLIO is defined we can do this. If
1199 not... we can't, so we don't even try */
1201 /* Dig into xio_ifp and xio_ofp here */
1202 warn("Devel::Size: Can't size up perlio layers yet\n");
1207 #if (PERL_VERSION < 9)
1212 if(isGV_with_GP(thing)) {
1214 hek_size(aTHX_ st, GvNAME_HEK(thing), 1, NPathLink("GvNAME_HEK"));
1216 ADD_SIZE(st, "GvNAMELEN", GvNAMELEN(thing));
1218 ADD_ATTR(st, NPattr_NAME, GvNAME_get(thing), 0);
1220 hek_size(aTHX_ st, GvFILE_HEK(thing), 1, NPathLink("GvFILE_HEK"));
1221 #elif defined(GvFILE)
1222 # if !defined(USE_ITHREADS) || (PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8))
1223 /* With ithreads, before 5.8.9, this can end up pointing to freed memory
1224 if the GV was created in an eval, as GvFILE() points to CopFILE(),
1225 and the relevant COP has been freed on scope cleanup after the eval.
1226 5.8.9 adds a binary compatible fudge that catches the vast majority
1227 of cases. 5.9.something added a proper fix, by converting the GP to
1228 use a shared hash key (properly reference counted), instead of a
1229 char * (owned by who knows? possibly no-one now) */
1230 check_new_and_strlen(st, GvFILE(thing), NPathLink("GvFILE"));
1233 /* Is there something hanging off the glob? */
1234 if (check_new(st, GvGP(thing))) {
1235 ADD_SIZE(st, "GP", sizeof(GP));
1236 sv_size(aTHX_ st, NPathLink("gp_sv"), (SV *)(GvGP(thing)->gp_sv), recurse);
1237 sv_size(aTHX_ st, NPathLink("gp_av"), (SV *)(GvGP(thing)->gp_av), recurse);
1238 sv_size(aTHX_ st, NPathLink("gp_hv"), (SV *)(GvGP(thing)->gp_hv), recurse);
1239 sv_size(aTHX_ st, NPathLink("gp_cv"), (SV *)(GvGP(thing)->gp_cv), recurse);
1240 sv_size(aTHX_ st, NPathLink("gp_egv"), (SV *)(GvGP(thing)->gp_egv), recurse);
1241 sv_size(aTHX_ st, NPathLink("gp_form"), (SV *)(GvGP(thing)->gp_form), recurse);
1243 #if (PERL_VERSION >= 9)
1247 #if PERL_VERSION <= 8
1255 if(recurse && SvROK(thing))
1256 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
1257 else if (SvIsCOW_shared_hash(thing))
1258 hek_size(aTHX_ st, SvSHARED_HEK_FROM_PV(SvPVX(thing)), 1, NPathLink("SvSHARED_HEK_FROM_PV"));
1260 ADD_SIZE(st, "SvLEN", SvLEN(thing));
1264 SvOOK_offset(thing, len);
1265 ADD_SIZE(st, "SvOOK", len);
1271 if (type >= SVt_PVMG) {
1272 if (SvMAGICAL(thing))
1273 magic_size(aTHX_ thing, st, NPathLink("MG"));
1274 if (SvPAD_OUR(thing) && SvOURSTASH(thing))
1275 sv_size(aTHX_ st, NPathLink("SvOURSTASH"), (SV *)SvOURSTASH(thing), SOME_RECURSION);
/* NOTE(review): this is an elided, line-numbered extraction of the file; the
 * numeric prefixes and the gaps between them are artefacts of that view, and
 * this function's return-type line, braces and else-keyword fall in elided
 * lines. */
/* free_memnode_state: cleanup callback installed as st->free_state_cb by the
 * state constructor below.  Closes the node stream, if one was opened: a
 * stream whose name begins with '|' was opened with popen() and must be
 * closed with pclose(); any other non-empty name was opened with fopen() and
 * is closed with fclose().  Both paths warn (rather than croak) on failure,
 * since we are tearing down and have nothing better to do. */
1282 free_memnode_state(pTHX_ struct state *st)
1284 if (st->node_stream_fh && st->node_stream_name && *st->node_stream_name) {
/* pipe stream: pclose() yields the child's exit status; non-zero => warn */
1285 if (*st->node_stream_name == '|') {
1286 if (pclose(st->node_stream_fh))
1287 warn("%s exited with an error status\n", st->node_stream_name);
/* plain-file stream (the matching else lives in an elided line) */
1290 if (fclose(st->node_stream_fh))
1291 warn("Error closing %s: %s\n", st->node_stream_name, strerror(errno));
/* State constructor (the function-name line itself is elided here; callers
 * below invoke it as new_state(aTHX)).  Allocates a zeroed struct state and
 * initialises policy from the user-visible control variables:
 *   $Devel::Size::warn   -> sets both go_yell and dangle_whine
 *   $Devel::Size::dangle -> sets dangle_whine alone
 * The immortal SVs are pre-marked as seen so they are never counted.
 * Under PATH_TRACKING, the PERL_DMEM environment variable selects where node
 * path info is sent: "|cmd" pipes to cmd, any other non-empty value names a
 * file, and an empty/unset value falls back to stderr-style dumping. */
1296 static struct state *
1302 Newxz(st, 1, struct state);
/* default policy: only recurse into contained SVs on a total-size walk */
1304 st->min_recurse_threshold = TOTAL_SIZE_RECURSION;
1305 if (NULL != (warn_flag = perl_get_sv("Devel::Size::warn", FALSE))) {
1306 st->dangle_whine = st->go_yell = SvIV(warn_flag) ? TRUE : FALSE;
1308 if (NULL != (warn_flag = perl_get_sv("Devel::Size::dangle", FALSE))) {
1309 st->dangle_whine = SvIV(warn_flag) ? TRUE : FALSE;
/* mark the interpreter's immortal SVs as already seen: they are shared by
 * everything and must not be attributed to any one data structure */
1311 check_new(st, &PL_sv_undef);
1312 check_new(st, &PL_sv_no);
1313 check_new(st, &PL_sv_yes);
1314 #if PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 0)
1315 check_new(st, &PL_sv_placeholder);
1318 #ifdef PATH_TRACKING
1319 /* XXX quick hack */
1320 st->node_stream_name = getenv("PERL_DMEM");
1321 if (st->node_stream_name) {
1322 if (*st->node_stream_name) {
/* leading '|' => pipe to a command; otherwise write a binary file */
1323 if (*st->node_stream_name == '|')
1324 st->node_stream_fh = popen(st->node_stream_name+1, "w");
1326 st->node_stream_fh = fopen(st->node_stream_name, "wb");
1327 if (!st->node_stream_fh)
1328 croak("Can't open '%s' for writing: %s", st->node_stream_name, strerror(errno));
1329 setlinebuf(st->node_stream_fh); /* XXX temporary for debugging */
1330 st->add_attr_cb = np_stream_node_path_info;
1333 st->add_attr_cb = np_dump_node_path_info;
/* ensure the stream above gets closed when the state is freed */
1335 st->free_state_cb = free_memnode_state;
/* unseen_sv_size: sweep every SV arena (the same walk S_visit() does in
 * perl's sv.c) and size any live SV that the reachability walk above has not
 * already accounted for.  Once the rest of the code reliably reaches all
 * live SVs, anything counted here is by definition leaked/unreachable.
 * NOTE(review): elided view — variable declarations (sva, sv) and several
 * closing braces fall in lines not visible here. */
1341 /* XXX based on S_visit() in sv.c */
1343 unseen_sv_size(pTHX_ struct state *st, pPATH)
1348 dNPathNodes(1, NPathArg);
1350 NPathPushNode("unseen", NPtype_NAME);
1352 /* by this point we should have visited all the SVs
1353 * so now we'll run through all the SVs via the arenas
1354 * in order to find any that we've missed for some reason.
1355 * Once the rest of the code is finding all the SVs then any
1356 * found here will be leaks.
/* arenas form a chain: the first SV of each arena links to the next arena
 * via SvANY, and abuses SvREFCNT to record the arena's slot count */
1358 for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
1359 const SV * const svend = &sva[SvREFCNT(sva)];
1361 for (sv = sva + 1; sv < svend; ++sv) {
/* live slot: type is not the freed-slot sentinel and refcount is non-zero */
1362 if (SvTYPE(sv) != (svtype)SVTYPEMASK && SvREFCNT(sv)) {
1363 sv_size(aTHX_ st, NPathLink("arena"), sv, TOTAL_SIZE_RECURSION);
/* freed slot: should have been noted by the free-chain walk in perl_size */
1365 else if (check_new(st, sv)) { /* sanity check */
1367 warn("unseen_sv_size encountered freed SV unexpectedly"); /* XXX warn uses an SV, I think */
/* perl_size: account for (approximately) the whole interpreter.  Strategy:
 *  1. size everything reachable from %main:: via PL_defstash;
 *  2. sweep the remaining PL_* interpreter globals under an "others" node;
 *  3. pick up any PL_strtab entries not already seen;
 *  4. count the free slots on the SV-head arena free chain;
 *  5. finally, unseen_sv_size() catches anything the above missed.
 * NOTE(review): elided view — several declarations (e.g. the free-chain
 * cursor p, free_heads) and #if/#endif partners fall in lines not shown. */
1374 perl_size(pTHX_ struct state *const st, pPATH)
1376 dNPathNodes(3, NPathArg);
1378 /* if(!check_new(st, interp)) return; */
1379 NPathPushNode("perl", NPtype_NAME);
1385 * unknown <== = O/S Heap size - perl - free_malloc_space
1387 /* start with PL_defstash to get everything reachable from \%main:: */
1388 sv_size(aTHX_ st, NPathLink("PL_defstash"), (SV*)PL_defstash, TOTAL_SIZE_RECURSION);
1390 NPathPushNode("others", NPtype_NAME); /* group these (typically much smaller) items */
1391 sv_size(aTHX_ st, NPathLink("PL_defgv"), (SV*)PL_defgv, TOTAL_SIZE_RECURSION);
1392 sv_size(aTHX_ st, NPathLink("PL_incgv"), (SV*)PL_incgv, TOTAL_SIZE_RECURSION);
1393 sv_size(aTHX_ st, NPathLink("PL_rs"), (SV*)PL_rs, TOTAL_SIZE_RECURSION);
1394 sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1395 sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1396 sv_size(aTHX_ st, NPathLink("PL_errors"), (SV*)PL_errors, TOTAL_SIZE_RECURSION);
1397 sv_size(aTHX_ st, NPathLink("PL_stashcache"), (SV*)PL_stashcache, TOTAL_SIZE_RECURSION);
1398 sv_size(aTHX_ st, NPathLink("PL_patchlevel"), (SV*)PL_patchlevel, TOTAL_SIZE_RECURSION);
1399 sv_size(aTHX_ st, NPathLink("PL_apiversion"), (SV*)PL_apiversion, TOTAL_SIZE_RECURSION);
1400 sv_size(aTHX_ st, NPathLink("PL_registered_mros"), (SV*)PL_registered_mros, TOTAL_SIZE_RECURSION);
1402 sv_size(aTHX_ st, NPathLink("PL_regex_padav"), (SV*)PL_regex_padav, TOTAL_SIZE_RECURSION);
1404 sv_size(aTHX_ st, NPathLink("PL_warnhook"), (SV*)PL_warnhook, TOTAL_SIZE_RECURSION);
1405 sv_size(aTHX_ st, NPathLink("PL_diehook"), (SV*)PL_diehook, TOTAL_SIZE_RECURSION);
1406 sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1407 sv_size(aTHX_ st, NPathLink("PL_main_cv"), (SV*)PL_main_cv, TOTAL_SIZE_RECURSION);
1408 sv_size(aTHX_ st, NPathLink("PL_main_root"), (SV*)PL_main_root, TOTAL_SIZE_RECURSION);
1409 sv_size(aTHX_ st, NPathLink("PL_main_start"), (SV*)PL_main_start, TOTAL_SIZE_RECURSION);
1410 /* TODO PL_pidstatus */
1411 /* TODO PL_stashpad */
1412 /* TODO PL_compiling? COP */
1414 /* TODO stacks: cur, main, tmps, mark, scope, save */
1415 /* TODO PL_exitlist */
1416 /* TODO PL_reentrant_buffers etc */
1418 /* TODO PerlIO? PL_known_layers PL_def_layerlist PL_perlio_fd_refcnt etc */
1420 /* TODO anything missed? */
1422 /* --- by this point we should have seen all reachable SVs --- */
1424 /* in theory we shouldn't have any elements in PL_strtab that haven't been seen yet */
1425 sv_size(aTHX_ st, NPathLink("PL_strtab-unseen"), (SV*)PL_strtab, TOTAL_SIZE_RECURSION);
1427 /* unused space in sv head arenas */
1431 # define SvARENA_CHAIN(sv) SvANY(sv) /* XXX breaks encapsulation*/
/* walk the free chain of SV heads, counting them; each should be new to us */
1432 while ((p = MUTABLE_SV(SvARENA_CHAIN(p)))) {
1433 if (!check_new(st, p)) /* sanity check */
1434 warn("Free'd SV head unexpectedly already seen");
1437 NPathPushNode("unused_sv_heads", NPtype_NAME);
1438 ADD_SIZE(st, "sv", free_heads * sizeof(SV));
1441 /* XXX iterate over bodies_by_type and crawl the free chains for each */
1443 /* iterate over all SVs to find any we've not accounted for yet */
1444 /* once the code above is visiting all SVs, any found here have been leaked */
1445 unseen_sv_size(aTHX_ st, NPathLink("unaccounted"));
/* NOTE(review): XS glue section, elided view — the XSUB declaration lines
 * (return type, name, ALIAS:/CODE: keywords) fall in lines not shown here.
 * What remains is the MODULE directive plus the bodies of two XSUBs. */
1449 MODULE = Devel::Memory PACKAGE = Devel::Memory
/* ALIAS table entry: when invoked under the total_size name, ix carries
 * TOTAL_SIZE_RECURSION, which is passed straight through as the recursion
 * policy below */
1457 total_size = TOTAL_SIZE_RECURSION
/* first XSUB body: size a single SV (or the aggregate behind a reference) */
1460 SV *thing = orig_thing;
1461 struct state *st = new_state(aTHX);
1463 /* If they passed us a reference then dereference it. This is the
1464 only way we can check the sizes of arrays and hashes */
1466 thing = SvRV(thing);
/* ix doubles as the recursion depth/policy thanks to the ALIAS values */
1469 sv_size(aTHX_ st, NULL, thing, ix);
1470 RETVAL = st->total_size;
1471 free_state(aTHX_ st);
/* second XSUB body: size the whole current interpreter */
1480 /* just the current perl interpreter */
1481 struct state *st = new_state(aTHX);
1482 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1483 perl_size(aTHX_ st, NULL);
1484 RETVAL = st->total_size;
1485 free_state(aTHX_ st);
1494 /* the current perl interpreter plus malloc, in the context of total heap size */
1495 # ifdef _MALLOC_MALLOC_H_ /* OSX. Not sure where else mstats is available */
1499 /* some systems have the SVID2/XPG mallinfo structure and function */
1500 struct mstats ms = mstats(); /* mstats() first */
1502 struct state *st = new_state(aTHX);
1503 dNPathNodes(1, NULL);
1504 NPathPushNode("heap", NPtype_NAME);
1506 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1508 perl_size(aTHX_ st, NPathLink("perl_interp"));
1510 NPathSetNode("free_malloc_space", NPtype_NAME);
1511 ADD_SIZE(st, "bytes_free", ms.bytes_free);
1512 ADD_ATTR(st, NPattr_NOTE, "bytes_total", ms.bytes_total);
1513 ADD_ATTR(st, NPattr_NOTE, "bytes_used", ms.bytes_used);
1514 ADD_ATTR(st, NPattr_NOTE, "chunks_used", ms.chunks_used);
1515 ADD_ATTR(st, NPattr_NOTE, "chunks_free", ms.chunks_free);
1516 /* TODO get heap size from OS and add a node: unknown = heapsize - perl - ms.bytes_free */
1517 /* for now we use bytes_total as an approximation */
1518 NPathSetNode("unknown", NPtype_NAME);
1519 ADD_SIZE(st, "unknown", ms.bytes_total - st->total_size);
1524 RETVAL = st->total_size;
1525 free_state(aTHX_ st);