5 * Refactor this to split out D:M code from Devel::Size code.
7 * Start migrating Devel::Size's Size.xs towards the new code.
9 * ADD_PRE_ATTR for index should check if the ptr is new first. Currently we're
10 * generating lots of ADD_PRE_ATTR's for SVs that we've already seen via other paths.
11 * That's wasteful and likely to cause subtle bugs.
13 * Give HE's their own node so keys and values can be tied together
17 #undef NDEBUG /* XXX */
20 #define PERL_NO_GET_CONTEXT
27 /* Not yet in ppport.h */
29 # define CvISXSUB(cv) (CvXSUB(cv) ? TRUE : FALSE)
32 # define SvRV_const(rv) SvRV(rv)
35 # define SvOOK_offset(sv, len) STMT_START { len = SvIVX(sv); } STMT_END
38 # define SvIsCOW(sv) ((SvFLAGS(sv) & (SVf_FAKE | SVf_READONLY)) == \
39 (SVf_FAKE | SVf_READONLY))
41 #ifndef SvIsCOW_shared_hash
42 # define SvIsCOW_shared_hash(sv) (SvIsCOW(sv) && SvLEN(sv) == 0)
44 #ifndef SvSHARED_HEK_FROM_PV
45 # define SvSHARED_HEK_FROM_PV(pvx) \
46 ((struct hek*)(pvx - STRUCT_OFFSET(struct hek, hek_key)))
50 # define PL_opargs opargs
51 # define PL_op_name op_name
55 /* "structured exception" handling is a Microsoft extension to C and C++.
56 It's *not* C++ exception handling - C++ exception handling can't capture
57 SEGVs and suchlike, whereas this can. There's no known analogous
58 functionality on other platforms. */
60 # define TRY_TO_CATCH_SEGV __try
61 # define CAUGHT_EXCEPTION __except(EXCEPTION_EXECUTE_HANDLER)
63 # define TRY_TO_CATCH_SEGV if(1)
64 # define CAUGHT_EXCEPTION else
68 # define __attribute__(x)
71 #if 0 && defined(DEBUGGING)
72 #define dbg_printf(x) printf x
77 #define TAG /* printf( "# %s(%d)\n", __FILE__, __LINE__ ) */
80 /* The idea is to have a tree structure to store 1 bit per possible pointer
81 address. The lowest 16 bits are stored in a block of 8192 bytes.
82 The blocks are in a 256-way tree, indexed by the rest of the pointer.
83 This can cope with 32 and 64 bit pointers, and any address space layout,
84 without excessive memory needs. The assumption is that your CPU cache
85 works :-) (And that we're not going to bust it) */
88 #define LEAF_BITS (16 - BYTE_BITS)
89 #define LEAF_MASK 0x1FFF
91 typedef struct npath_node_st npath_node_t;
92 struct npath_node_st {
107 /* My hunch (not measured) is that for most architectures pointers will
108 start with 0 bits, hence the start of this array will be hot, and the
109 end unused. So put the flags next to the hot end. */
111 int min_recurse_threshold;
112 /* callback hooks and data */
113 int (*add_attr_cb)(struct state *st, npath_node_t *npath_node, UV attr_type, const char *name, UV value);
114 void (*free_state_cb)(pTHX_ struct state *st);
115 void *state_cb_data; /* free'd by free_state() after free_state_cb() call */
116 /* this stuff will be moved to state_cb_data later */
118 FILE *node_stream_fh;
119 char *node_stream_name;
122 #define ADD_SIZE(st, leafname, bytes) (NPathAddSizeCb(st, leafname, bytes) (st)->total_size += (bytes))
124 #define PATH_TRACKING
127 #define pPATH npath_node_t *NPathArg
129 /* A subtle point here is that dNPathNodes and NPathPushNode leave NP pointing
130 * to the next unused slot (though with prev already filled in)
131 * whereas NPathLink leaves NP unchanged, it just fills in the slot NP points
132 * to and passes that NP value to the function being called.
133 * seqn==0 indicates the node is new (hasn't been output yet)
135 #define dNPathNodes(nodes, prev_np) \
136 npath_node_t name_path_nodes[nodes+1]; /* +1 for NPathLink */ \
137 npath_node_t *NP = &name_path_nodes[0]; \
138 NP->seqn = NP->type = 0; NP->id = Nullch; /* safety/debug */ \
140 #define NPathPushNode(nodeid, nodetype) \
142 NP->type = nodetype; \
144 if(0)fprintf(stderr,"NPathPushNode (%p <-) %p <- [%d %s]\n", NP->prev, NP, nodetype,(char*)nodeid);\
146 NP->id = Nullch; /* safety/debug */ \
149 #define NPathSetNode(nodeid, nodetype) \
150 (NP-1)->id = nodeid; \
151 (NP-1)->type = nodetype; \
152 if(0)fprintf(stderr,"NPathSetNode (%p <-) %p <- [%d %s]\n", (NP-1)->prev, (NP-1), nodetype,(char*)nodeid);\
154 #define NPathPopNode \
157 /* dNPathUseParent points NP directly at the parents' name_path_nodes array
158 * So the function can only safely call ADD_*() but not NPathLink, unless the
159 * caller has spare nodes in its name_path_nodes.
161 #define dNPathUseParent(prev_np) npath_node_t *NP = (((prev_np+1)->prev = prev_np), prev_np+1)
163 #define NPtype_NAME 0x01
164 #define NPtype_LINK 0x02
165 #define NPtype_SV 0x03
166 #define NPtype_MAGIC 0x04
167 #define NPtype_OP 0x05
169 /* XXX these should probably be generalized into flag bits */
170 #define NPattr_LEAFSIZE 0x00
171 #define NPattr_NAME 0x01
172 #define NPattr_PADFAKE 0x02
173 #define NPattr_PADNAME 0x03
174 #define NPattr_PADTMP 0x04
175 #define NPattr_NOTE 0x05
176 #define NPattr_PRE_ATTR 0x06
178 #define _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, np) (st->add_attr_cb && st->add_attr_cb(st, np, attr_type, attr_name, attr_value))
179 #define ADD_ATTR(st, attr_type, attr_name, attr_value) _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, NP-1)
180 #define ADD_PRE_ATTR(st, attr_type, attr_name, attr_value) (assert(!attr_type), _ADD_ATTR_NP(st, NPattr_PRE_ATTR, attr_name, attr_value, NP-1))
182 #define _NPathLink(np, nid, ntype) (((np)->id=nid), ((np)->type=ntype), ((np)->seqn=0))
183 #define NPathLink(nid) (_NPathLink(NP, nid, NPtype_LINK), NP)
184 /* add a link and a name node to the path - a special case for op_size */
185 #define NPathLinkAndNode(nid, nid2) (_NPathLink(NP, nid, NPtype_LINK), _NPathLink(NP+1, nid2, NPtype_NAME), ((NP+1)->prev=NP), (NP+1))
186 #define NPathOpLink (NPathArg)
187 #define NPathAddSizeCb(st, name, bytes) (st->add_attr_cb && st->add_attr_cb(st, NP-1, NPattr_LEAFSIZE, (name), (bytes))),
191 #define NPathAddSizeCb(st, name, bytes)
192 #define pPATH void *npath_dummy /* XXX ideally remove */
193 #define dNPathNodes(nodes, prev_np) dNOOP
194 #define NPathLink(nodeid, nodetype) NULL
195 #define NPathOpLink NULL
196 #define ADD_ATTR(st, attr_type, attr_name, attr_value) NOOP
198 #endif /* PATH_TRACKING */
205 static const char *svtypenames[SVt_LAST] = {
/* Display names indexed by SvTYPE(); the table layout changed across perl
 * releases, hence the #elif ladder selecting per-version name lists. */
207 "NULL", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVBM", "PVLV", "PVAV", "PVHV", "PVCV", "PVGV", "PVFM", "PVIO",
208 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
209 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
210 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
211 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
212 #elif PERL_VERSION < 13
213 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
215 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
/* Write a short human-readable label for one name-path node to fp,
 * dispatching on npath_node->type. For SV nodes it also appends type-specific
 * detail (AV fill/max, HV fill/max). */
220 np_print_node_name(FILE *fp, npath_node_t *npath_node)
222 char buf[1024]; /* XXX fixed-size scratch buffer */
224 switch (npath_node->type) {
225 case NPtype_SV: { /* id is pointer to the SV sv_size was called on */
226 const SV *sv = (SV*)npath_node->id;
227 int type = SvTYPE(sv);
/* SvROK on an SVt_IV is a reference in >=5.12; display it as "RV" */
228 char *typename = (type == SVt_IV && SvROK(sv)) ? "RV" : svtypenames[type];
229 fprintf(fp, "SV(%s)", typename);
230 switch(type) { /* add some useful details */
231 case SVt_PVAV: fprintf(fp, " fill=%d/%ld", av_len((AV*)sv), AvMAX((AV*)sv)); break;
232 case SVt_PVHV: fprintf(fp, " fill=%ld/%ld", HvFILL((HV*)sv), HvMAX((HV*)sv)); break;
236 case NPtype_OP: { /* id is pointer to the OP op_size was called on */
237 const OP *op = (OP*)npath_node->id;
238 fprintf(fp, "OP(%s)", OP_NAME(op));
241 case NPtype_MAGIC: { /* id is pointer to the MAGIC struct */
242 MAGIC *magic_pointer = (MAGIC*)npath_node->id;
243 /* XXX it would be nice if we could reuse mg_names.c [sigh] */
244 fprintf(fp, "MAGIC(%c)", magic_pointer->mg_type ? magic_pointer->mg_type : '0');
/* NPtype_NAME / NPtype_LINK: id is a plain string pointer */
248 fprintf(fp, "%s", npath_node->id);
251 fprintf(fp, "%s", npath_node->id);
253 default: /* assume id is a string pointer */
254 fprintf(fp, "UNKNOWN(%d,%p)", npath_node->type, npath_node->id);
/* Emit one ": " indent marker per depth level to stderr
 * (the loop over depth is elided in this excerpt). */
261 np_dump_indent(int depth) {
263     fprintf(stderr, ": ");
/* Walk up the path from npath_node to the root, invoking cb on every node not
 * yet output (seqn==0), root-first. Assigns depth and a fresh seqn to each new
 * node. If cb returns non-zero the node is "ignored": its seqn is revoked and
 * the deeper node is re-parented past it. */
267 np_walk_new_nodes(struct state *st,
268 npath_node_t *npath_node,
269 npath_node_t *npath_node_deeper,
270 int (*cb)(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper))
272 if (npath_node->seqn) /* node already output */
275 if (npath_node->prev) {
276 np_walk_new_nodes(st, npath_node->prev, npath_node, cb); /* recurse towards root first */
277 npath_node->depth = npath_node->prev->depth + 1;
279 else npath_node->depth = 0;
/* allocate the next output sequence number to this node */
280 npath_node->seqn = ++st->seqn;
283 if (cb(st, npath_node, npath_node_deeper)) {
284 /* ignore this node: only valid for non-root nodes with a deeper child */
285 assert(npath_node->prev);
286 assert(npath_node->depth);
287 assert(npath_node_deeper);
289 npath_node->seqn = --st->seqn; /* give back the sequence number */
290 npath_node_deeper->prev = npath_node->prev; /* splice this node out of the path */
/* np_walk_new_nodes() callback for the stderr "dump" output style:
 * print one indented line per node with its name, seqn and depth. */
298 np_dump_formatted_node(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
299 if (0 && npath_node->type == NPtype_LINK) /* disabled: would skip LINK nodes */
301 np_dump_indent(npath_node->depth);
302 np_print_node_name(stderr, npath_node);
303 if (npath_node->type == NPtype_LINK)
304 fprintf(stderr, "->"); /* cosmetic */
305 fprintf(stderr, "\t\t[#%ld @%u] ", npath_node->seqn, npath_node->depth);
306 fprintf(stderr, "\n");
/* add_attr_cb hook for the stderr "dump" output style: first flush any
 * not-yet-printed path nodes, then print this attribute (size, name, note,
 * pad slot, ...) indented one level deeper than its node. */
311 np_dump_node_path_info(struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
313 if (attr_type == NPattr_LEAFSIZE && !attr_value)
314 return 0; /* ignore zero sized leaf items */
315 np_walk_new_nodes(st, npath_node, NULL, np_dump_formatted_node);
316 np_dump_indent(npath_node->depth+1);
318 case NPattr_LEAFSIZE:
/* show the increment and the running total (total_size not yet updated here) */
319 fprintf(stderr, "+%ld %s =%ld", attr_value, attr_name, attr_value+st->total_size);
322 fprintf(stderr, "~NAMED('%s') %lu", attr_name, attr_value);
325 fprintf(stderr, "~note %s %lu", attr_name, attr_value);
330 fprintf(stderr, "~pad%lu %s %lu", attr_type, attr_name, attr_value);
333 fprintf(stderr, "~??? %s %lu", attr_name, attr_value); /* unknown attr_type */
336 fprintf(stderr, "\n");
/* np_walk_new_nodes() callback for the machine-readable stream output:
 * "-<type> <seqn> <depth> <name>" on one line of node_stream_fh. */
341 np_stream_formatted_node(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
342 fprintf(st->node_stream_fh, "-%u %lu %u ",
343 npath_node->type, npath_node->seqn, (unsigned)npath_node->depth
345 np_print_node_name(st->node_stream_fh, npath_node);
346 fprintf(st->node_stream_fh, "\n");
/* add_attr_cb hook for the stream output style: flush new path nodes, then
 * emit either an attribute record ("<type> <seqn> ...") or a leaf-size record
 * ("L <seqn> <bytes> <name>"). */
351 np_stream_node_path_info(struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
353 if (!attr_type && !attr_value)
354 return 0; /* ignore zero sized leaf items */
355 np_walk_new_nodes(st, npath_node, NULL, np_stream_formatted_node);
356 if (attr_type) { /* Attribute type, name and value */
357 fprintf(st->node_stream_fh, "%lu %lu ", attr_type, npath_node->seqn);
359 else { /* Leaf name and memory size */
360 fprintf(st->node_stream_fh, "L %lu ", npath_node->seqn);
362 fprintf(st->node_stream_fh, "%lu %s\n", attr_value, attr_name);
367 #endif /* PATH_TRACKING */
371 Checks to see if thing is in the bitstring.
372 Returns true or false, and
373 notes thing in the segmented bitstring.
/* Record pointer p in the tracking bit-tree. Returns TRUE if p was not seen
 * before (and marks it seen), FALSE for NULL or already-seen pointers. Also
 * probes *p to catch invalid pointers (the SEGV-catching wrapper is elided in
 * this excerpt). */
376 check_new(struct state *st, const void *const p) {
377 unsigned int bits = 8 * sizeof(void*);
378 const size_t raw_p = PTR2nat(p);
379 /* This effectively rotates the value right by the number of low always-0
380 bits in an aligned pointer. The assumption is that most (if not all)
381 pointers are aligned, and these will be in the same chain of nodes
382 (and hence hot in the cache) but we can still deal with any unaligned
384 const size_t cooked_p
385 = (raw_p >> ALIGN_BITS) | (raw_p << (bits - ALIGN_BITS));
386 const U8 this_bit = 1 << (cooked_p & 0x7);
390 void **tv_p = (void **) (st->tracking);
392 if (NULL == p) return FALSE;
/* touch the target byte; if p is bogus the surrounding SEGV handler fires */
394 const char c = *(const char *)p;
397 if (st->dangle_whine)
398 warn( "Devel::Size: Encountered invalid pointer: %p\n", p );
404 /* bits now 24 (32 bit pointers) or 56 (64 bit pointers) */
406 /* First level is always present. */
/* descend the 256-way tree one byte of the pointer at a time,
 * allocating intermediate levels on demand */
408 i = (unsigned int)((cooked_p >> bits) & 0xFF);
410 Newxz(tv_p[i], 256, void *);
411 tv_p = (void **)(tv_p[i]);
413 } while (bits > LEAF_BITS + BYTE_BITS);
414 /* bits now 16 always */
415 #if !defined(MULTIPLICITY) || PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8)
416 /* 5.8.8 and earlier have an assert() macro that uses Perl_croak, hence needs
417 a my_perl under multiplicity */
420 leaf_p = (U8 **)tv_p;
421 i = (unsigned int)((cooked_p >> bits) & 0xFF);
423 Newxz(leaf_p[i], 1 << LEAF_BITS, U8);
/* index of the byte holding this pointer's bit within the leaf block */
428 i = (unsigned int)((cooked_p >> BYTE_BITS) & LEAF_MASK);
430 if(leaf[i] & this_bit)
/* Recursively free one level of the pointer-tracking tree
 * (loop over the 256 child slots is elided in this excerpt). */
438 free_tracking_at(void **tv, int level)
446 free_tracking_at((void **) tv[i], level);
/* Release all resources owned by a state: run the user's free_state_cb hook
 * first, then its callback data, then the tracking bit-tree. */
460 free_state(struct state *st)
/* number of intermediate tree levels above the leaf blocks */
462 const int top_level = (sizeof(void *) * 8 - LEAF_BITS - BYTE_BITS) / 8;
463 if (st->free_state_cb)
464 st->free_state_cb(st);
465 if (st->state_cb_data)
466 Safefree(st->state_cb_data);
467 free_tracking_at((void **)st->tracking, top_level);
471 /* For now, this is somewhat a compatibility bodge until the plan comes
472 together for fine grained recursion control. total_size() would recurse into
473 hash and array members, whereas sv_size() would not. However, sv_size() is
474 called with CvSTASH() of a CV, which means that if it (also) starts to
475 recurse fully, then the size of any CV now becomes the size of the entire
476 symbol table reachable from it, and potentially the entire symbol table, if
477 any subroutine makes a reference to a global (such as %SIG). The historical
478 implementation of total_size() didn't report "everything", and changing the
479 only available size to "everything" doesn't feel at all useful. */
481 #define NO_RECURSION 0
482 #define SOME_RECURSION 1
483 #define TOTAL_SIZE_RECURSION 2
485 static void sv_size(pTHX_ struct state *, pPATH, const SV *const, const int recurse);
501 , OPc_CONDOP /* 12 */
/* Classify an OP into its structural class (OPc_BASEOP, OPc_UNOP, ...) so
 * op_size() knows which struct to size and which child pointers to follow.
 * Derived from B.xs's cc_opclass; mostly driven by PL_opargs with special
 * cases for ops whose class depends on flags. */
510 cc_opclass(const OP * const o)
516 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
/* SASSIGN is a UNOP when assigning backwards ($x = <expr> swapped) */
518 if (o->op_type == OP_SASSIGN)
519 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
522 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_AELEMFAST)
526 if ((o->op_type == OP_TRANS)) {
530 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
562 #ifdef OA_PVOP_OR_SVOP
563 case OA_PVOP_OR_SVOP: TAG;
565 * Character translations (tr///) are usually a PVOP, keeping a
566 * pointer to a table of shorts used to look up translations.
567 * Under utf8, however, a simple table isn't practical; instead,
568 * the OP is an SVOP, and the SV is a reference to a swash
569 * (i.e., an RV pointing to an HV).
571 return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF))
572 ? OPc_SVOP : OPc_PVOP;
581 case OA_BASEOP_OR_UNOP: TAG;
583 * UNI(OP_foo) in toke.c returns token UNI or FUNC1 depending on
584 * whether parens were seen. perly.y uses OPf_SPECIAL to
585 * signal whether a BASEOP had empty parens or none.
586 * Some other UNOPs are created later, though, so the best
587 * test is OPf_KIDS, which is set in newUNOP.
589 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
591 case OA_FILESTATOP: TAG;
593 * The file stat OPs are created via UNI(OP_foo) in toke.c but use
594 * the OPf_REF flag to distinguish between OP types instead of the
595 * usual OPf_SPECIAL flag. As usual, if OPf_KIDS is set, then we
596 * return OPc_UNOP so that walkoptree can find our children. If
597 * OPf_KIDS is not set then we check OPf_REF. Without OPf_REF set
598 * (no argument to the operator) it's an OP; with OPf_REF set it's
599 * an SVOP (and op_sv is the GV for the filehandle argument).
601 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
603 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
605 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
607 case OA_LOOPEXOP: TAG;
609 * next, last, redo, dump and goto use OPf_SPECIAL to indicate that a
610 * label was omitted (in which case it's a BASEOP) or else a term was
611 * seen. In this last case, all except goto are definitely PVOP but
612 * goto is either a PVOP (with an ordinary constant label), an UNOP
613 * with OPf_STACKED (with a non-constant non-sub) or an UNOP for
614 * OP_REFGEN (with goto &sub) in which case OPf_STACKED also seems to
617 if (o->op_flags & OPf_STACKED)
619 else if (o->op_flags & OPf_SPECIAL)
/* fallback: unknown class — warn and assume the smallest struct */
629 warn("Devel::Size: Can't determine class of operator %s, assuming BASEOP\n",
630 PL_op_name[o->op_type]);
636 /* Figure out how much magic is attached to the SV and return the
/* Walk the SV's magic chain and account for each MAGIC struct plus whatever
 * mg_obj / mg_ptr point at. Each previously-unseen MAGIC gets its own
 * NPtype_MAGIC path node. Stops at the first already-seen MAGIC (or NULL). */
639 magic_size(pTHX_ const SV * const thing, struct state *st, pPATH) {
640 dNPathNodes(1, NPathArg);
641 MAGIC *magic_pointer = SvMAGIC(thing);
/* an SV with SvMAGIC set but not SvMAGICAL looks corrupted — don't follow it */
646 if (!SvMAGICAL(thing)) {
648 warn("Ignoring suspect magic on this SV\n");
654 /* push a dummy node for NPathSetNode to update inside the while loop */
655 NPathPushNode("dummy", NPtype_NAME);
657 /* Have we seen the magic pointer? (NULL has always been seen before) */
658 while (check_new(st, magic_pointer)) {
660 NPathSetNode(magic_pointer, NPtype_MAGIC);
662 ADD_SIZE(st, "mg", sizeof(MAGIC));
663 /* magic vtables aren't freed when magic is freed, so don't count them.
664 (They are static structures. Anything that assumes otherwise is buggy.)
669 /* XXX only chase mg_obj if mg->mg_flags & MGf_REFCOUNTED ? */
670 sv_size(aTHX_ st, NPathLink("mg_obj"), magic_pointer->mg_obj, TOTAL_SIZE_RECURSION);
/* mg_len == HEf_SVKEY means mg_ptr is really an SV*, not a byte buffer */
671 if (magic_pointer->mg_len == HEf_SVKEY) {
672 sv_size(aTHX_ st, NPathLink("mg_ptr"), (SV *)magic_pointer->mg_ptr, TOTAL_SIZE_RECURSION);
674 #if defined(PERL_MAGIC_utf8) && defined (PERL_MAGIC_UTF8_CACHESIZE)
675 else if (magic_pointer->mg_type == PERL_MAGIC_utf8) {
676 if (check_new(st, magic_pointer->mg_ptr)) {
677 ADD_SIZE(st, "PERL_MAGIC_utf8", PERL_MAGIC_UTF8_CACHESIZE * 2 * sizeof(STRLEN));
681 /* XXX also handle mg->mg_type == PERL_MAGIC_utf8 ? */
682 else if (magic_pointer->mg_len > 0) {
683 if(0)do_magic_dump(0, Perl_debug_log, magic_pointer, 0, 0, FALSE, 0);
684 if (check_new(st, magic_pointer->mg_ptr)) {
685 ADD_SIZE(st, "mg_len", magic_pointer->mg_len);
689 /* Get the next in the chain */
690 magic_pointer = magic_pointer->mg_moremagic;
/* the SEGV-catching handler lands here if the chain was dangling */
693 if (st->dangle_whine)
694 warn( "Devel::Size: Encountered bad magic at: %p\n", magic_pointer );
/* If char pointer p hasn't been counted yet, add strlen(p)+1 bytes under a
 * path node named after the caller's NPathLink id. Safe no-op for seen/NULL. */
700 check_new_and_strlen(struct state *st, const char *const p, pPATH) {
701 dNPathNodes(1, NPathArg->prev);
702 if(check_new(st, p)) {
703 NPathPushNode(NPathArg->id, NPtype_NAME);
704 ADD_SIZE(st, NPathArg->id, 1 + strlen(p)); /* +1 for the NUL terminator */
/* Account for a compiled regexp. Pre-5.12 the REGEXP is a plain struct;
 * later it is an SV whose body is struct regexp. Internals are opaque, so
 * sizes are approximate (hence the one-time regex_whine warning). */
709 regex_size(const REGEXP * const baseregex, struct state *st, pPATH) {
710 dNPathNodes(1, NPathArg);
711 if(!check_new(st, baseregex))
713 NPathPushNode("regex_size", NPtype_NAME);
714 ADD_SIZE(st, "REGEXP", sizeof(REGEXP));
715 #if (PERL_VERSION < 11)
716 /* Note the size of the paren offset thing */
717 ADD_SIZE(st, "nparens", sizeof(I32) * baseregex->nparens * 2);
718 ADD_SIZE(st, "precomp", strlen(baseregex->precomp));
720 ADD_SIZE(st, "regexp", sizeof(struct regexp));
721 ADD_SIZE(st, "nparens", sizeof(I32) * SvANY(baseregex)->nparens * 2);
722 /*ADD_SIZE(st, strlen(SvANY(baseregex)->subbeg));*/
/* warn once per state that regexp sizing is a known approximation */
724 if (st->go_yell && !st->regex_whine) {
725 carp("Devel::Size: Calculated sizes for compiled regexes are incompatible, and probably always will be");
/* Size an optree rooted at baseop: add the struct for this op's class (per
 * cc_opclass) and recurse through op_next plus every class-specific child
 * pointer. check_new() guards against cycles and shared subtrees. */
731 op_size(pTHX_ const OP * const baseop, struct state *st, pPATH)
733 /* op_size recurses to follow the chain of opcodes. For the node path we
734 * don't want the chain to be 'nested' in the path so we use dNPathUseParent().
735 * Also, to avoid a link-to-a-link the caller should use NPathLinkAndNode()
736 * instead of NPathLink().
738 dNPathUseParent(NPathArg);
742 if(!check_new(st, baseop))
745 op_size(aTHX_ baseop->op_next, st, NPathOpLink);
747 switch (cc_opclass(baseop)) {
748 case OPc_BASEOP: TAG;
749 ADD_SIZE(st, "op", sizeof(struct op));
752 ADD_SIZE(st, "unop", sizeof(struct unop));
753 op_size(aTHX_ ((UNOP *)baseop)->op_first, st, NPathOpLink);
756 ADD_SIZE(st, "binop", sizeof(struct binop));
757 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
758 op_size(aTHX_ ((BINOP *)baseop)->op_last, st, NPathOpLink);
761 ADD_SIZE(st, "logop", sizeof(struct logop));
762 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
763 op_size(aTHX_ ((LOGOP *)baseop)->op_other, st, NPathOpLink);
766 case OPc_CONDOP: TAG;
767 ADD_SIZE(st, "condop", sizeof(struct condop));
768 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
769 op_size(aTHX_ ((CONDOP *)baseop)->op_true, st, NPathOpLink);
770 op_size(aTHX_ ((CONDOP *)baseop)->op_false, st, NPathOpLink);
773 case OPc_LISTOP: TAG;
774 ADD_SIZE(st, "listop", sizeof(struct listop));
775 op_size(aTHX_ ((LISTOP *)baseop)->op_first, st, NPathOpLink);
776 op_size(aTHX_ ((LISTOP *)baseop)->op_last, st, NPathOpLink);
779 ADD_SIZE(st, "pmop", sizeof(struct pmop));
780 op_size(aTHX_ ((PMOP *)baseop)->op_first, st, NPathOpLink);
781 op_size(aTHX_ ((PMOP *)baseop)->op_last, st, NPathOpLink);
782 #if PERL_VERSION < 9 || (PERL_VERSION == 9 && PERL_SUBVERSION < 5)
783 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplroot, st, NPathOpLink);
784 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplstart, st, NPathOpLink);
786 /* This is defined away in perl 5.8.x, but it is in there for
789 regex_size(PM_GETRE((PMOP *)baseop), st, NPathLink("PM_GETRE"));
791 regex_size(((PMOP *)baseop)->op_pmregexp, st, NPathLink("op_pmregexp"));
795 ADD_SIZE(st, "svop", sizeof(struct svop));
796 if (!(baseop->op_type == OP_AELEMFAST
797 && baseop->op_flags & OPf_SPECIAL)) {
798 /* not an OP_PADAV replacement */
799 sv_size(aTHX_ st, NPathLink("SVOP"), ((SVOP *)baseop)->op_sv, SOME_RECURSION);
804 ADD_SIZE(st, "padop", sizeof(struct padop));
809 ADD_SIZE(st, "gvop", sizeof(struct gvop));
810 sv_size(aTHX_ st, NPathLink("GVOP"), ((GVOP *)baseop)->op_gv, SOME_RECURSION);
814 check_new_and_strlen(st, ((PVOP *)baseop)->op_pv, NPathLink("op_pv"));
817 ADD_SIZE(st, "loop", sizeof(struct loop));
818 op_size(aTHX_ ((LOOP *)baseop)->op_first, st, NPathOpLink);
819 op_size(aTHX_ ((LOOP *)baseop)->op_last, st, NPathOpLink);
820 op_size(aTHX_ ((LOOP *)baseop)->op_redoop, st, NPathOpLink);
821 op_size(aTHX_ ((LOOP *)baseop)->op_nextop, st, NPathOpLink);
822 op_size(aTHX_ ((LOOP *)baseop)->op_lastop, st, NPathOpLink);
827 basecop = (COP *)baseop;
828 ADD_SIZE(st, "cop", sizeof(struct cop));
830 /* Change 33656 by nicholas@mouse-mill on 2008/04/07 11:29:51
831 Eliminate cop_label from struct cop by storing a label as the first
832 entry in the hints hash. Most statements don't have labels, so this
833 will save memory. Not sure how much.
834 The check below will incorrectly fail on bleadperls
835 before 5.11 @33656, but later than 5.10, producing slightly too
836 small memory sizes on these Perls. */
837 #if (PERL_VERSION < 11)
838 check_new_and_strlen(st, basecop->cop_label, NPathLink("cop_label"));
841 check_new_and_strlen(st, basecop->cop_file, NPathLink("cop_file"));
842 check_new_and_strlen(st, basecop->cop_stashpv, NPathLink("cop_stashpv"));
844 if (SvREFCNT(basecop->cop_stash) == 1) /* XXX hack? */
845 sv_size(aTHX_ st, NPathLink("cop_stash"), (SV *)basecop->cop_stash, SOME_RECURSION);
846 sv_size(aTHX_ st, NPathLink("cop_filegv"), (SV *)basecop->cop_filegv, SOME_RECURSION);
/* SEGV handler path: the optree contained a dangling pointer */
856 if (st->dangle_whine)
857 warn( "Devel::Size: Encountered dangling pointer in opcode at: %p\n", baseop );
/* Account for one hash-entry key (HEK). Keys can be shared across hashes, so
 * check_new() dedupes; shared keys additionally carry per-version container
 * overhead (struct he pre-5.10, shared_he header after). */
862 hek_size(pTHX_ struct state *st, HEK *hek, U32 shared, pPATH)
864 dNPathNodes(1, NPathArg);
866 /* Hash keys can be shared. Have we seen this before? */
867 if (!check_new(st, hek))
869 NPathPushNode("hek", NPtype_NAME);
870 ADD_SIZE(st, "hek_len", HEK_BASESIZE + hek->hek_len
872 + 1 /* No hash key flags prior to 5.8.0 */
878 #if PERL_VERSION < 10
879 ADD_SIZE(st, "he", sizeof(struct he));
881 ADD_SIZE(st, "shared_he", STRUCT_OFFSET(struct shared_he, shared_he_hek));
887 #if PERL_VERSION < 8 || PERL_SUBVERSION < 9
892 # define MAYBE_PURIFY(normal, pure) (pure)
893 # define MAYBE_OFFSET(struct_name, member) 0
895 # define MAYBE_PURIFY(normal, pure) (normal)
896 # define MAYBE_OFFSET(struct_name, member) STRUCT_OFFSET(struct_name, member)
/* Per-SvTYPE body allocation sizes, indexed by SvTYPE(). The #elif ladder
 * mirrors how each perl release actually allocates bodies: some types use
 * truncated "allocated" structs or start part-way into the struct
 * (MAYBE_OFFSET), and Purify builds use full structs (MAYBE_PURIFY). */
899 const U8 body_sizes[SVt_LAST] = {
902 MAYBE_PURIFY(sizeof(IV), sizeof(XPVIV)), /* SVt_IV */
903 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
904 sizeof(XRV), /* SVt_RV */
905 sizeof(XPV), /* SVt_PV */
906 sizeof(XPVIV), /* SVt_PVIV */
907 sizeof(XPVNV), /* SVt_PVNV */
908 sizeof(XPVMG), /* SVt_PVMG */
909 sizeof(XPVBM), /* SVt_PVBM */
910 sizeof(XPVLV), /* SVt_PVLV */
911 sizeof(XPVAV), /* SVt_PVAV */
912 sizeof(XPVHV), /* SVt_PVHV */
913 sizeof(XPVCV), /* SVt_PVCV */
914 sizeof(XPVGV), /* SVt_PVGV */
915 sizeof(XPVFM), /* SVt_PVFM */
916 sizeof(XPVIO) /* SVt_PVIO */
917 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
921 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
923 MAYBE_PURIFY(sizeof(xpv_allocated), sizeof(XPV)), /* SVt_PV */
924 MAYBE_PURIFY(sizeof(xpviv_allocated), sizeof(XPVIV)),/* SVt_PVIV */
925 sizeof(XPVNV), /* SVt_PVNV */
926 sizeof(XPVMG), /* SVt_PVMG */
927 sizeof(XPVGV), /* SVt_PVGV */
928 sizeof(XPVLV), /* SVt_PVLV */
929 MAYBE_PURIFY(sizeof(xpvav_allocated), sizeof(XPVAV)),/* SVt_PVAV */
930 MAYBE_PURIFY(sizeof(xpvhv_allocated), sizeof(XPVHV)),/* SVt_PVHV */
931 MAYBE_PURIFY(sizeof(xpvcv_allocated), sizeof(XPVCV)),/* SVt_PVCV */
932 MAYBE_PURIFY(sizeof(xpvfm_allocated), sizeof(XPVFM)),/* SVt_PVFM */
933 sizeof(XPVIO), /* SVt_PVIO */
934 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
938 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
940 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
941 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
942 sizeof(XPVNV), /* SVt_PVNV */
943 sizeof(XPVMG), /* SVt_PVMG */
944 sizeof(XPVGV), /* SVt_PVGV */
945 sizeof(XPVLV), /* SVt_PVLV */
946 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
947 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
948 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
949 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
950 sizeof(XPVIO) /* SVt_PVIO */
951 #elif PERL_VERSION < 13
955 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
956 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
957 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
958 sizeof(XPVNV), /* SVt_PVNV */
959 sizeof(XPVMG), /* SVt_PVMG */
960 sizeof(regexp) - MAYBE_OFFSET(regexp, xpv_cur), /* SVt_REGEXP */
961 sizeof(XPVGV), /* SVt_PVGV */
962 sizeof(XPVLV), /* SVt_PVLV */
963 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
964 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
965 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
966 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
967 sizeof(XPVIO) /* SVt_PVIO */
972 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
973 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
974 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
975 sizeof(XPVNV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVNV */
976 sizeof(XPVMG), /* SVt_PVMG */
977 sizeof(regexp), /* SVt_REGEXP */
978 sizeof(XPVGV), /* SVt_PVGV */
979 sizeof(XPVLV), /* SVt_PVLV */
980 sizeof(XPVAV), /* SVt_PVAV */
981 sizeof(XPVHV), /* SVt_PVHV */
982 sizeof(XPVCV), /* SVt_PVCV */
983 sizeof(XPVFM), /* SVt_PVFM */
984 sizeof(XPVIO) /* SVt_PVIO */
989 /* based on Perl_do_dump_pad() - wraps sv_size and adds ADD_ATTR calls for the pad names */
/* Size a CV's pad list. Walks the pad-name AV (element 0 of the padlist) to
 * emit an ADD_ATTR per pad slot — faked lexicals (closed-over, SvFAKE),
 * named lexicals, and targets/temporaries — then sizes the padlist AV itself
 * via sv_size(). Based on Perl_do_dump_pad(). */
991 padlist_size(pTHX_ struct state *const st, pPATH, PADLIST *padlist,
994 dNPathUseParent(NPathArg);
/* dedupe deliberately disabled (if(0)): attrs should be emitted every time */
1001 if( 0 && !check_new(st, padlist))
1004 pad_name = MUTABLE_AV(*av_fetch(MUTABLE_AV(padlist), 0, FALSE));
1005 pname = AvARRAY(pad_name);
1007 for (ix = 1; ix <= AvFILLp(pad_name); ix++) {
1008 const SV *namesv = pname[ix];
1009 if (namesv && namesv == &PL_sv_undef) { /* unnamed slot */
1013 /* SvFAKE: On a pad name SV, that slot in the frame AV is a REFCNT'ed reference to a lexical from "outside" */
1015 ADD_ATTR(st, NPattr_PADFAKE, SvPVX_const(namesv), ix);
1017 ADD_ATTR(st, NPattr_PADNAME, SvPVX_const(namesv), ix);
1020 ADD_ATTR(st, NPattr_PADTMP, "SVs_PADTMP", ix);
1024 sv_size(aTHX_ st, NPathArg, (SV*)padlist, recurse);
/* Core sizing routine: account for an SV's head and body, then dispatch on
 * SvTYPE to follow type-specific contents (RV target, AV slots, HV buckets
 * and aux, CV pads/optrees, IO names, ...). recurse controls how deep to
 * chase contained SVs (NO/SOME/TOTAL_SIZE_RECURSION). */
1029 sv_size(pTHX_ struct state *const st, pPATH, const SV * const orig_thing,
1030 const int recurse) {
1031 const SV *thing = orig_thing;
1032 dNPathNodes(3, NPathArg);
1035 if(!check_new(st, orig_thing))
1038 type = SvTYPE(thing);
1039 if (type > SVt_LAST) {
1040 warn("Devel::Size: Unknown variable type: %d encountered\n", type);
1043 NPathPushNode(thing, NPtype_SV);
1044 ADD_SIZE(st, "sv_head", sizeof(SV));
1045 ADD_SIZE(st, "sv_body", body_sizes[type]);
1048 #if (PERL_VERSION < 11)
1049 /* Is it a reference? */
1054 if(recurse && SvROK(thing)) /* XXX maybe don't follow weakrefs */
1055 sv_size(aTHX_ st, (SvWEAKREF(thing) ? NPathLink("weakRV") : NPathLink("RV")), SvRV_const(thing), recurse);
1059 /* Is there anything in the array? */
1060 if (AvMAX(thing) != -1) {
1061 /* an array with 10 slots has AvMax() set to 9 - te 2007-04-22 */
1062 ADD_SIZE(st, "av_max", sizeof(SV *) * (AvMAX(thing) + 1));
1063 dbg_printf(("total_size: %li AvMAX: %li av_len: $i\n", st->total_size, AvMAX(thing), av_len((AV*)thing)));
1065 if (recurse >= st->min_recurse_threshold) {
1066 SSize_t i = AvFILLp(thing) + 1;
1069 ADD_PRE_ATTR(st, 0, "index", i);
1070 sv_size(aTHX_ st, NPathLink("AVelem"), AvARRAY(thing)[i], recurse);
1074 /* Add in the bits on the other side of the beginning */
1076 dbg_printf(("total_size %li, sizeof(SV *) %li, AvARRAY(thing) %li, AvALLOC(thing)%li , sizeof(ptr) %li \n",
1077 st->total_size, sizeof(SV*), AvARRAY(thing), AvALLOC(thing), sizeof( thing )));
1079 /* under Perl 5.8.8 64bit threading, AvARRAY(thing) was a pointer while AvALLOC was 0,
1080 resulting in grossly overstated sizes for arrays. Technically, this shouldn't happen... */
1081 if (AvALLOC(thing) != 0) {
1082 ADD_SIZE(st, "AvALLOC", (sizeof(SV *) * (AvARRAY(thing) - AvALLOC(thing))));
1084 #if (PERL_VERSION < 9)
1085 /* Is there something hanging off the arylen element?
1086 Post 5.9.something this is stored in magic, so will be found there,
1087 and Perl_av_arylen_p() takes a non-const AV*, hence compilers rightly
1088 complain about AvARYLEN() passing thing to it. */
1089 sv_size(aTHX_ st, NPathLink("ARYLEN"), AvARYLEN(thing), recurse);
1094 /* Now the array of buckets */
1095 ADD_SIZE(st, "hv_max", (sizeof(HE *) * (HvMAX(thing) + 1)));
1096 if (HvENAME(thing)) {
1097 ADD_ATTR(st, NPattr_NAME, HvENAME(thing), 0);
1099 /* Now walk the bucket chain */
1100 if (HvARRAY(thing)) {
1103 for (cur_bucket = 0; cur_bucket <= HvMAX(thing); cur_bucket++) {
1104 cur_entry = *(HvARRAY(thing) + cur_bucket);
1106 /* XXX a HE should probably be a node so the keys and values are seen as pairs */
1107 ADD_SIZE(st, "he", sizeof(HE));
1108 hek_size(aTHX_ st, cur_entry->hent_hek, HvSHAREKEYS(thing), NPathLink("hent_hek"));
1109 if (recurse >= st->min_recurse_threshold) {
1110 if (orig_thing == (SV*)PL_strtab) {
1111 /* For PL_strtab the HeVAL is used as a refcnt */
1112 ADD_SIZE(st, "shared_hek", HeKLEN(cur_entry));
1115 /* I've seen a PL_strtab HeVAL == 0xC and 0x40C etc
1116 * just running perl -Mblib -Mstrict -MDevel::Size=:all -MCarp -e 'warn perl_size()'
1117 * but it seemed like a corruption - it would change come and go with irrelevant code changes.
1118 * so we protect against that here, but I'd like to know the cause.
1120 if (PTR2UV(HeVAL(cur_entry)) > 0xFFF)
1121 sv_size(aTHX_ st, NPathLink("HeVAL"), HeVAL(cur_entry), recurse);
1122 else warn("skipped suspect HeVAL %p", HeVAL(cur_entry));
1125 cur_entry = cur_entry->hent_next;
1131 /* This direct access is arguably "naughty": */
1132 struct mro_meta *meta = HvAUX(thing)->xhv_mro_meta;
1133 #if PERL_VERSION > 13 || PERL_SUBVERSION > 8
/* 5.14+: a stash may carry multiple effective names */
1135 I32 count = HvAUX(thing)->xhv_name_count;
1138 HEK **names = HvAUX(thing)->xhv_name_u.xhvnameu_names;
1142 hek_size(aTHX_ st, names[count], 1, NPathLink("HvAUXelem"));
1147 hek_size(aTHX_ st, HvNAME_HEK(thing), 1, NPathLink("HvNAME_HEK"));
1150 ADD_SIZE(st, "xpvhv_aux", sizeof(struct xpvhv_aux));
1152 ADD_SIZE(st, "mro_meta", sizeof(struct mro_meta));
1153 sv_size(aTHX_ st, NPathLink("mro_nextmethod"), (SV *)meta->mro_nextmethod, TOTAL_SIZE_RECURSION);
1154 #if PERL_VERSION > 10 || (PERL_VERSION == 10 && PERL_SUBVERSION > 0)
1155 sv_size(aTHX_ st, NPathLink("isa"), (SV *)meta->isa, TOTAL_SIZE_RECURSION);
1157 #if PERL_VERSION > 10
1158 sv_size(aTHX_ st, NPathLink("mro_linear_all"), (SV *)meta->mro_linear_all, TOTAL_SIZE_RECURSION);
1159 sv_size(aTHX_ st, NPathLink("mro_linear_current"), meta->mro_linear_current, TOTAL_SIZE_RECURSION);
1161 sv_size(aTHX_ st, NPathLink("mro_linear_dfs"), (SV *)meta->mro_linear_dfs, TOTAL_SIZE_RECURSION);
1162 sv_size(aTHX_ st, NPathLink("mro_linear_c3"), (SV *)meta->mro_linear_c3, TOTAL_SIZE_RECURSION);
1167 check_new_and_strlen(st, HvNAME_get(thing), NPathLink("HvNAME"));
/* formats (and CVs): pads, outside scope, and an incomplete-size warning */
1173 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1174 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1176 if (st->go_yell && !st->fm_whine) {
1177 carp("Devel::Size: Calculated sizes for FMs are incomplete");
1183 /* not CvSTASH, per https://rt.cpan.org/Ticket/Display.html?id=79366 */
1184 ADD_ATTR(st, NPattr_NAME, CvGV(thing) ? GvNAME(CvGV(thing)) : "UNDEFINED", 0);
1185 sv_size(aTHX_ st, NPathLink("SvSTASH"), (SV *)SvSTASH(thing), SOME_RECURSION);
1186 sv_size(aTHX_ st, NPathLink("CvGV"), (SV *)CvGV(thing), SOME_RECURSION);
1187 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1188 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1189 if (CvISXSUB(thing)) {
1190 sv_size(aTHX_ st, NPathLink("cv_const_sv"), cv_const_sv((CV *)thing), recurse);
1192 if(1)op_size(aTHX_ CvSTART(thing), st, NPathLinkAndNode("CvSTART", "OPs")); /* XXX ? */
1193 op_size(aTHX_ CvROOT(thing), st, NPathLinkAndNode("CvROOT", "OPs"));
1198 /* Some embedded char pointers */
1199 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_top_name, NPathLink("xio_top_name"));
1200 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_fmt_name, NPathLink("xio_fmt_name"));
1201 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_bottom_name, NPathLink("xio_bottom_name"));
1202 /* Throw the GVs on the list to be walked if they're not-null */
1203 sv_size(aTHX_ st, NPathLink("xio_top_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_top_gv, recurse);
1204 sv_size(aTHX_ st, NPathLink("xio_bottom_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_bottom_gv, recurse);
1205 sv_size(aTHX_ st, NPathLink("xio_fmt_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_fmt_gv, recurse);
1207 /* Only go trotting through the IO structures if they're really
1208 trottable. If USE_PERLIO is defined we can do this. If
1209 not... we can't, so we don't even try */
1211 /* Dig into xio_ifp and xio_ofp here */
1212 warn("Devel::Size: Can't size up perlio layers yet\n");
1217 #if (PERL_VERSION < 9)
1222 if(isGV_with_GP(thing)) {
1224 hek_size(aTHX_ st, GvNAME_HEK(thing), 1, NPathLink("GvNAME_HEK"));
1226 ADD_SIZE(st, "GvNAMELEN", GvNAMELEN(thing));
1228 ADD_ATTR(st, NPattr_NAME, GvNAME_get(thing), 0);
1230 hek_size(aTHX_ st, GvFILE_HEK(thing), 1, NPathLink("GvFILE_HEK"));
1231 #elif defined(GvFILE)
1232 # if !defined(USE_ITHREADS) || (PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8))
1233 /* With itreads, before 5.8.9, this can end up pointing to freed memory
1234 if the GV was created in an eval, as GvFILE() points to CopFILE(),
1235 and the relevant COP has been freed on scope cleanup after the eval.
1236 5.8.9 adds a binary compatible fudge that catches the vast majority
1237 of cases. 5.9.something added a proper fix, by converting the GP to
1238 use a shared hash key (porperly reference counted), instead of a
1239 char * (owned by who knows? possibly no-one now) */
1240 check_new_and_strlen(st, GvFILE(thing), NPathLink("GvFILE"));
1243 /* Is there something hanging off the glob? */
1244 if (check_new(st, GvGP(thing))) {
1245 ADD_SIZE(st, "GP", sizeof(GP));
1246 sv_size(aTHX_ st, NPathLink("gp_sv"), (SV *)(GvGP(thing)->gp_sv), recurse);
1247 sv_size(aTHX_ st, NPathLink("gp_av"), (SV *)(GvGP(thing)->gp_av), recurse);
1248 sv_size(aTHX_ st, NPathLink("gp_hv"), (SV *)(GvGP(thing)->gp_hv), recurse);
1249 sv_size(aTHX_ st, NPathLink("gp_cv"), (SV *)(GvGP(thing)->gp_cv), recurse);
1250 sv_size(aTHX_ st, NPathLink("gp_egv"), (SV *)(GvGP(thing)->gp_egv), recurse);
1251 sv_size(aTHX_ st, NPathLink("gp_form"), (SV *)(GvGP(thing)->gp_form), recurse);
1253 #if (PERL_VERSION >= 9)
1257 #if PERL_VERSION <= 8
1265 if(recurse && SvROK(thing))
1266 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
1267 else if (SvIsCOW_shared_hash(thing))
1268 hek_size(aTHX_ st, SvSHARED_HEK_FROM_PV(SvPVX(thing)), 1, NPathLink("SvSHARED_HEK_FROM_PV"));
1270 ADD_SIZE(st, "SvLEN", SvLEN(thing));
1274 SvOOK_offset(thing, len);
1275 ADD_SIZE(st, "SvOOK", len);
1281 if (type >= SVt_PVMG) {
1282 magic_size(aTHX_ thing, st, NPathLink("MG"));
/* Tear-down callback for a struct state: close the node stream, if any.
 * A stream name beginning with '|' was opened with popen() (see new_state),
 * so it must be closed with pclose(); anything else was fopen()'d.
 * Either close failure is reported via warn() but not treated as fatal.
 * NOTE(review): the function's return-type line and closing braces are
 * elided from this view. */
1289 free_memnode_state(pTHX_ struct state *st)
1291 if (st->node_stream_fh && st->node_stream_name && *st->node_stream_name) {
1292 if (*st->node_stream_name == '|') {
1293 if (pclose(st->node_stream_fh))
1294 warn("%s exited with an error status\n", st->node_stream_name);
1297 if (fclose(st->node_stream_fh))
1298 warn("Error closing %s: %s\n", st->node_stream_name, strerror(errno));
/* Allocate and initialise a fresh sizing-state structure.
 * Reads $Devel::Size::warn and $Devel::Size::dangle to configure whining,
 * pre-marks the immortal SVs as "seen" so they are never counted, and
 * (under PATH_TRACKING) wires up the node-path output stream selected by
 * the PERL_DMEM environment variable: "|cmd" pipes to cmd via popen(),
 * any other non-empty value is fopen()'d as a file, and an empty/unset
 * value falls back to the stderr dump callback.
 * NOTE(review): the function-name line and several closing braces between
 * these statements are elided from this view. */
1303 static struct state *
1309 Newxz(st, 1, struct state);
1311 st->min_recurse_threshold = TOTAL_SIZE_RECURSION;
1312 if (NULL != (warn_flag = perl_get_sv("Devel::Size::warn", FALSE))) {
1313 st->dangle_whine = st->go_yell = SvIV(warn_flag) ? TRUE : FALSE;
1315 if (NULL != (warn_flag = perl_get_sv("Devel::Size::dangle", FALSE))) {
1316 st->dangle_whine = SvIV(warn_flag) ? TRUE : FALSE;
/* mark the immortals as seen so they're never added to any total */
1318 check_new(st, &PL_sv_undef);
1319 check_new(st, &PL_sv_no);
1320 check_new(st, &PL_sv_yes);
1321 #if PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 0)
1322 check_new(st, &PL_sv_placeholder);
1325 #ifdef PATH_TRACKING
1326 /* XXX quick hack */
1327 st->node_stream_name = getenv("PERL_DMEM");
1328 if (st->node_stream_name) {
1329 if (*st->node_stream_name) {
1330 if (*st->node_stream_name == '|')
1331 st->node_stream_fh = popen(st->node_stream_name+1, "w");
1333 st->node_stream_fh = fopen(st->node_stream_name, "wb");
1334 if (!st->node_stream_fh)
1335 croak("Can't open '%s' for writing: %s", st->node_stream_name, strerror(errno));
1336 setlinebuf(st->node_stream_fh); /* XXX temporary for debugging */
1337 st->add_attr_cb = np_stream_node_path_info;
1340 st->add_attr_cb = np_dump_node_path_info;
/* free_memnode_state closes the stream opened above */
1342 st->free_state_cb = free_memnode_state;
/* Sweep all SV arenas and size any live SV not yet visited ("unseen").
 * Called after the main traversal; anything counted here is either a
 * traversal gap or a genuine leak. Arena walk mirrors S_visit() in sv.c:
 * SvANY of an arena head chains to the next arena, and SvREFCNT of the
 * head holds the arena's slot count.
 * NOTE(review): return type, variable declarations and closing braces
 * are elided from this view. */
1348 /* XXX based on S_visit() in sv.c */
1350 unseen_sv_size(pTHX_ struct state *st, pPATH)
1355 dNPathNodes(1, NPathArg);
1357 NPathPushNode("unseen", NPtype_NAME);
1359 /* by this point we should have visited all the SVs
1360 * so now we'll run through all the SVs via the arenas
1361 * in order to find any that we've missed for some reason.
1362 * Once the rest of the code is finding all the SVs then any
1363 * found here will be leaks.
1365 for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
1366 const SV * const svend = &sva[SvREFCNT(sva)];
1368 for (sv = sva + 1; sv < svend; ++sv) {
1369 if (SvTYPE(sv) != (svtype)SVTYPEMASK && SvREFCNT(sv)) {
1370 sv_size(aTHX_ st, NPathLink("arena"), sv, TOTAL_SIZE_RECURSION);
/* a freed slot should already be on the free list we've seen; if
 * check_new says it's new, something is inconsistent */
1372 else if (check_new(st, sv)) { /* sanity check */
1374 warn("unseen_sv_size encountered freed SV unexpectedly"); /* XXX warn uses an SV, I think */
/* Size the whole interpreter: everything reachable from %main:: plus the
 * assorted PL_* interpreter globals, then free SV heads, then a final
 * arena sweep for anything missed. Totals accumulate into st.
 * NOTE(review): return type, some declarations, #if guards and closing
 * braces are elided from this view. */
1381 perl_size(pTHX_ struct state *const st, pPATH)
1383 dNPathNodes(3, NPathArg);
1385 /* if(!check_new(st, interp)) return; */
1386 NPathPushNode("perl", NPtype_NAME);
1392 * unknown <== = O/S Heap size - perl - free_malloc_space
1394 /* start with PL_defstash to get everything reachable from \%main:: */
1395 sv_size(aTHX_ st, NPathLink("PL_defstash"), (SV*)PL_defstash, TOTAL_SIZE_RECURSION);
1397 NPathPushNode("others", NPtype_NAME); /* group these (typically much smaller) items */
1398 sv_size(aTHX_ st, NPathLink("PL_defgv"), (SV*)PL_defgv, TOTAL_SIZE_RECURSION);
1399 sv_size(aTHX_ st, NPathLink("PL_incgv"), (SV*)PL_incgv, TOTAL_SIZE_RECURSION);
1400 sv_size(aTHX_ st, NPathLink("PL_rs"), (SV*)PL_rs, TOTAL_SIZE_RECURSION);
1401 sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1402 sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1403 sv_size(aTHX_ st, NPathLink("PL_errors"), (SV*)PL_errors, TOTAL_SIZE_RECURSION);
1404 sv_size(aTHX_ st, NPathLink("PL_stashcache"), (SV*)PL_stashcache, TOTAL_SIZE_RECURSION);
1405 sv_size(aTHX_ st, NPathLink("PL_patchlevel"), (SV*)PL_patchlevel, TOTAL_SIZE_RECURSION);
1406 sv_size(aTHX_ st, NPathLink("PL_apiversion"), (SV*)PL_apiversion, TOTAL_SIZE_RECURSION);
1407 sv_size(aTHX_ st, NPathLink("PL_registered_mros"), (SV*)PL_registered_mros, TOTAL_SIZE_RECURSION);
1409 sv_size(aTHX_ st, NPathLink("PL_regex_padav"), (SV*)PL_regex_padav, TOTAL_SIZE_RECURSION);
1411 sv_size(aTHX_ st, NPathLink("PL_warnhook"), (SV*)PL_warnhook, TOTAL_SIZE_RECURSION);
1412 sv_size(aTHX_ st, NPathLink("PL_diehook"), (SV*)PL_diehook, TOTAL_SIZE_RECURSION);
1413 sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1414 sv_size(aTHX_ st, NPathLink("PL_main_cv"), (SV*)PL_main_cv, TOTAL_SIZE_RECURSION);
1415 sv_size(aTHX_ st, NPathLink("PL_main_root"), (SV*)PL_main_root, TOTAL_SIZE_RECURSION);
1416 sv_size(aTHX_ st, NPathLink("PL_main_start"), (SV*)PL_main_start, TOTAL_SIZE_RECURSION);
1417 /* TODO PL_pidstatus */
1418 /* TODO PL_stashpad */
1419 /* TODO PL_compiling? COP */
1421 /* TODO stacks: cur, main, tmps, mark, scope, save */
1422 /* TODO PL_exitlist */
1423 /* TODO PL_reentrant_buffers etc */
1425 /* TODO PerlIO? PL_known_layers PL_def_layerlist PL_perlio_fd_refcnt etc */
1427 /* TODO anything missed? */
1429 /* --- by this point we should have seen all reachable SVs --- */
1431 /* in theory we shouldn't have any elements in PL_strtab that haven't been seen yet */
1432 sv_size(aTHX_ st, NPathLink("PL_strtab-unseen"), (SV*)PL_strtab, TOTAL_SIZE_RECURSION);
1434 /* unused space in sv head arenas */
1438 # define SvARENA_CHAIN(sv) SvANY(sv) /* XXX breaks encapsulation*/
/* count free SV heads by walking the free-list chained through SvANY */
1439 while ((p = MUTABLE_SV(SvARENA_CHAIN(p)))) {
1440 if (!check_new(st, p)) /* sanity check */
1441 warn("Free'd SV head unexpectedly already seen");
1444 NPathPushNode("unused_sv_heads", NPtype_NAME);
1445 ADD_SIZE(st, "sv", free_heads * sizeof(SV));
1448 /* XXX iterate over bodies_by_type and crawl the free chains for each */
1450 /* iterate over all SVs to find any we've not accounted for yet */
1451 /* once the code above is visiting all SVs, any found here have been leaked */
1452 unseen_sv_size(aTHX_ st, NPathLink("unaccounted"));
1456 MODULE = Devel::Memory PACKAGE = Devel::Memory
1464 total_size = TOTAL_SIZE_RECURSION
/* XSUB body fragment (size/total_size): `ix` carries the alias value,
 * so total_size runs with TOTAL_SIZE_RECURSION. XS declaration lines are
 * elided from this view. */
1467 SV *thing = orig_thing;
1468 struct state *st = new_state(aTHX);
1470 /* If they passed us a reference then dereference it. This is the
1471 only way we can check the sizes of arrays and hashes */
1473 thing = SvRV(thing);
1476 sv_size(aTHX_ st, NULL, thing, ix);
1477 RETVAL = st->total_size;
/* XSUB body fragment (perl_size): size the whole interpreter, always
 * recursing regardless of how often an SV was already reachable. */
1487 /* just the current perl interpreter */
1488 struct state *st = new_state(aTHX);
1489 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1490 perl_size(aTHX_ st, NULL);
1491 RETVAL = st->total_size;
/* XSUB body fragment (heap size): wraps the interpreter total in a "heap"
 * node alongside allocator free space, using mstats() where available
 * (OSX malloc/malloc.h). "unknown" approximates heap not otherwise
 * attributed: bytes_total minus everything counted so far.
 * NOTE(review): this definition runs past the end of the visible chunk. */
1501 /* the current perl interpreter plus malloc, in the context of total heap size */
1502 # ifdef _MALLOC_MALLOC_H_ /* OSX. Not sure where else mstats is available */
1506 /* some systems have the SVID2/XPG mallinfo structure and function */
1507 struct mstats ms = mstats(); /* mstats() first */
1509 struct state *st = new_state(aTHX);
1510 dNPathNodes(1, NULL);
1511 NPathPushNode("heap", NPtype_NAME);
1513 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1515 perl_size(aTHX_ st, NPathLink("perl_interp"));
1517 NPathSetNode("free_malloc_space", NPtype_NAME);
1518 ADD_SIZE(st, "bytes_free", ms.bytes_free);
1519 ADD_ATTR(st, NPattr_NOTE, "bytes_total", ms.bytes_total);
1520 ADD_ATTR(st, NPattr_NOTE, "bytes_used", ms.bytes_used);
1521 ADD_ATTR(st, NPattr_NOTE, "chunks_used", ms.chunks_used);
1522 ADD_ATTR(st, NPattr_NOTE, "chunks_free", ms.chunks_free);
1523 /* TODO get heap size from OS and add a node: unknown = heapsize - perl - ms.bytes_free */
1524 /* for now we use bytes_total as an approximation */
1525 NPathSetNode("unknown", NPtype_NAME);
1526 ADD_SIZE(st, "unknown", ms.bytes_total - st->total_size);
1531 RETVAL = st->total_size;