3 #undef NDEBUG /* XXX */
6 #define PERL_NO_GET_CONTEXT
13 /* Not yet in ppport.h */
15 # define CvISXSUB(cv) (CvXSUB(cv) ? TRUE : FALSE)
18 # define SvRV_const(rv) SvRV(rv)
21 # define SvOOK_offset(sv, len) STMT_START { len = SvIVX(sv); } STMT_END
24 # define SvIsCOW(sv) ((SvFLAGS(sv) & (SVf_FAKE | SVf_READONLY)) == \
25 (SVf_FAKE | SVf_READONLY))
27 #ifndef SvIsCOW_shared_hash
28 # define SvIsCOW_shared_hash(sv) (SvIsCOW(sv) && SvLEN(sv) == 0)
30 #ifndef SvSHARED_HEK_FROM_PV
31 # define SvSHARED_HEK_FROM_PV(pvx) \
32 ((struct hek*)(pvx - STRUCT_OFFSET(struct hek, hek_key)))
36 # define PL_opargs opargs
37 # define PL_op_name op_name
41 /* "structured exception" handling is a Microsoft extension to C and C++.
42 It's *not* C++ exception handling - C++ exception handling can't capture
42 SEGVs and suchlike, whereas this can. There's no known analogous
44 functionality on other platforms. */
46 # define TRY_TO_CATCH_SEGV __try  /* MSVC structured exception handling: can trap SEGVs */
47 # define CAUGHT_EXCEPTION __except(EXCEPTION_EXECUTE_HANDLER)
49 # define TRY_TO_CATCH_SEGV if(1)  /* no-op fallback where SEH is unavailable */
50 # define CAUGHT_EXCEPTION else    /* pairs with the if(1) above, so this branch is never taken */
54 # define __attribute__(x)
57 #if 0 && defined(DEBUGGING)
58 #define dbg_printf(x) printf x
63 #define TAG /* printf( "# %s(%d)\n", __FILE__, __LINE__ ) */
66 /* The idea is to have a tree structure to store 1 bit per possible pointer
67 address. The lowest 16 bits are stored in a block of 8192 bytes.
68 The blocks are in a 256-way tree, indexed by the rest of the pointer.
69 This can cope with 32 and 64 bit pointers, and any address space layout,
70 without excessive memory needs. The assumption is that your CPU cache
71 works :-) (And that we're not going to bust it) */
74 #define LEAF_BITS (16 - BYTE_BITS)  /* pointer bits resolved by one leaf block, beyond the per-byte bits */
75 #define LEAF_MASK 0x1FFF            /* 13 set bits, i.e. (1 << LEAF_BITS) - 1; assumes BYTE_BITS == 3 -- TODO confirm */
77 typedef struct npath_node_st npath_node_t;
78 struct npath_node_st {
93 /* My hunch (not measured) is that for most architectures pointers will
94 start with 0 bits, hence the start of this array will be hot, and the
95 end unused. So put the flags next to the hot end. */
97 int min_recurse_threshold;
98 /* callback hooks and data */
99 int (*add_attr_cb)(struct state *st, npath_node_t *npath_node, UV attr_type, const char *name, UV value);
100 void (*free_state_cb)(pTHX_ struct state *st);
102 void *state_cb_data; /* free'd by free_state() after free_state_cb() call */
103 /* this stuff will be moved to state_cb_data later */
104 FILE *node_stream_fh;
105 char *node_stream_name;
108 #define ADD_SIZE(st, leafname, bytes) (NPathAddSizeCb(st, leafname, bytes) (st)->total_size += (bytes))
110 #define PATH_TRACKING
113 #define pPATH npath_node_t *NPathArg
115 /* A subtle point here is that dNPathNodes and NPathPushNode leave NP pointing
116 * to the next unused slot (though with prev already filled in)
117 * whereas NPathLink leaves NP unchanged, it just fills in the slot NP points
118 * to and passes that NP value to the function being called.
119 * seqn==0 indicates the node is new (hasn't been output yet)
121 #define dNPathNodes(nodes, prev_np) \
122 npath_node_t name_path_nodes[nodes+1]; /* +1 for NPathLink */ \
123 npath_node_t *NP = &name_path_nodes[0]; \
124 NP->seqn = NP->type = 0; NP->id = Nullch; /* safety/debug */ \
126 #define NPathPushNode(nodeid, nodetype) \
128 NP->type = nodetype; \
130 if(0)fprintf(stderr,"NPathPushNode (%p <-) %p <- [%d %s]\n", NP->prev, NP, nodetype,(char*)nodeid);\
132 NP->id = Nullch; /* safety/debug */ \
135 #define NPathSetNode(nodeid, nodetype) \
136 (NP-1)->id = nodeid; \
137 (NP-1)->type = nodetype; \
138 if(0)fprintf(stderr,"NPathSetNode (%p <-) %p <- [%d %s]\n", (NP-1)->prev, (NP-1), nodetype,(char*)nodeid);\
140 #define NPathPopNode \
143 /* dNPathUseParent points NP directly at the parent's name_path_nodes array
144 * So the function can only safely call ADD_*() but not NPathLink, unless the
145 * caller has spare nodes in its name_path_nodes.
147 #define dNPathUseParent(prev_np) npath_node_t *NP = (((prev_np+1)->prev = prev_np), prev_np+1)
149 #define NPtype_NAME 0x01   /* node id is a static string naming the node */
150 #define NPtype_LINK 0x02   /* node id is a static string naming the link to the next node */
151 #define NPtype_SV 0x03     /* node id is the SV* that sv_size() was called on */
152 #define NPtype_MAGIC 0x04  /* node id is a pointer to a MAGIC struct */
153 #define NPtype_OP 0x05     /* node id is the OP* that op_size() was called on */
155 #define NPattr_LEAFSIZE 0x00  /* attribute value is a byte count for a leaf item */
156 #define NPattr_NAME 0x01      /* attribute carries a name for the node (e.g. HvENAME) */
157 #define NPattr_PADFAKE 0x02   /* pad-name attribute: fake (closed-over) entry -- see padlist_size */
158 #define NPattr_PADNAME 0x03   /* pad-name attribute: named lexical -- see padlist_size */
159 #define NPattr_PADTMP 0x04    /* pad-name attribute: SVs_PADTMP temporary -- see padlist_size */
160 #define NPattr_NOTE 0x05      /* free-form annotation attribute */
/* fill in the slot np points to; seqn=0 marks the node as not yet output */
162 #define _NPathLink(np, nid, ntype) (((np)->id=nid), ((np)->type=ntype), ((np)->seqn=0))
/* fill the current slot as a LINK node and yield NP to pass to a callee; NP itself is not advanced */
163 #define NPathLink(nid) (_NPathLink(NP, nid, NPtype_LINK), NP)
164 /* add a link and a name node to the path - a special case for op_size */
165 #define NPathLinkAndNode(nid, nid2) (_NPathLink(NP, nid, NPtype_LINK), _NPathLink(NP+1, nid2, NPtype_NAME), ((NP+1)->prev=NP), (NP+1))
/* op_size() chains opcodes through the caller-supplied path node rather than pushing its own */
166 #define NPathOpLink (NPathArg)
/* invoke the optional add_attr callback with a leaf size; note the trailing comma:
 * this expansion is spliced into ADD_SIZE() as a comma-operator prefix */
167 #define NPathAddSizeCb(st, name, bytes) (st->add_attr_cb && st->add_attr_cb(st, NP-1, NPattr_LEAFSIZE, (name), (bytes))),
/* report an attribute for the current path node via the optional add_attr callback */
168 #define ADD_ATTR(st, attr_type, attr_name, attr_value) (st->add_attr_cb && st->add_attr_cb(st, NP-1, attr_type, attr_name, attr_value))
172 #define NPathAddSizeCb(st, name, bytes)
173 #define pPATH void *npath_dummy /* XXX ideally remove */
174 #define dNPathNodes(nodes, prev_np) dNOOP
175 #define NPathLink(nodeid, nodetype) NULL
176 #define NPathOpLink NULL
177 #define ADD_ATTR(st, attr_type, attr_name, attr_value) NOOP
179 #endif /* PATH_TRACKING */
186 static const char *svtypenames[SVt_LAST] = {
188 "NULL", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVBM", "PVLV", "PVAV", "PVHV", "PVCV", "PVGV", "PVFM", "PVIO",
189 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
190 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
191 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
192 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
193 #elif PERL_VERSION < 13
194 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
196 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
201 np_print_node_name(FILE *fp, npath_node_t *npath_node)
203 char buf[1024]; /* XXX */
205 switch (npath_node->type) {
206 case NPtype_SV: { /* id is pointer to the SV sv_size was called on */
207 const SV *sv = (SV*)npath_node->id;
208 int type = SvTYPE(sv);
209 char *typename = (type == SVt_IV && SvROK(sv)) ? "RV" : svtypenames[type];
210 fprintf(fp, "SV(%s)", typename);
211 switch(type) { /* add some useful details */
212 case SVt_PVAV: fprintf(fp, " fill=%d/%ld", av_len((AV*)sv), AvMAX((AV*)sv)); break;
213 case SVt_PVHV: fprintf(fp, " fill=%ld/%ld", HvFILL((HV*)sv), HvMAX((HV*)sv)); break;
217 case NPtype_OP: { /* id is pointer to the OP op_size was called on */
218 const OP *op = (OP*)npath_node->id;
219 fprintf(fp, "OP(%s)", OP_NAME(op));
222 case NPtype_MAGIC: { /* id is pointer to the MAGIC struct */
223 MAGIC *magic_pointer = (MAGIC*)npath_node->id;
224 /* XXX it would be nice if we could reuse mg_names.c [sigh] */
225 fprintf(fp, "MAGIC(%c)", magic_pointer->mg_type ? magic_pointer->mg_type : '0');
229 fprintf(fp, "%s", npath_node->id);
232 fprintf(fp, "%s", npath_node->id);
234 default: /* assume id is a string pointer */
235 fprintf(fp, "UNKNOWN(%d,%p)", npath_node->type, npath_node->id);
242 np_dump_indent(int depth) {
244 fprintf(stderr, ": ");
248 np_walk_new_nodes(struct state *st,
249 npath_node_t *npath_node,
250 npath_node_t *npath_node_deeper,
251 int (*cb)(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper))
253 if (npath_node->seqn) /* node already output */
256 if (npath_node->prev) {
257 np_walk_new_nodes(st, npath_node->prev, npath_node, cb); /* recurse */
258 npath_node->depth = npath_node->prev->depth + 1;
260 else npath_node->depth = 0;
261 npath_node->seqn = ++st->seqn;
264 if (cb(st, npath_node, npath_node_deeper)) {
265 /* ignore this node */
266 assert(npath_node->prev);
267 assert(npath_node->depth);
268 assert(npath_node_deeper);
270 npath_node->seqn = --st->seqn;
271 npath_node_deeper->prev = npath_node->prev;
279 np_dump_formatted_node(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
280 if (0 && npath_node->type == NPtype_LINK)
282 np_dump_indent(npath_node->depth);
283 np_print_node_name(stderr, npath_node);
284 if (npath_node->type == NPtype_LINK)
285 fprintf(stderr, "->"); /* cosmetic */
286 fprintf(stderr, "\t\t[#%ld @%u] ", npath_node->seqn, npath_node->depth);
287 fprintf(stderr, "\n");
292 np_dump_node_path_info(struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
294 if (attr_type == NPattr_LEAFSIZE && !attr_value)
295 return 0; /* ignore zero sized leaf items */
296 np_walk_new_nodes(st, npath_node, NULL, np_dump_formatted_node);
297 np_dump_indent(npath_node->depth+1);
299 case NPattr_LEAFSIZE:
300 fprintf(stderr, "+%ld %s =%ld", attr_value, attr_name, attr_value+st->total_size);
303 fprintf(stderr, "~NAMED('%s') %lu", attr_name, attr_value);
306 fprintf(stderr, "~note %s %lu", attr_name, attr_value);
311 fprintf(stderr, "~pad%lu %s %lu", attr_type, attr_name, attr_value);
314 fprintf(stderr, "~??? %s %lu", attr_name, attr_value);
317 fprintf(stderr, "\n");
322 np_stream_formatted_node(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
323 fprintf(st->node_stream_fh, "-%u %lu %u ",
324 npath_node->type, npath_node->seqn, (unsigned)npath_node->depth
326 np_print_node_name(st->node_stream_fh, npath_node);
327 fprintf(st->node_stream_fh, "\n");
332 np_stream_node_path_info(struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
334 if (!attr_type && !attr_value)
335 return 0; /* ignore zero sized leaf items */
336 np_walk_new_nodes(st, npath_node, NULL, np_stream_formatted_node);
337 if (attr_type) { /* Attribute type, name and value */
338 fprintf(st->node_stream_fh, "%lu %lu ", attr_type, npath_node->seqn);
340 else { /* Leaf name and memory size */
341 fprintf(st->node_stream_fh, "L %lu ", npath_node->seqn);
343 fprintf(st->node_stream_fh, "%lu %s\n", attr_value, attr_name);
348 #endif /* PATH_TRACKING */
352 Checks to see if thing is in the bitstring.
353 Returns true or false, and
354 notes thing in the segmented bitstring.
357 check_new(struct state *st, const void *const p) {
358 unsigned int bits = 8 * sizeof(void*);
359 const size_t raw_p = PTR2nat(p);
360 /* This effectively rotates the value right by the number of low always-0
361 bits in an aligned pointer. The assumption is that most (if not all)
362 pointers are aligned, and these will be in the same chain of nodes
363 (and hence hot in the cache) but we can still deal with any unaligned
365 const size_t cooked_p
366 = (raw_p >> ALIGN_BITS) | (raw_p << (bits - ALIGN_BITS));
367 const U8 this_bit = 1 << (cooked_p & 0x7);
371 void **tv_p = (void **) (st->tracking);
373 if (NULL == p) return FALSE;
375 const char c = *(const char *)p;
378 if (st->dangle_whine)
379 warn( "Devel::Size: Encountered invalid pointer: %p\n", p );
385 /* bits now 24 (32 bit pointers) or 56 (64 bit pointers) */
387 /* First level is always present. */
389 i = (unsigned int)((cooked_p >> bits) & 0xFF);
391 Newxz(tv_p[i], 256, void *);
392 tv_p = (void **)(tv_p[i]);
394 } while (bits > LEAF_BITS + BYTE_BITS);
395 /* bits now 16 always */
396 #if !defined(MULTIPLICITY) || PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8)
397 /* 5.8.8 and early have an assert() macro that uses Perl_croak, hence needs
398 a my_perl under multiplicity */
401 leaf_p = (U8 **)tv_p;
402 i = (unsigned int)((cooked_p >> bits) & 0xFF);
404 Newxz(leaf_p[i], 1 << LEAF_BITS, U8);
409 i = (unsigned int)((cooked_p >> BYTE_BITS) & LEAF_MASK);
411 if(leaf[i] & this_bit)
419 free_tracking_at(void **tv, int level)
427 free_tracking_at((void **) tv[i], level);
441 free_state(struct state *st)
443 const int top_level = (sizeof(void *) * 8 - LEAF_BITS - BYTE_BITS) / 8;
444 if (st->free_state_cb)
445 st->free_state_cb(st);
446 if (st->state_cb_data)
447 Safefree(st->state_cb_data);
448 free_tracking_at((void **)st->tracking, top_level);
452 /* For now, this is somewhat a compatibility bodge until the plan comes
453 together for fine grained recursion control. total_size() would recurse into
454 hash and array members, whereas sv_size() would not. However, sv_size() is
455 called with CvSTASH() of a CV, which means that if it (also) starts to
456 recurse fully, then the size of any CV now becomes the size of the entire
457 symbol table reachable from it, and potentially the entire symbol table, if
458 any subroutine makes a reference to a global (such as %SIG). The historical
459 implementation of total_size() didn't report "everything", and changing the
460 only available size to "everything" doesn't feel at all useful. */
462 #define NO_RECURSION 0          /* size this SV alone */
463 #define SOME_RECURSION 1        /* limited recursion -- see the compatibility note above */
464 #define TOTAL_SIZE_RECURSION 2  /* recurse fully, as total_size() does */
/* forward declaration: op_size()/magic_size()/padlist_size() and sv_size() are mutually recursive */
466 static void sv_size(pTHX_ struct state *, pPATH, const SV *const, const int recurse);
482 , OPc_CONDOP /* 12 */
491 cc_opclass(const OP * const o)
497 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
499 if (o->op_type == OP_SASSIGN)
500 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
503 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_AELEMFAST)
507 if ((o->op_type == OP_TRANS)) {
511 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
543 #ifdef OA_PVOP_OR_SVOP
544 case OA_PVOP_OR_SVOP: TAG;
546 * Character translations (tr///) are usually a PVOP, keeping a
547 * pointer to a table of shorts used to look up translations.
548 * Under utf8, however, a simple table isn't practical; instead,
549 * the OP is an SVOP, and the SV is a reference to a swash
550 * (i.e., an RV pointing to an HV).
552 return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF))
553 ? OPc_SVOP : OPc_PVOP;
562 case OA_BASEOP_OR_UNOP: TAG;
564 * UNI(OP_foo) in toke.c returns token UNI or FUNC1 depending on
565 * whether parens were seen. perly.y uses OPf_SPECIAL to
566 * signal whether a BASEOP had empty parens or none.
567 * Some other UNOPs are created later, though, so the best
568 * test is OPf_KIDS, which is set in newUNOP.
570 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
572 case OA_FILESTATOP: TAG;
574 * The file stat OPs are created via UNI(OP_foo) in toke.c but use
575 * the OPf_REF flag to distinguish between OP types instead of the
576 * usual OPf_SPECIAL flag. As usual, if OPf_KIDS is set, then we
577 * return OPc_UNOP so that walkoptree can find our children. If
578 * OPf_KIDS is not set then we check OPf_REF. Without OPf_REF set
579 * (no argument to the operator) it's an OP; with OPf_REF set it's
580 * an SVOP (and op_sv is the GV for the filehandle argument).
582 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
584 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
586 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
588 case OA_LOOPEXOP: TAG;
590 * next, last, redo, dump and goto use OPf_SPECIAL to indicate that a
591 * label was omitted (in which case it's a BASEOP) or else a term was
592 * seen. In this last case, all except goto are definitely PVOP but
593 * goto is either a PVOP (with an ordinary constant label), an UNOP
594 * with OPf_STACKED (with a non-constant non-sub) or an UNOP for
595 * OP_REFGEN (with goto &sub) in which case OPf_STACKED also seems to
598 if (o->op_flags & OPf_STACKED)
600 else if (o->op_flags & OPf_SPECIAL)
610 warn("Devel::Size: Can't determine class of operator %s, assuming BASEOP\n",
611 PL_op_name[o->op_type]);
617 /* Figure out how much magic is attached to the SV and return the
620 magic_size(pTHX_ const SV * const thing, struct state *st, pPATH) {
621 dNPathNodes(1, NPathArg);
622 MAGIC *magic_pointer = SvMAGIC(thing);
624 /* push a dummy node for NPathSetNode to update inside the while loop */
625 NPathPushNode("dummy", NPtype_NAME);
627 /* Have we seen the magic pointer? (NULL has always been seen before) */
628 while (check_new(st, magic_pointer)) {
630 NPathSetNode(magic_pointer, NPtype_MAGIC);
632 ADD_SIZE(st, "mg", sizeof(MAGIC));
633 /* magic vtables aren't freed when magic is freed, so don't count them.
634 (They are static structures. Anything that assumes otherwise is buggy.)
639 sv_size(aTHX_ st, NPathLink("mg_obj"), magic_pointer->mg_obj, TOTAL_SIZE_RECURSION);
640 if (magic_pointer->mg_len == HEf_SVKEY) {
641 sv_size(aTHX_ st, NPathLink("mg_ptr"), (SV *)magic_pointer->mg_ptr, TOTAL_SIZE_RECURSION);
643 #if defined(PERL_MAGIC_utf8) && defined (PERL_MAGIC_UTF8_CACHESIZE)
644 else if (magic_pointer->mg_type == PERL_MAGIC_utf8) {
645 if (check_new(st, magic_pointer->mg_ptr)) {
646 ADD_SIZE(st, "PERL_MAGIC_utf8", PERL_MAGIC_UTF8_CACHESIZE * 2 * sizeof(STRLEN));
650 else if (magic_pointer->mg_len > 0) {
651 if (check_new(st, magic_pointer->mg_ptr)) {
652 ADD_SIZE(st, "mg_len", magic_pointer->mg_len);
656 /* Get the next in the chain */
657 magic_pointer = magic_pointer->mg_moremagic;
660 if (st->dangle_whine)
661 warn( "Devel::Size: Encountered bad magic at: %p\n", magic_pointer );
667 check_new_and_strlen(struct state *st, const char *const p, pPATH) {
668 dNPathNodes(1, NPathArg->prev);
669 if(check_new(st, p)) {
670 NPathPushNode(NPathArg->id, NPtype_NAME);
671 ADD_SIZE(st, NPathArg->id, 1 + strlen(p));
676 regex_size(const REGEXP * const baseregex, struct state *st, pPATH) {
677 dNPathNodes(1, NPathArg);
678 if(!check_new(st, baseregex))
680 NPathPushNode("regex_size", NPtype_NAME);
681 ADD_SIZE(st, "REGEXP", sizeof(REGEXP));
682 #if (PERL_VERSION < 11)
683 /* Note the size of the paren offset thing */
684 ADD_SIZE(st, "nparens", sizeof(I32) * baseregex->nparens * 2);
685 ADD_SIZE(st, "precomp", strlen(baseregex->precomp));
687 ADD_SIZE(st, "regexp", sizeof(struct regexp));
688 ADD_SIZE(st, "nparens", sizeof(I32) * SvANY(baseregex)->nparens * 2);
689 /*ADD_SIZE(st, strlen(SvANY(baseregex)->subbeg));*/
691 if (st->go_yell && !st->regex_whine) {
692 carp("Devel::Size: Calculated sizes for compiled regexes are incompatible, and probably always will be");
698 op_size(pTHX_ const OP * const baseop, struct state *st, pPATH)
700 /* op_size recurses to follow the chain of opcodes. For the node path we
701 * don't want the chain to be 'nested' in the path so we use dNPathUseParent().
702 * Also, to avoid a link-to-a-link the caller should use NPathLinkAndNode()
703 * instead of NPathLink().
705 dNPathUseParent(NPathArg);
709 if(!check_new(st, baseop))
712 op_size(aTHX_ baseop->op_next, st, NPathOpLink);
714 switch (cc_opclass(baseop)) {
715 case OPc_BASEOP: TAG;
716 ADD_SIZE(st, "op", sizeof(struct op));
719 ADD_SIZE(st, "unop", sizeof(struct unop));
720 op_size(aTHX_ ((UNOP *)baseop)->op_first, st, NPathOpLink);
723 ADD_SIZE(st, "binop", sizeof(struct binop));
724 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
725 op_size(aTHX_ ((BINOP *)baseop)->op_last, st, NPathOpLink);
728 ADD_SIZE(st, "logop", sizeof(struct logop));
729 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
730 op_size(aTHX_ ((LOGOP *)baseop)->op_other, st, NPathOpLink);
733 case OPc_CONDOP: TAG;
734 ADD_SIZE(st, "condop", sizeof(struct condop));
735 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
736 op_size(aTHX_ ((CONDOP *)baseop)->op_true, st, NPathOpLink);
737 op_size(aTHX_ ((CONDOP *)baseop)->op_false, st, NPathOpLink);
740 case OPc_LISTOP: TAG;
741 ADD_SIZE(st, "listop", sizeof(struct listop));
742 op_size(aTHX_ ((LISTOP *)baseop)->op_first, st, NPathOpLink);
743 op_size(aTHX_ ((LISTOP *)baseop)->op_last, st, NPathOpLink);
746 ADD_SIZE(st, "pmop", sizeof(struct pmop));
747 op_size(aTHX_ ((PMOP *)baseop)->op_first, st, NPathOpLink);
748 op_size(aTHX_ ((PMOP *)baseop)->op_last, st, NPathOpLink);
749 #if PERL_VERSION < 9 || (PERL_VERSION == 9 && PERL_SUBVERSION < 5)
750 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplroot, st, NPathOpLink);
751 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplstart, st, NPathOpLink);
753 /* This is defined away in perl 5.8.x, but it is in there for
756 regex_size(PM_GETRE((PMOP *)baseop), st, NPathLink("PM_GETRE"));
758 regex_size(((PMOP *)baseop)->op_pmregexp, st, NPathLink("op_pmregexp"));
762 ADD_SIZE(st, "svop", sizeof(struct svop));
763 if (!(baseop->op_type == OP_AELEMFAST
764 && baseop->op_flags & OPf_SPECIAL)) {
765 /* not an OP_PADAV replacement */
766 sv_size(aTHX_ st, NPathLink("SVOP"), ((SVOP *)baseop)->op_sv, SOME_RECURSION);
771 ADD_SIZE(st, "padop", sizeof(struct padop));
776 ADD_SIZE(st, "gvop", sizeof(struct gvop));
777 sv_size(aTHX_ st, NPathLink("GVOP"), ((GVOP *)baseop)->op_gv, SOME_RECURSION);
781 check_new_and_strlen(st, ((PVOP *)baseop)->op_pv, NPathLink("op_pv"));
784 ADD_SIZE(st, "loop", sizeof(struct loop));
785 op_size(aTHX_ ((LOOP *)baseop)->op_first, st, NPathOpLink);
786 op_size(aTHX_ ((LOOP *)baseop)->op_last, st, NPathOpLink);
787 op_size(aTHX_ ((LOOP *)baseop)->op_redoop, st, NPathOpLink);
788 op_size(aTHX_ ((LOOP *)baseop)->op_nextop, st, NPathOpLink);
789 op_size(aTHX_ ((LOOP *)baseop)->op_lastop, st, NPathOpLink);
794 basecop = (COP *)baseop;
795 ADD_SIZE(st, "cop", sizeof(struct cop));
797 /* Change 33656 by nicholas@mouse-mill on 2008/04/07 11:29:51
798 Eliminate cop_label from struct cop by storing a label as the first
799 entry in the hints hash. Most statements don't have labels, so this
800 will save memory. Not sure how much.
801 The check below will incorrectly fail on bleadperls
802 before 5.11 @33656, but later than 5.10, producing slightly too
803 small memory sizes on these Perls. */
804 #if (PERL_VERSION < 11)
805 check_new_and_strlen(st, basecop->cop_label, NPathLink("cop_label"));
808 check_new_and_strlen(st, basecop->cop_file, NPathLink("cop_file"));
809 check_new_and_strlen(st, basecop->cop_stashpv, NPathLink("cop_stashpv"));
811 if (SvREFCNT(basecop->cop_stash) == 1) /* XXX hack? */
812 sv_size(aTHX_ st, NPathLink("cop_stash"), (SV *)basecop->cop_stash, SOME_RECURSION);
813 sv_size(aTHX_ st, NPathLink("cop_filegv"), (SV *)basecop->cop_filegv, SOME_RECURSION);
823 if (st->dangle_whine)
824 warn( "Devel::Size: Encountered dangling pointer in opcode at: %p\n", baseop );
829 hek_size(pTHX_ struct state *st, HEK *hek, U32 shared, pPATH)
831 dNPathNodes(1, NPathArg);
833 /* Hash keys can be shared. Have we seen this before? */
834 if (!check_new(st, hek))
836 NPathPushNode("hek", NPtype_NAME);
837 ADD_SIZE(st, "hek_len", HEK_BASESIZE + hek->hek_len
839 + 1 /* No hash key flags prior to 5.8.0 */
845 #if PERL_VERSION < 10
846 ADD_SIZE(st, "he", sizeof(struct he));
848 ADD_SIZE(st, "shared_he", STRUCT_OFFSET(struct shared_he, shared_he_hek));
854 #if PERL_VERSION < 8 || PERL_SUBVERSION < 9
859 # define MAYBE_PURIFY(normal, pure) (pure)
860 # define MAYBE_OFFSET(struct_name, member) 0
862 # define MAYBE_PURIFY(normal, pure) (normal)
863 # define MAYBE_OFFSET(struct_name, member) STRUCT_OFFSET(struct_name, member)
866 const U8 body_sizes[SVt_LAST] = {
869 MAYBE_PURIFY(sizeof(IV), sizeof(XPVIV)), /* SVt_IV */
870 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
871 sizeof(XRV), /* SVt_RV */
872 sizeof(XPV), /* SVt_PV */
873 sizeof(XPVIV), /* SVt_PVIV */
874 sizeof(XPVNV), /* SVt_PVNV */
875 sizeof(XPVMG), /* SVt_PVMG */
876 sizeof(XPVBM), /* SVt_PVBM */
877 sizeof(XPVLV), /* SVt_PVLV */
878 sizeof(XPVAV), /* SVt_PVAV */
879 sizeof(XPVHV), /* SVt_PVHV */
880 sizeof(XPVCV), /* SVt_PVCV */
881 sizeof(XPVGV), /* SVt_PVGV */
882 sizeof(XPVFM), /* SVt_PVFM */
883 sizeof(XPVIO) /* SVt_PVIO */
884 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
888 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
890 MAYBE_PURIFY(sizeof(xpv_allocated), sizeof(XPV)), /* SVt_PV */
891 MAYBE_PURIFY(sizeof(xpviv_allocated), sizeof(XPVIV)),/* SVt_PVIV */
892 sizeof(XPVNV), /* SVt_PVNV */
893 sizeof(XPVMG), /* SVt_PVMG */
894 sizeof(XPVGV), /* SVt_PVGV */
895 sizeof(XPVLV), /* SVt_PVLV */
896 MAYBE_PURIFY(sizeof(xpvav_allocated), sizeof(XPVAV)),/* SVt_PVAV */
897 MAYBE_PURIFY(sizeof(xpvhv_allocated), sizeof(XPVHV)),/* SVt_PVHV */
898 MAYBE_PURIFY(sizeof(xpvcv_allocated), sizeof(XPVCV)),/* SVt_PVCV */
899 MAYBE_PURIFY(sizeof(xpvfm_allocated), sizeof(XPVFM)),/* SVt_PVFM */
900 sizeof(XPVIO), /* SVt_PVIO */
901 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
905 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
907 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
908 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
909 sizeof(XPVNV), /* SVt_PVNV */
910 sizeof(XPVMG), /* SVt_PVMG */
911 sizeof(XPVGV), /* SVt_PVGV */
912 sizeof(XPVLV), /* SVt_PVLV */
913 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
914 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
915 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
916 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
917 sizeof(XPVIO) /* SVt_PVIO */
918 #elif PERL_VERSION < 13
922 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
923 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
924 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
925 sizeof(XPVNV), /* SVt_PVNV */
926 sizeof(XPVMG), /* SVt_PVMG */
927 sizeof(regexp) - MAYBE_OFFSET(regexp, xpv_cur), /* SVt_REGEXP */
928 sizeof(XPVGV), /* SVt_PVGV */
929 sizeof(XPVLV), /* SVt_PVLV */
930 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
931 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
932 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
933 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
934 sizeof(XPVIO) /* SVt_PVIO */
939 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
940 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
941 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
942 sizeof(XPVNV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVNV */
943 sizeof(XPVMG), /* SVt_PVMG */
944 sizeof(regexp), /* SVt_REGEXP */
945 sizeof(XPVGV), /* SVt_PVGV */
946 sizeof(XPVLV), /* SVt_PVLV */
947 sizeof(XPVAV), /* SVt_PVAV */
948 sizeof(XPVHV), /* SVt_PVHV */
949 sizeof(XPVCV), /* SVt_PVCV */
950 sizeof(XPVFM), /* SVt_PVFM */
951 sizeof(XPVIO) /* SVt_PVIO */
956 /* based on Perl_do_dump_pad() - wraps sv_size and adds ADD_ATTR calls for the pad names */
958 padlist_size(pTHX_ struct state *const st, pPATH, PADLIST *padlist,
961 dNPathUseParent(NPathArg);
969 pad_name = MUTABLE_AV(*av_fetch(MUTABLE_AV(padlist), 0, FALSE));
970 pname = AvARRAY(pad_name);
972 for (ix = 1; ix <= AvFILLp(pad_name); ix++) {
973 const SV *namesv = pname[ix];
974 if (namesv && namesv == &PL_sv_undef) {
979 ADD_ATTR(st, NPattr_PADFAKE, SvPVX_const(namesv), ix);
981 ADD_ATTR(st, NPattr_PADNAME, SvPVX_const(namesv), ix);
984 ADD_ATTR(st, NPattr_PADTMP, "SVs_PADTMP", ix);
988 sv_size(aTHX_ st, NPathArg, (SV*)padlist, recurse);
993 sv_size(pTHX_ struct state *const st, pPATH, const SV * const orig_thing,
995 const SV *thing = orig_thing;
996 dNPathNodes(3, NPathArg);
999 if(!check_new(st, orig_thing))
1002 type = SvTYPE(thing);
1003 if (type > SVt_LAST) {
1004 warn("Devel::Size: Unknown variable type: %d encountered\n", type);
1007 NPathPushNode(thing, NPtype_SV);
1008 ADD_SIZE(st, "sv", sizeof(SV) + body_sizes[type]);
1011 #if (PERL_VERSION < 11)
1012 /* Is it a reference? */
1017 if(recurse && SvROK(thing))
1018 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
1022 /* Is there anything in the array? */
1023 if (AvMAX(thing) != -1) {
1024 /* an array with 10 slots has AvMax() set to 9 - te 2007-04-22 */
1025 ADD_SIZE(st, "av_max", sizeof(SV *) * (AvMAX(thing) + 1));
1026 dbg_printf(("total_size: %li AvMAX: %li av_len: $i\n", st->total_size, AvMAX(thing), av_len((AV*)thing)));
1028 if (recurse >= st->min_recurse_threshold) {
1029 SSize_t i = AvFILLp(thing) + 1;
1032 sv_size(aTHX_ st, NPathLink("AVelem"), AvARRAY(thing)[i], recurse);
1035 /* Add in the bits on the other side of the beginning */
1037 dbg_printf(("total_size %li, sizeof(SV *) %li, AvARRAY(thing) %li, AvALLOC(thing)%li , sizeof(ptr) %li \n",
1038 st->total_size, sizeof(SV*), AvARRAY(thing), AvALLOC(thing), sizeof( thing )));
1040 /* under Perl 5.8.8 64bit threading, AvARRAY(thing) was a pointer while AvALLOC was 0,
1041 resulting in grossly overstated sizes for arrays. Technically, this shouldn't happen... */
1042 if (AvALLOC(thing) != 0) {
1043 ADD_SIZE(st, "AvALLOC", (sizeof(SV *) * (AvARRAY(thing) - AvALLOC(thing))));
1045 #if (PERL_VERSION < 9)
1046 /* Is there something hanging off the arylen element?
1047 Post 5.9.something this is stored in magic, so will be found there,
1048 and Perl_av_arylen_p() takes a non-const AV*, hence compilers rightly
1049 complain about AvARYLEN() passing thing to it. */
1050 sv_size(aTHX_ st, NPathLink("ARYLEN"), AvARYLEN(thing), recurse);
1055 /* Now the array of buckets */
1056 ADD_SIZE(st, "hv_max", (sizeof(HE *) * (HvMAX(thing) + 1)));
1057 if (HvENAME(thing)) {
1058 ADD_ATTR(st, NPattr_NAME, HvENAME(thing), 0);
1060 /* Now walk the bucket chain */
1061 if (HvARRAY(thing)) {
1064 for (cur_bucket = 0; cur_bucket <= HvMAX(thing); cur_bucket++) {
1065 cur_entry = *(HvARRAY(thing) + cur_bucket);
1067 ADD_SIZE(st, "he", sizeof(HE));
1068 hek_size(aTHX_ st, cur_entry->hent_hek, HvSHAREKEYS(thing), NPathLink("hent_hek"));
1069 if (recurse >= st->min_recurse_threshold) {
1070 /* I've seen a PL_strtab HeVAL == 0xC and 0x40C etc
1071 * just running perl -Mblib -Mstrict -MDevel::Size=:all -MCarp -e 'warn perl_size()'
1072 * but it seemed like a corruption - it would change come and go with irrelevant code changes.
1073 * so we protect against that here, but I'd like to know the cause.
1075 if (PTR2UV(HeVAL(cur_entry)) > 0xFFF)
1076 sv_size(aTHX_ st, NPathLink("HeVAL"), HeVAL(cur_entry), recurse);
1078 cur_entry = cur_entry->hent_next;
1084 /* This direct access is arguably "naughty": */
1085 struct mro_meta *meta = HvAUX(thing)->xhv_mro_meta;
1086 #if PERL_VERSION > 13 || PERL_SUBVERSION > 8
1088 I32 count = HvAUX(thing)->xhv_name_count;
1091 HEK **names = HvAUX(thing)->xhv_name_u.xhvnameu_names;
1095 hek_size(aTHX_ st, names[count], 1, NPathLink("HvAUXelem"));
1100 hek_size(aTHX_ st, HvNAME_HEK(thing), 1, NPathLink("HvNAME_HEK"));
1103 ADD_SIZE(st, "xpvhv_aux", sizeof(struct xpvhv_aux));
1105 ADD_SIZE(st, "mro_meta", sizeof(struct mro_meta));
1106 sv_size(aTHX_ st, NPathLink("mro_nextmethod"), (SV *)meta->mro_nextmethod, TOTAL_SIZE_RECURSION);
1107 #if PERL_VERSION > 10 || (PERL_VERSION == 10 && PERL_SUBVERSION > 0)
1108 sv_size(aTHX_ st, NPathLink("isa"), (SV *)meta->isa, TOTAL_SIZE_RECURSION);
1110 #if PERL_VERSION > 10
1111 sv_size(aTHX_ st, NPathLink("mro_linear_all"), (SV *)meta->mro_linear_all, TOTAL_SIZE_RECURSION);
1112 sv_size(aTHX_ st, NPathLink("mro_linear_current"), meta->mro_linear_current, TOTAL_SIZE_RECURSION);
1114 sv_size(aTHX_ st, NPathLink("mro_linear_dfs"), (SV *)meta->mro_linear_dfs, TOTAL_SIZE_RECURSION);
1115 sv_size(aTHX_ st, NPathLink("mro_linear_c3"), (SV *)meta->mro_linear_c3, TOTAL_SIZE_RECURSION);
1120 check_new_and_strlen(st, HvNAME_get(thing), NPathLink("HvNAME"));
1126 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), SOME_RECURSION);
1127 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), recurse);
1129 if (st->go_yell && !st->fm_whine) {
1130 carp("Devel::Size: Calculated sizes for FMs are incomplete");
1136 /* not CvSTASH, per https://rt.cpan.org/Ticket/Display.html?id=79366 */
1137 sv_size(aTHX_ st, NPathLink("SvSTASH"), (SV *)SvSTASH(thing), SOME_RECURSION);
1138 sv_size(aTHX_ st, NPathLink("CvGV"), (SV *)CvGV(thing), SOME_RECURSION);
1139 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), SOME_RECURSION);
1140 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), recurse);
1141 if (CvISXSUB(thing)) {
1142 sv_size(aTHX_ st, NPathLink("cv_const_sv"), cv_const_sv((CV *)thing), recurse);
1144 if(1)op_size(aTHX_ CvSTART(thing), st, NPathLinkAndNode("CvSTART", "OPs")); /* XXX ? */
1145 op_size(aTHX_ CvROOT(thing), st, NPathLinkAndNode("CvROOT", "OPs"));
1150 /* Some embedded char pointers */
1151 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_top_name, NPathLink("xio_top_name"));
1152 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_fmt_name, NPathLink("xio_fmt_name"));
1153 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_bottom_name, NPathLink("xio_bottom_name"));
1154 /* Throw the GVs on the list to be walked if they're not-null */
1155 sv_size(aTHX_ st, NPathLink("xio_top_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_top_gv, recurse);
1156 sv_size(aTHX_ st, NPathLink("xio_bottom_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_bottom_gv, recurse);
1157 sv_size(aTHX_ st, NPathLink("xio_fmt_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_fmt_gv, recurse);
1159 /* Only go trotting through the IO structures if they're really
1160 trottable. If USE_PERLIO is defined we can do this. If
1161 not... we can't, so we don't even try */
1163 /* Dig into xio_ifp and xio_ofp here */
1164 warn("Devel::Size: Can't size up perlio layers yet\n");
1169 #if (PERL_VERSION < 9)
1174 if(isGV_with_GP(thing)) {
1176 hek_size(aTHX_ st, GvNAME_HEK(thing), 1, NPathLink("GvNAME_HEK"));
1178 ADD_SIZE(st, "GvNAMELEN", GvNAMELEN(thing));
1180 ADD_ATTR(st, NPattr_NAME, GvNAME_get(thing), 0);
1182 hek_size(aTHX_ st, GvFILE_HEK(thing), 1, NPathLink("GvFILE_HEK"));
1183 #elif defined(GvFILE)
1184 # if !defined(USE_ITHREADS) || (PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8))
1185 /* With ithreads, before 5.8.9, this can end up pointing to freed memory
1186 if the GV was created in an eval, as GvFILE() points to CopFILE(),
1187 and the relevant COP has been freed on scope cleanup after the eval.
1188 5.8.9 adds a binary compatible fudge that catches the vast majority
1189 of cases. 5.9.something added a proper fix, by converting the GP to
1190 use a shared hash key (properly reference counted), instead of a
1191 char * (owned by who knows? possibly no-one now) */
1192 check_new_and_strlen(st, GvFILE(thing), NPathLink("GvFILE"));
1195 /* Is there something hanging off the glob? */
1196 if (check_new(st, GvGP(thing))) {
1197 ADD_SIZE(st, "GP", sizeof(GP));
1198 sv_size(aTHX_ st, NPathLink("gp_sv"), (SV *)(GvGP(thing)->gp_sv), recurse);
1199 sv_size(aTHX_ st, NPathLink("gp_av"), (SV *)(GvGP(thing)->gp_av), recurse);
1200 sv_size(aTHX_ st, NPathLink("gp_hv"), (SV *)(GvGP(thing)->gp_hv), recurse);
1201 sv_size(aTHX_ st, NPathLink("gp_cv"), (SV *)(GvGP(thing)->gp_cv), recurse);
1202 sv_size(aTHX_ st, NPathLink("gp_egv"), (SV *)(GvGP(thing)->gp_egv), recurse);
1203 sv_size(aTHX_ st, NPathLink("gp_form"), (SV *)(GvGP(thing)->gp_form), recurse);
1205 #if (PERL_VERSION >= 9)
1209 #if PERL_VERSION <= 8
1217 if(recurse && SvROK(thing))
1218 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
1219 else if (SvIsCOW_shared_hash(thing))
1220 hek_size(aTHX_ st, SvSHARED_HEK_FROM_PV(SvPVX(thing)), 1, NPathLink("SvSHARED_HEK_FROM_PV"));
1222 ADD_SIZE(st, "SvLEN", SvLEN(thing));
1226 SvOOK_offset(thing, len);
1227 ADD_SIZE(st, "SvOOK", len);
1233 if (type >= SVt_PVMG) {
1234 magic_size(aTHX_ thing, st, NPathLink("MG"));
/* Cleanup callback for the MEMNODES node stream (installed as
 * st->free_state_cb by new_state()): closes the stream before the state
 * struct is released.  A node_stream_name beginning with '|' was opened
 * with popen() and must be closed with pclose(); anything else was
 * fopen()ed and gets fclose().  Close failures are reported with warn()
 * but are deliberately not fatal — we are tearing down anyway. */
1241 free_memnode_state(pTHX_ struct state *st)
1243 if (st->node_stream_fh && st->node_stream_name) {
/* leading '|' means the stream is a pipe opened via popen() */
1244 if (*st->node_stream_name == '|') {
1245 if (pclose(st->node_stream_fh))
1246 warn("%s exited with an error status\n", st->node_stream_name);
/* otherwise it is a plain file opened via fopen() */
1249 if (fclose(st->node_stream_fh))
1250 warn("Error closing %s: %s\n", st->node_stream_name, strerror(errno));
/* Allocate and initialise a zeroed sizing-state struct for one sizing run.
 * Reads the $Devel::Size::warn and $Devel::Size::dangle control variables,
 * pre-registers the immortal SVs in the seen-pointer table so they are
 * never attributed to any data structure, and (under PATH_TRACKING)
 * wires up the MEMNODES node-path output stream.
 * Caller owns the returned state; it is released via st->free_state_cb. */
1255 static struct state *
1261 Newxz(st, 1, struct state);
1263 st->min_recurse_threshold = TOTAL_SIZE_RECURSION;
/* $Devel::Size::warn enables both the go_yell and dangle warnings */
1264 if (NULL != (warn_flag = perl_get_sv("Devel::Size::warn", FALSE))) {
1265 st->dangle_whine = st->go_yell = SvIV(warn_flag) ? TRUE : FALSE;
/* $Devel::Size::dangle can toggle dangle warnings independently */
1267 if (NULL != (warn_flag = perl_get_sv("Devel::Size::dangle", FALSE))) {
1268 st->dangle_whine = SvIV(warn_flag) ? TRUE : FALSE;
/* Mark the interpreter-wide immortal SVs as already seen up front so
 * that later sv_size() traversals skip them. */
1270 check_new(st, &PL_sv_undef);
1271 check_new(st, &PL_sv_no);
1272 check_new(st, &PL_sv_yes);
1273 #if PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 0)
1274 check_new(st, &PL_sv_placeholder);
1276 #ifdef PATH_TRACKING
/* MEMNODES env var selects where node-path info goes: "|cmd" pipes the
 * output to cmd via popen(), anything else is treated as a filename. */
1277 if (getenv("MEMNODES") && *getenv("MEMNODES")) { /* XXX quick hack */
1278 st->node_stream_name = getenv("MEMNODES");
1279 if (*st->node_stream_name == '|')
1280 st->node_stream_fh = popen(st->node_stream_name+1, "w");
1282 st->node_stream_fh = fopen(st->node_stream_name, "wb");
1283 if (!st->node_stream_fh)
1284 croak("Can't open '%s' for writing: %s", st->node_stream_name, strerror(errno));
1285 st->add_attr_cb = np_stream_node_path_info;
1288 st->add_attr_cb = np_dump_node_path_info;
1289 st->free_state_cb = free_memnode_state;
1294 /* XXX based on S_visit() in sv.c */
/* Walk every SV in the interpreter's arenas and size any live SV that the
 * earlier reachability traversal failed to visit.  Once the rest of the
 * code visits everything reachable, anything counted here is effectively
 * leaked (or at least unreachable from the roots we walk). */
1296 unseen_sv_size(pTHX_ struct state *st, pPATH)
1301 dNPathNodes(1, NPathArg);
1303 NPathPushNode("unseen", NPtype_NAME);
1305 /* by this point we should have visited all the SVs
1306 * so now we'll run through all the SVs via the arenas
1307 * in order to find any that we've missed for some reason.
1308 * Once the rest of the code is finding all the SVs then any
1309 * found here will be leaks.
/* NOTE(review): the head SV of each arena chains to the next arena via
 * SvANY, and its SvREFCNT holds the arena's element count — this mirrors
 * S_visit() in sv.c; confirm against the perl version being built. */
1311 for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
1312 const SV * const svend = &sva[SvREFCNT(sva)];
1314 for (sv = sva + 1; sv < svend; ++sv) {
/* a live SV (not a freed slot on the free list): size it fully */
1315 if (SvTYPE(sv) != (svtype)SVTYPEMASK && SvREFCNT(sv)) {
1316 sv_size(aTHX_ st, NPathLink("arena"), sv, TOTAL_SIZE_RECURSION);
/* freed slot: it should already be in the seen table from the free-list
 * walk; if check_new() says it is new, something is inconsistent */
1318 else if (check_new(st, sv)) { /* sanity check */
1319 warn("unseen_sv_size encountered freed SV unexpectedly");
/* XSUB for Devel::Size::size() and its alias total_size(): sizes a single
 * SV (or what a passed reference points at).  The alias entry below maps
 * total_size to TOTAL_SIZE_RECURSION — presumably delivered via ix, the
 * standard XS ALIAS mechanism, which is then passed to sv_size() as the
 * recursion level (size() gets the default, non-recursive, value). */
1326 MODULE = Devel::Size PACKAGE = Devel::Size
1334 total_size = TOTAL_SIZE_RECURSION
1337 SV *thing = orig_thing;
/* fresh state per call: the seen-pointer table must start empty */
1338 struct state *st = new_state(aTHX);
1340 /* If they passed us a reference then dereference it. This is the
1341 only way we can check the sizes of arrays and hashes */
1343 thing = SvRV(thing);
/* ix selects the recursion level: 0 for size(), TOTAL_SIZE_RECURSION
 * for total_size() (see the alias mapping above) */
1346 sv_size(aTHX_ st, NULL, thing, ix);
1347 RETVAL = st->total_size;
/* Body of the perl_size()/heap_size() XSUB: walks everything reachable
 * from the interpreter's global roots and sums it, approximating the
 * memory used by the whole perl interpreter.  Uses a fresh state so the
 * seen-pointer table starts empty, then sweeps the arenas afterwards to
 * account for unused SV heads and anything the root walk missed. */
1357 struct state *st = new_state(aTHX);
1358 dNPathNodes(3, NULL);
1360 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1362 NPathPushNode("perl_size", NPtype_NAME); /* provide a root node */
1363 /* start with PL_defstash to get everything reachable from \%main:: */
1364 sv_size(aTHX_ st, NPathLink("PL_defstash"), (SV*)PL_defstash, TOTAL_SIZE_RECURSION);
1366 NPathPushNode("others", NPtype_NAME); /* group these (typically much smaller) items */
1367 sv_size(aTHX_ st, NPathLink("PL_defgv"), (SV*)PL_defgv, TOTAL_SIZE_RECURSION);
1368 sv_size(aTHX_ st, NPathLink("PL_incgv"), (SV*)PL_incgv, TOTAL_SIZE_RECURSION);
1369 sv_size(aTHX_ st, NPathLink("PL_rs"), (SV*)PL_rs, TOTAL_SIZE_RECURSION);
1370 sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1371 sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1372 sv_size(aTHX_ st, NPathLink("PL_errors"), (SV*)PL_errors, TOTAL_SIZE_RECURSION);
1373 sv_size(aTHX_ st, NPathLink("PL_stashcache"), (SV*)PL_stashcache, TOTAL_SIZE_RECURSION);
1374 sv_size(aTHX_ st, NPathLink("PL_patchlevel"), (SV*)PL_patchlevel, TOTAL_SIZE_RECURSION);
1375 sv_size(aTHX_ st, NPathLink("PL_apiversion"), (SV*)PL_apiversion, TOTAL_SIZE_RECURSION);
1376 sv_size(aTHX_ st, NPathLink("PL_registered_mros"), (SV*)PL_registered_mros, TOTAL_SIZE_RECURSION);
1378 sv_size(aTHX_ st, NPathLink("PL_regex_padav"), (SV*)PL_regex_padav, TOTAL_SIZE_RECURSION);
/* global hooks and the compiled main program */
1380 sv_size(aTHX_ st, NPathLink("PL_warnhook"), (SV*)PL_warnhook, TOTAL_SIZE_RECURSION);
1381 sv_size(aTHX_ st, NPathLink("PL_diehook"), (SV*)PL_diehook, TOTAL_SIZE_RECURSION);
1382 sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1383 sv_size(aTHX_ st, NPathLink("PL_main_cv"), (SV*)PL_main_cv, TOTAL_SIZE_RECURSION);
1384 sv_size(aTHX_ st, NPathLink("PL_main_root"), (SV*)PL_main_root, TOTAL_SIZE_RECURSION);
1385 sv_size(aTHX_ st, NPathLink("PL_main_start"), (SV*)PL_main_start, TOTAL_SIZE_RECURSION);
1386 /* TODO PL_pidstatus */
1387 /* TODO PL_stashpad */
1388 /* TODO PL_compiling? COP */
1390 /* TODO stacks: cur, main, tmps, mark, scope, save */
1391 /* TODO PL_exitlist */
1392 /* TODO PL_reentrant_buffers etc */
1394 /* TODO PerlIO? PL_known_layers PL_def_layerlist PL_perlio_fd_refcnt etc */
1396 /* TODO anything missed? */
1398 /* --- by this point we should have seen all reachable SVs --- */
1400 /* in theory we shouldn't have any elements in PL_strtab that haven't been seen yet */
1401 sv_size(aTHX_ st, NPathLink("PL_strtab"), (SV*)PL_strtab, TOTAL_SIZE_RECURSION);
1403 /* unused space in sv head arenas */
1407 # define SvARENA_CHAIN(sv) SvANY(sv) /* XXX */
/* walk the free-SV-head chain; each head should be new to the seen table */
1408 while ((p = MUTABLE_SV(SvARENA_CHAIN(p)))) {
1409 if (!check_new(st, p)) /* sanity check */
1410 warn("Free'd SV head unexpectedly already seen");
1413 NPathPushNode("unused_sv_heads", NPtype_NAME);
1414 ADD_SIZE(st, "sv", free_heads * sizeof(SV));
1417 /* XXX iterate over bodies_by_type and crawl the free chains for each */
1419 /* iterate over all SVs to find any we've not accounted for yet */
1420 /* once the code above is visiting all SVs, any found here have been leaked */
1421 unseen_sv_size(aTHX_ st, NPathLink("unaccounted"));
/* NOTE(review): mstats() is only available with perl's own malloc (or
 * BSD-style libc) — presumably guarded by an #ifdef not visible here;
 * confirm the guard before assuming portability. */
1424 struct mstats ms = mstats();
1425 NPathSetNode("unused malloc space", NPtype_NAME);
1426 ADD_SIZE(st, "bytes_free", ms.bytes_free);
1427 ADD_ATTR(st, NPattr_NOTE, "bytes_total", ms.bytes_total);
1428 ADD_ATTR(st, NPattr_NOTE, "bytes_used", ms.bytes_used);
1429 ADD_ATTR(st, NPattr_NOTE, "chunks_used", ms.chunks_used);
1430 ADD_ATTR(st, NPattr_NOTE, "chunks_free", ms.chunks_free);
1433 RETVAL = st->total_size;