/* Compatibility shims for older perls (macros not yet supplied by ppport.h),
   Win32 structured-exception wrappers, and debug/trace helpers. */
3 #undef NDEBUG /* XXX */
6 #define PERL_NO_GET_CONTEXT
13 /* Not yet in ppport.h */
15 # define CvISXSUB(cv) (CvXSUB(cv) ? TRUE : FALSE)
18 # define SvRV_const(rv) SvRV(rv)
21 # define SvOOK_offset(sv, len) STMT_START { len = SvIVX(sv); } STMT_END
24 # define SvIsCOW(sv) ((SvFLAGS(sv) & (SVf_FAKE | SVf_READONLY)) == \
25 (SVf_FAKE | SVf_READONLY))
27 #ifndef SvIsCOW_shared_hash
28 # define SvIsCOW_shared_hash(sv) (SvIsCOW(sv) && SvLEN(sv) == 0)
30 #ifndef SvSHARED_HEK_FROM_PV
31 # define SvSHARED_HEK_FROM_PV(pvx) \
32 ((struct hek*)(pvx - STRUCT_OFFSET(struct hek, hek_key)))
36 # define PL_opargs opargs
37 # define PL_op_name op_name
41 /* "structured exception" handling is a Microsoft extension to C and C++.
42 It's *not* C++ exception handling - C++ exception handling can't capture
43 SEGVs and suchlike, whereas this can. There's no known analogous
44 functionality on other platforms. */
46 # define TRY_TO_CATCH_SEGV __try
47 # define CAUGHT_EXCEPTION __except(EXCEPTION_EXECUTE_HANDLER)
/* Non-Windows fallback: "try" always runs, "catch" never does. */
49 # define TRY_TO_CATCH_SEGV if(1)
50 # define CAUGHT_EXCEPTION else
54 # define __attribute__(x)
57 #if 0 && defined(DEBUGGING)
58 #define dbg_printf(x) printf x
63 #define TAG /* printf( "# %s(%d)\n", __FILE__, __LINE__ ) */
66 /* The idea is to have a tree structure to store 1 bit per possible pointer
67 address. The lowest 16 bits are stored in a block of 8192 bytes.
68 The blocks are in a 256-way tree, indexed by the rest of the pointer.
69 This can cope with 32 and 64 bit pointers, and any address space layout,
70 without excessive memory needs. The assumption is that your CPU cache
71 works :-) (And that we're not going to bust it) */
/* LEAF_MASK 0x1FFF is 13 set bits, which matches LEAF_BITS only when
   BYTE_BITS == 3 — TODO confirm BYTE_BITS definition (not visible here). */
74 #define LEAF_BITS (16 - BYTE_BITS)
75 #define LEAF_MASK 0x1FFF
/* A node in the parent-linked "name path" used to report where a size was
   found; state (fields only partially shown here) carries the pointer-tracking
   bitstring, options, and reporting callbacks. */
77 typedef struct npath_node_st npath_node_t;
78 struct npath_node_st {
93 /* My hunch (not measured) is that for most architectures pointers will
94 start with 0 bits, hence the start of this array will be hot, and the
95 end unused. So put the flags next to the hot end. */
97 int min_recurse_threshold;
98 /* callback hooks and data */
99 int (*add_attr_cb)(struct state *st, npath_node_t *npath_node, UV attr_type, const char *name, UV value);
100 void (*free_state_cb)(pTHX_ struct state *st);
102 void *state_cb_data; /* free'd by free_state() after free_state_cb() call */
103 /* this stuff will be moved to state_cb_data later */
104 FILE *node_stream_fh;
105 char *node_stream_name;
/* ADD_SIZE relies on NPathAddSizeCb expanding to an expression that ends in a
   comma (see its definition below), making the whole thing one comma
   expression; when path tracking is off the callback part expands to nothing. */
108 #define ADD_SIZE(st, leafname, bytes) (NPathAddSizeCb(st, leafname, bytes) (st)->total_size += (bytes))
110 #define PATH_TRACKING
113 #define pPATH npath_node_t *NPathArg
115 /* A subtle point here is that dNPathNodes and NPathPushNode leave NP pointing
116 * to the next unused slot (though with prev already filled in)
117 * whereas NPathLink leaves NP unchanged, it just fills in the slot NP points
118 * to and passes that NP value to the function being called.
119 * seqn==0 indicates the node is new (hasn't been output yet)
121 #define dNPathNodes(nodes, prev_np) \
122 npath_node_t name_path_nodes[nodes+1]; /* +1 for NPathLink */ \
123 npath_node_t *NP = &name_path_nodes[0]; \
124 NP->seqn = NP->type = 0; NP->id = Nullch; /* safety/debug */ \
126 #define NPathPushNode(nodeid, nodetype) \
128 NP->type = nodetype; \
130 if(0)fprintf(stderr,"NPathPushNode (%p <-) %p <- [%d %s]\n", NP->prev, NP, nodetype,(char*)nodeid);\
132 NP->id = Nullch; /* safety/debug */ \
135 #define NPathSetNode(nodeid, nodetype) \
136 (NP-1)->id = nodeid; \
137 (NP-1)->type = nodetype; \
138 if(0)fprintf(stderr,"NPathSetNode (%p <-) %p <- [%d %s]\n", (NP-1)->prev, (NP-1), nodetype,(char*)nodeid);\
140 #define NPathPopNode \
143 /* dNPathUseParent points NP directly at the parents' name_path_nodes array
144 * So the function can only safely call ADD_*() but not NPathLink, unless the
145 * caller has spare nodes in its name_path_nodes.
147 #define dNPathUseParent(prev_np) npath_node_t *NP = (((prev_np+1)->prev = prev_np), prev_np+1)
/* Node types: what a path node's id points to. */
149 #define NPtype_NAME 0x01
150 #define NPtype_LINK 0x02
151 #define NPtype_SV 0x03
152 #define NPtype_MAGIC 0x04
153 #define NPtype_OP 0x05
/* Attribute types passed to add_attr_cb. */
155 #define NPattr_LEAFSIZE 0x00
156 #define NPattr_NAME 0x01
157 #define NPattr_PADFAKE 0x02
158 #define NPattr_PADNAME 0x03
159 #define NPattr_PADTMP 0x04
160 #define NPattr_NOTE 0x05
162 #define _NPathLink(np, nid, ntype) (((np)->id=nid), ((np)->type=ntype), ((np)->seqn=0))
163 #define NPathLink(nid) (_NPathLink(NP, nid, NPtype_LINK), NP)
164 /* add a link and a name node to the path - a special case for op_size */
165 #define NPathLinkAndNode(nid, nid2) (_NPathLink(NP, nid, NPtype_LINK), _NPathLink(NP+1, nid2, NPtype_NAME), ((NP+1)->prev=NP), (NP+1))
166 #define NPathOpLink (NPathArg)
/* NOTE: expansion ends with a comma on purpose — consumed by ADD_SIZE above. */
167 #define NPathAddSizeCb(st, name, bytes) (st->add_attr_cb && st->add_attr_cb(st, NP-1, NPattr_LEAFSIZE, (name), (bytes))),
168 #define ADD_ATTR(st, attr_type, attr_name, attr_value) (st->add_attr_cb && st->add_attr_cb(st, NP-1, attr_type, attr_name, attr_value))
/* No-op variants for builds without PATH_TRACKING. */
172 #define NPathAddSizeCb(st, name, bytes)
173 #define pPATH void *npath_dummy /* XXX ideally remove */
174 #define dNPathNodes(nodes, prev_np) dNOOP
175 #define NPathLink(nodeid, nodetype) NULL
176 #define NPathOpLink NULL
177 #define ADD_ATTR(st, attr_type, attr_name, attr_value) NOOP
179 #endif /* PATH_TRACKING */
/* Human-readable SV type names, one table variant per perl version range
   (SV type enumeration changed across 5.8 / 5.10.0 / 5.10.1 / 5.12+). */
186 static const char *svtypenames[SVt_LAST] = {
188 "NULL", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVBM", "PVLV", "PVAV", "PVHV", "PVCV", "PVGV", "PVFM", "PVIO",
189 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
190 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
191 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
192 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
193 #elif PERL_VERSION < 13
194 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
196 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
/* Print a short human-readable name for one path node to fp, dispatching on
   the node type (SV/OP/MAGIC/name-string). */
201 np_print_node_name(FILE *fp, npath_node_t *npath_node)
203 char buf[1024]; /* XXX */
205 switch (npath_node->type) {
206 case NPtype_SV: { /* id is pointer to the SV sv_size was called on */
207 const SV *sv = (SV*)npath_node->id;
208 int type = SvTYPE(sv);
/* NOTE(review): typename points at string literals — arguably should be
   const char * for const-correctness. */
209 char *typename = (type == SVt_IV && SvROK(sv)) ? "RV" : svtypenames[type];
210 fprintf(fp, "SV(%s)", typename);
211 switch(type) { /* add some useful details */
212 case SVt_PVAV: fprintf(fp, " fill=%d/%ld", av_len((AV*)sv), AvMAX((AV*)sv)); break;
213 case SVt_PVHV: fprintf(fp, " fill=%ld/%ld", HvFILL((HV*)sv), HvMAX((HV*)sv)); break;
217 case NPtype_OP: { /* id is pointer to the OP op_size was called on */
218 const OP *op = (OP*)npath_node->id;
219 fprintf(fp, "OP(%s)", OP_NAME(op));
222 case NPtype_MAGIC: { /* id is pointer to the MAGIC struct */
223 MAGIC *magic_pointer = (MAGIC*)npath_node->id;
224 /* XXX it would be nice if we could reuse mg_names.c [sigh] */
225 fprintf(fp, "MAGIC(%c)", magic_pointer->mg_type ? magic_pointer->mg_type : '0');
/* NPtype_LINK / NPtype_NAME: id is the name string itself. */
229 fprintf(fp, "%s", npath_node->id);
232 fprintf(fp, "%s", npath_node->id);
234 default: /* assume id is a string pointer */
235 fprintf(fp, "UNKNOWN(%d,%p)", npath_node->type, npath_node->id);
/* Indent stderr output to the given path-tree depth. */
242 np_dump_indent(int depth) {
244 fprintf(stderr, ": ");
/* Walk up the ancestor chain and emit (via cb) every node not yet output
   (seqn==0), assigning depth and sequence numbers on the way back down.
   If cb rejects a node, its seqn is rolled back and the deeper node is
   re-parented past it so the skipped node stays invisible. */
248 np_walk_new_nodes(struct state *st,
249 npath_node_t *npath_node,
250 npath_node_t *npath_node_deeper,
251 int (*cb)(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper))
253 if (npath_node->seqn) /* node already output */
256 if (npath_node->prev) {
257 np_walk_new_nodes(st, npath_node->prev, npath_node, cb); /* recurse */
258 npath_node->depth = npath_node->prev->depth + 1;
260 else npath_node->depth = 0;
261 npath_node->seqn = ++st->seqn;
264 if (cb(st, npath_node, npath_node_deeper)) {
265 /* ignore this node */
266 assert(npath_node->prev);
267 assert(npath_node->depth);
268 assert(npath_node_deeper);
270 npath_node->seqn = --st->seqn;
271 npath_node_deeper->prev = npath_node->prev;
/* np_walk_new_nodes callback: dump one node to stderr in indented tree form. */
279 np_dump_formatted_node(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
280 if (0 && npath_node->type == NPtype_LINK)
282 np_dump_indent(npath_node->depth);
283 np_print_node_name(stderr, npath_node);
284 if (npath_node->type == NPtype_LINK)
285 fprintf(stderr, "->"); /* cosmetic */
286 fprintf(stderr, "\t\t[#%ld @%u] ", npath_node->seqn, npath_node->depth);
287 fprintf(stderr, "\n");
/* add_attr_cb for the stderr "dump" output mode: print the path (new nodes
   first) then one attribute line. */
292 np_dump_node_path_info(struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
294 if (attr_type == NPattr_LEAFSIZE && !attr_value)
295 return 0; /* ignore zero sized leaf items */
296 np_walk_new_nodes(st, npath_node, NULL, np_dump_formatted_node)
297 np_dump_indent(npath_node->depth+1);
299 case NPattr_LEAFSIZE:
300 fprintf(stderr, "+%ld %s =%ld", attr_value, attr_name, attr_value+st->total_size);
303 fprintf(stderr, "~NAMED('%s') %lu", attr_name, attr_value);
306 fprintf(stderr, "~note %s %lu", attr_name, attr_value);
311 fprintf(stderr, "~pad%lu %s %lu", attr_type, attr_name, attr_value);
314 fprintf(stderr, "~??? %s %lu", attr_name, attr_value);
317 fprintf(stderr, "\n");
/* np_walk_new_nodes callback for the machine-readable stream output mode. */
322 np_stream_formatted_node(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
323 fprintf(st->node_stream_fh, "-%u %lu %u ",
324 npath_node->type, npath_node->seqn, (unsigned)npath_node->depth
326 np_print_node_name(st->node_stream_fh, npath_node);
327 fprintf(st->node_stream_fh, "\n");
/* add_attr_cb for the stream output mode: write path nodes then one
   attribute or leaf-size record to node_stream_fh. */
332 np_stream_node_path_info(struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
334 if (!attr_type && !attr_value)
335 return 0; /* ignore zero sized leaf items */
336 np_walk_new_nodes(st, npath_node, NULL, np_stream_formatted_node);
337 if (attr_type) { /* Attribute type, name and value */
338 fprintf(st->node_stream_fh, "%lu %lu ", attr_type, npath_node->seqn);
340 else { /* Leaf name and memory size */
341 fprintf(st->node_stream_fh, "L %lu ", npath_node->seqn);
343 fprintf(st->node_stream_fh, "%lu %s\n", attr_value, attr_name);
348 #endif /* PATH_TRACKING */
352 Checks to see if thing is in the bitstring.
353 Returns true or false, and
354 notes thing in the segmented bitstring.
357 check_new(struct state *st, const void *const p) {
358 unsigned int bits = 8 * sizeof(void*);
359 const size_t raw_p = PTR2nat(p);
360 /* This effectively rotates the value right by the number of low always-0
361 bits in an aligned pointer. The assumption is that most (if not all)
362 pointers are aligned, and these will be in the same chain of nodes
363 (and hence hot in the cache) but we can still deal with any unaligned
365 const size_t cooked_p
366 = (raw_p >> ALIGN_BITS) | (raw_p << (bits - ALIGN_BITS));
367 const U8 this_bit = 1 << (cooked_p & 0x7);
371 void **tv_p = (void **) (st->tracking);
373 if (NULL == p) return FALSE;
/* Probe the pointer (inside TRY_TO_CATCH_SEGV elsewhere) to detect
   dangling/invalid addresses before walking the tree. */
375 const char c = *(const char *)p;
378 if (st->dangle_whine)
379 warn( "Devel::Size: Encountered invalid pointer: %p\n", p );
385 /* bits now 24 (32 bit pointers) or 56 (64 bit pointers) */
387 /* First level is always present. */
/* Descend the 256-way tree one byte of the pointer at a time, allocating
   interior nodes lazily. */
389 i = (unsigned int)((cooked_p >> bits) & 0xFF);
391 Newxz(tv_p[i], 256, void *);
392 tv_p = (void **)(tv_p[i]);
394 } while (bits > LEAF_BITS + BYTE_BITS);
395 /* bits now 16 always */
396 #if !defined(MULTIPLICITY) || PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8)
397 /* 5.8.8 and earlier have an assert() macro that uses Perl_croak, hence needs
398 a my_perl under multiplicity */
401 leaf_p = (U8 **)tv_p;
402 i = (unsigned int)((cooked_p >> bits) & 0xFF);
404 Newxz(leaf_p[i], 1 << LEAF_BITS, U8);
/* Final level: index into the leaf byte-block and test/set the bit. */
409 i = (unsigned int)((cooked_p >> BYTE_BITS) & LEAF_MASK);
411 if(leaf[i] & this_bit)
/* Recursively free one level of the 256-way pointer-tracking tree. */
419 free_tracking_at(void **tv, int level)
427 free_tracking_at((void **) tv[i], level);
/* Release a state struct: run the user callback first, then free its data,
   then tear down the tracking tree. top_level is the tree depth implied by
   the pointer width. */
441 free_state(struct state *st)
443 const int top_level = (sizeof(void *) * 8 - LEAF_BITS - BYTE_BITS) / 8;
444 if (st->free_state_cb)
445 st->free_state_cb(st);
446 if (st->state_cb_data)
447 Safefree(st->state_cb_data);
448 free_tracking_at((void **)st->tracking, top_level);
452 /* For now, this is somewhat a compatibility bodge until the plan comes
453 together for fine grained recursion control. total_size() would recurse into
454 hash and array members, whereas sv_size() would not. However, sv_size() is
455 called with CvSTASH() of a CV, which means that if it (also) starts to
456 recurse fully, then the size of any CV now becomes the size of the entire
457 symbol table reachable from it, and potentially the entire symbol table, if
458 any subroutine makes a reference to a global (such as %SIG). The historical
459 implementation of total_size() didn't report "everything", and changing the
460 only available size to "everything" doesn't feel at all useful. */
/* Recursion levels passed to sv_size() and friends. */
462 #define NO_RECURSION 0
463 #define SOME_RECURSION 1
464 #define TOTAL_SIZE_RECURSION 2
/* Forward declaration: sv_size is mutually recursive with the other *_size
   helpers below. */
466 static void sv_size(pTHX_ struct state *, pPATH, const SV *const, const int recurse);
482 , OPc_CONDOP /* 12 */
/* Classify an OP so op_size() knows which op-struct variant to measure.
   NOTE(review): appears derived from B.xs's cc_opclass — TODO confirm and
   keep in sync. */
491 cc_opclass(const OP * const o)
497 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
499 if (o->op_type == OP_SASSIGN)
500 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
503 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_AELEMFAST)
507 if ((o->op_type == OP_TRANS)) {
511 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
543 #ifdef OA_PVOP_OR_SVOP
544 case OA_PVOP_OR_SVOP: TAG;
546 * Character translations (tr///) are usually a PVOP, keeping a
547 * pointer to a table of shorts used to look up translations.
548 * Under utf8, however, a simple table isn't practical; instead,
549 * the OP is an SVOP, and the SV is a reference to a swash
550 * (i.e., an RV pointing to an HV).
552 return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF))
553 ? OPc_SVOP : OPc_PVOP;
562 case OA_BASEOP_OR_UNOP: TAG;
564 * UNI(OP_foo) in toke.c returns token UNI or FUNC1 depending on
565 * whether parens were seen. perly.y uses OPf_SPECIAL to
566 * signal whether a BASEOP had empty parens or none.
567 * Some other UNOPs are created later, though, so the best
568 * test is OPf_KIDS, which is set in newUNOP.
570 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
572 case OA_FILESTATOP: TAG;
574 * The file stat OPs are created via UNI(OP_foo) in toke.c but use
575 * the OPf_REF flag to distinguish between OP types instead of the
576 * usual OPf_SPECIAL flag. As usual, if OPf_KIDS is set, then we
577 * return OPc_UNOP so that walkoptree can find our children. If
578 * OPf_KIDS is not set then we check OPf_REF. Without OPf_REF set
579 * (no argument to the operator) it's an OP; with OPf_REF set it's
580 * an SVOP (and op_sv is the GV for the filehandle argument).
582 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
584 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
586 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
588 case OA_LOOPEXOP: TAG;
590 * next, last, redo, dump and goto use OPf_SPECIAL to indicate that a
591 * label was omitted (in which case it's a BASEOP) or else a term was
592 * seen. In this last case, all except goto are definitely PVOP but
593 * goto is either a PVOP (with an ordinary constant label), an UNOP
594 * with OPf_STACKED (with a non-constant non-sub) or an UNOP for
595 * OP_REFGEN (with goto &sub) in which case OPf_STACKED also seems to
598 if (o->op_flags & OPf_STACKED)
600 else if (o->op_flags & OPf_SPECIAL)
610 warn("Devel::Size: Can't determine class of operator %s, assuming BASEOP\n",
611 PL_op_name[o->op_type]);
617 /* Figure out how much magic is attached to the SV and return the
620 magic_size(pTHX_ const SV * const thing, struct state *st, pPATH) {
621 dNPathNodes(1, NPathArg);
622 MAGIC *magic_pointer = SvMAGIC(thing);
624 /* push a dummy node for NPathSetNode to update inside the while loop */
625 NPathPushNode("dummy", NPtype_NAME);
627 /* Have we seen the magic pointer? (NULL has always been seen before) */
628 while (check_new(st, magic_pointer)) {
630 NPathSetNode(magic_pointer, NPtype_MAGIC);
632 ADD_SIZE(st, "mg", sizeof(MAGIC));
633 /* magic vtables aren't freed when magic is freed, so don't count them.
634 (They are static structures. Anything that assumes otherwise is buggy.)
639 sv_size(aTHX_ st, NPathLink("mg_obj"), magic_pointer->mg_obj, TOTAL_SIZE_RECURSION);
/* HEf_SVKEY means mg_ptr actually holds an SV*, not a char buffer. */
640 if (magic_pointer->mg_len == HEf_SVKEY) {
641 sv_size(aTHX_ st, NPathLink("mg_ptr"), (SV *)magic_pointer->mg_ptr, TOTAL_SIZE_RECURSION);
643 #if defined(PERL_MAGIC_utf8) && defined (PERL_MAGIC_UTF8_CACHESIZE)
644 else if (magic_pointer->mg_type == PERL_MAGIC_utf8) {
645 if (check_new(st, magic_pointer->mg_ptr)) {
646 ADD_SIZE(st, "PERL_MAGIC_utf8", PERL_MAGIC_UTF8_CACHESIZE * 2 * sizeof(STRLEN));
650 else if (magic_pointer->mg_len > 0) {
651 if (check_new(st, magic_pointer->mg_ptr)) {
652 ADD_SIZE(st, "mg_len", magic_pointer->mg_len);
656 /* Get the next in the chain */
657 magic_pointer = magic_pointer->mg_moremagic;
660 if (st->dangle_whine)
661 warn( "Devel::Size: Encountered bad magic at: %p\n", magic_pointer );
/* Count a NUL-terminated string once: size is strlen + 1 for the NUL. */
667 check_new_and_strlen(struct state *st, const char *const p, pPATH) {
668 dNPathNodes(1, NPathArg->prev);
669 if(check_new(st, p)) {
670 NPathPushNode(NPathArg->id, NPtype_NAME);
671 ADD_SIZE(st, NPathArg->id, 1 + strlen(p));
/* Account for a compiled regexp structure (layout differs before/after the
   5.11 regexp-as-SV change); warns once that the numbers are approximate. */
676 regex_size(const REGEXP * const baseregex, struct state *st, pPATH) {
677 dNPathNodes(1, NPathArg);
678 if(!check_new(st, baseregex))
680 NPathPushNode("regex_size", NPtype_NAME);
681 ADD_SIZE(st, "REGEXP", sizeof(REGEXP));
682 #if (PERL_VERSION < 11)
683 /* Note the size of the paren offset thing */
684 ADD_SIZE(st, "nparens", sizeof(I32) * baseregex->nparens * 2);
685 ADD_SIZE(st, "precomp", strlen(baseregex->precomp));
687 ADD_SIZE(st, "regexp", sizeof(struct regexp));
688 ADD_SIZE(st, "nparens", sizeof(I32) * SvANY(baseregex)->nparens * 2);
689 /*ADD_SIZE(st, strlen(SvANY(baseregex)->subbeg));*/
691 if (st->go_yell && !st->regex_whine) {
692 carp("Devel::Size: Calculated sizes for compiled regexes are incompatible, and probably always will be");
/* Measure an optree: follows op_next and the per-class child pointers,
   dispatching on cc_opclass() to size the correct op-struct variant. */
698 op_size(pTHX_ const OP * const baseop, struct state *st, pPATH)
700 /* op_size recurses to follow the chain of opcodes. For the node path we
701 * don't want the chain to be 'nested' in the path so we use dNPathUseParent().
702 * Also, to avoid a link-to-a-link the caller should use NPathLinkAndNode()
703 * instead of NPathLink().
705 dNPathUseParent(NPathArg);
709 if(!check_new(st, baseop))
712 op_size(aTHX_ baseop->op_next, st, NPathOpLink);
714 switch (cc_opclass(baseop)) {
715 case OPc_BASEOP: TAG;
716 ADD_SIZE(st, "op", sizeof(struct op));
719 ADD_SIZE(st, "unop", sizeof(struct unop));
720 op_size(aTHX_ ((UNOP *)baseop)->op_first, st, NPathOpLink);
723 ADD_SIZE(st, "binop", sizeof(struct binop));
724 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
725 op_size(aTHX_ ((BINOP *)baseop)->op_last, st, NPathOpLink);
728 ADD_SIZE(st, "logop", sizeof(struct logop));
729 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
730 op_size(aTHX_ ((LOGOP *)baseop)->op_other, st, NPathOpLink);
733 case OPc_CONDOP: TAG;
734 ADD_SIZE(st, "condop", sizeof(struct condop));
735 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
736 op_size(aTHX_ ((CONDOP *)baseop)->op_true, st, NPathOpLink);
737 op_size(aTHX_ ((CONDOP *)baseop)->op_false, st, NPathOpLink);
740 case OPc_LISTOP: TAG;
741 ADD_SIZE(st, "listop", sizeof(struct listop));
742 op_size(aTHX_ ((LISTOP *)baseop)->op_first, st, NPathOpLink);
743 op_size(aTHX_ ((LISTOP *)baseop)->op_last, st, NPathOpLink);
746 ADD_SIZE(st, "pmop", sizeof(struct pmop));
747 op_size(aTHX_ ((PMOP *)baseop)->op_first, st, NPathOpLink);
748 op_size(aTHX_ ((PMOP *)baseop)->op_last, st, NPathOpLink);
749 #if PERL_VERSION < 9 || (PERL_VERSION == 9 && PERL_SUBVERSION < 5)
750 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplroot, st, NPathOpLink);
751 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplstart, st, NPathOpLink);
753 /* This is defined away in perl 5.8.x, but it is in there for
756 regex_size(PM_GETRE((PMOP *)baseop), st, NPathLink("PM_GETRE"));
758 regex_size(((PMOP *)baseop)->op_pmregexp, st, NPathLink("op_pmregexp"));
762 ADD_SIZE(st, "svop", sizeof(struct svop));
763 if (!(baseop->op_type == OP_AELEMFAST
764 && baseop->op_flags & OPf_SPECIAL)) {
765 /* not an OP_PADAV replacement */
766 sv_size(aTHX_ st, NPathLink("SVOP"), ((SVOP *)baseop)->op_sv, SOME_RECURSION);
771 ADD_SIZE(st, "padop", sizeof(struct padop));
776 ADD_SIZE(st, "gvop", sizeof(struct gvop));
777 sv_size(aTHX_ st, NPathLink("GVOP"), ((GVOP *)baseop)->op_gv, SOME_RECURSION);
781 check_new_and_strlen(st, ((PVOP *)baseop)->op_pv, NPathLink("op_pv"));
784 ADD_SIZE(st, "loop", sizeof(struct loop));
785 op_size(aTHX_ ((LOOP *)baseop)->op_first, st, NPathOpLink);
786 op_size(aTHX_ ((LOOP *)baseop)->op_last, st, NPathOpLink);
787 op_size(aTHX_ ((LOOP *)baseop)->op_redoop, st, NPathOpLink);
788 op_size(aTHX_ ((LOOP *)baseop)->op_nextop, st, NPathOpLink);
789 op_size(aTHX_ ((LOOP *)baseop)->op_lastop, st, NPathOpLink);
794 basecop = (COP *)baseop;
795 ADD_SIZE(st, "cop", sizeof(struct cop));
797 /* Change 33656 by nicholas@mouse-mill on 2008/04/07 11:29:51
798 Eliminate cop_label from struct cop by storing a label as the first
799 entry in the hints hash. Most statements don't have labels, so this
800 will save memory. Not sure how much.
801 The check below will incorrectly fail on bleadperls
802 before 5.11 @33656, but later than 5.10, producing slightly too
803 small memory sizes on these Perls. */
804 #if (PERL_VERSION < 11)
805 check_new_and_strlen(st, basecop->cop_label, NPathLink("cop_label"));
808 check_new_and_strlen(st, basecop->cop_file, NPathLink("cop_file"));
809 check_new_and_strlen(st, basecop->cop_stashpv, NPathLink("cop_stashpv"));
811 sv_size(aTHX_ st, NPathLink("cop_stash"), (SV *)basecop->cop_stash, SOME_RECURSION);
812 sv_size(aTHX_ st, NPathLink("cop_filegv"), (SV *)basecop->cop_filegv, SOME_RECURSION);
822 if (st->dangle_whine)
823 warn( "Devel::Size: Encountered dangling pointer in opcode at: %p\n", baseop );
/* Account for a hash-entry key (HEK). Shared keys live in PL_strtab and are
   counted once thanks to check_new(); layout differs before/after 5.10. */
828 hek_size(pTHX_ struct state *st, HEK *hek, U32 shared, pPATH)
830 dNPathNodes(1, NPathArg);
832 /* Hash keys can be shared. Have we seen this before? */
833 if (!check_new(st, hek))
835 NPathPushNode("hek", NPtype_NAME);
836 ADD_SIZE(st, "hek_len", HEK_BASESIZE + hek->hek_len
838 + 1 /* No hash key flags prior to 5.8.0 */
844 #if PERL_VERSION < 10
845 ADD_SIZE(st, "he", sizeof(struct he));
847 ADD_SIZE(st, "shared_he", STRUCT_OFFSET(struct shared_he, shared_he_hek));
/* NOTE(review): this condition is also true on e.g. 5.12.x whenever
   PERL_SUBVERSION < 9 — confirm the intended version cutoff. */
853 #if PERL_VERSION < 8 || PERL_SUBVERSION < 9
/* Under Purify-style builds, use the full body struct sizes and no offsets. */
858 # define MAYBE_PURIFY(normal, pure) (pure)
859 # define MAYBE_OFFSET(struct_name, member) 0
861 # define MAYBE_PURIFY(normal, pure) (normal)
862 # define MAYBE_OFFSET(struct_name, member) STRUCT_OFFSET(struct_name, member)
/* Size of each SV body type, indexed by SvTYPE; one table variant per perl
   version range, mirroring the svtypenames table above. The MAYBE_OFFSET
   subtractions account for body structs allocated from an offset pointer. */
865 const U8 body_sizes[SVt_LAST] = {
868 MAYBE_PURIFY(sizeof(IV), sizeof(XPVIV)), /* SVt_IV */
869 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
870 sizeof(XRV), /* SVt_RV */
871 sizeof(XPV), /* SVt_PV */
872 sizeof(XPVIV), /* SVt_PVIV */
873 sizeof(XPVNV), /* SVt_PVNV */
874 sizeof(XPVMG), /* SVt_PVMG */
875 sizeof(XPVBM), /* SVt_PVBM */
876 sizeof(XPVLV), /* SVt_PVLV */
877 sizeof(XPVAV), /* SVt_PVAV */
878 sizeof(XPVHV), /* SVt_PVHV */
879 sizeof(XPVCV), /* SVt_PVCV */
880 sizeof(XPVGV), /* SVt_PVGV */
881 sizeof(XPVFM), /* SVt_PVFM */
882 sizeof(XPVIO) /* SVt_PVIO */
883 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
887 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
889 MAYBE_PURIFY(sizeof(xpv_allocated), sizeof(XPV)), /* SVt_PV */
890 MAYBE_PURIFY(sizeof(xpviv_allocated), sizeof(XPVIV)),/* SVt_PVIV */
891 sizeof(XPVNV), /* SVt_PVNV */
892 sizeof(XPVMG), /* SVt_PVMG */
893 sizeof(XPVGV), /* SVt_PVGV */
894 sizeof(XPVLV), /* SVt_PVLV */
895 MAYBE_PURIFY(sizeof(xpvav_allocated), sizeof(XPVAV)),/* SVt_PVAV */
896 MAYBE_PURIFY(sizeof(xpvhv_allocated), sizeof(XPVHV)),/* SVt_PVHV */
897 MAYBE_PURIFY(sizeof(xpvcv_allocated), sizeof(XPVCV)),/* SVt_PVCV */
898 MAYBE_PURIFY(sizeof(xpvfm_allocated), sizeof(XPVFM)),/* SVt_PVFM */
899 sizeof(XPVIO), /* SVt_PVIO */
900 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
904 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
906 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
907 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
908 sizeof(XPVNV), /* SVt_PVNV */
909 sizeof(XPVMG), /* SVt_PVMG */
910 sizeof(XPVGV), /* SVt_PVGV */
911 sizeof(XPVLV), /* SVt_PVLV */
912 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
913 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
914 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
915 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
916 sizeof(XPVIO) /* SVt_PVIO */
917 #elif PERL_VERSION < 13
921 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
922 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
923 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
924 sizeof(XPVNV), /* SVt_PVNV */
925 sizeof(XPVMG), /* SVt_PVMG */
926 sizeof(regexp) - MAYBE_OFFSET(regexp, xpv_cur), /* SVt_REGEXP */
927 sizeof(XPVGV), /* SVt_PVGV */
928 sizeof(XPVLV), /* SVt_PVLV */
929 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
930 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
931 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
932 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
933 sizeof(XPVIO) /* SVt_PVIO */
938 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
939 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
940 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
941 sizeof(XPVNV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVNV */
942 sizeof(XPVMG), /* SVt_PVMG */
943 sizeof(regexp), /* SVt_REGEXP */
944 sizeof(XPVGV), /* SVt_PVGV */
945 sizeof(XPVLV), /* SVt_PVLV */
946 sizeof(XPVAV), /* SVt_PVAV */
947 sizeof(XPVHV), /* SVt_PVHV */
948 sizeof(XPVCV), /* SVt_PVCV */
949 sizeof(XPVFM), /* SVt_PVFM */
950 sizeof(XPVIO) /* SVt_PVIO */
955 /* based on Perl_do_dump_pad() - wraps sv_size and adds ADD_ATTR calls for the pad names */
957 padlist_size(pTHX_ struct state *const st, pPATH, PADLIST *padlist,
960 dNPathUseParent(NPathArg);
/* Element 0 of the padlist is the pad-names AV. */
968 pad_name = MUTABLE_AV(*av_fetch(MUTABLE_AV(padlist), 0, FALSE));
969 pname = AvARRAY(pad_name);
971 for (ix = 1; ix <= AvFILLp(pad_name); ix++) {
972 const SV *namesv = pname[ix];
973 if (namesv && namesv == &PL_sv_undef) {
/* Emit one attribute per pad slot: fake (closed-over), named lexical,
   or anonymous temporary. */
978 ADD_ATTR(st, NPattr_PADFAKE, SvPVX_const(namesv), ix);
980 ADD_ATTR(st, NPattr_PADNAME, SvPVX_const(namesv), ix);
983 ADD_ATTR(st, NPattr_PADTMP, "SVs_PADTMP", ix);
/* The padlist itself is an AV-of-AVs; let sv_size account for the storage. */
987 sv_size(aTHX_ st, NPathArg, (SV*)padlist, recurse);
992 sv_size(pTHX_ struct state *const st, pPATH, const SV * const orig_thing,
994 const SV *thing = orig_thing;
995 dNPathNodes(3, NPathArg);
998 if(!check_new(st, orig_thing))
1001 type = SvTYPE(thing);
1002 if (type > SVt_LAST) {
1003 warn("Devel::Size: Unknown variable type: %d encountered\n", type);
1006 NPathPushNode(thing, NPtype_SV);
1007 ADD_SIZE(st, "sv", sizeof(SV) + body_sizes[type]);
1009 if (type >= SVt_PVMG) {
1010 magic_size(aTHX_ thing, st, NPathLink("MG"));
1014 #if (PERL_VERSION < 11)
1015 /* Is it a reference? */
1020 if(recurse && SvROK(thing))
1021 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
1025 /* Is there anything in the array? */
1026 if (AvMAX(thing) != -1) {
1027 /* an array with 10 slots has AvMax() set to 9 - te 2007-04-22 */
1028 ADD_SIZE(st, "av_max", sizeof(SV *) * (AvMAX(thing) + 1));
1029 dbg_printf(("total_size: %li AvMAX: %li av_len: $i\n", st->total_size, AvMAX(thing), av_len((AV*)thing)));
1031 if (recurse >= st->min_recurse_threshold) {
1032 SSize_t i = AvFILLp(thing) + 1;
1035 sv_size(aTHX_ st, NPathLink("AVelem"), AvARRAY(thing)[i], recurse);
1038 /* Add in the bits on the other side of the beginning */
1040 dbg_printf(("total_size %li, sizeof(SV *) %li, AvARRAY(thing) %li, AvALLOC(thing)%li , sizeof(ptr) %li \n",
1041 st->total_size, sizeof(SV*), AvARRAY(thing), AvALLOC(thing), sizeof( thing )));
1043 /* under Perl 5.8.8 64bit threading, AvARRAY(thing) was a pointer while AvALLOC was 0,
1044 resulting in grossly overstated sized for arrays. Technically, this shouldn't happen... */
1045 if (AvALLOC(thing) != 0) {
1046 ADD_SIZE(st, "AvALLOC", (sizeof(SV *) * (AvARRAY(thing) - AvALLOC(thing))));
1048 #if (PERL_VERSION < 9)
1049 /* Is there something hanging off the arylen element?
1050 Post 5.9.something this is stored in magic, so will be found there,
1051 and Perl_av_arylen_p() takes a non-const AV*, hence compilers rightly
1052 complain about AvARYLEN() passing thing to it. */
1053 sv_size(aTHX_ st, NPathLink("ARYLEN"), AvARYLEN(thing), recurse);
1057 /* Now the array of buckets */
1058 ADD_SIZE(st, "hv_max", (sizeof(HE *) * (HvMAX(thing) + 1)));
1059 if (HvENAME(thing)) {
1060 ADD_ATTR(st, NPattr_NAME, HvENAME(thing), 0);
1062 /* Now walk the bucket chain */
1063 if (HvARRAY(thing)) {
1066 for (cur_bucket = 0; cur_bucket <= HvMAX(thing); cur_bucket++) {
1067 cur_entry = *(HvARRAY(thing) + cur_bucket);
1069 ADD_SIZE(st, "he", sizeof(HE));
1070 hek_size(aTHX_ st, cur_entry->hent_hek, HvSHAREKEYS(thing), NPathLink("hent_hek"));
1071 if (recurse >= st->min_recurse_threshold) {
1072 /* I've seen a PL_strtab HeVAL == 0xC and 0x40C etc
1073 * just running perl -Mblib -Mstrict -MDevel::Size=:all -MCarp -e 'warn perl_size()'
1074 * but it seemed like a corruption - it would change come and go with irrelevant code changes.
1075 * so we protect against that here, but I'd like to know the cause.
1077 if (PTR2UV(HeVAL(cur_entry)) > 0xFFF)
1078 sv_size(aTHX_ st, NPathLink("HeVAL"), HeVAL(cur_entry), recurse);
1080 cur_entry = cur_entry->hent_next;
1086 /* This direct access is arguably "naughty": */
1087 struct mro_meta *meta = HvAUX(thing)->xhv_mro_meta;
1088 #if PERL_VERSION > 13 || PERL_SUBVERSION > 8
1090 I32 count = HvAUX(thing)->xhv_name_count;
1093 HEK **names = HvAUX(thing)->xhv_name_u.xhvnameu_names;
1097 hek_size(aTHX_ st, names[count], 1, NPathLink("HvAUXelem"));
1102 hek_size(aTHX_ st, HvNAME_HEK(thing), 1, NPathLink("HvNAME_HEK"));
1105 ADD_SIZE(st, "xpvhv_aux", sizeof(struct xpvhv_aux));
1107 ADD_SIZE(st, "mro_meta", sizeof(struct mro_meta));
1108 sv_size(aTHX_ st, NPathLink("mro_nextmethod"), (SV *)meta->mro_nextmethod, TOTAL_SIZE_RECURSION);
1109 #if PERL_VERSION > 10 || (PERL_VERSION == 10 && PERL_SUBVERSION > 0)
1110 sv_size(aTHX_ st, NPathLink("isa"), (SV *)meta->isa, TOTAL_SIZE_RECURSION);
1112 #if PERL_VERSION > 10
1113 sv_size(aTHX_ st, NPathLink("mro_linear_all"), (SV *)meta->mro_linear_all, TOTAL_SIZE_RECURSION);
1114 sv_size(aTHX_ st, NPathLink("mro_linear_current"), meta->mro_linear_current, TOTAL_SIZE_RECURSION);
1116 sv_size(aTHX_ st, NPathLink("mro_linear_dfs"), (SV *)meta->mro_linear_dfs, TOTAL_SIZE_RECURSION);
1117 sv_size(aTHX_ st, NPathLink("mro_linear_c3"), (SV *)meta->mro_linear_c3, TOTAL_SIZE_RECURSION);
1122 check_new_and_strlen(st, HvNAME_get(thing), NPathLink("HvNAME"));
1128 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), SOME_RECURSION);
1129 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), recurse);
1131 if (st->go_yell && !st->fm_whine) {
1132 carp("Devel::Size: Calculated sizes for FMs are incomplete");
1138 /* not CvSTASH, per https://rt.cpan.org/Ticket/Display.html?id=79366 */
1139 sv_size(aTHX_ st, NPathLink("SvSTASH"), (SV *)SvSTASH(thing), SOME_RECURSION);
1140 sv_size(aTHX_ st, NPathLink("CvGV"), (SV *)CvGV(thing), SOME_RECURSION);
1141 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), SOME_RECURSION);
1142 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), recurse);
1143 if (CvISXSUB(thing)) {
1144 sv_size(aTHX_ st, NPathLink("cv_const_sv"), cv_const_sv((CV *)thing), recurse);
1146 if(1)op_size(aTHX_ CvSTART(thing), st, NPathLinkAndNode("CvSTART", "OPs")); /* XXX ? */
1147 op_size(aTHX_ CvROOT(thing), st, NPathLinkAndNode("CvROOT", "OPs"));
/* NOTE(review): elided fragment of the SVt_PVIO case of sv_size().
   Sizes the IO's embedded name strings and walks its format/top/bottom
   GVs; the underlying PerlIO layer stack is not yet sized. */
1152 /* Some embedded char pointers */
1153 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_top_name, NPathLink("xio_top_name"));
1154 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_fmt_name, NPathLink("xio_fmt_name"));
1155 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_bottom_name, NPathLink("xio_bottom_name"));
1156 /* Throw the GVs on the list to be walked if they're not-null */
1157 sv_size(aTHX_ st, NPathLink("xio_top_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_top_gv, recurse);
1158 sv_size(aTHX_ st, NPathLink("xio_bottom_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_bottom_gv, recurse);
1159 sv_size(aTHX_ st, NPathLink("xio_fmt_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_fmt_gv, recurse);
1161 /* Only go trotting through the IO structures if they're really
1162 trottable. If USE_PERLIO is defined we can do this. If
1163 not... we can't, so we don't even try */
1165 /* Dig into xio_ifp and xio_ofp here */
1166 warn("Devel::Size: Can't size up perlio layers yet\n");
/* NOTE(review): elided fragment of the SVt_PVGV case of sv_size().
   Sizes the glob's name/file HEKs (or strings, on older perls) and the
   GP structure with each of its slots (scalar, array, hash, code, form,
   effective GV). */
1171 #if (PERL_VERSION < 9)
/* only globs with a GP carry the extra structures sized below */
1176 if(isGV_with_GP(thing)) {
1178 hek_size(aTHX_ st, GvNAME_HEK(thing), 1, NPathLink("GvNAME_HEK"));
1180 ADD_SIZE(st, "GvNAMELEN", GvNAMELEN(thing));
1182 ADD_ATTR(st, NPattr_NAME, GvNAME_get(thing), 0);
1184 hek_size(aTHX_ st, GvFILE_HEK(thing), 1, NPathLink("GvFILE_HEK"));
1185 #elif defined(GvFILE)
1186 # if !defined(USE_ITHREADS) || (PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8))
1187 /* With ithreads, before 5.8.9, this can end up pointing to freed memory
1188 if the GV was created in an eval, as GvFILE() points to CopFILE(),
1189 and the relevant COP has been freed on scope cleanup after the eval.
1190 5.8.9 adds a binary compatible fudge that catches the vast majority
1191 of cases. 5.9.something added a proper fix, by converting the GP to
1192 use a shared hash key (properly reference counted), instead of a
1193 char * (owned by who knows? possibly no-one now) */
1194 check_new_and_strlen(st, GvFILE(thing), NPathLink("GvFILE"));
1197 /* Is there something hanging off the glob? */
/* check_new() also prevents double-counting a GP shared by aliased globs */
1198 if (check_new(st, GvGP(thing))) {
1199 ADD_SIZE(st, "GP", sizeof(GP));
1200 sv_size(aTHX_ st, NPathLink("gp_sv"), (SV *)(GvGP(thing)->gp_sv), recurse);
1201 sv_size(aTHX_ st, NPathLink("gp_form"), (SV *)(GvGP(thing)->gp_form), recurse);
1202 sv_size(aTHX_ st, NPathLink("gp_av"), (SV *)(GvGP(thing)->gp_av), recurse);
1203 sv_size(aTHX_ st, NPathLink("gp_hv"), (SV *)(GvGP(thing)->gp_hv), recurse);
1204 sv_size(aTHX_ st, NPathLink("gp_egv"), (SV *)(GvGP(thing)->gp_egv), recurse);
1205 sv_size(aTHX_ st, NPathLink("gp_cv"), (SV *)(GvGP(thing)->gp_cv), recurse);
1207 #if (PERL_VERSION >= 9)
/* NOTE(review): elided tail fragment of sv_size()'s scalar handling.
   Follows references, credits shared-hash-key COW strings to their HEK,
   and otherwise charges the PV buffer (SvLEN) plus any OOK offset. */
1211 #if PERL_VERSION <= 8
/* a reference: size the referent instead of a string buffer */
1219 if(recurse && SvROK(thing))
1220 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
/* COW shared hash key: the PV points into a shared HEK, not a buffer */
1221 else if (SvIsCOW_shared_hash(thing))
1222 hek_size(aTHX_ st, SvSHARED_HEK_FROM_PV(SvPVX(thing)), 1, NPathLink("SvSHARED_HEK_FROM_PV"));
1224 ADD_SIZE(st, "SvLEN", SvLEN(thing));
/* OOK: bytes chopped off the front of the buffer are still allocated */
1228 SvOOK_offset(thing, len);
1229 ADD_SIZE(st, "SvOOK", len);
/* NOTE(review): elided fragment. Teardown callback for a tracking state:
   closes the node stream opened by new_state(), using pclose() for pipe
   streams (name starts with '|') and fclose() for regular files, warning
   on failure in either case. */
1238 free_memnode_state(pTHX_ struct state *st)
1240 if (st->node_stream_fh && st->node_stream_name) {
/* leading '|' means the stream was opened with popen() */
1241 if (*st->node_stream_name == '|') {
1242 if (pclose(st->node_stream_fh))
1243 warn("%s exited with an error status\n", st->node_stream_name);
/* fclose() flushes; report errno so write failures are not silent */
1246 if (fclose(st->node_stream_fh))
1247 warn("Error closing %s: %s\n", st->node_stream_name, strerror(errno));
/* NOTE(review): elided fragment. Allocates and initialises a fresh
   sizing state: reads the $Devel::Size::warn and ::dangle control
   variables, pre-marks the immortal SVs as "seen" so they are never
   counted, and (under PATH_TRACKING) wires up the node-path output
   stream selected by the MEMNODES environment variable. */
1252 static struct state *
/* Newxz zero-fills, so all flags/pointers start NULL/false */
1258 Newxz(st, 1, struct state);
1260 st->min_recurse_threshold = TOTAL_SIZE_RECURSION;
1261 if (NULL != (warn_flag = perl_get_sv("Devel::Size::warn", FALSE))) {
1262 st->dangle_whine = st->go_yell = SvIV(warn_flag) ? TRUE : FALSE;
1264 if (NULL != (warn_flag = perl_get_sv("Devel::Size::dangle", FALSE))) {
1265 st->dangle_whine = SvIV(warn_flag) ? TRUE : FALSE;
/* immortals are shared interpreter-wide: mark seen so they're not sized */
1267 check_new(st, &PL_sv_undef);
1268 check_new(st, &PL_sv_no);
1269 check_new(st, &PL_sv_yes);
1270 #if PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 0)
1271 check_new(st, &PL_sv_placeholder);
1273 #ifdef PATH_TRACKING
1274 if (getenv("MEMNODES") && *getenv("MEMNODES")) { /* XXX quick hack */
1275 st->node_stream_name = getenv("MEMNODES");
/* '|cmd' streams into a pipe; anything else is a file path */
1276 if (*st->node_stream_name == '|')
1277 st->node_stream_fh = popen(st->node_stream_name+1, "w");
1279 st->node_stream_fh = fopen(st->node_stream_name, "wb");
1280 if (!st->node_stream_fh)
1281 croak("Can't open '%s' for writing: %s", st->node_stream_name, strerror(errno));
1282 st->add_attr_cb = np_stream_node_path_info;
/* no MEMNODES: fall back to the human-readable dumper */
1285 st->add_attr_cb = np_dump_node_path_info;
1286 st->free_state_cb = free_memnode_state;
/* NOTE(review): elided fragment. Sweeps every SV arena and sizes any
   live SV the main traversal missed — anything counted here is either a
   traversal gap or a genuine leak. Modelled on S_visit() in sv.c. */
1291 /* XXX based on S_visit() in sv.c */
1293 unseen_sv_size(pTHX_ struct state *st, pPATH)
1298 dNPathNodes(1, NPathArg);
1300 NPathPushNode("unseen", NPtype_NAME);
1302 /* by this point we should have visited all the SVs
1303 * so now we'll run through all the SVs via the arenas
1304 * in order to find any that we've missed for some reason.
1305 * Once the rest of the code is finding all the SVs then any
1306 * found here will be leaks.
/* arenas chain through SvANY of their first slot; SvREFCNT holds the count */
1308 for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
1309 const SV * const svend = &sva[SvREFCNT(sva)];
1311 for (sv = sva + 1; sv < svend; ++sv) {
/* live slot: type is not the freed-slot marker and refcount is non-zero */
1312 if (SvTYPE(sv) != (svtype)SVTYPEMASK && SvREFCNT(sv)) {
1313 sv_size(aTHX_ st, NPathLink(""), sv, TOTAL_SIZE_RECURSION);
1315 else if (check_new(st, sv)) { /* sanity check */
1316 warn("unseen_sv_size encountered freed SV unexpectedly");
/* NOTE(review): elided XS fragment. Start of the XSUB section; this
   entry implements Devel::Size::total_size() (the ALIAS ix selects the
   recursion mode passed to sv_size). */
1323 MODULE = Devel::Size PACKAGE = Devel::Size
1331 total_size = TOTAL_SIZE_RECURSION
1334 SV *thing = orig_thing;
1335 struct state *st = new_state(aTHX);
1337 /* If they passed us a reference then dereference it. This is the
1338 only way we can check the sizes of arrays and hashes */
1340 thing = SvRV(thing);
/* ix is the XS alias index, i.e. the requested recursion depth */
1343 sv_size(aTHX_ st, NULL, thing, ix);
1344 RETVAL = st->total_size;
/* NOTE(review): elided XS fragment implementing a whole-interpreter
   size-up. Walks everything reachable from %main:: plus the known
   interpreter-global roots, then accounts for the shared string table,
   free SV heads, any SVs the walk missed, and malloc slack. */
1354 struct state *st = new_state(aTHX);
1355 dNPathNodes(3, NULL);
/* force recursion everywhere: we want the whole interpreter */
1357 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1359 NPathPushNode("perl_size", NPtype_NAME); /* provide a root node */
1360 /* start with PL_defstash to get everything reachable from \%main:: */
1361 sv_size(aTHX_ st, NPathLink("PL_defstash"), (SV*)PL_defstash, TOTAL_SIZE_RECURSION);
1363 NPathPushNode("others", NPtype_NAME); /* group these (typically much smaller) items */
1364 sv_size(aTHX_ st, NPathLink("PL_defgv"), (SV*)PL_defgv, TOTAL_SIZE_RECURSION);
1365 sv_size(aTHX_ st, NPathLink("PL_incgv"), (SV*)PL_incgv, TOTAL_SIZE_RECURSION);
1366 sv_size(aTHX_ st, NPathLink("PL_rs"), (SV*)PL_rs, TOTAL_SIZE_RECURSION);
1367 sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1368 sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1369 sv_size(aTHX_ st, NPathLink("PL_errors"), (SV*)PL_errors, TOTAL_SIZE_RECURSION);
1370 sv_size(aTHX_ st, NPathLink("PL_stashcache"), (SV*)PL_stashcache, TOTAL_SIZE_RECURSION);
1371 sv_size(aTHX_ st, NPathLink("PL_patchlevel"), (SV*)PL_patchlevel, TOTAL_SIZE_RECURSION);
1372 sv_size(aTHX_ st, NPathLink("PL_apiversion"), (SV*)PL_apiversion, TOTAL_SIZE_RECURSION);
1373 sv_size(aTHX_ st, NPathLink("PL_registered_mros"), (SV*)PL_registered_mros, TOTAL_SIZE_RECURSION);
1375 sv_size(aTHX_ st, NPathLink("PL_regex_padav"), (SV*)PL_regex_padav, TOTAL_SIZE_RECURSION);
1377 sv_size(aTHX_ st, NPathLink("PL_warnhook"), (SV*)PL_warnhook, TOTAL_SIZE_RECURSION);
1378 sv_size(aTHX_ st, NPathLink("PL_diehook"), (SV*)PL_diehook, TOTAL_SIZE_RECURSION);
1379 sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1380 sv_size(aTHX_ st, NPathLink("PL_main_cv"), (SV*)PL_main_cv, TOTAL_SIZE_RECURSION);
1381 sv_size(aTHX_ st, NPathLink("PL_main_root"), (SV*)PL_main_root, TOTAL_SIZE_RECURSION);
1382 sv_size(aTHX_ st, NPathLink("PL_main_start"), (SV*)PL_main_start, TOTAL_SIZE_RECURSION);
1383 /* TODO PL_pidstatus */
1384 /* TODO PL_stashpad */
1385 /* TODO PL_compiling? COP */
1387 /* TODO stacks: cur, main, tmps, mark, scope, save */
1388 /* TODO PL_exitlist */
1389 /* TODO PL_reentrant_buffers etc */
1391 /* TODO PerlIO? PL_known_layers PL_def_layerlist PL_perlio_fd_refcnt etc */
1393 /* TODO anything missed? */
1395 /* --- by this point we should have seen all reachable SVs --- */
1397 /* in theory we shouldn't have any elements in PL_strtab that haven't been seen yet */
1398 sv_size(aTHX_ st, NPathLink("PL_strtab"), (SV*)PL_strtab, TOTAL_SIZE_RECURSION);
1400 /* unused space in sv head arenas */
1404 # define SvARENA_CHAIN(sv) SvANY(sv) /* XXX */
/* walk the free-head chain; each head should never have been seen before */
1405 while ((p = MUTABLE_SV(SvARENA_CHAIN(p)))) {
1406 if (!check_new(st, p)) /* sanity check */
1407 warn("Free'd SV head unexpectedly already seen");
1410 NPathPushNode("unused_sv_heads", NPtype_NAME);
1411 ADD_SIZE(st, "sv", free_heads * sizeof(SV));
1414 /* XXX iterate over bodies_by_type and crawl the free chains for each */
1416 /* iterate over all SVs to find any we've not accounted for yet */
1417 /* once the code above is visiting all SVs, any found here have been leaked */
1418 unseen_sv_size(aTHX_ st, NPathLink("unaccounted"));
/* with perl's own malloc, mstats() exposes allocator slack as well */
1421 struct mstats ms = mstats();
1422 NPathSetNode("unused malloc space", NPtype_NAME);
1423 ADD_SIZE(st, "bytes_free", ms.bytes_free);
1424 ADD_ATTR(st, NPattr_NOTE, "bytes_total", ms.bytes_total);
1425 ADD_ATTR(st, NPattr_NOTE, "bytes_used", ms.bytes_used);
1426 ADD_ATTR(st, NPattr_NOTE, "chunks_used", ms.chunks_used);
1427 ADD_ATTR(st, NPattr_NOTE, "chunks_free", ms.chunks_free);
1430 RETVAL = st->total_size;