/* Compatibility and configuration macros.
   NOTE(review): this chunk is a non-contiguous sample of the file — the
   leading integer on each line is the original file line number, and lines
   (including some #if/#ifdef openers) are missing between them. */
3 #undef NDEBUG /* XXX */
6 #define PERL_NO_GET_CONTEXT
/* Fallbacks for macros newer perls provide but ppport.h does not yet. */
13 /* Not yet in ppport.h */
15 # define CvISXSUB(cv) (CvXSUB(cv) ? TRUE : FALSE)
18 # define SvRV_const(rv) SvRV(rv)
21 # define SvOOK_offset(sv, len) STMT_START { len = SvIVX(sv); } STMT_END
24 # define SvIsCOW(sv) ((SvFLAGS(sv) & (SVf_FAKE | SVf_READONLY)) == \
25 (SVf_FAKE | SVf_READONLY))
27 #ifndef SvIsCOW_shared_hash
28 # define SvIsCOW_shared_hash(sv) (SvIsCOW(sv) && SvLEN(sv) == 0)
30 #ifndef SvSHARED_HEK_FROM_PV
31 # define SvSHARED_HEK_FROM_PV(pvx) \
32 ((struct hek*)(pvx - STRUCT_OFFSET(struct hek, hek_key)))
/* Very old perls name these core tables without the PL_ prefix. */
36 # define PL_opargs opargs
37 # define PL_op_name op_name
41 /* "structured exception" handling is a Microsoft extension to C and C++.
42 It's *not* C++ exception handling - C++ exception handling can't capture
43 SEGVs and suchlike, whereas this can. There's no known analogous
44 functionality on other platforms. */
46 # define TRY_TO_CATCH_SEGV __try
47 # define CAUGHT_EXCEPTION __except(EXCEPTION_EXECUTE_HANDLER)
/* Non-Microsoft fallback: the "try" arm always runs, the "catch" never. */
49 # define TRY_TO_CATCH_SEGV if(1)
50 # define CAUGHT_EXCEPTION else
54 # define __attribute__(x)
/* Debug printf plumbing; disabled ("#if 0") even on DEBUGGING builds. */
57 #if 0 && defined(DEBUGGING)
58 #define dbg_printf(x) printf x
63 #define TAG /* printf( "# %s(%d)\n", __FILE__, __LINE__ ) */
66 /* The idea is to have a tree structure to store 1 bit per possible pointer
67 address. The lowest 16 bits are stored in a block of 8192 bytes.
68 The blocks are in a 256-way tree, indexed by the rest of the pointer.
69 This can cope with 32 and 64 bit pointers, and any address space layout,
70 without excessive memory needs. The assumption is that your CPU cache
71 works :-) (And that we're not going to bust it) */
74 #define LEAF_BITS (16 - BYTE_BITS)
75 #define LEAF_MASK 0x1FFF
/* A node in the "name path": the chain of named links from the original
   size call down to the thing currently being measured. */
77 typedef struct npath_node_st npath_node_t;
78 struct npath_node_st {
93 /* My hunch (not measured) is that for most architectures pointers will
94 start with 0 bits, hence the start of this array will be hot, and the
95 end unused. So put the flags next to the hot end. */
/* NOTE(review): the fields below appear to belong to "struct state" (its
   opening line is not visible in this sample) — confirm against full file. */
97 int min_recurse_threshold;
98 /* callback hooks and data */
99 int (*add_attr_cb)(struct state *st, npath_node_t *npath_node, UV attr_type, const char *name, UV value);
100 void (*free_state_cb)(pTHX_ struct state *st);
101 void *state_cb_data; /* free'd by free_state() after free_state_cb() call */
102 /* this stuff will be moved to state_cb_data later */
104 FILE *node_stream_fh;
105 char *node_stream_name;
/* Record `bytes` against the current path node (optional callback first),
   then accumulate it into the running total. */
108 #define ADD_SIZE(st, leafname, bytes) (NPathAddSizeCb(st, leafname, bytes) (st)->total_size += (bytes))
110 #define PATH_TRACKING
/* Hidden parameter carrying the caller's current path node. */
113 #define pPATH npath_node_t *NPathArg
115 /* A subtle point here is that dNPathNodes and NPathPushNode leave NP pointing
116 * to the next unused slot (though with prev already filled in)
117 * whereas NPathLink leaves NP unchanged, it just fills in the slot NP points
118 * to and passes that NP value to the function being called.
119 * seqn==0 indicates the node is new (hasn't been output yet)
121 #define dNPathNodes(nodes, prev_np) \
122 npath_node_t name_path_nodes[nodes+1]; /* +1 for NPathLink */ \
123 npath_node_t *NP = &name_path_nodes[0]; \
124 NP->seqn = NP->type = 0; NP->id = Nullch; /* safety/debug */ \
126 #define NPathPushNode(nodeid, nodetype) \
128 NP->type = nodetype; \
130 if(0)fprintf(stderr,"NPathPushNode (%p <-) %p <- [%d %s]\n", NP->prev, NP, nodetype,(char*)nodeid);\
132 NP->id = Nullch; /* safety/debug */ \
/* Overwrite the most recently pushed node (NP-1) in place. */
135 #define NPathSetNode(nodeid, nodetype) \
136 (NP-1)->id = nodeid; \
137 (NP-1)->type = nodetype; \
138 if(0)fprintf(stderr,"NPathSetNode (%p <-) %p <- [%d %s]\n", (NP-1)->prev, (NP-1), nodetype,(char*)nodeid);\
140 #define NPathPopNode \
143 /* dNPathUseParent points NP directly at the parents' name_path_nodes array
144 * So the function can only safely call ADD_*() but not NPathLink, unless the
145 * caller has spare nodes in its name_path_nodes.
147 #define dNPathUseParent(prev_np) npath_node_t *NP = (((prev_np+1)->prev = prev_np), prev_np+1)
/* Node types: what kind of thing a path node identifies. */
149 #define NPtype_NAME 0x01
150 #define NPtype_LINK 0x02
151 #define NPtype_SV 0x03
152 #define NPtype_MAGIC 0x04
153 #define NPtype_OP 0x05
155 /* XXX these should probably be generalized into flag bits */
156 #define NPattr_LEAFSIZE 0x00
157 #define NPattr_NAME 0x01
158 #define NPattr_PADFAKE 0x02
159 #define NPattr_PADNAME 0x03
160 #define NPattr_PADTMP 0x04
161 #define NPattr_NOTE 0x05
162 #define NPattr_PRE_ATTR 0x06
/* Emit an attribute via the user callback, if one is registered. */
164 #define _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, np) (st->add_attr_cb && st->add_attr_cb(st, np, attr_type, attr_name, attr_value))
165 #define ADD_ATTR(st, attr_type, attr_name, attr_value) _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, NP-1)
166 #define ADD_PRE_ATTR(st, attr_type, attr_name, attr_value) (assert(!attr_type), _ADD_ATTR_NP(st, NPattr_PRE_ATTR, attr_name, attr_value, NP-1))
/* Fill in the slot NP points to (seqn=0 marks it not-yet-output). */
168 #define _NPathLink(np, nid, ntype) (((np)->id=nid), ((np)->type=ntype), ((np)->seqn=0))
169 #define NPathLink(nid) (_NPathLink(NP, nid, NPtype_LINK), NP)
170 /* add a link and a name node to the path - a special case for op_size */
171 #define NPathLinkAndNode(nid, nid2) (_NPathLink(NP, nid, NPtype_LINK), _NPathLink(NP+1, nid2, NPtype_NAME), ((NP+1)->prev=NP), (NP+1))
172 #define NPathOpLink (NPathArg)
173 #define NPathAddSizeCb(st, name, bytes) (st->add_attr_cb && st->add_attr_cb(st, NP-1, NPattr_LEAFSIZE, (name), (bytes))),
/* No-op variants used when PATH_TRACKING is disabled. */
177 #define NPathAddSizeCb(st, name, bytes)
178 #define pPATH void *npath_dummy /* XXX ideally remove */
179 #define dNPathNodes(nodes, prev_np) dNOOP
180 #define NPathLink(nodeid, nodetype) NULL
181 #define NPathOpLink NULL
182 #define ADD_ATTR(st, attr_type, attr_name, attr_value) NOOP
184 #endif /* PATH_TRACKING */
/* Human-readable name for each SV body type, indexed by SvTYPE().
   The SV type enumeration was reshuffled across perl versions, hence
   one table per version range (the #if openers are elided in this sample). */
191 static const char *svtypenames[SVt_LAST] = {
193 "NULL", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVBM", "PVLV", "PVAV", "PVHV", "PVCV", "PVGV", "PVFM", "PVIO",
194 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
195 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
196 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
197 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
198 #elif PERL_VERSION < 13
199 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
201 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
/* Print a compact one-token description of one path node to fp, dispatching
   on the node type (SV, OP, MAGIC, or plain string id). */
206 np_print_node_name(FILE *fp, npath_node_t *npath_node)
208 char buf[1024]; /* XXX */
210 switch (npath_node->type) {
211 case NPtype_SV: { /* id is pointer to the SV sv_size was called on */
212 const SV *sv = (SV*)npath_node->id;
213 int type = SvTYPE(sv);
214 char *typename = (type == SVt_IV && SvROK(sv)) ? "RV" : svtypenames[type];
215 fprintf(fp, "SV(%s)", typename);
216 switch(type) { /* add some useful details */
217 case SVt_PVAV: fprintf(fp, " fill=%d/%ld", av_len((AV*)sv), AvMAX((AV*)sv)); break;
218 case SVt_PVHV: fprintf(fp, " fill=%ld/%ld", HvFILL((HV*)sv), HvMAX((HV*)sv)); break;
222 case NPtype_OP: { /* id is pointer to the OP op_size was called on */
223 const OP *op = (OP*)npath_node->id;
224 fprintf(fp, "OP(%s)", OP_NAME(op));
227 case NPtype_MAGIC: { /* id is pointer to the MAGIC struct */
228 MAGIC *magic_pointer = (MAGIC*)npath_node->id;
229 /* XXX it would be nice if we could reuse mg_names.c [sigh] */
230 fprintf(fp, "MAGIC(%c)", magic_pointer->mg_type ? magic_pointer->mg_type : '0');
/* NOTE(review): the two cases below (presumably NPtype_LINK and
   NPtype_NAME) print id as a string; their case labels are elided here. */
234 fprintf(fp, "%s", npath_node->id);
237 fprintf(fp, "%s", npath_node->id);
239 default: /* assume id is a string pointer */
240 fprintf(fp, "UNKNOWN(%d,%p)", npath_node->type, npath_node->id);
/* Emit `depth` indentation units (": ") to stderr; the loop over depth
   is elided in this sample. */
247 np_dump_indent(int depth) {
249 fprintf(stderr, ": ");
/* Walk from npath_node up to the path root and invoke cb once for every
   node not yet output (seqn == 0), parents before children. Each visited
   node gets a fresh sequence number and depth. If cb returns true the node
   is "unwound": its seqn is given back and the deeper node is re-parented
   past it, so it is skipped from the output. */
253 np_walk_new_nodes(struct state *st,
254 npath_node_t *npath_node,
255 npath_node_t *npath_node_deeper,
256 int (*cb)(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper))
258 if (npath_node->seqn) /* node already output */
261 if (npath_node->prev) {
262 np_walk_new_nodes(st, npath_node->prev, npath_node, cb); /* recurse */
263 npath_node->depth = npath_node->prev->depth + 1;
265 else npath_node->depth = 0;
266 npath_node->seqn = ++st->seqn;
269 if (cb(st, npath_node, npath_node_deeper)) {
270 /* ignore this node */
271 assert(npath_node->prev);
272 assert(npath_node->depth);
273 assert(npath_node_deeper);
/* undo the seqn assignment and splice this node out of the chain */
275 npath_node->seqn = --st->seqn;
276 npath_node_deeper->prev = npath_node->prev;
/* np_walk_new_nodes() callback: pretty-print one node to stderr with
   indentation, a "->" marker for link nodes, and its seqn/depth. */
284 np_dump_formatted_node(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
285 if (0 && npath_node->type == NPtype_LINK)
287 np_dump_indent(npath_node->depth);
288 np_print_node_name(stderr, npath_node);
289 if (npath_node->type == NPtype_LINK)
290 fprintf(stderr, "->"); /* cosmetic */
291 fprintf(stderr, "\t\t[#%ld @%u] ", npath_node->seqn, npath_node->depth);
292 fprintf(stderr, "\n");
/* add_attr_cb implementation for the human-readable stderr dump: first
   flushes any not-yet-printed path nodes, then prints this attribute
   (the switch opener and case labels are partly elided in this sample). */
297 np_dump_node_path_info(struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
299 if (attr_type == NPattr_LEAFSIZE && !attr_value)
300 return 0; /* ignore zero sized leaf items */
301 np_walk_new_nodes(st, npath_node, NULL, np_dump_formatted_node)
302 np_dump_indent(npath_node->depth+1);
304 case NPattr_LEAFSIZE:
305 fprintf(stderr, "+%ld %s =%ld", attr_value, attr_name, attr_value+st->total_size);
308 fprintf(stderr, "~NAMED('%s') %lu", attr_name, attr_value);
311 fprintf(stderr, "~note %s %lu", attr_name, attr_value);
316 fprintf(stderr, "~pad%lu %s %lu", attr_type, attr_name, attr_value);
319 fprintf(stderr, "~??? %s %lu", attr_name, attr_value);
322 fprintf(stderr, "\n");
/* np_walk_new_nodes() callback for the machine-readable stream: one
   "-type seqn depth name" line per node on st->node_stream_fh. */
327 np_stream_formatted_node(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
328 fprintf(st->node_stream_fh, "-%u %lu %u ",
329 npath_node->type, npath_node->seqn, (unsigned)npath_node->depth
331 np_print_node_name(st->node_stream_fh, npath_node);
332 fprintf(st->node_stream_fh, "\n");
/* add_attr_cb implementation for the stream output: flush pending nodes,
   then emit either an attribute record ("type seqn ...") or a leaf-size
   record ("L seqn bytes name"). */
337 np_stream_node_path_info(struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
339 if (!attr_type && !attr_value)
340 return 0; /* ignore zero sized leaf items */
341 np_walk_new_nodes(st, npath_node, NULL, np_stream_formatted_node);
342 if (attr_type) { /* Attribute type, name and value */
343 fprintf(st->node_stream_fh, "%lu %lu ", attr_type, npath_node->seqn);
345 else { /* Leaf name and memory size */
346 fprintf(st->node_stream_fh, "L %lu ", npath_node->seqn);
348 fprintf(st->node_stream_fh, "%lu %s\n", attr_value, attr_name);
/* Core deduplication primitive.
357 Checks to see if thing is in the bitstring.
358 Returns true or false, and
359 notes thing in the segmented bitstring.
   Returns TRUE the first time a pointer is seen (marking it seen), FALSE
   for NULL or already-seen pointers. Probes one byte of the target under
   TRY_TO_CATCH_SEGV to detect dangling pointers where possible. */
362 check_new(struct state *st, const void *const p) {
363 unsigned int bits = 8 * sizeof(void*);
364 const size_t raw_p = PTR2nat(p);
365 /* This effectively rotates the value right by the number of low always-0
366 bits in an aligned pointer. The assumption is that most (if not all)
367 pointers are aligned, and these will be in the same chain of nodes
368 (and hence hot in the cache) but we can still deal with any unaligned
370 const size_t cooked_p
371 = (raw_p >> ALIGN_BITS) | (raw_p << (bits - ALIGN_BITS));
372 const U8 this_bit = 1 << (cooked_p & 0x7);
376 void **tv_p = (void **) (st->tracking);
378 if (NULL == p) return FALSE;
/* deliberately read one byte to provoke a fault on a bad pointer */
380 const char c = *(const char *)p;
383 if (st->dangle_whine)
384 warn( "Devel::Size: Encountered invalid pointer: %p\n", p );
390 /* bits now 24 (32 bit pointers) or 56 (64 bit pointers) */
392 /* First level is always present. */
/* walk/extend the 256-way tree, consuming 8 address bits per level */
394 i = (unsigned int)((cooked_p >> bits) & 0xFF);
396 Newxz(tv_p[i], 256, void *);
397 tv_p = (void **)(tv_p[i]);
399 } while (bits > LEAF_BITS + BYTE_BITS);
400 /* bits now 16 always */
401 #if !defined(MULTIPLICITY) || PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8)
402 /* 5.8.8 and earlier have an assert() macro that uses Perl_croak, hence needs
403 a my_perl under multiplicity */
406 leaf_p = (U8 **)tv_p;
407 i = (unsigned int)((cooked_p >> bits) & 0xFF);
409 Newxz(leaf_p[i], 1 << LEAF_BITS, U8);
/* finally test-and-set the single bit for this pointer in the leaf */
414 i = (unsigned int)((cooked_p >> BYTE_BITS) & LEAF_MASK);
416 if(leaf[i] & this_bit)
/* Recursively free one level of the 256-way pointer-tracking tree. */
424 free_tracking_at(void **tv, int level)
432 free_tracking_at((void **) tv[i], level);
/* Tear down a struct state: run the user's cleanup callback (if any),
   free its callback data, then free the whole tracking tree. */
446 free_state(struct state *st)
448 const int top_level = (sizeof(void *) * 8 - LEAF_BITS - BYTE_BITS) / 8;
449 if (st->free_state_cb)
450 st->free_state_cb(st);
451 if (st->state_cb_data)
452 Safefree(st->state_cb_data);
453 free_tracking_at((void **)st->tracking, top_level);
457 /* For now, this is somewhat a compatibility bodge until the plan comes
458 together for fine grained recursion control. total_size() would recurse into
459 hash and array members, whereas sv_size() would not. However, sv_size() is
460 called with CvSTASH() of a CV, which means that if it (also) starts to
461 recurse fully, then the size of any CV now becomes the size of the entire
462 symbol table reachable from it, and potentially the entire symbol table, if
463 any subroutine makes a reference to a global (such as %SIG). The historical
464 implementation of total_size() didn't report "everything", and changing the
465 only available size to "everything" doesn't feel at all useful. */
/* Recursion levels passed as the `recurse` argument throughout. */
467 #define NO_RECURSION 0
468 #define SOME_RECURSION 1
469 #define TOTAL_SIZE_RECURSION 2
471 static void sv_size(pTHX_ struct state *, pPATH, const SV *const, const int recurse);
/* fragment of the opclass enum (surrounding entries elided in this sample) */
487 , OPc_CONDOP /* 12 */
/* Classify an OP into its structural class (OPc_BASEOP, OPc_UNOP, ...)
   so op_size() knows which struct size to charge and which child pointers
   to follow. Mirrors B.xs's cc_opclass. */
496 cc_opclass(const OP * const o)
502 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
504 if (o->op_type == OP_SASSIGN)
505 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
508 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_AELEMFAST)
512 if ((o->op_type == OP_TRANS)) {
516 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
548 #ifdef OA_PVOP_OR_SVOP
549 case OA_PVOP_OR_SVOP: TAG;
551 * Character translations (tr///) are usually a PVOP, keeping a
552 * pointer to a table of shorts used to look up translations.
553 * Under utf8, however, a simple table isn't practical; instead,
554 * the OP is an SVOP, and the SV is a reference to a swash
555 * (i.e., an RV pointing to an HV).
557 return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF))
558 ? OPc_SVOP : OPc_PVOP;
567 case OA_BASEOP_OR_UNOP: TAG;
569 * UNI(OP_foo) in toke.c returns token UNI or FUNC1 depending on
570 * whether parens were seen. perly.y uses OPf_SPECIAL to
571 * signal whether a BASEOP had empty parens or none.
572 * Some other UNOPs are created later, though, so the best
573 * test is OPf_KIDS, which is set in newUNOP.
575 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
577 case OA_FILESTATOP: TAG;
579 * The file stat OPs are created via UNI(OP_foo) in toke.c but use
580 * the OPf_REF flag to distinguish between OP types instead of the
581 * usual OPf_SPECIAL flag. As usual, if OPf_KIDS is set, then we
582 * return OPc_UNOP so that walkoptree can find our children. If
583 * OPf_KIDS is not set then we check OPf_REF. Without OPf_REF set
584 * (no argument to the operator) it's an OP; with OPf_REF set it's
585 * an SVOP (and op_sv is the GV for the filehandle argument).
587 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
589 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
591 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
593 case OA_LOOPEXOP: TAG;
595 * next, last, redo, dump and goto use OPf_SPECIAL to indicate that a
596 * label was omitted (in which case it's a BASEOP) or else a term was
597 * seen. In this last case, all except goto are definitely PVOP but
598 * goto is either a PVOP (with an ordinary constant label), an UNOP
599 * with OPf_STACKED (with a non-constant non-sub) or an UNOP for
600 * OP_REFGEN (with goto &sub) in which case OPf_STACKED also seems to
603 if (o->op_flags & OPf_STACKED)
605 else if (o->op_flags & OPf_SPECIAL)
/* fall-through default: unknown class, warn and assume BASEOP */
615 warn("Devel::Size: Can't determine class of operator %s, assuming BASEOP\n",
616 PL_op_name[o->op_type]);
622 /* Figure out how much magic is attached to the SV and return the
   size of each MAGIC struct in the mg_moremagic chain, plus the SVs and
   buffers it owns (mg_obj, mg_ptr). Vtables are static and not counted. */
625 magic_size(pTHX_ const SV * const thing, struct state *st, pPATH) {
626 dNPathNodes(1, NPathArg);
627 MAGIC *magic_pointer = SvMAGIC(thing);
/* an SV with a magic chain but no SvMAGICAL flag looks corrupted */
632 if (!SvMAGICAL(thing)) {
634 warn("Ignoring suspect magic on this SV\n");
640 /* push a dummy node for NPathSetNode to update inside the while loop */
641 NPathPushNode("dummy", NPtype_NAME);
643 /* Have we seen the magic pointer? (NULL has always been seen before) */
644 while (check_new(st, magic_pointer)) {
646 NPathSetNode(magic_pointer, NPtype_MAGIC);
648 ADD_SIZE(st, "mg", sizeof(MAGIC));
649 /* magic vtables aren't freed when magic is freed, so don't count them.
650 (They are static structures. Anything that assumes otherwise is buggy.)
655 /* XXX only chase mg_obj if mg->mg_flags & MGf_REFCOUNTED ? */
656 sv_size(aTHX_ st, NPathLink("mg_obj"), magic_pointer->mg_obj, TOTAL_SIZE_RECURSION);
657 if (magic_pointer->mg_len == HEf_SVKEY) {
658 sv_size(aTHX_ st, NPathLink("mg_ptr"), (SV *)magic_pointer->mg_ptr, TOTAL_SIZE_RECURSION);
660 #if defined(PERL_MAGIC_utf8) && defined (PERL_MAGIC_UTF8_CACHESIZE)
661 else if (magic_pointer->mg_type == PERL_MAGIC_utf8) {
662 if (check_new(st, magic_pointer->mg_ptr)) {
663 ADD_SIZE(st, "PERL_MAGIC_utf8", PERL_MAGIC_UTF8_CACHESIZE * 2 * sizeof(STRLEN));
667 /* XXX also handle mg->mg_type == PERL_MAGIC_utf8 ? */
668 else if (magic_pointer->mg_len > 0) {
669 if(0)do_magic_dump(0, Perl_debug_log, magic_pointer, 0, 0, FALSE, 0);
670 if (check_new(st, magic_pointer->mg_ptr)) {
671 ADD_SIZE(st, "mg_len", magic_pointer->mg_len);
675 /* Get the next in the chain */
676 magic_pointer = magic_pointer->mg_moremagic;
/* CAUGHT_EXCEPTION arm (opener elided): bad pointer while chasing chain */
679 if (st->dangle_whine)
680 warn( "Devel::Size: Encountered bad magic at: %p\n", magic_pointer );
/* If the NUL-terminated string p has not been counted before, charge its
   length (plus the terminator) to the path node named by the caller. */
686 check_new_and_strlen(struct state *st, const char *const p, pPATH) {
687 dNPathNodes(1, NPathArg->prev);
688 if(check_new(st, p)) {
689 NPathPushNode(NPathArg->id, NPtype_NAME);
690 ADD_SIZE(st, NPathArg->id, 1 + strlen(p));
/* Account (approximately) for a compiled regexp; layout differs before and
   after 5.11, and the internal program size is not reachable, hence the
   one-time warning about incomplete sizes. */
695 regex_size(const REGEXP * const baseregex, struct state *st, pPATH) {
696 dNPathNodes(1, NPathArg);
697 if(!check_new(st, baseregex))
699 NPathPushNode("regex_size", NPtype_NAME);
700 ADD_SIZE(st, "REGEXP", sizeof(REGEXP));
701 #if (PERL_VERSION < 11)
702 /* Note the size of the paren offset thing */
703 ADD_SIZE(st, "nparens", sizeof(I32) * baseregex->nparens * 2);
704 ADD_SIZE(st, "precomp", strlen(baseregex->precomp));
706 ADD_SIZE(st, "regexp", sizeof(struct regexp));
707 ADD_SIZE(st, "nparens", sizeof(I32) * SvANY(baseregex)->nparens * 2);
708 /*ADD_SIZE(st, strlen(SvANY(baseregex)->subbeg));*/
710 if (st->go_yell && !st->regex_whine) {
711 carp("Devel::Size: Calculated sizes for compiled regexes are incompatible, and probably always will be");
/* Size an optree: charge the struct for this OP's class, then recurse
   through op_next and the class-specific child pointers. */
717 op_size(pTHX_ const OP * const baseop, struct state *st, pPATH)
719 /* op_size recurses to follow the chain of opcodes. For the node path we
720 * don't want the chain to be 'nested' in the path so we use dNPathUseParent().
721 * Also, to avoid a link-to-a-link the caller should use NPathLinkAndNode()
722 * instead of NPathLink().
724 dNPathUseParent(NPathArg);
728 if(!check_new(st, baseop))
731 op_size(aTHX_ baseop->op_next, st, NPathOpLink);
733 switch (cc_opclass(baseop)) {
734 case OPc_BASEOP: TAG;
735 ADD_SIZE(st, "op", sizeof(struct op));
738 ADD_SIZE(st, "unop", sizeof(struct unop));
739 op_size(aTHX_ ((UNOP *)baseop)->op_first, st, NPathOpLink);
742 ADD_SIZE(st, "binop", sizeof(struct binop));
743 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
744 op_size(aTHX_ ((BINOP *)baseop)->op_last, st, NPathOpLink);
747 ADD_SIZE(st, "logop", sizeof(struct logop));
748 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
749 op_size(aTHX_ ((LOGOP *)baseop)->op_other, st, NPathOpLink);
752 case OPc_CONDOP: TAG;
753 ADD_SIZE(st, "condop", sizeof(struct condop));
754 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
755 op_size(aTHX_ ((CONDOP *)baseop)->op_true, st, NPathOpLink);
756 op_size(aTHX_ ((CONDOP *)baseop)->op_false, st, NPathOpLink);
759 case OPc_LISTOP: TAG;
760 ADD_SIZE(st, "listop", sizeof(struct listop));
761 op_size(aTHX_ ((LISTOP *)baseop)->op_first, st, NPathOpLink);
762 op_size(aTHX_ ((LISTOP *)baseop)->op_last, st, NPathOpLink);
765 ADD_SIZE(st, "pmop", sizeof(struct pmop));
766 op_size(aTHX_ ((PMOP *)baseop)->op_first, st, NPathOpLink);
767 op_size(aTHX_ ((PMOP *)baseop)->op_last, st, NPathOpLink);
768 #if PERL_VERSION < 9 || (PERL_VERSION == 9 && PERL_SUBVERSION < 5)
769 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplroot, st, NPathOpLink);
770 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplstart, st, NPathOpLink);
772 /* This is defined away in perl 5.8.x, but it is in there for
775 regex_size(PM_GETRE((PMOP *)baseop), st, NPathLink("PM_GETRE"));
777 regex_size(((PMOP *)baseop)->op_pmregexp, st, NPathLink("op_pmregexp"));
781 ADD_SIZE(st, "svop", sizeof(struct svop));
782 if (!(baseop->op_type == OP_AELEMFAST
783 && baseop->op_flags & OPf_SPECIAL)) {
784 /* not an OP_PADAV replacement */
785 sv_size(aTHX_ st, NPathLink("SVOP"), ((SVOP *)baseop)->op_sv, SOME_RECURSION);
790 ADD_SIZE(st, "padop", sizeof(struct padop));
795 ADD_SIZE(st, "gvop", sizeof(struct gvop));
796 sv_size(aTHX_ st, NPathLink("GVOP"), ((GVOP *)baseop)->op_gv, SOME_RECURSION);
800 check_new_and_strlen(st, ((PVOP *)baseop)->op_pv, NPathLink("op_pv"));
803 ADD_SIZE(st, "loop", sizeof(struct loop));
804 op_size(aTHX_ ((LOOP *)baseop)->op_first, st, NPathOpLink);
805 op_size(aTHX_ ((LOOP *)baseop)->op_last, st, NPathOpLink);
806 op_size(aTHX_ ((LOOP *)baseop)->op_redoop, st, NPathOpLink);
807 op_size(aTHX_ ((LOOP *)baseop)->op_nextop, st, NPathOpLink);
808 op_size(aTHX_ ((LOOP *)baseop)->op_lastop, st, NPathOpLink);
813 basecop = (COP *)baseop;
814 ADD_SIZE(st, "cop", sizeof(struct cop));
816 /* Change 33656 by nicholas@mouse-mill on 2008/04/07 11:29:51
817 Eliminate cop_label from struct cop by storing a label as the first
818 entry in the hints hash. Most statements don't have labels, so this
819 will save memory. Not sure how much.
820 The check below will incorrectly fail on bleadperls
821 before 5.11 @33656, but later than 5.10, producing slightly too
822 small memory sizes on these Perls. */
823 #if (PERL_VERSION < 11)
824 check_new_and_strlen(st, basecop->cop_label, NPathLink("cop_label"));
827 check_new_and_strlen(st, basecop->cop_file, NPathLink("cop_file"));
828 check_new_and_strlen(st, basecop->cop_stashpv, NPathLink("cop_stashpv"));
830 if (SvREFCNT(basecop->cop_stash) == 1) /* XXX hack? */
831 sv_size(aTHX_ st, NPathLink("cop_stash"), (SV *)basecop->cop_stash, SOME_RECURSION);
832 sv_size(aTHX_ st, NPathLink("cop_filegv"), (SV *)basecop->cop_filegv, SOME_RECURSION);
/* CAUGHT_EXCEPTION arm (opener elided): dangling op pointer */
842 if (st->dangle_whine)
843 warn( "Devel::Size: Encountered dangling pointer in opcode at: %p\n", baseop );
/* Size a hash-entry key (HEK). Shared keys also carry the shared_he/HE
   bookkeeping overhead; the `shared` flag selects that extra charge. */
848 hek_size(pTHX_ struct state *st, HEK *hek, U32 shared, pPATH)
850 dNPathNodes(1, NPathArg);
852 /* Hash keys can be shared. Have we seen this before? */
853 if (!check_new(st, hek))
855 NPathPushNode("hek", NPtype_NAME);
856 ADD_SIZE(st, "hek_len", HEK_BASESIZE + hek->hek_len
858 + 1 /* No hash key flags prior to 5.8.0 */
864 #if PERL_VERSION < 10
865 ADD_SIZE(st, "he", sizeof(struct he));
867 ADD_SIZE(st, "shared_he", STRUCT_OFFSET(struct shared_he, shared_he_hek));
/* Per-SV-type body sizes, one table per perl version range.
   Under Purify the allocation tricks (shared arenas, negative offsets)
   are disabled, hence MAYBE_PURIFY/MAYBE_OFFSET pick the plain layout. */
873 #if PERL_VERSION < 8 || PERL_SUBVERSION < 9
878 # define MAYBE_PURIFY(normal, pure) (pure)
879 # define MAYBE_OFFSET(struct_name, member) 0
881 # define MAYBE_PURIFY(normal, pure) (normal)
882 # define MAYBE_OFFSET(struct_name, member) STRUCT_OFFSET(struct_name, member)
885 const U8 body_sizes[SVt_LAST] = {
888 MAYBE_PURIFY(sizeof(IV), sizeof(XPVIV)), /* SVt_IV */
889 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
890 sizeof(XRV), /* SVt_RV */
891 sizeof(XPV), /* SVt_PV */
892 sizeof(XPVIV), /* SVt_PVIV */
893 sizeof(XPVNV), /* SVt_PVNV */
894 sizeof(XPVMG), /* SVt_PVMG */
895 sizeof(XPVBM), /* SVt_PVBM */
896 sizeof(XPVLV), /* SVt_PVLV */
897 sizeof(XPVAV), /* SVt_PVAV */
898 sizeof(XPVHV), /* SVt_PVHV */
899 sizeof(XPVCV), /* SVt_PVCV */
900 sizeof(XPVGV), /* SVt_PVGV */
901 sizeof(XPVFM), /* SVt_PVFM */
902 sizeof(XPVIO) /* SVt_PVIO */
903 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
907 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
909 MAYBE_PURIFY(sizeof(xpv_allocated), sizeof(XPV)), /* SVt_PV */
910 MAYBE_PURIFY(sizeof(xpviv_allocated), sizeof(XPVIV)),/* SVt_PVIV */
911 sizeof(XPVNV), /* SVt_PVNV */
912 sizeof(XPVMG), /* SVt_PVMG */
913 sizeof(XPVGV), /* SVt_PVGV */
914 sizeof(XPVLV), /* SVt_PVLV */
915 MAYBE_PURIFY(sizeof(xpvav_allocated), sizeof(XPVAV)),/* SVt_PVAV */
916 MAYBE_PURIFY(sizeof(xpvhv_allocated), sizeof(XPVHV)),/* SVt_PVHV */
917 MAYBE_PURIFY(sizeof(xpvcv_allocated), sizeof(XPVCV)),/* SVt_PVCV */
918 MAYBE_PURIFY(sizeof(xpvfm_allocated), sizeof(XPVFM)),/* SVt_PVFM */
919 sizeof(XPVIO), /* SVt_PVIO */
920 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
924 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
926 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
927 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
928 sizeof(XPVNV), /* SVt_PVNV */
929 sizeof(XPVMG), /* SVt_PVMG */
930 sizeof(XPVGV), /* SVt_PVGV */
931 sizeof(XPVLV), /* SVt_PVLV */
932 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
933 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
934 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
935 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
936 sizeof(XPVIO) /* SVt_PVIO */
937 #elif PERL_VERSION < 13
941 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
942 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
943 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
944 sizeof(XPVNV), /* SVt_PVNV */
945 sizeof(XPVMG), /* SVt_PVMG */
946 sizeof(regexp) - MAYBE_OFFSET(regexp, xpv_cur), /* SVt_REGEXP */
947 sizeof(XPVGV), /* SVt_PVGV */
948 sizeof(XPVLV), /* SVt_PVLV */
949 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
950 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
951 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
952 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
953 sizeof(XPVIO) /* SVt_PVIO */
958 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
959 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
960 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
961 sizeof(XPVNV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVNV */
962 sizeof(XPVMG), /* SVt_PVMG */
963 sizeof(regexp), /* SVt_REGEXP */
964 sizeof(XPVGV), /* SVt_PVGV */
965 sizeof(XPVLV), /* SVt_PVLV */
966 sizeof(XPVAV), /* SVt_PVAV */
967 sizeof(XPVHV), /* SVt_PVHV */
968 sizeof(XPVCV), /* SVt_PVCV */
969 sizeof(XPVFM), /* SVt_PVFM */
970 sizeof(XPVIO) /* SVt_PVIO */
975 /* based on Perl_do_dump_pad() - wraps sv_size and adds ADD_ATTR calls for the pad names */
977 padlist_size(pTHX_ struct state *const st, pPATH, PADLIST *padlist,
980 dNPathUseParent(NPathArg);
987 if( 0 && !check_new(st, padlist))
/* element 0 of the padlist AV is the AV of pad-name SVs */
990 pad_name = MUTABLE_AV(*av_fetch(MUTABLE_AV(padlist), 0, FALSE));
991 pname = AvARRAY(pad_name);
993 for (ix = 1; ix <= AvFILLp(pad_name); ix++) {
994 const SV *namesv = pname[ix];
995 if (namesv && namesv == &PL_sv_undef) {
999 /* SvFAKE: On a pad name SV, that slot in the frame AV is a REFCNT'ed reference to a lexical from "outside" */
1001 ADD_ATTR(st, NPattr_PADFAKE, SvPVX_const(namesv), ix);
1003 ADD_ATTR(st, NPattr_PADNAME, SvPVX_const(namesv), ix);
1006 ADD_ATTR(st, NPattr_PADTMP, "SVs_PADTMP", ix);
/* finally size the padlist itself as an ordinary AV */
1010 sv_size(aTHX_ st, NPathArg, (SV*)padlist, recurse);
1015 sv_size(pTHX_ struct state *const st, pPATH, const SV * const orig_thing,
1016 const int recurse) {
1017 const SV *thing = orig_thing;
1018 dNPathNodes(3, NPathArg);
1021 if(!check_new(st, orig_thing))
1024 type = SvTYPE(thing);
1025 if (type > SVt_LAST) {
1026 warn("Devel::Size: Unknown variable type: %d encountered\n", type);
1029 NPathPushNode(thing, NPtype_SV);
1030 ADD_SIZE(st, "sv", sizeof(SV) + body_sizes[type]);
1033 #if (PERL_VERSION < 11)
1034 /* Is it a reference? */
1039 if(recurse && SvROK(thing)) /* XXX maybe don't follow weakrefs */
1040 sv_size(aTHX_ st, (SvWEAKREF(thing) ? NPathLink("weakRV") : NPathLink("RV")), SvRV_const(thing), recurse);
1044 /* Is there anything in the array? */
1045 if (AvMAX(thing) != -1) {
1046 /* an array with 10 slots has AvMax() set to 9 - te 2007-04-22 */
1047 ADD_SIZE(st, "av_max", sizeof(SV *) * (AvMAX(thing) + 1));
1048 dbg_printf(("total_size: %li AvMAX: %li av_len: $i\n", st->total_size, AvMAX(thing), av_len((AV*)thing)));
1050 if (recurse >= st->min_recurse_threshold) {
1051 SSize_t i = AvFILLp(thing) + 1;
1054 ADD_PRE_ATTR(st, 0, "index", i);
1055 sv_size(aTHX_ st, NPathLink("AVelem"), AvARRAY(thing)[i], recurse);
1059 /* Add in the bits on the other side of the beginning */
1061 dbg_printf(("total_size %li, sizeof(SV *) %li, AvARRAY(thing) %li, AvALLOC(thing)%li , sizeof(ptr) %li \n",
1062 st->total_size, sizeof(SV*), AvARRAY(thing), AvALLOC(thing), sizeof( thing )));
1064 /* under Perl 5.8.8 64bit threading, AvARRAY(thing) was a pointer while AvALLOC was 0,
1065 resulting in grossly overstated sized for arrays. Technically, this shouldn't happen... */
1066 if (AvALLOC(thing) != 0) {
1067 ADD_SIZE(st, "AvALLOC", (sizeof(SV *) * (AvARRAY(thing) - AvALLOC(thing))));
1069 #if (PERL_VERSION < 9)
1070 /* Is there something hanging off the arylen element?
1071 Post 5.9.something this is stored in magic, so will be found there,
1072 and Perl_av_arylen_p() takes a non-const AV*, hence compilers rightly
1073 complain about AvARYLEN() passing thing to it. */
1074 sv_size(aTHX_ st, NPathLink("ARYLEN"), AvARYLEN(thing), recurse);
1079 /* Now the array of buckets */
1080 ADD_SIZE(st, "hv_max", (sizeof(HE *) * (HvMAX(thing) + 1)));
1081 if (HvENAME(thing)) {
1082 ADD_ATTR(st, NPattr_NAME, HvENAME(thing), 0);
1084 /* Now walk the bucket chain */
1085 if (HvARRAY(thing)) {
1088 for (cur_bucket = 0; cur_bucket <= HvMAX(thing); cur_bucket++) {
1089 cur_entry = *(HvARRAY(thing) + cur_bucket);
1091 /* XXX a HE should probably be a node so the keys and values are seen as pairs */
1092 ADD_SIZE(st, "he", sizeof(HE));
1093 hek_size(aTHX_ st, cur_entry->hent_hek, HvSHAREKEYS(thing), NPathLink("hent_hek"));
1094 if (recurse >= st->min_recurse_threshold) {
1095 /* I've seen a PL_strtab HeVAL == 0xC and 0x40C etc
1096 * just running perl -Mblib -Mstrict -MDevel::Size=:all -MCarp -e 'warn perl_size()'
1097 * but it seemed like a corruption - it would change come and go with irrelevant code changes.
1098 * so we protect against that here, but I'd like to know the cause.
1100 if (PTR2UV(HeVAL(cur_entry)) > 0xFFF)
1101 sv_size(aTHX_ st, NPathLink("HeVAL"), HeVAL(cur_entry), recurse);
1102 else warn("skipped suspect HeVAL %p", HeVAL(cur_entry));
1104 cur_entry = cur_entry->hent_next;
1110 /* This direct access is arguably "naughty": */
1111 struct mro_meta *meta = HvAUX(thing)->xhv_mro_meta;
1112 #if PERL_VERSION > 13 || PERL_SUBVERSION > 8
1114 I32 count = HvAUX(thing)->xhv_name_count;
1117 HEK **names = HvAUX(thing)->xhv_name_u.xhvnameu_names;
1121 hek_size(aTHX_ st, names[count], 1, NPathLink("HvAUXelem"));
1126 hek_size(aTHX_ st, HvNAME_HEK(thing), 1, NPathLink("HvNAME_HEK"));
1129 ADD_SIZE(st, "xpvhv_aux", sizeof(struct xpvhv_aux));
1131 ADD_SIZE(st, "mro_meta", sizeof(struct mro_meta));
1132 sv_size(aTHX_ st, NPathLink("mro_nextmethod"), (SV *)meta->mro_nextmethod, TOTAL_SIZE_RECURSION);
1133 #if PERL_VERSION > 10 || (PERL_VERSION == 10 && PERL_SUBVERSION > 0)
1134 sv_size(aTHX_ st, NPathLink("isa"), (SV *)meta->isa, TOTAL_SIZE_RECURSION);
1136 #if PERL_VERSION > 10
1137 sv_size(aTHX_ st, NPathLink("mro_linear_all"), (SV *)meta->mro_linear_all, TOTAL_SIZE_RECURSION);
1138 sv_size(aTHX_ st, NPathLink("mro_linear_current"), meta->mro_linear_current, TOTAL_SIZE_RECURSION);
1140 sv_size(aTHX_ st, NPathLink("mro_linear_dfs"), (SV *)meta->mro_linear_dfs, TOTAL_SIZE_RECURSION);
1141 sv_size(aTHX_ st, NPathLink("mro_linear_c3"), (SV *)meta->mro_linear_c3, TOTAL_SIZE_RECURSION);
1146 check_new_and_strlen(st, HvNAME_get(thing), NPathLink("HvNAME"));
1152 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1153 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1155 if (st->go_yell && !st->fm_whine) {
1156 carp("Devel::Size: Calculated sizes for FMs are incomplete");
1162 /* not CvSTASH, per https://rt.cpan.org/Ticket/Display.html?id=79366 */
1163 ADD_ATTR(st, NPattr_NAME, CvGV(thing) ? GvNAME(CvGV(thing)) : "UNDEFINED", 0);
1164 sv_size(aTHX_ st, NPathLink("SvSTASH"), (SV *)SvSTASH(thing), SOME_RECURSION);
1165 sv_size(aTHX_ st, NPathLink("CvGV"), (SV *)CvGV(thing), SOME_RECURSION);
1166 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1167 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1168 if (CvISXSUB(thing)) {
1169 sv_size(aTHX_ st, NPathLink("cv_const_sv"), cv_const_sv((CV *)thing), recurse);
1171 if(1)op_size(aTHX_ CvSTART(thing), st, NPathLinkAndNode("CvSTART", "OPs")); /* XXX ? */
1172 op_size(aTHX_ CvROOT(thing), st, NPathLinkAndNode("CvROOT", "OPs"));
1177 /* Some embedded char pointers */
1178 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_top_name, NPathLink("xio_top_name"));
1179 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_fmt_name, NPathLink("xio_fmt_name"));
1180 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_bottom_name, NPathLink("xio_bottom_name"));
1181 /* Throw the GVs on the list to be walked if they're not-null */
1182 sv_size(aTHX_ st, NPathLink("xio_top_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_top_gv, recurse);
1183 sv_size(aTHX_ st, NPathLink("xio_bottom_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_bottom_gv, recurse);
1184 sv_size(aTHX_ st, NPathLink("xio_fmt_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_fmt_gv, recurse);
1186 /* Only go trotting through the IO structures if they're really
1187 trottable. If USE_PERLIO is defined we can do this. If
1188 not... we can't, so we don't even try */
1190 /* Dig into xio_ifp and xio_ofp here */
1191 warn("Devel::Size: Can't size up perlio layers yet\n");
1196 #if (PERL_VERSION < 9)
1201 if(isGV_with_GP(thing)) {
1203 hek_size(aTHX_ st, GvNAME_HEK(thing), 1, NPathLink("GvNAME_HEK"));
1205 ADD_SIZE(st, "GvNAMELEN", GvNAMELEN(thing));
1207 ADD_ATTR(st, NPattr_NAME, GvNAME_get(thing), 0);
1209 hek_size(aTHX_ st, GvFILE_HEK(thing), 1, NPathLink("GvFILE_HEK"));
1210 #elif defined(GvFILE)
1211 # if !defined(USE_ITHREADS) || (PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8))
1212 /* With itreads, before 5.8.9, this can end up pointing to freed memory
1213 if the GV was created in an eval, as GvFILE() points to CopFILE(),
1214 and the relevant COP has been freed on scope cleanup after the eval.
1215 5.8.9 adds a binary compatible fudge that catches the vast majority
1216 of cases. 5.9.something added a proper fix, by converting the GP to
1217 use a shared hash key (porperly reference counted), instead of a
1218 char * (owned by who knows? possibly no-one now) */
1219 check_new_and_strlen(st, GvFILE(thing), NPathLink("GvFILE"));
1222 /* Is there something hanging off the glob? */
1223 if (check_new(st, GvGP(thing))) {
1224 ADD_SIZE(st, "GP", sizeof(GP));
1225 sv_size(aTHX_ st, NPathLink("gp_sv"), (SV *)(GvGP(thing)->gp_sv), recurse);
1226 sv_size(aTHX_ st, NPathLink("gp_av"), (SV *)(GvGP(thing)->gp_av), recurse);
1227 sv_size(aTHX_ st, NPathLink("gp_hv"), (SV *)(GvGP(thing)->gp_hv), recurse);
1228 sv_size(aTHX_ st, NPathLink("gp_cv"), (SV *)(GvGP(thing)->gp_cv), recurse);
1229 sv_size(aTHX_ st, NPathLink("gp_egv"), (SV *)(GvGP(thing)->gp_egv), recurse);
1230 sv_size(aTHX_ st, NPathLink("gp_form"), (SV *)(GvGP(thing)->gp_form), recurse);
1232 #if (PERL_VERSION >= 9)
1236 #if PERL_VERSION <= 8
1244 if(recurse && SvROK(thing))
1245 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
1246 else if (SvIsCOW_shared_hash(thing))
1247 hek_size(aTHX_ st, SvSHARED_HEK_FROM_PV(SvPVX(thing)), 1, NPathLink("SvSHARED_HEK_FROM_PV"));
1249 ADD_SIZE(st, "SvLEN", SvLEN(thing));
1253 SvOOK_offset(thing, len);
1254 ADD_SIZE(st, "SvOOK", len);
1260 if (type >= SVt_PVMG) {
1261 magic_size(aTHX_ thing, st, NPathLink("MG"));
/* Cleanup callback installed as st->free_state_cb by new_state().
 * Closes the node-path output stream if one was opened: a leading '|'
 * in the stream name means it was opened with popen() (see new_state),
 * so it must be closed with pclose(); otherwise it was fopen()ed and
 * gets fclose().  Both close failures are reported with warn().
 * NOTE(review): excerpt elides the return-type line and some braces. */
1268 free_memnode_state(pTHX_ struct state *st)
1270 if (st->node_stream_fh && st->node_stream_name) {
1271 if (*st->node_stream_name == '|') {
1272 if (pclose(st->node_stream_fh))
1273 warn("%s exited with an error status\n", st->node_stream_name);
1276 if (fclose(st->node_stream_fh))
1277 warn("Error closing %s: %s\n", st->node_stream_name, strerror(errno));
/* Allocate and initialise a fresh sizing-state struct.
 * - honours the $Devel::Size::warn and $Devel::Size::dangle control vars;
 * - pre-marks the immortal SVs (undef/no/yes/placeholder) as "seen" so
 *   they are never counted;
 * - with PATH_TRACKING, wires up the node-path callbacks, optionally
 *   streaming to the file or pipe named by the MEMVIEW env var
 *   ('|'-prefixed names go through popen()).
 * NOTE(review): excerpt elides the function-name line, braces and some
 * declarations (gaps in the original numbering). */
1282 static struct state *
1288 Newxz(st, 1, struct state);
1290 st->min_recurse_threshold = TOTAL_SIZE_RECURSION;
1291 if (NULL != (warn_flag = perl_get_sv("Devel::Size::warn", FALSE))) {
1292 st->dangle_whine = st->go_yell = SvIV(warn_flag) ? TRUE : FALSE;
1294 if (NULL != (warn_flag = perl_get_sv("Devel::Size::dangle", FALSE))) {
1295 st->dangle_whine = SvIV(warn_flag) ? TRUE : FALSE;
/* mark the shared immortals as already seen so they're never sized */
1297 check_new(st, &PL_sv_undef);
1298 check_new(st, &PL_sv_no);
1299 check_new(st, &PL_sv_yes);
1300 #if PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 0)
1301 check_new(st, &PL_sv_placeholder);
1303 #ifdef PATH_TRACKING
1304 if (getenv("MEMVIEW") && *getenv("MEMVIEW")) { /* XXX quick hack */
1305 st->node_stream_name = getenv("MEMVIEW");
1306 if (*st->node_stream_name == '|')
1307 st->node_stream_fh = popen(st->node_stream_name+1, "w");
1309 st->node_stream_fh = fopen(st->node_stream_name, "wb");
1310 if (!st->node_stream_fh)
1311 croak("Can't open '%s' for writing: %s", st->node_stream_name, strerror(errno));
1312 setlinebuf(st->node_stream_fh); /* XXX temporary for debugging */
1313 st->add_attr_cb = np_stream_node_path_info;
1316 st->add_attr_cb = np_dump_node_path_info;
1317 st->free_state_cb = free_memnode_state;
/* Walk every SV arena and size any live SV the main traversal missed.
 * Called at the end of perl_size() once everything reachable has been
 * visited; anything accounted here under "unseen" is effectively a leak
 * (or a gap in the traversal above).
 * NOTE(review): excerpt elides the return-type line, local declarations
 * for sva/sv, and closing braces. */
1322 /* XXX based on S_visit() in sv.c */
1324 unseen_sv_size(pTHX_ struct state *st, pPATH)
1329 dNPathNodes(1, NPathArg);
1331 NPathPushNode("unseen", NPtype_NAME);
1333 /* by this point we should have visited all the SVs
1334 * so now we'll run through all the SVs via the arenas
1335 * in order to find any that we've missed for some reason.
1336 * Once the rest of the code is finding all the SVs then any
1337 * found here will be leaks. */
1339 for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
1340 const SV * const svend = &sva[SvREFCNT(sva)];
1342 for (sv = sva + 1; sv < svend; ++sv) {
1343 if (SvTYPE(sv) != (svtype)SVTYPEMASK && SvREFCNT(sv)) {
1344 sv_size(aTHX_ st, NPathLink("arena"), sv, TOTAL_SIZE_RECURSION);
1346 else if (check_new(st, sv)) { /* sanity check */
1347 warn("unseen_sv_size encountered freed SV unexpectedly");
/* XS glue: the size()/total_size() entry points.  ALIAS makes one XSUB
 * serve both names; `ix` selects the recursion mode (total_size maps to
 * TOTAL_SIZE_RECURSION).  A reference argument is dereferenced once so
 * callers can size arrays and hashes.
 * NOTE(review): excerpt elides the XSUB signature, ALIAS/CODE keywords
 * and the OUTPUT section (gaps in the original numbering). */
1354 MODULE = Devel::Size PACKAGE = Devel::Size
1362 total_size = TOTAL_SIZE_RECURSION
1365 SV *thing = orig_thing;
1366 struct state *st = new_state(aTHX);
1368 /* If they passed us a reference then dereference it. This is the
1369 only way we can check the sizes of arrays and hashes */
1371 thing = SvRV(thing);
1374 sv_size(aTHX_ st, NULL, thing, ix);
1375 RETVAL = st->total_size;
/* XS glue: perl_size() — sizes the whole interpreter.  Strategy:
 * 1. recurse from PL_defstash (everything reachable via %main::);
 * 2. sweep the remaining interpreter globals under an "others" node;
 * 3. account PL_strtab, free SV heads in the arenas, and finally any
 *    SVs the traversal missed (unseen_sv_size == leaks);
 * 4. with mymalloc, report malloc free-space statistics.
 * NOTE(review): excerpt elides the XSUB header, several #ifdef guards
 * and closing braces (gaps in the original numbering). */
1385 struct state *st = new_state(aTHX);
1386 dNPathNodes(3, NULL);
1388 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1390 NPathPushNode("perl_size", NPtype_NAME); /* provide a root node */
1391 /* start with PL_defstash to get everything reachable from \%main:: */
1392 sv_size(aTHX_ st, NPathLink("PL_defstash"), (SV*)PL_defstash, TOTAL_SIZE_RECURSION);
1394 NPathPushNode("others", NPtype_NAME); /* group these (typically much smaller) items */
1395 sv_size(aTHX_ st, NPathLink("PL_defgv"), (SV*)PL_defgv, TOTAL_SIZE_RECURSION);
1396 sv_size(aTHX_ st, NPathLink("PL_incgv"), (SV*)PL_incgv, TOTAL_SIZE_RECURSION);
1397 sv_size(aTHX_ st, NPathLink("PL_rs"), (SV*)PL_rs, TOTAL_SIZE_RECURSION);
1398 sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1399 sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1400 sv_size(aTHX_ st, NPathLink("PL_errors"), (SV*)PL_errors, TOTAL_SIZE_RECURSION);
1401 sv_size(aTHX_ st, NPathLink("PL_stashcache"), (SV*)PL_stashcache, TOTAL_SIZE_RECURSION);
1402 sv_size(aTHX_ st, NPathLink("PL_patchlevel"), (SV*)PL_patchlevel, TOTAL_SIZE_RECURSION);
1403 sv_size(aTHX_ st, NPathLink("PL_apiversion"), (SV*)PL_apiversion, TOTAL_SIZE_RECURSION);
1404 sv_size(aTHX_ st, NPathLink("PL_registered_mros"), (SV*)PL_registered_mros, TOTAL_SIZE_RECURSION);
1406 sv_size(aTHX_ st, NPathLink("PL_regex_padav"), (SV*)PL_regex_padav, TOTAL_SIZE_RECURSION);
1408 sv_size(aTHX_ st, NPathLink("PL_warnhook"), (SV*)PL_warnhook, TOTAL_SIZE_RECURSION);
1409 sv_size(aTHX_ st, NPathLink("PL_diehook"), (SV*)PL_diehook, TOTAL_SIZE_RECURSION);
1410 sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1411 sv_size(aTHX_ st, NPathLink("PL_main_cv"), (SV*)PL_main_cv, TOTAL_SIZE_RECURSION);
1412 sv_size(aTHX_ st, NPathLink("PL_main_root"), (SV*)PL_main_root, TOTAL_SIZE_RECURSION);
1413 sv_size(aTHX_ st, NPathLink("PL_main_start"), (SV*)PL_main_start, TOTAL_SIZE_RECURSION);
1414 /* TODO PL_pidstatus */
1415 /* TODO PL_stashpad */
1416 /* TODO PL_compiling? COP */
1418 /* TODO stacks: cur, main, tmps, mark, scope, save */
1419 /* TODO PL_exitlist */
1420 /* TODO PL_reentrant_buffers etc */
1422 /* TODO PerlIO? PL_known_layers PL_def_layerlist PL_perlio_fd_refcnt etc */
1424 /* TODO anything missed? */
1426 /* --- by this point we should have seen all reachable SVs --- */
1428 /* in theory we shouldn't have any elements in PL_strtab that haven't been seen yet */
1429 sv_size(aTHX_ st, NPathLink("PL_strtab"), (SV*)PL_strtab, TOTAL_SIZE_RECURSION);
1431 /* unused space in sv head arenas */
/* walk the free-SV-head chain; each head should not have been seen yet */
1435 # define SvARENA_CHAIN(sv) SvANY(sv) /* XXX */
1436 while ((p = MUTABLE_SV(SvARENA_CHAIN(p)))) {
1437 if (!check_new(st, p)) /* sanity check */
1438 warn("Free'd SV head unexpectedly already seen");
1441 NPathPushNode("unused_sv_heads", NPtype_NAME);
1442 ADD_SIZE(st, "sv", free_heads * sizeof(SV));
1445 /* XXX iterate over bodies_by_type and crawl the free chains for each */
1447 /* iterate over all SVs to find any we've not accounted for yet */
1448 /* once the code above is visiting all SVs, any found here have been leaked */
1449 unseen_sv_size(aTHX_ st, NPathLink("unaccounted"));
/* presumably only compiled under perl's own malloc (mstats) — verify guard */
1452 struct mstats ms = mstats();
1453 NPathSetNode("freed_malloc_space", NPtype_NAME);
1454 ADD_SIZE(st, "bytes_free", ms.bytes_free);
1455 ADD_ATTR(st, NPattr_NOTE, "bytes_total", ms.bytes_total);
1456 ADD_ATTR(st, NPattr_NOTE, "bytes_used", ms.bytes_used);
1457 ADD_ATTR(st, NPattr_NOTE, "chunks_used", ms.chunks_used);
1458 ADD_ATTR(st, NPattr_NOTE, "chunks_free", ms.chunks_free);
1461 RETVAL = st->total_size;