5 * Refactor this to split out D:M code from Devel::Size code.
7 * Start migrating Devel::Size's Size.xs towards the new code.
9 * ADD_PRE_ATTR for index should check if the ptr is new first. Currently we're
10 * generating lots of ADD_PRE_ATTR's for SVs that we've already seen via other paths.
11 * That's wasteful and likely to cause subtle bugs.
13 * Give HE's their own node so keys and values can be tied together
17 #undef NDEBUG /* XXX */
20 #define PERL_NO_GET_CONTEXT
27 /* Not yet in ppport.h */
29 # define CvISXSUB(cv) (CvXSUB(cv) ? TRUE : FALSE)
32 # define SvRV_const(rv) SvRV(rv)
35 # define SvOOK_offset(sv, len) STMT_START { len = SvIVX(sv); } STMT_END
38 # define SvIsCOW(sv) ((SvFLAGS(sv) & (SVf_FAKE | SVf_READONLY)) == \
39 (SVf_FAKE | SVf_READONLY))
41 #ifndef SvIsCOW_shared_hash
42 # define SvIsCOW_shared_hash(sv) (SvIsCOW(sv) && SvLEN(sv) == 0)
44 #ifndef SvSHARED_HEK_FROM_PV
45 # define SvSHARED_HEK_FROM_PV(pvx) \
46 ((struct hek*)(pvx - STRUCT_OFFSET(struct hek, hek_key)))
50 # define PL_opargs opargs
51 # define PL_op_name op_name
55 /* "structured exception" handling is a Microsoft extension to C and C++.
56 It's *not* C++ exception handling - C++ exception handling can't capture
57 * SEGVs and suchlike, whereas this can. There's no known analogous
58 functionality on other platforms. */
60 # define TRY_TO_CATCH_SEGV __try
61 # define CAUGHT_EXCEPTION __except(EXCEPTION_EXECUTE_HANDLER)
63 # define TRY_TO_CATCH_SEGV if(1)
64 # define CAUGHT_EXCEPTION else
68 # define __attribute__(x)
71 #if 0 && defined(DEBUGGING)
72 #define dbg_printf(x) printf x
77 #define TAG /* printf( "# %s(%d)\n", __FILE__, __LINE__ ) */
80 /* The idea is to have a tree structure to store 1 bit per possible pointer
81 * address. The lowest 16 bits are stored in a block of 8192 bytes.
82 * The blocks are in a 256-way tree, indexed by the rest of the pointer.
83 This can cope with 32 and 64 bit pointers, and any address space layout,
84 without excessive memory needs. The assumption is that your CPU cache
85 works :-) (And that we're not going to bust it) */
/* Each leaf block tracks the low 16 bits of address space: 2^16 one-bit
 * "seen" flags packed 8 per byte, i.e. a 2^LEAF_BITS-byte block.
 * LEAF_MASK (13 bits set) selects the byte index within a leaf; this
 * matches LEAF_BITS when BYTE_BITS == 3 — TODO confirm against the
 * BYTE_BITS definition (not visible in this chunk). */
88 #define LEAF_BITS (16 - BYTE_BITS)
89 #define LEAF_MASK 0x1FFF
91 typedef struct npath_node_st npath_node_t;
92 struct npath_node_st {
107 /* My hunch (not measured) is that for most architectures pointers will
108 start with 0 bits, hence the start of this array will be hot, and the
109 end unused. So put the flags next to the hot end. */
112 int min_recurse_threshold;
113 /* callback hooks and data */
114 int (*add_attr_cb)(struct state *st, npath_node_t *npath_node, UV attr_type, const char *name, UV value);
115 void (*free_state_cb)(pTHX_ struct state *st);
116 void *state_cb_data; /* free'd by free_state() after free_state_cb() call */
117 /* this stuff will be moved to state_cb_data later */
119 FILE *node_stream_fh;
120 char *node_stream_name;
123 #define ADD_SIZE(st, leafname, bytes) (NPathAddSizeCb(st, leafname, bytes) (st)->total_size += (bytes))
125 #define PATH_TRACKING
128 #define pPATH npath_node_t *NPathArg
130 /* A subtle point here is that dNPathNodes and NPathPushNode leave NP pointing
131 * to the next unused slot (though with prev already filled in)
132 * whereas NPathLink leaves NP unchanged, it just fills in the slot NP points
133 * to and passes that NP value to the function being called.
134 * seqn==0 indicates the node is new (hasn't been output yet)
136 #define dNPathNodes(nodes, prev_np) \
137 npath_node_t name_path_nodes[nodes+1]; /* +1 for NPathLink */ \
138 npath_node_t *NP = &name_path_nodes[0]; \
139 NP->seqn = NP->type = 0; NP->id = Nullch; /* safety/debug */ \
141 #define NPathPushNode(nodeid, nodetype) \
143 NP->type = nodetype; \
145 if(0)fprintf(stderr,"NPathPushNode (%p <-) %p <- [%d %s]\n", NP->prev, NP, nodetype,(char*)nodeid);\
147 NP->id = Nullch; /* safety/debug */ \
150 #define NPathSetNode(nodeid, nodetype) \
151 (NP-1)->id = nodeid; \
152 (NP-1)->type = nodetype; \
153 if(0)fprintf(stderr,"NPathSetNode (%p <-) %p <- [%d %s]\n", (NP-1)->prev, (NP-1), nodetype,(char*)nodeid);\
155 #define NPathPopNode \
158 /* dNPathUseParent points NP directly at the parent's name_path_nodes array
159 * So the function can only safely call ADD_*() but not NPathLink, unless the
160 * caller has spare nodes in its name_path_nodes.
162 #define dNPathUseParent(prev_np) npath_node_t *NP = (((prev_np+1)->prev = prev_np), prev_np+1)
/* Node types stored in npath_node_t.type — identify what kind of entity
 * a name-path node refers to (interpreted by np_print_node_name()). */
164 #define NPtype_NAME 0x01
165 #define NPtype_LINK 0x02
166 #define NPtype_SV 0x03
167 #define NPtype_MAGIC 0x04
168 #define NPtype_OP 0x05

/* Attribute types passed to the add_attr_cb hook via ADD_ATTR() and
 * ADD_PRE_ATTR(). NPattr_LEAFSIZE (0x00) is the plain leaf-size case
 * emitted by ADD_SIZE(); the NPattr_PAD* values are reported as pad
 * entries (see the "~pad%lu" output in np_dump_node_path_info()). */
170 /* XXX these should probably be generalized into flag bits */
171 #define NPattr_LEAFSIZE 0x00
172 #define NPattr_NAME 0x01
173 #define NPattr_PADFAKE 0x02
174 #define NPattr_PADNAME 0x03
175 #define NPattr_PADTMP 0x04
176 #define NPattr_NOTE 0x05
177 #define NPattr_PRE_ATTR 0x06
179 #define _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, np) (st->add_attr_cb && st->add_attr_cb(st, np, attr_type, attr_name, attr_value))
180 #define ADD_ATTR(st, attr_type, attr_name, attr_value) _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, NP-1)
181 #define ADD_PRE_ATTR(st, attr_type, attr_name, attr_value) (assert(!attr_type), _ADD_ATTR_NP(st, NPattr_PRE_ATTR, attr_name, attr_value, NP-1))
183 #define _NPathLink(np, nid, ntype) (((np)->id=nid), ((np)->type=ntype), ((np)->seqn=0))
184 #define NPathLink(nid) (_NPathLink(NP, nid, NPtype_LINK), NP)
185 /* add a link and a name node to the path - a special case for op_size */
186 #define NPathLinkAndNode(nid, nid2) (_NPathLink(NP, nid, NPtype_LINK), _NPathLink(NP+1, nid2, NPtype_NAME), ((NP+1)->prev=NP), (NP+1))
187 #define NPathOpLink (NPathArg)
188 #define NPathAddSizeCb(st, name, bytes) (st->add_attr_cb && st->add_attr_cb(st, NP-1, NPattr_LEAFSIZE, (name), (bytes))),
192 #define NPathAddSizeCb(st, name, bytes)
193 #define pPATH void *npath_dummy /* XXX ideally remove */
194 #define dNPathNodes(nodes, prev_np) dNOOP
195 #define NPathLink(nodeid, nodetype) NULL
196 #define NPathOpLink NULL
197 #define ADD_ATTR(st, attr_type, attr_name, attr_value) NOOP
199 #endif /* PATH_TRACKING */
206 static const char *svtypenames[SVt_LAST] = {
208 "NULL", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVBM", "PVLV", "PVAV", "PVHV", "PVCV", "PVGV", "PVFM", "PVIO",
209 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
210 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
211 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
212 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
213 #elif PERL_VERSION < 13
214 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
216 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
221 gettimeofday_nv(void)
223 #ifdef HAS_GETTIMEOFDAY
225 gettimeofday(&when, (struct timezone *) 0);
226 return when.tv_sec + (when.tv_usec / 1000000.0);
230 (*u2time)(aTHX_ &time_of_day);
231 return time_of_day[0] + (time_of_day[1] / 1000000.0);
239 np_print_node_name(FILE *fp, npath_node_t *npath_node)
241 char buf[1024]; /* XXX */
243 switch (npath_node->type) {
244 case NPtype_SV: { /* id is pointer to the SV sv_size was called on */
245 const SV *sv = (SV*)npath_node->id;
246 int type = SvTYPE(sv);
247 char *typename = (type == SVt_IV && SvROK(sv)) ? "RV" : svtypenames[type];
248 fprintf(fp, "SV(%s)", typename);
249 switch(type) { /* add some useful details */
250 case SVt_PVAV: fprintf(fp, " fill=%d/%ld", av_len((AV*)sv), AvMAX((AV*)sv)); break;
251 case SVt_PVHV: fprintf(fp, " fill=%ld/%ld", HvFILL((HV*)sv), HvMAX((HV*)sv)); break;
255 case NPtype_OP: { /* id is pointer to the OP op_size was called on */
256 const OP *op = (OP*)npath_node->id;
257 fprintf(fp, "OP(%s)", OP_NAME(op));
260 case NPtype_MAGIC: { /* id is pointer to the MAGIC struct */
261 MAGIC *magic_pointer = (MAGIC*)npath_node->id;
262 /* XXX it would be nice if we could reuse mg_names.c [sigh] */
263 fprintf(fp, "MAGIC(%c)", magic_pointer->mg_type ? magic_pointer->mg_type : '0');
267 fprintf(fp, "%s", npath_node->id);
270 fprintf(fp, "%s", npath_node->id);
272 default: /* assume id is a string pointer */
273 fprintf(fp, "UNKNOWN(%d,%p)", npath_node->type, npath_node->id);
280 np_dump_indent(int depth) {
282 fprintf(stderr, ": ");
286 np_walk_new_nodes(struct state *st,
287 npath_node_t *npath_node,
288 npath_node_t *npath_node_deeper,
289 int (*cb)(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper))
291 if (npath_node->seqn) /* node already output */
294 if (npath_node->prev) {
295 np_walk_new_nodes(st, npath_node->prev, npath_node, cb); /* recurse */
296 npath_node->depth = npath_node->prev->depth + 1;
298 else npath_node->depth = 0;
299 npath_node->seqn = ++st->seqn;
302 if (cb(st, npath_node, npath_node_deeper)) {
303 /* ignore this node */
304 assert(npath_node->prev);
305 assert(npath_node->depth);
306 assert(npath_node_deeper);
308 npath_node->seqn = --st->seqn;
309 npath_node_deeper->prev = npath_node->prev;
317 np_dump_formatted_node(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
318 if (0 && npath_node->type == NPtype_LINK)
320 np_dump_indent(npath_node->depth);
321 np_print_node_name(stderr, npath_node);
322 if (npath_node->type == NPtype_LINK)
323 fprintf(stderr, "->"); /* cosmetic */
324 fprintf(stderr, "\t\t[#%ld @%u] ", npath_node->seqn, npath_node->depth);
325 fprintf(stderr, "\n");
330 np_dump_node_path_info(struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
332 if (attr_type == NPattr_LEAFSIZE && !attr_value)
333 return 0; /* ignore zero sized leaf items */
334 np_walk_new_nodes(st, npath_node, NULL, np_dump_formatted_node);
335 np_dump_indent(npath_node->depth+1);
337 case NPattr_LEAFSIZE:
338 fprintf(stderr, "+%ld %s =%ld", attr_value, attr_name, attr_value+st->total_size);
341 fprintf(stderr, "~NAMED('%s') %lu", attr_name, attr_value);
344 fprintf(stderr, "~note %s %lu", attr_name, attr_value);
349 fprintf(stderr, "~pad%lu %s %lu", attr_type, attr_name, attr_value);
352 fprintf(stderr, "~??? %s %lu", attr_name, attr_value);
355 fprintf(stderr, "\n");
360 np_stream_formatted_node(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
361 fprintf(st->node_stream_fh, "-%u %lu %u ",
362 npath_node->type, npath_node->seqn, (unsigned)npath_node->depth
364 np_print_node_name(st->node_stream_fh, npath_node);
365 fprintf(st->node_stream_fh, "\n");
370 np_stream_node_path_info(struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
372 if (!attr_type && !attr_value)
373 return 0; /* ignore zero sized leaf items */
374 np_walk_new_nodes(st, npath_node, NULL, np_stream_formatted_node);
375 if (attr_type) { /* Attribute type, name and value */
376 fprintf(st->node_stream_fh, "%lu %lu ", attr_type, npath_node->seqn);
378 else { /* Leaf name and memory size */
379 fprintf(st->node_stream_fh, "L %lu ", npath_node->seqn);
381 fprintf(st->node_stream_fh, "%lu %s\n", attr_value, attr_name);
386 #endif /* PATH_TRACKING */
390 Checks to see if thing is in the bitstring.
391 Returns true or false, and
392 notes thing in the segmented bitstring.
395 check_new(struct state *st, const void *const p) {
396 unsigned int bits = 8 * sizeof(void*);
397 const size_t raw_p = PTR2nat(p);
398 /* This effectively rotates the value right by the number of low always-0
399 bits in an aligned pointer. The assumption is that most (if not all)
400 pointers are aligned, and these will be in the same chain of nodes
401 (and hence hot in the cache) but we can still deal with any unaligned
403 const size_t cooked_p
404 = (raw_p >> ALIGN_BITS) | (raw_p << (bits - ALIGN_BITS));
405 const U8 this_bit = 1 << (cooked_p & 0x7);
409 void **tv_p = (void **) (st->tracking);
411 if (NULL == p) return FALSE;
413 const char c = *(const char *)p;
416 if (st->dangle_whine)
417 warn( "Devel::Size: Encountered invalid pointer: %p\n", p );
423 /* bits now 24 (32 bit pointers) or 56 (64 bit pointers) */
425 /* First level is always present. */
427 i = (unsigned int)((cooked_p >> bits) & 0xFF);
429 Newxz(tv_p[i], 256, void *);
430 tv_p = (void **)(tv_p[i]);
432 } while (bits > LEAF_BITS + BYTE_BITS);
433 /* bits now 16 always */
434 #if !defined(MULTIPLICITY) || PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8)
435 /* 5.8.8 and early have an assert() macro that uses Perl_croak, hence needs
436 a my_perl under multiplicity */
439 leaf_p = (U8 **)tv_p;
440 i = (unsigned int)((cooked_p >> bits) & 0xFF);
442 Newxz(leaf_p[i], 1 << LEAF_BITS, U8);
447 i = (unsigned int)((cooked_p >> BYTE_BITS) & LEAF_MASK);
449 if(leaf[i] & this_bit)
457 free_tracking_at(void **tv, int level)
465 free_tracking_at((void **) tv[i], level);
479 free_state(struct state *st)
481 const int top_level = (sizeof(void *) * 8 - LEAF_BITS - BYTE_BITS) / 8;
482 if (st->free_state_cb)
483 st->free_state_cb(st);
484 if (st->state_cb_data)
485 Safefree(st->state_cb_data);
486 free_tracking_at((void **)st->tracking, top_level);
490 /* For now, this is somewhat a compatibility bodge until the plan comes
491 together for fine grained recursion control. total_size() would recurse into
492 hash and array members, whereas sv_size() would not. However, sv_size() is
493 called with CvSTASH() of a CV, which means that if it (also) starts to
494 recurse fully, then the size of any CV now becomes the size of the entire
495 symbol table reachable from it, and potentially the entire symbol table, if
496 any subroutine makes a reference to a global (such as %SIG). The historical
497 implementation of total_size() didn't report "everything", and changing the
498 only available size to "everything" doesn't feel at all useful. */
/* Recursion levels passed as the 'recurse' argument to sv_size() and
 * compared against st->min_recurse_threshold:
 *   NO_RECURSION         - size this SV alone
 *   SOME_RECURSION       - follow immediate structural pointers only
 *   TOTAL_SIZE_RECURSION - follow everything reachable (total_size()) */
500 #define NO_RECURSION 0
501 #define SOME_RECURSION 1
502 #define TOTAL_SIZE_RECURSION 2

/* Forward declaration: sv_size() is mutually recursive with the
 * op/magic/padlist sizing functions defined below. */
504 static void sv_size(pTHX_ struct state *, pPATH, const SV *const, const int recurse);
520 , OPc_CONDOP /* 12 */
529 cc_opclass(const OP * const o)
535 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
537 if (o->op_type == OP_SASSIGN)
538 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
541 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_AELEMFAST)
545 if ((o->op_type == OP_TRANS)) {
549 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
581 #ifdef OA_PVOP_OR_SVOP
582 case OA_PVOP_OR_SVOP: TAG;
584 * Character translations (tr///) are usually a PVOP, keeping a
585 * pointer to a table of shorts used to look up translations.
586 * Under utf8, however, a simple table isn't practical; instead,
587 * the OP is an SVOP, and the SV is a reference to a swash
588 * (i.e., an RV pointing to an HV).
590 return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF))
591 ? OPc_SVOP : OPc_PVOP;
600 case OA_BASEOP_OR_UNOP: TAG;
602 * UNI(OP_foo) in toke.c returns token UNI or FUNC1 depending on
603 * whether parens were seen. perly.y uses OPf_SPECIAL to
604 * signal whether a BASEOP had empty parens or none.
605 * Some other UNOPs are created later, though, so the best
606 * test is OPf_KIDS, which is set in newUNOP.
608 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
610 case OA_FILESTATOP: TAG;
612 * The file stat OPs are created via UNI(OP_foo) in toke.c but use
613 * the OPf_REF flag to distinguish between OP types instead of the
614 * usual OPf_SPECIAL flag. As usual, if OPf_KIDS is set, then we
615 * return OPc_UNOP so that walkoptree can find our children. If
616 * OPf_KIDS is not set then we check OPf_REF. Without OPf_REF set
617 * (no argument to the operator) it's an OP; with OPf_REF set it's
618 * an SVOP (and op_sv is the GV for the filehandle argument).
620 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
622 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
624 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
626 case OA_LOOPEXOP: TAG;
628 * next, last, redo, dump and goto use OPf_SPECIAL to indicate that a
629 * label was omitted (in which case it's a BASEOP) or else a term was
630 * seen. In this last case, all except goto are definitely PVOP but
631 * goto is either a PVOP (with an ordinary constant label), an UNOP
632 * with OPf_STACKED (with a non-constant non-sub) or an UNOP for
633 * OP_REFGEN (with goto &sub) in which case OPf_STACKED also seems to
636 if (o->op_flags & OPf_STACKED)
638 else if (o->op_flags & OPf_SPECIAL)
648 warn("Devel::Size: Can't determine class of operator %s, assuming BASEOP\n",
649 PL_op_name[o->op_type]);
655 /* Figure out how much magic is attached to the SV and return the
658 magic_size(pTHX_ const SV * const thing, struct state *st, pPATH) {
659 dNPathNodes(1, NPathArg);
660 MAGIC *magic_pointer = SvMAGIC(thing);
665 if (!SvMAGICAL(thing)) {
667 warn("Ignoring suspect magic on this SV\n");
673 /* push a dummy node for NPathSetNode to update inside the while loop */
674 NPathPushNode("dummy", NPtype_NAME);
676 /* Have we seen the magic pointer? (NULL has always been seen before) */
677 while (check_new(st, magic_pointer)) {
679 NPathSetNode(magic_pointer, NPtype_MAGIC);
681 ADD_SIZE(st, "mg", sizeof(MAGIC));
682 /* magic vtables aren't freed when magic is freed, so don't count them.
683 (They are static structures. Anything that assumes otherwise is buggy.)
688 /* XXX only chase mg_obj if mg->mg_flags & MGf_REFCOUNTED ? */
689 sv_size(aTHX_ st, NPathLink("mg_obj"), magic_pointer->mg_obj, TOTAL_SIZE_RECURSION);
690 if (magic_pointer->mg_len == HEf_SVKEY) {
691 sv_size(aTHX_ st, NPathLink("mg_ptr"), (SV *)magic_pointer->mg_ptr, TOTAL_SIZE_RECURSION);
693 #if defined(PERL_MAGIC_utf8) && defined (PERL_MAGIC_UTF8_CACHESIZE)
694 else if (magic_pointer->mg_type == PERL_MAGIC_utf8) {
695 if (check_new(st, magic_pointer->mg_ptr)) {
696 ADD_SIZE(st, "PERL_MAGIC_utf8", PERL_MAGIC_UTF8_CACHESIZE * 2 * sizeof(STRLEN));
700 /* XXX also handle mg->mg_type == PERL_MAGIC_utf8 ? */
701 else if (magic_pointer->mg_len > 0) {
702 if(0)do_magic_dump(0, Perl_debug_log, magic_pointer, 0, 0, FALSE, 0);
703 if (check_new(st, magic_pointer->mg_ptr)) {
704 ADD_SIZE(st, "mg_len", magic_pointer->mg_len);
708 /* Get the next in the chain */
709 magic_pointer = magic_pointer->mg_moremagic;
712 if (st->dangle_whine)
713 warn( "Devel::Size: Encountered bad magic at: %p\n", magic_pointer );
719 check_new_and_strlen(struct state *st, const char *const p, pPATH) {
720 dNPathNodes(1, NPathArg->prev);
721 if(check_new(st, p)) {
722 NPathPushNode(NPathArg->id, NPtype_NAME);
723 ADD_SIZE(st, NPathArg->id, 1 + strlen(p));
728 regex_size(const REGEXP * const baseregex, struct state *st, pPATH) {
729 dNPathNodes(1, NPathArg);
730 if(!check_new(st, baseregex))
732 NPathPushNode("regex_size", NPtype_NAME);
733 ADD_SIZE(st, "REGEXP", sizeof(REGEXP));
734 #if (PERL_VERSION < 11)
735 /* Note the size of the paren offset thing */
736 ADD_SIZE(st, "nparens", sizeof(I32) * baseregex->nparens * 2);
737 ADD_SIZE(st, "precomp", strlen(baseregex->precomp));
739 ADD_SIZE(st, "regexp", sizeof(struct regexp));
740 ADD_SIZE(st, "nparens", sizeof(I32) * SvANY(baseregex)->nparens * 2);
741 /*ADD_SIZE(st, strlen(SvANY(baseregex)->subbeg));*/
743 if (st->go_yell && !st->regex_whine) {
744 carp("Devel::Size: Calculated sizes for compiled regexes are incompatible, and probably always will be");
750 op_size(pTHX_ const OP * const baseop, struct state *st, pPATH)
752 /* op_size recurses to follow the chain of opcodes. For the node path we
753 * don't want the chain to be 'nested' in the path so we use dNPathUseParent().
754 * Also, to avoid a link-to-a-link the caller should use NPathLinkAndNode()
755 * instead of NPathLink().
757 dNPathUseParent(NPathArg);
761 if(!check_new(st, baseop))
764 op_size(aTHX_ baseop->op_next, st, NPathOpLink);
766 switch (cc_opclass(baseop)) {
767 case OPc_BASEOP: TAG;
768 ADD_SIZE(st, "op", sizeof(struct op));
771 ADD_SIZE(st, "unop", sizeof(struct unop));
772 op_size(aTHX_ ((UNOP *)baseop)->op_first, st, NPathOpLink);
775 ADD_SIZE(st, "binop", sizeof(struct binop));
776 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
777 op_size(aTHX_ ((BINOP *)baseop)->op_last, st, NPathOpLink);
780 ADD_SIZE(st, "logop", sizeof(struct logop));
781 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
782 op_size(aTHX_ ((LOGOP *)baseop)->op_other, st, NPathOpLink);
785 case OPc_CONDOP: TAG;
786 ADD_SIZE(st, "condop", sizeof(struct condop));
787 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
788 op_size(aTHX_ ((CONDOP *)baseop)->op_true, st, NPathOpLink);
789 op_size(aTHX_ ((CONDOP *)baseop)->op_false, st, NPathOpLink);
792 case OPc_LISTOP: TAG;
793 ADD_SIZE(st, "listop", sizeof(struct listop));
794 op_size(aTHX_ ((LISTOP *)baseop)->op_first, st, NPathOpLink);
795 op_size(aTHX_ ((LISTOP *)baseop)->op_last, st, NPathOpLink);
798 ADD_SIZE(st, "pmop", sizeof(struct pmop));
799 op_size(aTHX_ ((PMOP *)baseop)->op_first, st, NPathOpLink);
800 op_size(aTHX_ ((PMOP *)baseop)->op_last, st, NPathOpLink);
801 #if PERL_VERSION < 9 || (PERL_VERSION == 9 && PERL_SUBVERSION < 5)
802 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplroot, st, NPathOpLink);
803 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplstart, st, NPathOpLink);
805 /* This is defined away in perl 5.8.x, but it is in there for
808 regex_size(PM_GETRE((PMOP *)baseop), st, NPathLink("PM_GETRE"));
810 regex_size(((PMOP *)baseop)->op_pmregexp, st, NPathLink("op_pmregexp"));
814 ADD_SIZE(st, "svop", sizeof(struct svop));
815 if (!(baseop->op_type == OP_AELEMFAST
816 && baseop->op_flags & OPf_SPECIAL)) {
817 /* not an OP_PADAV replacement */
818 sv_size(aTHX_ st, NPathLink("SVOP"), ((SVOP *)baseop)->op_sv, SOME_RECURSION);
823 ADD_SIZE(st, "padop", sizeof(struct padop));
828 ADD_SIZE(st, "gvop", sizeof(struct gvop));
829 sv_size(aTHX_ st, NPathLink("GVOP"), ((GVOP *)baseop)->op_gv, SOME_RECURSION);
833 check_new_and_strlen(st, ((PVOP *)baseop)->op_pv, NPathLink("op_pv"));
836 ADD_SIZE(st, "loop", sizeof(struct loop));
837 op_size(aTHX_ ((LOOP *)baseop)->op_first, st, NPathOpLink);
838 op_size(aTHX_ ((LOOP *)baseop)->op_last, st, NPathOpLink);
839 op_size(aTHX_ ((LOOP *)baseop)->op_redoop, st, NPathOpLink);
840 op_size(aTHX_ ((LOOP *)baseop)->op_nextop, st, NPathOpLink);
841 op_size(aTHX_ ((LOOP *)baseop)->op_lastop, st, NPathOpLink);
846 basecop = (COP *)baseop;
847 ADD_SIZE(st, "cop", sizeof(struct cop));
849 /* Change 33656 by nicholas@mouse-mill on 2008/04/07 11:29:51
850 Eliminate cop_label from struct cop by storing a label as the first
851 entry in the hints hash. Most statements don't have labels, so this
852 will save memory. Not sure how much.
853 The check below will incorrectly fail on bleadperls
854 before 5.11 @33656, but later than 5.10, producing slightly too
855 small memory sizes on these Perls. */
856 #if (PERL_VERSION < 11)
857 check_new_and_strlen(st, basecop->cop_label, NPathLink("cop_label"));
860 check_new_and_strlen(st, basecop->cop_file, NPathLink("cop_file"));
861 check_new_and_strlen(st, basecop->cop_stashpv, NPathLink("cop_stashpv"));
863 if (SvREFCNT(basecop->cop_stash) == 1) /* XXX hack? */
864 sv_size(aTHX_ st, NPathLink("cop_stash"), (SV *)basecop->cop_stash, SOME_RECURSION);
865 sv_size(aTHX_ st, NPathLink("cop_filegv"), (SV *)basecop->cop_filegv, SOME_RECURSION);
875 if (st->dangle_whine)
876 warn( "Devel::Size: Encountered dangling pointer in opcode at: %p\n", baseop );
881 hek_size(pTHX_ struct state *st, HEK *hek, U32 shared, pPATH)
883 dNPathNodes(1, NPathArg);
885 /* Hash keys can be shared. Have we seen this before? */
886 if (!check_new(st, hek))
888 NPathPushNode("hek", NPtype_NAME);
889 ADD_SIZE(st, "hek_len", HEK_BASESIZE + hek->hek_len
891 + 1 /* No hash key flags prior to 5.8.0 */
897 #if PERL_VERSION < 10
898 ADD_SIZE(st, "he", sizeof(struct he));
900 ADD_SIZE(st, "shared_he", STRUCT_OFFSET(struct shared_he, shared_he_hek));
906 #if PERL_VERSION < 8 || PERL_SUBVERSION < 9
911 # define MAYBE_PURIFY(normal, pure) (pure)
912 # define MAYBE_OFFSET(struct_name, member) 0
914 # define MAYBE_PURIFY(normal, pure) (normal)
915 # define MAYBE_OFFSET(struct_name, member) STRUCT_OFFSET(struct_name, member)
918 const U8 body_sizes[SVt_LAST] = {
921 MAYBE_PURIFY(sizeof(IV), sizeof(XPVIV)), /* SVt_IV */
922 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
923 sizeof(XRV), /* SVt_RV */
924 sizeof(XPV), /* SVt_PV */
925 sizeof(XPVIV), /* SVt_PVIV */
926 sizeof(XPVNV), /* SVt_PVNV */
927 sizeof(XPVMG), /* SVt_PVMG */
928 sizeof(XPVBM), /* SVt_PVBM */
929 sizeof(XPVLV), /* SVt_PVLV */
930 sizeof(XPVAV), /* SVt_PVAV */
931 sizeof(XPVHV), /* SVt_PVHV */
932 sizeof(XPVCV), /* SVt_PVCV */
933 sizeof(XPVGV), /* SVt_PVGV */
934 sizeof(XPVFM), /* SVt_PVFM */
935 sizeof(XPVIO) /* SVt_PVIO */
936 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
940 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
942 MAYBE_PURIFY(sizeof(xpv_allocated), sizeof(XPV)), /* SVt_PV */
943 MAYBE_PURIFY(sizeof(xpviv_allocated), sizeof(XPVIV)),/* SVt_PVIV */
944 sizeof(XPVNV), /* SVt_PVNV */
945 sizeof(XPVMG), /* SVt_PVMG */
946 sizeof(XPVGV), /* SVt_PVGV */
947 sizeof(XPVLV), /* SVt_PVLV */
948 MAYBE_PURIFY(sizeof(xpvav_allocated), sizeof(XPVAV)),/* SVt_PVAV */
949 MAYBE_PURIFY(sizeof(xpvhv_allocated), sizeof(XPVHV)),/* SVt_PVHV */
950 MAYBE_PURIFY(sizeof(xpvcv_allocated), sizeof(XPVCV)),/* SVt_PVCV */
951 MAYBE_PURIFY(sizeof(xpvfm_allocated), sizeof(XPVFM)),/* SVt_PVFM */
952 sizeof(XPVIO), /* SVt_PVIO */
953 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
957 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
959 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
960 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
961 sizeof(XPVNV), /* SVt_PVNV */
962 sizeof(XPVMG), /* SVt_PVMG */
963 sizeof(XPVGV), /* SVt_PVGV */
964 sizeof(XPVLV), /* SVt_PVLV */
965 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
966 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
967 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
968 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
969 sizeof(XPVIO) /* SVt_PVIO */
970 #elif PERL_VERSION < 13
974 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
975 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
976 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
977 sizeof(XPVNV), /* SVt_PVNV */
978 sizeof(XPVMG), /* SVt_PVMG */
979 sizeof(regexp) - MAYBE_OFFSET(regexp, xpv_cur), /* SVt_REGEXP */
980 sizeof(XPVGV), /* SVt_PVGV */
981 sizeof(XPVLV), /* SVt_PVLV */
982 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
983 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
984 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
985 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
986 sizeof(XPVIO) /* SVt_PVIO */
991 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
992 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
993 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
994 sizeof(XPVNV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVNV */
995 sizeof(XPVMG), /* SVt_PVMG */
996 sizeof(regexp), /* SVt_REGEXP */
997 sizeof(XPVGV), /* SVt_PVGV */
998 sizeof(XPVLV), /* SVt_PVLV */
999 sizeof(XPVAV), /* SVt_PVAV */
1000 sizeof(XPVHV), /* SVt_PVHV */
1001 sizeof(XPVCV), /* SVt_PVCV */
1002 sizeof(XPVFM), /* SVt_PVFM */
1003 sizeof(XPVIO) /* SVt_PVIO */
1008 /* based on Perl_do_dump_pad() - wraps sv_size and adds ADD_ATTR calls for the pad names */
1010 padlist_size(pTHX_ struct state *const st, pPATH, PADLIST *padlist,
1013 dNPathUseParent(NPathArg);
1020 if( 0 && !check_new(st, padlist))
1023 pad_name = MUTABLE_AV(*av_fetch(MUTABLE_AV(padlist), 0, FALSE));
1024 pname = AvARRAY(pad_name);
1026 for (ix = 1; ix <= AvFILLp(pad_name); ix++) {
1027 const SV *namesv = pname[ix];
1028 if (namesv && namesv == &PL_sv_undef) {
1032 /* SvFAKE: On a pad name SV, that slot in the frame AV is a REFCNT'ed reference to a lexical from "outside" */
1034 ADD_ATTR(st, NPattr_PADFAKE, SvPVX_const(namesv), ix);
1036 ADD_ATTR(st, NPattr_PADNAME, SvPVX_const(namesv), ix);
1039 ADD_ATTR(st, NPattr_PADTMP, "SVs_PADTMP", ix);
1043 sv_size(aTHX_ st, NPathArg, (SV*)padlist, recurse);
1048 sv_size(pTHX_ struct state *const st, pPATH, const SV * const orig_thing,
1049 const int recurse) {
1050 const SV *thing = orig_thing;
1051 dNPathNodes(3, NPathArg);
1054 if(!check_new(st, orig_thing))
1057 type = SvTYPE(thing);
1058 if (type > SVt_LAST) {
1059 warn("Devel::Size: Unknown variable type: %d encountered\n", type);
1062 NPathPushNode(thing, NPtype_SV);
1063 ADD_SIZE(st, "sv_head", sizeof(SV));
1064 ADD_SIZE(st, "sv_body", body_sizes[type]);
1067 #if (PERL_VERSION < 11)
1068 /* Is it a reference? */
1073 if(recurse && SvROK(thing)) /* XXX maybe don't follow weakrefs */
1074 sv_size(aTHX_ st, (SvWEAKREF(thing) ? NPathLink("weakRV") : NPathLink("RV")), SvRV_const(thing), recurse);
1078 /* Is there anything in the array? */
1079 if (AvMAX(thing) != -1) {
1080 /* an array with 10 slots has AvMax() set to 9 - te 2007-04-22 */
1081 ADD_SIZE(st, "av_max", sizeof(SV *) * (AvMAX(thing) + 1));
1082 dbg_printf(("total_size: %li AvMAX: %li av_len: $i\n", st->total_size, AvMAX(thing), av_len((AV*)thing)));
1084 if (recurse >= st->min_recurse_threshold) {
1085 SSize_t i = AvFILLp(thing) + 1;
1088 ADD_PRE_ATTR(st, 0, "index", i);
1089 sv_size(aTHX_ st, NPathLink("AVelem"), AvARRAY(thing)[i], recurse);
1093 /* Add in the bits on the other side of the beginning */
1095 dbg_printf(("total_size %li, sizeof(SV *) %li, AvARRAY(thing) %li, AvALLOC(thing)%li , sizeof(ptr) %li \n",
1096 st->total_size, sizeof(SV*), AvARRAY(thing), AvALLOC(thing), sizeof( thing )));
1098 /* under Perl 5.8.8 64bit threading, AvARRAY(thing) was a pointer while AvALLOC was 0,
1099 resulting in grossly overstated sizes for arrays. Technically, this shouldn't happen... */
1100 if (AvALLOC(thing) != 0) {
1101 ADD_SIZE(st, "AvALLOC", (sizeof(SV *) * (AvARRAY(thing) - AvALLOC(thing))));
1103 #if (PERL_VERSION < 9)
1104 /* Is there something hanging off the arylen element?
1105 Post 5.9.something this is stored in magic, so will be found there,
1106 and Perl_av_arylen_p() takes a non-const AV*, hence compilers rightly
1107 complain about AvARYLEN() passing thing to it. */
1108 sv_size(aTHX_ st, NPathLink("ARYLEN"), AvARYLEN(thing), recurse);
1113 /* Now the array of buckets */
1114 ADD_SIZE(st, "hv_max", (sizeof(HE *) * (HvMAX(thing) + 1)));
1115 if (HvENAME(thing)) {
1116 ADD_ATTR(st, NPattr_NAME, HvENAME(thing), 0);
1118 /* Now walk the bucket chain */
1119 if (HvARRAY(thing)) {
1122 for (cur_bucket = 0; cur_bucket <= HvMAX(thing); cur_bucket++) {
1123 cur_entry = *(HvARRAY(thing) + cur_bucket);
1125 /* XXX a HE should probably be a node so the keys and values are seen as pairs */
1126 ADD_SIZE(st, "he", sizeof(HE));
1127 hek_size(aTHX_ st, cur_entry->hent_hek, HvSHAREKEYS(thing), NPathLink("hent_hek"));
1128 if (recurse >= st->min_recurse_threshold) {
1129 if (orig_thing == (SV*)PL_strtab) {
1130 /* For PL_strtab the HeVAL is used as a refcnt */
1131 ADD_SIZE(st, "shared_hek", HeKLEN(cur_entry));
1134 /* I've seen a PL_strtab HeVAL == 0xC and 0x40C etc
1135 * just running perl -Mblib -Mstrict -MDevel::Size=:all -MCarp -e 'warn perl_size()'
1136 * but it seemed like a corruption - it would change come and go with irrelevant code changes.
1137 * so we protect against that here, but I'd like to know the cause.
1139 if (PTR2UV(HeVAL(cur_entry)) > 0xFFF)
1140 sv_size(aTHX_ st, NPathLink("HeVAL"), HeVAL(cur_entry), recurse);
1141 else warn("skipped suspect HeVAL %p", HeVAL(cur_entry));
1144 cur_entry = cur_entry->hent_next;
1150 /* This direct access is arguably "naughty": */
1151 struct mro_meta *meta = HvAUX(thing)->xhv_mro_meta;
1152 #if PERL_VERSION > 13 || PERL_SUBVERSION > 8
1154 I32 count = HvAUX(thing)->xhv_name_count;
1157 HEK **names = HvAUX(thing)->xhv_name_u.xhvnameu_names;
1161 hek_size(aTHX_ st, names[count], 1, NPathLink("HvAUXelem"));
1166 hek_size(aTHX_ st, HvNAME_HEK(thing), 1, NPathLink("HvNAME_HEK"));
1169 ADD_SIZE(st, "xpvhv_aux", sizeof(struct xpvhv_aux));
1171 ADD_SIZE(st, "mro_meta", sizeof(struct mro_meta));
1172 sv_size(aTHX_ st, NPathLink("mro_nextmethod"), (SV *)meta->mro_nextmethod, TOTAL_SIZE_RECURSION);
1173 #if PERL_VERSION > 10 || (PERL_VERSION == 10 && PERL_SUBVERSION > 0)
1174 sv_size(aTHX_ st, NPathLink("isa"), (SV *)meta->isa, TOTAL_SIZE_RECURSION);
1176 #if PERL_VERSION > 10
1177 sv_size(aTHX_ st, NPathLink("mro_linear_all"), (SV *)meta->mro_linear_all, TOTAL_SIZE_RECURSION);
1178 sv_size(aTHX_ st, NPathLink("mro_linear_current"), meta->mro_linear_current, TOTAL_SIZE_RECURSION);
1180 sv_size(aTHX_ st, NPathLink("mro_linear_dfs"), (SV *)meta->mro_linear_dfs, TOTAL_SIZE_RECURSION);
1181 sv_size(aTHX_ st, NPathLink("mro_linear_c3"), (SV *)meta->mro_linear_c3, TOTAL_SIZE_RECURSION);
1186 check_new_and_strlen(st, HvNAME_get(thing), NPathLink("HvNAME"));
1192 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1193 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1195 if (st->go_yell && !st->fm_whine) {
1196 carp("Devel::Size: Calculated sizes for FMs are incomplete");
1202 /* not CvSTASH, per https://rt.cpan.org/Ticket/Display.html?id=79366 */
1203 ADD_ATTR(st, NPattr_NAME, CvGV(thing) ? GvNAME(CvGV(thing)) : "UNDEFINED", 0);
1204 sv_size(aTHX_ st, NPathLink("SvSTASH"), (SV *)SvSTASH(thing), SOME_RECURSION);
1205 sv_size(aTHX_ st, NPathLink("CvGV"), (SV *)CvGV(thing), SOME_RECURSION);
1206 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1207 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1208 if (CvISXSUB(thing)) {
1209 sv_size(aTHX_ st, NPathLink("cv_const_sv"), cv_const_sv((CV *)thing), recurse);
1211 if(1)op_size(aTHX_ CvSTART(thing), st, NPathLinkAndNode("CvSTART", "OPs")); /* XXX ? */
1212 op_size(aTHX_ CvROOT(thing), st, NPathLinkAndNode("CvROOT", "OPs"));
1217 /* Some embedded char pointers */
1218 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_top_name, NPathLink("xio_top_name"));
1219 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_fmt_name, NPathLink("xio_fmt_name"));
1220 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_bottom_name, NPathLink("xio_bottom_name"));
1221 /* Throw the GVs on the list to be walked if they're not-null */
1222 sv_size(aTHX_ st, NPathLink("xio_top_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_top_gv, recurse);
1223 sv_size(aTHX_ st, NPathLink("xio_bottom_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_bottom_gv, recurse);
1224 sv_size(aTHX_ st, NPathLink("xio_fmt_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_fmt_gv, recurse);
1226 /* Only go trotting through the IO structures if they're really
1227 trottable. If USE_PERLIO is defined we can do this. If
1228 not... we can't, so we don't even try */
1230 /* Dig into xio_ifp and xio_ofp here */
1231 warn("Devel::Size: Can't size up perlio layers yet\n");
1236 #if (PERL_VERSION < 9)
1241 if(isGV_with_GP(thing)) {
1243 hek_size(aTHX_ st, GvNAME_HEK(thing), 1, NPathLink("GvNAME_HEK"));
1245 ADD_SIZE(st, "GvNAMELEN", GvNAMELEN(thing));
1247 ADD_ATTR(st, NPattr_NAME, GvNAME_get(thing), 0);
1249 hek_size(aTHX_ st, GvFILE_HEK(thing), 1, NPathLink("GvFILE_HEK"));
1250 #elif defined(GvFILE)
1251 # if !defined(USE_ITHREADS) || (PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8))
1252 /* With ithreads, before 5.8.9, this can end up pointing to freed memory
1253 if the GV was created in an eval, as GvFILE() points to CopFILE(),
1254 and the relevant COP has been freed on scope cleanup after the eval.
1255 5.8.9 adds a binary compatible fudge that catches the vast majority
1256 of cases. 5.9.something added a proper fix, by converting the GP to
1257 use a shared hash key (properly reference counted), instead of a
1258 char * (owned by who knows? possibly no-one now) */
1259 check_new_and_strlen(st, GvFILE(thing), NPathLink("GvFILE"));
1262 /* Is there something hanging off the glob? */
1263 if (check_new(st, GvGP(thing))) {
1264 ADD_SIZE(st, "GP", sizeof(GP));
1265 sv_size(aTHX_ st, NPathLink("gp_sv"), (SV *)(GvGP(thing)->gp_sv), recurse);
1266 sv_size(aTHX_ st, NPathLink("gp_av"), (SV *)(GvGP(thing)->gp_av), recurse);
1267 sv_size(aTHX_ st, NPathLink("gp_hv"), (SV *)(GvGP(thing)->gp_hv), recurse);
1268 sv_size(aTHX_ st, NPathLink("gp_cv"), (SV *)(GvGP(thing)->gp_cv), recurse);
1269 sv_size(aTHX_ st, NPathLink("gp_egv"), (SV *)(GvGP(thing)->gp_egv), recurse);
1270 sv_size(aTHX_ st, NPathLink("gp_form"), (SV *)(GvGP(thing)->gp_form), recurse);
1272 #if (PERL_VERSION >= 9)
1276 #if PERL_VERSION <= 8
1284 if(recurse && SvROK(thing))
1285 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
1286 else if (SvIsCOW_shared_hash(thing))
1287 hek_size(aTHX_ st, SvSHARED_HEK_FROM_PV(SvPVX(thing)), 1, NPathLink("SvSHARED_HEK_FROM_PV"));
1289 ADD_SIZE(st, "SvLEN", SvLEN(thing));
1293 SvOOK_offset(thing, len);
1294 ADD_SIZE(st, "SvOOK", len);
1300 if (type >= SVt_PVMG) {
1301 magic_size(aTHX_ thing, st, NPathLink("MG"));
/* Teardown callback for a struct state that has an open node stream.
 * Writes the terminating "E <pid> <elapsed> <name>" record, then closes
 * the stream: pclose() when the stream name begins with '|' (it was
 * opened with popen() in new_state), fclose() otherwise.  Both close
 * results are checked so a broken pipe or full disk is reported. */
1308 free_memnode_state(pTHX_ struct state *st)
1310     if (st->node_stream_fh && st->node_stream_name && *st->node_stream_name) {
        /* "E" (end) record mirrors the "S" (start) record written in new_state */
1311         fprintf(st->node_stream_fh, "E %lu %f %s\n",
1312             getpid(), gettimeofday_nv()-st->start_time_nv, "unnamed");
1313         if (*st->node_stream_name == '|') {
1314             if (pclose(st->node_stream_fh))
1315                 warn("%s exited with an error status\n", st->node_stream_name);
1318         if (fclose(st->node_stream_fh))
1319             warn("Error closing %s: %s\n", st->node_stream_name, strerror(errno));
/* Allocate and initialise a fresh struct state for one sizing run.
 * - reads $Devel::Size::warn and $Devel::Size::dangle to set the
 *   go_yell/dangle_whine flags;
 * - pre-marks the immortal SVs (PL_sv_undef/no/yes/placeholder) as seen
 *   so they are never counted;
 * - with PATH_TRACKING, opens the node stream named by the SIZEME
 *   environment variable ('|'-prefixed means a pipe via popen) and
 *   installs the matching add_attr callback.
 * Caller owns the returned state; free_memnode_state is registered as
 * the cleanup callback for the stream case. */
1324 static struct state *
1330     Newxz(st, 1, struct state);
1332     st->min_recurse_threshold = TOTAL_SIZE_RECURSION;
1333     if (NULL != (warn_flag = perl_get_sv("Devel::Size::warn", FALSE))) {
1334 	st->dangle_whine = st->go_yell = SvIV(warn_flag) ? TRUE : FALSE;
1336     if (NULL != (warn_flag = perl_get_sv("Devel::Size::dangle", FALSE))) {
1337 	st->dangle_whine = SvIV(warn_flag) ? TRUE : FALSE;
1339     st->start_time_nv = gettimeofday_nv();
    /* mark the interpreter-immortal SVs as already seen */
1340     check_new(st, &PL_sv_undef);
1341     check_new(st, &PL_sv_no);
1342     check_new(st, &PL_sv_yes);
1343 #if PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 0)
1344     check_new(st, &PL_sv_placeholder);
1347 #ifdef PATH_TRACKING
1348     /* XXX quick hack */
1349     st->node_stream_name = getenv("SIZEME");
1350     if (st->node_stream_name) {
1351         if (*st->node_stream_name) {
1352             if (*st->node_stream_name == '|')
1353                 st->node_stream_fh = popen(st->node_stream_name+1, "w");
1355                 st->node_stream_fh = fopen(st->node_stream_name, "wb");
1356             if (!st->node_stream_fh)
1357                 croak("Can't open '%s' for writing: %s", st->node_stream_name, strerror(errno));
1358             if(0)setlinebuf(st->node_stream_fh); /* XXX temporary for debugging */
1359             st->add_attr_cb = np_stream_node_path_info;
            /* "S" (start) record; free_memnode_state writes the matching "E" record */
1360             fprintf(st->node_stream_fh, "S %lu %f %s\n",
1361                 getpid(), st->start_time_nv, "unnamed");
1364             st->add_attr_cb = np_dump_node_path_info;
1366     st->free_state_cb = free_memnode_state;
1372 /* XXX based on S_visit() in sv.c */
/* Walk every SV slot in every arena and size any live SV that earlier
 * traversal missed.  Live slots (type != SVTYPEMASK with a refcount)
 * are handed to sv_size under the "arena" link; freed slots are only
 * sanity-checked - they should never have been seen before. */
1374 unseen_sv_size(pTHX_ struct state *st, pPATH)
1379     dNPathNodes(1, NPathArg);
1381     NPathPushNode("unseen", NPtype_NAME);
1383     /* by this point we should have visited all the SVs
1384      * so now we'll run through all the SVs via the arenas
1385      * in order to find any that we've missed for some reason.
1386      * Once the rest of the code is finding all the SVs then any
1387      * found here will be leaks.
    /* arenas are chained through SvANY of the first slot; SvREFCNT of
     * that slot holds the arena's slot count (same trick as S_visit) */
1389     for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
1390 	const SV * const svend = &sva[SvREFCNT(sva)];
1392 	for (sv = sva + 1; sv < svend; ++sv) {
1393 	    if (SvTYPE(sv) != (svtype)SVTYPEMASK && SvREFCNT(sv)) {
1394 		sv_size(aTHX_ st, NPathLink("arena"), sv, TOTAL_SIZE_RECURSION);
1396 	    else if (check_new(st, sv)) { /* sanity check */
1398 		warn("unseen_sv_size encountered freed SV unexpectedly"); /* XXX warn uses an SV, I think */
/* Size the whole interpreter: everything reachable from %main::
 * (PL_defstash), then the remaining PL_* roots grouped under "others",
 * then PL_strtab, free SV heads, and finally a sweep of the arenas for
 * anything missed (unseen_sv_size).  Accumulates into *st. */
1405 perl_size(pTHX_ struct state *const st, pPATH)
1407     dNPathNodes(3, NPathArg);
1409     /* if(!check_new(st, interp)) return; */
1410     NPathPushNode("perl", NPtype_NAME);
1416  *      unknown <== = O/S Heap size - perl - free_malloc_space
1418     /* start with PL_defstash to get everything reachable from \%main:: */
1419     sv_size(aTHX_ st, NPathLink("PL_defstash"), (SV*)PL_defstash, TOTAL_SIZE_RECURSION);
1421     NPathPushNode("others", NPtype_NAME); /* group these (typically much smaller) items */
1422     sv_size(aTHX_ st, NPathLink("PL_defgv"), (SV*)PL_defgv, TOTAL_SIZE_RECURSION);
1423     sv_size(aTHX_ st, NPathLink("PL_incgv"), (SV*)PL_incgv, TOTAL_SIZE_RECURSION);
1424     sv_size(aTHX_ st, NPathLink("PL_rs"), (SV*)PL_rs, TOTAL_SIZE_RECURSION);
1425     sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1426     sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1427     sv_size(aTHX_ st, NPathLink("PL_errors"), (SV*)PL_errors, TOTAL_SIZE_RECURSION);
1428     sv_size(aTHX_ st, NPathLink("PL_stashcache"), (SV*)PL_stashcache, TOTAL_SIZE_RECURSION);
1429     sv_size(aTHX_ st, NPathLink("PL_patchlevel"), (SV*)PL_patchlevel, TOTAL_SIZE_RECURSION);
1430     sv_size(aTHX_ st, NPathLink("PL_apiversion"), (SV*)PL_apiversion, TOTAL_SIZE_RECURSION);
1431     sv_size(aTHX_ st, NPathLink("PL_registered_mros"), (SV*)PL_registered_mros, TOTAL_SIZE_RECURSION);
1433     sv_size(aTHX_ st, NPathLink("PL_regex_padav"), (SV*)PL_regex_padav, TOTAL_SIZE_RECURSION);
1435     sv_size(aTHX_ st, NPathLink("PL_warnhook"), (SV*)PL_warnhook, TOTAL_SIZE_RECURSION);
1436     sv_size(aTHX_ st, NPathLink("PL_diehook"), (SV*)PL_diehook, TOTAL_SIZE_RECURSION);
1437     sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1438     sv_size(aTHX_ st, NPathLink("PL_main_cv"), (SV*)PL_main_cv, TOTAL_SIZE_RECURSION);
1439     sv_size(aTHX_ st, NPathLink("PL_main_root"), (SV*)PL_main_root, TOTAL_SIZE_RECURSION);
1440     sv_size(aTHX_ st, NPathLink("PL_main_start"), (SV*)PL_main_start, TOTAL_SIZE_RECURSION);
1441     /* TODO PL_pidstatus */
1442     /* TODO PL_stashpad */
1443     /* TODO PL_compiling? COP */
1445     /* TODO stacks: cur, main, tmps, mark, scope, save */
1446     /* TODO PL_exitlist */
1447     /* TODO PL_reentrant_buffers etc */
1449     /* TODO PerlIO? PL_known_layers PL_def_layerlist PL_perlio_fd_refcnt etc */
1451     /* TODO anything missed? */
1453     /* --- by this point we should have seen all reachable SVs --- */
1455     /* in theory we shouldn't have any elements in PL_strtab that haven't been seen yet */
1456     sv_size(aTHX_ st, NPathLink("PL_strtab-unseen"), (SV*)PL_strtab, TOTAL_SIZE_RECURSION);
1458     /* unused space in sv head arenas */
1462 #  define SvARENA_CHAIN(sv)	SvANY(sv) /* XXX breaks encapsulation*/
    /* walk the free-SV-head chain, counting heads; each should be unseen */
1463         while ((p = MUTABLE_SV(SvARENA_CHAIN(p)))) {
1464             if (!check_new(st, p)) /* sanity check */
1465                 warn("Free'd SV head unexpectedly already seen");
1468         NPathPushNode("unused_sv_heads", NPtype_NAME);
1469         ADD_SIZE(st, "sv", free_heads * sizeof(SV));
1472     /* XXX iterate over bodies_by_type and crawl the free chains for each */
1474     /* iterate over all SVs to find any we've not accounted for yet */
1475     /* once the code above is visiting all SVs, any found here have been leaked */
1476     unseen_sv_size(aTHX_ st, NPathLink("unaccounted"));
1480 MODULE = Devel::Memory        PACKAGE = Devel::Memory
/* XSUB: size()/total_size() - size of one SV (total_size recurses fully;
 * the ix alias value is passed straight through as the recursion mode). */
1488         total_size = TOTAL_SIZE_RECURSION
1491     SV *thing = orig_thing;
1492     struct state *st = new_state(aTHX);
1494     /* If they passed us a reference then dereference it. This is the
1495        only way we can check the sizes of arrays and hashes */
1497         thing = SvRV(thing);
1500     sv_size(aTHX_ st, NULL, thing, ix);
1501     RETVAL = st->total_size;
/* XSUB: perl_size() - size of everything in the current interpreter */
1511     /* just the current perl interpreter */
1512     struct state *st = new_state(aTHX);
1513     st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1514     perl_size(aTHX_ st, NULL);
1515     RETVAL = st->total_size;
1525 /* the current perl interpreter plus malloc, in the context of total heap size */
1526 # ifdef _MALLOC_MALLOC_H_  /* OSX. Not sure where else mstats is available */
1530 /* some systems have the SVID2/XPG mallinfo structure and function */
1531 struct mstats ms = mstats(); /* mstats() first */
1533 struct state *st = new_state(aTHX);
1534 dNPathNodes(1, NULL);
1535 NPathPushNode("heap", NPtype_NAME);
1537 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1539 perl_size(aTHX_ st, NPathLink("perl_interp"));
1541 NPathSetNode("free_malloc_space", NPtype_NAME);
1542 ADD_SIZE(st, "bytes_free", ms.bytes_free);
1543 ADD_ATTR(st, NPattr_NOTE, "bytes_total", ms.bytes_total);
1544 ADD_ATTR(st, NPattr_NOTE, "bytes_used", ms.bytes_used);
1545 ADD_ATTR(st, NPattr_NOTE, "chunks_used", ms.chunks_used);
1546 ADD_ATTR(st, NPattr_NOTE, "chunks_free", ms.chunks_free);
1547 /* TODO get heap size from OS and add a node: unknown = heapsize - perl - ms.bytes_free */
1548 /* for now we use bytes_total as an approximation */
1549 NPathSetNode("unknown", NPtype_NAME);
1550 ADD_SIZE(st, "unknown", ms.bytes_total - st->total_size);
1555 RETVAL = st->total_size;