3 #undef NDEBUG /* XXX */
6 #define PERL_NO_GET_CONTEXT
13 /* Not yet in ppport.h */
15 # define CvISXSUB(cv) (CvXSUB(cv) ? TRUE : FALSE)
18 # define SvRV_const(rv) SvRV(rv)
21 # define SvOOK_offset(sv, len) STMT_START { len = SvIVX(sv); } STMT_END
24 # define SvIsCOW(sv) ((SvFLAGS(sv) & (SVf_FAKE | SVf_READONLY)) == \
25 (SVf_FAKE | SVf_READONLY))
27 #ifndef SvIsCOW_shared_hash
28 # define SvIsCOW_shared_hash(sv) (SvIsCOW(sv) && SvLEN(sv) == 0)
30 #ifndef SvSHARED_HEK_FROM_PV
31 # define SvSHARED_HEK_FROM_PV(pvx) \
32 ((struct hek*)(pvx - STRUCT_OFFSET(struct hek, hek_key)))
36 # define PL_opargs opargs
37 # define PL_op_name op_name
41 /* "structured exception" handling is a Microsoft extension to C and C++.
42 It's *not* C++ exception handling - C++ exception handling can't capture
43 SEGVs and suchlike, whereas this can. There's no known analagous
44 functionality on other platforms. */
46 # define TRY_TO_CATCH_SEGV __try
47 # define CAUGHT_EXCEPTION __except(EXCEPTION_EXECUTE_HANDLER)
49 # define TRY_TO_CATCH_SEGV if(1)
50 # define CAUGHT_EXCEPTION else
54 # define __attribute__(x)
57 #if 0 && defined(DEBUGGING)
58 #define dbg_printf(x) printf x
63 #define TAG /* printf( "# %s(%d)\n", __FILE__, __LINE__ ) */
66 /* The idea is to have a tree structure to store 1 bit per possible pointer
67    address. The lowest 16 bits are stored in a block of 8192 bytes.
68    The blocks are in a 256-way tree, indexed by the rest of the pointer.
69 This can cope with 32 and 64 bit pointers, and any address space layout,
70 without excessive memory needs. The assumption is that your CPU cache
71 works :-) (And that we're not going to bust it) */
74 #define LEAF_BITS (16 - BYTE_BITS)
75 #define LEAF_MASK 0x1FFF
77 typedef struct npath_node_st npath_node_t;
78 struct npath_node_st {
93 /* My hunch (not measured) is that for most architectures pointers will
94 start with 0 bits, hence the start of this array will be hot, and the
95 end unused. So put the flags next to the hot end. */
97 int min_recurse_threshold;
98 /* callback hooks and data */
99 int (*add_attr_cb)(struct state *st, npath_node_t *npath_node, UV attr_type, const char *name, UV value);
100 void (*free_state_cb)(pTHX_ struct state *st);
102 void *state_cb_data; /* free'd by free_state() after free_state_cb() call */
103    /* this stuff will be moved to state_cb_data later */
104 FILE *node_stream_fh;
105 char *node_stream_name;
108 #define ADD_SIZE(st, leafname, bytes) (NPathAddSizeCb(st, leafname, bytes) (st)->total_size += (bytes))
110 #define PATH_TRACKING
113 #define pPATH npath_node_t *NPathArg
115 /* A subtle point here is that dNPathNodes and NPathPushNode leave NP pointing
116 * to the next unused slot (though with prev already filled in)
117 * whereas NPathLink leaves NP unchanged, it just fills in the slot NP points
118 * to and passes that NP value to the function being called.
119 * seqn==0 indicates the node is new (hasn't been output yet)
121 #define dNPathNodes(nodes, prev_np) \
122 npath_node_t name_path_nodes[nodes+1]; /* +1 for NPathLink */ \
123 npath_node_t *NP = &name_path_nodes[0]; \
124 NP->seqn = NP->type = 0; NP->id = Nullch; /* safety/debug */ \
126 #define NPathPushNode(nodeid, nodetype) \
128 NP->type = nodetype; \
130 if(0)fprintf(stderr,"NPathPushNode (%p <-) %p <- [%d %s]\n", NP->prev, NP, nodetype,(char*)nodeid);\
132 NP->id = Nullch; /* safety/debug */ \
135 #define NPathSetNode(nodeid, nodetype) \
136 (NP-1)->id = nodeid; \
137 (NP-1)->type = nodetype; \
138 if(0)fprintf(stderr,"NPathSetNode (%p <-) %p <- [%d %s]\n", (NP-1)->prev, (NP-1), nodetype,(char*)nodeid);\
140 #define NPathPopNode \
143 /* dNPathUseParent points NP directly at the parents' name_path_nodes array
144 * So the function can only safely call ADD_*() but not NPathLink, unless the
145 * caller has spare nodes in its name_path_nodes.
147 #define dNPathUseParent(prev_np) npath_node_t *NP = (((prev_np+1)->prev = prev_np), prev_np+1)
149 #define NPtype_NAME 0x01
150 #define NPtype_LINK 0x02
151 #define NPtype_SV 0x03
152 #define NPtype_MAGIC 0x04
153 #define NPtype_OP 0x05
155 #define NPattr_LEAFSIZE 0x00
156 #define NPattr_NAME 0x01
157 #define NPattr_PADFAKE 0x02
158 #define NPattr_PADNAME 0x03
159 #define NPattr_PADTMP 0x04
160 #define NPattr_NOTE 0x05
162 #define _NPathLink(np, nid, ntype) (((np)->id=nid), ((np)->type=ntype), ((np)->seqn=0))
163 #define NPathLink(nid) (_NPathLink(NP, nid, NPtype_LINK), NP)
164 /* add a link and a name node to the path - a special case for op_size */
165 #define NPathLinkAndNode(nid, nid2) (_NPathLink(NP, nid, NPtype_LINK), _NPathLink(NP+1, nid2, NPtype_NAME), ((NP+1)->prev=NP), (NP+1))
166 #define NPathOpLink (NPathArg)
167 #define NPathAddSizeCb(st, name, bytes) (st->add_attr_cb && st->add_attr_cb(st, NP-1, NPattr_LEAFSIZE, (name), (bytes))),
168 #define ADD_ATTR(st, attr_type, attr_name, attr_value) (st->add_attr_cb && st->add_attr_cb(st, NP-1, attr_type, attr_name, attr_value))
172 #define NPathAddSizeCb(st, name, bytes)
173 #define pPATH void *npath_dummy /* XXX ideally remove */
174 #define dNPathNodes(nodes, prev_np) dNOOP
175 #define NPathLink(nodeid, nodetype) NULL
176 #define NPathOpLink NULL
177 #define ADD_ATTR(st, attr_type, attr_name, attr_value) NOOP
179 #endif /* PATH_TRACKING */
186 static const char *svtypenames[SVt_LAST] = {
188 "NULL", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVBM", "PVLV", "PVAV", "PVHV", "PVCV", "PVGV", "PVFM", "PVIO",
189 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
190 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
191 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
192 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
193 #elif PERL_VERSION < 13
194 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
196 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
201 np_print_node_name(FILE *fp, npath_node_t *npath_node)
203 char buf[1024]; /* XXX */
205 switch (npath_node->type) {
206 case NPtype_SV: { /* id is pointer to the SV sv_size was called on */
207 const SV *sv = (SV*)npath_node->id;
208 int type = SvTYPE(sv);
209 char *typename = (type == SVt_IV && SvROK(sv)) ? "RV" : svtypenames[type];
210 fprintf(fp, "SV(%s)", typename);
211 switch(type) { /* add some useful details */
212 case SVt_PVAV: fprintf(fp, " fill=%d/%ld", av_len((AV*)sv), AvMAX((AV*)sv)); break;
213 case SVt_PVHV: fprintf(fp, " fill=%ld/%ld", HvFILL((HV*)sv), HvMAX((HV*)sv)); break;
217 case NPtype_OP: { /* id is pointer to the OP op_size was called on */
218 const OP *op = (OP*)npath_node->id;
219 fprintf(fp, "OP(%s)", OP_NAME(op));
222 case NPtype_MAGIC: { /* id is pointer to the MAGIC struct */
223 MAGIC *magic_pointer = (MAGIC*)npath_node->id;
224 /* XXX it would be nice if we could reuse mg_names.c [sigh] */
225 fprintf(fp, "MAGIC(%c)", magic_pointer->mg_type ? magic_pointer->mg_type : '0');
229 fprintf(fp, "%s", npath_node->id);
232 fprintf(fp, "%s", npath_node->id);
234 default: /* assume id is a string pointer */
235 fprintf(fp, "UNKNOWN(%d,%p)", npath_node->type, npath_node->id);
242 np_dump_indent(int depth) {
244 fprintf(stderr, ": ");
248 np_walk_new_nodes(struct state *st,
249 npath_node_t *npath_node,
250 npath_node_t *npath_node_deeper,
251 int (*cb)(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper))
253 if (npath_node->seqn) /* node already output */
256 if (npath_node->prev) {
257 np_walk_new_nodes(st, npath_node->prev, npath_node, cb); /* recurse */
258 npath_node->depth = npath_node->prev->depth + 1;
260 else npath_node->depth = 0;
261 npath_node->seqn = ++st->seqn;
264 if (cb(st, npath_node, npath_node_deeper)) {
265 /* ignore this node */
266 assert(npath_node->prev);
267 assert(npath_node->depth);
268 assert(npath_node_deeper);
270 npath_node->seqn = --st->seqn;
271 npath_node_deeper->prev = npath_node->prev;
279 np_dump_formatted_node(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
280 if (0 && npath_node->type == NPtype_LINK)
282 np_dump_indent(npath_node->depth);
283 np_print_node_name(stderr, npath_node);
284 if (npath_node->type == NPtype_LINK)
285 fprintf(stderr, "->"); /* cosmetic */
286 fprintf(stderr, "\t\t[#%ld @%u] ", npath_node->seqn, npath_node->depth);
287 fprintf(stderr, "\n");
292 np_dump_node_path_info(struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
294 if (attr_type == NPattr_LEAFSIZE && !attr_value)
295 return 0; /* ignore zero sized leaf items */
296 np_walk_new_nodes(st, npath_node, NULL, np_dump_formatted_node);
297 np_dump_indent(npath_node->depth+1);
299 case NPattr_LEAFSIZE:
300 fprintf(stderr, "+%ld %s =%ld", attr_value, attr_name, attr_value+st->total_size);
303 fprintf(stderr, "~NAMED('%s') %lu", attr_name, attr_value);
306 fprintf(stderr, "~note %s %lu", attr_name, attr_value);
311 fprintf(stderr, "~pad%lu %s %lu", attr_type, attr_name, attr_value);
314 fprintf(stderr, "~??? %s %lu", attr_name, attr_value);
317 fprintf(stderr, "\n");
322 np_stream_formatted_node(struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
323 fprintf(st->node_stream_fh, "-%u %lu %u ",
324 npath_node->type, npath_node->seqn, (unsigned)npath_node->depth
326 np_print_node_name(st->node_stream_fh, npath_node);
327 fprintf(st->node_stream_fh, "\n");
332 np_stream_node_path_info(struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
334 if (!attr_type && !attr_value)
335 return 0; /* ignore zero sized leaf items */
336 np_walk_new_nodes(st, npath_node, NULL, np_stream_formatted_node);
337 if (attr_type) { /* Attribute type, name and value */
338 fprintf(st->node_stream_fh, "%lu %lu ", attr_type, npath_node->seqn);
340 else { /* Leaf name and memory size */
341 fprintf(st->node_stream_fh, "L %lu ", npath_node->seqn);
343 fprintf(st->node_stream_fh, "%lu %s\n", attr_value, attr_name);
348 #endif /* PATH_TRACKING */
352 Checks to see if thing is in the bitstring.
353 Returns true or false, and
354 notes thing in the segmented bitstring.
357 check_new(struct state *st, const void *const p) {
358 unsigned int bits = 8 * sizeof(void*);
359 const size_t raw_p = PTR2nat(p);
360 /* This effectively rotates the value right by the number of low always-0
361 bits in an aligned pointer. The assmption is that most (if not all)
362 pointers are aligned, and these will be in the same chain of nodes
363 (and hence hot in the cache) but we can still deal with any unaligned
365 const size_t cooked_p
366 = (raw_p >> ALIGN_BITS) | (raw_p << (bits - ALIGN_BITS));
367 const U8 this_bit = 1 << (cooked_p & 0x7);
371 void **tv_p = (void **) (st->tracking);
373 if (NULL == p) return FALSE;
375 const char c = *(const char *)p;
378 if (st->dangle_whine)
379 warn( "Devel::Size: Encountered invalid pointer: %p\n", p );
385 /* bits now 24 (32 bit pointers) or 56 (64 bit pointers) */
387 /* First level is always present. */
389 i = (unsigned int)((cooked_p >> bits) & 0xFF);
391 Newxz(tv_p[i], 256, void *);
392 tv_p = (void **)(tv_p[i]);
394 } while (bits > LEAF_BITS + BYTE_BITS);
395 /* bits now 16 always */
396 #if !defined(MULTIPLICITY) || PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8)
397 /* 5.8.8 and early have an assert() macro that uses Perl_croak, hence needs
398 a my_perl under multiplicity */
401 leaf_p = (U8 **)tv_p;
402 i = (unsigned int)((cooked_p >> bits) & 0xFF);
404 Newxz(leaf_p[i], 1 << LEAF_BITS, U8);
409 i = (unsigned int)((cooked_p >> BYTE_BITS) & LEAF_MASK);
411 if(leaf[i] & this_bit)
419 free_tracking_at(void **tv, int level)
427 free_tracking_at((void **) tv[i], level);
441 free_state(struct state *st)
443 const int top_level = (sizeof(void *) * 8 - LEAF_BITS - BYTE_BITS) / 8;
444 if (st->free_state_cb)
445 st->free_state_cb(st);
446 if (st->state_cb_data)
447 Safefree(st->state_cb_data);
448 free_tracking_at((void **)st->tracking, top_level);
452 /* For now, this is somewhat a compatibility bodge until the plan comes
453 together for fine grained recursion control. total_size() would recurse into
454 hash and array members, whereas sv_size() would not. However, sv_size() is
455 called with CvSTASH() of a CV, which means that if it (also) starts to
456 recurse fully, then the size of any CV now becomes the size of the entire
457 symbol table reachable from it, and potentially the entire symbol table, if
458 any subroutine makes a reference to a global (such as %SIG). The historical
459 implementation of total_size() didn't report "everything", and changing the
460 only available size to "everything" doesn't feel at all useful. */
462 #define NO_RECURSION 0
463 #define SOME_RECURSION 1
464 #define TOTAL_SIZE_RECURSION 2
466 static void sv_size(pTHX_ struct state *, pPATH, const SV *const, const int recurse);
482 , OPc_CONDOP /* 12 */
491 cc_opclass(const OP * const o)
497 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
499 if (o->op_type == OP_SASSIGN)
500 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
503 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_AELEMFAST)
507 if ((o->op_type == OP_TRANS)) {
511 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
543 #ifdef OA_PVOP_OR_SVOP
544 case OA_PVOP_OR_SVOP: TAG;
546 * Character translations (tr///) are usually a PVOP, keeping a
547 * pointer to a table of shorts used to look up translations.
548 * Under utf8, however, a simple table isn't practical; instead,
549 * the OP is an SVOP, and the SV is a reference to a swash
550 * (i.e., an RV pointing to an HV).
552 return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF))
553 ? OPc_SVOP : OPc_PVOP;
562 case OA_BASEOP_OR_UNOP: TAG;
564 * UNI(OP_foo) in toke.c returns token UNI or FUNC1 depending on
565 * whether parens were seen. perly.y uses OPf_SPECIAL to
566 * signal whether a BASEOP had empty parens or none.
567 * Some other UNOPs are created later, though, so the best
568 * test is OPf_KIDS, which is set in newUNOP.
570 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
572 case OA_FILESTATOP: TAG;
574 * The file stat OPs are created via UNI(OP_foo) in toke.c but use
575 * the OPf_REF flag to distinguish between OP types instead of the
576 * usual OPf_SPECIAL flag. As usual, if OPf_KIDS is set, then we
577 * return OPc_UNOP so that walkoptree can find our children. If
578 * OPf_KIDS is not set then we check OPf_REF. Without OPf_REF set
579 * (no argument to the operator) it's an OP; with OPf_REF set it's
580 * an SVOP (and op_sv is the GV for the filehandle argument).
582 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
584 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
586 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
588 case OA_LOOPEXOP: TAG;
590 * next, last, redo, dump and goto use OPf_SPECIAL to indicate that a
591 * label was omitted (in which case it's a BASEOP) or else a term was
592 * seen. In this last case, all except goto are definitely PVOP but
593 * goto is either a PVOP (with an ordinary constant label), an UNOP
594 * with OPf_STACKED (with a non-constant non-sub) or an UNOP for
595 * OP_REFGEN (with goto &sub) in which case OPf_STACKED also seems to
598 if (o->op_flags & OPf_STACKED)
600 else if (o->op_flags & OPf_SPECIAL)
610 warn("Devel::Size: Can't determine class of operator %s, assuming BASEOP\n",
611 PL_op_name[o->op_type]);
617 /* Figure out how much magic is attached to the SV and return the
620 magic_size(pTHX_ const SV * const thing, struct state *st, pPATH) {
621 dNPathNodes(1, NPathArg);
622 MAGIC *magic_pointer = SvMAGIC(thing);
627 if (!SvMAGICAL(thing)) {
629 warn("Ignoring suspect magic on this SV\n");
635 /* push a dummy node for NPathSetNode to update inside the while loop */
636 NPathPushNode("dummy", NPtype_NAME);
638 /* Have we seen the magic pointer? (NULL has always been seen before) */
639 while (check_new(st, magic_pointer)) {
641 NPathSetNode(magic_pointer, NPtype_MAGIC);
643 ADD_SIZE(st, "mg", sizeof(MAGIC));
644 /* magic vtables aren't freed when magic is freed, so don't count them.
645 (They are static structures. Anything that assumes otherwise is buggy.)
650 /* XXX only chase mg_obj if mg->mg_flags & MGf_REFCOUNTED ? */
651 sv_size(aTHX_ st, NPathLink("mg_obj"), magic_pointer->mg_obj, TOTAL_SIZE_RECURSION);
652 if (magic_pointer->mg_len == HEf_SVKEY) {
653 sv_size(aTHX_ st, NPathLink("mg_ptr"), (SV *)magic_pointer->mg_ptr, TOTAL_SIZE_RECURSION);
655 #if defined(PERL_MAGIC_utf8) && defined (PERL_MAGIC_UTF8_CACHESIZE)
656 else if (magic_pointer->mg_type == PERL_MAGIC_utf8) {
657 if (check_new(st, magic_pointer->mg_ptr)) {
658 ADD_SIZE(st, "PERL_MAGIC_utf8", PERL_MAGIC_UTF8_CACHESIZE * 2 * sizeof(STRLEN));
662 /* XXX also handle mg->mg_type == PERL_MAGIC_utf8 ? */
663 else if (magic_pointer->mg_len > 0) {
664 if(0)do_magic_dump(0, Perl_debug_log, magic_pointer, 0, 0, FALSE, 0);
665 if (check_new(st, magic_pointer->mg_ptr)) {
666 ADD_SIZE(st, "mg_len", magic_pointer->mg_len);
670 /* Get the next in the chain */
671 magic_pointer = magic_pointer->mg_moremagic;
674 if (st->dangle_whine)
675 warn( "Devel::Size: Encountered bad magic at: %p\n", magic_pointer );
681 check_new_and_strlen(struct state *st, const char *const p, pPATH) {
682 dNPathNodes(1, NPathArg->prev);
683 if(check_new(st, p)) {
684 NPathPushNode(NPathArg->id, NPtype_NAME);
685 ADD_SIZE(st, NPathArg->id, 1 + strlen(p));
690 regex_size(const REGEXP * const baseregex, struct state *st, pPATH) {
691 dNPathNodes(1, NPathArg);
692 if(!check_new(st, baseregex))
694 NPathPushNode("regex_size", NPtype_NAME);
695 ADD_SIZE(st, "REGEXP", sizeof(REGEXP));
696 #if (PERL_VERSION < 11)
697 /* Note the size of the paren offset thing */
698 ADD_SIZE(st, "nparens", sizeof(I32) * baseregex->nparens * 2);
699 ADD_SIZE(st, "precomp", strlen(baseregex->precomp));
701 ADD_SIZE(st, "regexp", sizeof(struct regexp));
702 ADD_SIZE(st, "nparens", sizeof(I32) * SvANY(baseregex)->nparens * 2);
703 /*ADD_SIZE(st, strlen(SvANY(baseregex)->subbeg));*/
705 if (st->go_yell && !st->regex_whine) {
706 carp("Devel::Size: Calculated sizes for compiled regexes are incompatible, and probably always will be");
712 op_size(pTHX_ const OP * const baseop, struct state *st, pPATH)
714 /* op_size recurses to follow the chain of opcodes. For the node path we
715 * don't want the chain to be 'nested' in the path so we use dNPathUseParent().
716 * Also, to avoid a link-to-a-link the caller should use NPathLinkAndNode()
717 * instead of NPathLink().
719 dNPathUseParent(NPathArg);
723 if(!check_new(st, baseop))
726 op_size(aTHX_ baseop->op_next, st, NPathOpLink);
728 switch (cc_opclass(baseop)) {
729 case OPc_BASEOP: TAG;
730 ADD_SIZE(st, "op", sizeof(struct op));
733 ADD_SIZE(st, "unop", sizeof(struct unop));
734 op_size(aTHX_ ((UNOP *)baseop)->op_first, st, NPathOpLink);
737 ADD_SIZE(st, "binop", sizeof(struct binop));
738 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
739 op_size(aTHX_ ((BINOP *)baseop)->op_last, st, NPathOpLink);
742 ADD_SIZE(st, "logop", sizeof(struct logop));
743 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
744 op_size(aTHX_ ((LOGOP *)baseop)->op_other, st, NPathOpLink);
747 case OPc_CONDOP: TAG;
748 ADD_SIZE(st, "condop", sizeof(struct condop));
749 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
750 op_size(aTHX_ ((CONDOP *)baseop)->op_true, st, NPathOpLink);
751 op_size(aTHX_ ((CONDOP *)baseop)->op_false, st, NPathOpLink);
754 case OPc_LISTOP: TAG;
755 ADD_SIZE(st, "listop", sizeof(struct listop));
756 op_size(aTHX_ ((LISTOP *)baseop)->op_first, st, NPathOpLink);
757 op_size(aTHX_ ((LISTOP *)baseop)->op_last, st, NPathOpLink);
760 ADD_SIZE(st, "pmop", sizeof(struct pmop));
761 op_size(aTHX_ ((PMOP *)baseop)->op_first, st, NPathOpLink);
762 op_size(aTHX_ ((PMOP *)baseop)->op_last, st, NPathOpLink);
763 #if PERL_VERSION < 9 || (PERL_VERSION == 9 && PERL_SUBVERSION < 5)
764 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplroot, st, NPathOpLink);
765 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplstart, st, NPathOpLink);
767 /* This is defined away in perl 5.8.x, but it is in there for
770 regex_size(PM_GETRE((PMOP *)baseop), st, NPathLink("PM_GETRE"));
772 regex_size(((PMOP *)baseop)->op_pmregexp, st, NPathLink("op_pmregexp"));
776 ADD_SIZE(st, "svop", sizeof(struct svop));
777 if (!(baseop->op_type == OP_AELEMFAST
778 && baseop->op_flags & OPf_SPECIAL)) {
779 /* not an OP_PADAV replacement */
780 sv_size(aTHX_ st, NPathLink("SVOP"), ((SVOP *)baseop)->op_sv, SOME_RECURSION);
785 ADD_SIZE(st, "padop", sizeof(struct padop));
790 ADD_SIZE(st, "gvop", sizeof(struct gvop));
791 sv_size(aTHX_ st, NPathLink("GVOP"), ((GVOP *)baseop)->op_gv, SOME_RECURSION);
795 check_new_and_strlen(st, ((PVOP *)baseop)->op_pv, NPathLink("op_pv"));
798 ADD_SIZE(st, "loop", sizeof(struct loop));
799 op_size(aTHX_ ((LOOP *)baseop)->op_first, st, NPathOpLink);
800 op_size(aTHX_ ((LOOP *)baseop)->op_last, st, NPathOpLink);
801 op_size(aTHX_ ((LOOP *)baseop)->op_redoop, st, NPathOpLink);
802 op_size(aTHX_ ((LOOP *)baseop)->op_nextop, st, NPathOpLink);
803 op_size(aTHX_ ((LOOP *)baseop)->op_lastop, st, NPathOpLink);
808 basecop = (COP *)baseop;
809 ADD_SIZE(st, "cop", sizeof(struct cop));
811 /* Change 33656 by nicholas@mouse-mill on 2008/04/07 11:29:51
812 Eliminate cop_label from struct cop by storing a label as the first
813 entry in the hints hash. Most statements don't have labels, so this
814 will save memory. Not sure how much.
815 The check below will be incorrect fail on bleadperls
816 before 5.11 @33656, but later than 5.10, producing slightly too
817 small memory sizes on these Perls. */
818 #if (PERL_VERSION < 11)
819 check_new_and_strlen(st, basecop->cop_label, NPathLink("cop_label"));
822 check_new_and_strlen(st, basecop->cop_file, NPathLink("cop_file"));
823 check_new_and_strlen(st, basecop->cop_stashpv, NPathLink("cop_stashpv"));
825 if (SvREFCNT(basecop->cop_stash) == 1) /* XXX hack? */
826 sv_size(aTHX_ st, NPathLink("cop_stash"), (SV *)basecop->cop_stash, SOME_RECURSION);
827 sv_size(aTHX_ st, NPathLink("cop_filegv"), (SV *)basecop->cop_filegv, SOME_RECURSION);
837 if (st->dangle_whine)
838 warn( "Devel::Size: Encountered dangling pointer in opcode at: %p\n", baseop );
843 hek_size(pTHX_ struct state *st, HEK *hek, U32 shared, pPATH)
845 dNPathNodes(1, NPathArg);
847 /* Hash keys can be shared. Have we seen this before? */
848 if (!check_new(st, hek))
850 NPathPushNode("hek", NPtype_NAME);
851 ADD_SIZE(st, "hek_len", HEK_BASESIZE + hek->hek_len
853 + 1 /* No hash key flags prior to 5.8.0 */
859 #if PERL_VERSION < 10
860 ADD_SIZE(st, "he", sizeof(struct he));
862 ADD_SIZE(st, "shared_he", STRUCT_OFFSET(struct shared_he, shared_he_hek));
868 #if PERL_VERSION < 8 || PERL_SUBVERSION < 9
873 # define MAYBE_PURIFY(normal, pure) (pure)
874 # define MAYBE_OFFSET(struct_name, member) 0
876 # define MAYBE_PURIFY(normal, pure) (normal)
877 # define MAYBE_OFFSET(struct_name, member) STRUCT_OFFSET(struct_name, member)
880 const U8 body_sizes[SVt_LAST] = {
883 MAYBE_PURIFY(sizeof(IV), sizeof(XPVIV)), /* SVt_IV */
884 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
885 sizeof(XRV), /* SVt_RV */
886 sizeof(XPV), /* SVt_PV */
887 sizeof(XPVIV), /* SVt_PVIV */
888 sizeof(XPVNV), /* SVt_PVNV */
889 sizeof(XPVMG), /* SVt_PVMG */
890 sizeof(XPVBM), /* SVt_PVBM */
891 sizeof(XPVLV), /* SVt_PVLV */
892 sizeof(XPVAV), /* SVt_PVAV */
893 sizeof(XPVHV), /* SVt_PVHV */
894 sizeof(XPVCV), /* SVt_PVCV */
895 sizeof(XPVGV), /* SVt_PVGV */
896 sizeof(XPVFM), /* SVt_PVFM */
897 sizeof(XPVIO) /* SVt_PVIO */
898 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
902 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
904 MAYBE_PURIFY(sizeof(xpv_allocated), sizeof(XPV)), /* SVt_PV */
905 MAYBE_PURIFY(sizeof(xpviv_allocated), sizeof(XPVIV)),/* SVt_PVIV */
906 sizeof(XPVNV), /* SVt_PVNV */
907 sizeof(XPVMG), /* SVt_PVMG */
908 sizeof(XPVGV), /* SVt_PVGV */
909 sizeof(XPVLV), /* SVt_PVLV */
910 MAYBE_PURIFY(sizeof(xpvav_allocated), sizeof(XPVAV)),/* SVt_PVAV */
911 MAYBE_PURIFY(sizeof(xpvhv_allocated), sizeof(XPVHV)),/* SVt_PVHV */
912 MAYBE_PURIFY(sizeof(xpvcv_allocated), sizeof(XPVCV)),/* SVt_PVCV */
913 MAYBE_PURIFY(sizeof(xpvfm_allocated), sizeof(XPVFM)),/* SVt_PVFM */
914 sizeof(XPVIO), /* SVt_PVIO */
915 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
919 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
921 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
922 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
923 sizeof(XPVNV), /* SVt_PVNV */
924 sizeof(XPVMG), /* SVt_PVMG */
925 sizeof(XPVGV), /* SVt_PVGV */
926 sizeof(XPVLV), /* SVt_PVLV */
927 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
928 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
929 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
930 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
931 sizeof(XPVIO) /* SVt_PVIO */
932 #elif PERL_VERSION < 13
936 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
937 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
938 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
939 sizeof(XPVNV), /* SVt_PVNV */
940 sizeof(XPVMG), /* SVt_PVMG */
941 sizeof(regexp) - MAYBE_OFFSET(regexp, xpv_cur), /* SVt_REGEXP */
942 sizeof(XPVGV), /* SVt_PVGV */
943 sizeof(XPVLV), /* SVt_PVLV */
944 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
945 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
946 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
947 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
948 sizeof(XPVIO) /* SVt_PVIO */
953 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
954 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
955 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
956 sizeof(XPVNV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVNV */
957 sizeof(XPVMG), /* SVt_PVMG */
958 sizeof(regexp), /* SVt_REGEXP */
959 sizeof(XPVGV), /* SVt_PVGV */
960 sizeof(XPVLV), /* SVt_PVLV */
961 sizeof(XPVAV), /* SVt_PVAV */
962 sizeof(XPVHV), /* SVt_PVHV */
963 sizeof(XPVCV), /* SVt_PVCV */
964 sizeof(XPVFM), /* SVt_PVFM */
965 sizeof(XPVIO) /* SVt_PVIO */
970 /* based on Perl_do_dump_pad() - wraps sv_size and adds ADD_ATTR calls for the pad names */
972 padlist_size(pTHX_ struct state *const st, pPATH, PADLIST *padlist,
975 dNPathUseParent(NPathArg);
982 if( 0 && !check_new(st, padlist))
985 pad_name = MUTABLE_AV(*av_fetch(MUTABLE_AV(padlist), 0, FALSE));
986 pname = AvARRAY(pad_name);
988 for (ix = 1; ix <= AvFILLp(pad_name); ix++) {
989 const SV *namesv = pname[ix];
990 if (namesv && namesv == &PL_sv_undef) {
994 /* SvFAKE: On a pad name SV, that slot in the frame AV is a REFCNT'ed reference to a lexical from "outside" */
996 ADD_ATTR(st, NPattr_PADFAKE, SvPVX_const(namesv), ix);
998 ADD_ATTR(st, NPattr_PADNAME, SvPVX_const(namesv), ix);
1001 ADD_ATTR(st, NPattr_PADTMP, "SVs_PADTMP", ix);
1005 sv_size(aTHX_ st, NPathArg, (SV*)padlist, recurse);
1010 sv_size(pTHX_ struct state *const st, pPATH, const SV * const orig_thing,
1011 const int recurse) {
1012 const SV *thing = orig_thing;
1013 dNPathNodes(3, NPathArg);
1016 if(!check_new(st, orig_thing))
1019 type = SvTYPE(thing);
1020 if (type > SVt_LAST) {
1021 warn("Devel::Size: Unknown variable type: %d encountered\n", type);
1024 NPathPushNode(thing, NPtype_SV);
1025 ADD_SIZE(st, "sv", sizeof(SV) + body_sizes[type]);
1028 #if (PERL_VERSION < 11)
1029 /* Is it a reference? */
1034 if(recurse && SvROK(thing)) /* XXX maybe don't follow weakrefs */
1035 sv_size(aTHX_ st, (SvWEAKREF(thing) ? NPathLink("weakRV") : NPathLink("RV")), SvRV_const(thing), recurse);
1039 /* Is there anything in the array? */
1040 if (AvMAX(thing) != -1) {
1041 /* an array with 10 slots has AvMax() set to 9 - te 2007-04-22 */
1042 ADD_SIZE(st, "av_max", sizeof(SV *) * (AvMAX(thing) + 1));
1043 dbg_printf(("total_size: %li AvMAX: %li av_len: $i\n", st->total_size, AvMAX(thing), av_len((AV*)thing)));
1045 if (recurse >= st->min_recurse_threshold) {
1046 SSize_t i = AvFILLp(thing) + 1;
1049 sv_size(aTHX_ st, NPathLink("AVelem"), AvARRAY(thing)[i], recurse);
1052 /* Add in the bits on the other side of the beginning */
1054 dbg_printf(("total_size %li, sizeof(SV *) %li, AvARRAY(thing) %li, AvALLOC(thing)%li , sizeof(ptr) %li \n",
1055 st->total_size, sizeof(SV*), AvARRAY(thing), AvALLOC(thing), sizeof( thing )));
1057 /* under Perl 5.8.8 64bit threading, AvARRAY(thing) was a pointer while AvALLOC was 0,
1058      resulting in grossly overstated sizes for arrays. Technically, this shouldn't happen... */
1059 if (AvALLOC(thing) != 0) {
1060 ADD_SIZE(st, "AvALLOC", (sizeof(SV *) * (AvARRAY(thing) - AvALLOC(thing))));
1062 #if (PERL_VERSION < 9)
1063 /* Is there something hanging off the arylen element?
1064 Post 5.9.something this is stored in magic, so will be found there,
1065 and Perl_av_arylen_p() takes a non-const AV*, hence compilers rightly
1066 complain about AvARYLEN() passing thing to it. */
1067 sv_size(aTHX_ st, NPathLink("ARYLEN"), AvARYLEN(thing), recurse);
1072 /* Now the array of buckets */
1073 ADD_SIZE(st, "hv_max", (sizeof(HE *) * (HvMAX(thing) + 1)));
1074 if (HvENAME(thing)) {
1075 ADD_ATTR(st, NPattr_NAME, HvENAME(thing), 0);
1077 /* Now walk the bucket chain */
1078 if (HvARRAY(thing)) {
1081 for (cur_bucket = 0; cur_bucket <= HvMAX(thing); cur_bucket++) {
1082 cur_entry = *(HvARRAY(thing) + cur_bucket);
1084 /* XXX a HE should probably be a node so the keys and values are seen as pairs */
1085 ADD_SIZE(st, "he", sizeof(HE));
1086 hek_size(aTHX_ st, cur_entry->hent_hek, HvSHAREKEYS(thing), NPathLink("hent_hek"));
1087 if (recurse >= st->min_recurse_threshold) {
1088 /* I've seen a PL_strtab HeVAL == 0xC and 0x40C etc
1089 * just running perl -Mblib -Mstrict -MDevel::Size=:all -MCarp -e 'warn perl_size()'
1090      * but it seemed like a corruption - it would come and go with irrelevant code changes.
1091 * so we protect against that here, but I'd like to know the cause.
1093 if (PTR2UV(HeVAL(cur_entry)) > 0xFFF)
1094 sv_size(aTHX_ st, NPathLink("HeVAL"), HeVAL(cur_entry), recurse);
1095 else warn("skipped suspect HeVAL %p", HeVAL(cur_entry));
1097 cur_entry = cur_entry->hent_next;
1103 /* This direct access is arguably "naughty": */
1104 struct mro_meta *meta = HvAUX(thing)->xhv_mro_meta;
1105 #if PERL_VERSION > 13 || PERL_SUBVERSION > 8
1107 I32 count = HvAUX(thing)->xhv_name_count;
1110 HEK **names = HvAUX(thing)->xhv_name_u.xhvnameu_names;
1114 hek_size(aTHX_ st, names[count], 1, NPathLink("HvAUXelem"));
1119 hek_size(aTHX_ st, HvNAME_HEK(thing), 1, NPathLink("HvNAME_HEK"));
1122 ADD_SIZE(st, "xpvhv_aux", sizeof(struct xpvhv_aux));
1124 ADD_SIZE(st, "mro_meta", sizeof(struct mro_meta));
1125 sv_size(aTHX_ st, NPathLink("mro_nextmethod"), (SV *)meta->mro_nextmethod, TOTAL_SIZE_RECURSION);
1126 #if PERL_VERSION > 10 || (PERL_VERSION == 10 && PERL_SUBVERSION > 0)
1127 sv_size(aTHX_ st, NPathLink("isa"), (SV *)meta->isa, TOTAL_SIZE_RECURSION);
1129 #if PERL_VERSION > 10
1130 sv_size(aTHX_ st, NPathLink("mro_linear_all"), (SV *)meta->mro_linear_all, TOTAL_SIZE_RECURSION);
1131 sv_size(aTHX_ st, NPathLink("mro_linear_current"), meta->mro_linear_current, TOTAL_SIZE_RECURSION);
1133 sv_size(aTHX_ st, NPathLink("mro_linear_dfs"), (SV *)meta->mro_linear_dfs, TOTAL_SIZE_RECURSION);
1134 sv_size(aTHX_ st, NPathLink("mro_linear_c3"), (SV *)meta->mro_linear_c3, TOTAL_SIZE_RECURSION);
1139 check_new_and_strlen(st, HvNAME_get(thing), NPathLink("HvNAME"));
/* PVFM (format): size its pad and enclosing-scope chain. A format's full
 * footprint cannot currently be computed - warn once below. */
1145 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1146 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), recurse);
/* Only whine once per state (fm_whine), and only if warnings are enabled. */
1148 if (st->go_yell && !st->fm_whine) {
1149 carp("Devel::Size: Calculated sizes for FMs are incomplete");
/* PVCV: stash/GV links get shallow recursion to avoid dragging in whole
 * packages; the pad and closure chain follow the caller's recurse level. */
1155 /* not CvSTASH, per https://rt.cpan.org/Ticket/Display.html?id=79366 */
1156 sv_size(aTHX_ st, NPathLink("SvSTASH"), (SV *)SvSTASH(thing), SOME_RECURSION);
1157 sv_size(aTHX_ st, NPathLink("CvGV"), (SV *)CvGV(thing), SOME_RECURSION);
1158 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1159 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), recurse);
1160 if (CvISXSUB(thing)) {
/* XSUBs have no op tree; only a possible constant-sub return value. */
1161 sv_size(aTHX_ st, NPathLink("cv_const_sv"), cv_const_sv((CV *)thing), recurse);
/* Pure-perl sub: walk both op chains. CvSTART usually lies inside the
 * CvROOT tree, so op_size's seen-tracking avoids double counting. */
1163 if(1)op_size(aTHX_ CvSTART(thing), st, NPathLinkAndNode("CvSTART", "OPs")); /* XXX ? */
1164 op_size(aTHX_ CvROOT(thing), st, NPathLinkAndNode("CvROOT", "OPs"));
/* PVIO: size the embedded format-name strings and the associated GVs. */
1169 /* Some embedded char pointers */
1170 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_top_name, NPathLink("xio_top_name"));
1171 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_fmt_name, NPathLink("xio_fmt_name"));
1172 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_bottom_name, NPathLink("xio_bottom_name"));
1173 /* Throw the GVs on the list to be walked if they're not-null */
1174 sv_size(aTHX_ st, NPathLink("xio_top_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_top_gv, recurse);
1175 sv_size(aTHX_ st, NPathLink("xio_bottom_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_bottom_gv, recurse);
1176 sv_size(aTHX_ st, NPathLink("xio_fmt_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_fmt_gv, recurse);
1178 /* Only go trotting through the IO structures if they're really
1179 trottable. If USE_PERLIO is defined we can do this. If
1180 not... we can't, so we don't even try */
/* TODO: size the PerlIO layer stack hanging off xio_ifp/xio_ofp. */
1182 /* Dig into xio_ifp and xio_ofp here */
1183 warn("Devel::Size: Can't size up perlio layers yet\n");
/* PVGV: size the glob's name, file attribution, and the GP slots. */
1188 #if (PERL_VERSION < 9)
/* 5.9+: a GV only has a GP (and hence a name/file) when it is a "real"
 * glob, not e.g. a sub ref stored directly in a stash slot. */
1193 if(isGV_with_GP(thing)) {
1195 hek_size(aTHX_ st, GvNAME_HEK(thing), 1, NPathLink("GvNAME_HEK"));
/* Older perls store the name as a plain buffer; count its length. */
1197 ADD_SIZE(st, "GvNAMELEN", GvNAMELEN(thing));
1199 ADD_ATTR(st, NPattr_NAME, GvNAME_get(thing), 0);
/* Where this GV was first created: shared HEK on newer perls... */
1201 hek_size(aTHX_ st, GvFILE_HEK(thing), 1, NPathLink("GvFILE_HEK"));
1202 #elif defined(GvFILE)
1203 # if !defined(USE_ITHREADS) || (PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8))
1204 /* With ithreads, before 5.8.9, this can end up pointing to freed memory
1205 if the GV was created in an eval, as GvFILE() points to CopFILE(),
1206 and the relevant COP has been freed on scope cleanup after the eval.
1207 5.8.9 adds a binary compatible fudge that catches the vast majority
1208 of cases. 5.9.something added a proper fix, by converting the GP to
1209 use a shared hash key (properly reference counted), instead of a
1210 char * (owned by who knows? possibly no-one now) */
1211 check_new_and_strlen(st, GvFILE(thing), NPathLink("GvFILE"));
1214 /* Is there something hanging off the glob? */
1215 if (check_new(st, GvGP(thing))) {
/* First visit to this GP: count the struct itself plus each slot.
 * check_new() also prevents re-counting a GP shared between globs. */
1216 ADD_SIZE(st, "GP", sizeof(GP));
1217 sv_size(aTHX_ st, NPathLink("gp_sv"), (SV *)(GvGP(thing)->gp_sv), recurse);
1218 sv_size(aTHX_ st, NPathLink("gp_av"), (SV *)(GvGP(thing)->gp_av), recurse);
1219 sv_size(aTHX_ st, NPathLink("gp_hv"), (SV *)(GvGP(thing)->gp_hv), recurse);
1220 sv_size(aTHX_ st, NPathLink("gp_cv"), (SV *)(GvGP(thing)->gp_cv), recurse);
1221 sv_size(aTHX_ st, NPathLink("gp_egv"), (SV *)(GvGP(thing)->gp_egv), recurse);
1222 sv_size(aTHX_ st, NPathLink("gp_form"), (SV *)(GvGP(thing)->gp_form), recurse);
1224 #if (PERL_VERSION >= 9)
1228 #if PERL_VERSION <= 8
/* Common scalar tail: follow a reference's target when recursing; a
 * copy-on-write shared-hash scalar instead shares a HEK with PL_strtab,
 * so size the HEK rather than a private string buffer. */
1236 if(recurse && SvROK(thing))
1237 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
1238 else if (SvIsCOW_shared_hash(thing))
1239 hek_size(aTHX_ st, SvSHARED_HEK_FROM_PV(SvPVX(thing)), 1, NPathLink("SvSHARED_HEK_FROM_PV"));
/* Otherwise the scalar owns its PV buffer: count the allocated length. */
1241 ADD_SIZE(st, "SvLEN", SvLEN(thing));
/* SvOOK: bytes chopped off the front of the buffer still belong to the
 * allocation; SvOOK_offset() retrieves that hidden prefix length. */
1245 SvOOK_offset(thing, len);
1246 ADD_SIZE(st, "SvOOK", len);
/* Anything PVMG or above can carry magic; walk the magic chain. */
1252 if (type >= SVt_PVMG) {
1253 magic_size(aTHX_ thing, st, NPathLink("MG"));
/* State-destruction callback: close the node stream opened by new_state()
 * when the MEMVIEW environment variable was set. A name starting with '|'
 * was opened with popen() and must be closed with pclose(); anything else
 * is a regular file closed with fclose(). Failures are warnings only -
 * the state is being torn down regardless. */
1260 free_memnode_state(pTHX_ struct state *st)
1262 if (st->node_stream_fh && st->node_stream_name) {
1263 if (*st->node_stream_name == '|') {
/* Pipe: pclose() returns the child's non-zero exit status on failure. */
1264 if (pclose(st->node_stream_fh))
1265 warn("%s exited with an error status\n", st->node_stream_name);
/* Plain file: fclose() flushes; report errno if that fails. */
1268 if (fclose(st->node_stream_fh))
1269 warn("Error closing %s: %s\n", st->node_stream_name, strerror(errno));
1274 static struct state *
/* Allocate and initialise a fresh traversal state:
 *  - read the $Devel::Size::warn and $Devel::Size::dangle control SVs,
 *  - pre-mark the immortal SVs as "seen" so they are never counted,
 *  - under PATH_TRACKING, honour the MEMVIEW env var by opening a node
 *    stream (pipe or file) and selecting the matching attribute callback.
 * Caller owns the returned state; free_memnode_state() is installed as
 * its cleanup callback. */
1280 Newxz(st, 1, struct state);
1282 st->min_recurse_threshold = TOTAL_SIZE_RECURSION;
/* $Devel::Size::warn enables both general yelling and dangle warnings. */
1283 if (NULL != (warn_flag = perl_get_sv("Devel::Size::warn", FALSE))) {
1284 st->dangle_whine = st->go_yell = SvIV(warn_flag) ? TRUE : FALSE;
/* $Devel::Size::dangle can independently toggle dangle warnings. */
1286 if (NULL != (warn_flag = perl_get_sv("Devel::Size::dangle", FALSE))) {
1287 st->dangle_whine = SvIV(warn_flag) ? TRUE : FALSE;
/* The immortals are interpreter-global; mark them seen up front so no
 * traversal ever attributes their size to a data structure. */
1289 check_new(st, &PL_sv_undef);
1290 check_new(st, &PL_sv_no);
1291 check_new(st, &PL_sv_yes);
1292 #if PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 0)
1293 check_new(st, &PL_sv_placeholder);
1295 #ifdef PATH_TRACKING
1296 if (getenv("MEMVIEW") && *getenv("MEMVIEW")) { /* XXX quick hack */
/* MEMVIEW beginning with '|' means "pipe node output to this command";
 * otherwise it names a file to (over)write. */
1297 st->node_stream_name = getenv("MEMVIEW");
1298 if (*st->node_stream_name == '|')
1299 st->node_stream_fh = popen(st->node_stream_name+1, "w");
1301 st->node_stream_fh = fopen(st->node_stream_name, "wb");
1302 if (!st->node_stream_fh)
1303 croak("Can't open '%s' for writing: %s", st->node_stream_name, strerror(errno));
1304 setlinebuf(st->node_stream_fh); /* XXX temporary for debugging */
1305 st->add_attr_cb = np_stream_node_path_info;
/* No stream requested: dump node paths to stdout instead. */
1308 st->add_attr_cb = np_dump_node_path_info;
1309 st->free_state_cb = free_memnode_state;
1314 /* XXX based on S_visit() in sv.c */
/* Sweep every SV arena and size any live SV the main traversal missed.
 * Once the rest of the code visits everything reachable, whatever turns
 * up here is effectively leaked (or at least unaccounted-for). */
1316 unseen_sv_size(pTHX_ struct state *st, pPATH)
1321 dNPathNodes(1, NPathArg);
1323 NPathPushNode("unseen", NPtype_NAME);
1325 /* by this point we should have visited all the SVs
1326 * so now we'll run through all the SVs via the arenas
1327 * in order to find any that we've missed for some reason.
1328 * Once the rest of the code is finding all the SVs then any
1329 * found here will be leaks.
/* Arena layout (per S_visit): slot 0 of each arena links to the next
 * arena via SvANY, and its SvREFCNT holds the arena's element count. */
1331 for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
1332 const SV * const svend = &sva[SvREFCNT(sva)];
1334 for (sv = sva + 1; sv < svend; ++sv) {
/* Live SV (not on the free list, refcount > 0): size it. sv_size's own
 * seen-tracking makes this a no-op for anything already counted. */
1335 if (SvTYPE(sv) != (svtype)SVTYPEMASK && SvREFCNT(sv)) {
1336 sv_size(aTHX_ st, NPathLink("arena"), sv, TOTAL_SIZE_RECURSION);
/* Freed SV heads should have been claimed by the free-list walk. */
1338 else if (check_new(st, sv)) { /* sanity check */
1339 warn("unseen_sv_size encountered freed SV unexpectedly");
1346 MODULE = Devel::Size PACKAGE = Devel::Size
1354 total_size = TOTAL_SIZE_RECURSION
1357 SV *thing = orig_thing;
1358 struct state *st = new_state(aTHX);
1360 /* If they passed us a reference then dereference it. This is the
1361 only way we can check the sizes of arrays and hashes */
1363 thing = SvRV(thing);
/* ix is the xsubpp alias number; per the ALIAS section above, calling
 * total_size() passes TOTAL_SIZE_RECURSION here, enabling deep recursion. */
1366 sv_size(aTHX_ st, NULL, thing, ix);
1367 RETVAL = st->total_size;
1377 struct state *st = new_state(aTHX);
1378 dNPathNodes(3, NULL);
/* For whole-interpreter sizing we want to descend into everything,
 * relying solely on the seen-pointer tracking to avoid double counting. */
1380 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1382 NPathPushNode("perl_size", NPtype_NAME); /* provide a root node */
1383 /* start with PL_defstash to get everything reachable from \%main:: */
1384 sv_size(aTHX_ st, NPathLink("PL_defstash"), (SV*)PL_defstash, TOTAL_SIZE_RECURSION);
1386 NPathPushNode("others", NPtype_NAME); /* group these (typically much smaller) items */
/* Interpreter-global SVs not (necessarily) reachable from %main::. */
1387 sv_size(aTHX_ st, NPathLink("PL_defgv"), (SV*)PL_defgv, TOTAL_SIZE_RECURSION);
1388 sv_size(aTHX_ st, NPathLink("PL_incgv"), (SV*)PL_incgv, TOTAL_SIZE_RECURSION);
1389 sv_size(aTHX_ st, NPathLink("PL_rs"), (SV*)PL_rs, TOTAL_SIZE_RECURSION);
1390 sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1391 sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1392 sv_size(aTHX_ st, NPathLink("PL_errors"), (SV*)PL_errors, TOTAL_SIZE_RECURSION);
1393 sv_size(aTHX_ st, NPathLink("PL_stashcache"), (SV*)PL_stashcache, TOTAL_SIZE_RECURSION);
1394 sv_size(aTHX_ st, NPathLink("PL_patchlevel"), (SV*)PL_patchlevel, TOTAL_SIZE_RECURSION);
1395 sv_size(aTHX_ st, NPathLink("PL_apiversion"), (SV*)PL_apiversion, TOTAL_SIZE_RECURSION);
1396 sv_size(aTHX_ st, NPathLink("PL_registered_mros"), (SV*)PL_registered_mros, TOTAL_SIZE_RECURSION);
1398 sv_size(aTHX_ st, NPathLink("PL_regex_padav"), (SV*)PL_regex_padav, TOTAL_SIZE_RECURSION);
1400 sv_size(aTHX_ st, NPathLink("PL_warnhook"), (SV*)PL_warnhook, TOTAL_SIZE_RECURSION);
1401 sv_size(aTHX_ st, NPathLink("PL_diehook"), (SV*)PL_diehook, TOTAL_SIZE_RECURSION);
1402 sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1403 sv_size(aTHX_ st, NPathLink("PL_main_cv"), (SV*)PL_main_cv, TOTAL_SIZE_RECURSION);
1404 sv_size(aTHX_ st, NPathLink("PL_main_root"), (SV*)PL_main_root, TOTAL_SIZE_RECURSION);
1405 sv_size(aTHX_ st, NPathLink("PL_main_start"), (SV*)PL_main_start, TOTAL_SIZE_RECURSION);
1406 /* TODO PL_pidstatus */
1407 /* TODO PL_stashpad */
1408 /* TODO PL_compiling? COP */
1410 /* TODO stacks: cur, main, tmps, mark, scope, save */
1411 /* TODO PL_exitlist */
1412 /* TODO PL_reentrant_buffers etc */
1414 /* TODO PerlIO? PL_known_layers PL_def_layerlist PL_perlio_fd_refcnt etc */
1416 /* TODO anything missed? */
1418 /* --- by this point we should have seen all reachable SVs --- */
/* PL_strtab is counted last: its HEKs are shared with the COW scalars
 * already seen, so only genuinely new entries add to the total here. */
1420 /* in theory we shouldn't have any elements in PL_strtab that haven't been seen yet */
1421 sv_size(aTHX_ st, NPathLink("PL_strtab"), (SV*)PL_strtab, TOTAL_SIZE_RECURSION);
1423 /* unused space in sv head arenas */
1427 # define SvARENA_CHAIN(sv) SvANY(sv) /* XXX */
/* Walk the free-SV-head list; each free head still occupies sizeof(SV)
 * in its arena, so count them under "unused_sv_heads". */
1428 while ((p = MUTABLE_SV(SvARENA_CHAIN(p)))) {
1429 if (!check_new(st, p)) /* sanity check */
1430 warn("Free'd SV head unexpectedly already seen");
1433 NPathPushNode("unused_sv_heads", NPtype_NAME);
1434 ADD_SIZE(st, "sv", free_heads * sizeof(SV));
1437 /* XXX iterate over bodies_by_type and crawl the free chains for each */
1439 /* iterate over all SVs to find any we've not accounted for yet */
1440 /* once the code above is visiting all SVs, any found here have been leaked */
1441 unseen_sv_size(aTHX_ st, NPathLink("unaccounted"));
/* Where the malloc implementation provides mstats() (e.g. perl's own
 * malloc), report allocator-level free/used space as extra attributes. */
1444 struct mstats ms = mstats();
1445 NPathSetNode("unused malloc space", NPtype_NAME);
1446 ADD_SIZE(st, "bytes_free", ms.bytes_free);
1447 ADD_ATTR(st, NPattr_NOTE, "bytes_total", ms.bytes_total);
1448 ADD_ATTR(st, NPattr_NOTE, "bytes_used", ms.bytes_used);
1449 ADD_ATTR(st, NPattr_NOTE, "chunks_used", ms.chunks_used);
1450 ADD_ATTR(st, NPattr_NOTE, "chunks_free", ms.chunks_free);
1453 RETVAL = st->total_size;