5 * Refactor this to split out D:M code from Devel::Size code.
6 * Start migrating Devel::Size's Size.xs towards the new code.
10 #undef NDEBUG /* XXX */
13 #define PERL_NO_GET_CONTEXT
19 #define DPPP_PL_parser_NO_DUMMY
20 #define NEED_PL_parser
23 #include "refcounted_he.h"
25 /* Not yet in ppport.h */
27 # define CvISXSUB(cv) (CvXSUB(cv) ? TRUE : FALSE)
30 # define SvRV_const(rv) SvRV(rv)
33 # define SvOOK_offset(sv, len) STMT_START { len = SvIVX(sv); } STMT_END
36 # define SvIsCOW(sv) ((SvFLAGS(sv) & (SVf_FAKE | SVf_READONLY)) == \
37 (SVf_FAKE | SVf_READONLY))
39 #ifndef SvIsCOW_shared_hash
40 # define SvIsCOW_shared_hash(sv) (SvIsCOW(sv) && SvLEN(sv) == 0)
42 #ifndef SvSHARED_HEK_FROM_PV
43 # define SvSHARED_HEK_FROM_PV(pvx) \
44 ((struct hek*)(pvx - STRUCT_OFFSET(struct hek, hek_key)))
47 #define MUTABLE_AV(p) ((AV*)p)
50 #define MUTABLE_SV(p) ((SV*)p)
54 # define PL_opargs opargs
55 # define PL_op_name op_name
59 /* "structured exception" handling is a Microsoft extension to C and C++.
60 It's *not* C++ exception handling - C++ exception handling can't capture
61 SEGVs and suchlike, whereas this can. There's no known analogous
62 functionality on other platforms. */
64 # define TRY_TO_CATCH_SEGV __try
65 # define CAUGHT_EXCEPTION __except(EXCEPTION_EXECUTE_HANDLER)
67 # define TRY_TO_CATCH_SEGV if(1)
68 # define CAUGHT_EXCEPTION else
72 # define __attribute__(x)
75 #if 0 && defined(DEBUGGING)
76 #define dbg_printf(x) printf x
81 #define TAG /* printf( "# %s(%d)\n", __FILE__, __LINE__ ) */
84 /* The idea is to have a tree structure to store 1 bit per possible pointer
85 address. The lowest 16 bits are stored in a block of 8192 bytes.
86 The blocks are in a 256-way tree, indexed by the rest of the pointer.
87 This can cope with 32 and 64 bit pointers, and any address space layout,
88 without excessive memory needs. The assumption is that your CPU cache
89 works :-) (And that we're not going to bust it) */
92 #define LEAF_BITS (16 - BYTE_BITS)
93 #define LEAF_MASK 0x1FFF
95 typedef struct npath_node_st npath_node_t;
96 struct npath_node_st {
111 /* My hunch (not measured) is that for most architectures pointers will
112 start with 0 bits, hence the start of this array will be hot, and the
113 end unused. So put the flags next to the hot end. */
116 int min_recurse_threshold;
117 /* callback hooks and data */
118 void (*add_attr_cb)(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *name, UV value);
119 void (*free_state_cb)(pTHX_ struct state *st);
120 void *state_cb_data; /* free'd by free_state() after free_state_cb() call */
121 /* this stuff will be moved to state_cb_data later */
123 FILE *node_stream_fh;
124 char *node_stream_name;
127 #define ADD_SIZE(st, leafname, bytes) \
129 NPathAddSizeCb(st, leafname, bytes); \
130 (st)->total_size += (bytes); \
134 #define PATH_TRACKING
137 #define pPATH npath_node_t *NPathArg
139 /* A subtle point here is that dNPathNodes and NPathPushNode leave NP pointing
140 * to the next unused slot (though with prev already filled in)
141 * whereas NPathLink leaves NP unchanged, it just fills in the slot NP points
142 * to and passes that NP value to the function being called.
143 * seqn==0 indicates the node is new (hasn't been output yet)
145 #define dNPathNodes(nodes, prev_np) \
146 npath_node_t name_path_nodes[nodes+1]; /* +1 for NPathLink */ \
147 npath_node_t *NP = &name_path_nodes[0]; \
148 NP->seqn = NP->type = 0; NP->id = Nullch; /* safety/debug */ \
150 #define NPathPushNode(nodeid, nodetype) \
152 NP->type = nodetype; \
154 if(0)fprintf(stderr,"NPathPushNode (%p <-) %p <- [%d %s]\n", NP->prev, NP, nodetype,(char*)nodeid);\
156 NP->id = Nullch; /* safety/debug */ \
159 #define NPathSetNode(nodeid, nodetype) \
160 (NP-1)->id = nodeid; \
161 (NP-1)->type = nodetype; \
162 if(0)fprintf(stderr,"NPathSetNode (%p <-) %p <- [%d %s]\n", (NP-1)->prev, (NP-1), nodetype,(char*)nodeid);\
164 #define NPathPopNode \
167 /* dNPathUseParent points NP directly at the parents' name_path_nodes array
168 * So the function can only safely call ADD_*() but not NPathLink, unless the
169 * caller has spare nodes in its name_path_nodes.
171 #define dNPathUseParent(prev_np) npath_node_t *NP = (((prev_np+1)->prev = prev_np), prev_np+1)
173 #define NPtype_NAME 0x01
174 #define NPtype_LINK 0x02
175 #define NPtype_SV 0x03
176 #define NPtype_MAGIC 0x04
177 #define NPtype_OP 0x05
179 /* XXX these should probably be generalized into flag bits */
180 #define NPattr_LEAFSIZE 0x00
181 #define NPattr_NAME 0x01
182 #define NPattr_PADFAKE 0x02
183 #define NPattr_PADNAME 0x03
184 #define NPattr_PADTMP 0x04
185 #define NPattr_NOTE 0x05
187 #define _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, np) \
189 if (st->add_attr_cb) { \
190 st->add_attr_cb(aTHX_ st, np, attr_type, attr_name, attr_value); \
194 #define ADD_ATTR(st, attr_type, attr_name, attr_value) \
195 _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, NP-1)
197 #define ADD_LINK_ATTR(st, attr_type, attr_name, attr_value) \
199 if (st->add_attr_cb) assert(NP->seqn); \
200 _ADD_ATTR_NP(st, attr_type, attr_name, attr_value, NP); \
203 #define _NPathLink(np, nid, ntype) (((np)->id=nid), ((np)->type=ntype), ((np)->seqn=0))
204 #define NPathLink(nid) (_NPathLink(NP, nid, NPtype_LINK), NP)
205 /* add a link and a name node to the path - a special case for op_size */
206 #define NPathLinkAndNode(nid, nid2) (_NPathLink(NP, nid, NPtype_LINK), _NPathLink(NP+1, nid2, NPtype_NAME), ((NP+1)->prev=NP), (NP+1))
207 #define NPathOpLink (NPathArg)
208 #define NPathAddSizeCb(st, name, bytes) \
210 if (st->add_attr_cb) { \
211 st->add_attr_cb(aTHX_ st, NP-1, NPattr_LEAFSIZE, (name), (bytes)); \
217 #define NPathAddSizeCb(st, name, bytes)
218 #define pPATH void *npath_dummy /* XXX ideally remove */
219 #define dNPathNodes(nodes, prev_np) dNOOP
220 #define NPathLink(nodeid, nodetype) NULL
221 #define NPathOpLink NULL
222 #define ADD_ATTR(st, attr_type, attr_name, attr_value) NOOP
224 #endif /* PATH_TRACKING */
231 static const char *svtypenames[SVt_LAST] = {
233 "NULL", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVBM", "PVLV", "PVAV", "PVHV", "PVCV", "PVGV", "PVFM", "PVIO",
234 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
235 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
236 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
237 "NULL", "BIND", "IV", "NV", "RV", "PV", "PVIV", "PVNV", "PVMG", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
238 #elif PERL_VERSION < 13
239 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
241 "NULL", "BIND", "IV", "NV", "PV", "PVIV", "PVNV", "PVMG", "REGEXP", "PVGV", "PVLV", "PVAV", "PVHV", "PVCV", "PVFM", "PVIO",
246 gettimeofday_nv(pTHX_)
248 #ifdef HAS_GETTIMEOFDAY
250 gettimeofday(&when, (struct timezone *) 0);
251 return when.tv_sec + (when.tv_usec / 1000000.0);
255 (*u2time)(aTHX_ &time_of_day);
256 return time_of_day[0] + (time_of_day[1] / 1000000.0);
264 np_print_node_name(pTHX_ FILE *fp, npath_node_t *npath_node)
266 switch (npath_node->type) {
267 case NPtype_SV: { /* id is pointer to the SV sv_size was called on */
268 const SV *sv = (SV*)npath_node->id;
269 int type = SvTYPE(sv);
270 const char *typename = (type == SVt_IV && SvROK(sv)) ? "RV" : svtypenames[type];
271 fprintf(fp, "SV(%s)", typename);
272 switch(type) { /* add some useful details */
273 case SVt_PVAV: fprintf(fp, " fill=%ld/%ld", av_len((AV*)sv), AvMAX((AV*)sv)); break;
274 case SVt_PVHV: fprintf(fp, " fill=%ld/%ld", HvFILL((HV*)sv), HvMAX((HV*)sv)); break;
278 case NPtype_OP: { /* id is pointer to the OP op_size was called on */
279 const OP *op = (OP*)npath_node->id;
280 fprintf(fp, "OP(%s)", OP_NAME(op));
283 case NPtype_MAGIC: { /* id is pointer to the MAGIC struct */
284 MAGIC *magic_pointer = (MAGIC*)npath_node->id;
285 /* XXX it would be nice if we could reuse mg_names.c [sigh] */
286 fprintf(fp, "MAGIC(%c)", magic_pointer->mg_type ? magic_pointer->mg_type : '0');
290 fprintf(fp, "%s", (const char *)npath_node->id);
293 fprintf(fp, "%s", (const char *)npath_node->id);
295 default: /* assume id is a string pointer */
296 fprintf(fp, "UNKNOWN(%d,%p)", npath_node->type, npath_node->id);
303 np_dump_indent(pTHX_ int depth) {
305 fprintf(stderr, ": ");
309 np_walk_new_nodes(pTHX_ struct state *st,
310 npath_node_t *npath_node,
311 npath_node_t *npath_node_deeper,
312 int (*cb)(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper))
314 if (npath_node->seqn) /* node already output */
317 if (npath_node->prev) {
318 np_walk_new_nodes(aTHX_ st, npath_node->prev, npath_node, cb); /* recurse */
319 npath_node->depth = npath_node->prev->depth + 1;
321 else npath_node->depth = 0;
322 npath_node->seqn = ++st->seqn;
325 if (cb(aTHX_ st, npath_node, npath_node_deeper)) {
326 /* ignore this node */
327 assert(npath_node->prev);
328 assert(npath_node->depth);
329 assert(npath_node_deeper);
331 npath_node->seqn = --st->seqn;
332 npath_node_deeper->prev = npath_node->prev;
340 np_dump_formatted_node(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
342 PERL_UNUSED_ARG(npath_node_deeper);
343 if (0 && npath_node->type == NPtype_LINK)
345 np_dump_indent(aTHX_ npath_node->depth);
346 np_print_node_name(aTHX_ stderr, npath_node);
347 if (npath_node->type == NPtype_LINK)
348 fprintf(stderr, "->"); /* cosmetic */
349 fprintf(stderr, "\t\t[#%ld @%u] ", npath_node->seqn, npath_node->depth);
350 fprintf(stderr, "\n");
355 np_dump_node_path_info(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
357 if (attr_type == NPattr_LEAFSIZE && !attr_value)
358 return; /* ignore zero sized leaf items */
359 np_walk_new_nodes(aTHX_ st, npath_node, NULL, np_dump_formatted_node);
360 np_dump_indent(aTHX_ npath_node->depth+1);
362 case NPattr_LEAFSIZE:
363 fprintf(stderr, "+%ld %s =%ld", attr_value, attr_name, attr_value+st->total_size);
366 fprintf(stderr, "~NAMED('%s') %lu", attr_name, attr_value);
369 fprintf(stderr, "~note %s %lu", attr_name, attr_value);
374 fprintf(stderr, "~pad%lu %s %lu", attr_type, attr_name, attr_value);
377 fprintf(stderr, "~??? %s %lu", attr_name, attr_value);
380 fprintf(stderr, "\n");
384 np_stream_formatted_node(pTHX_ struct state *st, npath_node_t *npath_node, npath_node_t *npath_node_deeper) {
385 PERL_UNUSED_ARG(npath_node_deeper);
386 fprintf(st->node_stream_fh, "-%u %lu %u ",
387 npath_node->type, npath_node->seqn, (unsigned)npath_node->depth
389 np_print_node_name(aTHX_ st->node_stream_fh, npath_node);
390 fprintf(st->node_stream_fh, "\n");
395 np_stream_node_path_info(pTHX_ struct state *st, npath_node_t *npath_node, UV attr_type, const char *attr_name, UV attr_value)
397 if (!attr_type && !attr_value)
398 return; /* ignore zero sized leaf items */
399 np_walk_new_nodes(aTHX_ st, npath_node, NULL, np_stream_formatted_node);
400 if (attr_type) { /* Attribute type, name and value */
401 fprintf(st->node_stream_fh, "%lu %lu ", attr_type, npath_node->seqn);
403 else { /* Leaf name and memory size */
404 fprintf(st->node_stream_fh, "L %lu ", npath_node->seqn);
406 fprintf(st->node_stream_fh, "%lu %s\n", attr_value, attr_name);
410 #endif /* PATH_TRACKING */
414 Checks to see if thing is in the bitstring.
415 Returns true or false, and
416 notes thing in the segmented bitstring.
419 check_new(struct state *st, const void *const p) {
420 unsigned int bits = 8 * sizeof(void*);
421 const size_t raw_p = PTR2nat(p);
422 /* This effectively rotates the value right by the number of low always-0
423 bits in an aligned pointer. The assmption is that most (if not all)
424 pointers are aligned, and these will be in the same chain of nodes
425 (and hence hot in the cache) but we can still deal with any unaligned
427 const size_t cooked_p
428 = (raw_p >> ALIGN_BITS) | (raw_p << (bits - ALIGN_BITS));
429 const U8 this_bit = 1 << (cooked_p & 0x7);
433 void **tv_p = (void **) (st->tracking);
435 if (NULL == p) return FALSE;
437 const char c = *(const char *)p;
441 if (st->dangle_whine)
442 warn( "Devel::Size: Encountered invalid pointer: %p\n", p );
448 /* bits now 24 (32 bit pointers) or 56 (64 bit pointers) */
450 /* First level is always present. */
452 i = (unsigned int)((cooked_p >> bits) & 0xFF);
454 Newxz(tv_p[i], 256, void *);
455 tv_p = (void **)(tv_p[i]);
457 } while (bits > LEAF_BITS + BYTE_BITS);
458 /* bits now 16 always */
459 #if !defined(MULTIPLICITY) || PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8)
460 /* 5.8.8 and early have an assert() macro that uses Perl_croak, hence needs
461 a my_perl under multiplicity */
464 leaf_p = (U8 **)tv_p;
465 i = (unsigned int)((cooked_p >> bits) & 0xFF);
467 Newxz(leaf_p[i], 1 << LEAF_BITS, U8);
472 i = (unsigned int)((cooked_p >> BYTE_BITS) & LEAF_MASK);
474 if(leaf[i] & this_bit)
482 free_tracking_at(void **tv, int level)
490 free_tracking_at((void **) tv[i], level);
504 free_state(pTHX_ struct state *st)
506 const int top_level = (sizeof(void *) * 8 - LEAF_BITS - BYTE_BITS) / 8;
507 if (st->free_state_cb)
508 st->free_state_cb(aTHX_ st);
509 if (st->state_cb_data)
510 Safefree(st->state_cb_data);
511 free_tracking_at((void **)st->tracking, top_level);
515 /* For now, this is somewhat a compatibility bodge until the plan comes
516 together for fine grained recursion control. total_size() would recurse into
517 hash and array members, whereas sv_size() would not. However, sv_size() is
518 called with CvSTASH() of a CV, which means that if it (also) starts to
519 recurse fully, then the size of any CV now becomes the size of the entire
520 symbol table reachable from it, and potentially the entire symbol table, if
521 any subroutine makes a reference to a global (such as %SIG). The historical
522 implementation of total_size() didn't report "everything", and changing the
523 only available size to "everything" doesn't feel at all useful. */
525 #define NO_RECURSION 0
526 #define SOME_RECURSION 1
527 #define TOTAL_SIZE_RECURSION 2
529 static bool sv_size(pTHX_ struct state *, pPATH, const SV *const, const int recurse);
545 , OPc_CONDOP /* 12 */
554 cc_opclass(const OP * const o)
560 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
562 if (o->op_type == OP_SASSIGN)
563 return ((o->op_private & OPpASSIGN_BACKWARDS) ? OPc_UNOP : OPc_BINOP);
566 if (o->op_type == OP_GV || o->op_type == OP_GVSV || o->op_type == OP_AELEMFAST)
570 if ((o->op_type == OP_TRANS)) {
574 switch (PL_opargs[o->op_type] & OA_CLASS_MASK) {
606 #ifdef OA_PVOP_OR_SVOP
607 case OA_PVOP_OR_SVOP: TAG;
609 * Character translations (tr///) are usually a PVOP, keeping a
610 * pointer to a table of shorts used to look up translations.
611 * Under utf8, however, a simple table isn't practical; instead,
612 * the OP is an SVOP, and the SV is a reference to a swash
613 * (i.e., an RV pointing to an HV).
615 return (o->op_private & (OPpTRANS_TO_UTF|OPpTRANS_FROM_UTF))
616 ? OPc_SVOP : OPc_PVOP;
625 case OA_BASEOP_OR_UNOP: TAG;
627 * UNI(OP_foo) in toke.c returns token UNI or FUNC1 depending on
628 * whether parens were seen. perly.y uses OPf_SPECIAL to
629 * signal whether a BASEOP had empty parens or none.
630 * Some other UNOPs are created later, though, so the best
631 * test is OPf_KIDS, which is set in newUNOP.
633 return (o->op_flags & OPf_KIDS) ? OPc_UNOP : OPc_BASEOP;
635 case OA_FILESTATOP: TAG;
637 * The file stat OPs are created via UNI(OP_foo) in toke.c but use
638 * the OPf_REF flag to distinguish between OP types instead of the
639 * usual OPf_SPECIAL flag. As usual, if OPf_KIDS is set, then we
640 * return OPc_UNOP so that walkoptree can find our children. If
641 * OPf_KIDS is not set then we check OPf_REF. Without OPf_REF set
642 * (no argument to the operator) it's an OP; with OPf_REF set it's
643 * an SVOP (and op_sv is the GV for the filehandle argument).
645 return ((o->op_flags & OPf_KIDS) ? OPc_UNOP :
647 (o->op_flags & OPf_REF) ? OPc_PADOP : OPc_BASEOP);
649 (o->op_flags & OPf_REF) ? OPc_SVOP : OPc_BASEOP);
651 case OA_LOOPEXOP: TAG;
653 * next, last, redo, dump and goto use OPf_SPECIAL to indicate that a
654 * label was omitted (in which case it's a BASEOP) or else a term was
655 * seen. In this last case, all except goto are definitely PVOP but
656 * goto is either a PVOP (with an ordinary constant label), an UNOP
657 * with OPf_STACKED (with a non-constant non-sub) or an UNOP for
658 * OP_REFGEN (with goto &sub) in which case OPf_STACKED also seems to
661 if (o->op_flags & OPf_STACKED)
663 else if (o->op_flags & OPf_SPECIAL)
673 warn("Devel::Size: Can't determine class of operator %s, assuming BASEOP\n",
674 PL_op_name[o->op_type]);
680 /* Figure out how much magic is attached to the SV and return the
683 magic_size(pTHX_ const SV * const thing, struct state *st, pPATH) {
684 dNPathNodes(1, NPathArg);
685 MAGIC *magic_pointer = SvMAGIC(thing); /* caller ensures thing is SvMAGICAL */
687 /* push a dummy node for NPathSetNode to update inside the while loop */
688 NPathPushNode("dummy", NPtype_NAME);
690 /* Have we seen the magic pointer? (NULL has always been seen before) */
691 while (check_new(st, magic_pointer)) {
693 NPathSetNode(magic_pointer, NPtype_MAGIC);
695 ADD_SIZE(st, "mg", sizeof(MAGIC));
696 /* magic vtables aren't freed when magic is freed, so don't count them.
697 (They are static structures. Anything that assumes otherwise is buggy.)
702 /* XXX only chase mg_obj if mg->mg_flags & MGf_REFCOUNTED ? */
703 sv_size(aTHX_ st, NPathLink("mg_obj"), magic_pointer->mg_obj, TOTAL_SIZE_RECURSION);
704 if (magic_pointer->mg_len == HEf_SVKEY) {
705 sv_size(aTHX_ st, NPathLink("mg_ptr"), (SV *)magic_pointer->mg_ptr, TOTAL_SIZE_RECURSION);
707 #if defined(PERL_MAGIC_utf8) && defined (PERL_MAGIC_UTF8_CACHESIZE)
708 else if (magic_pointer->mg_type == PERL_MAGIC_utf8) {
709 if (check_new(st, magic_pointer->mg_ptr)) {
710 ADD_SIZE(st, "PERL_MAGIC_utf8", PERL_MAGIC_UTF8_CACHESIZE * 2 * sizeof(STRLEN));
714 /* XXX also handle mg->mg_type == PERL_MAGIC_utf8 ? */
715 else if (magic_pointer->mg_len > 0) {
716 if(0)do_magic_dump(0, Perl_debug_log, magic_pointer, 0, 0, FALSE, 0);
717 if (check_new(st, magic_pointer->mg_ptr)) {
718 ADD_SIZE(st, "mg_len", magic_pointer->mg_len);
722 /* Get the next in the chain */
723 magic_pointer = magic_pointer->mg_moremagic;
726 if (st->dangle_whine)
727 warn( "Devel::Size: Encountered bad magic at: %p\n", magic_pointer );
732 #define check_new_and_strlen(st, p, ppath) S_check_new_and_strlen(aTHX_ st, p, ppath)
734 S_check_new_and_strlen(pTHX_ struct state *st, const char *const p, pPATH) {
735 dNPathNodes(1, NPathArg->prev);
736 if(check_new(st, p)) {
737 NPathPushNode(NPathArg->id, NPtype_NAME);
738 ADD_SIZE(st, NPathArg->id, 1 + strlen(p));
743 regex_size(pTHX_ const REGEXP * const baseregex, struct state *st, pPATH) {
744 dNPathNodes(1, NPathArg);
745 if(!check_new(st, baseregex))
747 NPathPushNode("regex_size", NPtype_NAME);
748 ADD_SIZE(st, "REGEXP", sizeof(REGEXP));
749 #if (PERL_VERSION < 11)
750 /* Note the size of the paren offset thing */
751 ADD_SIZE(st, "nparens", sizeof(I32) * baseregex->nparens * 2);
752 ADD_SIZE(st, "precomp", strlen(baseregex->precomp));
754 ADD_SIZE(st, "regexp", sizeof(struct regexp));
755 ADD_SIZE(st, "nparens", sizeof(I32) * SvANY(baseregex)->nparens * 2);
756 /*ADD_SIZE(st, strlen(SvANY(baseregex)->subbeg));*/
758 if (st->go_yell && !st->regex_whine) {
759 carp("Devel::Size: Calculated sizes for compiled regexes are incomplete");
765 hek_size(pTHX_ struct state *st, HEK *hek, U32 shared, pPATH)
767 dNPathNodes(1, NPathArg);
769 /* Hash keys can be shared. Have we seen this before? */
770 if (!check_new(st, hek))
772 NPathPushNode("hek", NPtype_NAME);
773 ADD_SIZE(st, "hek_len", HEK_BASESIZE + hek->hek_len
775 + 1 /* No hash key flags prior to 5.8.0 */
781 #if PERL_VERSION < 10
782 ADD_SIZE(st, "he", sizeof(struct he));
784 ADD_SIZE(st, "shared_he", STRUCT_OFFSET(struct shared_he, shared_he_hek));
790 #if (PERL_BCDVERSION >= 0x5009004)
792 refcounted_he_size(pTHX_ struct state *st, struct refcounted_he *he, pPATH)
794 dNPathNodes(1, NPathArg);
795 if (!check_new(st, he))
797 NPathPushNode("refcounted_he_size", NPtype_NAME);
798 ADD_SIZE(st, "refcounted_he", sizeof(struct refcounted_he));
801 ADD_SIZE(st, "refcounted_he_data", NPtype_NAME);
803 hek_size(aTHX_ st, he->refcounted_he_hek, 0, NPathLink("refcounted_he_hek"));
806 if (he->refcounted_he_next)
807 refcounted_he_size(aTHX_ st, he->refcounted_he_next, NPathLink("refcounted_he_next"));
811 static void op_size_class(pTHX_ const OP * const baseop, opclass op_class, bool skip_op_struct, struct state *st, pPATH);
814 op_size(pTHX_ const OP * const baseop, struct state *st, pPATH)
816 op_size_class(aTHX_ baseop, cc_opclass(baseop), 0, st, NPathArg);
820 op_size_class(pTHX_ const OP * const baseop, opclass op_class, bool skip_op_struct, struct state *st, pPATH)
822 /* op_size recurses to follow the chain of opcodes. For the node path we
823 * don't want the chain to be 'nested' in the path so we use dNPathUseParent().
824 * Also, to avoid a link-to-a-link the caller should use NPathLinkAndNode()
825 * instead of NPathLink().
827 dNPathUseParent(NPathArg);
831 if(!check_new(st, baseop))
835 /* segv on OPc_LISTOP op_size(baseop->op_last) is, I suspect, the first symptom of need to handle slabbed allocation of OPs */
836 #if (PERL_BCDVERSION >= 0x5017000)
837 if(0)do_op_dump(0, Perl_debug_log, baseop);
840 op_size(aTHX_ baseop->op_next, st, NPathOpLink);
842 madprop_size(aTHX_ st, NPathOpLink, baseop->op_madprop);
846 case OPc_BASEOP: TAG;
848 ADD_SIZE(st, "op", sizeof(struct op));
852 ADD_SIZE(st, "unop", sizeof(struct unop));
853 op_size(aTHX_ ((UNOP *)baseop)->op_first, st, NPathOpLink);
857 ADD_SIZE(st, "binop", sizeof(struct binop));
858 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
859 op_size(aTHX_ ((BINOP *)baseop)->op_last, st, NPathOpLink);
863 ADD_SIZE(st, "logop", sizeof(struct logop));
864 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
865 op_size(aTHX_ ((LOGOP *)baseop)->op_other, st, NPathOpLink);
868 case OPc_CONDOP: TAG;
870 ADD_SIZE(st, "condop", sizeof(struct condop));
871 op_size(aTHX_ ((BINOP *)baseop)->op_first, st, NPathOpLink);
872 op_size(aTHX_ ((CONDOP *)baseop)->op_true, st, NPathOpLink);
873 op_size(aTHX_ ((CONDOP *)baseop)->op_false, st, NPathOpLink);
876 case OPc_LISTOP: TAG;
878 ADD_SIZE(st, "listop", sizeof(struct listop));
879 op_size(aTHX_ ((LISTOP *)baseop)->op_first, st, NPathOpLink);
880 op_size(aTHX_ ((LISTOP *)baseop)->op_last, st, NPathOpLink);
884 ADD_SIZE(st, "pmop", sizeof(struct pmop));
885 op_size(aTHX_ ((PMOP *)baseop)->op_first, st, NPathOpLink);
886 op_size(aTHX_ ((PMOP *)baseop)->op_last, st, NPathOpLink);
887 #if PERL_VERSION < 9 || (PERL_VERSION == 9 && PERL_SUBVERSION < 5)
888 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplroot, st, NPathOpLink);
889 op_size(aTHX_ ((PMOP *)baseop)->op_pmreplstart, st, NPathOpLink);
891 /* This is defined away in perl 5.8.x, but it is in there for
894 regex_size(aTHX_ PM_GETRE((PMOP *)baseop), st, NPathLink("PM_GETRE"));
896 regex_size(aTHX_ ((PMOP *)baseop)->op_pmregexp, st, NPathLink("op_pmregexp"));
901 ADD_SIZE(st, "svop", sizeof(struct svop));
902 if (!(baseop->op_type == OP_AELEMFAST
903 && baseop->op_flags & OPf_SPECIAL)) {
904 /* not an OP_PADAV replacement */
905 sv_size(aTHX_ st, NPathLink("SVOP"), ((SVOP *)baseop)->op_sv, SOME_RECURSION);
911 ADD_SIZE(st, "padop", sizeof(struct padop));
917 ADD_SIZE(st, "gvop", sizeof(struct gvop));
918 sv_size(aTHX_ st, NPathLink("GVOP"), ((GVOP *)baseop)->op_gv, SOME_RECURSION);
922 check_new_and_strlen(st, ((PVOP *)baseop)->op_pv, NPathLink("op_pv"));
926 ADD_SIZE(st, "loop", sizeof(struct loop));
927 op_size(aTHX_ ((LOOP *)baseop)->op_first, st, NPathOpLink);
928 op_size(aTHX_ ((LOOP *)baseop)->op_last, st, NPathOpLink);
929 op_size(aTHX_ ((LOOP *)baseop)->op_redoop, st, NPathOpLink);
930 op_size(aTHX_ ((LOOP *)baseop)->op_nextop, st, NPathOpLink);
931 op_size(aTHX_ ((LOOP *)baseop)->op_lastop, st, NPathOpLink);
936 basecop = (COP *)baseop;
938 ADD_SIZE(st, "cop", sizeof(struct cop));
940 /* Change 33656 by nicholas@mouse-mill on 2008/04/07 11:29:51
941 Eliminate cop_label from struct cop by storing a label as the first
942 entry in the hints hash. Most statements don't have labels, so this
943 will save memory. Not sure how much.
944 The check below will be incorrect fail on bleadperls
945 before 5.11 @33656, but later than 5.10, producing slightly too
946 small memory sizes on these Perls. */
947 #if (PERL_VERSION < 11)
948 check_new_and_strlen(st, basecop->cop_label, NPathLink("cop_label"));
951 check_new_and_strlen(st, basecop->cop_file, NPathLink("cop_file"));
952 /*check_new_and_strlen(st, basecop->cop_stashpv, NPathLink("cop_stashpv")); XXX */
954 if (SvREFCNT(basecop->cop_stash) == 1) /* XXX hack? */
955 sv_size(aTHX_ st, NPathLink("cop_stash"), (SV *)basecop->cop_stash, SOME_RECURSION);
956 sv_size(aTHX_ st, NPathLink("cop_filegv"), (SV *)basecop->cop_filegv, SOME_RECURSION);
959 #if (PERL_BCDVERSION >= 0x5009004)
960 # if (PERL_BCDVERSION < 0x5013007)
961 # define COPHH struct refcounted_he
963 # ifndef CopHINTHASH_get
964 # define CopHINTHASH_get(c) ((COPHH*)((c)->cop_hints_hash))
966 refcounted_he_size(aTHX_ st, CopHINTHASH_get(basecop), NPathLink("cop_hints_hash"));
975 if (st->dangle_whine)
976 warn( "Devel::Size: Encountered dangling pointer in opcode at: %p\n", baseop );
980 #if PERL_VERSION < 8 || PERL_SUBVERSION < 9 /* XXX plain || seems like a bug */
985 # define MAYBE_PURIFY(normal, pure) (pure)
986 # define MAYBE_OFFSET(struct_name, member) 0
988 # define MAYBE_PURIFY(normal, pure) (normal)
989 # define MAYBE_OFFSET(struct_name, member) STRUCT_OFFSET(struct_name, member)
992 const U8 body_sizes[SVt_LAST] = {
995 MAYBE_PURIFY(sizeof(IV), sizeof(XPVIV)), /* SVt_IV */
996 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
997 sizeof(XRV), /* SVt_RV */
998 sizeof(XPV), /* SVt_PV */
999 sizeof(XPVIV), /* SVt_PVIV */
1000 sizeof(XPVNV), /* SVt_PVNV */
1001 sizeof(XPVMG), /* SVt_PVMG */
1002 sizeof(XPVBM), /* SVt_PVBM */
1003 sizeof(XPVLV), /* SVt_PVLV */
1004 sizeof(XPVAV), /* SVt_PVAV */
1005 sizeof(XPVHV), /* SVt_PVHV */
1006 sizeof(XPVCV), /* SVt_PVCV */
1007 sizeof(XPVGV), /* SVt_PVGV */
1008 sizeof(XPVFM), /* SVt_PVFM */
1009 sizeof(XPVIO) /* SVt_PVIO */
1010 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 0
1014 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
1016 MAYBE_PURIFY(sizeof(xpv_allocated), sizeof(XPV)), /* SVt_PV */
1017 MAYBE_PURIFY(sizeof(xpviv_allocated), sizeof(XPVIV)),/* SVt_PVIV */
1018 sizeof(XPVNV), /* SVt_PVNV */
1019 sizeof(XPVMG), /* SVt_PVMG */
1020 sizeof(XPVGV), /* SVt_PVGV */
1021 sizeof(XPVLV), /* SVt_PVLV */
1022 MAYBE_PURIFY(sizeof(xpvav_allocated), sizeof(XPVAV)),/* SVt_PVAV */
1023 MAYBE_PURIFY(sizeof(xpvhv_allocated), sizeof(XPVHV)),/* SVt_PVHV */
1024 MAYBE_PURIFY(sizeof(xpvcv_allocated), sizeof(XPVCV)),/* SVt_PVCV */
1025 MAYBE_PURIFY(sizeof(xpvfm_allocated), sizeof(XPVFM)),/* SVt_PVFM */
1026 sizeof(XPVIO), /* SVt_PVIO */
1027 #elif PERL_VERSION == 10 && PERL_SUBVERSION == 1
1031 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
1033 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
1034 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
1035 sizeof(XPVNV), /* SVt_PVNV */
1036 sizeof(XPVMG), /* SVt_PVMG */
1037 sizeof(XPVGV), /* SVt_PVGV */
1038 sizeof(XPVLV), /* SVt_PVLV */
1039 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
1040 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
1041 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
1042 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
1043 sizeof(XPVIO) /* SVt_PVIO */
1044 #elif PERL_VERSION < 13
1048 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
1049 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
1050 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
1051 sizeof(XPVNV), /* SVt_PVNV */
1052 sizeof(XPVMG), /* SVt_PVMG */
1053 sizeof(regexp) - MAYBE_OFFSET(regexp, xpv_cur), /* SVt_REGEXP */
1054 sizeof(XPVGV), /* SVt_PVGV */
1055 sizeof(XPVLV), /* SVt_PVLV */
1056 sizeof(XPVAV) - MAYBE_OFFSET(XPVAV, xav_fill), /* SVt_PVAV */
1057 sizeof(XPVHV) - MAYBE_OFFSET(XPVHV, xhv_fill), /* SVt_PVHV */
1058 sizeof(XPVCV) - MAYBE_OFFSET(XPVCV, xpv_cur), /* SVt_PVCV */
1059 sizeof(XPVFM) - MAYBE_OFFSET(XPVFM, xpv_cur), /* SVt_PVFM */
1060 sizeof(XPVIO) /* SVt_PVIO */
1065 MAYBE_PURIFY(sizeof(NV), sizeof(XPVNV)), /* SVt_NV */
1066 sizeof(XPV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PV */
1067 sizeof(XPVIV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVIV */
1068 sizeof(XPVNV) - MAYBE_OFFSET(XPV, xpv_cur), /* SVt_PVNV */
1069 sizeof(XPVMG), /* SVt_PVMG */
1070 sizeof(regexp), /* SVt_REGEXP */
1071 sizeof(XPVGV), /* SVt_PVGV */
1072 sizeof(XPVLV), /* SVt_PVLV */
1073 sizeof(XPVAV), /* SVt_PVAV */
1074 sizeof(XPVHV), /* SVt_PVHV */
1075 sizeof(XPVCV), /* SVt_PVCV */
1076 sizeof(XPVFM), /* SVt_PVFM */
1077 sizeof(XPVIO) /* SVt_PVIO */
1082 /* based on Perl_do_dump_pad() - wraps sv_size and adds ADD_ATTR calls for the pad names */
1084 padlist_size(pTHX_ struct state *const st, pPATH, PADLIST *padlist,
1087 dNPathUseParent(NPathArg);
1094 if( 0 && !check_new(st, padlist))
1097 pad_name = MUTABLE_AV(*av_fetch(MUTABLE_AV(padlist), 0, FALSE));
1098 pname = AvARRAY(pad_name);
1100 for (ix = 1; ix <= AvFILLp(pad_name); ix++) {
1101 const SV *namesv = pname[ix];
1102 if (namesv && namesv == &PL_sv_undef) {
1106 /* SvFAKE: On a pad name SV, that slot in the frame AV is a REFCNT'ed reference to a lexical from "outside" */
1108 ADD_ATTR(st, NPattr_PADFAKE, SvPVX_const(namesv), ix);
1110 ADD_ATTR(st, NPattr_PADNAME, SvPVX_const(namesv), ix);
1113 ADD_ATTR(st, NPattr_PADTMP, "SVs_PADTMP", ix);
1117 sv_size(aTHX_ st, NPathArg, (SV*)padlist, recurse);
1122 sv_size(pTHX_ struct state *const st, pPATH, const SV * const orig_thing,
1123 const int recurse) {
1124 const SV *thing = orig_thing;
1125 dNPathNodes(3, NPathArg);
1128 if(!check_new(st, orig_thing))
1131 type = SvTYPE(thing);
1132 if (type > SVt_LAST) {
1133 warn("Devel::Size: Unknown variable type: %u encountered\n", type);
1136 NPathPushNode(thing, NPtype_SV);
1137 ADD_SIZE(st, "sv_head", sizeof(SV));
1138 ADD_SIZE(st, "sv_body", body_sizes[type]);
1141 #if (PERL_VERSION < 11)
1142 /* Is it a reference? */
1147 if(recurse && SvROK(thing)) /* XXX maybe don't follow weakrefs */
1148 sv_size(aTHX_ st, (SvWEAKREF(thing) ? NPathLink("weakRV") : NPathLink("RV")), SvRV_const(thing), recurse);
1152 /* Is there anything in the array? */
1153 if (AvMAX(thing) != -1) {
1154 /* an array with 10 slots has AvMax() set to 9 - te 2007-04-22 */
1155 ADD_SIZE(st, "av_max", sizeof(SV *) * (AvMAX(thing) + 1));
1156 ADD_ATTR(st, NPattr_NOTE, "av_len", av_len((AV*)thing));
1157 dbg_printf(("total_size: %li AvMAX: %li av_len: $i\n", st->total_size, AvMAX(thing), av_len((AV*)thing)));
1159 if (recurse >= st->min_recurse_threshold) {
1160 SSize_t i = AvFILLp(thing) + 1;
1163 if (sv_size(aTHX_ st, NPathLink("AVelem"), AvARRAY(thing)[i], recurse))
1164 ADD_LINK_ATTR(st, NPattr_NOTE, "i", i);
1168 /* Add in the bits on the other side of the beginning */
1170 dbg_printf(("total_size %li, sizeof(SV *) %li, AvARRAY(thing) %li, AvALLOC(thing)%li , sizeof(ptr) %li \n",
1171 st->total_size, sizeof(SV*), AvARRAY(thing), AvALLOC(thing), sizeof( thing )));
1173 /* under Perl 5.8.8 64bit threading, AvARRAY(thing) was a pointer while AvALLOC was 0,
1174 resulting in grossly overstated sized for arrays. Technically, this shouldn't happen... */
1175 if (AvALLOC(thing) != 0) {
1176 ADD_SIZE(st, "AvALLOC", (sizeof(SV *) * (AvARRAY(thing) - AvALLOC(thing))));
1178 #if (PERL_VERSION < 9)
1179 /* Is there something hanging off the arylen element?
1180 Post 5.9.something this is stored in magic, so will be found there,
1181 and Perl_av_arylen_p() takes a non-const AV*, hence compilers rightly
1182 complain about AvARYLEN() passing thing to it. */
1183 sv_size(aTHX_ st, NPathLink("ARYLEN"), AvARYLEN(thing), recurse);
1188 /* Now the array of buckets */
1190 if (HvENAME(thing)) { ADD_ATTR(st, NPattr_NAME, HvENAME(thing), 0); }
1192 if (HvNAME(thing)) { ADD_ATTR(st, NPattr_NAME, HvNAME(thing), 0); }
1194 ADD_SIZE(st, "hv_max", (sizeof(HE *) * (HvMAX(thing) + 1)));
1195 /* Now walk the bucket chain */
1196 if (HvARRAY(thing)) {
1200 for (cur_bucket = 0; cur_bucket <= HvMAX(thing); cur_bucket++) {
1201 cur_entry = *(HvARRAY(thing) + cur_bucket);
1203 NPathPushNode("he", NPtype_LINK);
1204 NPathPushNode("he+hek", NPtype_NAME);
1205 ADD_SIZE(st, "he", sizeof(HE));
1206 hek_size(aTHX_ st, cur_entry->hent_hek, HvSHAREKEYS(thing), NPathLink("hent_hek"));
1207 if (recurse >= st->min_recurse_threshold) {
1208 if (orig_thing == (SV*)PL_strtab) {
1209 /* For PL_strtab the HeVAL is used as a refcnt */
1210 ADD_SIZE(st, "shared_hek", HeKLEN(cur_entry));
1213 /* I've seen a PL_strtab HeVAL == 0xC and 0x40C etc
1214 * just running perl -Mblib -Mstrict -MDevel::Size=:all -MCarp -e 'warn perl_size()'
1215 * but it seemed like a corruption - it would change come and go with irrelevant code changes.
1216 * so we protect against that here, but I'd like to know the cause.
1218 if (PTR2UV(HeVAL(cur_entry)) > 0xFFF)
1219 sv_size(aTHX_ st, NPathLink("HeVAL"), HeVAL(cur_entry), recurse);
1220 else warn("skipped suspect HeVAL %p", HeVAL(cur_entry));
1223 cur_entry = cur_entry->hent_next;
1227 } /* bucket chain */
1232 /* This direct access is arguably "naughty": */
1233 struct mro_meta *meta = HvAUX(thing)->xhv_mro_meta;
1234 #if PERL_VERSION > 13 || PERL_SUBVERSION > 8 /* XXX plain || seems like a bug */
1236 I32 count = HvAUX(thing)->xhv_name_count;
1239 HEK **names = HvAUX(thing)->xhv_name_u.xhvnameu_names;
1243 hek_size(aTHX_ st, names[count], 1, NPathLink("HvAUXelem"));
1248 hek_size(aTHX_ st, HvNAME_HEK(thing), 1, NPathLink("HvNAME_HEK"));
1251 ADD_SIZE(st, "xpvhv_aux", sizeof(struct xpvhv_aux));
1253 ADD_SIZE(st, "mro_meta", sizeof(struct mro_meta));
1254 sv_size(aTHX_ st, NPathLink("mro_nextmethod"), (SV *)meta->mro_nextmethod, TOTAL_SIZE_RECURSION);
1255 #if PERL_VERSION > 10 || (PERL_VERSION == 10 && PERL_SUBVERSION > 0)
1256 sv_size(aTHX_ st, NPathLink("isa"), (SV *)meta->isa, TOTAL_SIZE_RECURSION);
1258 #if PERL_VERSION > 10
1259 sv_size(aTHX_ st, NPathLink("mro_linear_all"), (SV *)meta->mro_linear_all, TOTAL_SIZE_RECURSION);
1260 sv_size(aTHX_ st, NPathLink("mro_linear_current"), meta->mro_linear_current, TOTAL_SIZE_RECURSION);
1262 sv_size(aTHX_ st, NPathLink("mro_linear_dfs"), (SV *)meta->mro_linear_dfs, TOTAL_SIZE_RECURSION);
1263 sv_size(aTHX_ st, NPathLink("mro_linear_c3"), (SV *)meta->mro_linear_c3, TOTAL_SIZE_RECURSION);
1268 check_new_and_strlen(st, HvNAME_get(thing), NPathLink("HvNAME"));
1274 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1275 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1277 if (st->go_yell && !st->fm_whine) {
1278 carp("Devel::Size: Calculated sizes for FMs are incomplete");
1284 /* not CvSTASH, per https://rt.cpan.org/Ticket/Display.html?id=79366 */
1285 ADD_ATTR(st, NPattr_NAME, CvGV(thing) ? GvNAME(CvGV(thing)) : "UNDEFINED", 0);
1286 sv_size(aTHX_ st, NPathLink("CvGV"), (SV *)CvGV(thing), SOME_RECURSION);
1287 padlist_size(aTHX_ st, NPathLink("CvPADLIST"), CvPADLIST(thing), recurse);
1288 sv_size(aTHX_ st, NPathLink("CvOUTSIDE"), (SV *)CvOUTSIDE(thing), SOME_RECURSION);
1289 if (CvISXSUB(thing)) {
1290 sv_size(aTHX_ st, NPathLink("cv_const_sv"), cv_const_sv((CV *)thing), recurse);
1292 /* Note that we don't chase CvSTART */
1293 op_size(aTHX_ CvROOT(thing), st, NPathLinkAndNode("CvROOT", "OPs"));
1298 /* Some embedded char pointers */
1299 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_top_name, NPathLink("xio_top_name"));
1300 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_fmt_name, NPathLink("xio_fmt_name"));
1301 check_new_and_strlen(st, ((XPVIO *) SvANY(thing))->xio_bottom_name, NPathLink("xio_bottom_name"));
1302 /* Throw the GVs on the list to be walked if they're not-null */
1303 sv_size(aTHX_ st, NPathLink("xio_top_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_top_gv, recurse);
1304 sv_size(aTHX_ st, NPathLink("xio_bottom_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_bottom_gv, recurse);
1305 sv_size(aTHX_ st, NPathLink("xio_fmt_gv"), (SV *)((XPVIO *) SvANY(thing))->xio_fmt_gv, recurse);
1307 /* Only go trotting through the IO structures if they're really
1308 trottable. If USE_PERLIO is defined we can do this. If
1309 not... we can't, so we don't even try */
1311 /* Dig into xio_ifp and xio_ofp here */
1312 warn("Devel::Size: Can't size up perlio layers yet\n");
1317 #if (PERL_VERSION < 9)
1322 if(isGV_with_GP(thing)) {
1324 hek_size(aTHX_ st, GvNAME_HEK(thing), 1, NPathLink("GvNAME_HEK"));
1326 ADD_SIZE(st, "GvNAMELEN", GvNAMELEN(thing));
1328 ADD_ATTR(st, NPattr_NAME, GvNAME_get(thing), 0);
1330 hek_size(aTHX_ st, GvFILE_HEK(thing), 1, NPathLink("GvFILE_HEK"));
1331 #elif defined(GvFILE)
1332 /* XXX this coredumped for me in t/recurse.t with a non-threaded 5.8.9
1333 * so I've changed the condition to be more restricive
1334 *# if !defined(USE_ITHREADS) || (PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 8))
1336 # if (PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 9))
1337 /* With itreads, before 5.8.9, this can end up pointing to freed memory
1338 if the GV was created in an eval, as GvFILE() points to CopFILE(),
1339 and the relevant COP has been freed on scope cleanup after the eval.
1340 5.8.9 adds a binary compatible fudge that catches the vast majority
1341 of cases. 5.9.something added a proper fix, by converting the GP to
1342 use a shared hash key (porperly reference counted), instead of a
1343 char * (owned by who knows? possibly no-one now) */
1344 check_new_and_strlen(st, GvFILE(thing), NPathLink("GvFILE"));
1347 /* Is there something hanging off the glob? */
1348 if (check_new(st, GvGP(thing))) {
1349 ADD_SIZE(st, "GP", sizeof(GP));
1350 sv_size(aTHX_ st, NPathLink("gp_sv"), (SV *)(GvGP(thing)->gp_sv), recurse);
1351 sv_size(aTHX_ st, NPathLink("gp_av"), (SV *)(GvGP(thing)->gp_av), recurse);
1352 sv_size(aTHX_ st, NPathLink("gp_hv"), (SV *)(GvGP(thing)->gp_hv), recurse);
1353 sv_size(aTHX_ st, NPathLink("gp_cv"), (SV *)(GvGP(thing)->gp_cv), recurse);
1354 sv_size(aTHX_ st, NPathLink("gp_egv"), (SV *)(GvGP(thing)->gp_egv), recurse);
1355 sv_size(aTHX_ st, NPathLink("gp_form"), (SV *)(GvGP(thing)->gp_form), recurse);
1357 #if (PERL_VERSION >= 9)
1361 #if PERL_VERSION <= 8
1369 if(recurse && SvROK(thing))
1370 sv_size(aTHX_ st, NPathLink("RV"), SvRV_const(thing), recurse);
1371 else if (SvIsCOW_shared_hash(thing))
1372 hek_size(aTHX_ st, SvSHARED_HEK_FROM_PV(SvPVX(thing)), 1, NPathLink("SvSHARED_HEK_FROM_PV"));
1374 ADD_SIZE(st, "SvLEN", SvLEN(thing));
1378 SvOOK_offset(thing, len);
1379 ADD_SIZE(st, "SvOOK", len);
1385 if (type >= SVt_PVMG) {
1386 if (SvMAGICAL(thing))
1387 magic_size(aTHX_ thing, st, NPathLink("MG"));
1388 /* SVpad_OUR shares same flag bit as SVpbm_VALID and others */
1389 if (type == SVt_PVGV && SvPAD_OUR(thing) && SvOURSTASH(thing))
1390 sv_size(aTHX_ st, NPathLink("SvOURSTASH"), (SV *)SvOURSTASH(thing), SOME_RECURSION);
1392 sv_size(aTHX_ st, NPathLink("SvSTASH"), (SV *)SvSTASH(thing), SOME_RECURSION);
/*
 * free_state() callback: finalize the optional SIZEME node stream.
 * Writes an "E" (end) record carrying the pid and elapsed wall-clock
 * seconds since st->start_time_nv, then closes the stream - pclose()
 * for a piped stream (name begins with '|'), fclose() otherwise -
 * warning (not croaking) on close failure.
 */
1399 free_memnode_state(pTHX_ struct state *st)
1401 if (st->node_stream_fh && st->node_stream_name && *st->node_stream_name) {
/* "E pid elapsed name" terminator record, matching the "S" header
 * written when the stream was opened in new_state() */
1402 fprintf(st->node_stream_fh, "E %d %f %s\n",
1403 getpid(), gettimeofday_nv(aTHX)-st->start_time_nv, "unnamed");
/* a leading '|' means the stream was opened with popen() */
1404 if (*st->node_stream_name == '|') {
1405 if (pclose(st->node_stream_fh))
1406 warn("%s exited with an error status\n", st->node_stream_name);
1409 if (fclose(st->node_stream_fh))
1410 warn("Error closing %s: %s\n", st->node_stream_name, strerror(errno));
/*
 * Allocate and zero-initialise a fresh traversal state.
 * Defaults to full recursion (TOTAL_SIZE_RECURSION), honours the
 * $Devel::Size::warn and $Devel::Size::dangle control variables,
 * records the start time, and pre-marks the immortal SVs as "seen"
 * so their (shared) size is never attributed to any traversal.
 * With PATH_TRACKING, the SIZEME environment variable selects a node
 * stream destination: "|cmd" pipes to cmd, any other non-empty value
 * names a file; empty/unset falls back to the dump callback.
 */
1415 static struct state *
1421 Newxz(st, 1, struct state);
/* default: recurse into everything (total_size semantics) */
1423 st->min_recurse_threshold = TOTAL_SIZE_RECURSION;
1424 if (NULL != (warn_flag = get_sv("Devel::Size::warn", FALSE))) {
1425 st->dangle_whine = st->go_yell = SvIV(warn_flag) ? TRUE : FALSE;
1427 if (NULL != (warn_flag = get_sv("Devel::Size::dangle", FALSE))) {
1428 st->dangle_whine = SvIV(warn_flag) ? TRUE : FALSE;
1430 st->start_time_nv = gettimeofday_nv(aTHX);
/* immortals are interpreter-wide singletons - mark as already seen */
1431 check_new(st, &PL_sv_undef);
1432 check_new(st, &PL_sv_no);
1433 check_new(st, &PL_sv_yes);
1434 #if PERL_VERSION > 8 || (PERL_VERSION == 8 && PERL_SUBVERSION > 0)
1435 check_new(st, &PL_sv_placeholder);
1438 #ifdef PATH_TRACKING
1439 /* XXX quick hack */
1440 st->node_stream_name = getenv("SIZEME");
1441 if (st->node_stream_name) {
1442 if (*st->node_stream_name) {
/* leading '|' means pipe to a command, otherwise write to a file */
1443 if (*st->node_stream_name == '|')
1444 st->node_stream_fh = popen(st->node_stream_name+1, "w");
1446 st->node_stream_fh = fopen(st->node_stream_name, "wb");
1447 if (!st->node_stream_fh)
1448 croak("Can't open '%s' for writing: %s", st->node_stream_name, strerror(errno));
1449 if(0)setlinebuf(st->node_stream_fh); /* XXX temporary for debugging */
1450 st->add_attr_cb = np_stream_node_path_info;
/* "S pid start_time name" header record; free_memnode_state() writes
 * the matching "E" terminator */
1451 fprintf(st->node_stream_fh, "S %d %f %s\n",
1452 getpid(), st->start_time_nv, "unnamed");
1455 st->add_attr_cb = np_dump_node_path_info;
1457 st->free_state_cb = free_memnode_state;
1463 /* XXX based on S_visit() in sv.c */
/*
 * Walk every SV arena and size any live SV the main traversal has not
 * already accounted for.  Once the rest of the code reaches ALL SVs,
 * anything found here is effectively leaked (or unreachable by us).
 */
1465 unseen_sv_size(pTHX_ struct state *st, pPATH)
1469 dNPathNodes(1, NPathArg);
1471 NPathPushNode("unseen", NPtype_NAME);
1473 /* by this point we should have visited all the SVs
1474 * so now we'll run through all the SVs via the arenas
1475 * in order to find any that we've missed for some reason.
1476 * Once the rest of the code is finding ALL the SVs then any
1477 * found here will be leaks.
/* arena heads chain via SvANY; SvREFCNT of the head is (ab)used by
 * the core to hold the arena's slot count (cf. S_visit in sv.c) */
1479 for (sva = PL_sv_arenaroot; sva; sva = MUTABLE_SV(SvANY(sva))) {
1480 const SV * const svend = &sva[SvREFCNT(sva)];
/* slot 0 is the arena head itself - start at sva + 1 */
1482 for (sv = sva + 1; sv < svend; ++sv) {
/* live SV: sv_size()'s own check_new() skips any already seen */
1483 if (SvTYPE(sv) != (svtype)SVTYPEMASK && SvREFCNT(sv)) {
1484 sv_size(aTHX_ st, NPathLink("arena"), sv, TOTAL_SIZE_RECURSION);
1486 else if (check_new(st, sv)) { /* sanity check */
1488 warn("unseen_sv_size encountered freed SV unexpectedly"); /* XXX warn uses an SV, I think */
/*
 * Size a chain of MADPROP structures (MAD-enabled perls only),
 * recursing along prop->mad_next; already-seen props are skipped
 * via check_new().
 * NOTE(review): "dPathNodes" differs from the "dNPathNodes" every
 * sibling function uses - likely a typo that only bites when this
 * MAD-only code is actually compiled; confirm against the macro set.
 * NOTE(review): NPathPushNode("val") omits the NPtype argument that
 * all other call sites pass - verify.
 */
1496 madprop_size(pTHX_ struct state *const st, pPATH, MADPROP *prop)
1498 dPathNodes(2, NPathArg);
1499 if (!check_new(st, prop))
1501 NPathPushNode("madprop_size", NPtype_NAME);
1502 ADD_SIZE(st, "MADPROP", sizeof(MADPROP));
1504 NPathPushNode("val");
/* assumes mad_val is a byte count for the prop's value - TODO confirm
 * (mad_vlen would be the more obvious length field) */
1505 ADD_SIZE(st, "val", prop->mad_val);
1507 madprop_size(aTHX_ st, NPathLink("mad_next"), prop->mad_next);
1511 #if (PERL_BCDVERSION >= 0x5009005)
/*
 * Size a yy_parser structure: the struct itself, its stack of
 * yy_stack_frames, the SVs the lexer/parser holds, the saved COP,
 * and (recursively) the old_parser chain from nested parses.
 * Already-seen parsers are skipped via check_new().
 */
1513 parser_size(pTHX_ struct state *const st, pPATH, yy_parser *parser)
1515 dNPathNodes(2, NPathArg);
1516 if (!check_new(st, parser))
1518 NPathPushNode("parser_size", NPtype_NAME);
1519 ADD_SIZE(st, "yy_parser", sizeof(yy_parser));
1521 NPathPushNode("stack", NPtype_NAME);
1523 /*warn("total: %u", parser->stack_size); */
1524 /*warn("foo: %u", parser->ps - parser->stack); */
/* count the whole allocated stack, then walk the frames in use
 * (parser->stack .. parser->ps inclusive) for their SVs */
1525 ADD_SIZE(st, "stack_frames", parser->stack_size * sizeof(yy_stack_frame));
1526 for (ps = parser->stack; ps <= parser->ps; ps++) {
1527 #if (PERL_BCDVERSION >= 0x5011002) /* roughly */
1528 if (sv_size(aTHX_ st, NPathLink("compcv"), (SV*)ps->compcv, TOTAL_SIZE_RECURSION))
1529 ADD_LINK_ATTR(st, NPattr_NOTE, "i", ps - parser->ps)
1530 #else /* prior to perl 8c63ea58 Dec 8 2009 */
1531 if (sv_size(aTHX_ st, NPathLink("comppad"), (SV*)ps->comppad, TOTAL_SIZE_RECURSION))
1532 ADD_LINK_ATTR(st, NPattr_NOTE, "i", ps - parser->ps);
/* SVs owned by the lexer/parser proper */
1537 sv_size(aTHX_ st, NPathLink("lex_repl"), (SV*)parser->lex_repl, TOTAL_SIZE_RECURSION);
1538 sv_size(aTHX_ st, NPathLink("lex_stuff"), (SV*)parser->lex_stuff, TOTAL_SIZE_RECURSION);
1539 sv_size(aTHX_ st, NPathLink("linestr"), (SV*)parser->linestr, TOTAL_SIZE_RECURSION);
1540 sv_size(aTHX_ st, NPathLink("in_my_stash"), (SV*)parser->in_my_stash, TOTAL_SIZE_RECURSION);
1541 /*sv_size(aTHX_ st, NPathLink("rsfp"), parser->rsfp, TOTAL_SIZE_RECURSION); */
1542 sv_size(aTHX_ st, NPathLink("rsfp_filters"), (SV*)parser->rsfp_filters, TOTAL_SIZE_RECURSION);
/* MAD-only parser members (presumably guarded by #ifdef PERL_MAD -
 * the guard line is outside this view; confirm) */
1544 sv_size(aTHX_ st, NPathLink("endwhite"), parser->endwhite, TOTAL_SIZE_RECURSION);
1545 sv_size(aTHX_ st, NPathLink("nextwhite"), parser->nextwhite, TOTAL_SIZE_RECURSION);
1546 sv_size(aTHX_ st, NPathLink("skipwhite"), parser->skipwhite, TOTAL_SIZE_RECURSION);
1547 sv_size(aTHX_ st, NPathLink("thisclose"), parser->thisclose, TOTAL_SIZE_RECURSION);
1548 madprop_size(aTHX_ st, NPathLink("thismad"), parser->thismad);
1549 sv_size(aTHX_ st, NPathLink("thisopen"), parser->thisopen, TOTAL_SIZE_RECURSION);
1550 sv_size(aTHX_ st, NPathLink("thisstuff"), parser->thisstuff, TOTAL_SIZE_RECURSION);
1551 sv_size(aTHX_ st, NPathLink("thistoken"), parser->thistoken, TOTAL_SIZE_RECURSION);
1552 sv_size(aTHX_ st, NPathLink("thiswhite"), parser->thiswhite, TOTAL_SIZE_RECURSION);
1554 op_size_class(aTHX_ (OP*)parser->saved_curcop, OPc_COP, 0,
1555 st, NPathLink("saved_curcop"));
/* nested string-eval/require parsers chain via old_parser */
1557 if (parser->old_parser)
1558 parser_size(aTHX_ st, NPathLink("old_parser"), parser->old_parser);
/*
 * Size the whole perl interpreter: everything reachable from
 * %main:: (PL_defstash), then a grab-bag of interpreter globals,
 * the compiling COP and parser, the shared string table, unused SV
 * heads on the free list, and finally any arena SVs still unseen.
 * Relies on check_new() inside the helpers so that an SV reachable
 * via several globals is only counted once.
 */
1563 perl_size(pTHX_ struct state *const st, pPATH)
1565 dNPathNodes(3, NPathArg);
1567 /* if(!check_new(st, interp)) return; */
1568 NPathPushNode("perl", NPtype_NAME);
1569 #if defined(MULTIPLICITY)
1570 ADD_SIZE(st, "PerlInterpreter", sizeof(PerlInterpreter));
1576 * unknown <== = O/S Heap size - perl - free_malloc_space
1578 /* start with PL_defstash to get everything reachable from \%main:: */
1579 sv_size(aTHX_ st, NPathLink("PL_defstash"), (SV*)PL_defstash, TOTAL_SIZE_RECURSION);
1581 NPathPushNode("others", NPtype_NAME); /* group these (typically much smaller) items */
1582 sv_size(aTHX_ st, NPathLink("PL_defgv"), (SV*)PL_defgv, TOTAL_SIZE_RECURSION);
1583 sv_size(aTHX_ st, NPathLink("PL_incgv"), (SV*)PL_incgv, TOTAL_SIZE_RECURSION);
1584 sv_size(aTHX_ st, NPathLink("PL_rs"), (SV*)PL_rs, TOTAL_SIZE_RECURSION);
/* NOTE(review): PL_fdpid, PL_modglobal and PL_endav are each sized
 * twice in this list (here and again further down).  Harmless because
 * check_new() makes the second visit a no-op, but the duplicates
 * could be dropped. */
1585 sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1586 sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1587 sv_size(aTHX_ st, NPathLink("PL_errors"), (SV*)PL_errors, TOTAL_SIZE_RECURSION);
1588 sv_size(aTHX_ st, NPathLink("PL_stashcache"), (SV*)PL_stashcache, TOTAL_SIZE_RECURSION);
1589 sv_size(aTHX_ st, NPathLink("PL_patchlevel"), (SV*)PL_patchlevel, TOTAL_SIZE_RECURSION);
1590 #ifdef PL_apiversion
1591 sv_size(aTHX_ st, NPathLink("PL_apiversion"), (SV*)PL_apiversion, TOTAL_SIZE_RECURSION);
1593 #ifdef PL_registered_mros
1594 sv_size(aTHX_ st, NPathLink("PL_registered_mros"), (SV*)PL_registered_mros, TOTAL_SIZE_RECURSION);
1597 sv_size(aTHX_ st, NPathLink("PL_regex_padav"), (SV*)PL_regex_padav, TOTAL_SIZE_RECURSION);
1599 sv_size(aTHX_ st, NPathLink("PL_warnhook"), (SV*)PL_warnhook, TOTAL_SIZE_RECURSION);
1600 sv_size(aTHX_ st, NPathLink("PL_diehook"), (SV*)PL_diehook, TOTAL_SIZE_RECURSION);
1601 sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1602 sv_size(aTHX_ st, NPathLink("PL_main_cv"), (SV*)PL_main_cv, TOTAL_SIZE_RECURSION);
1603 sv_size(aTHX_ st, NPathLink("PL_main_root"), (SV*)PL_main_root, TOTAL_SIZE_RECURSION);
1604 sv_size(aTHX_ st, NPathLink("PL_main_start"), (SV*)PL_main_start, TOTAL_SIZE_RECURSION);
1605 sv_size(aTHX_ st, NPathLink("PL_envgv"), (SV*)PL_envgv, TOTAL_SIZE_RECURSION);
1606 sv_size(aTHX_ st, NPathLink("PL_hintgv"), (SV*)PL_hintgv, TOTAL_SIZE_RECURSION);
1607 sv_size(aTHX_ st, NPathLink("PL_e_script"), (SV*)PL_e_script, TOTAL_SIZE_RECURSION);
1608 sv_size(aTHX_ st, NPathLink("PL_encoding"), (SV*)PL_encoding, TOTAL_SIZE_RECURSION);
1610 sv_size(aTHX_ st, NPathLink("PL_ofsgv"), (SV*)PL_ofsgv, TOTAL_SIZE_RECURSION);
1612 sv_size(aTHX_ st, NPathLink("PL_argvout_stack"), (SV*)PL_argvout_stack, TOTAL_SIZE_RECURSION);
1613 sv_size(aTHX_ st, NPathLink("PL_beginav"), (SV*)PL_beginav, TOTAL_SIZE_RECURSION);
1614 sv_size(aTHX_ st, NPathLink("PL_beginav_save"), (SV*)PL_beginav_save, TOTAL_SIZE_RECURSION);
1615 sv_size(aTHX_ st, NPathLink("PL_checkav_save"), (SV*)PL_checkav_save, TOTAL_SIZE_RECURSION);
1616 #ifdef PL_unitcheckav
1617 sv_size(aTHX_ st, NPathLink("PL_unitcheckav"), (SV*)PL_unitcheckav, TOTAL_SIZE_RECURSION);
1619 #ifdef PL_unitcheckav_save
1620 sv_size(aTHX_ st, NPathLink("PL_unitcheckav_save"), (SV*)PL_unitcheckav_save, TOTAL_SIZE_RECURSION);
1622 sv_size(aTHX_ st, NPathLink("PL_endav"), (SV*)PL_endav, TOTAL_SIZE_RECURSION);
1623 sv_size(aTHX_ st, NPathLink("PL_checkav"), (SV*)PL_checkav, TOTAL_SIZE_RECURSION);
1624 sv_size(aTHX_ st, NPathLink("PL_initav"), (SV*)PL_initav, TOTAL_SIZE_RECURSION);
1626 sv_size(aTHX_ st, NPathLink("PL_isarev"), (SV*)PL_isarev, TOTAL_SIZE_RECURSION);
1628 sv_size(aTHX_ st, NPathLink("PL_fdpid"), (SV*)PL_fdpid, TOTAL_SIZE_RECURSION);
1629 sv_size(aTHX_ st, NPathLink("PL_preambleav"), (SV*)PL_preambleav, TOTAL_SIZE_RECURSION);
1630 sv_size(aTHX_ st, NPathLink("PL_ors_sv"), (SV*)PL_ors_sv, TOTAL_SIZE_RECURSION);
1631 sv_size(aTHX_ st, NPathLink("PL_modglobal"), (SV*)PL_modglobal, TOTAL_SIZE_RECURSION);
1632 sv_size(aTHX_ st, NPathLink("PL_custom_op_names"), (SV*)PL_custom_op_names, TOTAL_SIZE_RECURSION);
1633 sv_size(aTHX_ st, NPathLink("PL_custom_op_descs"), (SV*)PL_custom_op_descs, TOTAL_SIZE_RECURSION);
1634 #ifdef PL_custom_ops
1635 sv_size(aTHX_ st, NPathLink("PL_custom_ops"), (SV*)PL_custom_ops, TOTAL_SIZE_RECURSION);
1637 sv_size(aTHX_ st, NPathLink("PL_compcv"), (SV*)PL_compcv, TOTAL_SIZE_RECURSION);
1638 sv_size(aTHX_ st, NPathLink("PL_DBcv"), (SV*)PL_DBcv, TOTAL_SIZE_RECURSION);
1639 #ifdef PERL_USES_PL_PIDSTATUS
1640 sv_size(aTHX_ st, NPathLink("PL_pidstatus"), (SV*)PL_pidstatus, TOTAL_SIZE_RECURSION);
1642 sv_size(aTHX_ st, NPathLink("PL_subname"), (SV*)PL_subname, TOTAL_SIZE_RECURSION);
1643 #ifdef USE_LOCALE_NUMERIC
1644 sv_size(aTHX_ st, NPathLink("PL_numeric_radix_sv"), (SV*)PL_numeric_radix_sv, TOTAL_SIZE_RECURSION);
1645 check_new_and_strlen(st, PL_numeric_name, NPathLink("PL_numeric_name"));
1647 #ifdef USE_LOCALE_COLLATE
1648 check_new_and_strlen(st, PL_collation_name, NPathLink("PL_collation_name"));
/* plain char* globals: counted via strlen if not seen before */
1650 check_new_and_strlen(st, PL_origfilename, NPathLink("PL_origfilename"));
1651 check_new_and_strlen(st, PL_inplace, NPathLink("PL_inplace"));
1652 check_new_and_strlen(st, PL_osname, NPathLink("PL_osname"));
1653 if (PL_op_mask && check_new(st, PL_op_mask))
1654 ADD_SIZE(st, "PL_op_mask", PL_maxo)
1655 if (PL_exitlistlen && check_new(st, PL_exitlist))
1656 ADD_SIZE(st, "PL_exitlist", (PL_exitlistlen * sizeof(PerlExitListEntry *))
1657 + (PL_exitlistlen * sizeof(PerlExitListEntry)));
1658 #ifdef PERL_IMPLICIT_CONTEXT
1659 if (PL_my_cxt_size && check_new(st, PL_my_cxt_list)) {
1660 ADD_SIZE(st, "PL_my_cxt_list", (PL_my_cxt_size * sizeof(void *)));
1661 #ifdef PERL_GLOBAL_STRUCT_PRIVATE
1662 ADD_SIZE(st, "PL_my_cxt_keys", (PL_my_cxt_size * sizeof(char *)));
1666 /* TODO PL_stashpad */
/* PL_compiling is embedded in the interpreter (skip_checknew=1);
 * PL_curcopdb is a pointer that may not have been seen yet */
1667 op_size_class(aTHX_ (OP*)&PL_compiling, OPc_COP, 1, st, NPathLink("PL_compiling"));
1668 op_size_class(aTHX_ (OP*)PL_curcopdb, OPc_COP, 0, st, NPathLink("PL_curcopdb"));
1670 #if (PERL_BCDVERSION >= 0x5009005)
1671 parser_size(aTHX_ st, NPathLink("PL_parser"), PL_parser);
1674 /* TODO stacks: cur, main, tmps, mark, scope, save */
1675 /* TODO PL_exitlist */
1676 /* TODO PL_reentrant_buffers etc */
1678 /* TODO PerlIO? PL_known_layers PL_def_layerlist PL_perlio_fd_refcnt etc */
1680 /* TODO anything missed? */
1682 /* --- by this point we should have seen all reachable SVs --- */
1684 /* in theory we shouldn't have any elements in PL_strtab that haven't been seen yet */
1685 sv_size(aTHX_ st, NPathLink("PL_strtab-unseen"), (SV*)PL_strtab, TOTAL_SIZE_RECURSION);
1687 /* unused space in sv head arenas */
1691 # define SvARENA_CHAIN(sv) SvANY(sv) /* XXX breaks encapsulation*/
/* walk the SV-head free list; each entry should be unseen */
1692 while ((p = MUTABLE_SV(SvARENA_CHAIN(p)))) {
1693 if (!check_new(st, p)) /* sanity check */
1694 warn("Free'd SV head unexpectedly already seen");
1697 NPathPushNode("unused_sv_heads", NPtype_NAME);
1698 ADD_SIZE(st, "sv", free_heads * sizeof(SV));
1701 /* XXX iterate over bodies_by_type and crawl the free chains for each */
1703 /* iterate over all SVs to find any we've not accounted for yet */
1704 /* once the code above is visiting all SVs, any found here have been leaked */
1705 unseen_sv_size(aTHX_ st, NPathLink("unaccounted"));
1709 MODULE = Devel::SizeMe PACKAGE = Devel::SizeMe
1717 total_size = TOTAL_SIZE_RECURSION
1720 SV *thing = orig_thing;
1721 struct state *st = new_state(aTHX);
1723 /* If they passed us a reference then dereference it. This is the
1724 only way we can check the sizes of arrays and hashes */
1726 thing = SvRV(thing);
1729 sv_size(aTHX_ st, NULL, thing, ix);
1730 RETVAL = st->total_size;
1731 free_state(aTHX_ st);
1740 /* just the current perl interpreter */
1741 struct state *st = new_state(aTHX);
1742 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1743 perl_size(aTHX_ st, NULL);
1744 RETVAL = st->total_size;
1745 free_state(aTHX_ st);
1754 /* the current perl interpreter plus malloc, in the context of total heap size */
1755 # ifdef _MALLOC_MALLOC_H_ /* OSX. Now sure where else mstats is available */
1759 /* some systems have the SVID2/XPG mallinfo structure and function */
1760 struct mstats ms = mstats(); /* mstats() first */
1762 struct state *st = new_state(aTHX);
1763 dNPathNodes(1, NULL);
1764 NPathPushNode("heap", NPtype_NAME);
1766 st->min_recurse_threshold = NO_RECURSION; /* so always recurse */
1768 perl_size(aTHX_ st, NPathLink("perl_interp"));
1770 NPathSetNode("free_malloc_space", NPtype_NAME);
1771 ADD_SIZE(st, "bytes_free", ms.bytes_free);
1772 ADD_ATTR(st, NPattr_NOTE, "bytes_total", ms.bytes_total);
1773 ADD_ATTR(st, NPattr_NOTE, "bytes_used", ms.bytes_used);
1774 ADD_ATTR(st, NPattr_NOTE, "chunks_used", ms.chunks_used);
1775 ADD_ATTR(st, NPattr_NOTE, "chunks_free", ms.chunks_free);
1776 /* TODO get heap size from OS and add a node: unknown = heapsize - perl - ms.bytes_free */
1777 /* for now we use bytes_total as an approximation */
1778 NPathSetNode("unknown", NPtype_NAME);
1779 ADD_SIZE(st, "unknown", ms.bytes_total - st->total_size);
1784 RETVAL = st->total_size;
1785 free_state(aTHX_ st);