At the time of very final cleanup, sv_free_arenas() is called from
perl_destruct() to physically free all the arenas allocated since the
-start of the interpreter. Note that this also clears PL_he_arenaroot,
-which is otherwise dealt with in hv.c.
+start of the interpreter.
Manipulation of any of the PL_*root pointers is protected by enclosing
LOCK_SV_MUTEX; ... UNLOCK_SV_MUTEX calls which should Do the Right Thing
PL_body_roots[i] = 0;
}
- free_arena(he);
-
Safefree(PL_nice_chunk);
PL_nice_chunk = Nullch;
PL_nice_chunk_size = 0;
#define HADNV FALSE
#define NONV TRUE
+#ifdef PURIFY
+/* With -DPURIFY we allocate everything directly, and don't use arenas.
+ This seems a rather elegant way to simplify some of the code below. */
+#define HASARENA FALSE
+#else
#define HASARENA TRUE
+#endif
#define NOARENA FALSE
+/* A macro to work out the offset needed to subtract from a pointer to (say)
+
+typedef struct {
+ STRLEN xpv_cur;
+ STRLEN xpv_len;
+} xpv_allocated;
+
+to make its members accessible via a pointer to (say)
+
+struct xpv {
+ NV xnv_nv;
+ STRLEN xpv_cur;
+ STRLEN xpv_len;
+};
+
+*/
+
+#define relative_STRUCT_OFFSET(longer, shorter, member) \
+ STRUCT_OFFSET(shorter, member) - STRUCT_OFFSET(longer, member)
+
+/* Calculate the length to copy. Specifically work out the length less any
+ final padding the compiler needed to add. See the comment in sv_upgrade
+ for why copying the padding proved to be a bug. */
+
+#define copy_length(type, last_member) \
+ STRUCT_OFFSET(type, last_member) \
+ + sizeof (((type*)SvANY((SV*)0))->last_member)
+
static const struct body_details bodies_by_type[] = {
{0, 0, 0, FALSE, NONV, NOARENA},
/* IVs are in the head, so the allocation size is 0 */
{0, 0, 0, FALSE, NONV, NOARENA},
/* 8 bytes on most ILP32 with IEEE doubles */
{sizeof(xpv_allocated),
- STRUCT_OFFSET(XPV, xpv_len) + sizeof (((XPV*)SvANY((SV*)0))->xpv_len)
- + STRUCT_OFFSET(xpv_allocated, xpv_cur) - STRUCT_OFFSET(XPV, xpv_cur),
- + STRUCT_OFFSET(xpv_allocated, xpv_cur) - STRUCT_OFFSET(XPV, xpv_cur)
- , FALSE, NONV, HASARENA},
+ copy_length(XPV, xpv_len)
+ + relative_STRUCT_OFFSET(XPV, xpv_allocated, xpv_cur),
+ + relative_STRUCT_OFFSET(XPV, xpv_allocated, xpv_cur),
+ FALSE, NONV, HASARENA},
/* 12 */
{sizeof(xpviv_allocated),
- STRUCT_OFFSET(XPVIV, xiv_u) + sizeof (((XPVIV*)SvANY((SV*)0))->xiv_u)
- + STRUCT_OFFSET(xpviv_allocated, xpv_cur) - STRUCT_OFFSET(XPVIV, xpv_cur),
- + STRUCT_OFFSET(xpviv_allocated, xpv_cur) - STRUCT_OFFSET(XPVIV, xpv_cur)
- , FALSE, NONV, HASARENA},
+ copy_length(XPVIV, xiv_u)
+ + relative_STRUCT_OFFSET(XPVIV, xpviv_allocated, xpv_cur),
+ + relative_STRUCT_OFFSET(XPVIV, xpviv_allocated, xpv_cur),
+ FALSE, NONV, HASARENA},
/* 20 */
- {sizeof(XPVNV),
- STRUCT_OFFSET(XPVNV, xiv_u) + sizeof (((XPVNV*)SvANY((SV*)0))->xiv_u),
- 0, FALSE, HADNV, HASARENA},
+ {sizeof(XPVNV), copy_length(XPVNV, xiv_u), 0, FALSE, HADNV, HASARENA},
/* 28 */
- {sizeof(XPVMG),
- STRUCT_OFFSET(XPVMG, xmg_stash) + sizeof (((XPVMG*)SvANY((SV*)0))->xmg_stash),
- 0, FALSE, HADNV, HASARENA},
+ {sizeof(XPVMG), copy_length(XPVMG, xmg_stash), 0, FALSE, HADNV, HASARENA},
/* 36 */
{sizeof(XPVBM), sizeof(XPVBM), 0, TRUE, HADNV, HASARENA},
/* 48 */
{sizeof(XPVLV), sizeof(XPVLV), 0, TRUE, HADNV, HASARENA},
/* 20 */
{sizeof(xpvav_allocated),
- STRUCT_OFFSET(XPVAV, xmg_stash)
- + sizeof (((XPVAV*)SvANY((SV *)0))->xmg_stash)
- + STRUCT_OFFSET(xpvav_allocated, xav_fill)
- - STRUCT_OFFSET(XPVAV, xav_fill),
- STRUCT_OFFSET(xpvav_allocated, xav_fill)
- - STRUCT_OFFSET(XPVAV, xav_fill), TRUE, HADNV, HASARENA},
+ copy_length(XPVAV, xmg_stash)
+ + relative_STRUCT_OFFSET(XPVAV, xpvav_allocated, xav_fill),
+ relative_STRUCT_OFFSET(XPVAV, xpvav_allocated, xav_fill),
+ TRUE, HADNV, HASARENA},
/* 20 */
{sizeof(xpvhv_allocated),
- STRUCT_OFFSET(XPVHV, xmg_stash)
- + sizeof (((XPVHV*)SvANY((SV *)0))->xmg_stash)
- + STRUCT_OFFSET(xpvhv_allocated, xhv_fill)
- - STRUCT_OFFSET(XPVHV, xhv_fill),
- STRUCT_OFFSET(xpvhv_allocated, xhv_fill)
- - STRUCT_OFFSET(XPVHV, xhv_fill), TRUE, HADNV, HASARENA},
+ copy_length(XPVHV, xmg_stash)
+ + relative_STRUCT_OFFSET(XPVHV, xpvhv_allocated, xhv_fill),
+ relative_STRUCT_OFFSET(XPVHV, xpvhv_allocated, xhv_fill),
+ TRUE, HADNV, HASARENA},
/* 76 */
{sizeof(XPVCV), sizeof(XPVCV), 0, TRUE, HADNV, HASARENA},
/* 80 */
#define new_XNV() my_safemalloc(sizeof(XPVNV))
#define del_XNV(p) my_safefree(p)
-#define new_XPV() my_safemalloc(sizeof(XPV))
-#define del_XPV(p) my_safefree(p)
-
-#define new_XPVIV() my_safemalloc(sizeof(XPVIV))
-#define del_XPVIV(p) my_safefree(p)
-
#define new_XPVNV() my_safemalloc(sizeof(XPVNV))
#define del_XPVNV(p) my_safefree(p)
-#define new_XPVCV() my_safemalloc(sizeof(XPVCV))
-#define del_XPVCV(p) my_safefree(p)
-
#define new_XPVAV() my_safemalloc(sizeof(XPVAV))
#define del_XPVAV(p) my_safefree(p)
#define new_XPVGV() my_safemalloc(sizeof(XPVGV))
#define del_XPVGV(p) my_safefree(p)
-#define new_XPVLV() my_safemalloc(sizeof(XPVLV))
-#define del_XPVLV(p) my_safefree(p)
-
-#define new_XPVBM() my_safemalloc(sizeof(XPVBM))
-#define del_XPVBM(p) my_safefree(p)
-
#else /* !PURIFY */
#define new_XNV() new_body_type(SVt_NV)
#define del_XNV(p) del_body_type(p, SVt_NV)
-#define new_XPV() new_body_allocated(SVt_PV)
-#define del_XPV(p) del_body_allocated(p, SVt_PV)
-
-#define new_XPVIV() new_body_allocated(SVt_PVIV)
-#define del_XPVIV(p) del_body_allocated(p, SVt_PVIV)
-
#define new_XPVNV() new_body_type(SVt_PVNV)
#define del_XPVNV(p) del_body_type(p, SVt_PVNV)
-#define new_XPVCV() new_body_type(SVt_PVCV)
-#define del_XPVCV(p) del_body_type(p, SVt_PVCV)
-
#define new_XPVAV() new_body_allocated(SVt_PVAV)
#define del_XPVAV(p) del_body_allocated(p, SVt_PVAV)
#define new_XPVGV() new_body_type(SVt_PVGV)
#define del_XPVGV(p) del_body_type(p, SVt_PVGV)
-#define new_XPVLV() new_body_type(SVt_PVLV)
-#define del_XPVLV(p) del_body_type(p, SVt_PVLV)
-
-#define new_XPVBM() new_body_type(SVt_PVBM)
-#define del_XPVBM(p) del_body_type(p, SVt_PVBM)
-
#endif /* PURIFY */
/* no arena for you! */
#define new_NOARENAZ(details) \
my_safecalloc((details)->size - (details)->offset)
-#define new_XPVFM() my_safemalloc(sizeof(XPVFM))
-#define del_XPVFM(p) my_safefree(p)
-
-#define new_XPVIO() my_safemalloc(sizeof(XPVIO))
-#define del_XPVIO(p) my_safefree(p)
-
-
-
/*
=for apidoc sv_upgrade
case SVt_PV:
assert(new_type_details->size);
-#ifndef PURIFY
	    /* We always allocate the full-length item with PURIFY. To do this
	       we fake things so that arena is false for all 16 types.  */
if(new_type_details->arena) {
/* This points to the start of the allocated area. */
new_body_inline(new_body, new_type_details->size, new_type);
} else {
new_body = new_NOARENAZ(new_type_details);
}
-#else
- /* We always allocated the full length item with PURIFY */
- new_body = new_NOARENAZ(new_type_details);
-#endif
SvANY(sv) = new_body;
if (old_type_details->copy) {
SvFLAGS(sv) &= SVf_BREAK;
SvFLAGS(sv) |= SVTYPEMASK;
-#ifndef PURIFY
if (sv_type_details->arena) {
del_body(((char *)SvANY(sv) - sv_type_details->offset),
&PL_body_roots[type]);
else if (sv_type_details->size) {
my_safefree(SvANY(sv));
}
-#else
- if (sv_type_details->size) {
- my_safefree(SvANY(sv));
- }
-#endif
}
/*
case SVt_PVIV:
case SVt_PV:
assert(sv_type_details->copy);
-#ifndef PURIFY
if (sv_type_details->arena) {
new_body_inline(new_body, sv_type_details->copy, sv_type);
new_body
} else {
new_body = new_NOARENA(sv_type_details);
}
-#else
- /* We always allocated the full length item with PURIFY */
- new_body = new_NOARENA(sv_type_details);
-#endif
}
assert(new_body);
SvANY(dstr) = new_body;
Zero(&PL_body_arenaroots, 1, PL_body_arenaroots);
Zero(&PL_body_roots, 1, PL_body_roots);
- PL_he_arenaroot = NULL;
- PL_he_root = NULL;
-
PL_nice_chunk = NULL;
PL_nice_chunk_size = 0;
PL_sv_count = 0;