"", "", "");
}
-#define USE_S_MORE_THINGY
-
-#ifdef USE_S_MORE_THINGY
-
-#define S_more_thingy(TYPE,lctype) \
-STATIC void \
-S_more_## lctype (pTHX) \
-{ \
- TYPE* lctype; \
- TYPE* lctype ## end; \
- void *ptr; \
- New(711, ptr, PERL_ARENA_SIZE/sizeof(TYPE), TYPE); \
- *((void **) ptr) = (void *)PL_## lctype ## _arenaroot; \
- PL_## lctype ## _arenaroot = ptr; \
- \
- lctype = (TYPE*) ptr; \
- lctype ## end = &lctype[PERL_ARENA_SIZE / sizeof(TYPE) - 1]; \
- \
- /* fudge by sizeof XPVIV */ \
- lctype += (sizeof(XPVIV) - 1) / sizeof(TYPE) + 1; \
- \
- PL_ ## lctype ## _root = lctype; \
- while ( lctype < lctype ## end) { \
- *(TYPE**) lctype = (TYPE*)(lctype + 1); \
- lctype++; \
- } \
- *(TYPE**) lctype = 0; \
-}
-
-#define S_more_thingy_allocated(lctype) \
-STATIC void \
-S_more_## lctype (pTHX) \
-{ \
- lctype ## _allocated * lctype ; \
- lctype ## _allocated * lctype ## end; \
- void *ptr; \
- New(711, ptr, PERL_ARENA_SIZE/sizeof(lctype ## _allocated ), lctype ## _allocated ); \
- *((void **) ptr) = (void *)PL_ ## lctype ## _arenaroot; \
- PL_## lctype ## _arenaroot = ptr; \
- \
- lctype = (lctype ## _allocated *) ptr; \
- lctype ## end = &lctype[PERL_ARENA_SIZE / sizeof(lctype ## _allocated ) - 1]; \
- \
- /* fudge by sizeof XPVIV */ \
- lctype += (sizeof(XPVIV) - 1) / sizeof(lctype ## _allocated ) + 1; \
- \
- PL_ ## lctype ## _root = lctype; \
- while ( lctype < lctype ## end) { \
- *(lctype ## _allocated **) lctype = (lctype ## _allocated *)(lctype + 1); \
- lctype++; \
- } \
- *(lctype ## _allocated **) lctype = 0; \
-}
-
-S_more_thingy(NV, xnv)
-
-S_more_thingy_allocated(xpv)
-
-S_more_thingy_allocated(xpviv)
-
-S_more_thingy(XPVNV, xpvnv)
-
-S_more_thingy(XPVCV, xpvcv)
-
-S_more_thingy_allocated(xpvav)
-
-S_more_thingy_allocated(xpvhv)
-
-S_more_thingy(XPVGV, xpvgv)
-
-S_more_thingy(XPVMG, xpvmg)
-
-S_more_thingy(XPVBM, xpvbm)
-
-S_more_thingy(XPVLV, xpvlv)
-
-
-#else
-
-
-/* allocate another arena's worth of NV bodies */
-
-STATIC void
-S_more_xnv(pTHX)
-{
- NV* xnv;
- NV* xnvend;
- void *ptr;
- New(711, ptr, PERL_ARENA_SIZE/sizeof(NV), NV);
- *((void **) ptr) = (void *)PL_xnv_arenaroot;
- PL_xnv_arenaroot = ptr;
-
- xnv = (NV*) ptr;
- xnvend = &xnv[PERL_ARENA_SIZE / sizeof(NV) - 1];
- xnv += (sizeof(XPVIV) - 1) / sizeof(NV) + 1; /* fudge by sizeof XPVIV */
- PL_xnv_root = xnv;
- while (xnv < xnvend) {
- *(NV**)xnv = (NV*)(xnv + 1);
- xnv++;
- }
- *(NV**)xnv = 0;
-}
-
-/* allocate another arena's worth of struct xpv */
-
-STATIC void
-S_more_xpv(pTHX)
-{
- xpv_allocated* xpv;
- xpv_allocated* xpvend;
- New(713, xpv, PERL_ARENA_SIZE/sizeof(xpv_allocated), xpv_allocated);
- *((xpv_allocated**)xpv) = PL_xpv_arenaroot;
- PL_xpv_arenaroot = xpv;
-
- xpvend = &xpv[PERL_ARENA_SIZE / sizeof(xpv_allocated) - 1];
- PL_xpv_root = ++xpv;
- while (xpv < xpvend) {
- *((xpv_allocated**)xpv) = xpv + 1;
- xpv++;
- }
- *((xpv_allocated**)xpv) = 0;
-}
-
-/* allocate another arena's worth of struct xpviv */
-
-STATIC void
-S_more_xpviv(pTHX)
-{
- xpviv_allocated* xpviv;
- xpviv_allocated* xpvivend;
- New(713, xpviv, PERL_ARENA_SIZE/sizeof(xpviv_allocated), xpviv_allocated);
- *((xpviv_allocated**)xpviv) = PL_xpviv_arenaroot;
- PL_xpviv_arenaroot = xpviv;
-
- xpvivend = &xpviv[PERL_ARENA_SIZE / sizeof(xpviv_allocated) - 1];
- PL_xpviv_root = ++xpviv;
- while (xpviv < xpvivend) {
- *((xpviv_allocated**)xpviv) = xpviv + 1;
- xpviv++;
- }
- *((xpviv_allocated**)xpviv) = 0;
-}
-
-/* allocate another arena's worth of struct xpvnv */
-
-STATIC void
-S_more_xpvnv(pTHX)
-{
- XPVNV* xpvnv;
- XPVNV* xpvnvend;
- New(715, xpvnv, PERL_ARENA_SIZE/sizeof(XPVNV), XPVNV);
- *((XPVNV**)xpvnv) = PL_xpvnv_arenaroot;
- PL_xpvnv_arenaroot = xpvnv;
-
- xpvnvend = &xpvnv[PERL_ARENA_SIZE / sizeof(XPVNV) - 1];
- PL_xpvnv_root = ++xpvnv;
- while (xpvnv < xpvnvend) {
- *((XPVNV**)xpvnv) = xpvnv + 1;
- xpvnv++;
- }
- *((XPVNV**)xpvnv) = 0;
-}
-
-/* allocate another arena's worth of struct xpvcv */
-
-STATIC void
-S_more_xpvcv(pTHX)
-{
- XPVCV* xpvcv;
- XPVCV* xpvcvend;
- New(716, xpvcv, PERL_ARENA_SIZE/sizeof(XPVCV), XPVCV);
- *((XPVCV**)xpvcv) = PL_xpvcv_arenaroot;
- PL_xpvcv_arenaroot = xpvcv;
-
- xpvcvend = &xpvcv[PERL_ARENA_SIZE / sizeof(XPVCV) - 1];
- PL_xpvcv_root = ++xpvcv;
- while (xpvcv < xpvcvend) {
- *((XPVCV**)xpvcv) = xpvcv + 1;
- xpvcv++;
- }
- *((XPVCV**)xpvcv) = 0;
-}
-
-/* allocate another arena's worth of struct xpvav */
-
-STATIC void
-S_more_xpvav(pTHX)
-{
- xpvav_allocated* xpvav;
- xpvav_allocated* xpvavend;
- New(717, xpvav, PERL_ARENA_SIZE/sizeof(xpvav_allocated),
- xpvav_allocated);
- *((xpvav_allocated**)xpvav) = PL_xpvav_arenaroot;
- PL_xpvav_arenaroot = xpvav;
-
- xpvavend = &xpvav[PERL_ARENA_SIZE / sizeof(xpvav_allocated) - 1];
- PL_xpvav_root = ++xpvav;
- while (xpvav < xpvavend) {
- *((xpvav_allocated**)xpvav) = xpvav + 1;
- xpvav++;
- }
- *((xpvav_allocated**)xpvav) = 0;
-}
-
-/* allocate another arena's worth of struct xpvhv */
-
-STATIC void
-S_more_xpvhv(pTHX)
-{
- xpvhv_allocated* xpvhv;
- xpvhv_allocated* xpvhvend;
- New(718, xpvhv, PERL_ARENA_SIZE/sizeof(xpvhv_allocated),
- xpvhv_allocated);
- *((xpvhv_allocated**)xpvhv) = PL_xpvhv_arenaroot;
- PL_xpvhv_arenaroot = xpvhv;
-
- xpvhvend = &xpvhv[PERL_ARENA_SIZE / sizeof(xpvhv_allocated) - 1];
- PL_xpvhv_root = ++xpvhv;
- while (xpvhv < xpvhvend) {
- *((xpvhv_allocated**)xpvhv) = xpvhv + 1;
- xpvhv++;
- }
- *((xpvhv_allocated**)xpvhv) = 0;
-}
-
-/* allocate another arena's worth of struct xpvmg */
-
-STATIC void
-S_more_xpvmg(pTHX)
-{
- XPVMG* xpvmg;
- XPVMG* xpvmgend;
- New(719, xpvmg, PERL_ARENA_SIZE/sizeof(XPVMG), XPVMG);
- *((XPVMG**)xpvmg) = PL_xpvmg_arenaroot;
- PL_xpvmg_arenaroot = xpvmg;
-
- xpvmgend = &xpvmg[PERL_ARENA_SIZE / sizeof(XPVMG) - 1];
- PL_xpvmg_root = ++xpvmg;
- while (xpvmg < xpvmgend) {
- *((XPVMG**)xpvmg) = xpvmg + 1;
- xpvmg++;
- }
- *((XPVMG**)xpvmg) = 0;
-}
-
-/* allocate another arena's worth of struct xpvgv */
-
-STATIC void
-S_more_xpvgv(pTHX)
+STATIC void *
+S_more_bodies (pTHX_ void **arena_root, void **root, size_t size)
{
- XPVGV* xpvgv;
- XPVGV* xpvgvend;
- New(720, xpvgv, PERL_ARENA_SIZE/sizeof(XPVGV), XPVGV);
- *((XPVGV**)xpvgv) = PL_xpvgv_arenaroot;
- PL_xpvgv_arenaroot = xpvgv;
+ char *start;
+ const char *end;
+ size_t count = PERL_ARENA_SIZE/size;
+ New(0, start, count*size, char);
+ *((void **) start) = *arena_root;
+ *arena_root = (void *)start;
- xpvgvend = &xpvgv[PERL_ARENA_SIZE / sizeof(XPVGV) - 1];
- PL_xpvgv_root = ++xpvgv;
- while (xpvgv < xpvgvend) {
- *((XPVGV**)xpvgv) = xpvgv + 1;
- xpvgv++;
- }
- *((XPVGV**)xpvgv) = 0;
-}
+ end = start + (count-1) * size;
-/* allocate another arena's worth of struct xpvlv */
+ /* The initial slot is used to link the arenas together, so it isn't to be
+ linked into the list of ready-to-use bodies. */
-STATIC void
-S_more_xpvlv(pTHX)
-{
- XPVLV* xpvlv;
- XPVLV* xpvlvend;
- New(720, xpvlv, PERL_ARENA_SIZE/sizeof(XPVLV), XPVLV);
- *((XPVLV**)xpvlv) = PL_xpvlv_arenaroot;
- PL_xpvlv_arenaroot = xpvlv;
-
- xpvlvend = &xpvlv[PERL_ARENA_SIZE / sizeof(XPVLV) - 1];
- PL_xpvlv_root = ++xpvlv;
- while (xpvlv < xpvlvend) {
- *((XPVLV**)xpvlv) = xpvlv + 1;
- xpvlv++;
- }
- *((XPVLV**)xpvlv) = 0;
-}
+ start += size;
-/* allocate another arena's worth of struct xpvbm */
-
-STATIC void
-S_more_xpvbm(pTHX)
-{
- XPVBM* xpvbm;
- XPVBM* xpvbmend;
- New(721, xpvbm, PERL_ARENA_SIZE/sizeof(XPVBM), XPVBM);
- *((XPVBM**)xpvbm) = PL_xpvbm_arenaroot;
- PL_xpvbm_arenaroot = xpvbm;
+ *root = (void *)start;
- xpvbmend = &xpvbm[PERL_ARENA_SIZE / sizeof(XPVBM) - 1];
- PL_xpvbm_root = ++xpvbm;
- while (xpvbm < xpvbmend) {
- *((XPVBM**)xpvbm) = xpvbm + 1;
- xpvbm++;
+ while (start < end) {
+ char *next = start + size;
+ *(void**) start = (void *)next;
+ start = next;
}
- *((XPVBM**)xpvbm) = 0;
-}
-
-#endif
-
-/* grab a new NV body from the free list, allocating more if necessary */
-
-STATIC XPVNV*
-S_new_xnv(pTHX)
-{
- NV* xnv;
- LOCK_SV_MUTEX;
- if (!PL_xnv_root)
- S_more_xnv(aTHX);
- xnv = PL_xnv_root;
- PL_xnv_root = *(NV**)xnv;
- UNLOCK_SV_MUTEX;
- return (XPVNV*)((char*)xnv - STRUCT_OFFSET(XPVNV, xnv_nv));
-}
-
-/* return an NV body to the free list */
-
-STATIC void
-S_del_xnv(pTHX_ XPVNV *p)
-{
- NV* xnv = (NV*)((char*)(p) + STRUCT_OFFSET(XPVNV, xnv_nv));
- LOCK_SV_MUTEX;
- *(NV**)xnv = PL_xnv_root;
- PL_xnv_root = xnv;
- UNLOCK_SV_MUTEX;
-}
-
-/* grab a new struct xpv from the free list, allocating more if necessary */
-
-STATIC XPV*
-S_new_xpv(pTHX)
-{
- xpv_allocated* xpv;
- LOCK_SV_MUTEX;
- if (!PL_xpv_root)
- S_more_xpv(aTHX);
- xpv = PL_xpv_root;
- PL_xpv_root = *(xpv_allocated**)xpv;
- UNLOCK_SV_MUTEX;
- /* If xpv_allocated is the same structure as XPV then the two OFFSETs
- sum to zero, and the pointer is unchanged. If the allocated structure
- is smaller (no initial IV actually allocated) then the net effect is
- to subtract the size of the IV from the pointer, to return a new pointer
- as if an initial IV were actually allocated. */
- return (XPV*)((char*)xpv - STRUCT_OFFSET(XPV, xpv_cur)
- + STRUCT_OFFSET(xpv_allocated, xpv_cur));
-}
-
-/* return a struct xpv to the free list */
-
-STATIC void
-S_del_xpv(pTHX_ XPV *p)
-{
- xpv_allocated* xpv
- = (xpv_allocated*)((char*)(p) + STRUCT_OFFSET(XPV, xpv_cur)
- - STRUCT_OFFSET(xpv_allocated, xpv_cur));
- LOCK_SV_MUTEX;
- *(xpv_allocated**)xpv = PL_xpv_root;
- PL_xpv_root = xpv;
- UNLOCK_SV_MUTEX;
-}
-
-/* grab a new struct xpviv from the free list, allocating more if necessary */
-
-STATIC XPVIV*
-S_new_xpviv(pTHX)
-{
- xpviv_allocated* xpviv;
- LOCK_SV_MUTEX;
- if (!PL_xpviv_root)
- S_more_xpviv(aTHX);
- xpviv = PL_xpviv_root;
- PL_xpviv_root = *(xpviv_allocated**)xpviv;
- UNLOCK_SV_MUTEX;
- /* If xpviv_allocated is the same structure as XPVIV then the two OFFSETs
- sum to zero, and the pointer is unchanged. If the allocated structure
- is smaller (no initial IV actually allocated) then the net effect is
- to subtract the size of the IV from the pointer, to return a new pointer
- as if an initial IV were actually allocated. */
- return (XPVIV*)((char*)xpviv - STRUCT_OFFSET(XPVIV, xpv_cur)
- + STRUCT_OFFSET(xpviv_allocated, xpv_cur));
-}
-
-/* return a struct xpviv to the free list */
-
-STATIC void
-S_del_xpviv(pTHX_ XPVIV *p)
-{
- xpviv_allocated* xpviv
- = (xpviv_allocated*)((char*)(p) + STRUCT_OFFSET(XPVIV, xpv_cur)
- - STRUCT_OFFSET(xpviv_allocated, xpv_cur));
- LOCK_SV_MUTEX;
- *(xpviv_allocated**)xpviv = PL_xpviv_root;
- PL_xpviv_root = xpviv;
- UNLOCK_SV_MUTEX;
-}
-
-/* grab a new struct xpvnv from the free list, allocating more if necessary */
+ *(void **)start = 0;
-STATIC XPVNV*
-S_new_xpvnv(pTHX)
-{
- XPVNV* xpvnv;
- LOCK_SV_MUTEX;
- if (!PL_xpvnv_root)
- S_more_xpvnv(aTHX);
- xpvnv = PL_xpvnv_root;
- PL_xpvnv_root = *(XPVNV**)xpvnv;
- UNLOCK_SV_MUTEX;
- return xpvnv;
-}
-
-/* return a struct xpvnv to the free list */
-
-STATIC void
-S_del_xpvnv(pTHX_ XPVNV *p)
-{
- LOCK_SV_MUTEX;
- *(XPVNV**)p = PL_xpvnv_root;
- PL_xpvnv_root = p;
- UNLOCK_SV_MUTEX;
-}
-
-/* grab a new struct xpvcv from the free list, allocating more if necessary */
-
-STATIC XPVCV*
-S_new_xpvcv(pTHX)
-{
- XPVCV* xpvcv;
- LOCK_SV_MUTEX;
- if (!PL_xpvcv_root)
- S_more_xpvcv(aTHX);
- xpvcv = PL_xpvcv_root;
- PL_xpvcv_root = *(XPVCV**)xpvcv;
- UNLOCK_SV_MUTEX;
- return xpvcv;
-}
-
-/* return a struct xpvcv to the free list */
-
-STATIC void
-S_del_xpvcv(pTHX_ XPVCV *p)
-{
- LOCK_SV_MUTEX;
- *(XPVCV**)p = PL_xpvcv_root;
- PL_xpvcv_root = p;
- UNLOCK_SV_MUTEX;
-}
-
-/* grab a new struct xpvav from the free list, allocating more if necessary */
-
-STATIC XPVAV*
-S_new_xpvav(pTHX)
-{
- xpvav_allocated* xpvav;
- LOCK_SV_MUTEX;
- if (!PL_xpvav_root)
- S_more_xpvav(aTHX);
- xpvav = PL_xpvav_root;
- PL_xpvav_root = *(xpvav_allocated**)xpvav;
- UNLOCK_SV_MUTEX;
- return (XPVAV*)((char*)xpvav - STRUCT_OFFSET(XPVAV, xav_fill)
- + STRUCT_OFFSET(xpvav_allocated, xav_fill));
-}
-
-/* return a struct xpvav to the free list */
-
-STATIC void
-S_del_xpvav(pTHX_ XPVAV *p)
-{
- xpvav_allocated* xpvav
- = (xpvav_allocated*)((char*)(p) + STRUCT_OFFSET(XPVAV, xav_fill)
- - STRUCT_OFFSET(xpvav_allocated, xav_fill));
- LOCK_SV_MUTEX;
- *(xpvav_allocated**)xpvav = PL_xpvav_root;
- PL_xpvav_root = xpvav;
- UNLOCK_SV_MUTEX;
-}
-
-/* grab a new struct xpvhv from the free list, allocating more if necessary */
-
-STATIC XPVHV*
-S_new_xpvhv(pTHX)
-{
- xpvhv_allocated* xpvhv;
- LOCK_SV_MUTEX;
- if (!PL_xpvhv_root)
- S_more_xpvhv(aTHX);
- xpvhv = PL_xpvhv_root;
- PL_xpvhv_root = *(xpvhv_allocated**)xpvhv;
- UNLOCK_SV_MUTEX;
- return (XPVHV*)((char*)xpvhv - STRUCT_OFFSET(XPVHV, xhv_fill)
- + STRUCT_OFFSET(xpvhv_allocated, xhv_fill));
-}
-
-/* return a struct xpvhv to the free list */
-
-STATIC void
-S_del_xpvhv(pTHX_ XPVHV *p)
-{
- xpvhv_allocated* xpvhv
- = (xpvhv_allocated*)((char*)(p) + STRUCT_OFFSET(XPVHV, xhv_fill)
- - STRUCT_OFFSET(xpvhv_allocated, xhv_fill));
- LOCK_SV_MUTEX;
- *(xpvhv_allocated**)xpvhv = PL_xpvhv_root;
- PL_xpvhv_root = xpvhv;
- UNLOCK_SV_MUTEX;
-}
-
-/* grab a new struct xpvmg from the free list, allocating more if necessary */
-
-STATIC XPVMG*
-S_new_xpvmg(pTHX)
-{
- XPVMG* xpvmg;
- LOCK_SV_MUTEX;
- if (!PL_xpvmg_root)
- S_more_xpvmg(aTHX);
- xpvmg = PL_xpvmg_root;
- PL_xpvmg_root = *(XPVMG**)xpvmg;
- UNLOCK_SV_MUTEX;
- return xpvmg;
-}
-
-/* return a struct xpvmg to the free list */
-
-STATIC void
-S_del_xpvmg(pTHX_ XPVMG *p)
-{
- LOCK_SV_MUTEX;
- *(XPVMG**)p = PL_xpvmg_root;
- PL_xpvmg_root = p;
- UNLOCK_SV_MUTEX;
+ return *root;
}
-/* grab a new struct xpvgv from the free list, allocating more if necessary */
+/* grab a new thing from the free list, allocating more if necessary */
-STATIC XPVGV*
-S_new_xpvgv(pTHX)
+STATIC void *
+S_new_body(pTHX_ void **arena_root, void **root, size_t size, size_t offset)
{
- XPVGV* xpvgv;
+ void *xpv;
LOCK_SV_MUTEX;
- if (!PL_xpvgv_root)
- S_more_xpvgv(aTHX);
- xpvgv = PL_xpvgv_root;
- PL_xpvgv_root = *(XPVGV**)xpvgv;
+ xpv = *root ? *root : S_more_bodies(aTHX_ arena_root, root, size);
+ *root = *(void**)xpv;
UNLOCK_SV_MUTEX;
- return xpvgv;
+ return (void*)((char*)xpv - offset);
}
-/* return a struct xpvgv to the free list */
+/* return a thing to the free list */
STATIC void
-S_del_xpvgv(pTHX_ XPVGV *p)
+S_del_body(pTHX_ void *thing, void **root, size_t offset)
{
+ void **real_thing = (void**)((char *)thing + offset);
LOCK_SV_MUTEX;
- *(XPVGV**)p = PL_xpvgv_root;
- PL_xpvgv_root = p;
+ *real_thing = *root;
+ *root = (void*)real_thing;
UNLOCK_SV_MUTEX;
}
-/* grab a new struct xpvlv from the free list, allocating more if necessary */
+/* Conventionally we simply malloc() a big block of memory, then divide it
+ up into lots of the thing that we're allocating.
-STATIC XPVLV*
-S_new_xpvlv(pTHX)
-{
- XPVLV* xpvlv;
- LOCK_SV_MUTEX;
- if (!PL_xpvlv_root)
- S_more_xpvlv(aTHX);
- xpvlv = PL_xpvlv_root;
- PL_xpvlv_root = *(XPVLV**)xpvlv;
- UNLOCK_SV_MUTEX;
- return xpvlv;
-}
-
-/* return a struct xpvlv to the free list */
-
-STATIC void
-S_del_xpvlv(pTHX_ XPVLV *p)
-{
- LOCK_SV_MUTEX;
- *(XPVLV**)p = PL_xpvlv_root;
- PL_xpvlv_root = p;
- UNLOCK_SV_MUTEX;
-}
+   This macro will expand to a call to S_new_body. So for XPVBM (with ithreads),
+   it would become
-/* grab a new struct xpvbm from the free list, allocating more if necessary */
-
-STATIC XPVBM*
-S_new_xpvbm(pTHX)
-{
- XPVBM* xpvbm;
- LOCK_SV_MUTEX;
- if (!PL_xpvbm_root)
- S_more_xpvbm(aTHX);
- xpvbm = PL_xpvbm_root;
- PL_xpvbm_root = *(XPVBM**)xpvbm;
- UNLOCK_SV_MUTEX;
- return xpvbm;
-}
-
-/* return a struct xpvbm to the free list */
+ S_new_body(my_perl, (void**)&(my_perl->Ixpvbm_arenaroot),
+ (void**)&(my_perl->Ixpvbm_root), sizeof(XPVBM), 0)
+*/
-STATIC void
-S_del_xpvbm(pTHX_ XPVBM *p)
-{
- LOCK_SV_MUTEX;
- *(XPVBM**)p = PL_xpvbm_root;
- PL_xpvbm_root = p;
- UNLOCK_SV_MUTEX;
-}
+#define new_body(TYPE,lctype) \
+ S_new_body(aTHX_ (void**)&PL_ ## lctype ## _arenaroot, \
+ (void**)&PL_ ## lctype ## _root, \
+ sizeof(TYPE), \
+ 0)
+
+/* But for some types, we cheat. The type starts with some members that are
+ never accessed. So we allocate the substructure, starting at the first used
+ member, then adjust the pointer back in memory by the size of the bit not
+ allocated, so it's as if we allocated the full structure.
+ (But things will all go boom if you write to the part that is "not there",
+ because you'll be overwriting the last members of the preceding structure
+ in memory.)
+
+ We calculate the correction using the STRUCT_OFFSET macro. For example, if
+ xpv_allocated is the same structure as XPV then the two OFFSETs sum to zero,
+ and the pointer is unchanged. If the allocated structure is smaller (no
+ initial NV actually allocated) then the net effect is to subtract the size
+ of the NV from the pointer, to return a new pointer as if an initial NV were
+ actually allocated.
+
+ This is the same trick as was used for NV and IV bodies. Ironically it
+ doesn't need to be used for NV bodies any more, because NV is now at the
+ start of the structure. IV bodies don't need it either, because they are
+ no longer allocated. */
+
+#define new_body_allocated(TYPE,lctype,member) \
+ S_new_body(aTHX_ (void**)&PL_ ## lctype ## _arenaroot, \
+ (void**)&PL_ ## lctype ## _root, \
+ sizeof(lctype ## _allocated), \
+ STRUCT_OFFSET(TYPE, member) \
+ - STRUCT_OFFSET(lctype ## _allocated, member))
+
+
+#define del_body(p,TYPE,lctype) \
+ S_del_body(aTHX_ (void*)p, (void**)&PL_ ## lctype ## _root, 0)
+
+#define del_body_allocated(p,TYPE,lctype,member) \
+ S_del_body(aTHX_ (void*)p, (void**)&PL_ ## lctype ## _root, \
+ STRUCT_OFFSET(TYPE, member) \
+ - STRUCT_OFFSET(lctype ## _allocated, member))
#define my_safemalloc(s) (void*)safemalloc(s)
#define my_safefree(p) safefree((char*)p)
#else /* !PURIFY */
-#define new_XNV() (void*)new_xnv()
-#define del_XNV(p) del_xnv((XPVNV*) p)
+#define new_XNV() new_body(NV, xnv)
+#define del_XNV(p) del_body(p, NV, xnv)
-#define new_XPV() (void*)new_xpv()
-#define del_XPV(p) del_xpv((XPV *)p)
+#define new_XPV() new_body_allocated(XPV, xpv, xpv_cur)
+#define del_XPV(p) del_body_allocated(p, XPV, xpv, xpv_cur)
-#define new_XPVIV() (void*)new_xpviv()
-#define del_XPVIV(p) del_xpviv((XPVIV *)p)
+#define new_XPVIV() new_body_allocated(XPVIV, xpviv, xpv_cur)
+#define del_XPVIV(p) del_body_allocated(p, XPVIV, xpviv, xpv_cur)
-#define new_XPVNV() (void*)new_xpvnv()
-#define del_XPVNV(p) del_xpvnv((XPVNV *)p)
+#define new_XPVNV() new_body(XPVNV, xpvnv)
+#define del_XPVNV(p) del_body(p, XPVNV, xpvnv)
-#define new_XPVCV() (void*)new_xpvcv()
-#define del_XPVCV(p) del_xpvcv((XPVCV *)p)
+#define new_XPVCV() new_body(XPVCV, xpvcv)
+#define del_XPVCV(p) del_body(p, XPVCV, xpvcv)
-#define new_XPVAV() (void*)new_xpvav()
-#define del_XPVAV(p) del_xpvav((XPVAV *)p)
+#define new_XPVAV() new_body_allocated(XPVAV, xpvav, xav_fill)
+#define del_XPVAV(p) del_body_allocated(p, XPVAV, xpvav, xav_fill)
-#define new_XPVHV() (void*)new_xpvhv()
-#define del_XPVHV(p) del_xpvhv((XPVHV *)p)
+#define new_XPVHV() new_body_allocated(XPVHV, xpvhv, xhv_fill)
+#define del_XPVHV(p) del_body_allocated(p, XPVHV, xpvhv, xhv_fill)
-#define new_XPVMG() (void*)new_xpvmg()
-#define del_XPVMG(p) del_xpvmg((XPVMG *)p)
+#define new_XPVMG() new_body(XPVMG, xpvmg)
+#define del_XPVMG(p) del_body(p, XPVMG, xpvmg)
-#define new_XPVGV() (void*)new_xpvgv()
-#define del_XPVGV(p) del_xpvgv((XPVGV *)p)
+#define new_XPVGV() new_body(XPVGV, xpvgv)
+#define del_XPVGV(p) del_body(p, XPVGV, xpvgv)
-#define new_XPVLV() (void*)new_xpvlv()
-#define del_XPVLV(p) del_xpvlv((XPVLV *)p)
+#define new_XPVLV() new_body(XPVLV, xpvlv)
+#define del_XPVLV(p) del_body(p, XPVLV, xpvlv)
-#define new_XPVBM() (void*)new_xpvbm()
-#define del_XPVBM(p) del_xpvbm((XPVBM *)p)
+#define new_XPVBM() new_body(XPVBM, xpvbm)
+#define del_XPVBM(p) del_body(p, XPVBM, xpvbm)
#endif /* PURIFY */
NV nv;
MAGIC* magic;
HV* stash;
+ void** old_body_arena;
+ size_t old_body_offset;
+ size_t old_body_length; /* Well, the length to copy. */
+ void* old_body;
+ bool zero_nv = TRUE;
+#ifdef DEBUGGING
+ U32 old_type = SvTYPE(sv);
+#endif
if (mt != SVt_PV && SvIsCOW(sv)) {
sv_force_normal_flags(sv, 0);
if (SvTYPE(sv) == mt)
return;
+ if (SvTYPE(sv) > mt)
+ Perl_croak(aTHX_ "sv_upgrade from type %d down to type %d",
+ (int)SvTYPE(sv), (int)mt);
+
pv = NULL;
cur = 0;
len = 0;
magic = NULL;
stash = Nullhv;
+ old_body = SvANY(sv);
+ old_body_arena = 0;
+ old_body_offset = 0;
+ old_body_length = 0;
+
switch (SvTYPE(sv)) {
case SVt_NULL:
break;
mt = SVt_PVNV;
else if (mt < SVt_PVIV)
mt = SVt_PVIV;
+ old_body_offset = STRUCT_OFFSET(XPVIV, xiv_iv);
+ old_body_length = sizeof(IV);
break;
case SVt_NV:
nv = SvNVX(sv);
- del_XNV(SvANY(sv));
+ old_body_arena = (void **) &PL_xnv_root;
+ old_body_length = sizeof(NV);
+ zero_nv = FALSE;
+
if (mt < SVt_PVNV)
mt = SVt_PVNV;
break;
pv = SvPVX_mutable(sv);
cur = SvCUR(sv);
len = SvLEN(sv);
- del_XPV(SvANY(sv));
+ old_body_arena = (void **) &PL_xpv_root;
+ old_body_offset = STRUCT_OFFSET(XPV, xpv_cur)
+ - STRUCT_OFFSET(xpv_allocated, xpv_cur);
+ old_body_length = sizeof(XPV) - old_body_offset;
if (mt <= SVt_IV)
mt = SVt_PVIV;
else if (mt == SVt_NV)
cur = SvCUR(sv);
len = SvLEN(sv);
iv = SvIVX(sv);
- del_XPVIV(SvANY(sv));
+ old_body_arena = (void **) &PL_xpviv_root;
+ old_body_offset = STRUCT_OFFSET(XPVIV, xpv_cur)
+ - STRUCT_OFFSET(xpviv_allocated, xpv_cur);
+ old_body_length = sizeof(XPVIV) - old_body_offset;
break;
case SVt_PVNV:
pv = SvPVX_mutable(sv);
len = SvLEN(sv);
iv = SvIVX(sv);
nv = SvNVX(sv);
- del_XPVNV(SvANY(sv));
+ old_body_arena = (void **) &PL_xpvnv_root;
+ old_body_length = sizeof(XPVNV);
+ zero_nv = FALSE;
break;
case SVt_PVMG:
/* Because the XPVMG of PL_mess_sv isn't allocated from the arena,
nv = SvNVX(sv);
magic = SvMAGIC(sv);
stash = SvSTASH(sv);
- del_XPVMG(SvANY(sv));
+ old_body_arena = (void **) &PL_xpvmg_root;
+ old_body_length = sizeof(XPVMG);
+ zero_nv = FALSE;
break;
default:
Perl_croak(aTHX_ "Can't upgrade that kind of scalar");
case SVt_NULL:
Perl_croak(aTHX_ "Can't upgrade to undef");
case SVt_IV:
+ assert(old_type == SVt_NULL);
SvANY(sv) = (XPVIV*)((char*)&(sv->sv_u.svu_iv) - STRUCT_OFFSET(XPVIV, xiv_iv));
SvIV_set(sv, iv);
break;
case SVt_NV:
+ assert(old_type == SVt_NULL);
SvANY(sv) = new_XNV();
SvNV_set(sv, nv);
break;
case SVt_RV:
+ assert(old_type == SVt_NULL);
SvANY(sv) = &sv->sv_u.svu_rv;
SvRV_set(sv, (SV*)pv);
break;
SvLEN_set(sv, len);
break;
}
+
+
+ if (old_body_arena) {
+#ifdef PURIFY
+	my_safefree(old_body);
+#else
+ S_del_body(aTHX_ old_body, old_body_arena, old_body_offset);
+#endif
+    }
}
/*
if (!(flags & SVp_POK) || !*SvPVX_const(sv)) {
if ((flags & SVTYPEMASK) < SVt_PVIV)
- sv_upgrade(sv, SVt_IV);
+ sv_upgrade(sv, ((flags & SVTYPEMASK) > SVt_IV ? SVt_PVIV : SVt_IV));
(void)SvIOK_only(sv);
SvIV_set(sv, 1);
return;
# define PTR_TABLE_HASH(ptr) (PTR2UV(ptr) >> 2)
#endif
-
-
-STATIC void
-S_more_pte(pTHX)
-{
- struct ptr_tbl_ent* pte;
- struct ptr_tbl_ent* pteend;
- New(0, pte, PERL_ARENA_SIZE/sizeof(struct ptr_tbl_ent), struct ptr_tbl_ent);
- pte->next = PL_pte_arenaroot;
- PL_pte_arenaroot = pte;
-
- pteend = &pte[PERL_ARENA_SIZE / sizeof(struct ptr_tbl_ent) - 1];
- PL_pte_root = ++pte;
- while (pte < pteend) {
- pte->next = pte + 1;
- pte++;
- }
- pte->next = 0;
-}
-
-STATIC struct ptr_tbl_ent*
-S_new_pte(pTHX)
-{
- struct ptr_tbl_ent* pte;
- if (!PL_pte_root)
- S_more_pte(aTHX);
- pte = PL_pte_root;
- PL_pte_root = pte->next;
- return pte;
-}
-
-STATIC void
-S_del_pte(pTHX_ struct ptr_tbl_ent*p)
-{
- p->next = PL_pte_root;
- PL_pte_root = p;
-}
+#define new_pte() new_body(struct ptr_tbl_ent, pte)
+#define del_pte(p) del_body(p, struct ptr_tbl_ent, pte)
/* map an existing pointer using a table */
return;
}
}
- tblent = S_new_pte(aTHX);
+ tblent = new_pte();
tblent->oldval = oldv;
tblent->newval = newv;
tblent->next = *otblent;
if (entry) {
PTR_TBL_ENT_t *oentry = entry;
entry = entry->next;
- S_del_pte(aTHX_ oentry);
+ del_pte(oentry);
}
if (!entry) {
if (++riter > max) {