S_more_he(pTHX)
{
dVAR;
- HE* he;
- HE* heend;
-
- he = (HE*) Perl_get_arena(aTHX_ PERL_ARENA_SIZE, HE_SVSLOT);
+ HE* he = (HE*) Perl_get_arena(aTHX_ PERL_ARENA_SIZE, HE_SVSLOT);
+ HE * const heend = &he[PERL_ARENA_SIZE / sizeof(HE) - 1];
- heend = &he[PERL_ARENA_SIZE / sizeof(HE) - 1];
PL_body_roots[HE_SVSLOT] = he;
while (he < heend) {
HeNEXT(he) = (HE*)(he + 1);
/* (klen == HEf_SVKEY) is special for MAGICAL hv entries, meaning key slot
* contains an SV* */
-#define HV_FETCH_ISSTORE 0x01
-#define HV_FETCH_ISEXISTS 0x02
-#define HV_FETCH_LVALUE 0x04
-#define HV_FETCH_JUST_SV 0x08
-
/*
=for apidoc hv_store
if (!hv)
return NULL;
+ if (SvSMAGICAL(hv) && SvGMAGICAL(hv) && !(action & HV_DISABLE_UVAR_XKEY)) {
+ keysv = hv_magic_uvar_xkey(hv, keysv, key, klen, flags, action);
+ /* If a fetch-as-store fails on the fetch, then the action is to
+ recurse once into "hv_store". If we didn't do this, then that
+ recursive call would call the key conversion routine again.
+ However, as we replace the original key with the converted
+ key, this would result in a double conversion, which would show
+ up as a bug if the conversion routine is not idempotent. */
+ hash = 0;
+ }
if (keysv) {
- if (SvSMAGICAL(hv) && SvGMAGICAL(hv))
- keysv = hv_magic_uvar_xkey(hv, keysv, action);
if (flags & HVhek_FREEKEY)
Safefree(key);
key = SvPV_const(keysv, klen);
key) whereas the store is for key (the original) */
entry = hv_fetch_common(hv, NULL, nkey, klen,
HVhek_FREEKEY, /* free nkey */
- 0 /* non-LVAL fetch */,
+ 0 /* non-LVAL fetch */
+ | HV_DISABLE_UVAR_XKEY,
NULL /* no value */,
0 /* compute hash */);
if (!entry && (action & HV_FETCH_LVALUE)) {
Do it this way to encourage compiler to tail
call optimise. */
entry = hv_fetch_common(hv, keysv, key, klen,
- flags, HV_FETCH_ISSTORE,
+ flags,
+ HV_FETCH_ISSTORE
+ | HV_DISABLE_UVAR_XKEY,
newSV(0), hash);
} else {
if (flags & HVhek_FREEKEY)
if (env) {
sv = newSVpvn(env,len);
SvTAINTED_on(sv);
- return hv_fetch_common(hv,keysv,key,klen,flags,HV_FETCH_ISSTORE,sv,
+ return hv_fetch_common(hv, keysv, key, klen, flags,
+ HV_FETCH_ISSTORE|HV_DISABLE_UVAR_XKEY, sv,
hash);
}
}
magic check happen. */
/* gonna assign to this, so it better be there */
return hv_fetch_common(hv, keysv, key, klen, flags,
- HV_FETCH_ISSTORE, val, hash);
+ HV_FETCH_ISSTORE|HV_DISABLE_UVAR_XKEY, val,
+ hash);
/* XXX Surely that could leak if the fetch-was-store fails?
Just like the hv_fetch. */
}
if (!hv)
return NULL;
+ if (SvSMAGICAL(hv) && SvGMAGICAL(hv)
+ && !(d_flags & HV_DISABLE_UVAR_XKEY)) {
+ keysv = hv_magic_uvar_xkey(hv, keysv, key, klen, k_flags, HV_DELETE);
+ hash = 0;
+ }
if (keysv) {
- if (SvSMAGICAL(hv) && SvGMAGICAL(hv))
- keysv = hv_magic_uvar_xkey(hv, keysv, -1);
if (k_flags & HVhek_FREEKEY)
Safefree(key);
key = SvPV_const(keysv, klen);
if (needs_copy) {
SV *sv;
entry = hv_fetch_common(hv, keysv, key, klen,
- k_flags & ~HVhek_FREEKEY, HV_FETCH_LVALUE,
+ k_flags & ~HVhek_FREEKEY,
+ HV_FETCH_LVALUE|HV_DISABLE_UVAR_XKEY,
NULL, hash);
sv = entry ? HeVAL(entry) : NULL;
if (sv) {
S_hsplit(pTHX_ HV *hv)
{
dVAR;
- register XPVHV* xhv = (XPVHV*)SvANY(hv);
+ register XPVHV* const xhv = (XPVHV*)SvANY(hv);
const I32 oldsize = (I32) xhv->xhv_max+1; /* HvMAX(hv)+1 (sick) */
register I32 newsize = oldsize * 2;
register I32 i;
Perl_newHV(pTHX)
{
register XPVHV* xhv;
- HV * const hv = (HV*)newSV(0);
-
- sv_upgrade((SV *)hv, SVt_PVHV);
+ HV * const hv = (HV*)newSV_type(SVt_PVHV);
xhv = (XPVHV*)SvANY(hv);
assert(!SvOK(hv));
#ifndef NODEFAULT_SHAREKEYS
return;
val = HeVAL(entry);
if (val && isGV(val) && GvCVu(val) && HvNAME_get(hv))
- PL_sub_generation++; /* may be deletion of method from stash */
+ mro_method_changed_in(hv); /* deletion of method from stash */
SvREFCNT_dec(val);
if (HeKLEN(entry) == HEf_SVKEY) {
SvREFCNT_dec(HeKEY_sv(entry));
HvREHASH_off(hv);
reset:
if (SvOOK(hv)) {
+ if(HvNAME_get(hv))
+ mro_isa_changed_in(hv);
HvEITER_set(hv, NULL);
}
}
if (SvOOK(hv)) {
HE *entry;
+ struct mro_meta *meta;
struct xpvhv_aux *iter = HvAUX(hv);
/* If there are weak references to this HV, we need to avoid
freeing them up here. In particular we need to keep the AV
iter->xhv_riter = -1; /* HvRITER(hv) = -1 */
iter->xhv_eiter = NULL; /* HvEITER(hv) = NULL */
+ if((meta = iter->xhv_mro_meta)) {
+ if(meta->mro_linear_dfs) SvREFCNT_dec(meta->mro_linear_dfs);
+ if(meta->mro_linear_c3) SvREFCNT_dec(meta->mro_linear_c3);
+ if(meta->mro_nextmethod) SvREFCNT_dec(meta->mro_nextmethod);
+ Safefree(meta);
+ iter->xhv_mro_meta = NULL;
+ }
+
/* There are now no allocated pointers in the aux structure. */
SvFLAGS(hv) &= ~SVf_OOK; /* Goodbye, aux structure. */
return;
DEBUG_A(Perl_hv_assert(aTHX_ hv));
xhv = (XPVHV*)SvANY(hv);
+
+ if ((name = HvNAME_get(hv)) && !PL_dirty)
+ mro_isa_changed_in(hv);
+
hfreeentries(hv);
- if ((name = HvNAME_get(hv))) {
+ if (name) {
if(PL_stashcache)
hv_delete(PL_stashcache, name, HvNAMELEN_get(hv), G_DISCARD);
hv_name_set(hv, NULL, 0, 0);
iter->xhv_eiter = NULL; /* HvEITER(hv) = NULL */
iter->xhv_name = 0;
iter->xhv_backreferences = 0;
+ iter->xhv_mro_meta = NULL;
return iter;
}
iter = hv_auxinit(hv);
}
PERL_HASH(hash, name, len);
- iter->xhv_name = name ? share_hek(name, len, hash) : 0;
+ iter->xhv_name = name ? share_hek(name, len, hash) : NULL;
}
AV **
void
Perl_unshare_hek(pTHX_ HEK *hek)
{
+ assert(hek);
unshare_hek_or_pvn(hek, NULL, 0, 0);
}
}
STATIC SV *
-S_hv_magic_uvar_xkey(pTHX_ HV* hv, SV* keysv, int action)
+S_hv_magic_uvar_xkey(pTHX_ HV* hv, SV* keysv, const char *const key,
+ const STRLEN klen, const int k_flags, int action)
{
MAGIC* mg;
if ((mg = mg_find((SV*)hv, PERL_MAGIC_uvar))) {
struct ufuncs * const uf = (struct ufuncs *)mg->mg_ptr;
if (uf->uf_set == NULL) {
SV* obj = mg->mg_obj;
+
+ if (!keysv) {
+ keysv = sv_2mortal(newSVpvn(key, klen));
+ if (k_flags & HVhek_UTF8)
+ SvUTF8_on(keysv);
+ }
+
mg->mg_obj = keysv; /* pass key */
uf->uf_index = action; /* pass action */
magic_getuvar((SV*)hv, mg);
value = &PL_sv_placeholder;
break;
case HVrhek_IV:
- value = (he->refcounted_he_data[0] & HVrhek_UV)
- ? newSVuv(he->refcounted_he_val.refcounted_he_u_iv)
- : newSViv(he->refcounted_he_val.refcounted_he_u_uv);
+ value = newSViv(he->refcounted_he_val.refcounted_he_u_iv);
+ break;
+ case HVrhek_UV:
+ value = newSVuv(he->refcounted_he_val.refcounted_he_u_uv);
break;
case HVrhek_PV:
+ case HVrhek_PV_UTF8:
/* Create a string SV that directly points to the bytes in our
structure. */
- value = newSV(0);
- sv_upgrade(value, SVt_PV);
+ value = newSV_type(SVt_PV);
SvPV_set(value, (char *) he->refcounted_he_data + 1);
SvCUR_set(value, he->refcounted_he_val.refcounted_he_u_len);
/* This stops anything trying to free it */
SvLEN_set(value, 0);
SvPOK_on(value);
SvREADONLY_on(value);
- if (he->refcounted_he_data[0] & HVrhek_UTF8)
+ if ((he->refcounted_he_data[0] & HVrhek_typemask) == HVrhek_PV_UTF8)
SvUTF8_on(value);
break;
default:
return value;
}
-#ifdef USE_ITHREADS
-/* A big expression to find the key offset */
-#define REF_HE_KEY(chain) \
- ((((chain->refcounted_he_data[0] & HVrhek_typemask) == HVrhek_PV) \
- ? chain->refcounted_he_val.refcounted_he_u_len + 1 : 0) \
- + 1 + chain->refcounted_he_data)
-#endif
-
/*
=for apidoc refcounted_he_chain_2hv
-Generates an returns a C<HV *> by walking up the tree starting at the passed
+Generates and returns a C<HV *> by walking up the tree starting at the passed
in C<struct refcounted_he *>.
=cut
value_len = 0;
key_offset = 1;
}
- flags = value_type;
#ifdef USE_ITHREADS
he = (struct refcounted_he*)
if (value_type == HVrhek_PV) {
Copy(value_p, he->refcounted_he_data + 1, value_len + 1, char);
he->refcounted_he_val.refcounted_he_u_len = value_len;
- if (SvUTF8(value)) {
- flags |= HVrhek_UTF8;
- }
+ /* Do it this way so that the SvUTF8() test is after the SvPV, in case
+ the value is overloaded, and doesn't yet have the UTF-8 flag set. */
+ if (SvUTF8(value))
+ value_type = HVrhek_PV_UTF8;
} else if (value_type == HVrhek_IV) {
if (SvUOK(value)) {
he->refcounted_he_val.refcounted_he_u_uv = SvUVX(value);
- flags |= HVrhek_UV;
+ value_type = HVrhek_UV;
} else {
he->refcounted_he_val.refcounted_he_u_iv = SvIVX(value);
}
}
+ flags = value_type;
if (is_utf8) {
/* Hash keys are always stored normalised to (yes) ISO-8859-1.