*/
/*
- * $Id: Storable.xs,v 0.7.1.2 2000/08/14 07:19:27 ram Exp $
+ * $Id: Storable.xs,v 1.0.1.4 2000/10/26 17:11:04 ram Exp ram $
*
* Copyright (c) 1995-2000, Raphael Manfredi
*
- * You may redistribute only under the terms of the Artistic License,
- * as specified in the README file that comes with the distribution.
+ * You may redistribute only under the same terms as Perl 5, as specified
+ * in the README file that comes with the distribution.
*
* $Log: Storable.xs,v $
- * Revision 0.7.1.2 2000/08/14 07:19:27 ram
- * patch2: added a refcnt dec in retrieve_tied_key()
+ * Revision 1.0.1.4 2000/10/26 17:11:04 ram
+ * patch5: auto requires module of blessed ref when STORABLE_thaw misses
*
- * Revision 0.7.1.1 2000/08/13 20:10:06 ram
- * patch1: was wrongly optimizing for "undef" values in hashes
- * patch1: added support for ref to tied items in hash/array
- * patch1: added overloading support
+ * Revision 1.0.1.3 2000/09/29 19:49:57 ram
+ * patch3: avoid using "tainted" and "dirty" since Perl remaps them via cpp
*
- * Revision 0.7 2000/08/03 22:04:44 ram
- * Baseline for second beta release.
+ * Revision 1.0  2000/09/01 19:40:41  ram
+ * Baseline for first official release.
*
*/
#include <patchlevel.h> /* Perl's one, needed since 5.6 */
#include <XSUB.h>
-/*#define DEBUGME /* Debug mode, turns assertions on as well */
-/*#define DASSERT /* Assertion mode */
+#if 0
+#define DEBUGME /* Debug mode, turns assertions on as well */
+#define DASSERT /* Assertion mode */
+#endif
/*
* Pre PerlIO time when none of USE_PERLIO and PERLIO_IS_STDIO is defined
/*
* Earlier versions of perl might be used, we can't assume they have the latest!
*/
+
+#ifndef PERL_VERSION /* For perls < 5.6 */
+#define PERL_VERSION PATCHLEVEL
#ifndef newRV_noinc
#define newRV_noinc(sv) ((Sv = newRV(sv)), --SvREFCNT(SvRV(Sv)), Sv)
#endif
+#if (PATCHLEVEL <= 4)	/* Older perls (<= 5.004) lack PL_ namespace */
#define PL_sv_yes sv_yes
#define PL_sv_no sv_no
#define PL_sv_undef sv_undef
+#if (SUBVERSION <= 4) /* 5.004_04 has been reported to lack newSVpvn */
+#define newSVpvn newSVpv
#endif
+#endif /* PATCHLEVEL <= 4 */
#ifndef HvSHAREKEYS_off
#define HvSHAREKEYS_off(hv) /* Ignore */
#endif
+#ifndef AvFILLp /* Older perls (<=5.003) lack AvFILLp */
+#define AvFILLp AvFILL
+#endif
+typedef double NV; /* Older perls lack the NV type */
+#define IVdf "ld" /* Various printf formats for Perl types */
+#define UVuf "lu"
+#define UVof "lo"
+#define UVxf "lx"
+#define INT2PTR(t,v) (t)(IV)(v)
+#define PTR2UV(v) (unsigned long)(v)
+#endif /* PERL_VERSION -- perls < 5.6 */
+
+#ifndef NVef /* The following were not part of perl 5.6 */
+#if defined(USE_LONG_DOUBLE) && \
+ defined(HAS_LONG_DOUBLE) && defined(PERL_PRIfldbl)
+#define NVef PERL_PRIeldbl
+#define NVff PERL_PRIfldbl
+#define NVgf PERL_PRIgldbl
+#else
+#define NVef "e"
+#define NVff "f"
+#define NVgf "g"
+#endif
+#endif
#ifdef DEBUGME
#ifndef DASSERT
#define C(x) ((char) (x)) /* For markers with dynamic retrieval handling */
#define SX_OBJECT C(0) /* Already stored object */
-#define SX_LSCALAR C(1) /* Scalar (string) forthcoming (length, data) */
+#define SX_LSCALAR C(1) /* Scalar (large binary) follows (length, data) */
#define SX_ARRAY C(2) /* Array forthcominng (size, item list) */
#define SX_HASH C(3) /* Hash forthcoming (size, key/value pair list) */
#define SX_REF C(4) /* Reference to object forthcoming */
#define SX_DOUBLE C(7) /* Double forthcoming */
#define SX_BYTE C(8) /* (signed) byte forthcoming */
#define SX_NETINT C(9) /* Integer in network order forthcoming */
-#define SX_SCALAR C(10) /* Scalar (small) forthcoming (length, data) */
+#define SX_SCALAR C(10) /* Scalar (binary, small) follows (length, data) */
#define SX_TIED_ARRAY C(11) /* Tied array forthcoming */
#define SX_TIED_HASH C(12) /* Tied hash forthcoming */
#define SX_TIED_SCALAR C(13) /* Tied scalar forthcoming */
#define SX_OVERLOAD C(20) /* Overloaded reference */
#define SX_TIED_KEY C(21) /* Tied magic key forthcoming */
#define SX_TIED_IDX C(22) /* Tied magic index forthcoming */
-#define SX_ERROR C(23) /* Error */
+#define SX_UTF8STR C(23) /* UTF-8 string forthcoming (small) */
+#define SX_LUTF8STR C(24) /* UTF-8 string forthcoming (large) */
+#define SX_ERROR C(25) /* Error */
/*
* Those are only used to retrieve "old" pre-0.6 binary images.
#define MY_VERSION "Storable(" XS_VERSION ")"
+/*
+ * Fields s_tainted and s_dirty are prefixed with s_ because Perl's include
+ * files remap tainted and dirty when threading is enabled. That's bad for
+ * perl to remap such common words. -- RAM, 29/09/00
+ */
+
typedef struct stcxt {
int entry; /* flags recursion */
int optype; /* type of traversal operation */
I32 tagnum; /* incremented at store time for each seen object */
I32 classnum; /* incremented at store time for each seen classname */
int netorder; /* true if network order used */
+ int s_tainted; /* true if input source is tainted, at retrieve time */
int forgive_me; /* whether to be forgiving... */
int canonical; /* whether to store hashes sorted by key */
- int dirty; /* context is dirty due to CROAK() -- can be cleaned */
+ int s_dirty; /* context is dirty due to CROAK() -- can be cleaned */
struct extendable keybuf; /* for hash key retrieval */
struct extendable membuf; /* for memory store/retrieve operations */
PerlIO *fio; /* where I/O are performed, NULL for memory */
#endif /* < perl5.004_68 */
#define dSTCXT_PTR(T,name) \
- T name = (T)(perinterp_sv && SvIOK(perinterp_sv)\
- ? SvIVX(perinterp_sv) : NULL)
+ T name = (perinterp_sv && SvIOK(perinterp_sv) \
+ ? INT2PTR(T, SvIVX(perinterp_sv)) : (T) 0)
#define dSTCXT \
dSTCXT_SV; \
dSTCXT_PTR(stcxt_t *, cxt)
#define INIT_STCXT \
dSTCXT; \
Newz(0, cxt, 1, stcxt_t); \
- sv_setiv(perinterp_sv, (IV) cxt)
+ sv_setiv(perinterp_sv, PTR2IV(cxt))
#define SET_STCXT(x) do { \
dSTCXT_SV; \
- sv_setiv(perinterp_sv, (IV) (x)); \
+ sv_setiv(perinterp_sv, PTR2IV(x)); \
} while (0)
#else /* !MULTIPLICITY && !PERL_OBJECT && !PERL_CAPI */
* but the topmost context stacked.
*/
-#define CROAK(x) do { cxt->dirty = 1; croak x; } while (0)
+#define CROAK(x) do { cxt->s_dirty = 1; croak x; } while (0)
/*
* End of "thread-safe" related definitions.
*/
/*
+ * LOW_32BITS
+ *
+ * Keep only the low 32 bits of a pointer (used for tags, which are not
+ * really pointers).
+ */
+
+#if PTRSIZE <= 4
+#define LOW_32BITS(x) ((I32) (x))
+#else
+#define LOW_32BITS(x) ((I32) ((unsigned long) (x) & 0xffffffffUL))
+#endif
+
+/*
+ * oI, oS, oC
+ *
+ * Hack for Crays, where sizeof(I32) == 8, and which are big-endians.
+ * Used in the WLEN and RLEN macros.
+ */
+
+#if INTSIZE > 4
+#define oI(x) ((I32 *) ((char *) (x) + 4))
+#define oS(x) ((x) - 4)
+#define oC(x) (x = 0)
+#define CRAY_HACK
+#else
+#define oI(x) (x)
+#define oS(x) (x)
+#define oC(x)
+#endif
+
+/*
* key buffer handling
*/
#define kbuf (cxt->keybuf).arena
return (SV *) 0; \
} while (0)
+#ifdef CRAY_HACK
+#define MBUF_GETINT(x) do { \
+ oC(x); \
+ if ((mptr + 4) <= mend) { \
+ memcpy(oI(&x), mptr, 4); \
+ mptr += 4; \
+ } else \
+ return (SV *) 0; \
+} while (0)
+#else
#define MBUF_GETINT(x) do { \
if ((mptr + sizeof(int)) <= mend) { \
if (int_aligned(mptr)) \
} else \
return (SV *) 0; \
} while (0)
+#endif
#define MBUF_READ(x,s) do { \
if ((mptr + (s)) <= mend) { \
} \
} while (0)
+#ifdef CRAY_HACK
+#define MBUF_PUTINT(i) do { \
+ MBUF_CHK(4); \
+ memcpy(mptr, oI(&i), 4); \
+ mptr += 4; \
+} while (0)
+#else
#define MBUF_PUTINT(i) do { \
MBUF_CHK(sizeof(int)); \
if (int_aligned(mptr)) \
memcpy(mptr, &i, sizeof(int)); \
mptr += sizeof(int); \
} while (0)
+#endif
#define MBUF_WRITE(x,s) do { \
MBUF_CHK(s); \
} while (0)
/*
- * LOW_32BITS
- *
- * Keep only the low 32 bits of a pointer (used for tags, which are not
- * really pointers).
- */
-
-#if PTRSIZE <= 4
-#define LOW_32BITS(x) ((I32) (x))
-#else
-#define LOW_32BITS(x) ((I32) ((unsigned long) (x) & 0xffffffffUL))
-#endif
-
-/*
* Possible return values for sv_type().
*/
static char magicstr[] = "pst0"; /* Used as a magic number */
#define STORABLE_BIN_MAJOR 2 /* Binary major "version" */
-#define STORABLE_BIN_MINOR 1 /* Binary minor "version" */
+#define STORABLE_BIN_MINOR 3 /* Binary minor "version" */
/*
* Useful store shortcuts...
return -1; \
} while (0)
+#define WRITE_I32(x) do { \
+ ASSERT(sizeof(x) == sizeof(I32), ("writing an I32")); \
+ if (!cxt->fio) \
+ MBUF_PUTINT(x); \
+ else if (PerlIO_write(cxt->fio, oI(&x), oS(sizeof(x))) != oS(sizeof(x))) \
+ return -1; \
+ } while (0)
+
#ifdef HAS_HTONL
#define WLEN(x) do { \
if (cxt->netorder) { \
int y = (int) htonl(x); \
if (!cxt->fio) \
MBUF_PUTINT(y); \
- else if (PerlIO_write(cxt->fio, &y, sizeof(y)) != sizeof(y)) \
+ else if (PerlIO_write(cxt->fio,oI(&y),oS(sizeof(y))) != oS(sizeof(y))) \
return -1; \
} else { \
if (!cxt->fio) \
MBUF_PUTINT(x); \
- else if (PerlIO_write(cxt->fio, &x, sizeof(x)) != sizeof(x)) \
+ else if (PerlIO_write(cxt->fio,oI(&x),oS(sizeof(x))) != oS(sizeof(x))) \
return -1; \
} \
} while (0)
#else
-#define WLEN(x) do { \
- if (!cxt->fio) \
- MBUF_PUTINT(x); \
- else if (PerlIO_write(cxt->fio, &x, sizeof(x)) != sizeof(x)) \
- return -1; \
- } while (0)
+#define WLEN(x) WRITE_I32(x)
#endif
#define WRITE(x,y) do { \
return -1; \
} while (0)
-#define STORE_SCALAR(pv, len) do { \
+#define STORE_PV_LEN(pv, len, small, large) do { \
if (len <= LG_SCALAR) { \
unsigned char clen = (unsigned char) len; \
- PUTMARK(SX_SCALAR); \
+ PUTMARK(small); \
PUTMARK(clen); \
if (len) \
WRITE(pv, len); \
} else { \
- PUTMARK(SX_LSCALAR); \
+ PUTMARK(large); \
WLEN(len); \
WRITE(pv, len); \
} \
} while (0)
+#define STORE_SCALAR(pv, len) STORE_PV_LEN(pv, len, SX_SCALAR, SX_LSCALAR)
+
+/*
+ * Conditional UTF8 support.
+ * On non-UTF8 perls, UTF8 strings are returned as normal strings.
+ *
+ */
+#ifdef SvUTF8_on
+#define STORE_UTF8STR(pv, len) STORE_PV_LEN(pv, len, SX_UTF8STR, SX_LUTF8STR)
+#else
+#define SvUTF8(sv) 0
+#define STORE_UTF8STR(pv, len) CROAK(("panic: storing UTF8 in non-UTF8 perl"))
+#define SvUTF8_on(sv) CROAK(("Cannot retrieve UTF8 data in non-UTF8 perl"))
+#endif
+
/*
* Store undef in arrays and hashes without recursing through store().
*/
return (SV *) 0; \
} while (0)
-#ifdef HAS_NTOHL
-#define RLEN(x) do { \
+#define READ_I32(x) do { \
+ ASSERT(sizeof(x) == sizeof(I32), ("reading an I32")); \
+ oC(x); \
if (!cxt->fio) \
MBUF_GETINT(x); \
- else if (PerlIO_read(cxt->fio, &x, sizeof(x)) != sizeof(x)) \
+ else if (PerlIO_read(cxt->fio, oI(&x), oS(sizeof(x))) != oS(sizeof(x))) \
return (SV *) 0; \
- if (cxt->netorder) \
- x = (int) ntohl(x); \
} while (0)
-#else
+
+#ifdef HAS_NTOHL
#define RLEN(x) do { \
+ oC(x); \
if (!cxt->fio) \
MBUF_GETINT(x); \
- else if (PerlIO_read(cxt->fio, &x, sizeof(x)) != sizeof(x)) \
+ else if (PerlIO_read(cxt->fio, oI(&x), oS(sizeof(x))) != oS(sizeof(x))) \
return (SV *) 0; \
+ if (cxt->netorder) \
+ x = (int) ntohl(x); \
} while (0)
+#else
+#define RLEN(x) READ_I32(x)
#endif
#define READ(x,y) do { \
return (SV *) 0; \
if (av_store(cxt->aseen, cxt->tagnum++, SvREFCNT_inc(y)) == 0) \
return (SV *) 0; \
- TRACEME(("aseen(#%d) = 0x%lx (refcnt=%d)", cxt->tagnum-1, \
- (unsigned long) y, SvREFCNT(y)-1)); \
+ TRACEME(("aseen(#%d) = 0x%"UVxf" (refcnt=%d)", cxt->tagnum-1, \
+ PTR2UV(y), SvREFCNT(y)-1)); \
} while (0)
/*
#define BLESS(s,p) do { \
SV *ref; \
HV *stash; \
- TRACEME(("blessing 0x%lx in %s", (unsigned long) (s), (p))); \
+ TRACEME(("blessing 0x%"UVxf" in %s", PTR2UV(s), (p))); \
stash = gv_stashpv((p), TRUE); \
ref = newRV_noinc(s); \
(void) sv_bless(ref, stash); \
static int store_tied(stcxt_t *cxt, SV *sv);
static int store_tied_item(stcxt_t *cxt, SV *sv);
static int store_other(stcxt_t *cxt, SV *sv);
+static int store_blessed(stcxt_t *cxt, SV *sv, int type, HV *pkg);
static int (*sv_store[])() = {
store_ref, /* svis_REF */
*/
static SV *retrieve_lscalar(stcxt_t *cxt);
+static SV *retrieve_lutf8str(stcxt_t *cxt);
static SV *old_retrieve_array(stcxt_t *cxt);
static SV *old_retrieve_hash(stcxt_t *cxt);
static SV *retrieve_ref(stcxt_t *cxt);
static SV *retrieve_byte(stcxt_t *cxt);
static SV *retrieve_netint(stcxt_t *cxt);
static SV *retrieve_scalar(stcxt_t *cxt);
+static SV *retrieve_utf8str(stcxt_t *cxt);
static SV *retrieve_tied_array(stcxt_t *cxt);
static SV *retrieve_tied_hash(stcxt_t *cxt);
static SV *retrieve_tied_scalar(stcxt_t *cxt);
retrieve_other, /* SX_OVERLOADED not supported */
retrieve_other, /* SX_TIED_KEY not supported */
retrieve_other, /* SX_TIED_IDX not supported */
+ retrieve_other, /* SX_UTF8STR not supported */
+ retrieve_other, /* SX_LUTF8STR not supported */
retrieve_other, /* SX_ERROR */
};
retrieve_overloaded, /* SX_OVERLOAD */
retrieve_tied_key, /* SX_TIED_KEY */
retrieve_tied_idx, /* SX_TIED_IDX */
+ retrieve_utf8str, /* SX_UTF8STR */
+ retrieve_lutf8str, /* SX_LUTF8STR */
retrieve_other, /* SX_ERROR */
};
#define RETRIEVE(c,x) (*(c)->retrieve_vtbl[(x) >= SX_ERROR ? SX_ERROR : (x)])
-static SV *mbuf2sv();
-static int store_blessed();
+static SV *mbuf2sv(void);
/***
*** Context management.
*
* Called once per "thread" (interpreter) to initialize some global context.
*/
-static void init_perinterp() {
+static void init_perinterp(void)
+{
INIT_STCXT;
cxt->netorder = 0; /* true if network order used */
*
* Initialize a new store context for real recursion.
*/
-static void init_store_context(cxt, f, optype, network_order)
-stcxt_t *cxt;
-PerlIO *f;
-int optype;
-int network_order;
+static void init_store_context(
+ stcxt_t *cxt,
+ PerlIO *f,
+ int optype,
+ int network_order)
{
TRACEME(("init_store_context"));
*
* It is reported fixed in 5.005, hence the #if.
*/
-#if PATCHLEVEL < 5
+#if PERL_VERSION < 5
#define HBUCKETS 4096 /* Buckets for %hseen */
HvMAX(cxt->hseen) = HBUCKETS - 1; /* keys %hseen = $HBUCKETS; */
#endif
cxt->hclass = newHV(); /* Where seen classnames are stored */
-#if PATCHLEVEL < 5
+#if PERL_VERSION < 5
HvMAX(cxt->hclass) = HBUCKETS - 1; /* keys %hclass = $HBUCKETS; */
#endif
*
* Clean store context by
*/
-static void clean_store_context(cxt)
-stcxt_t *cxt;
+static void clean_store_context(stcxt_t *cxt)
{
HE *he;
sv_free((SV *) cxt->hook);
cxt->entry = 0;
- cxt->dirty = 0;
+ cxt->s_dirty = 0;
}
/*
*
* Initialize a new retrieve context for real recursion.
*/
-static void init_retrieve_context(cxt, optype)
-stcxt_t *cxt;
-int optype;
+static void init_retrieve_context(stcxt_t *cxt, int optype, int is_tainted)
{
TRACEME(("init_retrieve_context"));
cxt->tagnum = 0; /* Have to count objects... */
cxt->classnum = 0; /* ...and class names as well */
cxt->optype = optype;
+ cxt->s_tainted = is_tainted;
cxt->entry = 1; /* No recursion yet */
}
*
* Clean retrieve context by
*/
-static void clean_retrieve_context(cxt)
-stcxt_t *cxt;
+static void clean_retrieve_context(stcxt_t *cxt)
{
TRACEME(("clean_retrieve_context"));
sv_free((SV *) cxt->hseen); /* optional HV, for backward compat. */
cxt->entry = 0;
- cxt->dirty = 0;
+ cxt->s_dirty = 0;
}
/*
{
TRACEME(("clean_context"));
- ASSERT(cxt->dirty, ("dirty context"));
+ ASSERT(cxt->s_dirty, ("dirty context"));
if (cxt->optype & ST_RETRIEVE)
clean_retrieve_context(cxt);
TRACEME(("allocate_context"));
- ASSERT(!parent_cxt->dirty, ("parent context clean"));
+ ASSERT(!parent_cxt->s_dirty, ("parent context clean"));
Newz(0, cxt, 1, stcxt_t);
cxt->prev = parent_cxt;
TRACEME(("free_context"));
- ASSERT(!cxt->dirty, ("clean context"));
+ ASSERT(!cxt->s_dirty, ("clean context"));
ASSERT(prev, ("not freeing root context"));
if (kbuf)
*
* Tells whether we're in the middle of a store operation.
*/
-int is_storing()
+int is_storing(void)
{
dSTCXT;
*
* Tells whether we're in the middle of a retrieve operation.
*/
-int is_retrieving()
+int is_retrieving(void)
{
dSTCXT;
* This is typically out-of-band information that might prove useful
* to people wishing to convert native to network order data when used.
*/
-int last_op_in_netorder()
+int last_op_in_netorder(void)
{
dSTCXT;
* Returns the routine reference as an SV*, or null if neither the package
* nor its ancestors know about the method.
*/
-static SV *pkg_fetchmeth(cache, pkg, method)
-HV *cache;
-HV *pkg;
-char *method;
+static SV *pkg_fetchmeth(
+ HV *cache,
+ HV *pkg,
+ char *method)
{
GV *gv;
SV *sv;
gv = gv_fetchmethod_autoload(pkg, method, FALSE);
if (gv && isGV(gv)) {
sv = newRV((SV*) GvCV(gv));
- TRACEME(("%s->%s: 0x%lx", HvNAME(pkg), method, (unsigned long) sv));
+ TRACEME(("%s->%s: 0x%"UVxf, HvNAME(pkg), method, PTR2UV(sv)));
} else {
sv = newSVsv(&PL_sv_undef);
TRACEME(("%s->%s: not found", HvNAME(pkg), method));
*
* Force cached value to be undef: hook ignored even if present.
*/
-static void pkg_hide(cache, pkg, method)
-HV *cache;
-HV *pkg;
-char *method;
+static void pkg_hide(
+ HV *cache,
+ HV *pkg,
+ char *method)
{
(void) hv_store(cache,
HvNAME(pkg), strlen(HvNAME(pkg)), newSVsv(&PL_sv_undef), 0);
}
/*
+ * pkg_uncache
+ *
+ * Discard cached value: a whole fetch loop will be retried at next lookup.
+ */
+static void pkg_uncache(
+ HV *cache,
+ HV *pkg,
+ char *method)
+{
+ (void) hv_delete(cache, HvNAME(pkg), strlen(HvNAME(pkg)), G_DISCARD);
+}
+
+/*
* pkg_can
*
* Our own "UNIVERSAL::can", which caches results.
* Returns the routine reference as an SV*, or null if the object does not
* know about the method.
*/
-static SV *pkg_can(cache, pkg, method)
-HV *cache;
-HV *pkg;
-char *method;
+static SV *pkg_can(
+ HV *cache,
+ HV *pkg,
+ char *method)
{
SV **svh;
SV *sv;
TRACEME(("cached %s->%s: not found", HvNAME(pkg), method));
return (SV *) 0;
} else {
- TRACEME(("cached %s->%s: 0x%lx", HvNAME(pkg), method,
- (unsigned long) sv));
+ TRACEME(("cached %s->%s: 0x%"UVxf,
+ HvNAME(pkg), method, PTR2UV(sv)));
return sv;
}
}
* Call routine as obj->hook(av) in scalar context.
* Propagates the single returned value if not called in void context.
*/
-static SV *scalar_call(obj, hook, cloning, av, flags)
-SV *obj;
-SV *hook;
-int cloning;
-AV *av;
-I32 flags;
+static SV *scalar_call(
+ SV *obj,
+ SV *hook,
+ int cloning,
+ AV *av,
+ I32 flags)
{
dSP;
int count;
int i;
XPUSHs(ary[0]); /* Frozen string */
for (i = 1; i < cnt; i++) {
- TRACEME(("pushing arg #%d (0x%lx)...", i, (unsigned long) ary[i]));
+ TRACEME(("pushing arg #%d (0x%"UVxf")...",
+ i, PTR2UV(ary[i])));
XPUSHs(sv_2mortal(newRV(ary[i])));
}
}
* Call routine obj->hook(cloning) in list context.
* Returns the list of returned values in an array.
*/
-static AV *array_call(obj, hook, cloning)
-SV *obj;
-SV *hook;
-int cloning;
+static AV *array_call(
+ SV *obj,
+ SV *hook,
+ int cloning)
{
dSP;
int count;
AV *av;
int i;
- TRACEME(("arrary_call (cloning=%d), cloning"));
+ TRACEME(("array_call (cloning=%d)", cloning));
ENTER;
SAVETMPS;
*
* Return true if the class was known, false if the ID was just generated.
*/
-static int known_class(cxt, name, len, classnum)
-stcxt_t *cxt;
-char *name; /* Class name */
-int len; /* Name length */
-I32 *classnum;
+static int known_class(
+ stcxt_t *cxt,
+ char *name, /* Class name */
+ int len, /* Name length */
+ I32 *classnum)
{
SV **svh;
HV *hclass = cxt->hclass;
/*
* Unknown classname, we need to record it.
- * The (IV) cast below is for 64-bit machines, to avoid compiler warnings.
*/
cxt->classnum++;
- if (!hv_store(hclass, name, len, (SV*)(IV) cxt->classnum, 0))
+ if (!hv_store(hclass, name, len, INT2PTR(SV*, cxt->classnum), 0))
CROAK(("Unable to record new classname"));
*classnum = cxt->classnum;
* Store a reference.
* Layout is SX_REF <object> or SX_OVERLOAD <object>.
*/
-static int store_ref(cxt, sv)
-stcxt_t *cxt;
-SV *sv;
+static int store_ref(stcxt_t *cxt, SV *sv)
{
- TRACEME(("store_ref (0x%lx)", (unsigned long) sv));
+ TRACEME(("store_ref (0x%"UVxf")", PTR2UV(sv)));
/*
* Follow reference, and check if target is overloaded.
if (SvOBJECT(sv)) {
HV *stash = (HV *) SvSTASH(sv);
if (stash && Gv_AMG(stash)) {
- TRACEME(("ref (0x%lx) is overloaded", (unsigned long) sv));
+ TRACEME(("ref (0x%"UVxf") is overloaded", PTR2UV(sv)));
PUTMARK(SX_OVERLOAD);
} else
PUTMARK(SX_REF);
* If integer or double, the layout is SX_INTEGER <data> or SX_DOUBLE <data>.
* Small integers (within [-127, +127]) are stored as SX_BYTE <byte>.
*/
-static int store_scalar(cxt, sv)
-stcxt_t *cxt;
-SV *sv;
+static int store_scalar(stcxt_t *cxt, SV *sv)
{
IV iv;
char *pv;
STRLEN len;
U32 flags = SvFLAGS(sv); /* "cc -O" may put it in register */
- TRACEME(("store_scalar (0x%lx)", (unsigned long) sv));
+ TRACEME(("store_scalar (0x%"UVxf")", PTR2UV(sv)));
/*
* For efficiency, break the SV encapsulation by peaking at the flags
TRACEME(("immortal undef"));
PUTMARK(SX_SV_UNDEF);
} else {
- TRACEME(("undef at 0x%x", sv));
+ TRACEME(("undef at 0x%"UVxf, PTR2UV(sv)));
PUTMARK(SX_UNDEF);
}
return 0;
goto string; /* Share code below */
}
} else if (flags & SVp_POK) { /* SvPOKp(sv) => string */
+ I32 wlen; /* For 64-bit machines */
pv = SvPV(sv, len);
/*
*/
string:
- STORE_SCALAR(pv, len);
- TRACEME(("ok (scalar 0x%lx '%s', length = %d)",
- (unsigned long) sv, SvPVX(sv), len));
+ wlen = (I32) len; /* WLEN via STORE_SCALAR expects I32 */
+ if (SvUTF8 (sv))
+ STORE_UTF8STR(pv, wlen);
+ else
+ STORE_SCALAR(pv, wlen);
+ TRACEME(("ok (scalar 0x%"UVxf" '%s', length = %"IVdf")",
+ PTR2UV(sv), SvPVX(sv), (IV)len));
} else if (flags & SVp_NOK) { /* SvNOKp(sv) => double */
NV nv = SvNV(sv);
* Watch for number being an integer in disguise.
*/
if (nv == (NV) (iv = I_V(nv))) {
- TRACEME(("double %lf is actually integer %ld", nv, iv));
+ TRACEME(("double %"NVff" is actually integer %"IVdf, nv, iv));
goto integer; /* Share code below */
}
if (cxt->netorder) {
- TRACEME(("double %lf stored as string", nv));
+ TRACEME(("double %"NVff" stored as string", nv));
pv = SvPV(sv, len);
goto string; /* Share code above */
}
PUTMARK(SX_DOUBLE);
WRITE(&nv, sizeof(nv));
- TRACEME(("ok (double 0x%lx, value = %lf)", (unsigned long) sv, nv));
+ TRACEME(("ok (double 0x%"UVxf", value = %"NVff")", PTR2UV(sv), nv));
} else if (flags & SVp_IOK) { /* SvIOKp(sv) => integer */
iv = SvIV(sv);
PUTMARK(siv);
TRACEME(("small integer stored as %d", siv));
} else if (cxt->netorder) {
- int niv;
+ I32 niv;
#ifdef HAS_HTONL
- niv = (int) htonl(iv);
+ niv = (I32) htonl(iv);
TRACEME(("using network order"));
#else
- niv = (int) iv;
+ niv = (I32) iv;
TRACEME(("as-is for network order"));
#endif
PUTMARK(SX_NETINT);
- WRITE(&niv, sizeof(niv));
+ WRITE_I32(niv);
} else {
PUTMARK(SX_INTEGER);
WRITE(&iv, sizeof(iv));
}
- TRACEME(("ok (integer 0x%lx, value = %d)", (unsigned long) sv, iv));
+ TRACEME(("ok (integer 0x%"UVxf", value = %"IVdf")", PTR2UV(sv), iv));
} else
- CROAK(("Can't determine type of %s(0x%lx)", sv_reftype(sv, FALSE),
- (unsigned long) sv));
+ CROAK(("Can't determine type of %s(0x%"UVxf")",
+ sv_reftype(sv, FALSE),
+ PTR2UV(sv)));
return 0; /* Ok, no recursion on scalars */
}
* Layout is SX_ARRAY <size> followed by each item, in increading index order.
* Each item is stored as <object>.
*/
-static int store_array(cxt, av)
-stcxt_t *cxt;
-AV *av;
+static int store_array(stcxt_t *cxt, AV *av)
{
SV **sav;
I32 len = av_len(av) + 1;
I32 i;
int ret;
- TRACEME(("store_array (0x%lx)", (unsigned long) av));
+ TRACEME(("store_array (0x%"UVxf")", PTR2UV(av)));
/*
* Signal array by emitting SX_ARRAY, followed by the array length.
* Borrowed from perl source file pp_ctl.c, where it is used by pp_sort.
*/
static int
-sortcmp(a, b)
-const void *a;
-const void *b;
+sortcmp(const void *a, const void *b)
{
return sv_cmp(*(SV * const *) a, *(SV * const *) b);
}
* Keys are stored as <length> <data>, the <data> section being omitted
* if length is 0.
*/
-static int store_hash(cxt, hv)
-stcxt_t *cxt;
-HV *hv;
+static int store_hash(stcxt_t *cxt, HV *hv)
{
I32 len = HvKEYS(hv);
I32 i;
I32 riter;
HE *eiter;
- TRACEME(("store_hash (0x%lx)", (unsigned long) hv));
+ TRACEME(("store_hash (0x%"UVxf")", PTR2UV(hv)));
/*
* Signal hash by emitting SX_HASH, followed by the table length.
* Store value first.
*/
- TRACEME(("(#%d) value 0x%lx", i, (unsigned long) val));
+ TRACEME(("(#%d) value 0x%"UVxf, i, PTR2UV(val)));
if (ret = store(cxt, val))
goto out;
* Store value first.
*/
- TRACEME(("(#%d) value 0x%lx", i, (unsigned long) val));
+ TRACEME(("(#%d) value 0x%"UVxf, i, PTR2UV(val)));
if (ret = store(cxt, val))
goto out;
}
}
- TRACEME(("ok (hash 0x%lx)", (unsigned long) hv));
+ TRACEME(("ok (hash 0x%"UVxf")", PTR2UV(hv)));
out:
HvRITER(hv) = riter; /* Restore hash iterator state */
* dealing with a tied hash, we store SX_TIED_HASH <hash object>, where
* <hash object> stands for the serialization of the tied hash.
*/
-static int store_tied(cxt, sv)
-stcxt_t *cxt;
-SV *sv;
+static int store_tied(stcxt_t *cxt, SV *sv)
{
MAGIC *mg;
int ret = 0;
int svt = SvTYPE(sv);
char mtype = 'P';
- TRACEME(("store_tied (0x%lx)", (unsigned long) sv));
+ TRACEME(("store_tied (0x%"UVxf")", PTR2UV(sv)));
/*
* We have a small run-time penalty here because we chose to factorise
* SX_TIED_KEY <object> <key>
* SX_TIED_IDX <object> <index>
*/
-static int store_tied_item(cxt, sv)
-stcxt_t *cxt;
-SV *sv;
+static int store_tied_item(stcxt_t *cxt, SV *sv)
{
MAGIC *mg;
int ret;
- TRACEME(("store_tied_item (0x%lx)", (unsigned long) sv));
+ TRACEME(("store_tied_item (0x%"UVxf")", PTR2UV(sv)));
if (!(mg = mg_find(sv, 'p')))
CROAK(("No magic 'p' found while storing reference to tied item"));
if (mg->mg_ptr) {
TRACEME(("store_tied_item: storing a ref to a tied hash item"));
PUTMARK(SX_TIED_KEY);
- TRACEME(("store_tied_item: storing OBJ 0x%lx",
- (unsigned long) mg->mg_obj));
+ TRACEME(("store_tied_item: storing OBJ 0x%"UVxf, PTR2UV(mg->mg_obj)));
if (ret = store(cxt, mg->mg_obj))
return ret;
- TRACEME(("store_tied_item: storing PTR 0x%lx",
- (unsigned long) mg->mg_ptr));
+ TRACEME(("store_tied_item: storing PTR 0x%"UVxf, PTR2UV(mg->mg_ptr)));
if (ret = store(cxt, (SV *) mg->mg_ptr))
return ret;
TRACEME(("store_tied_item: storing a ref to a tied array item "));
PUTMARK(SX_TIED_IDX);
- TRACEME(("store_tied_item: storing OBJ 0x%lx",
- (unsigned long) mg->mg_obj));
+ TRACEME(("store_tied_item: storing OBJ 0x%"UVxf, PTR2UV(mg->mg_obj)));
if (ret = store(cxt, mg->mg_obj))
return ret;
* recursion, until we reach flags indicating no recursion, at which point
* we know we've resynchronized with a single layout, after <flags>.
*/
-static int store_hook(cxt, sv, type, pkg, hook)
-stcxt_t *cxt;
-SV *sv;
-HV *pkg;
-SV *hook;
+static int store_hook(
+ stcxt_t *cxt,
+ SV *sv,
+ int type,
+ HV *pkg,
+ SV *hook)
{
I32 len;
char *class;
pkg_hide(cxt->hook, pkg, "STORABLE_freeze");
ASSERT(!pkg_can(cxt->hook, pkg, "STORABLE_freeze"), ("hook invisible"));
- TRACEME(("Ignoring STORABLE_freeze in class \"%s\"", class));
+ TRACEME(("ignoring STORABLE_freeze in class \"%s\"", class));
return store_blessed(cxt, sv, type, pkg);
}
pv = SvPV(ary[0], len2);
/*
- * Allocate a class ID if not already done.
- */
-
- if (!known_class(cxt, class, len, &classnum)) {
- TRACEME(("first time we see class %s, ID = %d", class, classnum));
- classnum = -1; /* Mark: we must store classname */
- } else {
- TRACEME(("already seen class %s, ID = %d", class, classnum));
- }
-
- /*
* If they returned more than one item, we need to serialize some
* extra references if not already done.
*
if (svh = hv_fetch(cxt->hseen, (char *) &xsv, sizeof(xsv), FALSE))
goto sv_seen; /* Avoid moving code too far to the right */
- TRACEME(("listed object %d at 0x%lx is unknown",
- i-1, (unsigned long) xsv));
+ TRACEME(("listed object %d at 0x%"UVxf" is unknown", i-1, PTR2UV(xsv)));
/*
* We need to recurse to store that object and get it to be known
sv_seen:
SvREFCNT_dec(xsv);
ary[i] = *svh;
- TRACEME(("listed object %d at 0x%lx is tag #%d",
- i-1, (unsigned long) xsv, (I32) *svh));
+ TRACEME(("listed object %d at 0x%"UVxf" is tag #%"UVuf,
+ i-1, PTR2UV(xsv), PTR2UV(*svh)));
+ }
+
+ /*
+ * Allocate a class ID if not already done.
+ *
+ * This needs to be done after the recursion above, since at retrieval
+ * time, we'll see the inner objects first. Many thanks to
+ * Salvador Ortiz Garcia <sog@msg.com.mx> who spot that bug and
+ * proposed the right fix. -- RAM, 15/09/2000
+ */
+
+ if (!known_class(cxt, class, len, &classnum)) {
+ TRACEME(("first time we see class %s, ID = %d", class, classnum));
+ classnum = -1; /* Mark: we must store classname */
+ } else {
+ TRACEME(("already seen class %s, ID = %d", class, classnum));
}
/*
* If we recursed, the SX_HOOK has already been emitted.
*/
- TRACEME(("SX_HOOK (recursed=%d) flags=0x%x class=%d len=%d len2=%d len3=%d",
- recursed, flags, classnum, len, len2, count-1));
+ TRACEME(("SX_HOOK (recursed=%d) flags=0x%x "
+ "class=%"IVdf" len=%"IVdf" len2=%"IVdf" len3=%d",
+ recursed, flags, (IV)classnum, (IV)len, (IV)len2, count-1));
/* SX_HOOK <flags> */
if (!recursed)
}
/* <len2> <frozen-str> */
- if (flags & SHF_LARGE_STRLEN)
- WLEN(len2);
- else {
+ if (flags & SHF_LARGE_STRLEN) {
+ I32 wlen2 = len2; /* STRLEN might be 8 bytes */
+ WLEN(wlen2); /* Must write an I32 for 64-bit machines */
+ } else {
unsigned char clen = (unsigned char) len2;
PUTMARK(clen);
}
for (i = 1; i < count; i++) {
I32 tagval = htonl(LOW_32BITS(ary[i]));
- WRITE(&tagval, sizeof(I32));
+ WRITE_I32(tagval);
TRACEME(("object %d, tag #%d", i-1, ntohl(tagval)));
}
}
* where <index> is the classname index, stored on 0 or 4 bytes depending
* on the high-order bit in flag (same encoding as above for <len>).
*/
-static int store_blessed(cxt, sv, type, pkg)
-stcxt_t *cxt;
-SV *sv;
-int type;
-HV *pkg;
+static int store_blessed(
+ stcxt_t *cxt,
+ SV *sv,
+ int type,
+ HV *pkg)
{
SV *hook;
I32 len;
class = HvNAME(pkg);
len = strlen(class);
- TRACEME(("blessed 0x%lx in %s, no hook: tagged #%d",
- (unsigned long) sv, class, cxt->tagnum));
+ TRACEME(("blessed 0x%"UVxf" in %s, no hook: tagged #%d",
+ PTR2UV(sv), class, cxt->tagnum));
/*
* Determine whether it is the first time we see that class name (in which
* true value, then don't croak, just warn, and store a placeholder string
* instead.
*/
-static int store_other(cxt, sv)
-stcxt_t *cxt;
-SV *sv;
+static int store_other(stcxt_t *cxt, SV *sv)
{
- STRLEN len;
+ I32 len;
static char buf[80];
TRACEME(("store_other"));
)
CROAK(("Can't store %s items", sv_reftype(sv, FALSE)));
- warn("Can't store item %s(0x%lx)",
- sv_reftype(sv, FALSE), (unsigned long) sv);
+ warn("Can't store item %s(0x%"UVxf")",
+ sv_reftype(sv, FALSE), PTR2UV(sv));
/*
* Store placeholder string as a scalar instead...
*/
- (void) sprintf(buf, "You lost %s(0x%lx)\0", sv_reftype(sv, FALSE),
- (unsigned long) sv);
+ (void) sprintf(buf, "You lost %s(0x%"UVxf")\0", sv_reftype(sv, FALSE),
+ PTR2UV(sv));
len = strlen(buf);
STORE_SCALAR(buf, len);
- TRACEME(("ok (dummy \"%s\", length = %d)", buf, len));
+ TRACEME(("ok (dummy \"%s\", length = %"IVdf")", buf, (IV) len));
return 0;
}
* Returns the type of the SV, identified by an integer. That integer
* may then be used to index the dynamic routine dispatch table.
*/
-static int sv_type(sv)
-SV *sv;
+static int sv_type(SV *sv)
{
switch (SvTYPE(sv)) {
case SVt_NULL:
* object (one for which storage has started -- it may not be over if we have
* a self-referenced structure). This data set forms a stored <object>.
*/
-static int store(cxt, sv)
-stcxt_t *cxt;
-SV *sv;
+static int store(stcxt_t *cxt, SV *sv)
{
SV **svh;
int ret;
SV *tag;
int type;
- HV *hseen = cxt->hseen;
+ HV *hseen = cxt->hseen;
- TRACEME(("store (0x%lx)", (unsigned long) sv));
+ TRACEME(("store (0x%"UVxf")", PTR2UV(sv)));
/*
* If object has already been stored, do not duplicate data.
if (svh) {
I32 tagval = htonl(LOW_32BITS(*svh));
- TRACEME(("object 0x%lx seen as #%d",
- (unsigned long) sv, ntohl(tagval)));
+ TRACEME(("object 0x%"UVxf" seen as #%d", PTR2UV(sv), ntohl(tagval)));
PUTMARK(SX_OBJECT);
- WRITE(&tagval, sizeof(I32));
+ WRITE_I32(tagval);
return 0;
}
* means that we must clean up the hash manually afterwards, but gives
* us a 15% throughput increase.
*
- * The (IV) cast below is for 64-bit machines, to avoid warnings from
- * the compiler. Please, let me know if it does not work.
- * -- RAM, 14/09/1999
*/
cxt->tagnum++;
if (!hv_store(hseen,
- (char *) &sv, sizeof(sv), (SV*)(IV) cxt->tagnum, 0))
+ (char *) &sv, sizeof(sv), INT2PTR(SV*, cxt->tagnum), 0))
return -1;
/*
type = sv_type(sv);
- TRACEME(("storing 0x%lx tag #%d, type %d...",
- (unsigned long) sv, cxt->tagnum, type));
+ TRACEME(("storing 0x%"UVxf" tag #%d, type %d...",
+ PTR2UV(sv), cxt->tagnum, type));
if (SvOBJECT(sv)) {
HV *pkg = SvSTASH(sv);
} else
ret = SV_STORE(type)(cxt, sv);
- TRACEME(("%s (stored 0x%lx, refcnt=%d, %s)",
- ret ? "FAILED" : "ok", (unsigned long) sv,
+ TRACEME(("%s (stored 0x%"UVxf", refcnt=%d, %s)",
+ ret ? "FAILED" : "ok", PTR2UV(sv),
SvREFCNT(sv), sv_reftype(sv, FALSE)));
return ret;
* Note that no byte ordering info is emitted when <network> is true, since
* integers will be emitted in network order in that case.
*/
-static int magic_write(cxt)
-stcxt_t *cxt;
+static int magic_write(stcxt_t *cxt)
{
char buf[256]; /* Enough room for 256 hexa digits */
unsigned char c;
PUTMARK((unsigned char) sizeof(int));
PUTMARK((unsigned char) sizeof(long));
PUTMARK((unsigned char) sizeof(char *));
+ PUTMARK((unsigned char) sizeof(NV));
- TRACEME(("ok (magic_write byteorder = 0x%lx [%d], I%d L%d P%d)",
- (unsigned long) BYTEORDER, (int) c,
- sizeof(int), sizeof(long), sizeof(char *)));
+ TRACEME(("ok (magic_write byteorder = 0x%lx [%d], I%d L%d P%d D%d)",
+ (unsigned long) BYTEORDER, (int) c,
+ (int) sizeof(int), (int) sizeof(long),
+ (int) sizeof(char *), (int) sizeof(NV)));
return 0;
}
* It is required to provide a non-null `res' when the operation type is not
* dclone() and store() is performed to memory.
*/
-static int do_store(f, sv, optype, network_order, res)
-PerlIO *f;
-SV *sv;
-int optype;
-int network_order;
-SV **res;
+static int do_store(
+ PerlIO *f,
+ SV *sv,
+ int optype,
+ int network_order,
+ SV **res)
{
dSTCXT;
int status;
* free up memory for them now.
*/
- if (cxt->dirty)
+ if (cxt->s_dirty)
clean_context(cxt);
/*
cxt->entry++;
ASSERT(cxt->entry == 1, ("starting new recursion"));
- ASSERT(!cxt->dirty, ("clean context"));
+ ASSERT(!cxt->s_dirty, ("clean context"));
/*
* Ensure sv is actually a reference. From perl, we called something
* Store the transitive data closure of given object to disk.
* Returns 0 on error, a true value otherwise.
*/
-int pstore(f, sv)
-PerlIO *f;
-SV *sv;
+int pstore(PerlIO *f, SV *sv)
{
TRACEME(("pstore"));
- return do_store(f, sv, 0, FALSE, (SV**)0);
+ return do_store(f, sv, 0, FALSE, (SV**) 0);
}
* Same as pstore(), but network order is used for integers and doubles are
* emitted as strings.
*/
-int net_pstore(f, sv)
-PerlIO *f;
-SV *sv;
+int net_pstore(PerlIO *f, SV *sv)
{
TRACEME(("net_pstore"));
- return do_store(f, sv, 0, TRUE, (SV**)0);
+ return do_store(f, sv, 0, TRUE, (SV**) 0);
}
/***
*
* Build a new SV out of the content of the internal memory buffer.
*/
-static SV *mbuf2sv()
+static SV *mbuf2sv(void)
{
dSTCXT;
* Store the transitive data closure of given object to memory.
* Returns undef on error, a scalar value containing the data otherwise.
*/
-SV *mstore(sv)
-SV *sv;
+SV *mstore(SV *sv)
{
dSTCXT;
SV *out;
TRACEME(("mstore"));
- if (!do_store((PerlIO*)0, sv, 0, FALSE, &out))
+ if (!do_store((PerlIO*) 0, sv, 0, FALSE, &out))
return &PL_sv_undef;
return out;
* Same as mstore(), but network order is used for integers and doubles are
* emitted as strings.
*/
-SV *net_mstore(sv)
-SV *sv;
+SV *net_mstore(SV *sv)
{
dSTCXT;
SV *out;
TRACEME(("net_mstore"));
- if (!do_store((PerlIO*)0, sv, 0, TRUE, &out))
+ if (!do_store((PerlIO*) 0, sv, 0, TRUE, &out))
return &PL_sv_undef;
return out;
* Return an error via croak, since it is not possible that we get here
* under normal conditions, when facing a file produced via pstore().
*/
-static SV *retrieve_other(cxt)
-stcxt_t *cxt;
+static SV *retrieve_other(stcxt_t *cxt)
{
if (
cxt->ver_major != STORABLE_BIN_MAJOR &&
* Layout is SX_IX_BLESS <index> <object> with SX_IX_BLESS already read.
* <index> can be coded on either 1 or 5 bytes.
*/
-static SV *retrieve_idx_blessed(cxt)
-stcxt_t *cxt;
+static SV *retrieve_idx_blessed(stcxt_t *cxt)
{
I32 idx;
char *class;
sva = av_fetch(cxt->aclass, idx, FALSE);
if (!sva)
- CROAK(("Class name #%d should have been seen already", idx));
+ CROAK(("Class name #%d should have been seen already", (int)idx));
class = SvPVX(*sva); /* We know it's a PV, by construction */
* Layout is SX_BLESS <len> <classname> <object> with SX_BLESS already read.
* <len> can be coded on either 1 or 5 bytes.
*/
-static SV *retrieve_blessed(cxt)
-stcxt_t *cxt;
+static SV *retrieve_blessed(stcxt_t *cxt)
{
I32 len;
SV *sv;
* is an unknown amount of serialized objects after the SX_HOOK mark. Until
* we reach a <flags> marker with the recursion bit cleared.
*/
-static SV *retrieve_hook(cxt)
-stcxt_t *cxt;
+static SV *retrieve_hook(stcxt_t *cxt)
{
I32 len;
char buf[LG_BLESS + 1]; /* Avoid malloc() if possible */
rv = retrieve(cxt);
if (!rv)
return (SV *) 0;
- TRACEME(("retrieve_hook back with rv=0x%lx", (unsigned long) rv));
+ TRACEME(("retrieve_hook back with rv=0x%"UVxf,
+ PTR2UV(rv)));
GETMARK(flags);
}
sva = av_fetch(cxt->aclass, idx, FALSE);
if (!sva)
- CROAK(("Class name #%d should have been seen already", idx));
+ CROAK(("Class name #%d should have been seen already", (int)idx));
class = SvPVX(*sva); /* We know it's a PV, by construction */
TRACEME(("class ID %d => %s", idx, class));
*SvEND(frozen) = '\0';
}
(void) SvPOK_only(frozen); /* Validates string pointer */
- SvTAINT(frozen);
+ if (cxt->s_tainted) /* Is input source tainted? */
+ SvTAINT(frozen);
TRACEME(("frozen string: %d bytes", len2));
SV **svh;
SV *xsv;
- READ(&tag, sizeof(I32));
+ READ_I32(tag);
tag = ntohl(tag);
svh = av_fetch(cxt->aseen, tag, FALSE);
if (!svh)
- CROAK(("Object #%d should have been retrieved already", tag));
+ CROAK(("Object #%d should have been retrieved already", (int)tag));
xsv = *svh;
ary[i] = SvREFCNT_inc(xsv);
}
BLESS(sv, class);
hook = pkg_can(cxt->hook, SvSTASH(sv), "STORABLE_thaw");
- if (!hook)
- CROAK(("No STORABLE_thaw defined for objects of class %s", class));
+ if (!hook) {
+ /*
+ * Hook not found. Maybe they did not require the module where this
+ * hook is defined yet?
+ *
+ * If the require below succeeds, we'll be able to find the hook.
+ * Still, it only works reliably when each class is defined in a
+ * file of its own.
+ */
+
+ SV *psv = newSVpvn("require ", 8);
+ sv_catpv(psv, class);
+
+ TRACEME(("No STORABLE_thaw defined for objects of class %s", class));
+ TRACEME(("Going to require module '%s' with '%s'", class, SvPVX(psv)));
+
+ perl_eval_sv(psv, G_DISCARD);
+ sv_free(psv);
+
+ /*
+ * We cache results of pkg_can, so we need to uncache before attempting
+ * the lookup again.
+ */
+
+ pkg_uncache(cxt->hook, SvSTASH(sv), "STORABLE_thaw");
+ hook = pkg_can(cxt->hook, SvSTASH(sv), "STORABLE_thaw");
+
+ if (!hook)
+ CROAK(("No STORABLE_thaw defined for objects of class %s "
+ "(even after a \"require %s;\")", class, class));
+ }
/*
* If we don't have an `av' yet, prepare one.
* the object itself being already created by the runtime.
*/
- TRACEME(("calling STORABLE_thaw on %s at 0x%lx (%d args)",
- class, (unsigned long) sv, AvFILLp(av) + 1));
+ TRACEME(("calling STORABLE_thaw on %s at 0x%"UVxf" (%"IVdf" args)",
+ class, PTR2UV(sv), AvFILLp(av) + 1));
rv = newRV(sv);
(void) scalar_call(rv, hook, clone, av, G_SCALAR|G_DISCARD);
* Retrieve reference to some other scalar.
* Layout is SX_REF <object>, with SX_REF already read.
*/
-static SV *retrieve_ref(cxt)
-stcxt_t *cxt;
+static SV *retrieve_ref(stcxt_t *cxt)
{
SV *rv;
SV *sv;
SvRV(rv) = sv; /* $rv = \$sv */
SvROK_on(rv);
- TRACEME(("ok (retrieve_ref at 0x%lx)", (unsigned long) rv));
+ TRACEME(("ok (retrieve_ref at 0x%"UVxf")", PTR2UV(rv)));
return rv;
}
* Retrieve reference to some other scalar with overloading.
* Layout is SX_OVERLOAD <object>, with SX_OVERLOAD already read.
*/
-static SV *retrieve_overloaded(cxt)
-stcxt_t *cxt;
+static SV *retrieve_overloaded(stcxt_t *cxt)
{
SV *rv;
SV *sv;
stash = (HV *) SvSTASH (sv);
if (!stash || !Gv_AMG(stash))
- CROAK(("Cannot restore overloading on %s(0x%lx)", sv_reftype(sv, FALSE),
- (unsigned long) sv));
+ CROAK(("Cannot restore overloading on %s(0x%"UVxf")",
+ sv_reftype(sv, FALSE),
+ PTR2UV(sv)));
SvAMAGIC_on(rv);
- TRACEME(("ok (retrieve_overloaded at 0x%lx)", (unsigned long) rv));
+ TRACEME(("ok (retrieve_overloaded at 0x%"UVxf")", PTR2UV(rv)));
return rv;
}
* Retrieve tied array
* Layout is SX_TIED_ARRAY <object>, with SX_TIED_ARRAY already read.
*/
-static SV *retrieve_tied_array(cxt)
-stcxt_t *cxt;
+static SV *retrieve_tied_array(stcxt_t *cxt)
{
SV *tv;
SV *sv;
sv_magic(tv, sv, 'P', Nullch, 0);
SvREFCNT_dec(sv); /* Undo refcnt inc from sv_magic() */
- TRACEME(("ok (retrieve_tied_array at 0x%lx)", (unsigned long) tv));
+ TRACEME(("ok (retrieve_tied_array at 0x%"UVxf")", PTR2UV(tv)));
return tv;
}
* Retrieve tied hash
* Layout is SX_TIED_HASH <object>, with SX_TIED_HASH already read.
*/
-static SV *retrieve_tied_hash(cxt)
-stcxt_t *cxt;
+static SV *retrieve_tied_hash(stcxt_t *cxt)
{
SV *tv;
SV *sv;
sv_magic(tv, sv, 'P', Nullch, 0);
SvREFCNT_dec(sv); /* Undo refcnt inc from sv_magic() */
- TRACEME(("ok (retrieve_tied_hash at 0x%lx)", (unsigned long) tv));
+ TRACEME(("ok (retrieve_tied_hash at 0x%"UVxf")", PTR2UV(tv)));
return tv;
}
sv_magic(tv, sv, 'q', Nullch, 0);
SvREFCNT_dec(sv); /* Undo refcnt inc from sv_magic() */
- TRACEME(("ok (retrieve_tied_scalar at 0x%lx)", (unsigned long) tv));
+ TRACEME(("ok (retrieve_tied_scalar at 0x%"UVxf")", PTR2UV(tv)));
return tv;
}
* Retrieve reference to value in a tied hash.
* Layout is SX_TIED_KEY <object> <key>, with SX_TIED_KEY already read.
*/
-static SV *retrieve_tied_key(cxt)
-stcxt_t *cxt;
+static SV *retrieve_tied_key(stcxt_t *cxt)
{
SV *tv;
SV *sv;
* Retrieve reference to value in a tied array.
* Layout is SX_TIED_IDX <object> <idx>, with SX_TIED_IDX already read.
*/
-static SV *retrieve_tied_idx(cxt)
-stcxt_t *cxt;
+static SV *retrieve_tied_idx(stcxt_t *cxt)
{
SV *tv;
SV *sv;
* The scalar is "long" in that <length> is larger than LG_SCALAR so it
* was not stored on a single byte.
*/
-static SV *retrieve_lscalar(cxt)
-stcxt_t *cxt;
+static SV *retrieve_lscalar(stcxt_t *cxt)
{
- STRLEN len;
+ I32 len;
SV *sv;
RLEN(len);
- TRACEME(("retrieve_lscalar (#%d), len = %d", cxt->tagnum, len));
+ TRACEME(("retrieve_lscalar (#%d), len = %"IVdf, cxt->tagnum, len));
/*
* Allocate an empty scalar of the suitable length.
SvCUR_set(sv, len); /* Record C string length */
*SvEND(sv) = '\0'; /* Ensure it's null terminated anyway */
(void) SvPOK_only(sv); /* Validate string pointer */
- SvTAINT(sv); /* External data cannot be trusted */
+ if (cxt->s_tainted) /* Is input source tainted? */
+ SvTAINT(sv); /* External data cannot be trusted */
- TRACEME(("large scalar len %d '%s'", len, SvPVX(sv)));
- TRACEME(("ok (retrieve_lscalar at 0x%lx)", (unsigned long) sv));
+ TRACEME(("large scalar len %"IVdf" '%s'", len, SvPVX(sv)));
+ TRACEME(("ok (retrieve_lscalar at 0x%"UVxf")", PTR2UV(sv)));
return sv;
}
* The scalar is "short" so <length> is single byte. If it is 0, there
* is no <data> section.
*/
-static SV *retrieve_scalar(cxt)
-stcxt_t *cxt;
+static SV *retrieve_scalar(stcxt_t *cxt)
{
int len;
SV *sv;
sv_upgrade(sv, SVt_PV);
SvGROW(sv, 1);
*SvEND(sv) = '\0'; /* Ensure it's null terminated anyway */
- TRACEME(("ok (retrieve_scalar empty at 0x%lx)", (unsigned long) sv));
+ TRACEME(("ok (retrieve_scalar empty at 0x%"UVxf")", PTR2UV(sv)));
} else {
/*
* Now, for efficiency reasons, read data directly inside the SV buffer,
}
(void) SvPOK_only(sv); /* Validate string pointer */
- SvTAINT(sv); /* External data cannot be trusted */
+ if (cxt->s_tainted) /* Is input source tainted? */
+ SvTAINT(sv); /* External data cannot be trusted */
+
+ TRACEME(("ok (retrieve_scalar at 0x%"UVxf")", PTR2UV(sv)));
+ return sv;
+}
+
+/*
+ * retrieve_utf8str
+ *
+ * Like retrieve_scalar(), but tag result as utf8.
+ * If we're retrieving UTF8 data in a non-UTF8 perl, croaks.
+ */
+static SV *retrieve_utf8str(stcxt_t *cxt)
+{
+ SV *sv;
+
+ TRACEME(("retrieve_utf8str"));
+
+ sv = retrieve_scalar(cxt);
+ if (sv)
+ SvUTF8_on(sv);
+
+ return sv;
+}
+
+/*
+ * retrieve_lutf8str
+ *
+ * Like retrieve_lscalar(), but tag result as utf8.
+ * If we're retrieving UTF8 data in a non-UTF8 perl, croaks.
+ */
+static SV *retrieve_lutf8str(stcxt_t *cxt)
+{
+ SV *sv;
+
+ TRACEME(("retrieve_lutf8str"));
+
+ sv = retrieve_lscalar(cxt);
+ if (sv)
+ SvUTF8_on(sv);
- TRACEME(("ok (retrieve_scalar at 0x%lx)", (unsigned long) sv));
return sv;
}
* Retrieve defined integer.
* Layout is SX_INTEGER <data>, whith SX_INTEGER already read.
*/
-static SV *retrieve_integer(cxt)
-stcxt_t *cxt;
+static SV *retrieve_integer(stcxt_t *cxt)
{
SV *sv;
IV iv;
sv = newSViv(iv);
SEEN(sv); /* Associate this new scalar with tag "tagnum" */
- TRACEME(("integer %d", iv));
- TRACEME(("ok (retrieve_integer at 0x%lx)", (unsigned long) sv));
+ TRACEME(("integer %"IVdf, iv));
+ TRACEME(("ok (retrieve_integer at 0x%"UVxf")", PTR2UV(sv)));
return sv;
}
* Retrieve defined integer in network order.
* Layout is SX_NETINT <data>, whith SX_NETINT already read.
*/
-static SV *retrieve_netint(cxt)
-stcxt_t *cxt;
+static SV *retrieve_netint(stcxt_t *cxt)
{
SV *sv;
- int iv;
+ I32 iv;
TRACEME(("retrieve_netint (#%d)", cxt->tagnum));
- READ(&iv, sizeof(iv));
+ READ_I32(iv);
#ifdef HAS_NTOHL
sv = newSViv((int) ntohl(iv));
TRACEME(("network integer %d", (int) ntohl(iv)));
#endif
SEEN(sv); /* Associate this new scalar with tag "tagnum" */
- TRACEME(("ok (retrieve_netint at 0x%lx)", (unsigned long) sv));
+ TRACEME(("ok (retrieve_netint at 0x%"UVxf")", PTR2UV(sv)));
return sv;
}
* Retrieve defined double.
* Layout is SX_DOUBLE <data>, whith SX_DOUBLE already read.
*/
-static SV *retrieve_double(cxt)
-stcxt_t *cxt;
+static SV *retrieve_double(stcxt_t *cxt)
{
SV *sv;
NV nv;
sv = newSVnv(nv);
SEEN(sv); /* Associate this new scalar with tag "tagnum" */
- TRACEME(("double %lf", nv));
- TRACEME(("ok (retrieve_double at 0x%lx)", (unsigned long) sv));
+ TRACEME(("double %"NVff, nv));
+ TRACEME(("ok (retrieve_double at 0x%"UVxf")", PTR2UV(sv)));
return sv;
}
* Retrieve defined byte (small integer within the [-128, +127] range).
* Layout is SX_BYTE <data>, whith SX_BYTE already read.
*/
-static SV *retrieve_byte(cxt)
-stcxt_t *cxt;
+static SV *retrieve_byte(stcxt_t *cxt)
{
SV *sv;
int siv;
SEEN(sv); /* Associate this new scalar with tag "tagnum" */
TRACEME(("byte %d", (unsigned char) siv - 128));
- TRACEME(("ok (retrieve_byte at 0x%lx)", (unsigned long) sv));
+ TRACEME(("ok (retrieve_byte at 0x%"UVxf")", PTR2UV(sv)));
return sv;
}
*
* Return the undefined value.
*/
-static SV *retrieve_undef(cxt)
-stcxt_t *cxt;
+static SV *retrieve_undef(stcxt_t *cxt)
{
SV* sv;
*
* Return the immortal undefined value.
*/
-static SV *retrieve_sv_undef(cxt)
-stcxt_t *cxt;
+static SV *retrieve_sv_undef(stcxt_t *cxt)
{
SV *sv = &PL_sv_undef;
*
* Return the immortal yes value.
*/
-static SV *retrieve_sv_yes(cxt)
-stcxt_t *cxt;
+static SV *retrieve_sv_yes(stcxt_t *cxt)
{
SV *sv = &PL_sv_yes;
*
* Return the immortal no value.
*/
-static SV *retrieve_sv_no(cxt)
-stcxt_t *cxt;
+static SV *retrieve_sv_no(stcxt_t *cxt)
{
SV *sv = &PL_sv_no;
*
* When we come here, SX_ARRAY has been read already.
*/
-static SV *retrieve_array(cxt)
-stcxt_t *cxt;
+static SV *retrieve_array(stcxt_t *cxt)
{
I32 len;
I32 i;
return (SV *) 0;
}
- TRACEME(("ok (retrieve_array at 0x%lx)", (unsigned long) av));
+ TRACEME(("ok (retrieve_array at 0x%"UVxf")", PTR2UV(av)));
return (SV *) av;
}
*
* When we come here, SX_HASH has been read already.
*/
-static SV *retrieve_hash(cxt)
-stcxt_t *cxt;
+static SV *retrieve_hash(stcxt_t *cxt)
{
I32 len;
I32 size;
return (SV *) 0;
}
- TRACEME(("ok (retrieve_hash at 0x%lx)", (unsigned long) hv));
+ TRACEME(("ok (retrieve_hash at 0x%"UVxf")", PTR2UV(hv)));
return (SV *) hv;
}
*
* When we come here, SX_ARRAY has been read already.
*/
-static SV *old_retrieve_array(cxt)
-stcxt_t *cxt;
+static SV *old_retrieve_array(stcxt_t *cxt)
{
I32 len;
I32 i;
continue; /* av_extend() already filled us with undef */
}
if (c != SX_ITEM)
- (void) retrieve_other(0); /* Will croak out */
+ (void) retrieve_other((stcxt_t *) 0); /* Will croak out */
TRACEME(("(#%d) item", i));
- sv = retrieve(cxt); /* Retrieve item */
+ sv = retrieve(cxt); /* Retrieve item */
if (!sv)
return (SV *) 0;
if (av_store(av, i, sv) == 0)
return (SV *) 0;
}
- TRACEME(("ok (old_retrieve_array at 0x%lx)", (unsigned long) av));
+ TRACEME(("ok (old_retrieve_array at 0x%"UVxf")", PTR2UV(av)));
return (SV *) av;
}
*
* When we come here, SX_HASH has been read already.
*/
-static SV *old_retrieve_hash(cxt)
-stcxt_t *cxt;
+static SV *old_retrieve_hash(stcxt_t *cxt)
{
I32 len;
I32 size;
if (!sv)
return (SV *) 0;
} else
- (void) retrieve_other(0); /* Will croak out */
+ (void) retrieve_other((stcxt_t *) 0); /* Will croak out */
/*
* Get key.
GETMARK(c);
if (c != SX_KEY)
- (void) retrieve_other(0); /* Will croak out */
+ (void) retrieve_other((stcxt_t *) 0); /* Will croak out */
RLEN(size); /* Get key size */
KBUFCHK(size); /* Grow hash key read pool if needed */
if (size)
return (SV *) 0;
}
- TRACEME(("ok (retrieve_hash at 0x%lx)", (unsigned long) hv));
+ TRACEME(("ok (retrieve_hash at 0x%"UVxf")", PTR2UV(hv)));
return (SV *) hv;
}
* Note that there's no byte ordering info emitted when network order was
* used at store time.
*/
-static SV *magic_check(cxt)
-stcxt_t *cxt;
+static SV *magic_check(stcxt_t *cxt)
{
char buf[256];
char byteorder[256];
if ((int) c != sizeof(char *))
CROAK(("Pointer integer size is not compatible"));
+ if (version_major >= 2 && version_minor >= 2) {
+ GETMARK(c); /* sizeof(NV) */
+ if ((int) c != sizeof(NV))
+ CROAK(("Double size is not compatible"));
+ }
+
return &PL_sv_undef; /* OK */
}
* root SV (which may be an AV or an HV for what we care).
* Returns null if there is a problem.
*/
-static SV *retrieve(cxt)
-stcxt_t *cxt;
+static SV *retrieve(stcxt_t *cxt)
{
int type;
SV **svh;
I32 tagn;
svh = hv_fetch(cxt->hseen, (char *) &tag, sizeof(tag), FALSE);
if (!svh)
- CROAK(("Old tag 0x%x should have been mapped already", tag));
+ CROAK(("Old tag 0x%x should have been mapped already", (unsigned)tag));
tagn = SvIV(*svh); /* Mapped tag number computed earlier below */
/*
svh = av_fetch(cxt->aseen, tagn, FALSE);
if (!svh)
- CROAK(("Object #%d should have been retrieved already", tagn));
+ CROAK(("Object #%d should have been retrieved already", (int)tagn));
sv = *svh;
- TRACEME(("has retrieved #%d at 0x%lx", tagn, (unsigned long) sv));
+ TRACEME(("has retrieved #%d at 0x%"UVxf, tagn, PTR2UV(sv)));
SvREFCNT_inc(sv); /* One more reference to this same sv */
return sv; /* The SV pointer where object was retrieved */
}
if (type == SX_OBJECT) {
I32 tag;
- READ(&tag, sizeof(I32));
+ READ_I32(tag);
tag = ntohl(tag);
svh = av_fetch(cxt->aseen, tag, FALSE);
if (!svh)
- CROAK(("Object #%d should have been retrieved already", tag));
+ CROAK(("Object #%d should have been retrieved already", (int)tag));
sv = *svh;
- TRACEME(("had retrieved #%d at 0x%lx", tag, (unsigned long) sv));
+ TRACEME(("had retrieved #%d at 0x%"UVxf, tag, PTR2UV(sv)));
SvREFCNT_inc(sv); /* One more reference to this same sv */
return sv; /* The SV pointer where object was retrieved */
}
}
}
- TRACEME(("ok (retrieved 0x%lx, refcnt=%d, %s)", (unsigned long) sv,
+ TRACEME(("ok (retrieved 0x%"UVxf", refcnt=%d, %s)", PTR2UV(sv),
SvREFCNT(sv) - 1, sv_reftype(sv, FALSE)));
return sv; /* Ok */
* Retrieve data held in file and return the root object.
* Common routine for pretrieve and mretrieve.
*/
-static SV *do_retrieve(f, in, optype)
-PerlIO *f;
-SV *in;
-int optype;
+static SV *do_retrieve(
+ PerlIO *f,
+ SV *in,
+ int optype)
{
dSTCXT;
SV *sv;
+ int is_tainted; /* Is input source tainted? */
struct extendable msave; /* Where potentially valid mbuf is saved */
TRACEME(("do_retrieve (optype = 0x%x)", optype));
* free up memory for them now.
*/
- if (cxt->dirty)
+ if (cxt->s_dirty)
clean_context(cxt);
/*
cxt->entry++;
ASSERT(cxt->entry == 1, ("starting new recursion"));
- ASSERT(!cxt->dirty, ("clean context"));
+ ASSERT(!cxt->s_dirty, ("clean context"));
/*
* Prepare context.
TRACEME(("data stored in %s format",
cxt->netorder ? "net order" : "native"));
- init_retrieve_context(cxt, optype);
+ /*
+ * Check whether input source is tainted, so that we don't wrongly
+ * taint perfectly good values...
+ *
+ * We assume file input is always tainted. If both `f' and `in' are
+ * NULL, then we come from dclone, and tainted is already filled in
+ * the context. That's a kludge, but the whole dclone() thing is
+ * already quite a kludge anyway! -- RAM, 15/09/2000.
+ */
+
+ is_tainted = f ? 1 : (in ? SvTAINTED(in) : cxt->s_tainted);
+ TRACEME(("input source is %s", is_tainted ? "tainted" : "trusted"));
+ init_retrieve_context(cxt, optype, is_tainted);
ASSERT(is_retrieving(), ("within retrieve operation"));
return &PL_sv_undef; /* Something went wrong, return undef */
}
- TRACEME(("retrieve got %s(0x%lx)",
- sv_reftype(sv, FALSE), (unsigned long) sv));
+ TRACEME(("retrieve got %s(0x%"UVxf")",
+ sv_reftype(sv, FALSE), PTR2UV(sv)));
/*
* Backward compatibility with Storable-0.5@9 (which we know we
*
* Retrieve data held in file and return the root object, undef on error.
*/
-SV *pretrieve(f)
-PerlIO *f;
+SV *pretrieve(PerlIO *f)
{
TRACEME(("pretrieve"));
return do_retrieve(f, Nullsv, 0);
*
* Retrieve data held in scalar and return the root object, undef on error.
*/
-SV *mretrieve(sv)
-SV *sv;
+SV *mretrieve(SV *sv)
{
TRACEME(("mretrieve"));
- return do_retrieve((PerlIO*)0, sv, 0);
+ return do_retrieve((PerlIO*) 0, sv, 0);
}
/***
* there. Not that efficient, but it should be faster than doing it from
* pure perl anyway.
*/
-SV *dclone(sv)
-SV *sv;
+SV *dclone(SV *sv)
{
dSTCXT;
int size;
* free up memory for them now.
*/
- if (cxt->dirty)
+ if (cxt->s_dirty)
clean_context(cxt);
/*
* we need to allocate one because we're deep cloning from a hook.
*/
- if (!do_store((PerlIO*)0, sv, ST_CLONE, FALSE, (SV**)0))
+ if (!do_store((PerlIO*) 0, sv, ST_CLONE, FALSE, (SV**) 0))
return &PL_sv_undef; /* Error during store */
/*
* Now, `cxt' may refer to a new context.
*/
- ASSERT(!cxt->dirty, ("clean context"));
+ ASSERT(!cxt->s_dirty, ("clean context"));
ASSERT(!cxt->entry, ("entry will not cause new context allocation"));
size = MBUF_SIZE();
TRACEME(("dclone stored %d bytes", size));
-
MBUF_INIT(size);
- out = do_retrieve((PerlIO*)0, Nullsv, ST_CLONE); /* Will free non-root context */
- TRACEME(("dclone returns 0x%lx", (unsigned long) out));
+ /*
+ * Since we're passing do_retrieve() both a NULL file and sv, we need
+ * to pre-compute the taintedness of the input by setting cxt->tainted
+ * to whatever state our own input string was. -- RAM, 15/09/2000
+ *
+ * do_retrieve() will free non-root context.
+ */
+
+ cxt->s_tainted = SvTAINTED(sv);
+ out = do_retrieve((PerlIO*) 0, Nullsv, ST_CLONE);
+
+ TRACEME(("dclone returns 0x%"UVxf, PTR2UV(out)));
return out;
}