3 * Copyright (c) 1991-1997, Larry Wall
5 * You may distribute under the terms of either the GNU General Public
6 * License or the Artistic License, as specified in the README file.
11 * "...for the Entwives desired order, and plenty, and peace (by which they
12 * meant that things should remain where they had set them)." --Treebeard
/*
 * Fragment of av_reify() from Perl 5's av.c.  NOTE(review): this is a
 * line-numbered listing with intermediate lines elided (27, 28, 31, ...);
 * do not assume the code below is complete or compilable.
 * Visible behavior: warn when called on a 'P'-magic (tied) array; pad
 * slots past AvFILLp with &sv_undef; bump the refcount of each element
 * the array will now own; undef-fill the slack between AvALLOC and
 * AvARRAY.  Presumably converts a "fake" AV (e.g. @_) into a real,
 * refcounted one -- confirm against the full source.
 */
27 if (SvRMAGICAL(av) && mg_find((SV*)av,'P'))
28 warn("av_reify called on tied array");
31 while (key > AvFILLp(av) + 1)
32 AvARRAY(av)[--key] = &sv_undef;
34 sv = AvARRAY(av)[--key];
36 if (sv != &sv_undef) {
38 (void)SvREFCNT_inc(sv);
41 key = AvARRAY(av) - AvALLOC(av);
43 AvALLOC(av)[--key] = &sv_undef;
/*
 * Fragment of av_extend(): grow the AV's storage so index `key` is valid.
 * NOTE(review): lines are elided in this listing -- fragment only.
 * Visible behavior:
 *  - tied arrays ('P' magic): delegate by calling the tie's EXTEND method
 *    with key+1 as the requested size;
 *  - otherwise, first reclaim slack by Move()ing the live elements back
 *    to AvALLOC and undef-filling the freed tail;
 *  - if still too small, pick a newmax growth target and either Renew()
 *    in place or (without STRANGE_MALLOC) New()+Copy()+Safefree() into a
 *    fresh buffer, rounding the request to malloc-bucket size via the
 *    MALLOC_OVERHEAD loop;
 *  - if av is curstack, patch stack_sp/stack_base/stack_max since the
 *    buffer may have moved;
 *  - a brand-new array gets at least 4 slots, with slot 0 reserved
 *    (&sv_undef) "For the stacks".
 */
48 av_extend(AV *av, I32 key)
50 dTHR; /* only necessary if we have to extend stack */
52 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
60 PUSHs(sv_2mortal(newSViv(key+1)));
62 perl_call_method("EXTEND", G_SCALAR|G_DISCARD);
68 if (key > AvMAX(av)) {
73 if (AvALLOC(av) != AvARRAY(av)) {
74 ary = AvALLOC(av) + AvFILLp(av) + 1;
75 tmp = AvARRAY(av) - AvALLOC(av);
76 Move(AvARRAY(av), AvALLOC(av), AvFILLp(av)+1, SV*);
78 SvPVX(av) = (char*)AvALLOC(av);
81 ary[--tmp] = &sv_undef;
84 if (key > AvMAX(av) - 10) {
85 newmax = key + AvMAX(av);
91 #ifndef STRANGE_MALLOC
95 newmax = key + AvMAX(av) / 5;
98 Renew(AvALLOC(av),newmax+1, SV*);
100 bytes = (newmax + 1) * sizeof(SV*);
101 #define MALLOC_OVERHEAD 16
102 tmp = MALLOC_OVERHEAD;
103 while (tmp - MALLOC_OVERHEAD < bytes)
105 tmp -= MALLOC_OVERHEAD;
107 assert(tmp > newmax);
109 New(2,ary, newmax+1, SV*);
110 Copy(AvALLOC(av), ary, AvMAX(av)+1, SV*);
112 offer_nice_chunk(AvALLOC(av), (AvMAX(av)+1) * sizeof(SV*));
114 Safefree(AvALLOC(av));
117 ary = AvALLOC(av) + AvMAX(av) + 1;
118 tmp = newmax - AvMAX(av);
119 if (av == curstack) { /* Oops, grew stack (via av_store()?) */
120 stack_sp = AvALLOC(av) + (stack_sp - stack_base);
121 stack_base = AvALLOC(av);
122 stack_max = stack_base + newmax;
126 newmax = key < 4 ? 4 : key;
127 New(2,AvALLOC(av), newmax+1, SV*);
128 ary = AvALLOC(av) + 1;
130 AvALLOC(av)[0] = &sv_undef; /* For the stacks */
134 ary[--tmp] = &sv_undef;
137 SvPVX(av) = (char*)AvALLOC(av);
/*
 * Fragment of av_fetch(): look up element `key`, returning SV** (or,
 * in elided lines, presumably NULL/&sv_undef on miss -- confirm).
 * NOTE(review): intermediate lines elided in this listing.
 * Visible behavior: a negative key counts from the end (key += AvFILL+1);
 * tied arrays get the value proxied via mg_copy; with `lval`, an
 * out-of-range or &sv_undef slot is autovivified through av_store();
 * for non-REAL arrays (e.g. @_), a freed/garbage slot is half-reified
 * to &sv_undef before returning its address.
 */
144 av_fetch(register AV *av, I32 key, I32 lval)
152 key += AvFILL(av) + 1;
157 if (SvRMAGICAL(av)) {
158 if (mg_find((SV*)av,'P')) {
161 mg_copy((SV*)av, sv, 0, key);
167 if (key > AvFILLp(av)) {
174 return av_store(av,key,sv);
176 if (AvARRAY(av)[key] == &sv_undef) {
180 return av_store(av,key,sv);
185 && (!AvARRAY(av)[key] /* eg. @_ could have freed elts */
186 || SvTYPE(AvARRAY(av)[key]) == SVTYPEMASK)) {
187 AvARRAY(av)[key] = &sv_undef; /* 1/2 reify */
190 return &AvARRAY(av)[key];
/*
 * Fragment of av_store(): store `val` at index `key`.
 * NOTE(review): intermediate lines elided in this listing.
 * Visible behavior: negative keys count from the end; storing past
 * AvFILL on a READONLY array is rejected (elided croak, presumably);
 * tied arrays proxy non-undef values via mg_copy; fake-but-reifiable
 * arrays are reified first; the gap below `key` is padded with
 * &sv_undef (and stack_sp advanced when av is curstack -- "XPUSH in
 * disguise"); the displaced old element is SvREFCNT_dec'd; set-magic
 * propagates element magic onto `val` via sv_magic with the lowercased
 * container magic type.
 */
194 av_store(register AV *av, I32 key, SV *val)
206 key += AvFILL(av) + 1;
211 if (SvREADONLY(av) && key >= AvFILL(av))
214 if (SvRMAGICAL(av)) {
215 if (mg_find((SV*)av,'P')) {
216 if (val != &sv_undef) {
217 mg_copy((SV*)av, val, 0, key);
223 if (!AvREAL(av) && AvREIFY(av))
228 if (AvFILLp(av) < key) {
231 if (av == curstack && key > stack_sp - stack_base)
232 stack_sp = stack_base + key; /* XPUSH in disguise */
234 ary[++AvFILLp(av)] = &sv_undef;
235 while (AvFILLp(av) < key);
240 SvREFCNT_dec(ary[key]);
242 if (SvSMAGICAL(av)) {
243 if (val != &sv_undef) {
244 MAGIC* mg = SvMAGIC(av);
245 sv_magic(val, (SV*)av, toLOWER(mg->mg_type), 0, key);
/*
 * NOTE(review): body fragment of an AV constructor (signature elided;
 * looks like newAV() -- confirm against the full source).  Allocates a
 * fresh SV, upgrades it to type PVAV, and marks it empty by setting
 * both AvMAX and AvFILLp to -1.
 */
257 av = (AV*)NEWSV(3,0);
258 sv_upgrade((SV *)av, SVt_PVAV);
262 AvMAX(av) = AvFILLp(av) = -1;
/*
 * Fragment of av_make(): build a new REAL AV holding `size` elements
 * copied (via sv_setsv, i.e. by value) from the SV* vector `strp`.
 * NOTE(review): intermediate lines elided -- the allocation of `ary`
 * and the per-element SV creation are not visible here.
 */
267 av_make(register I32 size, register SV **strp)
273 av = (AV*)NEWSV(8,0);
274 sv_upgrade((SV *) av,SVt_PVAV);
275 AvFLAGS(av) = AVf_REAL;
276 if (size) { /* `defined' was returning undef for size==0 anyway. */
279 SvPVX(av) = (char*)ary;
280 AvFILLp(av) = size - 1;
281 AvMAX(av) = size - 1;
282 for (i = 0; i < size; i++) {
285 sv_setsv(ary[i], *strp);
/*
 * Fragment of av_fake(): build a "fake" AV that aliases the `size` SV
 * pointers in `strp` (raw Copy of pointers -- no refcount bump here,
 * unlike av_make's sv_setsv) and is flagged AVf_REIFY so it will be
 * turned into a real array on first modification.
 * NOTE(review): intermediate lines elided in this listing.
 */
293 av_fake(register I32 size, register SV **strp)
298 av = (AV*)NEWSV(9,0);
299 sv_upgrade((SV *)av, SVt_PVAV);
300 New(4,ary,size+1,SV*);
302 Copy(strp,ary,size,SV*);
303 AvFLAGS(av) = AVf_REIFY;
304 SvPVX(av) = (char*)ary;
305 AvFILLp(av) = size - 1;
306 AvMAX(av) = size - 1;
/*
 * Fragment of av_clear(): empty the array without freeing its storage.
 * NOTE(review): intermediate lines elided in this listing.
 * Visible behavior: warn if the AV is already dead (refcount <= 0);
 * SvREFCNT_dec every element and overwrite its slot with &sv_undef;
 * if AvARRAY had been advanced past AvALLOC (by av_shift), reset the
 * data pointer back to the start of the allocation.  The `key = ...`
 * inside the if-condition at original line 348 is an intentional
 * assignment-as-test (nonzero offset), not a typo for `==`.
 */
316 av_clear(register AV *av)
322 if (SvREFCNT(av) <= 0) {
323 warn("Attempt to clear deleted array");
333 /* Give any tie a chance to cleanup first */
342 key = AvFILLp(av) + 1;
344 SvREFCNT_dec(ary[--key]);
345 ary[key] = &sv_undef;
348 if (key = AvARRAY(av) - AvALLOC(av)) {
350 SvPVX(av) = (char*)AvALLOC(av);
/*
 * Fragment of av_undef(): free the array's contents AND its storage
 * (stronger than av_clear).  NOTE(review): lines elided in this listing.
 * Visible behavior: a tied array is emptied via av_fill(av, -1) so the
 * tie can clean up; otherwise each element is SvREFCNT_dec'd, the
 * backing buffer is Safefree'd, AvMAX/AvFILLp are reset to -1, and the
 * cached $#array magic SV (AvARYLEN) is released.
 */
357 av_undef(register AV *av)
365 /* Give any tie a chance to cleanup first */
366 if (SvRMAGICAL(av) && mg_find((SV*)av,'P'))
367 av_fill(av, -1); /* mg_clear() ? */
370 key = AvFILLp(av) + 1;
372 SvREFCNT_dec(AvARRAY(av)[--key]);
374 Safefree(AvALLOC(av));
377 AvMAX(av) = AvFILLp(av) = -1;
379 SvREFCNT_dec(AvARYLEN(av));
/*
 * Fragment of av_push(): append `val` to the array, taking ownership of
 * the caller's reference.  A tied array dispatches to its PUSH method;
 * otherwise this is just av_store() at index AvFILLp(av)+1.
 * NOTE(review): intermediate lines elided in this listing.
 */
385 av_push(register AV *av, SV *val)
393 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
402 perl_call_method("PUSH", G_SCALAR|G_DISCARD);
407 av_store(av,AvFILLp(av)+1,val);
/*
 * Fragment of av_pop(): remove and return the last element (presumably
 * &sv_undef when the array is empty -- the return for the AvFILL<0
 * guard is elided; confirm).  A tied array calls its POP method and
 * copies the result off the stack with newSVsv; otherwise the last slot
 * is handed to the caller and replaced by &sv_undef as AvFILLp drops.
 * NOTE(review): intermediate lines elided in this listing.
 */
411 av_pop(register AV *av)
416 if (!av || AvFILL(av) < 0)
420 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
427 if (perl_call_method("POP", G_SCALAR)) {
428 retval = newSVsv(*stack_sp--);
436 retval = AvARRAY(av)[AvFILLp(av)];
437 AvARRAY(av)[AvFILLp(av)--] = &sv_undef;
/*
 * Fragment of av_unshift(): open up `num` undef slots at the front.
 * NOTE(review): intermediate lines elided in this listing.
 * Visible behavior: tied arrays dispatch to their UNSHIFT method;
 * fake-but-reifiable arrays are reified first; existing slack between
 * AvALLOC and AvARRAY (`i`) is reused by walking AvARRAY back, and any
 * remaining shortfall is handled by av_extend + Move of the live
 * elements; the vacated slots are filled with &sv_undef.
 */
444 av_unshift(register AV *av, register I32 num)
455 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
466 perl_call_method("UNSHIFT", G_SCALAR|G_DISCARD);
472 if (!AvREAL(av) && AvREIFY(av))
474 i = AvARRAY(av) - AvALLOC(av);
482 SvPVX(av) = (char*)(AvARRAY(av) - i);
486 av_extend(av, i + num);
489 Move(ary, ary + num, i + 1, SV*);
491 ary[--num] = &sv_undef;
/*
 * Fragment of av_shift(): remove and return the first element.
 * NOTE(review): intermediate lines elided in this listing.
 * Visible behavior: tied arrays call their SHIFT method (result copied
 * via newSVsv off the stack); otherwise the first slot is taken, set to
 * &sv_undef, and AvARRAY is advanced by one -- an O(1) shift that leaves
 * slack before the array (later reclaimed by av_extend/av_clear).
 */
497 av_shift(register AV *av)
502 if (!av || AvFILL(av) < 0)
506 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
513 if (perl_call_method("SHIFT", G_SCALAR)) {
514 retval = newSVsv(*stack_sp--);
522 retval = *AvARRAY(av);
524 *AvARRAY(av) = &sv_undef;
525 SvPVX(av) = (char*)(AvARRAY(av) + 1);
534 av_len(register AV *av)
/*
 * Fragment of av_fill(): set the array's highest index to `fill`
 * (the $#array = n operation).  NOTE(review): lines elided here.
 * Visible behavior: croaks on a NULL av; tied arrays dispatch to
 * STORESIZE with fill+1; shrinking within current storage dec-refs and
 * undef-fills the truncated tail, growing within AvMAX undef-fills up
 * to `fill`; growth beyond AvMAX is delegated to av_store of &sv_undef
 * at index `fill`.
 */
540 av_fill(register AV *av, I32 fill)
544 croak("panic: null array");
547 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
555 PUSHs(sv_2mortal(newSViv(fill+1)));
557 perl_call_method("STORESIZE", G_SCALAR|G_DISCARD);
563 if (fill <= AvMAX(av)) {
564 I32 key = AvFILLp(av);
565 SV** ary = AvARRAY(av);
569 SvREFCNT_dec(ary[key]);
570 ary[key--] = &sv_undef;
575 ary[++key] = &sv_undef;
583 (void)av_store(av,fill,&sv_undef);
/*
 * NOTE(review): body fragment (signature elided) -- from the calls below
 * this looks like avhv_keys(): fetch element 0 of a pseudo-hash AV,
 * expect it to (reference) an HV of field-name => index mappings, and
 * croak if the array cannot be coerced into a hash.  Confirm against
 * the full source.
 */
593 keysp = av_fetch(av, 0, FALSE);
600 if (SvTYPE(sv) == SVt_PVHV)
605 croak("Can't coerce array into hash");
/*
 * Fragment of avhv_fetch(): pseudo-hash lookup by C-string key.
 * NOTE(review): lines elided.  Resolves `key` to an integer index via
 * the keys HV; on a miss (in an elided lval branch, presumably) a new
 * index AvFILL+1 is allocated and recorded; bad indices croak; the
 * actual value comes from av_fetch on the resolved index.
 */
610 avhv_fetch(AV *av, char *key, U32 klen, I32 lval)
613 HV *keys = avhv_keys(av);
616 indsvp = hv_fetch(keys, key, klen, FALSE);
620 croak("Bad index while coercing array into hash");
625 ind = AvFILL(av) + 1;
626 hv_store(keys, key, klen, newSViv(ind), 0);
628 return av_fetch(av, ind, lval);
/*
 * Fragment of avhv_fetch_ent(): SV-keyed variant of avhv_fetch, using
 * hv_fetch_ent with a precomputed hash.  Same visible shape: resolve
 * key -> index, auto-assign AvFILL+1 on miss (elided lval guard,
 * presumably), croak on a bad index, delegate to av_fetch.
 * NOTE(review): intermediate lines elided in this listing.
 */
632 avhv_fetch_ent(AV *av, SV *keysv, I32 lval, U32 hash)
635 HV *keys = avhv_keys(av);
639 he = hv_fetch_ent(keys, keysv, FALSE, hash);
641 ind = SvIV(HeVAL(he));
643 croak("Bad index while coercing array into hash");
648 ind = AvFILL(av) + 1;
649 hv_store_ent(keys, keysv, newSViv(ind), 0);
651 return av_fetch(av, ind, lval);
/*
 * Fragment of avhv_store(): pseudo-hash store by C-string key.
 * Resolves (or allocates at AvFILL+1) the index for `key` in the keys
 * HV, croaks on a bad index, then stores `val` via av_store.
 * NOTE(review): intermediate lines elided in this listing.
 */
655 avhv_store(AV *av, char *key, U32 klen, SV *val, U32 hash)
658 HV *keys = avhv_keys(av);
661 indsvp = hv_fetch(keys, key, klen, FALSE);
665 croak("Bad index while coercing array into hash");
667 ind = AvFILL(av) + 1;
668 hv_store(keys, key, klen, newSViv(ind), hash);
670 return av_store(av, ind, val);
/*
 * Fragment of avhv_store_ent(): SV-keyed variant of avhv_store, using
 * hv_fetch_ent/hv_store_ent with a precomputed hash; otherwise the same
 * resolve-or-allocate-index-then-av_store shape.
 * NOTE(review): intermediate lines elided in this listing.
 */
674 avhv_store_ent(AV *av, SV *keysv, SV *val, U32 hash)
676 HV *keys = avhv_keys(av);
680 he = hv_fetch_ent(keys, keysv, FALSE, hash);
682 ind = SvIV(HeVAL(he));
684 croak("Bad index while coercing array into hash");
686 ind = AvFILL(av) + 1;
687 hv_store_ent(keys, keysv, newSViv(ind), hash);
689 return av_store(av, ind, val);
/* Fragment of avhv_exists_ent(): existence test on a pseudo-hash --
 * delegates straight to hv_exists_ent on the keys HV.
 * NOTE(review): braces/return type elided in this listing. */
693 avhv_exists_ent(AV *av, SV *keysv, U32 hash)
695 HV *keys = avhv_keys(av);
696 return hv_exists_ent(keys, keysv, hash);
/* Fragment of avhv_exists(): C-string-keyed existence test -- delegates
 * to hv_exists on the keys HV.
 * NOTE(review): braces/return type elided in this listing. */
700 avhv_exists(AV *av, char *key, U32 klen)
702 HV *keys = avhv_keys(av);
703 return hv_exists(keys, key, klen);
/*
 * Fragment of avhv_delete(): delete `key` from the keys HV (the array
 * slot itself is intentionally leaked -- see the author's comment on the
 * next line), croak on a bad index, fetch the displaced value, and
 * either discard it (G_DISCARD) or return it mortalized.
 * NOTE(review): intermediate lines elided in this listing.
 */
706 /* avhv_delete leaks. Caller can re-index and compress if so desired. */
708 avhv_delete(AV *av, char *key, U32 klen, I32 flags)
710 HV *keys = avhv_keys(av);
715 sv = hv_delete(keys, key, klen, 0);
720 croak("Bad index while coercing array into hash");
721 svp = av_fetch(av, ind, FALSE);
724 if (flags & G_DISCARD) {
728 sv = sv_2mortal(*svp);
/*
 * Fragment of avhv_delete_ent(): SV-keyed variant of avhv_delete using
 * hv_delete_ent with a precomputed hash; same leak caveat and same
 * discard-or-mortalize handling of the displaced value.
 * NOTE(review): intermediate lines elided in this listing.
 */
734 /* avhv_delete_ent leaks. Caller can re-index and compress if so desired. */
736 avhv_delete_ent(AV *av, SV *keysv, I32 flags, U32 hash)
738 HV *keys = avhv_keys(av);
743 sv = hv_delete_ent(keys, keysv, 0, hash);
748 croak("Bad index while coercing array into hash");
749 svp = av_fetch(av, ind, FALSE);
752 if (flags & G_DISCARD) {
756 sv = sv_2mortal(*svp);
/* Fragment of avhv_iterinit(): start iteration over a pseudo-hash by
 * delegating to hv_iterinit on the keys HV.
 * NOTE(review): braces/return type elided in this listing. */
763 avhv_iterinit(AV *av)
765 HV *keys = avhv_keys(av);
766 return hv_iterinit(keys);
/* Fragment of avhv_iternext(): advance pseudo-hash iteration by
 * delegating to hv_iternext on the keys HV.
 * NOTE(review): braces/return type elided in this listing. */
770 avhv_iternext(AV *av)
772 HV *keys = avhv_keys(av);
773 return hv_iternext(keys);
/*
 * Fragment of avhv_iterval(): map the current iteration entry to its
 * array value -- take the index SV from hv_iterval, croak on a bad
 * index, and return the (lvalue) element via av_fetch(..., TRUE).
 * NOTE(review): intermediate lines elided in this listing.
 */
777 avhv_iterval(AV *av, register HE *entry)
779 HV *keys = avhv_keys(av);
783 sv = hv_iterval(keys, entry);
786 croak("Bad index while coercing array into hash");
787 return *av_fetch(av, ind, TRUE);
/*
 * Fragment of avhv_iternextsv(): combined iternext + key extraction +
 * value lookup for a pseudo-hash.  Advances the keys HV iterator,
 * returns the key name/length via out-parameters, croaks on a bad
 * index, and returns the array element via av_fetch(..., TRUE).
 * NOTE(review): intermediate lines elided (e.g. the NULL-entry return).
 */
791 avhv_iternextsv(AV *av, char **key, I32 *retlen)
793 HV *keys = avhv_keys(av);
798 he = hv_iternext(keys);
801 *key = hv_iterkey(he, retlen);
802 sv = hv_iterval(keys, he);
805 croak("Bad index while coercing array into hash");
806 return *av_fetch(av, ind, TRUE);