3 * Copyright (c) 1991-1997, Larry Wall
5 * You may distribute under the terms of either the GNU General Public
6 * License or the Artistic License, as specified in the README file.
11 * "...for the Entwives desired order, and plenty, and peace (by which they
12 * meant that things should remain where they had set them)." --Treebeard
/*
 * av_reify (body fragment -- leading numbers are original source lines;
 * intervening lines are elided in this excerpt).
 * Visible logic: warn if called on a tied array, pad slots beyond the
 * fill pointer with &sv_undef, bump the refcount of each real element
 * (so the AV owns its contents), and undef the slack before AvARRAY.
 */
27 if (SvRMAGICAL(av) && mg_find((SV*)av,'P'))
28 warn("av_reify called on tied array");
/* Fill the gap between key and the current fill with undef. */
31 while (key > AvFILLp(av) + 1)
32 AvARRAY(av)[--key] = &sv_undef;
34 sv = AvARRAY(av)[--key];
36 if (sv != &sv_undef) {
/* Take a reference on real elements -- the array now owns them. */
38 (void)SvREFCNT_inc(sv);
/* key becomes the offset of AvARRAY within the allocated chunk;
 * the slots below AvARRAY are cleared to &sv_undef. */
41 key = AvARRAY(av) - AvALLOC(av);
43 AvALLOC(av)[--key] = &sv_undef;
/*
 * av_extend -- grow av so index `key` is addressable (excerpt; original
 * line numbers at left, many lines elided).
 * Visible logic: tied arrays get their EXTEND method called instead;
 * otherwise the element buffer is shifted down and/or reallocated.
 */
48 av_extend(AV *av, I32 key)
50 dTHR; /* only necessary if we have to extend stack */
/* Tied array ('P' magic): delegate to the tie's EXTEND method. */
52 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
60 PUSHs(sv_2mortal(newSViv(key+1)));
62 perl_call_method("EXTEND", G_SCALAR|G_DISCARD);
68 if (key > AvMAX(av)) {
/* If elements were shifted off the front, slide them back to the
 * start of the allocation to reclaim that space first. */
73 if (AvALLOC(av) != AvARRAY(av)) {
74 ary = AvALLOC(av) + AvFILLp(av) + 1;
75 tmp = AvARRAY(av) - AvALLOC(av);
76 Move(AvARRAY(av), AvALLOC(av), AvFILLp(av)+1, SV*);
78 SvPVX(av) = (char*)AvALLOC(av);
81 ary[--tmp] = &sv_undef;
/* Still too small: pick a new size with geometric headroom. */
84 if (key > AvMAX(av) - 10) {
85 newmax = key + AvMAX(av);
91 #ifndef STRANGE_MALLOC
95 newmax = key + AvMAX(av) / 5;
98 Renew(AvALLOC(av),newmax+1, SV*);
/* Round the request so that request+overhead is malloc-friendly
 * (avoids wasting the allocator's per-block overhead). */
100 bytes = (newmax + 1) * sizeof(SV*);
101 #define MALLOC_OVERHEAD 16
102 tmp = MALLOC_OVERHEAD;
103 while (tmp - MALLOC_OVERHEAD < bytes)
105 tmp -= MALLOC_OVERHEAD;
107 assert(tmp > newmax);
/* Non-Renew path: allocate fresh, copy, release the old chunk. */
109 New(2,ary, newmax+1, SV*);
110 Copy(AvALLOC(av), ary, AvMAX(av)+1, SV*);
112 offer_nice_chunk(AvALLOC(av), (AvMAX(av)+1) * sizeof(SV*));
114 Safefree(AvALLOC(av));
117 ary = AvALLOC(av) + AvMAX(av) + 1;
118 tmp = newmax - AvMAX(av);
/* The argument stack may live in an AV: repoint the stack globals
 * at the (possibly moved) allocation. */
119 if (av == curstack) { /* Oops, grew stack (via av_store()?) */
120 stack_sp = AvALLOC(av) + (stack_sp - stack_base);
121 stack_base = AvALLOC(av);
122 stack_max = stack_base + newmax;
/* First allocation: at least 4 slots. */
126 newmax = key < 4 ? 4 : key;
127 New(2,AvALLOC(av), newmax+1, SV*);
128 ary = AvALLOC(av) + 1;
130 AvALLOC(av)[0] = &sv_undef; /* For the stacks */
134 ary[--tmp] = &sv_undef;
137 SvPVX(av) = (char*)AvALLOC(av);
/*
 * av_fetch -- return a pointer to the SV* slot for index `key`
 * (excerpt; original line numbers at left, lines elided).
 * Visible logic: negative keys count from the end; tied arrays go
 * through mg_copy; absent slots return via av_store when lval is set.
 */
144 av_fetch(register AV *av, I32 key, I32 lval)
/* Negative index: offset from one past the last element. */
152 key += AvFILL(av) + 1;
157 if (SvRMAGICAL(av)) {
158 if (mg_find((SV*)av,'P')) {
/* Tied array: attach element magic so FETCH fires on access. */
161 mg_copy((SV*)av, sv, 0, key);
167 if (key > AvFILLp(av)) {
/* Beyond the fill pointer: store (and thereby create) the slot. */
174 return av_store(av,key,sv);
176 if (AvARRAY(av)[key] == &sv_undef) {
180 return av_store(av,key,sv);
/* Half-reify freed/garbage slots (possible in a fake @_). */
185 && (!AvARRAY(av)[key] /* eg. @_ could have freed elts */
186 || SvTYPE(AvARRAY(av)[key]) == SVTYPEMASK)) {
187 AvARRAY(av)[key] = &sv_undef; /* 1/2 reify */
190 return &AvARRAY(av)[key];
/*
 * av_store -- store `val` at index `key` (excerpt; original line
 * numbers at left, lines elided).
 * Visible logic: negative-index translation, readonly check, tie
 * delegation via mg_copy, reify-on-write, growth of the fill pointer,
 * refcount release of the displaced element, and set-magic propagation.
 */
194 av_store(register AV *av, I32 key, SV *val)
/* Negative index: offset from one past the last element. */
206 key += AvFILL(av) + 1;
211 if (SvREADONLY(av) && key >= AvFILL(av))
214 if (SvRMAGICAL(av)) {
215 if (mg_find((SV*)av,'P')) {
/* Tied array: attach element magic so STORE fires. */
216 if (val != &sv_undef) {
217 mg_copy((SV*)av, val, 0, key);
/* Fake array being written to: take ownership of its elements. */
223 if (!AvREAL(av) && AvREIFY(av))
228 if (AvFILLp(av) < key) {
/* Storing past the stack pointer into the arg stack acts as a push. */
231 if (av == curstack && key > stack_sp - stack_base)
232 stack_sp = stack_base + key; /* XPUSH in disguise */
/* Pad the intervening slots with undef up to key. */
234 ary[++AvFILLp(av)] = &sv_undef;
235 while (AvFILLp(av) < key);
/* Drop the reference on whatever previously occupied the slot. */
240 SvREFCNT_dec(ary[key]);
242 if (SvSMAGICAL(av)) {
243 if (val != &sv_undef) {
/* Propagate container set-magic onto the stored element. */
244 MAGIC* mg = SvMAGIC(av);
245 sv_magic(val, (SV*)av, toLOWER(mg->mg_type), 0, key);
/*
 * Constructor fragment -- presumably newAV (function name not visible
 * in this excerpt; TODO confirm against the full source).
 * Visible logic: allocate an SV, upgrade it to a PVAV, and mark it
 * empty (max index and fill both -1).
 */
257 av = (AV*)NEWSV(3,0);
258 sv_upgrade((SV *)av, SVt_PVAV);
262 AvMAX(av) = AvFILLp(av) = -1;
/*
 * av_make -- build a new real AV holding copies of `size` SVs from
 * `strp` (excerpt; original line numbers at left, lines elided).
 * Elements are copied with sv_setsv, so the new array owns fresh SVs.
 */
267 av_make(register I32 size, register SV **strp)
273 av = (AV*)NEWSV(8,0);
274 sv_upgrade((SV *) av,SVt_PVAV);
/* Real array: it owns (refcounts) its elements. */
275 AvFLAGS(av) = AVf_REAL;
276 if (size) { /* `defined' was returning undef for size==0 anyway. */
279 SvPVX(av) = (char*)ary;
280 AvFILLp(av) = size - 1;
281 AvMAX(av) = size - 1;
282 for (i = 0; i < size; i++) {
/* Copy each source SV's value into a fresh element. */
285 sv_setsv(ary[i], *strp);
/*
 * av_fake -- build a "fake" AV whose element pointers are copied
 * from `strp` without taking references (excerpt; lines elided).
 * AVf_REIFY marks it for lazy ownership-taking (see av_reify) if it
 * is ever written to.
 */
293 av_fake(register I32 size, register SV **strp)
298 av = (AV*)NEWSV(9,0);
299 sv_upgrade((SV *)av, SVt_PVAV);
300 New(4,ary,size+1,SV*);
/* Shallow pointer copy -- no refcounts are bumped here. */
302 Copy(strp,ary,size,SV*);
303 AvFLAGS(av) = AVf_REIFY;
304 SvPVX(av) = (char*)ary;
305 AvFILLp(av) = size - 1;
306 AvMAX(av) = size - 1;
/*
 * av_clear -- empty the array without freeing its element buffer
 * (excerpt; original line numbers at left, lines elided).
 * Visible logic: warn on a deleted array, release each element,
 * then restore AvARRAY to the start of the allocation.
 */
316 av_clear(register AV *av)
322 if (SvREFCNT(av) <= 0) {
323 warn("Attempt to clear deleted array");
330 /* Give any tie a chance to cleanup first */
/* Walk down from the fill pointer, releasing every element. */
339 key = AvFILLp(av) + 1;
341 SvREFCNT_dec(ary[--key]);
342 ary[key] = &sv_undef;
/* NOTE(review): intentional assignment-in-condition (nonzero offset
 * means elements were shifted); parenthesizing as ((key = ...)) would
 * silence compiler warnings. */
345 if (key = AvARRAY(av) - AvALLOC(av)) {
347 SvPVX(av) = (char*)AvALLOC(av);
/*
 * av_undef -- free the array's contents and its element buffer
 * (excerpt; original line numbers at left, lines elided).
 * Tied arrays are emptied via av_fill(-1) first; AvARYLEN (the $#ary
 * magic SV) is also released.
 */
354 av_undef(register AV *av)
362 /* Give any tie a chance to cleanup first */
363 if (SvRMAGICAL(av) && mg_find((SV*)av,'P'))
364 av_fill(av, -1); /* mg_clear() ? */
/* Release every element, then the buffer itself. */
367 key = AvFILLp(av) + 1;
369 SvREFCNT_dec(AvARRAY(av)[--key]);
371 Safefree(AvALLOC(av));
375 AvMAX(av) = AvFILLp(av) = -1;
377 SvREFCNT_dec(AvARYLEN(av));
/*
 * av_push -- append `val` to the array (excerpt; lines elided).
 * Tied arrays call their PUSH method; otherwise this is an av_store
 * one past the current fill pointer.
 */
383 av_push(register AV *av, SV *val)
391 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
400 perl_call_method("PUSH", G_SCALAR|G_DISCARD);
405 av_store(av,AvFILLp(av)+1,val);
/*
 * av_pop -- remove and return the last element (excerpt; lines
 * elided). Tied arrays call their POP method and copy the result off
 * the stack; otherwise the slot is handed back directly and replaced
 * with &sv_undef (ownership transfers to the caller).
 */
409 av_pop(register AV *av)
414 if (!av || AvFILL(av) < 0)
418 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
425 if (perl_call_method("POP", G_SCALAR)) {
426 retval = newSVsv(*stack_sp--);
434 retval = AvARRAY(av)[AvFILLp(av)];
435 AvARRAY(av)[AvFILLp(av)--] = &sv_undef;
/*
 * av_unshift -- open up `num` undef slots at the front (excerpt;
 * original line numbers at left, lines elided).
 * Tied arrays call UNSHIFT; otherwise any slack before AvARRAY is
 * consumed first, then the elements are moved up after an av_extend.
 */
442 av_unshift(register AV *av, register I32 num)
453 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
464 perl_call_method("UNSHIFT", G_SCALAR|G_DISCARD);
/* Writing to a fake array: take ownership of elements first. */
470 if (!AvREAL(av) && AvREIFY(av))
/* i = free slots already available in front of AvARRAY. */
472 i = AvARRAY(av) - AvALLOC(av);
480 SvPVX(av) = (char*)(AvARRAY(av) - i);
484 av_extend(av, i + num);
487 Move(ary, ary + num, i + 1, SV*);
/* Initialize the newly exposed front slots to undef. */
489 ary[--num] = &sv_undef;
/*
 * av_shift -- remove and return the first element (excerpt; lines
 * elided). Tied arrays call SHIFT; otherwise the first slot is handed
 * back and AvARRAY is advanced one position (O(1), no element move).
 */
495 av_shift(register AV *av)
500 if (!av || AvFILL(av) < 0)
504 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
511 if (perl_call_method("SHIFT", G_SCALAR)) {
512 retval = newSVsv(*stack_sp--);
520 retval = *AvARRAY(av);
522 *AvARRAY(av) = &sv_undef;
/* Advance the data pointer instead of moving the elements. */
523 SvPVX(av) = (char*)(AvARRAY(av) + 1);
/* av_len -- signature only in this excerpt; body elided.
 * Presumably returns the highest index (as in $#array) -- TODO confirm. */
532 av_len(register AV *av)
/*
 * av_fill -- set the fill pointer so the highest index is `fill`
 * (excerpt; original line numbers at left, lines elided).
 * Tied arrays call STORESIZE; shrinking releases the dropped elements,
 * growing stores undef at the new top via av_store.
 */
538 av_fill(register AV *av, I32 fill)
542 croak("panic: null array");
545 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
553 PUSHs(sv_2mortal(newSViv(fill+1)));
555 perl_call_method("STORESIZE", G_SCALAR|G_DISCARD);
/* Within current capacity: adjust in place. */
561 if (fill <= AvMAX(av)) {
562 I32 key = AvFILLp(av);
563 SV** ary = AvARRAY(av);
/* Shrinking: release each element above the new fill. */
567 SvREFCNT_dec(ary[key]);
568 ary[key--] = &sv_undef;
/* Growing (still in capacity): pad with undef. */
573 ary[++key] = &sv_undef;
581 (void)av_store(av,fill,&sv_undef);
/*
 * Pseudo-hash helper fragment -- presumably avhv_keys (name not
 * visible in this excerpt; TODO confirm). Visible logic: element 0 of
 * the array must hold the key-index HV, else coercion fails.
 */
591 keysp = av_fetch(av, 0, FALSE);
598 if (SvTYPE(sv) == SVt_PVHV)
603 croak("Can't coerce array into hash");
/*
 * avhv_fetch -- pseudo-hash fetch by string key (excerpt; lines
 * elided). The key HV maps key -> array index; a missing key (when
 * creating) is assigned the next free index.
 */
608 avhv_fetch(AV *av, char *key, U32 klen, I32 lval)
611 HV *keys = avhv_keys(av);
614 indsvp = hv_fetch(keys, key, klen, FALSE);
618 croak("Bad index while coercing array into hash");
/* Key not present: append a new index mapping. */
623 ind = AvFILL(av) + 1;
624 hv_store(keys, key, klen, newSViv(ind), 0);
626 return av_fetch(av, ind, lval);
/*
 * avhv_fetch_ent -- pseudo-hash fetch by SV key with precomputed hash
 * (excerpt; lines elided). Mirrors avhv_fetch via the *_ent HV API.
 */
630 avhv_fetch_ent(AV *av, SV *keysv, I32 lval, U32 hash)
633 HV *keys = avhv_keys(av);
637 he = hv_fetch_ent(keys, keysv, FALSE, hash);
639 ind = SvIV(HeVAL(he));
641 croak("Bad index while coercing array into hash");
/* Key not present: append a new index mapping. */
646 ind = AvFILL(av) + 1;
647 hv_store_ent(keys, keysv, newSViv(ind), 0);
649 return av_fetch(av, ind, lval);
/*
 * avhv_store -- pseudo-hash store by string key (excerpt; lines
 * elided). Resolves (or creates) the index for `key`, then stores
 * `val` at that array index.
 */
653 avhv_store(AV *av, char *key, U32 klen, SV *val, U32 hash)
656 HV *keys = avhv_keys(av);
659 indsvp = hv_fetch(keys, key, klen, FALSE);
663 croak("Bad index while coercing array into hash");
/* Key not present: append a new index mapping. */
665 ind = AvFILL(av) + 1;
666 hv_store(keys, key, klen, newSViv(ind), hash);
668 return av_store(av, ind, val);
/*
 * avhv_store_ent -- pseudo-hash store by SV key with precomputed hash
 * (excerpt; lines elided). Mirrors avhv_store via the *_ent HV API.
 */
672 avhv_store_ent(AV *av, SV *keysv, SV *val, U32 hash)
674 HV *keys = avhv_keys(av);
678 he = hv_fetch_ent(keys, keysv, FALSE, hash);
680 ind = SvIV(HeVAL(he));
682 croak("Bad index while coercing array into hash");
/* Key not present: append a new index mapping. */
684 ind = AvFILL(av) + 1;
685 hv_store_ent(keys, keysv, newSViv(ind), hash);
687 return av_store(av, ind, val);
/* avhv_exists_ent -- existence test on the pseudo-hash key HV
 * (thin wrapper over hv_exists_ent). */
691 avhv_exists_ent(AV *av, SV *keysv, U32 hash)
693 HV *keys = avhv_keys(av);
694 return hv_exists_ent(keys, keysv, hash);
/* avhv_exists -- existence test on the pseudo-hash key HV
 * (thin wrapper over hv_exists). */
698 avhv_exists(AV *av, char *key, U32 klen)
700 HV *keys = avhv_keys(av);
701 return hv_exists(keys, key, klen);
704 /* avhv_delete leaks. Caller can re-index and compress if so desired. */
/*
 * avhv_delete -- remove a key from the index HV and return the array
 * element (excerpt; lines elided). The array slot itself is not
 * compacted -- hence the "leaks" note above.
 */
706 avhv_delete(AV *av, char *key, U32 klen, I32 flags)
708 HV *keys = avhv_keys(av);
713 sv = hv_delete(keys, key, klen, 0);
718 croak("Bad index while coercing array into hash");
719 svp = av_fetch(av, ind, FALSE);
722 if (flags & G_DISCARD) {
/* Not discarding: hand back a mortal copy of the element. */
726 sv = sv_2mortal(*svp);
732 /* avhv_delete_ent leaks. Caller can re-index and compress if so desired. */
/*
 * avhv_delete_ent -- SV-keyed variant of avhv_delete (excerpt; lines
 * elided). Same contract: the array slot is not compacted.
 */
734 avhv_delete_ent(AV *av, SV *keysv, I32 flags, U32 hash)
736 HV *keys = avhv_keys(av);
741 sv = hv_delete_ent(keys, keysv, 0, hash);
746 croak("Bad index while coercing array into hash");
747 svp = av_fetch(av, ind, FALSE);
750 if (flags & G_DISCARD) {
/* Not discarding: hand back a mortal copy of the element. */
754 sv = sv_2mortal(*svp);
/* avhv_iterinit -- start iteration over the pseudo-hash keys
 * (thin wrapper over hv_iterinit). */
761 avhv_iterinit(AV *av)
763 HV *keys = avhv_keys(av);
764 return hv_iterinit(keys);
/* avhv_iternext -- advance pseudo-hash key iteration
 * (thin wrapper over hv_iternext). */
768 avhv_iternext(AV *av)
770 HV *keys = avhv_keys(av);
771 return hv_iternext(keys);
/*
 * avhv_iterval -- map the current iterator entry to the array element
 * (excerpt; lines elided). The HV entry's value is the array index;
 * the fetch is lvalue so the slot is created if absent.
 */
775 avhv_iterval(AV *av, register HE *entry)
777 HV *keys = avhv_keys(av);
781 sv = hv_iterval(keys, entry);
784 croak("Bad index while coercing array into hash");
785 return *av_fetch(av, ind, TRUE);
/*
 * avhv_iternextsv -- combined iternext/iterkey/iterval for the
 * pseudo-hash (excerpt; lines elided). Returns the array element for
 * the next key, writing the key string and its length through
 * `key`/`retlen`.
 */
789 avhv_iternextsv(AV *av, char **key, I32 *retlen)
791 HV *keys = avhv_keys(av);
796 he = hv_iternext(keys);
799 *key = hv_iterkey(he, retlen);
800 sv = hv_iterval(keys, he);
803 croak("Bad index while coercing array into hash");
804 return *av_fetch(av, ind, TRUE);