/*
 * Copyright (c) 1991-1997, Larry Wall
 *
 * You may distribute under the terms of either the GNU General Public
 * License or the Artistic License, as specified in the README file.
 */

/*
 * "...for the Entwives desired order, and plenty, and peace (by which they
 * meant that things should remain where they had set them)." --Treebeard
 */
/*
 * av_reify (fragment): turn a "fake" AV -- one whose element pointers are
 * borrowed (e.g. @_ aliasing caller values) -- into a real array that owns
 * refcounts on its elements.
 * NOTE(review): this file is a sampled excerpt; original lines between the
 * numbered fragments below are elided, so the statements are not contiguous.
 */
/* reifying a tied ('P'-magic) array makes no sense -- warn, presumably bail */
27 if (SvRMAGICAL(av) && mg_find((SV*)av,'P'))
28 warn("av_reify called on tied array");
/* fill any slots past the current fill pointer with the shared undef */
31 while (key > AvFILLp(av) + 1)
32 AvARRAY(av)[--key] = &sv_undef;
34 sv = AvARRAY(av)[--key];
/* take ownership: bump the refcount of every live (non-undef) element */
36 if (sv != &sv_undef) {
38 (void)SvREFCNT_inc(sv);
/* also undef-fill the slack slots before the current array base
 * (AvARRAY may point past AvALLOC after shifts) */
41 key = AvARRAY(av) - AvALLOC(av);
43 AvALLOC(av)[--key] = &sv_undef;
/*
 * av_extend (fragment): ensure the array can hold an element at index 'key',
 * growing the underlying SV* allocation if necessary.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
48 av_extend(AV *av, I32 key)
50 dTHR; /* only necessary if we have to extend stack */
/* tied array: delegate growth to the tie's EXTEND method instead */
52 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
60 PUSHs(sv_2mortal(newSViv(key+1)));
62 perl_call_method("EXTEND", G_SCALAR|G_DISCARD);
/* real array: grow only when key exceeds the allocated maximum */
68 if (key > AvMAX(av)) {
/* first reclaim any slack at the front left by previous shifts:
 * slide the live elements back down to the allocation base */
73 if (AvALLOC(av) != AvARRAY(av)) {
74 ary = AvALLOC(av) + AvFILLp(av) + 1;
75 tmp = AvARRAY(av) - AvALLOC(av);
76 Move(AvARRAY(av), AvALLOC(av), AvFILLp(av)+1, SV*);
78 SvPVX(av) = (char*)AvALLOC(av);
/* undef-fill the vacated trailing slots */
81 ary[--tmp] = &sv_undef;
/* still too small (within 10 of max): reallocate with headroom */
84 if (key > AvMAX(av) - 10) {
85 newmax = key + AvMAX(av);
91 #ifndef STRANGE_MALLOC
/* plain malloc: grow geometrically (key + 20% of current max) */
95 newmax = key + AvMAX(av) / 5;
98 Renew(AvALLOC(av),newmax+1, SV*);
/* round the request up so (size + malloc overhead) lands on a
 * power-of-two-ish bucket; 16 here looks like an assumed per-chunk
 * malloc bookkeeping cost -- TODO confirm against the malloc in use */
100 bytes = (newmax + 1) * sizeof(SV*);
101 #define MALLOC_OVERHEAD 16
102 tmp = MALLOC_OVERHEAD;
103 while (tmp - MALLOC_OVERHEAD < bytes)
105 tmp -= MALLOC_OVERHEAD;
107 assert(tmp > newmax);
/* allocate the new vector, copy the old contents over, free old */
109 New(2,ary, newmax+1, SV*);
110 Copy(AvALLOC(av), ary, AvMAX(av)+1, SV*);
112 offer_nice_chunk(AvALLOC(av), (AvMAX(av)+1) * sizeof(SV*));
114 Safefree(AvALLOC(av));
/* undef-fill the newly added slots (tmp counts them down below) */
117 ary = AvALLOC(av) + AvMAX(av) + 1;
118 tmp = newmax - AvMAX(av);
119 if (av == curstack) { /* Oops, grew stack (via av_store()?) */
120 stack_sp = AvALLOC(av) + (stack_sp - stack_base);
121 stack_base = AvALLOC(av);
122 stack_max = stack_base + newmax;
/* array had no storage at all yet: allocate a minimum of 4 slots */
126 newmax = key < 4 ? 4 : key;
127 New(2,AvALLOC(av), newmax+1, SV*);
128 ary = AvALLOC(av) + 1;
130 AvALLOC(av)[0] = &sv_undef; /* For the stacks */
134 ary[--tmp] = &sv_undef;
137 SvPVX(av) = (char*)AvALLOC(av);
/*
 * av_fetch (fragment): return a pointer to the SV* slot for index 'key',
 * or (presumably) NULL when absent and !lval; with lval true, missing
 * elements are created via av_store so the caller can assign through them.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
144 av_fetch(register AV *av, I32 key, I32 lval)
/* negative index counts back from the end of the array */
152 key += AvFILL(av) + 1;
/* tied array: fetch goes through 'P' magic on a fresh mortal-ish sv */
157 if (SvRMAGICAL(av)) {
158 if (mg_find((SV*)av,'P')) {
161 mg_copy((SV*)av, sv, 0, key);
/* index beyond fill: only lval fetches autovivify (via av_store) */
167 if (key > AvFILLp(av)) {
174 return av_store(av,key,sv);
176 if (AvARRAY(av)[key] == &sv_undef) {
180 return av_store(av,key,sv);
/* @_-style arrays may hold freed elements; patch them to undef */
185 && (!AvARRAY(av)[key] /* eg. @_ could have freed elts */
186 || SvTYPE(AvARRAY(av)[key]) == SVTYPEMASK)) {
187 AvARRAY(av)[key] = &sv_undef; /* 1/2 reify */
190 return &AvARRAY(av)[key];
/*
 * av_store (fragment): store 'val' at index 'key', taking ownership of
 * val's reference count (caller convention in perl's AV API -- confirm
 * against perlguts).  Handles negative indices, read-only and tied arrays.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
194 av_store(register AV *av, I32 key, SV *val)
/* negative index counts back from the end */
206 key += AvFILL(av) + 1;
/* extending a read-only array is an error */
211 if (SvREADONLY(av) && key >= AvFILL(av))
/* tied array: forward the store through 'P' magic */
214 if (SvRMAGICAL(av)) {
215 if (mg_find((SV*)av,'P')) {
216 if (val != &sv_undef) {
217 mg_copy((SV*)av, val, 0, key);
/* fake arrays must be reified before we own element pointers */
223 if (!AvREAL(av) && AvREIFY(av))
/* storing past the fill pointer: undef-fill the gap first */
228 if (AvFILLp(av) < key) {
/* storing to the stack AV must also advance stack_sp */
231 if (av == curstack && key > stack_sp - stack_base)
232 stack_sp = stack_base + key; /* XPUSH in disguise */
234 ary[++AvFILLp(av)] = &sv_undef;
235 while (AvFILLp(av) < key);
/* drop our reference to whatever previously occupied the slot */
240 SvREFCNT_dec(ary[key]);
/* set-magic containers propagate magic onto the stored value */
242 if (SvSMAGICAL(av)) {
243 if (val != &sv_undef) {
244 MAGIC* mg = SvMAGIC(av);
245 sv_magic(val, (SV*)av, toLOWER(mg->mg_type), 0, key);
/* Fragment of an AV constructor (presumably newAV -- the signature is
 * elided in this excerpt): allocate a fresh SV, upgrade it to an array,
 * and mark it empty (max index and fill pointer both -1). */
257 av = (AV*)NEWSV(3,0);
258 sv_upgrade((SV *)av, SVt_PVAV);
262 AvMAX(av) = AvFILLp(av) = -1;
/*
 * av_make (fragment): build a new real AV containing copies of the 'size'
 * SVs pointed to by 'strp' (each element is a fresh SV set via sv_setsv).
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
267 av_make(register I32 size, register SV **strp)
273 av = (AV*)NEWSV(8,0);
274 sv_upgrade((SV *) av,SVt_PVAV);
/* real array: it owns refcounts on its elements */
275 AvFLAGS(av) = AVf_REAL;
276 if (size) { /* `defined' was returning undef for size==0 anyway. */
279 SvPVX(av) = (char*)ary;
280 AvFILLp(av) = size - 1;
281 AvMAX(av) = size - 1;
/* copy (not alias) each source SV into the new array */
282 for (i = 0; i < size; i++) {
285 sv_setsv(ary[i], *strp);
/*
 * av_fake (fragment): build a "fake" AV that aliases the 'size' SV
 * pointers at 'strp' without copying the SVs or taking refcounts
 * (used e.g. for @_); AVf_REIFY marks it for later reification if the
 * array needs to own its elements.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
293 av_fake(register I32 size, register SV **strp)
298 av = (AV*)NEWSV(9,0);
299 sv_upgrade((SV *)av, SVt_PVAV);
300 New(4,ary,size+1,SV*);
/* shallow-copy only the pointers, not the SVs themselves */
302 Copy(strp,ary,size,SV*);
303 AvFLAGS(av) = AVf_REIFY;
304 SvPVX(av) = (char*)ary;
305 AvFILLp(av) = size - 1;
306 AvMAX(av) = size - 1;
/*
 * av_clear (fragment): empty the array -- drop a reference to every
 * element and reset it to zero length -- while keeping the allocated
 * SV* storage for reuse.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
316 av_clear(register AV *av)
322 if (SvRCNT? -- see below */
/*
 * av_undef (fragment): completely undefine the array -- release every
 * element, free the underlying SV* storage, and reset to empty.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
354 av_undef(register AV *av)
362 /* Give any tie a chance to cleanup first */
363 if (SvRMAGICAL(av) && mg_find((SV*)av,'P'))
364 av_fill(av, -1); /* mg_clear() ? */
/* drop a reference to each element, back to front */
367 key = AvFILLp(av) + 1;
369 SvREFCNT_dec(AvARRAY(av)[--key]);
/* unlike av_clear, the storage itself is freed here */
371 Safefree(AvALLOC(av));
374 AvMAX(av) = AvFILLp(av) = -1;
/* also release the cached $#array SV, if any */
376 SvREFCNT_dec(AvARYLEN(av));
/*
 * av_push (fragment): append 'val' to the end of the array.  Tied arrays
 * dispatch to their PUSH method; otherwise this is a store at fill+1.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
382 av_push(register AV *av, SV *val)
390 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
399 perl_call_method("PUSH", G_SCALAR|G_DISCARD);
404 av_store(av,AvFILLp(av)+1,val);
/*
 * av_pop (fragment): remove and return the last element (presumably
 * &sv_undef / NULL on empty -- the return for the empty case is elided).
 * Tied arrays dispatch to their POP method.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
408 av_pop(register AV *av)
413 if (!av || AvFILL(av) < 0)
417 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
/* copy the tie method's result off the perl stack */
424 if (perl_call_method("POP", G_SCALAR)) {
425 retval = newSVsv(*stack_sp--);
/* plain array: hand the element (and its refcount) to the caller,
 * leaving the shared undef in the vacated slot */
433 retval = AvARRAY(av)[AvFILLp(av)];
434 AvARRAY(av)[AvFILLp(av)--] = &sv_undef;
/*
 * av_unshift (fragment): open up 'num' undef slots at the front of the
 * array.  Uses slack space before AvARRAY when available; otherwise
 * extends and Moves the elements up.  Tied arrays dispatch to UNSHIFT.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
441 av_unshift(register AV *av, register I32 num)
452 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
463 perl_call_method("UNSHIFT", G_SCALAR|G_DISCARD);
/* fake arrays must own their elements before we shuffle pointers */
469 if (!AvREAL(av) && AvREIFY(av))
/* i = slack slots available at the front of the allocation */
471 i = AvARRAY(av) - AvALLOC(av);
479 SvPVX(av) = (char*)(AvARRAY(av) - i);
/* not enough slack: grow, then slide existing elements upward */
483 av_extend(av, i + num);
486 Move(ary, ary + num, i + 1, SV*);
/* undef-fill the newly opened leading slots */
488 ary[--num] = &sv_undef;
/*
 * av_shift (fragment): remove and return the first element.  Instead of
 * moving the remaining elements, the array base pointer (SvPVX) is simply
 * advanced by one, leaving slack at the front.  Tied arrays use SHIFT.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
494 av_shift(register AV *av)
499 if (!av || AvFILL(av) < 0)
503 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
510 if (perl_call_method("SHIFT", G_SCALAR)) {
511 retval = newSVsv(*stack_sp--);
/* hand the first element to the caller and bump the base pointer */
519 retval = *AvARRAY(av);
521 *AvARRAY(av) = &sv_undef;
522 SvPVX(av) = (char*)(AvARRAY(av) + 1);
/* av_len: report the array's last index ($#array semantics -- presumably
 * returns AvFILL(av); the body is elided in this excerpt). */
531 av_len(register AV *av)
/*
 * av_fill (fragment): set the array's fill pointer (last index) to 'fill',
 * shrinking (releasing dropped elements) or growing (undef-padding) as
 * needed.  Tied arrays dispatch to STORESIZE.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
537 av_fill(register AV *av, I32 fill)
541 croak("panic: null array");
544 if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
/* STORESIZE takes a count, hence fill+1 */
552 PUSHs(sv_2mortal(newSViv(fill+1)));
554 perl_call_method("STORESIZE", G_SCALAR|G_DISCARD);
/* target fits in current allocation: adjust in place */
560 if (fill <= AvMAX(av)) {
561 I32 key = AvFILLp(av);
562 SV** ary = AvARRAY(av);
/* shrinking: release each element beyond the new fill */
566 SvREFCNT_dec(ary[key]);
567 ary[key--] = &sv_undef;
/* growing: undef-pad up to the new fill */
572 ary[++key] = &sv_undef;
/* otherwise let av_store do the extend-and-pad work */
580 (void)av_store(av,fill,&sv_undef);
/* Fragment of the pseudo-hash key lookup (presumably avhv_keys -- the
 * signature is elided): element 0 of a pseudo-hash AV must hold an HV
 * mapping key names to element indices; anything else is a coercion error. */
590 keysp = av_fetch(av, 0, FALSE);
597 if (SvTYPE(sv) == SVt_PVHV)
602 croak("Can't coerce array into hash");
/*
 * avhv_fetch (fragment): pseudo-hash fetch by string key -- look the key
 * up in the index hash (element 0); on a miss, assign the next free array
 * index to the key, then fetch that array element.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
607 avhv_fetch(AV *av, char *key, U32 klen, I32 lval)
610 HV *keys = avhv_keys(av);
613 indsvp = hv_fetch(keys, key, klen, FALSE);
/* an index <= 0 would collide with the key hash in slot 0 */
617 croak("Bad index while coercing array into hash");
/* unknown key: append a fresh index mapping for it */
622 ind = AvFILL(av) + 1;
623 hv_store(keys, key, klen, newSViv(ind), 0);
625 return av_fetch(av, ind, lval);
/*
 * avhv_fetch_ent (fragment): SV-keyed variant of avhv_fetch -- resolve
 * the key through the index hash, auto-assigning a new index on a miss,
 * then fetch the corresponding array element.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
629 avhv_fetch_ent(AV *av, SV *keysv, I32 lval, U32 hash)
632 HV *keys = avhv_keys(av);
636 he = hv_fetch_ent(keys, keysv, FALSE, hash);
638 ind = SvIV(HeVAL(he));
640 croak("Bad index while coercing array into hash");
/* unknown key: append a fresh index mapping for it */
645 ind = AvFILL(av) + 1;
646 hv_store_ent(keys, keysv, newSViv(ind), 0);
648 return av_fetch(av, ind, lval);
/*
 * avhv_store (fragment): pseudo-hash store by string key -- resolve (or
 * create) the key's array index via the index hash, then store 'val' at
 * that array slot.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
652 avhv_store(AV *av, char *key, U32 klen, SV *val, U32 hash)
655 HV *keys = avhv_keys(av);
658 indsvp = hv_fetch(keys, key, klen, FALSE);
662 croak("Bad index while coercing array into hash");
/* unknown key: append a fresh index mapping for it */
664 ind = AvFILL(av) + 1;
665 hv_store(keys, key, klen, newSViv(ind), hash);
667 return av_store(av, ind, val);
/*
 * avhv_store_ent (fragment): SV-keyed variant of avhv_store -- resolve
 * (or create) the key's index, then store 'val' at that array slot.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
671 avhv_store_ent(AV *av, SV *keysv, SV *val, U32 hash)
673 HV *keys = avhv_keys(av);
677 he = hv_fetch_ent(keys, keysv, FALSE, hash);
679 ind = SvIV(HeVAL(he));
681 croak("Bad index while coercing array into hash");
/* unknown key: append a fresh index mapping for it */
683 ind = AvFILL(av) + 1;
684 hv_store_ent(keys, keysv, newSViv(ind), hash);
686 return av_store(av, ind, val);
/* avhv_exists_ent: pseudo-hash exists (SV key) -- existence is decided
 * purely by the index hash, not by the array contents. */
690 avhv_exists_ent(AV *av, SV *keysv, U32 hash)
692 HV *keys = avhv_keys(av);
693 return hv_exists_ent(keys, keysv, hash);
/* avhv_exists: pseudo-hash exists (string key) -- delegates to the
 * index hash held in array element 0. */
697 avhv_exists(AV *av, char *key, U32 klen)
699 HV *keys = avhv_keys(av);
700 return hv_exists(keys, key, klen);
703 /* avhv_delete leaks. Caller can re-index and compress if so desired. */
/*
 * avhv_delete (fragment): delete a pseudo-hash entry by string key --
 * removes the key from the index hash and returns the array element
 * (mortalized, or discarded under G_DISCARD).  The array slot itself is
 * NOT compacted, hence the "leaks" note above.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
705 avhv_delete(AV *av, char *key, U32 klen, I32 flags)
707 HV *keys = avhv_keys(av);
712 sv = hv_delete(keys, key, klen, 0);
717 croak("Bad index while coercing array into hash");
718 svp = av_fetch(av, ind, FALSE);
721 if (flags & G_DISCARD) {
/* return the value with mortal lifetime so the caller need not free it */
725 sv = sv_2mortal(*svp);
731 /* avhv_delete_ent leaks. Caller can re-index and compress if so desired. */
/*
 * avhv_delete_ent (fragment): SV-keyed variant of avhv_delete -- remove
 * the key from the index hash and return (or discard) the array element;
 * the array slot is not reclaimed.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
733 avhv_delete_ent(AV *av, SV *keysv, I32 flags, U32 hash)
735 HV *keys = avhv_keys(av);
740 sv = hv_delete_ent(keys, keysv, 0, hash);
745 croak("Bad index while coercing array into hash");
746 svp = av_fetch(av, ind, FALSE);
749 if (flags & G_DISCARD) {
/* return the value with mortal lifetime so the caller need not free it */
753 sv = sv_2mortal(*svp);
/* avhv_iterinit: begin iteration over a pseudo-hash -- iteration order
 * and state come from the index hash in element 0. */
760 avhv_iterinit(AV *av)
762 HV *keys = avhv_keys(av);
763 return hv_iterinit(keys);
/* avhv_iternext: advance pseudo-hash iteration -- delegates to the
 * index hash's iterator. */
767 avhv_iternext(AV *av)
769 HV *keys = avhv_keys(av);
770 return hv_iternext(keys);
/*
 * avhv_iterval (fragment): map an iteration entry to its value -- the
 * hash entry's value is the array index; fetch (lvalue, so it always
 * exists) and return that array element.
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
774 avhv_iterval(AV *av, register HE *entry)
776 HV *keys = avhv_keys(av);
780 sv = hv_iterval(keys, entry);
783 croak("Bad index while coercing array into hash");
784 return *av_fetch(av, ind, TRUE);
/*
 * avhv_iternextsv (fragment): combined iteration step -- advance the
 * index-hash iterator, report the key via *key/*retlen, and return the
 * corresponding array element (presumably NULL at end of iteration --
 * the end-of-iteration return is elided here).
 * NOTE(review): excerpt -- intervening original lines are elided.
 */
788 avhv_iternextsv(AV *av, char **key, I32 *retlen)
790 HV *keys = avhv_keys(av);
795 he = hv_iternext(keys);
798 *key = hv_iterkey(he, retlen);
/* entry value is the array index of the element for this key */
799 sv = hv_iterval(keys, he);
802 croak("Bad index while coercing array into hash");
803 return *av_fetch(av, ind, TRUE);