/*
 * Copyright (c) 1991-1997, Larry Wall
 *
 * You may distribute under the terms of either the GNU General Public
 * License or the Artistic License, as specified in the README file.
 *
 * "...for the Entwives desired order, and plenty, and peace (by which they
 * meant that things should remain where they had set them)." --Treebeard
 */
/* av_reify: make a "fake" array (one that does not own references to its
 * elements, such as @_) real by taking a reference count on each element. */
    if (SvRMAGICAL(av) && mg_find((SV*)av,'P'))
	warn("av_reify called on tied array");
    /* ... */
    while (key > AvFILLp(av) + 1)
	AvARRAY(av)[--key] = &sv_undef;
    while (key) {
	sv = AvARRAY(av)[--key];
	if (sv != &sv_undef) {
	    (void)SvREFCNT_inc(sv);
	}
    }
    key = AvARRAY(av) - AvALLOC(av);
    while (key)
	AvALLOC(av)[--key] = &sv_undef;
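/*
 * Illustrative sketch (not part of av.c): av_fake() below builds an AV that
 * merely borrows its elements; av_reify() is what later gives the AV real
 * ownership by bumping each element's reference count.
 *
 *	SV *elems[2];
 *	elems[0] = newSViv(1);
 *	elems[1] = newSViv(2);
 *	av = av_fake(2, elems);		-- no reference counts taken yet
 *	av_reify(av);			-- now the AV owns its elements
 */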
av_extend(AV *av, I32 key)
{
    dTHR;			/* only necessary if we have to extend stack */

    /* A tied array delegates resizing to its EXTEND method. */
    if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
	/* ... */
	PUSHs(sv_2mortal(newSViv(key+1)));
	/* ... */
	perl_call_method("EXTEND", G_SCALAR|G_DISCARD);
	/* ... */
    }
    if (key > AvMAX(av)) {
	/* Reclaim any space that av_shift() left before the start
	 * of the in-use part of the buffer. */
	if (AvALLOC(av) != AvARRAY(av)) {
	    ary = AvALLOC(av) + AvFILLp(av) + 1;
	    tmp = AvARRAY(av) - AvALLOC(av);
	    Move(AvARRAY(av), AvALLOC(av), AvFILLp(av)+1, SV*);
	    /* ... */
	    SvPVX(av) = (char*)AvALLOC(av);
	    /* ... */
		ary[--tmp] = &sv_undef;
	    /* ... */
	    if (key > AvMAX(av) - 10) {
		newmax = key + AvMAX(av);
		/* ... */
	    }
	}
	/* ... */
#ifndef STRANGE_MALLOC
	/* ... */
#endif
	    /* With perl's own malloc, grow to fill the current bucket. */
	    newmax = malloced_size((void*)AvALLOC(av))/sizeof(SV*) - 1;
	    /* ... otherwise grow geometrically: */
	    newmax = key + AvMAX(av) / 5;
	    /* ... */
#if defined(STRANGE_MALLOC) || defined(MYMALLOC)
	    Renew(AvALLOC(av),newmax+1, SV*);
#else
	    /* Round the request up so it fills a whole malloc bucket. */
	    bytes = (newmax + 1) * sizeof(SV*);
#define MALLOC_OVERHEAD 16
	    tmp = MALLOC_OVERHEAD;
	    while (tmp - MALLOC_OVERHEAD < bytes)
		tmp += tmp;
	    tmp -= MALLOC_OVERHEAD;
	    /* ... */
	    assert(tmp > newmax);
	    /* ... */
	    New(2,ary, newmax+1, SV*);
	    Copy(AvALLOC(av), ary, AvMAX(av)+1, SV*);
	    /* ... hand a large old block to the SV arena, or just free it: */
	    offer_nice_chunk(AvALLOC(av), (AvMAX(av)+1) * sizeof(SV*));
	    /* ... */
	    Safefree(AvALLOC(av));
	    /* ... */
#endif
	    ary = AvALLOC(av) + AvMAX(av) + 1;
	    tmp = newmax - AvMAX(av);
	    if (av == curstack) {	/* Oops, grew stack (via av_store()?) */
		stack_sp = AvALLOC(av) + (stack_sp - stack_base);
		stack_base = AvALLOC(av);
		stack_max = stack_base + newmax;
	    }
	    /* ... first allocation for an array with no buffer yet: */
	    newmax = key < 3 ? 3 : key;
	    New(2,AvALLOC(av), newmax+1, SV*);
	    ary = AvALLOC(av) + 1;
	    /* ... */
	    AvALLOC(av)[0] = &sv_undef;	/* For the stacks */
	    /* ... fill the fresh slots with &sv_undef: */
	    ary[--tmp] = &sv_undef;
	    /* ... */
	    SvPVX(av) = (char*)AvALLOC(av);
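/*
 * Illustrative sketch (not part of av.c): pre-extending before a bulk
 * store avoids repeated reallocation of the SV* buffer.
 *
 *	AV *av = newAV();
 *	I32 i;
 *	av_extend(av, 999);		-- room for indices 0..999
 *	for (i = 0; i < 1000; i++)
 *	    av_store(av, i, newSViv(i));
 */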
av_fetch(register AV *av, I32 key, I32 lval)
{
    /* A negative key counts back from the end of the array. */
    if (key < 0)
	key += AvFILL(av) + 1;
    /* ... */
    if (SvRMAGICAL(av)) {
	if (mg_find((SV*)av,'P')) {
	    /* Tied array: fetch the element through its 'p' magic. */
	    mg_copy((SV*)av, sv, 0, key);
	    /* ... */
	}
    }
    if (key > AvFILLp(av)) {
	/* ... in lvalue context, autovivify the element: */
	    return av_store(av,key,sv);
    }
    if (AvARRAY(av)[key] == &sv_undef) {
	/* ... */
	    return av_store(av,key,sv);
    }
    else if (AvREIFY(av)
	&& (!AvARRAY(av)[key]	/* eg. @_ could have freed elts */
	    || SvTYPE(AvARRAY(av)[key]) == SVTYPEMASK)) {
	AvARRAY(av)[key] = &sv_undef;	/* 1/2 reify */
    }
    return &AvARRAY(av)[key];
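/*
 * Illustrative sketch (not part of av.c): av_fetch() returns SV**, or NULL
 * for a missing element when lval is FALSE; negative keys count from the
 * end of the array.
 *
 *	SV **svp = av_fetch(av, -1, FALSE);	-- last element, if any
 *	if (svp)
 *	    IV last = SvIV(*svp);		-- assumes it holds an IV
 */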
av_store(register AV *av, I32 key, SV *val)
{
    /* A negative key counts back from the end of the array. */
    if (key < 0)
	key += AvFILL(av) + 1;
    /* ... */
    if (SvREADONLY(av) && key >= AvFILL(av))
	croak(no_modify);
    if (SvRMAGICAL(av)) {
	if (mg_find((SV*)av,'P')) {
	    if (val != &sv_undef) {
		mg_copy((SV*)av, val, 0, key);
	    }
	    return 0;
	}
    }
    if (!AvREAL(av) && AvREIFY(av))
	av_reify(av);
    /* ... */
    if (AvFILLp(av) < key) {
	if (av == curstack && key > stack_sp - stack_base)
	    stack_sp = stack_base + key;	/* XPUSH in disguise */
	do
	    ary[++AvFILLp(av)] = &sv_undef;
	while (AvFILLp(av) < key);
    }
    else if (AvREAL(av))
	SvREFCNT_dec(ary[key]);
    /* ... */
    if (SvSMAGICAL(av)) {
	if (val != &sv_undef) {
	    MAGIC* mg = SvMAGIC(av);
	    sv_magic(val, (SV*)av, toLOWER(mg->mg_type), 0, key);
	}
    }
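/*
 * Illustrative sketch (not part of av.c): av_store() takes ownership of one
 * reference to val on success and returns NULL on failure (e.g. for a tied
 * array), in which case the caller should drop its own reference.
 *
 *	SV *sv = newSViv(42);
 *	if (!av_store(av, 5, sv))
 *	    SvREFCNT_dec(sv);
 */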
    /* newAV: allocate a fresh, empty array. */
    av = (AV*)NEWSV(3,0);
    sv_upgrade((SV *)av, SVt_PVAV);
    /* ... */
    AvMAX(av) = AvFILLp(av) = -1;
av_make(register I32 size, register SV **strp)
{
    av = (AV*)NEWSV(8,0);
    sv_upgrade((SV *) av,SVt_PVAV);
    AvFLAGS(av) = AVf_REAL;
    if (size) {		/* `defined' was returning undef for size==0 anyway. */
	/* ... allocate the SV* buffer ... */
	SvPVX(av) = (char*)ary;
	AvFILLp(av) = size - 1;
	AvMAX(av) = size - 1;
	for (i = 0; i < size; i++) {
	    /* each element is a fresh copy of the corresponding input SV */
	    sv_setsv(ary[i], *strp);
	    /* ... */
	}
    }
    return av;
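/*
 * Illustrative sketch (not part of av.c): av_make() copies the SVs it is
 * handed, so the caller keeps ownership of the originals.
 *
 *	SV *src[2];
 *	src[0] = sv_2mortal(newSViv(1));
 *	src[1] = sv_2mortal(newSViv(2));
 *	AV *copy = av_make(2, src);	-- holds fresh copies of src[0..1]
 */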
/* av_fake: build an array that merely borrows its elements (no reference
 * counts taken); AVf_REIFY marks it so av_reify() can take ownership later. */
av_fake(register I32 size, register SV **strp)
{
    av = (AV*)NEWSV(9,0);
    sv_upgrade((SV *)av, SVt_PVAV);
    New(4,ary,size+1,SV*);
    /* ... */
    Copy(strp,ary,size,SV*);
    AvFLAGS(av) = AVf_REIFY;
    SvPVX(av) = (char*)ary;
    AvFILLp(av) = size - 1;
    AvMAX(av) = size - 1;
av_clear(register AV *av)
{
    if (SvREFCNT(av) <= 0) {
	warn("Attempt to clear deleted array");
    }
    /* ... */
    /* Give any tie a chance to cleanup first */
    /* ... */
    /* Drop every element but keep the allocated buffer. */
	key = AvFILLp(av) + 1;
	while (key) {
	    SvREFCNT_dec(ary[--key]);
	    ary[key] = &sv_undef;
	}
    if ((key = AvARRAY(av) - AvALLOC(av))) {	/* assignment intended */
	SvPVX(av) = (char*)AvALLOC(av);
    }
av_undef(register AV *av)
{
    /* Give any tie a chance to cleanup first */
    if (SvRMAGICAL(av) && mg_find((SV*)av,'P'))
	av_fill(av, -1);   /* mg_clear() ? */
    /* ... drop the elements, then the buffer itself: */
	key = AvFILLp(av) + 1;
	while (key)
	    SvREFCNT_dec(AvARRAY(av)[--key]);
    Safefree(AvALLOC(av));
    /* ... */
    AvMAX(av) = AvFILLp(av) = -1;
    /* ... */
	SvREFCNT_dec(AvARYLEN(av));
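/*
 * Illustrative note (not part of av.c): av_clear() corresponds to `@a = ()'
 * and keeps the allocated SV* buffer for reuse, while av_undef() corresponds
 * to `undef @a' and frees the buffer as well.
 *
 *	av_clear(av);		-- empty, storage kept
 *	av_undef(av);		-- empty, storage released
 */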
av_push(register AV *av, SV *val)
{
    /* A tied array delegates to its PUSH method. */
    if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
	/* ... */
	perl_call_method("PUSH", G_SCALAR|G_DISCARD);
    }
    av_store(av,AvFILLp(av)+1,val);
av_pop(register AV *av)
{
    if (!av || AvFILL(av) < 0)
	return &sv_undef;
    /* A tied array delegates to its POP method. */
    if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
	/* ... */
	if (perl_call_method("POP", G_SCALAR)) {
	    retval = newSVsv(*stack_sp--);
	}
	/* ... */
    }
    retval = AvARRAY(av)[AvFILLp(av)];
    AvARRAY(av)[AvFILLp(av)--] = &sv_undef;
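/*
 * Illustrative sketch (not part of av.c): stack-style use of an AV.
 *
 *	av_push(av, newSViv(42));	-- the AV takes ownership of the SV
 *	sv = av_pop(av);		-- the caller now owns sv
 *	SvREFCNT_dec(sv);
 */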
av_unshift(register AV *av, register I32 num)
    /* A tied array delegates to its UNSHIFT method. */
    if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
	/* ... */
	perl_call_method("UNSHIFT", G_SCALAR|G_DISCARD);
    }
    if (!AvREAL(av) && AvREIFY(av))
	av_reify(av);
    /* Use any free slots left at the front by av_shift() first. */
    i = AvARRAY(av) - AvALLOC(av);
    /* ... */
	SvPVX(av) = (char*)(AvARRAY(av) - i);
    /* ... then shift the existing elements up to make room: */
	av_extend(av, i + num);
	Move(ary, ary + num, i + 1, SV*);
	do {
	    ary[--num] = &sv_undef;
	} while (num);
av_shift(register AV *av)
{
    if (!av || AvFILL(av) < 0)
	return &sv_undef;
    /* A tied array delegates to its SHIFT method. */
    if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
	/* ... */
	if (perl_call_method("SHIFT", G_SCALAR)) {
	    retval = newSVsv(*stack_sp--);
	}
	/* ... */
    }
    retval = *AvARRAY(av);
    /* ... */
    *AvARRAY(av) = &sv_undef;
    SvPVX(av) = (char*)(AvARRAY(av) + 1);	/* cheaper than moving elements */
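/*
 * Illustrative sketch (not part of av.c): queue-style use of an AV.
 *
 *	av_unshift(av, 1);		-- open one undef slot at index 0
 *	av_store(av, 0, newSVpv("x", 1));
 *	sv = av_shift(av);		-- caller owns the returned SV
 */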
av_len(register AV *av)
{
    return AvFILL(av);		/* highest index, i.e. $#array */
}
/* av_fill: the C-level equivalent of `$#array = fill'. */
av_fill(register AV *av, I32 fill)
    if (!av)
	croak("panic: null array");
    /* A tied array delegates to its STORESIZE method. */
    if (SvRMAGICAL(av) && (mg = mg_find((SV*)av,'P'))) {
	/* ... */
	PUSHs(sv_2mortal(newSViv(fill+1)));
	perl_call_method("STORESIZE", G_SCALAR|G_DISCARD);
	/* ... */
    }
    if (fill <= AvMAX(av)) {
	I32 key = AvFILLp(av);
	SV** ary = AvARRAY(av);
	/* shrink: free the dropped elements ... */
	    SvREFCNT_dec(ary[key]);
	    ary[key--] = &sv_undef;
	/* ... or grow within the existing allocation: */
	    ary[++key] = &sv_undef;
    }
    else
	(void)av_store(av,fill,&sv_undef);
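/*
 * Illustrative sketch (not part of av.c): av_fill(av, n) behaves like
 * `$#array = n' -- growing pads with &sv_undef, shrinking releases the
 * dropped elements.
 *
 *	av_fill(av, 9);			-- indices 0..9 now exist
 *	av_fill(av, -1);		-- empties the array
 */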
    /* avhv_keys: element 0 of a pseudo-hash must be a hash-ref index. */
    keysp = av_fetch(av, 0, FALSE);
    /* ... */
	if (SvTYPE(sv) == SVt_PVHV)
	    return (HV*)sv;
    croak("Can't coerce array into hash");
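/*
 * Illustrative note (not part of av.c): in a pseudo-hash, element 0 of the
 * AV is a reference to a hash that maps field names to array indices; the
 * avhv_* functions below translate hash-style accesses into array accesses.
 *
 *	$ph = [ { a => 1, b => 2 }, "valA", "valB" ];
 *	$ph->{a};		# really $ph->[1], i.e. "valA"
 */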
avhv_fetch(AV *av, char *key, U32 klen, I32 lval)
{
    HV *keys = avhv_keys(av);

    /* Look the field name up in the index hash ... */
    indsvp = hv_fetch(keys, key, klen, FALSE);
    /* ... */
	    croak("Bad index while coercing array into hash");
    /* ... unknown field: in lvalue context, append a new slot. */
	ind = AvFILL(av) + 1;
	hv_store(keys, key, klen, newSViv(ind), 0);
    return av_fetch(av, ind, lval);
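/*
 * Illustrative sketch (not part of av.c): C-level pseudo-hash access.
 *
 *	SV **svp = avhv_fetch(av, "name", 4, FALSE);
 *	if (svp)
 *	    do_something_with(*svp);	-- hypothetical helper
 */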
/* avhv_fetch_ent: as avhv_fetch(), but keyed by an SV. */
avhv_fetch_ent(AV *av, SV *keysv, I32 lval, U32 hash)
{
    HV *keys = avhv_keys(av);

    he = hv_fetch_ent(keys, keysv, FALSE, hash);
    if (he) {
	ind = SvIV(HeVAL(he));
	if (ind < 1)
	    croak("Bad index while coercing array into hash");
    } else {
	/* ... */
	ind = AvFILL(av) + 1;
	hv_store_ent(keys, keysv, newSViv(ind), 0);
    }
    return av_fetch(av, ind, lval);
avhv_store(AV *av, char *key, U32 klen, SV *val, U32 hash)
{
    HV *keys = avhv_keys(av);
    indsvp = hv_fetch(keys, key, klen, FALSE);
    if (indsvp) {
	ind = SvIV(*indsvp);
	if (ind < 1)
	    croak("Bad index while coercing array into hash");
    } else {
	/* unknown field: append a new slot and remember its index */
	ind = AvFILL(av) + 1;
	hv_store(keys, key, klen, newSViv(ind), hash);
    }
    return av_store(av, ind, val);
avhv_store_ent(AV *av, SV *keysv, SV *val, U32 hash)
{
    HV *keys = avhv_keys(av);

    he = hv_fetch_ent(keys, keysv, FALSE, hash);
    if (he) {
	ind = SvIV(HeVAL(he));
	if (ind < 1)
	    croak("Bad index while coercing array into hash");
    } else {
	ind = AvFILL(av) + 1;
	hv_store_ent(keys, keysv, newSViv(ind), hash);
    }
    return av_store(av, ind, val);
avhv_exists_ent(AV *av, SV *keysv, U32 hash)
{
    HV *keys = avhv_keys(av);
    return hv_exists_ent(keys, keysv, hash);
}

avhv_exists(AV *av, char *key, U32 klen)
{
    HV *keys = avhv_keys(av);
    return hv_exists(keys, key, klen);
}
/* avhv_delete leaks.  Caller can re-index and compress if so desired. */
avhv_delete(AV *av, char *key, U32 klen, I32 flags)
{
    HV *keys = avhv_keys(av);

    /* Remove the field from the index hash, then detach its array slot. */
    sv = hv_delete(keys, key, klen, 0);
    /* ... */
	croak("Bad index while coercing array into hash");
    svp = av_fetch(av, ind, FALSE);
    /* ... */
    if (flags & G_DISCARD) {
	/* ... discard the value ... */
    } else {
	sv = sv_2mortal(*svp);
    }
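/*
 * Clarifying note (not part of av.c): "leaks" above means the deleted
 * field's array slot is simply abandoned rather than reused, so the AV
 * never shrinks; a caller that cares can rebuild the index hash and
 * compact the AV itself.
 */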
/* avhv_delete_ent leaks.  Caller can re-index and compress if so desired. */
avhv_delete_ent(AV *av, SV *keysv, I32 flags, U32 hash)
{
    HV *keys = avhv_keys(av);

    sv = hv_delete_ent(keys, keysv, 0, hash);
    /* ... */
	croak("Bad index while coercing array into hash");
    svp = av_fetch(av, ind, FALSE);
    /* ... */
    if (flags & G_DISCARD) {
	/* ... */
    } else {
	sv = sv_2mortal(*svp);
    }
avhv_iternext(AV *av)
{
    HV *keys = avhv_keys(av);
    return hv_iternext(keys);
}

avhv_iterval(AV *av, register HE *entry)
{
    HV *keys = avhv_keys(av);

    sv = hv_iterval(keys, entry);
    ind = SvIV(sv);
    if (ind < 1)
	croak("Bad index while coercing array into hash");
    return *av_fetch(av, ind, TRUE);
}
avhv_iternextsv(AV *av, char **key, I32 *retlen)
{
    HV *keys = avhv_keys(av);

    he = hv_iternext(keys);
    /* ... */
    *key = hv_iterkey(he, retlen);
    sv = hv_iterval(keys, he);
    ind = SvIV(sv);
    if (ind < 1)
	croak("Bad index while coercing array into hash");
    return *av_fetch(av, ind, TRUE);
}
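/*
 * Illustrative sketch (not part of av.c): iterating a pseudo-hash at the
 * C level, assuming iteration on the index hash has been initialized
 * first with hv_iterinit(avhv_keys(av)).
 *
 *	char *k;
 *	I32 klen;
 *	SV *v;
 *	while ((v = avhv_iternextsv(av, &k, &klen)))
 *	    printf("%s\n", k);
 */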