LCOV - code coverage report
Current view: js/src/jsgc.h (source / functions)
Test: app.info          Date: 2012-04-07
              Found     Hit     Coverage
Lines:          533     478       89.7 %
Functions:      126     114       90.5 %

       1                 : /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
       2                 :  *
       3                 :  * ***** BEGIN LICENSE BLOCK *****
       4                 :  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
       5                 :  *
       6                 :  * The contents of this file are subject to the Mozilla Public License Version
       7                 :  * 1.1 (the "License"); you may not use this file except in compliance with
       8                 :  * the License. You may obtain a copy of the License at
       9                 :  * http://www.mozilla.org/MPL/
      10                 :  *
      11                 :  * Software distributed under the License is distributed on an "AS IS" basis,
      12                 :  * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
      13                 :  * for the specific language governing rights and limitations under the
      14                 :  * License.
      15                 :  *
      16                 :  * The Original Code is Mozilla Communicator client code, released
      17                 :  * March 31, 1998.
      18                 :  *
      19                 :  * The Initial Developer of the Original Code is
      20                 :  * Netscape Communications Corporation.
      21                 :  * Portions created by the Initial Developer are Copyright (C) 1998
      22                 :  * the Initial Developer. All Rights Reserved.
      23                 :  *
      24                 :  * Contributor(s):
      25                 :  *
      26                 :  * Alternatively, the contents of this file may be used under the terms of
      27                 :  * either of the GNU General Public License Version 2 or later (the "GPL"),
      28                 :  * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
      29                 :  * in which case the provisions of the GPL or the LGPL are applicable instead
      30                 :  * of those above. If you wish to allow use of your version of this file only
      31                 :  * under the terms of either the GPL or the LGPL, and not to allow others to
      32                 :  * use your version of this file under the terms of the MPL, indicate your
      33                 :  * decision by deleting the provisions above and replace them with the notice
      34                 :  * and other provisions required by the GPL or the LGPL. If you do not delete
      35                 :  * the provisions above, a recipient may use your version of this file under
      36                 :  * the terms of any one of the MPL, the GPL or the LGPL.
      37                 :  *
      38                 :  * ***** END LICENSE BLOCK ***** */
      39                 : 
      40                 : #ifndef jsgc_h___
      41                 : #define jsgc_h___
      42                 : 
      43                 : /*
      44                 :  * JS Garbage Collector.
      45                 :  */
      46                 : #include <setjmp.h>
      47                 : 
      48                 : #include "mozilla/Util.h"
      49                 : 
      50                 : #include "jsalloc.h"
      51                 : #include "jstypes.h"
      52                 : #include "jsprvtd.h"
      53                 : #include "jspubtd.h"
      54                 : #include "jslock.h"
      55                 : #include "jsutil.h"
      56                 : #include "jsversion.h"
      57                 : #include "jscell.h"
      58                 : 
      59                 : #include "ds/BitArray.h"
      60                 : #include "gc/Statistics.h"
      61                 : #include "js/HashTable.h"
      62                 : #include "js/Vector.h"
      63                 : #include "js/TemplateLib.h"
      64                 : 
      65                 : struct JSCompartment;
      66                 : 
      67                 : extern "C" void
       68                 : js_TraceXML(JSTracer *trc, JSXML *thing);
      69                 : 
      70                 : #if JS_STACK_GROWTH_DIRECTION > 0
      71                 : # define JS_CHECK_STACK_SIZE(limit, lval)  ((uintptr_t)(lval) < limit)
      72                 : #else
      73                 : # define JS_CHECK_STACK_SIZE(limit, lval)  ((uintptr_t)(lval) > limit)
      74                 : #endif
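
A hedged usage sketch (not part of this header): callers typically take the address of a local variable as the current stack position and compare it against a precomputed limit, letting the macro pick the comparison direction for the platform's stack growth.

    static bool
    CheckNativeStack(uintptr_t stackLimit)
    {
        int stackDummy;  /* any local's address approximates the current stack pointer */
        return JS_CHECK_STACK_SIZE(stackLimit, &stackDummy);
    }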
      75                 : 
      76                 : namespace js {
      77                 : 
      78                 : class GCHelperThread;
      79                 : struct Shape;
      80                 : 
      81                 : namespace gc {
      82                 : 
      83                 : enum State {
      84                 :     NO_INCREMENTAL,
      85                 :     MARK_ROOTS,
      86                 :     MARK,
      87                 :     SWEEP,
      88                 :     INVALID
      89                 : };
      90                 : 
      91                 : struct Arena;
      92                 : 
      93                 : /*
      94                 :  * This must be an upper bound, but we do not need the least upper bound, so
      95                 :  * we just exclude non-background objects.
      96                 :  */
      97                 : const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OBJECT_LIMIT / 2;
      98                 : 
      99                 : /*
     100                 :  * Page size is 4096 by default, except for SPARC, where it is 8192.
      101                 :  * Note: Do not use JS_CPU_SPARC here; this header is used outside JS.
     102                 :  * Bug 692267: Move page size definition to gc/Memory.h and include it
     103                 :  *             directly once jsgc.h is no longer an installed header.
     104                 :  */
     105                 : #if defined(SOLARIS) && (defined(__sparc) || defined(__sparcv9))
     106                 : const size_t PageShift = 13;
     107                 : #else
     108                 : const size_t PageShift = 12;
     109                 : #endif
     110                 : const size_t PageSize = size_t(1) << PageShift;
     111                 : 
     112                 : const size_t ChunkShift = 20;
     113                 : const size_t ChunkSize = size_t(1) << ChunkShift;
     114                 : const size_t ChunkMask = ChunkSize - 1;
     115                 : 
     116                 : const size_t ArenaShift = PageShift;
     117                 : const size_t ArenaSize = PageSize;
     118                 : const size_t ArenaMask = ArenaSize - 1;
     119                 : 
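For orientation, a hedged sketch (not in the original header) of what these constants work out to on a typical non-SPARC build; on SPARC, PageShift is 13 and the page and arena sizes double.

    JS_STATIC_ASSERT(PageSize == 4096);               /* PageShift == 12 */
    JS_STATIC_ASSERT(ChunkSize == size_t(1) << 20);   /* 1 MiB chunks */
    JS_STATIC_ASSERT(ArenaSize == PageSize);          /* one arena per OS page */
    JS_STATIC_ASSERT(ChunkSize / ArenaSize == 256);   /* arena slots per chunk, before header overhead */
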
     120                 : /*
     121                 :  * This is the maximum number of arenas we allow in the FreeCommitted state
     122                 :  * before we trigger a GC_SHRINK to release free arenas to the OS.
     123                 :  */
     124                 : const static uint32_t FreeCommittedArenasThreshold = (32 << 20) / ArenaSize;
     125                 : 
     126                 : /*
      127                 :  * The mark bitmap has one bit for each GC cell. For multi-cell GC things this
      128                 :  * wastes space but lets us avoid expensive divisions by the thing's size when
      129                 :  * accessing the bitmap. In addition, this lets us use some bits for colored
     130                 :  * marking during the cycle GC.
     131                 :  */
     132                 : const size_t ArenaCellCount = size_t(1) << (ArenaShift - Cell::CellShift);
     133                 : const size_t ArenaBitmapBits = ArenaCellCount;
     134                 : const size_t ArenaBitmapBytes = ArenaBitmapBits / 8;
     135                 : const size_t ArenaBitmapWords = ArenaBitmapBits / JS_BITS_PER_WORD;
     136                 : 
     137                 : /*
     138                 :  * A FreeSpan represents a contiguous sequence of free cells in an Arena.
     139                 :  * |first| is the address of the first free cell in the span. |last| is the
     140                 :  * address of the last free cell in the span. This last cell holds a FreeSpan
     141                 :  * data structure for the next span unless this is the last span on the list
     142                 :  * of spans in the arena. For this last span |last| points to the last byte of
     143                 :  * the last thing in the arena and no linkage is stored there, so
     144                 :  * |last| == arenaStart + ArenaSize - 1. If the space at the arena end is
      145                 :  * fully used, this last span is empty and |first| == |last| + 1.
     146                 :  *
     147                 :  * Thus |first| < |last| implies that we have either the last span with at least
     148                 :  * one element or that the span is not the last and contains at least 2
      149                 :  * elements. In both cases, to allocate a thing from this span we simply
      150                 :  * increment |first| by the allocation size.
     151                 :  *
     152                 :  * |first| == |last| implies that we have a one element span that records the
     153                 :  * next span. So to allocate from it we need to update the span list head
     154                 :  * with a copy of the span stored at |last| address so the following
     155                 :  * allocations will use that span.
     156                 :  *
     157                 :  * |first| > |last| implies that we have an empty last span and the arena is
     158                 :  * fully used.
     159                 :  *
      160                 :  * Also, only for the last span is (|last| & 1) != 0, as all allocation sizes
      161                 :  * are multiples of Cell::CellSize.
     162                 :  */
     163                 : struct FreeSpan {
     164                 :     uintptr_t   first;
     165                 :     uintptr_t   last;
     166                 : 
     167                 :   public:
     168         4049092 :     FreeSpan() {}
     169                 : 
     170        23687219 :     FreeSpan(uintptr_t first, uintptr_t last)
     171        23687219 :       : first(first), last(last) {
     172        23687219 :         checkSpan();
     173        23687216 :     }
     174                 : 
     175                 :     /*
     176                 :      * To minimize the size of the arena header the first span is encoded
     177                 :      * there as offsets from the arena start.
     178                 :      */
     179         2674283 :     static size_t encodeOffsets(size_t firstOffset, size_t lastOffset) {
     180                 :         /* Check that we can pack the offsets into uint16. */
     181                 :         JS_STATIC_ASSERT(ArenaShift < 16);
     182         2674283 :         JS_ASSERT(firstOffset <= ArenaSize);
     183         2674282 :         JS_ASSERT(lastOffset < ArenaSize);
     184         2674282 :         JS_ASSERT(firstOffset <= ((lastOffset + 1) & ~size_t(1)));
     185         2674282 :         return firstOffset | (lastOffset << 16);
     186                 :     }
     187                 : 
     188                 :     /*
     189                 :      * Encoded offsets for a full arena when its first span is the last one
     190                 :      * and empty.
     191                 :      */
     192                 :     static const size_t FullArenaOffsets = ArenaSize | ((ArenaSize - 1) << 16);
     193                 : 
     194        23687236 :     static FreeSpan decodeOffsets(uintptr_t arenaAddr, size_t offsets) {
     195        23687236 :         JS_ASSERT(!(arenaAddr & ArenaMask));
     196                 : 
     197        23687236 :         size_t firstOffset = offsets & 0xFFFF;
     198        23687236 :         size_t lastOffset = offsets >> 16;
     199        23687236 :         JS_ASSERT(firstOffset <= ArenaSize);
     200        23687236 :         JS_ASSERT(lastOffset < ArenaSize);
     201                 : 
     202                 :         /*
     203                 :          * We must not use | when calculating first as firstOffset is
     204                 :          * ArenaMask + 1 for the empty span.
     205                 :          */
     206        23687236 :         return FreeSpan(arenaAddr + firstOffset, arenaAddr | lastOffset);
     207                 :     }
     208                 : 
     209         1502845 :     void initAsEmpty(uintptr_t arenaAddr = 0) {
     210         1502845 :         JS_ASSERT(!(arenaAddr & ArenaMask));
     211         1502845 :         first = arenaAddr + ArenaSize;
     212         1502845 :         last = arenaAddr | (ArenaSize  - 1);
     213         1502845 :         JS_ASSERT(isEmpty());
     214         1502845 :     }
     215                 : 
     216        32417812 :     bool isEmpty() const {
     217        32417812 :         checkSpan();
     218        32417812 :         return first > last;
     219                 :     }
     220                 : 
     221        14160861 :     bool hasNext() const {
     222        14160861 :         checkSpan();
     223        14160861 :         return !(last & uintptr_t(1));
     224                 :     }
     225                 : 
     226        13598294 :     const FreeSpan *nextSpan() const {
     227        13598294 :         JS_ASSERT(hasNext());
     228        13598294 :         return reinterpret_cast<FreeSpan *>(last);
     229                 :     }
     230                 : 
     231         2397087 :     FreeSpan *nextSpanUnchecked(size_t thingSize) const {
     232                 : #ifdef DEBUG
     233         2397087 :         uintptr_t lastOffset = last & ArenaMask;
     234         2397087 :         JS_ASSERT(!(lastOffset & 1));
     235         2397087 :         JS_ASSERT((ArenaSize - lastOffset) % thingSize == 0);
     236                 : #endif
     237         2397087 :         return reinterpret_cast<FreeSpan *>(last);
     238                 :     }
     239                 : 
     240       562421111 :     uintptr_t arenaAddressUnchecked() const {
     241       562421111 :         return last & ~ArenaMask;
     242                 :     }
     243                 : 
     244         8100228 :     uintptr_t arenaAddress() const {
     245         8100228 :         checkSpan();
     246         8100228 :         return arenaAddressUnchecked();
     247                 :     }
     248                 : 
     249         1224592 :     ArenaHeader *arenaHeader() const {
     250         1224592 :         return reinterpret_cast<ArenaHeader *>(arenaAddress());
     251                 :     }
     252                 : 
     253          969496 :     bool isSameNonEmptySpan(const FreeSpan *another) const {
     254          969496 :         JS_ASSERT(!isEmpty());
     255          969496 :         JS_ASSERT(!another->isEmpty());
     256          969496 :         return first == another->first && last == another->last;
     257                 :     }
     258                 : 
     259         2589169 :     bool isWithinArena(uintptr_t arenaAddr) const {
     260         2589169 :         JS_ASSERT(!(arenaAddr & ArenaMask));
     261                 : 
     262                 :         /* Return true for the last empty span as well. */
     263         2589169 :         return arenaAddress() == arenaAddr;
     264                 :     }
     265                 : 
     266         2589169 :     size_t encodeAsOffsets() const {
     267                 :         /*
     268                 :          * We must use first - arenaAddress(), not first & ArenaMask as
     269                 :          * first == ArenaMask + 1 for an empty span.
     270                 :          */
     271         2589169 :         uintptr_t arenaAddr = arenaAddress();
     272         2589169 :         return encodeOffsets(first - arenaAddr, last & ArenaMask);
     273                 :     }
     274                 : 
     275                 :     /* See comments before FreeSpan for details. */
     276       214032028 :     JS_ALWAYS_INLINE void *allocate(size_t thingSize) {
     277       214032028 :         JS_ASSERT(thingSize % Cell::CellSize == 0);
     278       214032028 :         checkSpan();
     279       214032028 :         uintptr_t thing = first;
     280       214032028 :         if (thing < last) {
     281                 :             /* Bump-allocate from the current span. */
     282       211528816 :             first = thing + thingSize;
     283         2503212 :         } else if (JS_LIKELY(thing == last)) {
     284                 :             /*
     285                 :              * Move to the next span. We use JS_LIKELY as without PGO
     286                 :              * compilers mis-predict == here as unlikely to succeed.
     287                 :              */
     288          493116 :             *this = *reinterpret_cast<FreeSpan *>(thing);
     289                 :         } else {
     290         2010096 :             return NULL;
     291                 :         }
     292       212021932 :         checkSpan();
     293       212021932 :         return reinterpret_cast<void *>(thing);
     294                 :     }
     295                 : 
     296                 :     /* A version of allocate when we know that the span is not empty. */
     297           85138 :     JS_ALWAYS_INLINE void *infallibleAllocate(size_t thingSize) {
     298           85138 :         JS_ASSERT(thingSize % Cell::CellSize == 0);
     299           85138 :         checkSpan();
     300           85138 :         uintptr_t thing = first;
     301           85138 :         if (thing < last) {
     302           49619 :             first = thing + thingSize;
     303                 :         } else {
     304           35519 :             JS_ASSERT(thing == last);
     305           35519 :             *this = *reinterpret_cast<FreeSpan *>(thing);
     306                 :         }
     307           85138 :         checkSpan();
     308           85138 :         return reinterpret_cast<void *>(thing);
     309                 :     }
     310                 : 
     311                 :     /*
     312                 :      * Allocate from a newly allocated arena. We do not move the free list
      313                 :      * from the arena. Rather, we set the arena up as fully used during
      314                 :      * initialization, so to allocate we simply return the first thing in the
     315                 :      * arena and set the free list to point to the second.
     316                 :      */
     317         1683327 :     JS_ALWAYS_INLINE void *allocateFromNewArena(uintptr_t arenaAddr, size_t firstThingOffset,
     318                 :                                                 size_t thingSize) {
     319         1683327 :         JS_ASSERT(!(arenaAddr & ArenaMask));
     320         1683327 :         uintptr_t thing = arenaAddr | firstThingOffset;
     321         1683327 :         first = thing + thingSize;
     322         1683327 :         last = arenaAddr | ArenaMask;
     323         1683327 :         checkSpan();
     324         1683327 :         return reinterpret_cast<void *>(thing);
     325                 :     }
     326                 : 
     327       513450769 :     void checkSpan() const {
     328                 : #ifdef DEBUG
     329                 :         /* We do not allow spans at the end of the address space. */
     330       513450769 :         JS_ASSERT(last != uintptr_t(-1));
     331       513450164 :         JS_ASSERT(first);
     332       513450164 :         JS_ASSERT(last);
     333       513450164 :         JS_ASSERT(first - 1 <= last);
     334       513450164 :         uintptr_t arenaAddr = arenaAddressUnchecked();
     335       513468657 :         if (last & 1) {
     336                 :             /* The span is the last. */
     337       472565072 :             JS_ASSERT((last & ArenaMask) == ArenaMask);
     338                 : 
     339       472569426 :             if (first - 1 == last) {
      340                 :                 /* The span is the last and is empty. The above first != 0 check
     341                 :                  * implies that we are not at the end of the address space.
     342                 :                  */
     343        29325505 :                 return;
     344                 :             }
     345       443243921 :             size_t spanLength = last - first + 1;
     346       443243921 :             JS_ASSERT(spanLength % Cell::CellSize == 0);
     347                 : 
     348                 :             /* Start and end must belong to the same arena. */
     349       443243888 :             JS_ASSERT((first & ~ArenaMask) == arenaAddr);
     350       443243888 :             return;
     351                 :         }
     352                 : 
     353                 :         /* The span is not the last and we have more spans to follow. */
     354        40903585 :         JS_ASSERT(first <= last);
     355        40903521 :         size_t spanLengthWithoutOneThing = last - first;
     356        40903521 :         JS_ASSERT(spanLengthWithoutOneThing % Cell::CellSize == 0);
     357                 : 
     358        40903521 :         JS_ASSERT((first & ~ArenaMask) == arenaAddr);
     359                 : 
     360                 :         /*
     361                 :          * If there is not enough space before the arena end to allocate one
     362                 :          * more thing, then the span must be marked as the last one to avoid
      363                 :          * storing a useless empty span reference.
     364                 :          */
     365        40903521 :         size_t beforeTail = ArenaSize - (last & ArenaMask);
     366        40903521 :         JS_ASSERT(beforeTail >= sizeof(FreeSpan) + Cell::CellSize);
     367                 : 
     368        40903521 :         FreeSpan *next = reinterpret_cast<FreeSpan *>(last);
     369                 : 
     370                 :         /*
     371                 :          * The GC things on the list of free spans come from one arena
     372                 :          * and the spans are linked in ascending address order with
     373                 :          * at least one non-free thing between spans.
     374                 :          */
     375        40903521 :         JS_ASSERT(last < next->first);
     376        40903521 :         JS_ASSERT(arenaAddr == next->arenaAddressUnchecked());
     377                 : 
     378        40903758 :         if (next->first > next->last) {
     379                 :             /*
     380                 :              * The next span is the empty span that terminates the list for
     381                 :              * arenas that do not have any free things at the end.
     382                 :              */
     383         5330768 :             JS_ASSERT(next->first - 1 == next->last);
     384         5330767 :             JS_ASSERT(arenaAddr + ArenaSize == next->first);
     385                 :         }
     386                 : #endif
     387                 :     }
     388                 : 
     389                 : };
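
A hedged walkthrough (not part of the original source) of how FreeSpan::allocate() maps onto the three span states described in the comment above FreeSpan; it assumes the definitions in this header are in scope.

    inline void *
    TryAllocateThing(FreeSpan &span, size_t thingSize)
    {
        /*
         * first <  last: bump-allocate; |first| advances by thingSize.
         * first == last: the one remaining cell holds the next FreeSpan, so
         *                allocate() copies it into |span| and returns the cell.
         * first >  last: the terminal empty span; the arena is full and NULL
         *                is returned.
         */
        return span.allocate(thingSize);
    }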
     390                 : 
     391                 : /* Every arena has a header. */
     392                 : struct ArenaHeader {
     393                 :     friend struct FreeLists;
     394                 : 
     395                 :     JSCompartment   *compartment;
     396                 : 
     397                 :     /*
     398                 :      * ArenaHeader::next has two purposes: when unallocated, it points to the
     399                 :      * next available Arena's header. When allocated, it points to the next
     400                 :      * arena of the same size class and compartment.
     401                 :      */
     402                 :     ArenaHeader     *next;
     403                 : 
     404                 :   private:
     405                 :     /*
     406                 :      * The first span of free things in the arena. We encode it as the start
     407                 :      * and end offsets within the arena, not as FreeSpan structure, to
      408                 :      * and end offsets within the arena, not as a FreeSpan structure, to
     409                 :      */
     410                 :     size_t          firstFreeSpanOffsets;
     411                 : 
     412                 :     /*
      413                 :      * One of the AllocKind constants, or FINALIZE_LIMIT when the arena does
      414                 :      * not contain any GC things and is on the list of empty arenas in the GC
      415                 :      * chunk. The latter lets us quickly check whether the arena is allocated
      416                 :      * during conservative GC scanning without searching for the arena in the
     417                 :      * list.
     418                 :      */
     419                 :     size_t       allocKind          : 8;
     420                 : 
     421                 :     /*
     422                 :      * When recursive marking uses too much stack the marking is delayed and
     423                 :      * the corresponding arenas are put into a stack using the following field
     424                 :      * as a linkage. To distinguish the bottom of the stack from the arenas
     425                 :      * not present in the stack we use an extra flag to tag arenas on the
     426                 :      * stack.
     427                 :      *
     428                 :      * Delayed marking is also used for arenas that we allocate into during an
     429                 :      * incremental GC. In this case, we intend to mark all the objects in the
     430                 :      * arena, and it's faster to do this marking in bulk.
     431                 :      *
     432                 :      * To minimize the ArenaHeader size we record the next delayed marking
     433                 :      * linkage as arenaAddress() >> ArenaShift and pack it with the allocKind
     434                 :      * field and hasDelayedMarking flag. We use 8 bits for the allocKind, not
     435                 :      * ArenaShift - 1, so the compiler can use byte-level memory instructions
     436                 :      * to access it.
     437                 :      */
     438                 :   public:
     439                 :     size_t       hasDelayedMarking  : 1;
     440                 :     size_t       allocatedDuringIncremental : 1;
     441                 :     size_t       markOverflow : 1;
     442                 :     size_t       nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
     443                 : 
     444                 :     static void staticAsserts() {
      445                 :         /* We must be able to fit the allocKind into uint8_t. */
     446                 :         JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
     447                 : 
     448                 :         /*
      449                 :          * nextDelayedMarking packing assumes that ArenaShift has enough bits
     450                 :          * to cover allocKind and hasDelayedMarking.
     451                 :          */
     452                 :         JS_STATIC_ASSERT(ArenaShift >= 8 + 1 + 1 + 1);
     453                 :     }
     454                 : 
     455                 :     inline uintptr_t address() const;
     456                 :     inline Chunk *chunk() const;
     457                 : 
     458       811666512 :     bool allocated() const {
     459       811666512 :         JS_ASSERT(allocKind <= size_t(FINALIZE_LIMIT));
     460       811666512 :         return allocKind < size_t(FINALIZE_LIMIT);
     461                 :     }
     462                 : 
     463         1683327 :     void init(JSCompartment *comp, AllocKind kind) {
     464         1683327 :         JS_ASSERT(!allocated());
     465         1683327 :         JS_ASSERT(!markOverflow);
     466         1683327 :         JS_ASSERT(!allocatedDuringIncremental);
     467         1683327 :         JS_ASSERT(!hasDelayedMarking);
     468         1683327 :         compartment = comp;
     469                 : 
     470                 :         JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
     471         1683327 :         allocKind = size_t(kind);
     472                 : 
     473                 :         /* See comments in FreeSpan::allocateFromNewArena. */
     474         1683327 :         firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
     475         1683327 :     }
     476                 : 
     477        11887059 :     void setAsNotAllocated() {
     478        11887059 :         allocKind = size_t(FINALIZE_LIMIT);
     479        11887059 :         markOverflow = 0;
     480        11887059 :         allocatedDuringIncremental = 0;
     481        11887059 :         hasDelayedMarking = 0;
     482        11887059 :         nextDelayedMarking = 0;
     483        11887059 :     }
     484                 : 
     485        31190911 :     uintptr_t arenaAddress() const {
     486        31190911 :         return address();
     487                 :     }
     488                 : 
     489         3004483 :     Arena *getArena() {
     490         3004483 :         return reinterpret_cast<Arena *>(arenaAddress());
     491                 :     }
     492                 : 
     493       493583740 :     AllocKind getAllocKind() const {
     494       493583740 :         JS_ASSERT(allocated());
     495       493583474 :         return AllocKind(allocKind);
     496                 :     }
     497                 : 
     498                 :     inline size_t getThingSize() const;
     499                 : 
     500        15174922 :     bool hasFreeThings() const {
     501        15174922 :         return firstFreeSpanOffsets != FreeSpan::FullArenaOffsets;
     502                 :     }
     503                 : 
     504                 :     inline bool isEmpty() const;
     505                 : 
     506          228511 :     void setAsFullyUsed() {
     507          228511 :         firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
     508          228511 :     }
     509                 : 
     510        12401776 :     FreeSpan getFirstFreeSpan() const {
     511                 : #ifdef DEBUG
     512        12401776 :         checkSynchronizedWithFreeList();
     513                 : #endif
     514        12401679 :         return FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
     515                 :     }
     516                 : 
     517         2589169 :     void setFirstFreeSpan(const FreeSpan *span) {
     518         2589169 :         JS_ASSERT(span->isWithinArena(arenaAddress()));
     519         2589169 :         firstFreeSpanOffsets = span->encodeAsOffsets();
     520         2589169 :     }
     521                 : 
     522                 : #ifdef DEBUG
     523                 :     void checkSynchronizedWithFreeList() const;
     524                 : #endif
     525                 : 
     526                 :     inline ArenaHeader *getNextDelayedMarking() const;
     527                 :     inline void setNextDelayedMarking(ArenaHeader *aheader);
     528                 : };
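
A hedged sketch (not from jsgc.h) of why the nextDelayedMarking packing described above round-trips: arena addresses are ArenaSize-aligned, so the low ArenaShift bits dropped when storing the link are always zero.

    inline uintptr_t
    PackDelayedMarkingLink(ArenaHeader *aheader)
    {
        /* What setNextDelayedMarking() stores in the bitfield. */
        return aheader->arenaAddress() >> ArenaShift;
    }

    inline ArenaHeader *
    UnpackDelayedMarkingLink(uintptr_t link)
    {
        /* What getNextDelayedMarking() reconstructs. */
        return reinterpret_cast<ArenaHeader *>(link << ArenaShift);
    }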
     529                 : 
     530                 : struct Arena {
     531                 :     /*
     532                 :      * Layout of an arena:
     533                 :      * An arena is 4K in size and 4K-aligned. It starts with the ArenaHeader
     534                 :      * descriptor followed by some pad bytes. The remainder of the arena is
     535                 :      * filled with the array of T things. The pad bytes ensure that the thing
     536                 :      * array ends exactly at the end of the arena.
     537                 :      *
     538                 :      * +-------------+-----+----+----+-----+----+
     539                 :      * | ArenaHeader | pad | T0 | T1 | ... | Tn |
     540                 :      * +-------------+-----+----+----+-----+----+
     541                 :      *
     542                 :      * <----------------------------------------> = ArenaSize bytes
     543                 :      * <-------------------> = first thing offset
     544                 :      */
     545                 :     ArenaHeader aheader;
     546                 :     uint8_t     data[ArenaSize - sizeof(ArenaHeader)];
     547                 : 
     548                 :   private:
     549                 :     static JS_FRIEND_DATA(const uint32_t) ThingSizes[];
     550                 :     static JS_FRIEND_DATA(const uint32_t) FirstThingOffsets[];
     551                 : 
     552                 :   public:
     553                 :     static void staticAsserts();
     554                 : 
     555       554979090 :     static size_t thingSize(AllocKind kind) {
     556       554979090 :         return ThingSizes[kind];
     557                 :     }
     558                 : 
     559        20091857 :     static size_t firstThingOffset(AllocKind kind) {
     560        20091857 :         return FirstThingOffsets[kind];
     561                 :     }
     562                 : 
     563         4025285 :     static size_t thingsPerArena(size_t thingSize) {
     564         4025285 :         JS_ASSERT(thingSize % Cell::CellSize == 0);
     565                 : 
     566                 :         /* We should be able to fit FreeSpan in any GC thing. */
     567         4025285 :         JS_ASSERT(thingSize >= sizeof(FreeSpan));
     568                 : 
     569         4025285 :         return (ArenaSize - sizeof(ArenaHeader)) / thingSize;
     570                 :     }
     571                 : 
     572                 :     static size_t thingsSpan(size_t thingSize) {
     573                 :         return thingsPerArena(thingSize) * thingSize;
     574                 :     }
     575                 : 
     576       271476876 :     static bool isAligned(uintptr_t thing, size_t thingSize) {
      577                 :         /* Things end at the arena end. */
     578       271476876 :         uintptr_t tailOffset = (ArenaSize - thing) & ArenaMask;
     579       271476876 :         return tailOffset % thingSize == 0;
     580                 :     }
     581                 : 
     582        10507103 :     uintptr_t address() const {
     583        10507103 :         return aheader.address();
     584                 :     }
     585                 : 
     586         6724665 :     uintptr_t thingsStart(AllocKind thingKind) {
     587         6724665 :         return address() | firstThingOffset(thingKind);
     588                 :     }
     589                 : 
     590         3782438 :     uintptr_t thingsEnd() {
     591         3782438 :         return address() + ArenaSize;
     592                 :     }
     593                 : 
     594                 :     template <typename T>
     595                 :     bool finalize(FreeOp *fop, AllocKind thingKind, size_t thingSize);
     596                 : };
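
A hedged illustration (not part of the original header) of the layout invariant in the diagram above: the pad is whatever is left over after packing as many things as fit, so the first-thing offset for a kind should equal ArenaSize minus the space occupied by the things themselves.

    inline size_t
    ExpectedFirstThingOffset(size_t thingSize)
    {
        /* sizeof(ArenaHeader) + pad == everything before the first thing */
        return ArenaSize - Arena::thingsPerArena(thingSize) * thingSize;
    }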
     597                 : 
     598                 : /* The chunk header (located at the end of the chunk to preserve arena alignment). */
     599                 : struct ChunkInfo {
     600                 :     Chunk           *next;
     601                 :     Chunk           **prevp;
     602                 : 
     603                 :     /* Free arenas are linked together with aheader.next. */
     604                 :     ArenaHeader     *freeArenasHead;
     605                 : 
     606                 :     /*
     607                 :      * Decommitted arenas are tracked by a bitmap in the chunk header. We use
     608                 :      * this offset to start our search iteration close to a decommitted arena
     609                 :      * that we can allocate.
     610                 :      */
     611                 :     uint32_t        lastDecommittedArenaOffset;
     612                 : 
     613                 :     /* Number of free arenas, either committed or decommitted. */
     614                 :     uint32_t        numArenasFree;
     615                 : 
     616                 :     /* Number of free, committed arenas. */
     617                 :     uint32_t        numArenasFreeCommitted;
     618                 : 
     619                 :     /* Number of GC cycles this chunk has survived. */
     620                 :     uint32_t        age;
     621                 : };
     622                 : 
     623                 : /*
     624                 :  * Calculating ArenasPerChunk:
     625                 :  *
     626                 :  * In order to figure out how many Arenas will fit in a chunk, we need to know
     627                 :  * how much extra space is available after we allocate the header data. This
     628                 :  * is a problem because the header size depends on the number of arenas in the
     629                 :  * chunk. The two dependent fields are bitmap and decommittedArenas.
     630                 :  *
     631                 :  * For the mark bitmap, we know that each arena will use a fixed number of full
     632                 :  * bytes: ArenaBitmapBytes. The full size of the header data is this number
     633                 :  * multiplied by the eventual number of arenas we have in the header. We,
     634                 :  * conceptually, distribute this header data among the individual arenas and do
     635                 :  * not include it in the header. This way we do not have to worry about its
     636                 :  * variable size: it gets attached to the variable number we are computing.
     637                 :  *
     638                 :  * For the decommitted arena bitmap, we only have 1 bit per arena, so this
     639                 :  * technique will not work. Instead, we observe that we do not have enough
     640                 :  * header info to fill 8 full arenas: it is currently 4 on 64bit, less on
     641                 :  * 32bit. Thus, with current numbers, we need 64 bytes for decommittedArenas.
     642                 :  * This will not become 63 bytes unless we double the data required in the
     643                 :  * header. Therefore, we just compute the number of bytes required to track
     644                 :  * every possible arena and do not worry about slop bits, since there are too
     645                 :  * few to usefully allocate.
     646                 :  *
     647                 :  * To actually compute the number of arenas we can allocate in a chunk, we
     648                 :  * divide the amount of available space less the header info (not including
     649                 :  * the mark bitmap which is distributed into the arena size) by the size of
     650                 :  * the arena (with the mark bitmap bytes it uses).
     651                 :  */
     652                 : const size_t BytesPerArenaWithHeader = ArenaSize + ArenaBitmapBytes;
     653                 : const size_t ChunkDecommitBitmapBytes = ChunkSize / ArenaSize / JS_BITS_PER_BYTE;
     654                 : const size_t ChunkBytesAvailable = ChunkSize - sizeof(ChunkInfo) - ChunkDecommitBitmapBytes;
     655                 : const size_t ArenasPerChunk = ChunkBytesAvailable / BytesPerArenaWithHeader;
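
A hedged worked example of this computation (not in the original source), assuming Cell::CellSize == 8 (CellShift == 3) and a 64-bit build where sizeof(ChunkInfo) == 40:

    /*
     *   ArenaBitmapBytes         = (4096 / 8) / 8         =      64
     *   BytesPerArenaWithHeader  = 4096 + 64              =    4160
     *   ChunkDecommitBitmapBytes = (2^20 / 4096) / 8      =      32
     *   ChunkBytesAvailable      = 2^20 - 40 - 32         = 1048504
     *   ArenasPerChunk           = 1048504 / 4160         =     252
     */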
     656                 : 
     657                 : /* A chunk bitmap contains enough mark bits for all the cells in a chunk. */
     658                 : struct ChunkBitmap {
     659                 :     uintptr_t bitmap[ArenaBitmapWords * ArenasPerChunk];
     660                 : 
     661                 :     JS_ALWAYS_INLINE void getMarkWordAndMask(const Cell *cell, uint32_t color,
     662                 :                                              uintptr_t **wordp, uintptr_t *maskp);
     663                 : 
     664       642428391 :     JS_ALWAYS_INLINE bool isMarked(const Cell *cell, uint32_t color) {
     665                 :         uintptr_t *word, mask;
     666       642428391 :         getMarkWordAndMask(cell, color, &word, &mask);
     667       642428391 :         return *word & mask;
     668                 :     }
     669                 : 
     670       366717501 :     JS_ALWAYS_INLINE bool markIfUnmarked(const Cell *cell, uint32_t color) {
     671                 :         uintptr_t *word, mask;
     672       366717501 :         getMarkWordAndMask(cell, BLACK, &word, &mask);
     673       366717501 :         if (*word & mask)
     674       117678446 :             return false;
     675       249039055 :         *word |= mask;
     676       249039055 :         if (color != BLACK) {
     677                 :             /*
     678                 :              * We use getMarkWordAndMask to recalculate both mask and word as
     679                 :              * doing just mask << color may overflow the mask.
     680                 :              */
     681               0 :             getMarkWordAndMask(cell, color, &word, &mask);
     682               0 :             if (*word & mask)
     683               0 :                 return false;
     684               0 :             *word |= mask;
     685                 :         }
     686       249039055 :         return true;
     687                 :     }
     688                 : 
     689                 :     JS_ALWAYS_INLINE void unmark(const Cell *cell, uint32_t color) {
     690                 :         uintptr_t *word, mask;
     691                 :         getMarkWordAndMask(cell, color, &word, &mask);
     692                 :         *word &= ~mask;
     693                 :     }
     694                 : 
     695          123883 :     void clear() {
     696          123883 :         PodArrayZero(bitmap);
     697          123883 :     }
     698                 : 
     699                 : #ifdef DEBUG
     700            2248 :     bool noBitsSet(ArenaHeader *aheader) {
     701                 :         /*
     702                 :          * We assume that the part of the bitmap corresponding to the arena
     703                 :          * has the exact number of words so we do not need to deal with a word
     704                 :          * that covers bits from two arenas.
     705                 :          */
     706                 :         JS_STATIC_ASSERT(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD);
     707                 : 
     708                 :         uintptr_t *word, unused;
     709            2248 :         getMarkWordAndMask(reinterpret_cast<Cell *>(aheader->address()), BLACK, &word, &unused);
     710           38216 :         for (size_t i = 0; i != ArenaBitmapWords; i++) {
     711           35968 :             if (word[i])
     712               0 :                 return false;
     713                 :         }
     714            2248 :         return true;
     715                 :     }
     716                 : #endif
     717                 : };
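
A hedged sketch (not from the header) of the index computation getMarkWordAndMask() performs later in this file: a cell's mark bit is its byte offset within the chunk divided by the cell size, offset by the mark color.

    inline size_t
    MarkBitIndex(const Cell *cell, uint32_t color)
    {
        /* Cell::CellSize is a power of two, so the division is a shift. */
        return (cell->address() & ChunkMask) / Cell::CellSize + color;
    }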
     718                 : 
     719                 : JS_STATIC_ASSERT(ArenaBitmapBytes * ArenasPerChunk == sizeof(ChunkBitmap));
     720                 : 
     721                 : typedef BitArray<ArenasPerChunk> PerArenaBitmap;
     722                 : 
     723                 : const size_t ChunkPadSize = ChunkSize
     724                 :                             - (sizeof(Arena) * ArenasPerChunk)
     725                 :                             - sizeof(ChunkBitmap)
     726                 :                             - sizeof(PerArenaBitmap)
     727                 :                             - sizeof(ChunkInfo);
     728                 : JS_STATIC_ASSERT(ChunkPadSize < BytesPerArenaWithHeader);
     729                 : 
     730                 : /*
     731                 :  * Chunks contain arenas and associated data structures (mark bitmap, delayed
     732                 :  * marking state).
     733                 :  */
     734                 : struct Chunk {
     735                 :     Arena           arenas[ArenasPerChunk];
     736                 : 
     737                 :     /* Pad to full size to ensure cache alignment of ChunkInfo. */
     738                 :     uint8_t         padding[ChunkPadSize];
     739                 : 
     740                 :     ChunkBitmap     bitmap;
     741                 :     PerArenaBitmap  decommittedArenas;
     742                 :     ChunkInfo       info;
     743                 : 
     744       208257559 :     static Chunk *fromAddress(uintptr_t addr) {
     745       208257559 :         addr &= ~ChunkMask;
     746       208257559 :         return reinterpret_cast<Chunk *>(addr);
     747                 :     }
     748                 : 
     749              -1 :     static bool withinArenasRange(uintptr_t addr) {
     750              -1 :         uintptr_t offset = addr & ChunkMask;
     751              -1 :         return offset < ArenasPerChunk * ArenaSize;
     752                 :     }
     753                 : 
     754        11555918 :     static size_t arenaIndex(uintptr_t addr) {
     755        11555918 :         JS_ASSERT(withinArenasRange(addr));
     756        11555918 :         return (addr & ChunkMask) >> ArenaShift;
     757                 :     }
     758                 : 
     759                 :     uintptr_t address() const {
     760                 :         uintptr_t addr = reinterpret_cast<uintptr_t>(this);
     761                 :         JS_ASSERT(!(addr & ChunkMask));
     762                 :         return addr;
     763                 :     }
     764                 : 
     765         1765950 :     bool unused() const {
     766         1765950 :         return info.numArenasFree == ArenasPerChunk;
     767                 :     }
     768                 : 
     769         3418436 :     bool hasAvailableArenas() const {
     770         3418436 :         return info.numArenasFree != 0;
     771                 :     }
     772                 : 
     773                 :     inline void addToAvailableList(JSCompartment *compartment);
     774                 :     inline void insertToAvailableList(Chunk **insertPoint);
     775                 :     inline void removeFromAvailableList();
     776                 : 
     777                 :     ArenaHeader *allocateArena(JSCompartment *comp, AllocKind kind);
     778                 : 
     779                 :     void releaseArena(ArenaHeader *aheader);
     780                 : 
     781                 :     static Chunk *allocate(JSRuntime *rt);
     782                 : 
     783                 :     /* Must be called with the GC lock taken. */
     784                 :     static inline void release(JSRuntime *rt, Chunk *chunk);
     785                 :     static inline void releaseList(JSRuntime *rt, Chunk *chunkListHead);
     786                 : 
     787                 :     /* Must be called with the GC lock taken. */
     788                 :     inline void prepareToBeFreed(JSRuntime *rt);
     789                 : 
     790                 :     /*
     791                 :      * Assuming that the info.prevp points to the next field of the previous
     792                 :      * chunk in a doubly-linked list, get that chunk.
     793                 :      */
     794               0 :     Chunk *getPrevious() {
     795               0 :         JS_ASSERT(info.prevp);
     796               0 :         return fromPointerToNext(info.prevp);
     797                 :     }
     798                 : 
     799                 :     /* Get the chunk from a pointer to its info.next field. */
     800               0 :     static Chunk *fromPointerToNext(Chunk **nextFieldPtr) {
     801               0 :         uintptr_t addr = reinterpret_cast<uintptr_t>(nextFieldPtr);
     802               0 :         JS_ASSERT((addr & ChunkMask) == offsetof(Chunk, info.next));
     803               0 :         return reinterpret_cast<Chunk *>(addr - offsetof(Chunk, info.next));
     804                 :     }
     805                 : 
     806                 :   private:
     807                 :     inline void init();
     808                 : 
     809                 :     /* Search for a decommitted arena to allocate. */
     810                 :     unsigned findDecommittedArenaOffset();
     811                 :     ArenaHeader* fetchNextDecommittedArena();
     812                 : 
     813                 :   public:
     814                 :     /* Unlink and return the freeArenasHead. */
     815                 :     inline ArenaHeader* fetchNextFreeArena(JSRuntime *rt);
     816                 : 
     817                 :     inline void addArenaToFreeList(JSRuntime *rt, ArenaHeader *aheader);
     818                 : };
     819                 : 
     820                 : JS_STATIC_ASSERT(sizeof(Chunk) == ChunkSize);
     821                 : 
     822                 : class ChunkPool {
     823                 :     Chunk   *emptyChunkListHead;
     824                 :     size_t  emptyCount;
     825                 : 
     826                 :   public:
     827           18761 :     ChunkPool()
     828                 :       : emptyChunkListHead(NULL),
     829           18761 :         emptyCount(0) { }
     830                 : 
     831               0 :     size_t getEmptyCount() const {
     832               0 :         return emptyCount;
     833                 :     }
     834                 : 
     835                 :     inline bool wantBackgroundAllocation(JSRuntime *rt) const;
     836                 : 
     837                 :     /* Must be called with the GC lock taken. */
     838                 :     inline Chunk *get(JSRuntime *rt);
     839                 : 
     840                 :     /* Must be called either during the GC or with the GC lock taken. */
     841                 :     inline void put(Chunk *chunk);
     842                 : 
     843                 :     /*
     844                 :      * Return the list of chunks that can be released outside the GC lock.
     845                 :      * Must be called either during the GC or with the GC lock taken.
     846                 :      */
     847                 :     Chunk *expire(JSRuntime *rt, bool releaseAll);
     848                 : 
     849                 :     /* Must be called with the GC lock taken. */
     850                 :     void expireAndFree(JSRuntime *rt, bool releaseAll);
     851                 : 
     852                 :     /* Must be called either during the GC or with the GC lock taken. */
     853                 :     JS_FRIEND_API(int64_t) countCleanDecommittedArenas(JSRuntime *rt);
     854                 : };
     855                 : 
     856                 : inline uintptr_t
     857              -1 : Cell::address() const
     858                 : {
     859              -1 :     uintptr_t addr = uintptr_t(this);
     860              -1 :     JS_ASSERT(addr % Cell::CellSize == 0);
     861              -1 :     JS_ASSERT(Chunk::withinArenasRange(addr));
     862              -1 :     return addr;
     863                 : }
     864                 : 
     865                 : inline ArenaHeader *
     866              -1 : Cell::arenaHeader() const
     867                 : {
     868              -1 :     uintptr_t addr = address();
     869              -1 :     addr &= ~ArenaMask;
     870              -1 :     return reinterpret_cast<ArenaHeader *>(addr);
     871                 : }
     872                 : 
     873                 : Chunk *
     874      1009018784 : Cell::chunk() const
     875                 : {
     876      1009018784 :     uintptr_t addr = uintptr_t(this);
     877      1009018784 :     JS_ASSERT(addr % Cell::CellSize == 0);
     878      1009018784 :     addr &= ~(ChunkSize - 1);
     879      1009018784 :     return reinterpret_cast<Chunk *>(addr);
     880                 : }
     881                 : 
     882                 : AllocKind
     883       183095292 : Cell::getAllocKind() const
     884                 : {
     885       183095292 :     return arenaHeader()->getAllocKind();
     886                 : }
     887                 : 
     888                 : #ifdef DEBUG
     889                 : inline bool
     890       263204738 : Cell::isAligned() const
     891                 : {
     892       263204738 :     return Arena::isAligned(address(), arenaHeader()->getThingSize());
     893                 : }
     894                 : #endif
     895                 : 
     896                 : inline uintptr_t
     897        43385703 : ArenaHeader::address() const
     898                 : {
     899        43385703 :     uintptr_t addr = reinterpret_cast<uintptr_t>(this);
     900        43385703 :     JS_ASSERT(!(addr & ArenaMask));
     901        43385703 :     JS_ASSERT(Chunk::withinArenasRange(addr));
     902        43385417 :     return addr;
     903                 : }
     904                 : 
     905                 : inline Chunk *
     906         1685575 : ArenaHeader::chunk() const
     907                 : {
     908         1685575 :     return Chunk::fromAddress(address());
     909                 : }
     910                 : 
     911                 : inline bool
     912           85138 : ArenaHeader::isEmpty() const
     913                 : {
     914                 :     /* Arena is empty if its first span covers the whole arena. */
     915           85138 :     JS_ASSERT(allocated());
     916           85138 :     size_t firstThingOffset = Arena::firstThingOffset(getAllocKind());
     917           85138 :     return firstFreeSpanOffsets == FreeSpan::encodeOffsets(firstThingOffset, ArenaMask);
     918                 : }
     919                 : 
     920                 : inline size_t
     921       286036969 : ArenaHeader::getThingSize() const
     922                 : {
     923       286036969 :     JS_ASSERT(allocated());
     924       286036905 :     return Arena::thingSize(getAllocKind());
     925                 : }
     926                 : 
     927                 : inline ArenaHeader *
     928           50996 : ArenaHeader::getNextDelayedMarking() const
     929                 : {
     930           50996 :     return &reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift)->aheader;
     931                 : }
     932                 : 
     933                 : inline void
     934           50996 : ArenaHeader::setNextDelayedMarking(ArenaHeader *aheader)
     935                 : {
     936           50996 :     JS_ASSERT(!(uintptr_t(aheader) & ArenaMask));
     937           50996 :     hasDelayedMarking = 1;
     938           50996 :     nextDelayedMarking = aheader->arenaAddress() >> ArenaShift;
     939           50996 : }
     940                 : 
     941                 : JS_ALWAYS_INLINE void
     942      1009148140 : ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32_t color,
     943                 :                                 uintptr_t **wordp, uintptr_t *maskp)
     944                 : {
     945      1009148140 :     size_t bit = (cell->address() & ChunkMask) / Cell::CellSize + color;
     946      1009148140 :     JS_ASSERT(bit < ArenaBitmapBits * ArenasPerChunk);
     947      1009148140 :     *maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD);
     948      1009148140 :     *wordp = &bitmap[bit / JS_BITS_PER_WORD];
     949      1009148140 : }
     950                 : 
     951                 : static void
     952      1009018784 : AssertValidColor(const void *thing, uint32_t color)
     953                 : {
     954                 : #ifdef DEBUG
     955      1009018784 :     ArenaHeader *aheader = reinterpret_cast<const js::gc::Cell *>(thing)->arenaHeader();
     956      1009018784 :     JS_ASSERT_IF(color, color < aheader->getThingSize() / Cell::CellSize);
     957                 : #endif
     958      1009018784 : }
     959                 : 
     960                 : inline bool
     961       642301283 : Cell::isMarked(uint32_t color) const
     962                 : {
     963       642301283 :     AssertValidColor(this, color);
     964       642301283 :     return chunk()->bitmap.isMarked(this, color);
     965                 : }
     966                 : 
     967                 : bool
     968       366717501 : Cell::markIfUnmarked(uint32_t color) const
     969                 : {
     970       366717501 :     AssertValidColor(this, color);
     971       366717501 :     return chunk()->bitmap.markIfUnmarked(this, color);
     972                 : }
     973                 : 
     974                 : void
     975                 : Cell::unmark(uint32_t color) const
     976                 : {
     977                 :     JS_ASSERT(color != BLACK);
     978                 :     AssertValidColor(this, color);
     979                 :     chunk()->bitmap.unmark(this, color);
     980                 : }
     981                 : 
     982                 : JSCompartment *
     983     -1033425325 : Cell::compartment() const
     984                 : {
     985     -1033425325 :     return arenaHeader()->compartment;
     986                 : }
     987                 : 
     988                 : static inline JSGCTraceKind
     989        79852964 : MapAllocToTraceKind(AllocKind thingKind)
     990                 : {
     991                 :     static const JSGCTraceKind map[FINALIZE_LIMIT] = {
     992                 :         JSTRACE_OBJECT,     /* FINALIZE_OBJECT0 */
     993                 :         JSTRACE_OBJECT,     /* FINALIZE_OBJECT0_BACKGROUND */
     994                 :         JSTRACE_OBJECT,     /* FINALIZE_OBJECT2 */
     995                 :         JSTRACE_OBJECT,     /* FINALIZE_OBJECT2_BACKGROUND */
     996                 :         JSTRACE_OBJECT,     /* FINALIZE_OBJECT4 */
     997                 :         JSTRACE_OBJECT,     /* FINALIZE_OBJECT4_BACKGROUND */
     998                 :         JSTRACE_OBJECT,     /* FINALIZE_OBJECT8 */
     999                 :         JSTRACE_OBJECT,     /* FINALIZE_OBJECT8_BACKGROUND */
    1000                 :         JSTRACE_OBJECT,     /* FINALIZE_OBJECT12 */
    1001                 :         JSTRACE_OBJECT,     /* FINALIZE_OBJECT12_BACKGROUND */
    1002                 :         JSTRACE_OBJECT,     /* FINALIZE_OBJECT16 */
    1003                 :         JSTRACE_OBJECT,     /* FINALIZE_OBJECT16_BACKGROUND */
    1004                 :         JSTRACE_SCRIPT,     /* FINALIZE_SCRIPT */
    1005                 :         JSTRACE_SHAPE,      /* FINALIZE_SHAPE */
    1006                 :         JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */
    1007                 :         JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */
    1008                 : #if JS_HAS_XML_SUPPORT      /* FINALIZE_XML */
    1009                 :         JSTRACE_XML,
    1010                 : #endif
    1011                 :         JSTRACE_STRING,     /* FINALIZE_SHORT_STRING */
    1012                 :         JSTRACE_STRING,     /* FINALIZE_STRING */
    1013                 :         JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING */
    1014                 :     };
    1015        79852964 :     return map[thingKind];
    1016                 : }
    1017                 : 
    1018                 : inline JSGCTraceKind
    1019                 : GetGCThingTraceKind(const void *thing);
    1020                 : 
    1021                 : struct ArenaLists {
    1022                 : 
    1023                 :     /*
    1024                 :      * ArenaList::head points to the start of the list. Normally cursor points
    1025                 :      * to the first arena in the list with some free things and all arenas
     1026                 :      * before cursor are fully allocated. However, the arena currently being
     1027                 :      * allocated from is considered full while its list of free spans is moved
     1028                 :      * into the freeList. So during the GC or cell enumeration, when a freeList
     1029                 :      * with unallocated things is moved back to the arena, we can see an arena
     1030                 :      * with some free cells before the cursor. The cursor is an indirect
    1031                 :      * pointer to allow for efficient list insertion at the cursor point and
    1032                 :      * other list manipulations.
    1033                 :      */
    1034                 :     struct ArenaList {
    1035                 :         ArenaHeader     *head;
    1036                 :         ArenaHeader     **cursor;
    1037                 : 
    1038         2300972 :         ArenaList() {
    1039         2300972 :             clear();
    1040         2300972 :         }
    1041                 : 
    1042         2457920 :         void clear() {
    1043         2457920 :             head = NULL;
    1044         2457920 :             cursor = &head;
    1045         2457920 :         }
    1046                 :     };
    1047                 : 
    1048                 :   private:
    1049                 :     /*
    1050                 :      * For each arena kind its free list is represented as the first span with
    1051                 :      * free things. Initially all the spans are initialized as empty. After we
    1052                 :      * find a new arena with available things we move its first free span into
     1053                 :      * the list and set the arena as fully allocated. This way we do not need
     1054                 :      * to update the arena header after the initial allocation. When starting
     1055                 :      * the GC we move the head of the list of spans back to the arena, but
     1056                 :      * only for arenas that were not fully allocated.
    1057                 :      */
    1058                 :     FreeSpan       freeLists[FINALIZE_LIMIT];
    1059                 : 
    1060                 :     ArenaList      arenaLists[FINALIZE_LIMIT];
    1061                 : 
    1062                 : #ifdef JS_THREADSAFE
    1063                 :     /*
    1064                 :      * The background finalization adds the finalized arenas to the list at
    1065                 :      * the *cursor position. backgroundFinalizeState controls the interaction
    1066                 :      * between the GC lock and the access to the list from the allocation
    1067                 :      * thread.
    1068                 :      *
     1069                 :      * BFS_DONE indicates that the finalization is not running or cannot
    1070                 :      * affect this arena list. The allocation thread can access the list
    1071                 :      * outside the GC lock.
    1072                 :      *
    1073                 :      * In BFS_RUN and BFS_JUST_FINISHED the allocation thread must take the
     1074                 :      * lock. The former indicates that the finalization is still running. The latter
     1075                 :      * signals that the finalization has just added finalized arenas to the list. In
     1076                 :      * that case the lock effectively serves as a read barrier to ensure that the
     1077                 :      * allocation thread sees all the writes done during the finalization.
    1078                 :      */
    1079                 :     enum BackgroundFinalizeState {
    1080                 :         BFS_DONE,
    1081                 :         BFS_RUN,
    1082                 :         BFS_JUST_FINISHED
    1083                 :     };
    1084                 : 
    1085                 :     volatile uintptr_t backgroundFinalizeState[FINALIZE_LIMIT];
    1086                 : #endif
    1087                 : 
    1088                 :   public:
    1089           41285 :     ArenaLists() {
    1090          866985 :         for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
    1091          825700 :             freeLists[i].initAsEmpty();
    1092                 : #ifdef JS_THREADSAFE
    1093          866985 :         for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
    1094          825700 :             backgroundFinalizeState[i] = BFS_DONE;
    1095                 : #endif
    1096           41285 :     }
    1097                 : 
    1098           41285 :     ~ArenaLists() {
    1099         1733970 :         for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
    1100                 : #ifdef JS_THREADSAFE
    1101                 :             /*
    1102                 :              * We can only call this during the shutdown after the last GC when
    1103                 :              * the background finalization is disabled.
    1104                 :              */
    1105          825700 :             JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE);
    1106                 : #endif
    1107          825700 :             ArenaHeader **headp = &arenaLists[i].head;
    1108         1538658 :             while (ArenaHeader *aheader = *headp) {
    1109          356479 :                 *headp = aheader->next;
    1110          356479 :                 aheader->chunk()->releaseArena(aheader);
    1111                 :             }
    1112                 :         }
    1113           41285 :     }
    1114                 : 
    1115         9942003 :     const FreeSpan *getFreeList(AllocKind thingKind) const {
    1116         9942003 :         return &freeLists[thingKind];
    1117                 :     }
    1118                 : 
    1119          352014 :     ArenaHeader *getFirstArena(AllocKind thingKind) const {
    1120          352014 :         return arenaLists[thingKind].head;
    1121                 :     }
    1122                 : 
    1123           45414 :     bool arenaListsAreEmpty() const {
    1124          518685 :         for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
    1125                 : #ifdef JS_THREADSAFE
    1126                 :             /*
     1127                 :              * The arena list cannot be treated as empty if the background
     1128                 :              * finalization is not yet done.
    1129                 :              */
    1130          496161 :             if (backgroundFinalizeState[i] != BFS_DONE)
    1131           22773 :                 return false;
    1132                 : #endif
    1133          473388 :             if (arenaLists[i].head)
    1134             117 :                 return false;
    1135                 :         }
    1136           22524 :         return true;
    1137                 :     }
    1138                 : 
    1139                 : #ifdef DEBUG
    1140             117 :     bool checkArenaListAllUnmarked() const {
    1141            2457 :         for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
    1142                 : # ifdef JS_THREADSAFE
    1143                 :             /* The background finalization must have stopped at this point. */
    1144            2550 :             JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE ||
    1145            2550 :                       backgroundFinalizeState[i] == BFS_JUST_FINISHED);
    1146                 : # endif
    1147            4588 :             for (ArenaHeader *aheader = arenaLists[i].head; aheader; aheader = aheader->next) {
    1148            2248 :                 if (!aheader->chunk()->bitmap.noBitsSet(aheader))
    1149               0 :                     return false;
    1150                 :             }
    1151                 :         }
    1152             117 :         return true;
    1153                 :     }
    1154                 : #endif
    1155                 : 
    1156                 : #ifdef JS_THREADSAFE
    1157                 :     bool doneBackgroundFinalize(AllocKind kind) const {
    1158                 :         return backgroundFinalizeState[kind] == BFS_DONE;
    1159                 :     }
    1160                 : #endif
    1161                 : 
    1162                 :     /*
    1163                 :      * Return the free list back to the arena so the GC finalization will not
     1164                 :      * run the finalizers over uninitialized bytes from free things.
    1165                 :      */
    1166           83697 :     void purge() {
    1167         1757637 :         for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
    1168         1673940 :             FreeSpan *headSpan = &freeLists[i];
    1169         1673940 :             if (!headSpan->isEmpty()) {
    1170          408799 :                 ArenaHeader *aheader = headSpan->arenaHeader();
    1171          408799 :                 aheader->setFirstFreeSpan(headSpan);
    1172          408799 :                 headSpan->initAsEmpty();
    1173                 :             }
    1174                 :         }
    1175           83697 :     }
    1176                 : 
    1177                 :     inline void prepareForIncrementalGC(JSRuntime *rt);
    1178                 : 
    1179                 :     /*
    1180                 :      * Temporarily copy the free list heads to the arenas so the code can see
    1181                 :      * the proper value in ArenaHeader::freeList when accessing the latter
    1182                 :      * outside the GC.
    1183                 :      */
    1184           95817 :     void copyFreeListsToArenas() {
    1185         2012157 :         for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
    1186         1916340 :             copyFreeListToArena(AllocKind(i));
    1187           95817 :     }
    1188                 : 
    1189         1947053 :     void copyFreeListToArena(AllocKind thingKind) {
    1190         1947053 :         FreeSpan *headSpan = &freeLists[thingKind];
    1191         1947053 :         if (!headSpan->isEmpty()) {
    1192          552172 :             ArenaHeader *aheader = headSpan->arenaHeader();
    1193          552172 :             JS_ASSERT(!aheader->hasFreeThings());
    1194          552172 :             aheader->setFirstFreeSpan(headSpan);
    1195                 :         }
    1196         1947053 :     }
    1197                 : 
    1198                 :     /*
    1199                 :      * Clear the free lists in arenas that were temporarily set there using
     1200                 :      * copyFreeListsToArenas().
    1201                 :      */
    1202           73293 :     void clearFreeListsInArenas() {
    1203         1539153 :         for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
    1204         1465860 :             clearFreeListInArena(AllocKind(i));
    1205           73293 :     }
    1206                 : 
    1207                 : 
    1208         1496573 :     void clearFreeListInArena(AllocKind kind) {
    1209         1496573 :         FreeSpan *headSpan = &freeLists[kind];
    1210         1496573 :         if (!headSpan->isEmpty()) {
    1211          143373 :             ArenaHeader *aheader = headSpan->arenaHeader();
    1212          143373 :             JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(headSpan));
    1213          143373 :             aheader->setAsFullyUsed();
    1214                 :         }
    1215         1496573 :     }
    1216                 : 
    1217                 :     /*
     1218                 :      * Check that the free list is either empty or was synchronized with the
     1219                 :      * arena using copyFreeListToArena().
    1220                 :      */
    1221          321665 :     bool isSynchronizedFreeList(AllocKind kind) {
    1222          321665 :         FreeSpan *headSpan = &freeLists[kind];
    1223          321665 :         if (headSpan->isEmpty())
    1224          252500 :             return true;
    1225           69165 :         ArenaHeader *aheader = headSpan->arenaHeader();
    1226           69165 :         if (aheader->hasFreeThings()) {
    1227                 :             /*
    1228                 :              * If the arena has a free list, it must be the same as one in
    1229                 :              * lists.
    1230                 :              */
    1231           38452 :             JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(headSpan));
    1232           38452 :             return true;
    1233                 :         }
    1234           30713 :         return false;
    1235                 :     }
    1236                 : 
    1237       214032028 :     JS_ALWAYS_INLINE void *allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
    1238       214032028 :         return freeLists[thingKind].allocate(thingSize);
    1239                 :     }
    1240                 : 
    1241                 :     static void *refillFreeList(JSContext *cx, AllocKind thingKind);
    1242                 : 
    1243           22524 :     void checkEmptyFreeLists() {
    1244                 : #ifdef DEBUG
    1245          473004 :         for (size_t i = 0; i < mozilla::ArrayLength(freeLists); ++i)
    1246          450480 :             JS_ASSERT(freeLists[i].isEmpty());
    1247                 : #endif
    1248           22524 :     }
    1249                 : 
    1250                 :     void checkEmptyFreeList(AllocKind kind) {
    1251                 :         JS_ASSERT(freeLists[kind].isEmpty());
    1252                 :     }
    1253                 : 
    1254                 :     void finalizeObjects(FreeOp *fop);
    1255                 :     void finalizeStrings(FreeOp *fop);
    1256                 :     void finalizeShapes(FreeOp *fop);
    1257                 :     void finalizeScripts(FreeOp *fop);
    1258                 : 
    1259                 : #ifdef JS_THREADSAFE
    1260                 :     static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead);
    1261                 : #endif
    1262                 : 
    1263                 :   private:
    1264                 :     inline void finalizeNow(FreeOp *fop, AllocKind thingKind);
    1265                 :     inline void finalizeLater(FreeOp *fop, AllocKind thingKind);
    1266                 : 
    1267                 :     inline void *allocateFromArena(JSCompartment *comp, AllocKind thingKind);
    1268                 : };
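                          : 
                          : /*
                          :  * Editor's sketch, not part of jsgc.h: ArenaLists::ArenaList above keeps its
                          :  * cursor as an ArenaHeader ** (a pointer to a link field) so a node can be
                          :  * spliced in at the cursor in constant time, without walking the list to
                          :  * find the predecessor. A generic standalone version of that idiom, using a
                          :  * hypothetical Node type.
                          :  */
                          : namespace sketch {
                          : 
                          : struct Node {
                          :     Node *next;
                          : };
                          : 
                          : struct List {
                          :     Node  *head;
                          :     Node **cursor;              // points at the link to update, initially &head
                          : 
                          :     List() : head(nullptr), cursor(&head) {}
                          : 
                          :     // Insert n at the cursor position in O(1); the cursor then points at the
                          :     // inserted node's link, so repeated inserts preserve order.
                          :     void insertAtCursor(Node *n) {
                          :         n->next = *cursor;
                          :         *cursor = n;
                          :         cursor = &n->next;
                          :     }
                          : };
                          : 
                          : } // namespace sketch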
    1269                 : 
    1270                 : /*
    1271                 :  * Initial allocation size for data structures holding chunks is set to hold
    1272                 :  * chunks with total capacity of 16MB to avoid buffer resizes during browser
    1273                 :  * startup.
    1274                 :  */
    1275                 : const size_t INITIAL_CHUNK_CAPACITY = 16 * 1024 * 1024 / ChunkSize;
    1276                 : 
     1277                 : /* The number of GC cycles an empty chunk can survive before being released. */
    1278                 : const size_t MAX_EMPTY_CHUNK_AGE = 4;
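                          : 
                          : /*
                          :  * Editor's sketch, not part of jsgc.h: INITIAL_CHUNK_CAPACITY above is simply
                          :  * 16 MiB divided by the chunk size, i.e. the number of chunk pointers worth
                          :  * reserving up front. Worked through with a hypothetical 1 MiB ChunkSize.
                          :  */
                          : #include <cstddef>
                          : 
                          : namespace sketch {
                          : 
                          : const size_t kChunkSize = 1024 * 1024;                          // hypothetical 1 MiB chunks
                          : const size_t kInitialCapacity = 16 * 1024 * 1024 / kChunkSize;  // = 16 entries
                          : 
                          : } // namespace sketch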
    1279                 : 
    1280                 : inline Cell *
    1281                 : AsCell(JSObject *obj)
    1282                 : {
    1283                 :     return reinterpret_cast<Cell *>(obj);
    1284                 : }
    1285                 : 
    1286                 : } /* namespace gc */
    1287                 : 
    1288                 : struct GCPtrHasher
    1289                 : {
    1290                 :     typedef void *Lookup;
    1291                 : 
    1292               0 :     static HashNumber hash(void *key) {
    1293               0 :         return HashNumber(uintptr_t(key) >> JS_GCTHING_ZEROBITS);
    1294                 :     }
    1295                 : 
    1296               0 :     static bool match(void *l, void *k) { return l == k; }
    1297                 : };
    1298                 : 
    1299                 : typedef HashMap<void *, uint32_t, GCPtrHasher, SystemAllocPolicy> GCLocks;
    1300                 : 
    1301                 : struct RootInfo {
    1302        19230101 :     RootInfo() {}
    1303           18837 :     RootInfo(const char *name, JSGCRootType type) : name(name), type(type) {}
    1304                 :     const char *name;
    1305                 :     JSGCRootType type;
    1306                 : };
    1307                 : 
    1308                 : typedef js::HashMap<void *,
    1309                 :                     RootInfo,
    1310                 :                     js::DefaultHasher<void *>,
    1311                 :                     js::SystemAllocPolicy> RootedValueMap;
    1312                 : 
    1313                 : } /* namespace js */
    1314                 : 
    1315                 : extern JS_FRIEND_API(JSGCTraceKind)
    1316                 : js_GetGCThingTraceKind(void *thing);
    1317                 : 
    1318                 : extern JSBool
    1319                 : js_InitGC(JSRuntime *rt, uint32_t maxbytes);
    1320                 : 
    1321                 : extern void
    1322                 : js_FinishGC(JSRuntime *rt);
    1323                 : 
    1324                 : extern JSBool
    1325                 : js_AddRoot(JSContext *cx, js::Value *vp, const char *name);
    1326                 : 
    1327                 : extern JSBool
    1328                 : js_AddGCThingRoot(JSContext *cx, void **rp, const char *name);
    1329                 : 
    1330                 : #ifdef DEBUG
    1331                 : extern void
    1332                 : js_DumpNamedRoots(JSRuntime *rt,
    1333                 :                   void (*dump)(const char *name, void *rp, JSGCRootType type, void *data),
    1334                 :                   void *data);
    1335                 : #endif
    1336                 : 
    1337                 : extern uint32_t
    1338                 : js_MapGCRoots(JSRuntime *rt, JSGCRootMapFun map, void *data);
    1339                 : 
    1340                 : /* Table of pointers with count valid members. */
    1341                 : typedef struct JSPtrTable {
    1342                 :     size_t      count;
    1343                 :     void        **array;
    1344                 : } JSPtrTable;
    1345                 : 
    1346                 : extern JSBool
    1347                 : js_LockGCThingRT(JSRuntime *rt, void *thing);
    1348                 : 
    1349                 : extern void
    1350                 : js_UnlockGCThingRT(JSRuntime *rt, void *thing);
    1351                 : 
    1352                 : extern JS_FRIEND_API(bool)
    1353                 : IsAboutToBeFinalized(const js::gc::Cell *thing);
    1354                 : 
    1355                 : extern bool
    1356                 : IsAboutToBeFinalized(const js::Value &value);
    1357                 : 
    1358                 : extern bool
    1359                 : js_IsAddressableGCThing(JSRuntime *rt, uintptr_t w, js::gc::AllocKind *thingKind, void **thing);
    1360                 : 
    1361                 : namespace js {
    1362                 : 
    1363                 : extern void
    1364                 : MarkCompartmentActive(js::StackFrame *fp);
    1365                 : 
    1366                 : extern void
    1367                 : TraceRuntime(JSTracer *trc);
    1368                 : 
    1369                 : extern JS_FRIEND_API(void)
    1370                 : MarkContext(JSTracer *trc, JSContext *acx);
    1371                 : 
    1372                 : /* Must be called with GC lock taken. */
    1373                 : extern void
    1374                 : TriggerGC(JSRuntime *rt, js::gcreason::Reason reason);
    1375                 : 
    1376                 : /* Must be called with GC lock taken. */
    1377                 : extern void
    1378                 : TriggerCompartmentGC(JSCompartment *comp, js::gcreason::Reason reason);
    1379                 : 
    1380                 : extern void
    1381                 : MaybeGC(JSContext *cx);
    1382                 : 
    1383                 : extern void
    1384                 : ShrinkGCBuffers(JSRuntime *rt);
    1385                 : 
    1386                 : extern void
    1387                 : PrepareForFullGC(JSRuntime *rt);
    1388                 : 
    1389                 : /*
    1390                 :  * Kinds of js_GC invocation.
    1391                 :  */
    1392                 : typedef enum JSGCInvocationKind {
    1393                 :     /* Normal invocation. */
    1394                 :     GC_NORMAL           = 0,
    1395                 : 
    1396                 :     /* Minimize GC triggers and release empty GC chunks right away. */
    1397                 :     GC_SHRINK             = 1
    1398                 : } JSGCInvocationKind;
    1399                 : 
    1400                 : extern void
    1401                 : GC(JSContext *cx, JSGCInvocationKind gckind, js::gcreason::Reason reason);
    1402                 : 
    1403                 : extern void
    1404                 : GCSlice(JSContext *cx, JSGCInvocationKind gckind, js::gcreason::Reason reason);
    1405                 : 
    1406                 : extern void
    1407                 : GCDebugSlice(JSContext *cx, bool limit, int64_t objCount);
    1408                 : 
    1409                 : extern void
    1410                 : PrepareForDebugGC(JSRuntime *rt);
    1411                 : 
    1412                 : } /* namespace js */
    1413                 : 
    1414                 : namespace js {
    1415                 : 
    1416                 : void
    1417                 : InitTracer(JSTracer *trc, JSRuntime *rt, JSTraceCallback callback);
    1418                 : 
    1419                 : #ifdef JS_THREADSAFE
    1420                 : 
    1421           18761 : class GCHelperThread {
    1422                 :     enum State {
    1423                 :         IDLE,
    1424                 :         SWEEPING,
    1425                 :         ALLOCATING,
    1426                 :         CANCEL_ALLOCATION,
    1427                 :         SHUTDOWN
    1428                 :     };
    1429                 : 
    1430                 :     /*
    1431                 :      * During the finalization we do not free immediately. Rather we add the
    1432                 :      * corresponding pointers to a buffer which we later release on a
     1433                 :      * separate thread.
    1434                 :      *
    1435                 :      * The buffer is implemented as a vector of 64K arrays of pointers, not as
    1436                 :      * a simple vector, to avoid realloc calls during the vector growth and to
    1437                 :      * not bloat the binary size of the inlined freeLater method. Any OOM
    1438                 :      * during buffer growth results in the pointer being freed immediately.
    1439                 :      */
    1440                 :     static const size_t FREE_ARRAY_SIZE = size_t(1) << 16;
    1441                 :     static const size_t FREE_ARRAY_LENGTH = FREE_ARRAY_SIZE / sizeof(void *);
    1442                 : 
    1443                 :     JSRuntime         *const rt;
    1444                 :     PRThread          *thread;
    1445                 :     PRCondVar         *wakeup;
    1446                 :     PRCondVar         *done;
    1447                 :     volatile State    state;
    1448                 : 
    1449                 :     JSContext         *finalizationContext;
    1450                 :     bool              shrinkFlag;
    1451                 : 
    1452                 :     Vector<void **, 16, js::SystemAllocPolicy> freeVector;
    1453                 :     void            **freeCursor;
    1454                 :     void            **freeCursorEnd;
    1455                 : 
    1456                 :     Vector<js::gc::ArenaHeader *, 64, js::SystemAllocPolicy> finalizeVector;
    1457                 : 
    1458                 :     bool    backgroundAllocation;
    1459                 : 
    1460                 :     friend struct js::gc::ArenaLists;
    1461                 : 
    1462                 :     JS_FRIEND_API(void)
    1463                 :     replenishAndFreeLater(void *ptr);
    1464                 : 
    1465           12426 :     static void freeElementsAndArray(void **array, void **end) {
    1466           12426 :         JS_ASSERT(array <= end);
    1467          101742 :         for (void **p = array; p != end; ++p)
    1468           89316 :             js::Foreground::free_(*p);
    1469           12426 :         js::Foreground::free_(array);
    1470           12426 :     }
    1471                 : 
    1472                 :     static void threadMain(void* arg);
    1473                 :     void threadLoop();
    1474                 : 
    1475                 :     /* Must be called with the GC lock taken. */
    1476                 :     void doSweep();
    1477                 : 
    1478                 :   public:
    1479           18761 :     GCHelperThread(JSRuntime *rt)
    1480                 :       : rt(rt),
    1481                 :         thread(NULL),
    1482                 :         wakeup(NULL),
    1483                 :         done(NULL),
    1484                 :         state(IDLE),
    1485                 :         finalizationContext(NULL),
    1486                 :         shrinkFlag(false),
    1487                 :         freeCursor(NULL),
    1488                 :         freeCursorEnd(NULL),
    1489           18761 :         backgroundAllocation(true)
    1490           18761 :     { }
    1491                 : 
    1492                 :     bool init();
    1493                 :     void finish();
    1494                 : 
    1495                 :     /* Must be called with the GC lock taken. */
    1496                 :     void startBackgroundSweep(JSContext *cx, bool shouldShrink);
    1497                 : 
    1498                 :     /* Must be called with the GC lock taken. */
    1499                 :     void startBackgroundShrink();
    1500                 : 
    1501                 :     /* Must be called with the GC lock taken. */
    1502                 :     void waitBackgroundSweepEnd();
    1503                 : 
    1504                 :     /* Must be called with the GC lock taken. */
    1505                 :     void waitBackgroundSweepOrAllocEnd();
    1506                 : 
    1507                 :     /* Must be called with the GC lock taken. */
    1508                 :     inline void startBackgroundAllocationIfIdle();
    1509                 : 
    1510           42904 :     bool canBackgroundAllocate() const {
    1511           42904 :         return backgroundAllocation;
    1512                 :     }
    1513                 : 
    1514                 :     void disableBackgroundAllocation() {
    1515                 :         backgroundAllocation = false;
    1516                 :     }
    1517                 : 
    1518                 :     PRThread *getThread() const {
    1519                 :         return thread;
    1520                 :     }
    1521                 : 
    1522                 :     /*
     1523                 :      * When called outside the GC lock this may return true even though the
     1524                 :      * sweeping has already finished.
    1525                 :      */
    1526         4125546 :     bool sweeping() const {
    1527         4125546 :         return state == SWEEPING;
    1528                 :     }
    1529                 : 
    1530                 :     bool shouldShrink() const {
    1531                 :         JS_ASSERT(sweeping());
    1532                 :         return shrinkFlag;
    1533                 :     }
    1534                 : 
    1535           89316 :     void freeLater(void *ptr) {
    1536           89316 :         JS_ASSERT(!sweeping());
    1537           89316 :         if (freeCursor != freeCursorEnd)
    1538           76890 :             *freeCursor++ = ptr;
    1539                 :         else
    1540           12426 :             replenishAndFreeLater(ptr);
    1541           89316 :     }
    1542                 : 
    1543                 :     /* Must be called with the GC lock taken. */
    1544                 :     bool prepareForBackgroundSweep();
    1545                 : };
    1546                 : 
    1547                 : #endif /* JS_THREADSAFE */
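                          : 
                          : /*
                          :  * Editor's sketch, not part of jsgc.h: GCHelperThread::freeLater above
                          :  * batches pointers into fixed-size arrays and falls back to
                          :  * replenishAndFreeLater (and, on OOM, to freeing immediately) when the
                          :  * current array is full; keeping the arrays in a vector means growing the
                          :  * buffer never moves already-recorded pointers. Below is a single-threaded
                          :  * standalone approximation of that pattern; the class name and sizes are
                          :  * made up and there is no background thread.
                          :  */
                          : #include <cstdlib>
                          : #include <vector>
                          : 
                          : namespace sketch {
                          : 
                          : class DeferredFree {
                          :     static const size_t kArrayLength = 1024;    // hypothetical segment length
                          : 
                          :     std::vector<void **> segments;              // each entry is a malloc'ed pointer array
                          :     void **cursor = nullptr;                    // next free slot in the last segment
                          :     void **cursorEnd = nullptr;                 // one past the last slot
                          : 
                          :     bool replenish() {
                          :         void **seg = static_cast<void **>(malloc(kArrayLength * sizeof(void *)));
                          :         if (!seg)
                          :             return false;
                          :         segments.push_back(seg);
                          :         cursor = seg;
                          :         cursorEnd = seg + kArrayLength;
                          :         return true;
                          :     }
                          : 
                          :   public:
                          :     // Queue a pointer for later release; if the buffer cannot grow, free the
                          :     // pointer immediately instead (mirroring the OOM fallback described above).
                          :     void freeLater(void *ptr) {
                          :         if (cursor == cursorEnd && !replenish()) {
                          :             free(ptr);
                          :             return;
                          :         }
                          :         *cursor++ = ptr;
                          :     }
                          : 
                          :     // Release every queued pointer and the segment arrays themselves.
                          :     void flush() {
                          :         for (size_t i = 0; i < segments.size(); ++i) {
                          :             void **seg = segments[i];
                          :             void **end = (i + 1 == segments.size()) ? cursor : seg + kArrayLength;
                          :             for (void **p = seg; p != end; ++p)
                          :                 free(*p);
                          :             free(seg);
                          :         }
                          :         segments.clear();
                          :         cursor = cursorEnd = nullptr;
                          :     }
                          : };
                          : 
                          : } // namespace sketch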
    1548                 : 
    1549                 : struct GCChunkHasher {
    1550                 :     typedef gc::Chunk *Lookup;
    1551                 : 
    1552                 :     /*
     1553                 :      * Strip the always-zero low bits of the chunk address for better
     1554                 :      * distribution after the hash table multiplies by the golden ratio.
    1555                 :      */
    1556       206734384 :     static HashNumber hash(gc::Chunk *chunk) {
    1557       206734384 :         JS_ASSERT(!(uintptr_t(chunk) & gc::ChunkMask));
    1558       206734384 :         return HashNumber(uintptr_t(chunk) >> gc::ChunkShift);
    1559                 :     }
    1560                 : 
    1561        11599634 :     static bool match(gc::Chunk *k, gc::Chunk *l) {
    1562        11599634 :         JS_ASSERT(!(uintptr_t(k) & gc::ChunkMask));
    1563        11599634 :         JS_ASSERT(!(uintptr_t(l) & gc::ChunkMask));
    1564        11599634 :         return k == l;
    1565                 :     }
    1566                 : };
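                          : 
                          : /*
                          :  * Editor's sketch, not part of jsgc.h: chunks are ChunkSize-aligned, so the
                          :  * low ChunkShift bits of a chunk pointer are always zero; GCChunkHasher above
                          :  * strips them so the hash table's multiplicative (golden-ratio) scrambling is
                          :  * not fed bits that carry no information. A standalone illustration with a
                          :  * hypothetical 20-bit shift and the Fibonacci multiply done inline.
                          :  */
                          : #include <cstdint>
                          : 
                          : namespace sketch {
                          : 
                          : const uintptr_t kChunkShift  = 20;           // hypothetical 1 MiB chunk alignment
                          : const uint32_t  kGoldenRatio = 0x9E3779B9u;  // 2^32 / phi
                          : 
                          : // Hash a chunk-aligned pointer: drop the always-zero low bits first, then
                          : // scramble so that consecutive chunks spread across hash buckets.
                          : inline uint32_t hashChunkPointer(const void *chunk) {
                          :     uintptr_t addr = reinterpret_cast<uintptr_t>(chunk);
                          :     return uint32_t(addr >> kChunkShift) * kGoldenRatio;
                          : }
                          : 
                          : } // namespace sketch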
    1567                 : 
    1568                 : typedef HashSet<js::gc::Chunk *, GCChunkHasher, SystemAllocPolicy> GCChunkSet;
    1569                 : 
    1570                 : template<class T>
    1571                 : struct MarkStack {
    1572                 :     T *stack;
    1573                 :     T *tos;
    1574                 :     T *limit;
    1575                 : 
    1576                 :     T *ballast;
    1577                 :     T *ballastLimit;
    1578                 : 
    1579                 :     size_t sizeLimit;
    1580                 : 
    1581           18761 :     MarkStack(size_t sizeLimit)
    1582                 :       : stack(NULL),
    1583                 :         tos(NULL),
    1584                 :         limit(NULL),
    1585                 :         ballast(NULL),
    1586                 :         ballastLimit(NULL),
    1587           18761 :         sizeLimit(sizeLimit) { }
    1588                 : 
    1589           18761 :     ~MarkStack() {
    1590           18761 :         if (stack != ballast)
    1591               0 :             js_free(stack);
    1592           18761 :         js_free(ballast);
    1593           18761 :     }
    1594                 : 
    1595           18761 :     bool init(size_t ballastcap) {
    1596           18761 :         JS_ASSERT(!stack);
    1597                 : 
    1598           18761 :         if (ballastcap == 0)
    1599               0 :             return true;
    1600                 : 
    1601           18761 :         ballast = (T *)js_malloc(sizeof(T) * ballastcap);
    1602           18761 :         if (!ballast)
    1603               0 :             return false;
    1604           18761 :         ballastLimit = ballast + ballastcap;
    1605           18761 :         initFromBallast();
    1606           18761 :         return true;
    1607                 :     }
    1608                 : 
    1609           60071 :     void initFromBallast() {
    1610           60071 :         stack = ballast;
    1611           60071 :         limit = ballastLimit;
    1612           60071 :         if (size_t(limit - stack) > sizeLimit)
    1613               0 :             limit = stack + sizeLimit;
    1614           60071 :         tos = stack;
    1615           60071 :     }
    1616                 : 
    1617               0 :     void setSizeLimit(size_t size) {
    1618               0 :         JS_ASSERT(isEmpty());
    1619                 : 
    1620               0 :         sizeLimit = size;
    1621               0 :         reset();
    1622               0 :     }
    1623                 : 
    1624         4120959 :     bool push(T item) {
    1625         4120959 :         if (tos == limit) {
    1626              18 :             if (!enlarge())
    1627               0 :                 return false;
    1628                 :         }
    1629         4120959 :         JS_ASSERT(tos < limit);
    1630         4120959 :         *tos++ = item;
    1631         4120959 :         return true;
    1632                 :     }
    1633                 : 
    1634        11472047 :     bool push(T item1, T item2, T item3) {
    1635        11472047 :         T *nextTos = tos + 3;
    1636        11472047 :         if (nextTos > limit) {
    1637               0 :             if (!enlarge())
    1638               0 :                 return false;
    1639               0 :             nextTos = tos + 3;
    1640                 :         }
    1641        11472047 :         JS_ASSERT(nextTos <= limit);
    1642        11472047 :         tos[0] = item1;
    1643        11472047 :         tos[1] = item2;
    1644        11472047 :         tos[2] = item3;
    1645        11472047 :         tos = nextTos;
    1646        11472047 :         return true;
    1647                 :     }
    1648                 : 
    1649        54550636 :     bool isEmpty() const {
    1650        54550636 :         return tos == stack;
    1651                 :     }
    1652                 : 
    1653        38533602 :     T pop() {
    1654        38533602 :         JS_ASSERT(!isEmpty());
    1655        38533602 :         return *--tos;
    1656                 :     }
    1657                 : 
    1658         5784525 :     ptrdiff_t position() const {
    1659         5784525 :         return tos - stack;
    1660                 :     }
    1661                 : 
    1662           41310 :     void reset() {
    1663           41310 :         if (stack != ballast)
    1664              18 :             js_free(stack);
    1665           41310 :         initFromBallast();
    1666           41310 :         JS_ASSERT(stack == ballast);
    1667           41310 :     }
    1668                 : 
    1669              18 :     bool enlarge() {
    1670              18 :         size_t tosIndex = tos - stack;
    1671              18 :         size_t cap = limit - stack;
    1672              18 :         if (cap == sizeLimit)
    1673               0 :             return false;
    1674              18 :         size_t newcap = cap * 2;
    1675              18 :         if (newcap == 0)
    1676               0 :             newcap = 32;
    1677              18 :         if (newcap > sizeLimit)
    1678               0 :             newcap = sizeLimit;
    1679                 : 
    1680                 :         T *newStack;
    1681              18 :         if (stack == ballast) {
    1682              18 :             newStack = (T *)js_malloc(sizeof(T) * newcap);
    1683              18 :             if (!newStack)
    1684               0 :                 return false;
    1685          589860 :             for (T *src = stack, *dst = newStack; src < tos; )
    1686          589824 :                 *dst++ = *src++;
    1687                 :         } else {
    1688               0 :             newStack = (T *)js_realloc(stack, sizeof(T) * newcap);
    1689               0 :             if (!newStack)
    1690               0 :                 return false;
    1691                 :         }
    1692              18 :         stack = newStack;
    1693              18 :         tos = stack + tosIndex;
    1694              18 :         limit = newStack + newcap;
    1695              18 :         return true;
    1696                 :     }
    1697                 : 
    1698               0 :     size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const {
    1699               0 :         size_t n = 0;
    1700               0 :         if (stack != ballast)
    1701               0 :             n += mallocSizeOf(stack);
    1702               0 :         n += mallocSizeOf(ballast);
    1703               0 :         return n;
    1704                 :     }
    1705                 : };
    1706                 : 
    1707                 : /*
    1708                 :  * This class records how much work has been done in a given GC slice, so that
    1709                 :  * we can return before pausing for too long. Some slices are allowed to run for
    1710                 :  * unlimited time, and others are bounded. To reduce the number of gettimeofday
    1711                 :  * calls, we only check the time every 1000 operations.
    1712                 :  */
    1713                 : struct SliceBudget {
    1714                 :     int64_t deadline; /* in microseconds */
    1715                 :     intptr_t counter;
    1716                 : 
    1717                 :     static const intptr_t CounterReset = 1000;
    1718                 : 
    1719                 :     static const int64_t Unlimited = 0;
    1720                 :     static int64_t TimeBudget(int64_t millis);
    1721                 :     static int64_t WorkBudget(int64_t work);
    1722                 : 
    1723                 :     /* Equivalent to SliceBudget(UnlimitedBudget). */
    1724                 :     SliceBudget();
    1725                 : 
    1726                 :     /* Instantiate as SliceBudget(Time/WorkBudget(n)). */
    1727                 :     SliceBudget(int64_t budget);
    1728                 : 
    1729               0 :     void reset() {
    1730               0 :         deadline = INT64_MAX;
    1731               0 :         counter = INTPTR_MAX;
    1732               0 :     }
    1733                 : 
    1734        69485312 :     void step(intptr_t amt = 1) {
    1735        69485312 :         counter -= amt;
    1736        69485312 :     }
    1737                 : 
    1738                 :     bool checkOverBudget();
    1739                 : 
    1740        84729706 :     bool isOverBudget() {
    1741        84729706 :         if (counter >= 0)
    1742        84729607 :             return false;
    1743              99 :         return checkOverBudget();
    1744                 :     }
    1745                 : };
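                          : 
                          : /*
                          :  * Editor's sketch, not part of jsgc.h: the split between counter and deadline
                          :  * above keeps the hot path to a single integer decrement, consulting the
                          :  * clock only when the counter underflows. A standalone work loop of the same
                          :  * shape; the names, the refill-on-underflow policy, and the use of
                          :  * std::chrono are assumptions of this sketch.
                          :  */
                          : #include <chrono>
                          : #include <cstdint>
                          : 
                          : namespace sketch {
                          : 
                          : typedef std::chrono::steady_clock Clock;
                          : 
                          : struct Budget {
                          :     Clock::time_point deadline;
                          :     intptr_t counter;
                          :     static const intptr_t kCounterReset = 1000;
                          : 
                          :     explicit Budget(std::chrono::milliseconds slice)
                          :       : deadline(Clock::now() + slice), counter(kCounterReset) {}
                          : 
                          :     void step(intptr_t amt = 1) { counter -= amt; }
                          : 
                          :     // Only consult the clock when the cheap counter runs out.
                          :     bool isOverBudget() {
                          :         if (counter >= 0)
                          :             return false;
                          :         if (Clock::now() >= deadline)
                          :             return true;
                          :         counter = kCounterReset;            // refill and keep going
                          :         return false;
                          :     }
                          : };
                          : 
                          : // Process items until either the work runs out or the slice budget does.
                          : template <typename Work>
                          : bool runSlice(Work &work, Budget &budget) {
                          :     while (work.hasMore()) {
                          :         work.doOne();
                          :         budget.step();
                          :         if (budget.isOverBudget())
                          :             return false;                   // yield; resume in a later slice
                          :     }
                          :     return true;                            // finished within the budget
                          : }
                          : 
                          : } // namespace sketch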
    1746                 : 
    1747                 : static const size_t MARK_STACK_LENGTH = 32768;
    1748                 : 
    1749           18761 : struct GCMarker : public JSTracer {
    1750                 :   private:
    1751                 :     /*
    1752                 :      * We use a common mark stack to mark GC things of different types and use
    1753                 :      * the explicit tags to distinguish them when it cannot be deduced from
    1754                 :      * the context of push or pop operation.
    1755                 :      */
    1756                 :     enum StackTag {
    1757                 :         ValueArrayTag,
    1758                 :         ObjectTag,
    1759                 :         TypeTag,
    1760                 :         XmlTag,
    1761                 :         SavedValueArrayTag,
    1762                 :         LastTag = SavedValueArrayTag
    1763                 :     };
    1764                 : 
    1765                 :     static const uintptr_t StackTagMask = 7;
    1766                 : 
    1767                 :     static void staticAsserts() {
    1768                 :         JS_STATIC_ASSERT(StackTagMask >= uintptr_t(LastTag));
    1769                 :         JS_STATIC_ASSERT(StackTagMask <= gc::Cell::CellMask);
    1770                 :     }
    1771                 : 
    1772                 :   public:
    1773                 :     explicit GCMarker();
    1774                 :     bool init();
    1775                 : 
    1776               0 :     void setSizeLimit(size_t size) { stack.setSizeLimit(size); }
    1777               0 :     size_t sizeLimit() const { return stack.sizeLimit; }
    1778                 : 
    1779                 :     void start(JSRuntime *rt);
    1780                 :     void stop();
    1781                 :     void reset();
    1782                 : 
    1783         3697803 :     void pushObject(JSObject *obj) {
    1784         3697803 :         pushTaggedPtr(ObjectTag, obj);
    1785         3697803 :     }
    1786                 : 
    1787          422962 :     void pushType(types::TypeObject *type) {
    1788          422962 :         pushTaggedPtr(TypeTag, type);
    1789          422962 :     }
    1790                 : 
    1791             194 :     void pushXML(JSXML *xml) {
    1792             194 :         pushTaggedPtr(XmlTag, xml);
    1793             194 :     }
    1794                 : 
    1795       112185007 :     uint32_t getMarkColor() const {
    1796       112185007 :         return color;
    1797                 :     }
    1798                 : 
    1799                 :     /*
    1800                 :      * The only valid color transition during a GC is from black to gray. It is
    1801                 :      * wrong to switch the mark color from gray to black. The reason is that the
    1802                 :      * cycle collector depends on the invariant that there are no black to gray
    1803                 :      * edges in the GC heap. This invariant lets the CC not trace through black
    1804                 :      * objects. If this invariant is violated, the cycle collector may free
    1805                 :      * objects that are still reachable.
    1806                 :      */
    1807           38436 :     void setMarkColorGray() {
    1808           38436 :         JS_ASSERT(isDrained());
    1809           38436 :         JS_ASSERT(color == gc::BLACK);
    1810           38436 :         color = gc::GRAY;
    1811           38436 :     }
    1812                 : 
    1813                 :     inline void delayMarkingArena(gc::ArenaHeader *aheader);
    1814                 :     void delayMarkingChildren(const void *thing);
    1815                 :     void markDelayedChildren(gc::ArenaHeader *aheader);
    1816                 :     bool markDelayedChildren(SliceBudget &budget);
    1817           77201 :     bool hasDelayedChildren() const {
    1818           77201 :         return !!unmarkedArenaStackTop;
    1819                 :     }
    1820                 : 
    1821          348807 :     bool isDrained() {
    1822          348807 :         return isMarkStackEmpty() && !unmarkedArenaStackTop;
    1823                 :     }
    1824                 : 
    1825                 :     bool drainMarkStack(SliceBudget &budget);
    1826                 : 
    1827                 :     /*
    1828                 :      * Gray marking must be done after all black marking is complete. However,
    1829                 :      * we do not have write barriers on XPConnect roots. Therefore, XPConnect
    1830                 :      * roots must be accumulated in the first slice of incremental GC. We
    1831                 :      * accumulate these roots in the GrayRootMarker and then mark them later,
    1832                 :      * after black marking is complete. This accumulation can fail, but in that
    1833                 :      * case we switch to non-incremental GC.
    1834                 :      */
    1835                 :     bool hasBufferedGrayRoots() const;
    1836                 :     void startBufferingGrayRoots();
    1837                 :     void endBufferingGrayRoots();
    1838                 :     void markBufferedGrayRoots();
    1839                 : 
    1840                 :     static void GrayCallback(JSTracer *trc, void **thing, JSGCTraceKind kind);
    1841                 : 
    1842                 :     size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const;
    1843                 : 
    1844                 :     MarkStack<uintptr_t> stack;
    1845                 : 
    1846                 :   private:
    1847                 : #ifdef DEBUG
    1848                 :     void checkCompartment(void *p);
    1849                 : #else
    1850                 :     void checkCompartment(void *p) {}
    1851                 : #endif
    1852                 : 
    1853         4120959 :     void pushTaggedPtr(StackTag tag, void *ptr) {
    1854         4120959 :         checkCompartment(ptr);
    1855         4120959 :         uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
    1856         4120959 :         JS_ASSERT(!(addr & StackTagMask));
    1857         4120959 :         if (!stack.push(addr | uintptr_t(tag)))
    1858               0 :             delayMarkingChildren(ptr);
    1859         4120959 :     }
    1860                 : 
    1861        18604267 :     void pushValueArray(JSObject *obj, void *start, void *end) {
    1862        18604267 :         checkCompartment(obj);
    1863                 : 
    1864        18604267 :         if (start == end)
    1865         7132220 :             return;
    1866                 : 
    1867        11472047 :         JS_ASSERT(start <= end);
    1868        11472047 :         uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
    1869        11472047 :         uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
    1870        11472047 :         uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
    1871                 : 
    1872                 :         /*
    1873                 :          * Push in the reverse order so obj will be on top. If we cannot push
    1874                 :          * the array, we trigger delay marking for the whole object.
    1875                 :          */
    1876        11472047 :         if (!stack.push(endAddr, startAddr, tagged))
    1877               0 :             delayMarkingChildren(obj);
    1878                 :     }
    1879                 : 
    1880          350253 :     bool isMarkStackEmpty() {
    1881          350253 :         return stack.isEmpty();
    1882                 :     }
    1883                 : 
    1884                 :     bool restoreValueArray(JSObject *obj, void **vpp, void **endp);
    1885                 :     void saveValueRanges();
    1886                 :     inline void processMarkStackTop(SliceBudget &budget);
    1887                 : 
    1888                 :     void appendGrayRoot(void *thing, JSGCTraceKind kind);
    1889                 : 
    1890                 :     /* The color is only applied to objects, functions and xml. */
    1891                 :     uint32_t color;
    1892                 : 
    1893                 :     DebugOnly<bool> started;
    1894                 : 
    1895                 :     /* Pointer to the top of the stack of arenas we are delaying marking on. */
    1896                 :     js::gc::ArenaHeader *unmarkedArenaStackTop;
    1897                 :     /* Count of arenas that are currently in the stack. */
    1898                 :     DebugOnly<size_t> markLaterArenas;
    1899                 : 
    1900               0 :     struct GrayRoot {
    1901                 :         void *thing;
    1902                 :         JSGCTraceKind kind;
    1903                 : #ifdef DEBUG
    1904                 :         JSTraceNamePrinter debugPrinter;
    1905                 :         const void *debugPrintArg;
    1906                 :         size_t debugPrintIndex;
    1907                 : #endif
    1908                 : 
    1909               0 :         GrayRoot(void *thing, JSGCTraceKind kind)
    1910               0 :           : thing(thing), kind(kind) {}
    1911                 :     };
    1912                 : 
    1913                 :     bool grayFailed;
    1914                 :     Vector<GrayRoot, 0, SystemAllocPolicy> grayRoots;
    1915                 : };
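                          : 
                          : /*
                          :  * Editor's sketch, not part of jsgc.h: GCMarker::pushTaggedPtr above relies
                          :  * on GC things being aligned so that the low three bits of their addresses
                          :  * are always zero (StackTagMask <= Cell::CellMask); a small tag is OR-ed into
                          :  * those bits on push and masked back out when the entry is popped. A
                          :  * standalone round trip, assuming 8-byte alignment.
                          :  */
                          : #include <cassert>
                          : #include <cstdint>
                          : 
                          : namespace sketch {
                          : 
                          : const uintptr_t kTagMask = 7;       // three low bits available under 8-byte alignment
                          : 
                          : enum Tag { ObjectTag = 0, TypeTag = 1, ValueArrayTag = 2 };
                          : 
                          : // Pack a sufficiently aligned pointer and a small tag into one word.
                          : inline uintptr_t tagPointer(void *p, Tag tag) {
                          :     uintptr_t addr = reinterpret_cast<uintptr_t>(p);
                          :     assert((addr & kTagMask) == 0); // alignment guarantees the low bits are zero
                          :     assert(uintptr_t(tag) <= kTagMask);
                          :     return addr | uintptr_t(tag);
                          : }
                          : 
                          : inline Tag tagOf(uintptr_t word) {
                          :     return Tag(word & kTagMask);
                          : }
                          : 
                          : inline void *pointerOf(uintptr_t word) {
                          :     return reinterpret_cast<void *>(word & ~kTagMask);
                          : }
                          : 
                          : } // namespace sketch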
    1916                 : 
    1917                 : void
    1918                 : SetMarkStackLimit(JSRuntime *rt, size_t limit);
    1919                 : 
    1920                 : void
    1921                 : MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end);
    1922                 : 
    1923                 : typedef void (*IterateChunkCallback)(JSRuntime *rt, void *data, gc::Chunk *chunk);
    1924                 : typedef void (*IterateArenaCallback)(JSRuntime *rt, void *data, gc::Arena *arena,
    1925                 :                                      JSGCTraceKind traceKind, size_t thingSize);
    1926                 : typedef void (*IterateCellCallback)(JSRuntime *rt, void *data, void *thing,
    1927                 :                                     JSGCTraceKind traceKind, size_t thingSize);
    1928                 : 
    1929                 : /*
    1930                 :  * This function calls |compartmentCallback| on every compartment,
    1931                 :  * |arenaCallback| on every in-use arena, and |cellCallback| on every in-use
    1932                 :  * cell in the GC heap.
    1933                 :  */
    1934                 : extern JS_FRIEND_API(void)
    1935                 : IterateCompartmentsArenasCells(JSRuntime *rt, void *data,
    1936                 :                                JSIterateCompartmentCallback compartmentCallback,
    1937                 :                                IterateArenaCallback arenaCallback,
    1938                 :                                IterateCellCallback cellCallback);
    1939                 : 
    1940                 : /*
    1941                 :  * Invoke chunkCallback on every in-use chunk.
    1942                 :  */
    1943                 : extern JS_FRIEND_API(void)
    1944                 : IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback);
    1945                 : 
    1946                 : /*
    1947                 :  * Invoke cellCallback on every in-use object of the specified thing kind for
    1948                 :  * the given compartment or for all compartments if it is null.
    1949                 :  */
    1950                 : extern JS_FRIEND_API(void)
    1951                 : IterateCells(JSRuntime *rt, JSCompartment *compartment, gc::AllocKind thingKind,
    1952                 :              void *data, IterateCellCallback cellCallback);
    1953                 : 
    1954                 : } /* namespace js */
    1955                 : 
    1956                 : extern void
    1957                 : js_FinalizeStringRT(JSRuntime *rt, JSString *str);
    1958                 : 
    1959                 : /*
    1960                 :  * Macro to test if a traversal is the marking phase of the GC.
    1961                 :  */
    1962                 : #define IS_GC_MARKING_TRACER(trc) \
    1963                 :     ((trc)->callback == NULL || (trc)->callback == GCMarker::GrayCallback)
    1964                 : 
    1965                 : namespace js {
    1966                 : namespace gc {
    1967                 : 
    1968                 : JSCompartment *
    1969                 : NewCompartment(JSContext *cx, JSPrincipals *principals);
    1970                 : 
    1971                 : /* Tries to run a GC no matter what (used for GC zeal). */
    1972                 : void
    1973                 : RunDebugGC(JSContext *cx);
    1974                 : 
    1975                 : void
    1976                 : SetDeterministicGC(JSContext *cx, bool enabled);
    1977                 : 
    1978                 : #if defined(JSGC_ROOT_ANALYSIS) && defined(DEBUG) && !defined(JS_THREADSAFE)
    1979                 : /* Overwrites stack references to GC things which have not been rooted. */
    1980                 : void CheckStackRoots(JSContext *cx);
    1981                 : 
    1982                 : inline void MaybeCheckStackRoots(JSContext *cx) { CheckStackRoots(cx); }
    1983                 : #else
    1984       199062626 : inline void MaybeCheckStackRoots(JSContext *cx) {}
    1985                 : #endif
    1986                 : 
    1987                 : const int ZealPokeValue = 1;
    1988                 : const int ZealAllocValue = 2;
    1989                 : const int ZealFrameGCValue = 3;
    1990                 : const int ZealVerifierValue = 4;
    1991                 : const int ZealFrameVerifierValue = 5;
    1992                 : 
    1993                 : #ifdef JS_GC_ZEAL
    1994                 : 
    1995                 : /* Check that write barriers have been used correctly. See jsgc.cpp. */
    1996                 : void
    1997                 : VerifyBarriers(JSContext *cx);
    1998                 : 
    1999                 : void
    2000                 : MaybeVerifyBarriers(JSContext *cx, bool always = false);
    2001                 : 
    2002                 : #else
    2003                 : 
    2004                 : static inline void
    2005                 : VerifyBarriers(JSContext *cx)
    2006                 : {
    2007                 : }
    2008                 : 
    2009                 : static inline void
    2010                 : MaybeVerifyBarriers(JSContext *cx, bool always = false)
    2011                 : {
    2012                 : }
    2013                 : 
    2014                 : #endif
    2015                 : 
    2016                 : } /* namespace gc */
    2017                 : 
    2018                 : static inline JSCompartment *
    2019                 : GetObjectCompartment(JSObject *obj) { return reinterpret_cast<js::gc::Cell *>(obj)->compartment(); }
    2020                 : 
    2021                 : } /* namespace js */
    2022                 : 
    2023                 : #endif /* jsgc_h___ */

Generated by: LCOV version 1.7