
Diff of /trunk/js/jsgc.cpp


revision 399 by siliconforks, Tue Dec 9 03:37:47 2008 UTC (left column) vs. revision 460 by siliconforks, Sat Sep 26 23:15:22 2009 UTC (right column)
# Line 74  Line 74 
74  #include "jsparse.h"  #include "jsparse.h"
75  #include "jsscope.h"  #include "jsscope.h"
76  #include "jsscript.h"  #include "jsscript.h"
77    #include "jsstaticcheck.h"
78  #include "jsstr.h"  #include "jsstr.h"
79  #include "jstracer.h"  #include "jstracer.h"
80    
# Line 123  Line 124 
124  #   if !defined(MAP_ANONYMOUS) && defined(MAP_ANON)  #   if !defined(MAP_ANONYMOUS) && defined(MAP_ANON)
125  #    define MAP_ANONYMOUS MAP_ANON  #    define MAP_ANONYMOUS MAP_ANON
126  #   endif  #   endif
127    #   if !defined(MAP_ANONYMOUS)
128    #    define MAP_ANONYMOUS 0
129    #   endif
130  #  else  #  else
131  #   if JS_GC_USE_MMAP  #   if JS_GC_USE_MMAP
132  #    error "JS_GC_USE_MMAP is set when mmap is not available"  #    error "JS_GC_USE_MMAP is set when mmap is not available"
# Line 132  Line 136 
136  #endif  #endif
137    
138  /*  /*
139     * Check JSTempValueUnion has the size of jsval and void * so we can
140     * reinterpret jsval as void* GC-thing pointer and use JSTVU_SINGLE for
141     * different GC-things.
142     */
143    JS_STATIC_ASSERT(sizeof(JSTempValueUnion) == sizeof(jsval));
144    JS_STATIC_ASSERT(sizeof(JSTempValueUnion) == sizeof(void *));
145    
146    
147    /*
148     * Check that JSTRACE_XML follows JSTRACE_OBJECT, JSTRACE_DOUBLE and
149     * JSTRACE_STRING.
150     */
151    JS_STATIC_ASSERT(JSTRACE_OBJECT == 0);
152    JS_STATIC_ASSERT(JSTRACE_DOUBLE == 1);
153    JS_STATIC_ASSERT(JSTRACE_STRING == 2);
154    JS_STATIC_ASSERT(JSTRACE_XML    == 3);
155    
156    /*
157     * JS_IS_VALID_TRACE_KIND assumes that JSTRACE_STRING is the last non-xml
158     * trace kind when JS_HAS_XML_SUPPORT is false.
159     */
160    JS_STATIC_ASSERT(JSTRACE_STRING + 1 == JSTRACE_XML);
161    
162    /*
163     * The number of used GCX-types must stay within GCX_LIMIT.
164     */
165    JS_STATIC_ASSERT(GCX_NTYPES <= GCX_LIMIT);
166    
167    
168    /*
169     * Check that we can reinterpret double as JSGCDoubleCell.
170     */
171    JS_STATIC_ASSERT(sizeof(JSGCDoubleCell) == sizeof(double));
172    
173    /*
174     * Check that we can use memset(p, 0, ...) to implement JS_CLEAR_WEAK_ROOTS.
175     */
176    JS_STATIC_ASSERT(JSVAL_NULL == 0);
177    
178    
179    /*
180   * A GC arena contains a fixed number of flag bits for each thing in its heap,   * A GC arena contains a fixed number of flag bits for each thing in its heap,
181   * and supports O(1) lookup of a flag given its thing's address.   * and supports O(1) lookup of a flag given its thing's address.
182   *   *
# Line 1253  Line 1298 
1298      rt->gcMaxBytes = rt->gcMaxMallocBytes = maxbytes;      rt->gcMaxBytes = rt->gcMaxMallocBytes = maxbytes;
1299      rt->gcEmptyArenaPoolLifespan = 30000;      rt->gcEmptyArenaPoolLifespan = 30000;
1300    
1301        /*
1302         * By default the trigger factor gets maximum possible value. This
1303         * means that GC will not be triggered by growth of GC memory (gcBytes).
1304         */
1305        rt->gcTriggerFactor = (uint32) -1;
1306    
1307        /*
1308         * The assigned value prevents GC from running when GC memory is too low
1309         * (during JS engine start).
1310         */
1311        rt->gcLastBytes = 8192;
1312    
1313      METER(memset(&rt->gcStats, 0, sizeof rt->gcStats));      METER(memset(&rt->gcStats, 0, sizeof rt->gcStats));
1314      return JS_TRUE;      return JS_TRUE;
1315  }  }
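The defaults set above in js_InitGC can be overridden by the embedding. A hedged sketch of what that might look like; it assumes JS_SetGCParameter and a JSGC_TRIGGER_FACTOR parameter key as found in SpiderMonkey trees of this vintage, and the 300 is an arbitrary example value, not anything taken from this diff:

    /*
     * Sketch only: JSGC_TRIGGER_FACTOR is a percentage, so 300 would mean
     * "collect once gcBytes reaches 3x the gcLastBytes recorded after the
     * previous GC". The (uint32)-1 default above effectively disables the
     * growth-based trigger.
     */
    JSRuntime *rt = JS_NewRuntime(8L * 1024L * 1024L);   /* maxbytes passed to js_InitGC */
    if (rt) {
        JS_SetGCParameter(rt, JSGC_MAX_MALLOC_BYTES, 16L * 1024L * 1024L);
        JS_SetGCParameter(rt, JSGC_TRIGGER_FACTOR, 300);
    }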
# Line 1466  Line 1523 
1523       * properly with a racing GC, without calling JS_AddRoot from a request.       * properly with a racing GC, without calling JS_AddRoot from a request.
1524       * We have to preserve API compatibility here, now that we avoid holding       * We have to preserve API compatibility here, now that we avoid holding
1525       * rt->gcLock across the mark phase (including the root hashtable mark).       * rt->gcLock across the mark phase (including the root hashtable mark).
      *  
      * If the GC is running and we're called on another thread, wait for this  
      * GC activation to finish.  We can safely wait here (in the case where we  
      * are called within a request on another thread's context) without fear  
      * of deadlock because the GC doesn't set rt->gcRunning until after it has  
      * waited for all active requests to end.  
1526       */       */
1527      JS_LOCK_GC(rt);      JS_LOCK_GC(rt);
1528  #ifdef JS_THREADSAFE      js_WaitForGC(rt);
     JS_ASSERT(!rt->gcRunning || rt->gcLevel > 0);  
     if (rt->gcRunning && rt->gcThread->id != js_CurrentThreadId()) {  
         do {  
             JS_AWAIT_GC_DONE(rt);  
         } while (rt->gcLevel > 0);  
     }  
 #endif  
1529      rhe = (JSGCRootHashEntry *)      rhe = (JSGCRootHashEntry *)
1530            JS_DHashTableOperate(&rt->gcRootsHash, rp, JS_DHASH_ADD);            JS_DHashTableOperate(&rt->gcRootsHash, rp, JS_DHASH_ADD);
1531      if (rhe) {      if (rhe) {
# Line 1503  Line 1547 
1547       * Same synchronization drill as above in js_AddRoot.       * Same synchronization drill as above in js_AddRoot.
1548       */       */
1549      JS_LOCK_GC(rt);      JS_LOCK_GC(rt);
1550  #ifdef JS_THREADSAFE      js_WaitForGC(rt);
     JS_ASSERT(!rt->gcRunning || rt->gcLevel > 0);  
     if (rt->gcRunning && rt->gcThread->id != js_CurrentThreadId()) {  
         do {  
             JS_AWAIT_GC_DONE(rt);  
         } while (rt->gcLevel > 0);  
     }  
 #endif  
1551      (void) JS_DHashTableOperate(&rt->gcRootsHash, rp, JS_DHASH_REMOVE);      (void) JS_DHashTableOperate(&rt->gcRootsHash, rp, JS_DHASH_REMOVE);
1552      rt->gcPoke = JS_TRUE;      rt->gcPoke = JS_TRUE;
1553      JS_UNLOCK_GC(rt);      JS_UNLOCK_GC(rt);
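Both js_AddRoot and js_RemoveRoot now call js_WaitForGC instead of open-coding the wait. A sketch of what that helper presumably does, reconstructed from the inline loop this revision deletes (the real definition lives elsewhere and may differ in detail):

    /* Sketch reconstructed from the removed inline code; not the verbatim helper. */
    void
    js_WaitForGC(JSRuntime *rt)
    {
    #ifdef JS_THREADSAFE
        JS_ASSERT(!rt->gcRunning || rt->gcLevel > 0);
        if (rt->gcRunning && rt->gcThread->id != js_CurrentThreadId()) {
            /* Another thread is collecting; block until its GC activation ends. */
            do {
                JS_AWAIT_GC_DONE(rt);
            } while (rt->gcLevel > 0);
        }
    #endif
    }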
# Line 1682  Line 1719 
1719  #define NGCHIST 64  #define NGCHIST 64
1720    
1721  static struct GCHist {  static struct GCHist {
1722      JSBool      lastDitch;      bool        lastDitch;
1723      JSGCThing   *freeList;      JSGCThing   *freeList;
1724  } gchist[NGCHIST];  } gchist[NGCHIST];
1725    
# Line 1757  Line 1794 
1794    
1795  #endif  #endif
1796    
1797    static JS_INLINE bool
1798    IsGCThresholdReached(JSRuntime *rt)
1799    {
1800    #ifdef JS_GC_ZEAL
1801        if (rt->gcZeal >= 1)
1802            return true;
1803    #endif
1804    
1805        /*
1806         * Since the initial value of the gcLastBytes parameter is not equal to
1807         * zero (see the js_InitGC function) the return value is false when
1808         * the gcBytes value is close to zero at the JS engine start.
1809         */
1810        return rt->gcMallocBytes >= rt->gcMaxMallocBytes ||
1811               rt->gcBytes / rt->gcTriggerFactor >= rt->gcLastBytes / 100;
1812    }
1813    
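For readers puzzling over the integer arithmetic in IsGCThresholdReached, here is a small standalone illustration. The FakeRuntime struct and the sample numbers are invented for the example; only the comparison itself mirrors the code above.

    // Standalone C++ illustration of the threshold arithmetic.
    #include <cstdint>
    #include <cstdio>

    struct FakeRuntime {
        uint64_t gcBytes, gcLastBytes, gcTriggerFactor;
        uint64_t gcMallocBytes, gcMaxMallocBytes;
    };

    static bool ThresholdReached(const FakeRuntime &rt) {
        return rt.gcMallocBytes >= rt.gcMaxMallocBytes ||
               rt.gcBytes / rt.gcTriggerFactor >= rt.gcLastBytes / 100;
    }

    int main() {
        // gcTriggerFactor is a percentage: 300 means "collect once gcBytes
        // grows to 3x the gcLastBytes recorded after the previous GC".
        FakeRuntime rt = { 24 * 1024, 8192, 300, 0, 32 * 1024 * 1024 };
        std::printf("%d\n", ThresholdReached(rt));   // 1: 24576/300 = 81 >= 8192/100 = 81
        rt.gcTriggerFactor = UINT32_MAX;             // the js_InitGC default
        std::printf("%d\n", ThresholdReached(rt));   // 0: growth alone never triggers
        return 0;
    }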
1814  void *  void *
1815  js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes)  js_NewGCThing(JSContext *cx, uintN flags, size_t nbytes)
1816  {  {
1817      JSRuntime *rt;      JSRuntime *rt;
1818      uintN flindex;      uintN flindex;
1819      JSBool doGC;      bool doGC;
1820      JSGCThing *thing;      JSGCThing *thing;
1821      uint8 *flagp;      uint8 *flagp;
1822      JSGCArenaList *arenaList;      JSGCArenaList *arenaList;
# Line 1823  Line 1877 
1877          return NULL;          return NULL;
1878      }      }
1879    
1880      doGC = (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke);  #if defined JS_GC_ZEAL && defined JS_TRACER
1881  #ifdef JS_GC_ZEAL      if (rt->gcZeal >= 1 && JS_TRACE_MONITOR(cx).useReservedObjects)
1882      doGC = doGC || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke);          goto testReservedObjects;
1883  #endif  #endif
1884    
1885      arenaList = &rt->gcArenaList[flindex];      arenaList = &rt->gcArenaList[flindex];
1886        doGC = IsGCThresholdReached(rt);
1887      for (;;) {      for (;;) {
1888          if (doGC && !JS_ON_TRACE(cx)) {          if (doGC
1889    #ifdef JS_TRACER
1890                && !JS_ON_TRACE(cx) && !JS_TRACE_MONITOR(cx).useReservedObjects
1891    #endif
1892                ) {
1893              /*              /*
1894               * Keep rt->gcLock across the call into js_GC so we don't starve               * Keep rt->gcLock across the call into js_GC so we don't starve
1895               * and lose to racing threads who deplete the heap just after               * and lose to racing threads who deplete the heap just after
# Line 1855  Line 1914 
1914               * Refill the local free list by taking several things from the               * Refill the local free list by taking several things from the
1915               * global free list unless we are still at rt->gcMaxMallocBytes               * global free list unless we are still at rt->gcMaxMallocBytes
1916               * barrier or the free list is already populated. The former               * barrier or the free list is already populated. The former
1917               * happens when GC is canceled due to !gcCallback(cx, JSGC_BEGIN)               * happens when GC is canceled due to gcCallback(cx, JSGC_BEGIN)
1918               * or no gcPoke. The latter is caused via allocating new things               * returning false. The latter is caused via allocating new
1919               * in gcCallback(cx, JSGC_END).               * things in gcCallback(cx, JSGC_END).
1920               */               */
1921              if (rt->gcMallocBytes >= rt->gcMaxMallocBytes)              if (rt->gcMallocBytes >= rt->gcMaxMallocBytes)
1922                  break;                  break;
# Line 1895  Line 1954 
1954              JS_ASSERT(arenaList->lastCount < thingsLimit);              JS_ASSERT(arenaList->lastCount < thingsLimit);
1955              a = arenaList->last;              a = arenaList->last;
1956          } else {          } else {
1957    #ifdef JS_TRACER
1958                if (JS_TRACE_MONITOR(cx).useReservedObjects) {
1959    #ifdef JS_GC_ZEAL
1960    testReservedObjects:
1961    #endif
1962                    JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
1963    
1964                    thing = (JSGCThing *) tm->reservedObjects;
1965                    flagp = GetGCThingFlags(thing);
1966                    JS_ASSERT(thing);
1967                    tm->reservedObjects = JSVAL_TO_OBJECT(tm->reservedObjects->fslots[0]);
1968                    break;
1969                }
1970    #endif
1971    
1972              a = NewGCArena(rt);              a = NewGCArena(rt);
1973              if (!a) {              if (!a) {
1974                  if (doGC || JS_ON_TRACE(cx))                  if (doGC || JS_ON_TRACE(cx))
1975                      goto fail;                      goto fail;
1976                  doGC = JS_TRUE;                  doGC = true;
1977                  continue;                  continue;
1978              }              }
1979              a->list = arenaList;              a->list = arenaList;
# Line 1997  Line 2071 
2071      if (gcLocked)      if (gcLocked)
2072          JS_UNLOCK_GC(rt);          JS_UNLOCK_GC(rt);
2073  #endif  #endif
     JS_COUNT_OPERATION(cx, JSOW_ALLOCATION);  
2074      return thing;      return thing;
2075    
2076  fail:  fail:
# Line 2006  Line 2079 
2079          JS_UNLOCK_GC(rt);          JS_UNLOCK_GC(rt);
2080  #endif  #endif
2081      METER(astats->fail++);      METER(astats->fail++);
2082      if (!JS_ON_TRACE(cx))      js_ReportOutOfMemory(cx);
         JS_ReportOutOfMemory(cx);  
2083      return NULL;      return NULL;
2084  }  }
2085    
# Line 2033  Line 2105 
2105          return NULL;          return NULL;
2106      }      }
2107    
2108      if (rt->gcMallocBytes >= rt->gcMaxMallocBytes && rt->gcPoke      if (IsGCThresholdReached(rt))
 #ifdef JS_GC_ZEAL  
         && (rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke))  
 #endif  
         ) {  
2109          goto do_gc;          goto do_gc;
     }  
2110    
2111      /*      /*
2112       * Loop until we find a flag bitmap byte with unset bits indicating free       * Loop until we find a flag bitmap byte with unset bits indicating free
# Line 2058  Line 2125 
2125                      if (didGC || JS_ON_TRACE(cx)) {                      if (didGC || JS_ON_TRACE(cx)) {
2126                          METER(rt->gcStats.doubleArenaStats.fail++);                          METER(rt->gcStats.doubleArenaStats.fail++);
2127                          JS_UNLOCK_GC(rt);                          JS_UNLOCK_GC(rt);
2128                          if (!JS_ON_TRACE(cx))                          js_ReportOutOfMemory(cx);
                             JS_ReportOutOfMemory(cx);  
2129                          return NULL;                          return NULL;
2130                      }                      }
2131                      js_GC(cx, GC_LAST_DITCH);                      js_GC(cx, GC_LAST_DITCH);
# Line 2144  Line 2210 
2210          } while (bit != 0);          } while (bit != 0);
2211      }      }
2212      JS_ASSERT(list);      JS_ASSERT(list);
     JS_COUNT_OPERATION(cx, JSOW_ALLOCATION * JS_BITS_PER_WORD);  
2213    
2214      /*      /*
2215       * We delegate assigning cx->doubleFreeList to js_NewDoubleInRootedValue as       * We delegate assigning cx->doubleFreeList to js_NewDoubleInRootedValue as
# Line 2200  Line 2265 
2265      return dp;      return dp;
2266  }  }
2267    
2268    #ifdef JS_TRACER
2269    JSBool
2270    js_ReserveObjects(JSContext *cx, size_t nobjects)
2271    {
2272        /*
2273         * Ensure at least nobjects objects are in the list. fslots[1] of each
2274         * object on the reservedObjects list is the length of the list from there.
2275         */
2276        JSObject *&head = JS_TRACE_MONITOR(cx).reservedObjects;
2277        size_t i = head ? JSVAL_TO_INT(head->fslots[1]) : 0;
2278        while (i < nobjects) {
2279            JSObject *obj = (JSObject *) js_NewGCThing(cx, GCX_OBJECT, sizeof(JSObject));
2280            if (!obj)
2281                return JS_FALSE;
2282            memset(obj, 0, sizeof(JSObject));
2283            /* The class must be set to something for finalization. */
2284            obj->classword = (jsuword) &js_ObjectClass;
2285            obj->fslots[0] = OBJECT_TO_JSVAL(head);
2286            i++;
2287            obj->fslots[1] = INT_TO_JSVAL(i);
2288            head = obj;
2289        }
2290    
2291        return JS_TRUE;
2292    }
2293    #endif
2294    
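How the reserved-object list is meant to be used, based only on js_ReserveObjects above and the consumption path added to js_NewGCThing earlier in this diff; the call site and the count of 8 are hypothetical:

    /*
     * Hypothetical caller: before entering a section where the tracer cannot
     * tolerate a last-ditch GC, pre-allocate enough objects so that later
     * js_NewGCThing calls can be satisfied from tm->reservedObjects.
     */
    if (!js_ReserveObjects(cx, 8))      /* 8 is an arbitrary example count */
        return JS_FALSE;                /* propagate OOM to the caller */
    /*
     * While JS_TRACE_MONITOR(cx).useReservedObjects is set, js_NewGCThing pops
     * objects off the fslots[0]-linked reservedObjects list instead of touching
     * the arenas (see the hunk around line 1958 above); fslots[1] carries the
     * remaining length of the list.
     */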
2295  JSBool  JSBool
2296  js_AddAsGCBytes(JSContext *cx, size_t sz)  js_AddAsGCBytes(JSContext *cx, size_t sz)
2297  {  {
# Line 2207  Line 2299 
2299    
2300      rt = cx->runtime;      rt = cx->runtime;
2301      if (rt->gcBytes >= rt->gcMaxBytes ||      if (rt->gcBytes >= rt->gcMaxBytes ||
2302          sz > (size_t) (rt->gcMaxBytes - rt->gcBytes)          sz > (size_t) (rt->gcMaxBytes - rt->gcBytes) ||
2303  #ifdef JS_GC_ZEAL          IsGCThresholdReached(rt)) {
         || rt->gcZeal >= 2 || (rt->gcZeal >= 1 && rt->gcPoke)  
 #endif  
         ) {  
2304          if (JS_ON_TRACE(cx)) {          if (JS_ON_TRACE(cx)) {
2305              JS_UNLOCK_GC(rt);              /*
2306              return JS_FALSE;               * If we can't leave the trace, signal OOM condition, otherwise
2307                 * exit from trace and proceed with GC.
2308                 */
2309                if (!js_CanLeaveTrace(cx)) {
2310                    JS_UNLOCK_GC(rt);
2311                    return JS_FALSE;
2312                }
2313                js_LeaveTrace(cx);
2314          }          }
2315          js_GC(cx, GC_LAST_DITCH);          js_GC(cx, GC_LAST_DITCH);
2316          if (rt->gcBytes >= rt->gcMaxBytes ||          if (rt->gcBytes >= rt->gcMaxBytes ||
# Line 2791  Line 2887 
2887          JS_CALL_OBJECT_TRACER(trc, fp->varobj, "variables");          JS_CALL_OBJECT_TRACER(trc, fp->varobj, "variables");
2888      if (fp->script) {      if (fp->script) {
2889          js_TraceScript(trc, fp->script);          js_TraceScript(trc, fp->script);
2890          if (fp->regs) {  
2891            /* fp->slots is null for watch pseudo-frames, see js_watch_set. */
2892            if (fp->slots) {
2893              /*              /*
2894               * Don't mark what has not been pushed yet, or what has been               * Don't mark what has not been pushed yet, or what has been
2895               * popped already.               * popped already.
2896               */               */
2897              nslots = (uintN) (fp->regs->sp - fp->slots);              if (fp->regs) {
2898                    nslots = (uintN) (fp->regs->sp - fp->slots);
2899                    JS_ASSERT(nslots >= fp->script->nfixed);
2900                } else {
2901                    nslots = fp->script->nfixed;
2902                }
2903              TRACE_JSVALS(trc, nslots, fp->slots, "slot");              TRACE_JSVALS(trc, nslots, fp->slots, "slot");
2904          }          }
2905      } else {      } else {
# Line 2874  Line 2977 
2977      js_CallValueTracerIfGCThing(trc, wr->lastInternalResult);      js_CallValueTracerIfGCThing(trc, wr->lastInternalResult);
2978  }  }
2979    
2980  JS_FRIEND_API(void)  JS_REQUIRES_STACK JS_FRIEND_API(void)
2981  js_TraceContext(JSTracer *trc, JSContext *acx)  js_TraceContext(JSTracer *trc, JSContext *acx)
2982  {  {
2983      JSStackFrame *fp, *nextChain;      JSStackFrame *fp, *nextChain;
# Line 2922  Line 3025 
3025       * Iterate frame chain and dormant chains.       * Iterate frame chain and dormant chains.
3026       *       *
3027       * (NB: see comment on this whole "dormant" thing in js_Execute.)       * (NB: see comment on this whole "dormant" thing in js_Execute.)
3028         *
3029         * Since js_GetTopStackFrame needs to dereference cx->thread to check for
3030         * JIT frames, we check for non-null thread here and avoid null checks
3031         * there. See bug 471197.
3032       */       */
3033      fp = acx->fp;  #ifdef JS_THREADSAFE
3034      nextChain = acx->dormantFrameChain;      if (acx->thread)
3035      if (!fp)  #endif
3036          goto next_chain;      {
3037            fp = js_GetTopStackFrame(acx);
3038            nextChain = acx->dormantFrameChain;
3039            if (!fp)
3040                goto next_chain;
3041    
3042      /* The top frame must not be dormant. */          /* The top frame must not be dormant. */
3043      JS_ASSERT(!fp->dormantNext);          JS_ASSERT(!fp->dormantNext);
3044      for (;;) {          for (;;) {
3045          do {              do {
3046              js_TraceStackFrame(trc, fp);                  js_TraceStackFrame(trc, fp);
3047          } while ((fp = fp->down) != NULL);              } while ((fp = fp->down) != NULL);
3048    
3049        next_chain:            next_chain:
3050          if (!nextChain)              if (!nextChain)
3051              break;                  break;
3052          fp = nextChain;              fp = nextChain;
3053          nextChain = nextChain->dormantNext;              nextChain = nextChain->dormantNext;
3054            }
3055      }      }
3056    
3057      /* Mark other roots-by-definition in acx. */      /* Mark other roots-by-definition in acx. */
3058      if (acx->globalObject)      if (acx->globalObject && !JS_HAS_OPTION(acx, JSOPTION_UNROOTED_GLOBAL))
3059          JS_CALL_OBJECT_TRACER(trc, acx->globalObject, "global object");          JS_CALL_OBJECT_TRACER(trc, acx->globalObject, "global object");
3060      TraceWeakRoots(trc, &acx->weakRoots);      TraceWeakRoots(trc, &acx->weakRoots);
3061      if (acx->throwing) {      if (acx->throwing) {
# Line 2981  Line 3093 
3093            case JSTVU_WEAK_ROOTS:            case JSTVU_WEAK_ROOTS:
3094              TraceWeakRoots(trc, tvr->u.weakRoots);              TraceWeakRoots(trc, tvr->u.weakRoots);
3095              break;              break;
3096            case JSTVU_PARSE_CONTEXT:            case JSTVU_COMPILER:
3097              js_TraceParseContext(trc, tvr->u.parseContext);              tvr->u.compiler->trace(trc);
3098              break;              break;
3099            case JSTVU_SCRIPT:            case JSTVU_SCRIPT:
3100              js_TraceScript(trc, tvr->u.script);              js_TraceScript(trc, tvr->u.script);
# Line 2995  Line 3107 
3107    
3108      if (acx->sharpObjectMap.depth > 0)      if (acx->sharpObjectMap.depth > 0)
3109          js_TraceSharpMap(trc, &acx->sharpObjectMap);          js_TraceSharpMap(trc, &acx->sharpObjectMap);
3110    
3111        js_TraceRegExpStatics(trc, acx);
3112    
3113    #ifdef JS_TRACER
3114        if (acx->nativeVp)
3115            TRACE_JSVALS(trc, acx->nativeVpLen, acx->nativeVp, "nativeVp");
3116    #endif
3117  }  }
3118    
3119  void  #ifdef JS_TRACER
3120  js_TraceTraceMonitor(JSTracer *trc, JSTraceMonitor *tm)  
3121    static void
3122    MarkReservedObjects(JSTraceMonitor *tm)
3123  {  {
3124      if (IS_GC_MARKING_TRACER(trc)) {      /* Keep the reserved objects. */
3125          tm->recoveryDoublePoolPtr = tm->recoveryDoublePool;      for (JSObject *obj = tm->reservedObjects; obj; obj = JSVAL_TO_OBJECT(obj->fslots[0])) {
3126          /* Make sure the global shape changes and will force a flush          uint8 *flagp = GetGCThingFlags(obj);
3127             of the code cache. */          JS_ASSERT((*flagp & GCF_TYPEMASK) == GCX_OBJECT);
3128          tm->globalShape = -1;          JS_ASSERT(*flagp != GCF_FINAL);
3129            *flagp |= GCF_MARK;
3130      }      }
3131  }  }
3132    
3133  void  #ifdef JS_THREADSAFE
3134    static JSDHashOperator
3135    reserved_objects_marker(JSDHashTable *table, JSDHashEntryHdr *hdr,
3136                            uint32, void *)
3137    {
3138        JSThread *thread = ((JSThreadsHashEntry *) hdr)->thread;
3139    
3140        MarkReservedObjects(&thread->data.traceMonitor);
3141        return JS_DHASH_NEXT;
3142    }
3143    #endif
3144    
3145    #endif
3146    
3147    JS_REQUIRES_STACK void
3148  js_TraceRuntime(JSTracer *trc, JSBool allAtoms)  js_TraceRuntime(JSTracer *trc, JSBool allAtoms)
3149  {  {
3150      JSRuntime *rt = trc->context->runtime;      JSRuntime *rt = trc->context->runtime;
# Line 3028  Line 3164 
3164      if (rt->gcExtraRootsTraceOp)      if (rt->gcExtraRootsTraceOp)
3165          rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);          rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);
3166    
3167    #ifdef JS_TRACER
3168        for (int i = 0; i < JSBUILTIN_LIMIT; i++) {
3169            if (rt->builtinFunctions[i])
3170                JS_CALL_OBJECT_TRACER(trc, rt->builtinFunctions[i], "builtin function");
3171        }
3172    
3173        /* Mark the reserved objects unless we are shutting down. */
3174        if (IS_GC_MARKING_TRACER(trc) && rt->state != JSRTS_LANDING) {
3175  #ifdef JS_THREADSAFE  #ifdef JS_THREADSAFE
3176      /* Trace the loop table(s) which can contain pointers to code objects. */          JS_DHashTableEnumerate(&rt->threads, reserved_objects_marker, NULL);
    while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) {  
        if (!acx->thread)  
            continue;  
        js_TraceTraceMonitor(trc, &acx->thread->traceMonitor);  
    }  
3177  #else  #else
3178     js_TraceTraceMonitor(trc, &rt->traceMonitor);          MarkReservedObjects(&rt->threadData.traceMonitor);
3179    #endif
3180        }
3181    
3182    #endif
3183    }
3184    
3185    void
3186    js_TriggerGC(JSContext *cx, JSBool gcLocked)
3187    {
3188        JSRuntime *rt = cx->runtime;
3189    
3190    #ifdef JS_THREADSAFE
3191        JS_ASSERT(cx->requestDepth > 0);
3192  #endif  #endif
3193        JS_ASSERT(!rt->gcRunning);
3194        if (rt->gcIsNeeded)
3195            return;
3196    
3197        /*
3198         * Trigger the GC when it is safe to call an operation callback on any
3199         * thread.
3200         */
3201        rt->gcIsNeeded = JS_TRUE;
3202        js_TriggerAllOperationCallbacks(rt, gcLocked);
3203  }  }
3204    
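js_TriggerGC only sets rt->gcIsNeeded and nudges the operation callbacks; the collection itself happens later at a safe point. A hedged sketch of the consuming side follows; the helper name and placement are assumptions, as the real check sits in the operation-callback path outside this file:

    /* Sketch of how the gcIsNeeded flag is typically consumed; assumed, not verbatim. */
    static JSBool
    MaybeRunTriggeredGC(JSContext *cx)
    {
        JSRuntime *rt = cx->runtime;
        if (rt->gcIsNeeded) {
            /* Safe point: no engine locks held and not on trace here. */
            js_GC(cx, GC_NORMAL);
        }
        return JS_TRUE;
    }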
3205  static void  static void
3206  ProcessSetSlotRequest(JSContext *cx, JSSetSlotRequest *ssr)  ProcessSetSlotRequest(JSContext *cx, JSSetSlotRequest *ssr)
3207  {  {
3208      JSObject *obj, *pobj;      JSObject *obj = ssr->obj;
3209      uint32 slot;      JSObject *pobj = ssr->pobj;
3210        uint32 slot = ssr->slot;
     obj = ssr->obj;  
     pobj = ssr->pobj;  
     slot = ssr->slot;  
3211    
3212      while (pobj) {      while (pobj) {
3213          pobj = js_GetWrappedObject(cx, pobj);          pobj = js_GetWrappedObject(cx, pobj);
3214          if (pobj == obj) {          if (pobj == obj) {
3215              ssr->errnum = JSMSG_CYCLIC_VALUE;              ssr->cycle = true;
3216              return;              return;
3217          }          }
3218          pobj = JSVAL_TO_OBJECT(STOBJ_GET_SLOT(pobj, slot));          pobj = JSVAL_TO_OBJECT(STOBJ_GET_SLOT(pobj, slot));
3219      }      }
3220    
3221      pobj = ssr->pobj;      pobj = ssr->pobj;
3222        if (slot == JSSLOT_PROTO) {
3223      if (slot == JSSLOT_PROTO && OBJ_IS_NATIVE(obj)) {          STOBJ_SET_PROTO(obj, pobj);
3224          JSScope *scope, *newscope;      } else {
3225          JSObject *oldproto;          JS_ASSERT(slot == JSSLOT_PARENT);
3226            STOBJ_SET_PARENT(obj, pobj);
         /* Check to see whether obj shares its prototype's scope. */  
         scope = OBJ_SCOPE(obj);  
         oldproto = STOBJ_GET_PROTO(obj);  
         if (oldproto && OBJ_SCOPE(oldproto) == scope) {  
             /* Either obj needs a new empty scope, or it should share pobj's. */  
             if (!pobj ||  
                 !OBJ_IS_NATIVE(pobj) ||  
                 OBJ_GET_CLASS(cx, pobj) != STOBJ_GET_CLASS(oldproto)) {  
                 /*  
                  * With no proto and no scope of its own, obj is truly empty.  
                  *  
                  * If pobj is not native, obj needs its own empty scope -- it  
                  * should not continue to share oldproto's scope once oldproto  
                  * is not on obj's prototype chain.  That would put properties  
                  * from oldproto's scope ahead of properties defined by pobj,  
                  * in lookup order.  
                  *  
                  * If pobj's class differs from oldproto's, we may need a new  
                  * scope to handle differences in private and reserved slots,  
                  * so we suboptimally but safely make one.  
                  */  
                 if (!js_GetMutableScope(cx, obj)) {  
                     ssr->errnum = JSMSG_OUT_OF_MEMORY;  
                     return;  
                 }  
             } else if (OBJ_SCOPE(pobj) != scope) {  
                 newscope = (JSScope *) js_HoldObjectMap(cx, pobj->map);  
                 obj->map = &newscope->map;  
                 js_DropObjectMap(cx, &scope->map, obj);  
                 JS_TRANSFER_SCOPE_LOCK(cx, scope, newscope);  
             }  
         }  
   
         /*  
          * Regenerate property cache shape ids for all of the scopes along the  
          * old prototype chain, in case any property cache entries were filled  
          * by looking up starting from obj.  
          */  
         while (oldproto && OBJ_IS_NATIVE(oldproto)) {  
             scope = OBJ_SCOPE(oldproto);  
             SCOPE_MAKE_UNIQUE_SHAPE(cx, scope);  
             oldproto = STOBJ_GET_PROTO(scope->object);  
         }  
3227      }      }
   
     /* Finally, do the deed. */  
     STOBJ_SET_SLOT(obj, slot, OBJECT_TO_JSVAL(pobj));  
3228  }  }
3229    
3230  static void  void
3231  DestroyScriptsToGC(JSContext *cx, JSScript **listp)  js_DestroyScriptsToGC(JSContext *cx, JSThreadData *data)
3232  {  {
3233      JSScript *script;      JSScript **listp, *script;
3234    
3235      while ((script = *listp) != NULL) {      for (size_t i = 0; i != JS_ARRAY_LENGTH(data->scriptsToGC); ++i) {
3236          *listp = script->u.nextToGC;          listp = &data->scriptsToGC[i];
3237          script->u.nextToGC = NULL;          while ((script = *listp) != NULL) {
3238          js_DestroyScript(cx, script);              *listp = script->u.nextToGC;
3239                script->u.nextToGC = NULL;
3240                js_DestroyScript(cx, script);
3241            }
3242      }      }
3243  }  }
3244    
# Line 3146  Line 3262 
3262      JSBool allClear;      JSBool allClear;
3263  #ifdef JS_THREADSAFE  #ifdef JS_THREADSAFE
3264      uint32 requestDebit;      uint32 requestDebit;
     JSContext *acx, *iter;  
3265  #endif  #endif
3266  #ifdef JS_GCMETER  #ifdef JS_GCMETER
3267      uint32 nlivearenas, nkilledarenas, nthings;      uint32 nlivearenas, nkilledarenas, nthings;
# Line 3154  Line 3269 
3269    
3270      JS_ASSERT_IF(gckind == GC_LAST_DITCH, !JS_ON_TRACE(cx));      JS_ASSERT_IF(gckind == GC_LAST_DITCH, !JS_ON_TRACE(cx));
3271      rt = cx->runtime;      rt = cx->runtime;
3272    
3273  #ifdef JS_THREADSAFE  #ifdef JS_THREADSAFE
3274        /*
3275         * We allow js_GC calls outside a request but the context must be bound
3276         * to the current thread.
3277         */
3278        JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread));
3279    
3280      /* Avoid deadlock. */      /* Avoid deadlock. */
3281      JS_ASSERT(!JS_IS_RUNTIME_LOCKED(rt));      JS_ASSERT(!JS_IS_RUNTIME_LOCKED(rt));
3282  #endif  #endif
# Line 3215  Line 3337 
3337      rt->gcPoke = JS_FALSE;      rt->gcPoke = JS_FALSE;
3338    
3339  #ifdef JS_THREADSAFE  #ifdef JS_THREADSAFE
3340      JS_ASSERT(cx->thread->id == js_CurrentThreadId());      /*
3341         * Check if the GC is already running on this or another thread and
3342         * delegate the job to it.
3343         */
3344        if (rt->gcLevel > 0) {
3345            JS_ASSERT(rt->gcThread);
3346    
3347      /* Bump gcLevel and return rather than nest on this thread. */          /* Bump gcLevel to restart the current GC, so it finds new garbage. */
     if (rt->gcThread == cx->thread) {  
         JS_ASSERT(rt->gcLevel > 0);  
3348          rt->gcLevel++;          rt->gcLevel++;
3349          METER_UPDATE_MAX(rt->gcStats.maxlevel, rt->gcLevel);          METER_UPDATE_MAX(rt->gcStats.maxlevel, rt->gcLevel);
         if (!(gckind & GC_LOCK_HELD))  
             JS_UNLOCK_GC(rt);  
         return;  
     }  
   
     /*  
      * If we're in one or more requests (possibly on more than one context)  
      * running on the current thread, indicate, temporarily, that all these  
      * requests are inactive.  If cx->thread is NULL, then cx is not using  
      * the request model, and does not contribute to rt->requestCount.  
      */  
     requestDebit = 0;  
     if (cx->thread) {  
         JSCList *head, *link;  
3350    
3351          /*          /*
3352           * Check all contexts on cx->thread->contextList for active requests,           * If the GC runs on another thread, temporarily suspend the current
3353           * counting each such context against requestDebit.           * request and wait until the GC is done.
3354           */           */
3355          head = &cx->thread->contextList;          if (rt->gcThread != cx->thread) {
3356          for (link = head->next; link != head; link = link->next) {              requestDebit = js_DiscountRequestsForGC(cx);
3357              acx = CX_FROM_THREAD_LINKS(link);              js_RecountRequestsAfterGC(rt, requestDebit);
             JS_ASSERT(acx->thread == cx->thread);  
             if (acx->requestDepth)  
                 requestDebit++;  
3358          }          }
     } else {  
         /*  
          * We assert, but check anyway, in case someone is misusing the API.  
          * Avoiding the loop over all of rt's contexts is a win in the event  
          * that the GC runs only on request-less contexts with null threads,  
          * in a special thread such as might be used by the UI/DOM/Layout  
          * "mozilla" or "main" thread in Mozilla-the-browser.  
          */  
         JS_ASSERT(cx->requestDepth == 0);  
         if (cx->requestDepth)  
             requestDebit = 1;  
     }  
     if (requestDebit) {  
         JS_ASSERT(requestDebit <= rt->requestCount);  
         rt->requestCount -= requestDebit;  
         if (rt->requestCount == 0)  
             JS_NOTIFY_REQUEST_DONE(rt);  
     }  
   
     /* If another thread is already in GC, don't attempt GC; wait instead. */  
     if (rt->gcLevel > 0) {  
         /* Bump gcLevel to restart the current GC, so it finds new garbage. */  
         rt->gcLevel++;  
         METER_UPDATE_MAX(rt->gcStats.maxlevel, rt->gcLevel);  
   
         /* Wait for the other thread to finish, then resume our request. */  
         while (rt->gcLevel > 0)  
             JS_AWAIT_GC_DONE(rt);  
         if (requestDebit)  
             rt->requestCount += requestDebit;  
3359          if (!(gckind & GC_LOCK_HELD))          if (!(gckind & GC_LOCK_HELD))
3360              JS_UNLOCK_GC(rt);              JS_UNLOCK_GC(rt);
3361          return;          return;
# Line 3287  Line 3365 
3365      rt->gcLevel = 1;      rt->gcLevel = 1;
3366      rt->gcThread = cx->thread;      rt->gcThread = cx->thread;
3367    
3368      /* Wait for all other requests to finish. */      /*
3369         * Notify all operation callbacks, which will give them a chance to
3370         * yield their current request. Contexts that are not currently
3371         * executing will perform their callback at some later point,
3372         * which then will be unnecessary, but harmless.
3373         */
3374        js_NudgeOtherContexts(cx);
3375    
3376        /*
3377         * Discount all the requests on the current thread from contributing
3378         * to rt->requestCount before we wait for all other requests to finish.
3379         * JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called on
3380         * rt->requestCount transitions to 0.
3381         */
3382        requestDebit = js_CountThreadRequests(cx);
3383        JS_ASSERT_IF(cx->requestDepth != 0, requestDebit >= 1);
3384        rt->requestCount -= requestDebit;
3385      while (rt->requestCount > 0)      while (rt->requestCount > 0)
3386          JS_AWAIT_REQUEST_DONE(rt);          JS_AWAIT_REQUEST_DONE(rt);
3387        rt->requestCount += requestDebit;
3388    
3389  #else  /* !JS_THREADSAFE */  #else  /* !JS_THREADSAFE */
3390    
# Line 3329  Line 3424 
3424           * collect garbage only if a racing thread attempted GC and is waiting           * collect garbage only if a racing thread attempted GC and is waiting
3425           * for us to finish (gcLevel > 1) or if someone already poked us.           * for us to finish (gcLevel > 1) or if someone already poked us.
3426           */           */
3427          if (rt->gcLevel == 1 && !rt->gcPoke)          if (rt->gcLevel == 1 && !rt->gcPoke && !rt->gcIsNeeded)
3428              goto done_running;              goto done_running;
3429    
3430          rt->gcLevel = 0;          rt->gcLevel = 0;
# Line 3337  Line 3432 
3432          rt->gcRunning = JS_FALSE;          rt->gcRunning = JS_FALSE;
3433  #ifdef JS_THREADSAFE  #ifdef JS_THREADSAFE
3434          rt->gcThread = NULL;          rt->gcThread = NULL;
         rt->requestCount += requestDebit;  
3435  #endif  #endif
3436          gckind = GC_LOCK_HELD;          gckind = GC_LOCK_HELD;
3437          goto restart_at_beginning;          goto restart_at_beginning;
# Line 3349  Line 3443 
3443      if (JS_ON_TRACE(cx))      if (JS_ON_TRACE(cx))
3444          goto out;          goto out;
3445  #endif  #endif
3446        VOUCH_HAVE_STACK();
3447    
3448        /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
3449        rt->gcIsNeeded = JS_FALSE;
3450    
3451      /* Reset malloc counter. */      /* Reset malloc counter. */
3452      rt->gcMallocBytes = 0;      rt->gcMallocBytes = 0;
# Line 3359  Line 3457 
3457    }    }
3458  #endif  #endif
3459    
     /* Clear property and JIT oracle caches (only for cx->thread if JS_THREADSAFE). */  
     js_FlushPropertyCache(cx);  
 #ifdef JS_TRACER  
     js_FlushJITOracle(cx);  
 #endif  
   
     /* Destroy eval'ed scripts. */  
     DestroyScriptsToGC(cx, &JS_SCRIPTS_TO_GC(cx));  
   
 #ifdef JS_THREADSAFE  
     /*  
      * Clear thread-based caches. To avoid redundant clearing we unroll the  
      * current thread's step.  
      *  
      * In case a JSScript wrapped within an object was finalized, we null  
      * acx->thread->gsnCache.script and finish the cache's hashtable. Note  
      * that js_DestroyScript, called from script_finalize, will have already  
      * cleared cx->thread->gsnCache above during finalization, so we don't  
      * have to here.  
      */  
     iter = NULL;  
     while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) {  
         if (!acx->thread || acx->thread == cx->thread)  
             continue;  
         GSN_CACHE_CLEAR(&acx->thread->gsnCache);  
         js_FlushPropertyCache(acx);  
3460  #ifdef JS_TRACER  #ifdef JS_TRACER
3461          js_FlushJITOracle(acx);      js_PurgeJITOracle();
 #endif  
         DestroyScriptsToGC(cx, &acx->thread->scriptsToGC);  
     }  
 #else  
     /* The thread-unsafe case just has to clear the runtime's GSN cache. */  
     GSN_CACHE_CLEAR(&rt->gsnCache);  
3462  #endif  #endif
3463        js_PurgeThreads(cx);
3464    
3465    restart:    restart:
3466      rt->gcNumber++;      rt->gcNumber++;
3467      JS_ASSERT(!rt->gcUntracedArenaStackTop);      JS_ASSERT(!rt->gcUntracedArenaStackTop);
3468      JS_ASSERT(rt->gcTraceLaterCount == 0);      JS_ASSERT(rt->gcTraceLaterCount == 0);
3469    
3470      /* Reset the property cache's type id generator so we can compress ids. */      /*
3471         * Reset the property cache's type id generator so we can compress ids.
3472         * Same for the protoHazardShape proxy-shape standing in for all object
3473         * prototypes having readonly or setter properties.
3474         */
3475      rt->shapeGen = 0;      rt->shapeGen = 0;
3476        rt->protoHazardShape = 0;
3477    
3478      /*      /*
3479       * Mark phase.       * Mark phase.
# Line 3668  Line 3740 
3740       * We want to restart GC if js_GC was called recursively or if any of the       * We want to restart GC if js_GC was called recursively or if any of the
3741       * finalizers called js_RemoveRoot or js_UnlockGCThingRT.       * finalizers called js_RemoveRoot or js_UnlockGCThingRT.
3742       */       */
3743      if (rt->gcLevel > 1 || rt->gcPoke) {      if (!JS_ON_TRACE(cx) && (rt->gcLevel > 1 || rt->gcPoke)) {
3744            VOUCH_HAVE_STACK();
3745          rt->gcLevel = 1;          rt->gcLevel = 1;
3746          rt->gcPoke = JS_FALSE;          rt->gcPoke = JS_FALSE;
3747          JS_UNLOCK_GC(rt);          JS_UNLOCK_GC(rt);
3748          goto restart;          goto restart;
3749      }      }
3750    
     if (rt->shapeGen >= SHAPE_OVERFLOW_BIT - 1) {  
         /*  
          * FIXME bug 440834: The shape id space has overflowed. Currently we  
          * cope badly with this. Every call to js_GenerateShape does GC, and  
          * we never re-enable the property cache.  
          */  
         js_DisablePropertyCache(cx);  
 #ifdef JS_THREADSAFE  
         iter = NULL;  
         while ((acx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) {  
             if (!acx->thread || acx->thread == cx->thread)  
                 continue;  
             js_DisablePropertyCache(acx);  
         }  
 #endif  
     }  
   
3751      rt->gcLastBytes = rt->gcBytes;      rt->gcLastBytes = rt->gcBytes;
3752    done_running:    done_running:
3753      rt->gcLevel = 0;      rt->gcLevel = 0;
3754      rt->gcRunning = JS_FALSE;      rt->gcRunning = JS_FALSE;
3755    
3756  #ifdef JS_THREADSAFE  #ifdef JS_THREADSAFE
     /* If we were invoked during a request, pay back the temporary debit. */  
     if (requestDebit)  
         rt->requestCount += requestDebit;  
3757      rt->gcThread = NULL;      rt->gcThread = NULL;
3758      JS_NOTIFY_GC_DONE(rt);      JS_NOTIFY_GC_DONE(rt);
3759    

Legend: left column = removed from v.399, right column = added in v.460; changed lines show both columns.
