
Diff of /trunk/js/jstracer.cpp


revision 459 by siliconforks, Tue Dec 9 03:37:47 2008 UTC  →  revision 460 by siliconforks, Sat Sep 26 23:15:22 2009 UTC
# Line 1  Line 1 
1  /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-  /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2   * vim: set ts=4 sw=4 et tw=99:   * vim: set ts=4 sw=4 et tw=99:
3   *   *
4   * ***** BEGIN LICENSE BLOCK *****   * ***** BEGIN LICENSE BLOCK *****
# Line 50  Line 50 
50  #ifdef SOLARIS  #ifdef SOLARIS
51  #include <alloca.h>  #include <alloca.h>
52  #endif  #endif
53    #include <limits.h>
54    
55  #include "nanojit/nanojit.h"  #include "nanojit/nanojit.h"
56  #include "jsarray.h"            // higher-level library and API headers  #include "jsapi.h"              // higher-level library and API headers
57    #include "jsarray.h"
58  #include "jsbool.h"  #include "jsbool.h"
59  #include "jscntxt.h"  #include "jscntxt.h"
60  #include "jsdbgapi.h"  #include "jsdbgapi.h"
# Line 68  Line 70 
70  #include "jsdate.h"  #include "jsdate.h"
71  #include "jsstaticcheck.h"  #include "jsstaticcheck.h"
72  #include "jstracer.h"  #include "jstracer.h"
73    #include "jsxml.h"
74    
75  #include "jsautooplen.h"        // generated headers last  #include "jsautooplen.h"        // generated headers last
76    #include "imacros.c.out"
77    
78  /* Never use JSVAL_IS_BOOLEAN because it restricts the value (true, false) and  #if JS_HAS_XML_SUPPORT
79     the type. What you want to use is JSVAL_TAG(x) == JSVAL_BOOLEAN and then  #define ABORT_IF_XML(v)                                                       \
80        JS_BEGIN_MACRO                                                            \
81        if (!JSVAL_IS_PRIMITIVE(v) && OBJECT_IS_XML(BOGUS_CX, JSVAL_TO_OBJECT(v)))\
82            ABORT_TRACE("xml detected");                                          \
83        JS_END_MACRO
84    #else
85    #define ABORT_IF_XML(cx, v) ((void) 0)
86    #endif
87    
88    /* Never use JSVAL_IS_BOOLEAN because it restricts the value (true, false) and
89       the type. What you want to use is JSVAL_TAG(x) == JSVAL_BOOLEAN and then
90     handle the undefined case properly (bug 457363). */     handle the undefined case properly (bug 457363). */
91  #undef JSVAL_IS_BOOLEAN  #undef JSVAL_IS_BOOLEAN
92  #define JSVAL_IS_BOOLEAN(x) JS_STATIC_ASSERT(0)  #define JSVAL_IS_BOOLEAN(x) JS_STATIC_ASSERT(0)
93    
94  /* Use a fake tag to represent boxed values, borrowing from the integer tag  /* Use a fake tag to represent boxed values, borrowing from the integer tag
95     range since we only use JSVAL_INT to indicate integers. */     range since we only use JSVAL_INT to indicate integers. */
96  #define JSVAL_BOXED 3  #define JSVAL_BOXED 3
97    
98    /* Another fake jsval tag, used to distinguish null from object values. */
99    #define JSVAL_TNULL 5
100    
101    /* A last fake jsval tag distinguishing functions from non-function objects. */
102    #define JSVAL_TFUN 7
103    
104  /* Map to translate a type tag into a printable representation. */  /* Map to translate a type tag into a printable representation. */
105  static const char typeChar[] = "OIDVS?B?";  static const char typeChar[] = "OIDXSNBF";
106    static const char tagChar[]  = "OIDISIBI";
107    
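For reference, the fake tags slot into the gaps left by the real jsval tags, so one 8-entry table covers every value the recorder can see. A minimal standalone sketch (not part of the diff) of how a trace type maps to its printable spew character; the tag values assume the classic SpiderMonkey encoding (object=0, int=1, double=2, string=4, boolean=6) plus the fake tags defined in this patch (boxed=3, null=5, function=7):

    #include <cstdio>

    // Illustrative stand-ins for the real/fake trace-type tags.
    enum TraceType {
        TT_OBJECT = 0, TT_INT = 1, TT_DOUBLE = 2, TT_BOXED = 3,
        TT_STRING = 4, TT_NULL = 5, TT_BOOLEAN = 6, TT_FUNCTION = 7
    };

    static char typeToChar(unsigned type) {
        static const char typeChar[] = "OIDXSNBF";   // same table as above
        return type < 8 ? typeChar[type] : '?';
    }

    int main() {
        // Prints "D N F": a double, a null, and a function slot in a spew line.
        printf("%c %c %c\n", typeToChar(TT_DOUBLE), typeToChar(TT_NULL),
               typeToChar(TT_FUNCTION));
        return 0;
    }
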
108    /* Blacklist parameters. */
109    
110  /* Number of iterations of a loop where we start tracing.  That is, we don't  /* Number of iterations of a loop where we start tracing.  That is, we don't
111     start tracing until the beginning of the HOTLOOP-th iteration. */     start tracing until the beginning of the HOTLOOP-th iteration. */
112  #define HOTLOOP 2  #define HOTLOOP 2
113    
114    /* Attempt recording this many times before blacklisting permanently. */
115    #define BL_ATTEMPTS 2
116    
117    /* Skip this many future hits before allowing recording again after blacklisting. */
118    #define BL_BACKOFF 32
119    
120  /* Number of times we wait to exit on a side exit before we try to extend the tree. */  /* Number of times we wait to exit on a side exit before we try to extend the tree. */
121  #define HOTEXIT 1  #define HOTEXIT 1
122    
123  /* Max call depths for inlining. */  /* Number of times we try to extend the tree along a side exit. */
124  #define MAX_CALLDEPTH 10  #define MAXEXIT 3
125    
126  /* Max number of type mismatchs before we trash the tree. */  /* Maximum number of peer trees allowed. */
127  #define MAX_MISMATCH 20  #define MAXPEERS 9
128    
129  /* Max blacklist level of inner tree immediate recompiling  */  /* Max call depths for inlining. */
130  #define MAX_INNER_RECORD_BLACKLIST  -16  #define MAX_CALLDEPTH 10
131    
132  /* Max native stack size. */  /* Max native stack size. */
133  #define MAX_NATIVE_STACK_SLOTS 1024  #define MAX_NATIVE_STACK_SLOTS 1024
# Line 106  Line 135 
135  /* Max call stack size. */  /* Max call stack size. */
136  #define MAX_CALL_STACK_ENTRIES 64  #define MAX_CALL_STACK_ENTRIES 64
137    
138    /* Max global object size. */
139    #define MAX_GLOBAL_SLOTS 4096
140    
141    /* Max memory needed to rebuild the interpreter stack when falling off trace. */
142    #define MAX_INTERP_STACK_BYTES                                                \
143        (MAX_NATIVE_STACK_SLOTS * sizeof(jsval) +                                 \
144         MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame) +                         \
145         sizeof(JSInlineFrame)) /* possibly slow native frame at top of stack */
146    
147  /* Max number of branches per tree. */  /* Max number of branches per tree. */
148  #define MAX_BRANCHES 16  #define MAX_BRANCHES 32
149    
150  /* Macros for demote slot lists */  #define CHECK_STATUS(expr)                                                    \
151  #define ALLOCA_UNDEMOTE_SLOTLIST(num)     (unsigned*)alloca(((num) + 1) * sizeof(unsigned))      JS_BEGIN_MACRO                                                            \
152  #define ADD_UNDEMOTE_SLOT(list, slot)     list[++list[0]] = slot          JSRecordingStatus _status = (expr);                                   \
153  #define NUM_UNDEMOTE_SLOTS(list)          list[0]          if (_status != JSRS_CONTINUE)                                        \
154  #define CLEAR_UNDEMOTE_SLOTLIST(list)     list[0] = 0            return _status;                                                     \
155        JS_END_MACRO
156    
157  #ifdef JS_JIT_SPEW  #ifdef JS_JIT_SPEW
158  #define ABORT_TRACE(msg)   do { debug_only_v(fprintf(stdout, "abort: %d: %s\n", __LINE__, msg);)  return false; } while (0)  #define debug_only_a(x) if (js_verboseAbort || js_verboseDebug ) { x; }
159    #define ABORT_TRACE_RV(msg, value)                                    \
160        JS_BEGIN_MACRO                                                            \
161            debug_only_a(fprintf(stdout, "abort: %d: %s\n", __LINE__, (msg));)    \
162            return (value);                                                       \
163        JS_END_MACRO
164  #else  #else
165  #define ABORT_TRACE(msg)   return false  #define debug_only_a(x)
166    #define ABORT_TRACE_RV(msg, value)   return (value)
167  #endif  #endif
168    
169    #define ABORT_TRACE(msg)         ABORT_TRACE_RV(msg, JSRS_STOP)
170    #define ABORT_TRACE_ERROR(msg)   ABORT_TRACE_RV(msg, JSRS_ERROR)
171    
172  #ifdef JS_JIT_SPEW  #ifdef JS_JIT_SPEW
173  struct __jitstats {  struct __jitstats {
174  #define JITSTAT(x) uint64 x;  #define JITSTAT(x) uint64 x;
# Line 201  Line 249 
249  #define AUDIT(x) ((void)0)  #define AUDIT(x) ((void)0)
250  #endif /* JS_JIT_SPEW */  #endif /* JS_JIT_SPEW */
251    
252  #define INS_CONST(c)    addName(lir->insImm(c), #c)  #define INS_CONST(c)        addName(lir->insImm(c), #c)
253  #define INS_CONSTPTR(p) addName(lir->insImmPtr((void*) (p)), #p)  #define INS_CONSTPTR(p)     addName(lir->insImmPtr(p), #p)
254    #define INS_CONSTFUNPTR(p)  addName(lir->insImmPtr(JS_FUNC_TO_DATA_PTR(void*, p)), #p)
255    #define INS_CONSTWORD(v)    addName(lir->insImmPtr((void *) v), #v)
256    
257  using namespace avmplus;  using namespace avmplus;
258  using namespace nanojit;  using namespace nanojit;
# Line 213  Line 263 
263    
264  #ifdef JS_JIT_SPEW  #ifdef JS_JIT_SPEW
265  void  void
266  js_DumpPeerStability(Fragmento* frago, const void* ip);  js_DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint32 globalShape, uint32 argc);
267  #endif  #endif
268    
269  /* We really need a better way to configure the JIT. Shaver, where is my fancy JIT object? */  /* We really need a better way to configure the JIT. Shaver, where is my fancy JIT object? */
270  static bool nesting_enabled = true;  static bool did_we_check_processor_features = false;
 #if defined(NANOJIT_IA32)  
 static bool did_we_check_sse2 = false;  
 #endif  
271    
272  #ifdef JS_JIT_SPEW  #ifdef JS_JIT_SPEW
273  static bool verbose_debug = getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "verbose");  bool js_verboseDebug = getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "verbose");
274  #define debug_only_v(x) if (verbose_debug) { x; }  bool js_verboseStats = js_verboseDebug ||
275  #else      (getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "stats"));
276  #define debug_only_v(x)  bool js_verboseAbort = getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "abort");
277  #endif  #endif
278    
279  /* The entire VM shares one oracle. Collisions and concurrent updates are tolerated and worst  /* The entire VM shares one oracle. Collisions and concurrent updates are tolerated and worst
280     case cause performance regressions. */     case cause performance regressions. */
281  static Oracle oracle;  static Oracle oracle;
282    
 /* Blacklists the root peer fragment at a fragment's PC.  This is so blacklisting stays at the  
    top of the peer list and not scattered around. */  
 void  
 js_BlacklistPC(Fragmento* frago, Fragment* frag);  
   
283  Tracker::Tracker()  Tracker::Tracker()
284  {  {
285      pagelist = 0;      pagelist = 0;
# Line 296  Line 338 
338  }  }
339    
340  #if defined NANOJIT_64BIT  #if defined NANOJIT_64BIT
341  #define PAGEMASK        0x7ff  #define PAGEMASK 0x7ff
342  #else  #else
343  #define PAGEMASK        0xfff  #define PAGEMASK 0xfff
344  #endif  #endif
345    
346  LIns*  LIns*
# Line 319  Line 361 
361      p->map[(jsuword(v) & PAGEMASK) >> 2] = i;      p->map[(jsuword(v) & PAGEMASK) >> 2] = i;
362  }  }
363    
364    static inline jsuint argSlots(JSStackFrame* fp)
365    {
366        return JS_MAX(fp->argc, fp->fun->nargs);
367    }
368    
369  static inline bool isNumber(jsval v)  static inline bool isNumber(jsval v)
370  {  {
371      return JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v);      return JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v);
# Line 341  Line 388 
388      return JSDOUBLE_IS_INT(d, i);      return JSDOUBLE_IS_INT(d, i);
389  }  }
390    
391    static inline jsint asInt32(jsval v)
392    {
393        JS_ASSERT(isNumber(v));
394        if (JSVAL_IS_INT(v))
395            return JSVAL_TO_INT(v);
396    #ifdef DEBUG
397        jsint i;
398        JS_ASSERT(JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v), i));
399    #endif
400        return jsint(*JSVAL_TO_DOUBLE(v));
401    }
402    
403  /* Return JSVAL_DOUBLE for all numbers (int and double) and the tag otherwise. */  /* Return JSVAL_DOUBLE for all numbers (int and double) and the tag otherwise. */
404  static inline uint8 getPromotedType(jsval v)  static inline uint8 getPromotedType(jsval v)
405  {  {
406      return JSVAL_IS_INT(v) ? JSVAL_DOUBLE : uint8(JSVAL_TAG(v));      if (JSVAL_IS_INT(v))
407            return JSVAL_DOUBLE;
408        if (JSVAL_IS_OBJECT(v)) {
409            if (JSVAL_IS_NULL(v))
410                return JSVAL_TNULL;
411            if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)))
412                return JSVAL_TFUN;
413            return JSVAL_OBJECT;
414        }
415        return uint8(JSVAL_TAG(v));
416  }  }
417    
418  /* Return JSVAL_INT for all whole numbers that fit into signed 32-bit and the tag otherwise. */  /* Return JSVAL_INT for all whole numbers that fit into signed 32-bit and the tag otherwise. */
419  static inline uint8 getCoercedType(jsval v)  static inline uint8 getCoercedType(jsval v)
420  {  {
421      return isInt32(v) ? JSVAL_INT : (uint8) JSVAL_TAG(v);      if (isInt32(v))
422            return JSVAL_INT;
423        if (JSVAL_IS_OBJECT(v)) {
424            if (JSVAL_IS_NULL(v))
425                return JSVAL_TNULL;
426            if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)))
427                return JSVAL_TFUN;
428            return JSVAL_OBJECT;
429        }
430        return uint8(JSVAL_TAG(v));
431    }
432    
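The practical effect of the new object splitting: whole-number values import as JSVAL_INT, null imports as JSVAL_TNULL, function objects as JSVAL_TFUN, and only plain non-null, non-function objects keep JSVAL_OBJECT. A hedged sketch of the same decision order using a hypothetical Value stand-in (the real code operates on jsval with the SpiderMonkey object macros):

    // Stand-in illustration of getCoercedType's decision order; Value and the
    // numeric results are placeholders that mirror the real/fake tags above.
    struct Value {
        bool isWholeInt32;   // a number that fits in a signed 32-bit int
        bool isObject, isNull, isFunction;
        unsigned tag;        // the ordinary jsval tag for everything else
    };

    static unsigned coercedType(const Value& v) {
        if (v.isWholeInt32)
            return 1;                    // JSVAL_INT
        if (v.isObject) {
            if (v.isNull)     return 5;  // JSVAL_TNULL
            if (v.isFunction) return 7;  // JSVAL_TFUN
            return 0;                    // JSVAL_OBJECT
        }
        return v.tag;                    // double, string, boolean unchanged
    }
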
433    /*
434     * Constant seed and accumulate step borrowed from the DJB hash.
435     */
436    
437    #define ORACLE_MASK (ORACLE_SIZE - 1)
438    #define FRAGMENT_TABLE_MASK (FRAGMENT_TABLE_SIZE - 1)
439    #define HASH_SEED 5381
440    
441    static inline void
442    hash_accum(uintptr_t& h, uintptr_t i, uintptr_t mask)
443    {
444        h = ((h << 5) + h + (mask & i)) & mask;
445    }
446    
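The accumulate step above is the classic DJB hash (h = h*33 + i) with the table mask folded into each step, so intermediate values never leave the index range of the power-of-two bitset or fragment table. A small standalone check (illustrative only; the ORACLE_SIZE value used here is an assumption):

    #include <cstdint>
    #include <cassert>

    // h = h*33 + i, confined to a power-of-two table via `mask`.
    static inline void hashAccum(uintptr_t& h, uintptr_t i, uintptr_t mask) {
        h = ((h << 5) + h + (mask & i)) & mask;   // (h << 5) + h == h * 33
    }

    int main() {
        const uintptr_t mask = 1024 - 1;   // stand-in for ORACLE_SIZE - 1
        uintptr_t h = 5381;                // HASH_SEED
        hashAccum(h, 0xdeadbeef, mask);    // e.g. script pointer
        hashAccum(h, 42, mask);            // e.g. slot number
        assert(h <= mask);                 // always a valid bitset index
        return 0;
    }
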
447    JS_REQUIRES_STACK static inline int
448    stackSlotHash(JSContext* cx, unsigned slot)
449    {
450        uintptr_t h = HASH_SEED;
451        hash_accum(h, uintptr_t(cx->fp->script), ORACLE_MASK);
452        hash_accum(h, uintptr_t(cx->fp->regs->pc), ORACLE_MASK);
453        hash_accum(h, uintptr_t(slot), ORACLE_MASK);
454        return int(h);
455    }
456    
457    JS_REQUIRES_STACK static inline int
458    globalSlotHash(JSContext* cx, unsigned slot)
459    {
460        uintptr_t h = HASH_SEED;
461        JSStackFrame* fp = cx->fp;
462    
463        while (fp->down)
464            fp = fp->down;
465    
466        hash_accum(h, uintptr_t(fp->script), ORACLE_MASK);
467        hash_accum(h, uintptr_t(OBJ_SHAPE(JS_GetGlobalForObject(cx, fp->scopeChain))),
468                   ORACLE_MASK);
469        hash_accum(h, uintptr_t(slot), ORACLE_MASK);
470        return int(h);
471    }
472    
473    Oracle::Oracle()
474    {
475        /* Grow the oracle bitsets to their (fixed) size here, once. */
476        _stackDontDemote.set(&gc, ORACLE_SIZE-1);
477        _globalDontDemote.set(&gc, ORACLE_SIZE-1);
478        clear();
479  }  }
480    
481  /* Tell the oracle that a certain global variable should not be demoted. */  /* Tell the oracle that a certain global variable should not be demoted. */
482  void  JS_REQUIRES_STACK void
483  Oracle::markGlobalSlotUndemotable(JSScript* script, unsigned slot)  Oracle::markGlobalSlotUndemotable(JSContext* cx, unsigned slot)
484  {  {
485      _dontDemote.set(&gc, (slot % ORACLE_SIZE));      _globalDontDemote.set(&gc, globalSlotHash(cx, slot));
486  }  }
487    
488  /* Consult with the oracle whether we shouldn't demote a certain global variable. */  /* Consult with the oracle whether we shouldn't demote a certain global variable. */
489  bool  JS_REQUIRES_STACK bool
490  Oracle::isGlobalSlotUndemotable(JSScript* script, unsigned slot) const  Oracle::isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const
491  {  {
492      return _dontDemote.get(slot % ORACLE_SIZE);      return _globalDontDemote.get(globalSlotHash(cx, slot));
493  }  }
494    
495  /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */  /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
496  void  JS_REQUIRES_STACK void
497  Oracle::markStackSlotUndemotable(JSScript* script, jsbytecode* ip, unsigned slot)  Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot)
498  {  {
499      uint32 hash = uint32(intptr_t(ip)) + (slot << 5);      _stackDontDemote.set(&gc, stackSlotHash(cx, slot));
     hash %= ORACLE_SIZE;  
     _dontDemote.set(&gc, hash);  
500  }  }
501    
502  /* Consult with the oracle whether we shouldn't demote a certain slot. */  /* Consult with the oracle whether we shouldn't demote a certain slot. */
503  bool  JS_REQUIRES_STACK bool
504  Oracle::isStackSlotUndemotable(JSScript* script, jsbytecode* ip, unsigned slot) const  Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const
505  {  {
506      uint32 hash = uint32(intptr_t(ip)) + (slot << 5);      return _stackDontDemote.get(stackSlotHash(cx, slot));
     hash %= ORACLE_SIZE;  
     return _dontDemote.get(hash);  
507  }  }
508    
 /* Clear the oracle. */  
509  void  void
510  Oracle::clear()  Oracle::clearDemotability()
511  {  {
512      _dontDemote.reset();      _stackDontDemote.reset();
513        _globalDontDemote.reset();
514    }
515    
516    
517    struct PCHashEntry : public JSDHashEntryStub {
518        size_t          count;
519    };
520    
521    #define PC_HASH_COUNT 1024
522    
523    static void
524    js_Blacklist(jsbytecode* pc)
525    {
526        JS_ASSERT(*pc == JSOP_LOOP || *pc == JSOP_NOP);
527        *pc = JSOP_NOP;
528    }
529    
530    static void
531    js_Backoff(JSContext *cx, jsbytecode* pc, Fragment* tree=NULL)
532    {
533        JSDHashTable *table = &JS_TRACE_MONITOR(cx).recordAttempts;
534    
535        if (table->ops) {
536            PCHashEntry *entry = (PCHashEntry *)
537                JS_DHashTableOperate(table, pc, JS_DHASH_ADD);
538            
539            if (entry) {
540                if (!entry->key) {
541                    entry->key = pc;
542                    JS_ASSERT(entry->count == 0);
543                }
544                JS_ASSERT(JS_DHASH_ENTRY_IS_LIVE(&(entry->hdr)));
545                if (entry->count++ > (BL_ATTEMPTS * MAXPEERS)) {
546                    entry->count = 0;
547                    js_Blacklist(pc);
548                    return;
549                }
550            }
551        }
552    
553        if (tree) {
554            tree->hits() -= BL_BACKOFF;
555    
556            /*
557             * In case there is no entry or no table (due to OOM) or some
558             * serious imbalance in the recording-attempt distribution on a
559             * multitree, give each tree another chance to blacklist here as
560             * well.
561             */
562            if (++tree->recordAttempts > BL_ATTEMPTS)
563                js_Blacklist(pc);
564        }
565    }
566    
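Taken together, js_Blacklist and js_Backoff form a two-level throttle: per-PC attempt counts in the recordAttempts table decide when the loop header gets rewritten to JSOP_NOP permanently, while subtracting BL_BACKOFF from a tree's hit counter merely delays the next recording attempt. A simplified sketch of that policy with a hypothetical LoopSite record (the real code keys a JSDHashTable by pc and keeps the per-tree counter on the Fragment):

    // Illustrative policy only; the constants mirror the patch, everything
    // else is a stand-in.
    static const unsigned BL_ATTEMPTS = 2;   // attempts before permanent blacklist
    static const int      BL_BACKOFF  = 32;  // hits skipped after a failed attempt
    static const unsigned MAXPEERS    = 9;   // max peer trees per loop header

    struct LoopSite {
        unsigned pcAttempts;   // per-PC failures (recordAttempts table)
        unsigned treeAttempts; // per-tree failures (tree->recordAttempts)
        int      hits;         // hit counter that triggers recording at HOTLOOP
        bool     blacklisted;  // JSOP_LOOP rewritten to JSOP_NOP
    };

    static void backoff(LoopSite& site) {
        if (site.pcAttempts++ > BL_ATTEMPTS * MAXPEERS) {
            site.pcAttempts = 0;
            site.blacklisted = true;       // js_Blacklist(pc)
            return;
        }
        site.hits -= BL_BACKOFF;           // skip future hits before retrying
        if (++site.treeAttempts > BL_ATTEMPTS)
            site.blacklisted = true;       // per-tree fallback blacklist
    }
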
567    static void
568    js_resetRecordingAttempts(JSContext *cx, jsbytecode* pc)
569    {
570        JSDHashTable *table = &JS_TRACE_MONITOR(cx).recordAttempts;
571        if (table->ops) {
572            PCHashEntry *entry = (PCHashEntry *)
573                JS_DHashTableOperate(table, pc, JS_DHASH_LOOKUP);
574    
575            if (JS_DHASH_ENTRY_IS_FREE(&(entry->hdr)))
576                return;
577            JS_ASSERT(JS_DHASH_ENTRY_IS_LIVE(&(entry->hdr)));
578            entry->count = 0;
579        }
580    }
581    
582    static inline size_t
583    fragmentHash(const void *ip, JSObject* globalObj, uint32 globalShape, uint32 argc)
584    {
585        uintptr_t h = HASH_SEED;
586        hash_accum(h, uintptr_t(ip), FRAGMENT_TABLE_MASK);
587        hash_accum(h, uintptr_t(globalObj), FRAGMENT_TABLE_MASK);
588        hash_accum(h, uintptr_t(globalShape), FRAGMENT_TABLE_MASK);
589        hash_accum(h, uintptr_t(argc), FRAGMENT_TABLE_MASK);
590        return size_t(h);
591    }
592    
593    /*
594     * argc is cx->fp->argc at the trace loop header, i.e., the number of arguments
595     * pushed for the innermost JS frame. This is required as part of the fragment
596     * key because the fragment will write those arguments back to the interpreter
597     * stack when it exits, using its typemap, which implicitly incorporates a given
598     * value of argc. Without this feature, a fragment could be called as an inner
599     * tree with two different values of argc, and entry type checking or exit
600     * frame synthesis could crash.
601     */
602    struct VMFragment : public Fragment
603    {
604        VMFragment(const void* _ip, JSObject* _globalObj, uint32 _globalShape, uint32 _argc) :
605            Fragment(_ip),
606            next(NULL),
607            globalObj(_globalObj),
608            globalShape(_globalShape),
609            argc(_argc)
610        {}
611        VMFragment* next;
612        JSObject* globalObj;
613        uint32 globalShape;
614        uint32 argc;
615    };
616    
617    static VMFragment*
618    getVMFragment(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape,
619                  uint32 argc)
620    {
621        size_t h = fragmentHash(ip, globalObj, globalShape, argc);
622        VMFragment* vf = tm->vmfragments[h];
623        while (vf &&
624               ! (vf->globalObj == globalObj &&
625                  vf->globalShape == globalShape &&
626                  vf->ip == ip &&
627                  vf->argc == argc)) {
628            vf = vf->next;
629        }
630        return vf;
631    }
632    
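getVMFragment walks a per-bucket chain and requires all four key components to match, so the same loop header entered with a different argc (or after a global-shape change) resolves to a different peer tree. A stand-in sketch of that lookup with a hypothetical Frag struct (the real table lives in JSTraceMonitor::vmfragments):

    #include <cstdint>
    #include <cstddef>

    // Hypothetical fragment record: just the four key fields plus the chain link.
    struct Frag {
        const void* ip;          // loop header PC
        void*       globalObj;   // global object identity
        uint32_t    globalShape; // shape of the global object at record time
        uint32_t    argc;        // innermost frame's argc at the loop header
        Frag*       next;        // hash-bucket chain
    };

    static Frag* lookup(Frag* bucketHead, const void* ip, void* obj,
                        uint32_t shape, uint32_t argc) {
        for (Frag* f = bucketHead; f; f = f->next) {
            if (f->ip == ip && f->globalObj == obj &&
                f->globalShape == shape && f->argc == argc)
                return f;        // all four parts of the key must match
        }
        return NULL;             // caller creates and chains a new anchor
    }
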
633    static VMFragment*
634    getLoop(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape,
635            uint32 argc)
636    {
637        return getVMFragment(tm, ip, globalObj, globalShape, argc);
638    }
639    
640    static Fragment*
641    getAnchor(JSTraceMonitor* tm, const void *ip, JSObject* globalObj, uint32 globalShape,
642              uint32 argc)
643    {
644        VMFragment *f = new (&gc) VMFragment(ip, globalObj, globalShape, argc);
645        JS_ASSERT(f);
646    
647        Fragment *p = getVMFragment(tm, ip, globalObj, globalShape, argc);
648    
649        if (p) {
650            f->first = p;
651            /* append at the end of the peer list */
652            Fragment* next;
653            while ((next = p->peer) != NULL)
654                p = next;
655            p->peer = f;
656        } else {
657            /* this is the first fragment */
658            f->first = f;
659            size_t h = fragmentHash(ip, globalObj, globalShape, argc);
660            f->next = tm->vmfragments[h];
661            tm->vmfragments[h] = f;
662        }
663        f->anchor = f;
664        f->root = f;
665        f->kind = LoopTrace;
666        return f;
667    }
668    
669    #ifdef DEBUG
670    static void
671    ensureTreeIsUnique(JSTraceMonitor* tm, VMFragment* f, TreeInfo* ti)
672    {
673        JS_ASSERT(f->root == f);
674        /*
675         * Check for duplicate entry type maps.  This is always wrong and hints at
676         * trace explosion since we are trying to stabilize something without
677         * properly connecting peer edges.
678         */
679        TreeInfo* ti_other;
680        for (Fragment* peer = getLoop(tm, f->ip, f->globalObj, f->globalShape, f->argc);
681             peer != NULL;
682             peer = peer->peer) {
683            if (!peer->code() || peer == f)
684                continue;
685            ti_other = (TreeInfo*)peer->vmprivate;
686            JS_ASSERT(ti_other);
687            JS_ASSERT(!ti->typeMap.matches(ti_other->typeMap));
688        }
689    }
690    #endif
691    
692    static void
693    js_AttemptCompilation(JSContext *cx, JSTraceMonitor* tm, JSObject* globalObj, jsbytecode* pc,
694                          uint32 argc)
695    {
696        /*
697         * If we already permanently blacklisted the location, undo that.
698         */
699        JS_ASSERT(*(jsbytecode*)pc == JSOP_NOP || *(jsbytecode*)pc == JSOP_LOOP);
700        *(jsbytecode*)pc = JSOP_LOOP;
701        js_resetRecordingAttempts(cx, pc);
702    
703        /*
704         * Breath new live into all peer fragments at the designated loop header.
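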
705         */
706        Fragment* f = (VMFragment*)getLoop(tm, pc, globalObj, OBJ_SHAPE(globalObj),
707                                           argc);
708        if (!f) {
709            /*
710             * If the global object's shape changed, we can't easily find the
711             * corresponding loop header via a hash table lookup. In this
712             * we simply bail here and hope that the fragment has another
713             * outstanding compilation attempt. This case is extremely rare.
714             */
715            return;
716        }
717        JS_ASSERT(f->root == f);
718        f = f->first;
719        while (f) {
720            JS_ASSERT(f->root == f);
721            --f->recordAttempts;
722            f->hits() = HOTLOOP;
723            f = f->peer;
724        }
725  }  }
726    
 #if defined(NJ_SOFTFLOAT)  
727  JS_DEFINE_CALLINFO_1(static, DOUBLE,    i2f, INT32,                 1, 1)  JS_DEFINE_CALLINFO_1(static, DOUBLE,    i2f, INT32,                 1, 1)
728  JS_DEFINE_CALLINFO_1(static, DOUBLE,    u2f, UINT32,                1, 1)  JS_DEFINE_CALLINFO_1(static, DOUBLE,    u2f, UINT32,                1, 1)
 #endif  
729    
730  static bool isi2f(LInsp i)  static bool isi2f(LInsp i)
731  {  {
732      if (i->isop(LIR_i2f))      if (i->isop(LIR_i2f))
733          return true;          return true;
734    
735  #if defined(NJ_SOFTFLOAT)      if (nanojit::AvmCore::config.soft_float &&
736      if (i->isop(LIR_qjoin) &&          i->isop(LIR_qjoin) &&
737          i->oprnd1()->isop(LIR_call) &&          i->oprnd1()->isop(LIR_call) &&
738          i->oprnd2()->isop(LIR_callh))          i->oprnd2()->isop(LIR_callh))
739      {      {
740          if (i->oprnd1()->callInfo() == &i2f_ci)          if (i->oprnd1()->callInfo() == &i2f_ci)
741              return true;              return true;
742      }      }
 #endif  
743    
744      return false;      return false;
745  }  }
# Line 420  Line 749 
749      if (i->isop(LIR_u2f))      if (i->isop(LIR_u2f))
750          return true;          return true;
751    
752  #if defined(NJ_SOFTFLOAT)      if (nanojit::AvmCore::config.soft_float &&
753      if (i->isop(LIR_qjoin) &&          i->isop(LIR_qjoin) &&
754          i->oprnd1()->isop(LIR_call) &&          i->oprnd1()->isop(LIR_call) &&
755          i->oprnd2()->isop(LIR_callh))          i->oprnd2()->isop(LIR_callh))
756      {      {
757          if (i->oprnd1()->callInfo() == &u2f_ci)          if (i->oprnd1()->callInfo() == &u2f_ci)
758              return true;              return true;
759      }      }
 #endif  
760    
761      return false;      return false;
762  }  }
763    
764  static LInsp iu2fArg(LInsp i)  static LInsp iu2fArg(LInsp i)
765  {  {
766  #if defined(NJ_SOFTFLOAT)      if (nanojit::AvmCore::config.soft_float &&
767      if (i->isop(LIR_qjoin))          i->isop(LIR_qjoin))
768        {
769          return i->oprnd1()->arg(0);          return i->oprnd1()->arg(0);
770  #endif      }
771    
772      return i->oprnd1();      return i->oprnd1();
773  }  }
# Line 460  Line 789 
789    
790  static bool isPromoteInt(LIns* i)  static bool isPromoteInt(LIns* i)
791  {  {
792      jsdouble d;      if (isi2f(i) || i->isconst())
793      return isi2f(i) || i->isconst() ||          return true;
794          (i->isconstq() && (d = i->constvalf()) == jsdouble(jsint(d)) && !JSDOUBLE_IS_NEGZERO(d));      if (!i->isconstq())
795            return false;
796        jsdouble d = i->constvalf();
797        return d == jsdouble(jsint(d)) && !JSDOUBLE_IS_NEGZERO(d);
798  }  }
799    
800  static bool isPromoteUint(LIns* i)  static bool isPromoteUint(LIns* i)
801  {  {
802      jsdouble d;      if (isu2f(i) || i->isconst())
803      return isu2f(i) || i->isconst() ||          return true;
804          (i->isconstq() && (d = i->constvalf()) == (jsdouble)(jsuint)d && !JSDOUBLE_IS_NEGZERO(d));      if (!i->isconstq())
805            return false;
806        jsdouble d = i->constvalf();
807        return d == jsdouble(jsuint(d)) && !JSDOUBLE_IS_NEGZERO(d);
808  }  }
809    
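In other words, a double constant is only promotable when it round-trips through the integer type exactly and is not negative zero (which an integer cannot represent). A tiny illustration of the signed case; the helper name is made up, and like the check in isPromoteInt it assumes the value is within int32 range before the cast:

    #include <cstdint>
    #include <cmath>

    // True when a double constant can be demoted to a signed 32-bit integer
    // without changing its value; -0.0 is rejected because int32 has no -0.
    static bool isPromotableIntConstant(double d) {
        return d == double(int32_t(d)) && !(d == 0.0 && std::signbit(d));
    }
    // isPromotableIntConstant(3.0)  -> true
    // isPromotableIntConstant(3.5)  -> false
    // isPromotableIntConstant(-0.0) -> false
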
810  static bool isPromote(LIns* i)  static bool isPromote(LIns* i)
# Line 491  Line 826 
826              ((c->constval() > 0)));              ((c->constval() > 0)));
827  }  }
828    
829  #if defined(NJ_SOFTFLOAT)  /* soft float support */
 /* soft float */  
830    
831  JS_DEFINE_CALLINFO_1(static, DOUBLE,    fneg, DOUBLE,               1, 1)  JS_DEFINE_CALLINFO_1(static, DOUBLE,    fneg, DOUBLE,               1, 1)
832  JS_DEFINE_CALLINFO_2(static, INT32,     fcmpeq, DOUBLE, DOUBLE,     1, 1)  JS_DEFINE_CALLINFO_2(static, INT32,     fcmpeq, DOUBLE, DOUBLE,     1, 1)
# Line 646  Line 980 
980      }      }
981  };  };
982    
 #endif // NJ_SOFTFLOAT  
   
983  class FuncFilter: public LirWriter  class FuncFilter: public LirWriter
984  {  {
985  public:  public:
# Line 728  Line 1060 
1060    
1061      LInsp insCall(const CallInfo *ci, LInsp args[])      LInsp insCall(const CallInfo *ci, LInsp args[])
1062      {      {
         LInsp s0 = args[0];  
1063          if (ci == &js_DoubleToUint32_ci) {          if (ci == &js_DoubleToUint32_ci) {
1064                LInsp s0 = args[0];
1065              if (s0->isconstq())              if (s0->isconstq())
1066                  return out->insImm(js_DoubleToECMAUint32(s0->constvalf()));                  return out->insImm(js_DoubleToECMAUint32(s0->constvalf()));
1067              if (isi2f(s0) || isu2f(s0))              if (isi2f(s0) || isu2f(s0))
1068                  return iu2fArg(s0);                  return iu2fArg(s0);
1069          } else if (ci == &js_DoubleToInt32_ci) {          } else if (ci == &js_DoubleToInt32_ci) {
1070                LInsp s0 = args[0];
1071              if (s0->isconstq())              if (s0->isconstq())
1072                  return out->insImm(js_DoubleToECMAInt32(s0->constvalf()));                  return out->insImm(js_DoubleToECMAInt32(s0->constvalf()));
1073              if (s0->isop(LIR_fadd) || s0->isop(LIR_fsub)) {              if (s0->isop(LIR_fadd) || s0->isop(LIR_fsub)) {
# Line 748  Line 1081 
1081              if (isi2f(s0) || isu2f(s0))              if (isi2f(s0) || isu2f(s0))
1082                  return iu2fArg(s0);                  return iu2fArg(s0);
1083              // XXX ARM -- check for qjoin(call(UnboxDouble),call(UnboxDouble))              // XXX ARM -- check for qjoin(call(UnboxDouble),call(UnboxDouble))
1084              if (s0->isCall() && s0->callInfo() == &js_UnboxDouble_ci) {              if (s0->isCall()) {
1085                  LIns* args2[] = { callArgN(s0, 0) };                  const CallInfo* ci2 = s0->callInfo();
1086                  return out->insCall(&js_UnboxInt32_ci, args2);                  if (ci2 == &js_UnboxDouble_ci) {
1087              }                      LIns* args2[] = { callArgN(s0, 0) };
1088              if (s0->isCall() && s0->callInfo() == &js_StringToNumber_ci) {                      return out->insCall(&js_UnboxInt32_ci, args2);
1089                  // callArgN's ordering is that as seen by the builtin, not as stored in args here.                  } else if (ci2 == &js_StringToNumber_ci) {
1090                  // True story!                      // callArgN's ordering is that as seen by the builtin, not as stored in
1091                  LIns* args2[] = { callArgN(s0, 1), callArgN(s0, 0) };                      // args here. True story!
1092                  return out->insCall(&js_StringToInt32_ci, args2);                      LIns* args2[] = { callArgN(s0, 1), callArgN(s0, 0) };
1093                        return out->insCall(&js_StringToInt32_ci, args2);
1094                    } else if (ci2 == &js_String_p_charCodeAt0_ci) {
1095                        // Use a fast path builtin for a charCodeAt that converts to an int right away.
1096                        LIns* args2[] = { callArgN(s0, 0) };
1097                        return out->insCall(&js_String_p_charCodeAt0_int_ci, args2);
1098                    } else if (ci2 == &js_String_p_charCodeAt_ci) {
1099                        LIns* idx = callArgN(s0, 1);
1100                        // If the index is not already an integer, force it to be an integer.
1101                        idx = isPromote(idx)
1102                            ? demote(out, idx)
1103                            : out->insCall(&js_DoubleToInt32_ci, &idx);
1104                        LIns* args2[] = { idx, callArgN(s0, 0) };
1105                        return out->insCall(&js_String_p_charCodeAt_int_ci, args2);
1106                    }
1107              }              }
1108          } else if (ci == &js_BoxDouble_ci) {          } else if (ci == &js_BoxDouble_ci) {
1109                LInsp s0 = args[0];
1110              JS_ASSERT(s0->isQuad());              JS_ASSERT(s0->isQuad());
1111              if (s0->isop(LIR_i2f)) {              if (isi2f(s0)) {
1112                  LIns* args2[] = { s0->oprnd1(), args[1] };                  LIns* args2[] = { iu2fArg(s0), args[1] };
1113                  return out->insCall(&js_BoxInt32_ci, args2);                  return out->insCall(&js_BoxInt32_ci, args2);
1114              }              }
1115              if (s0->isCall() && s0->callInfo() == &js_UnboxDouble_ci)              if (s0->isCall() && s0->callInfo() == &js_UnboxDouble_ci)
# Line 772  Line 1120 
1120  };  };
1121    
1122  /* In debug mode vpname contains a textual description of the type of the  /* In debug mode vpname contains a textual description of the type of the
1123     slot during the forall iteration over al slots. */     slot during the forall iteration over all slots. If JS_JIT_SPEW is not
1124       defined, vpnum is set to a very large integer to catch invalid uses of
1125       it. Non-debug code should never use vpnum. */
1126  #ifdef JS_JIT_SPEW  #ifdef JS_JIT_SPEW
1127  #define DEF_VPNAME          const char* vpname; unsigned vpnum  #define DEF_VPNAME          const char* vpname; unsigned vpnum
1128  #define SET_VPNAME(name)    do { vpname = name; vpnum = 0; } while(0)  #define SET_VPNAME(name)    do { vpname = name; vpnum = 0; } while(0)
# Line 780  Line 1130 
1130  #else  #else
1131  #define DEF_VPNAME          do {} while (0)  #define DEF_VPNAME          do {} while (0)
1132  #define vpname ""  #define vpname ""
1133  #define vpnum 0  #define vpnum 0x40000000
1134  #define SET_VPNAME(name)    ((void)0)  #define SET_VPNAME(name)    ((void)0)
1135  #define INC_VPNUM()         ((void)0)  #define INC_VPNUM()         ((void)0)
1136  #endif  #endif
# Line 815  Line 1165 
1165                  vp = &fp->argv[-1];                                           \                  vp = &fp->argv[-1];                                           \
1166                  { code; }                                                     \                  { code; }                                                     \
1167                  SET_VPNAME("argv");                                           \                  SET_VPNAME("argv");                                           \
1168                  vp = &fp->argv[0]; vpstop = &fp->argv[fp->fun->nargs];        \                  vp = &fp->argv[0]; vpstop = &fp->argv[argSlots(fp)];          \
1169                  while (vp < vpstop) { code; ++vp; INC_VPNUM(); }              \                  while (vp < vpstop) { code; ++vp; INC_VPNUM(); }              \
1170              }                                                                 \              }                                                                 \
1171              SET_VPNAME("vars");                                               \              SET_VPNAME("vars");                                               \
# Line 863  Line 1213 
1213    
1214  #define FORALL_SLOTS(cx, ngslots, gslots, callDepth, code)                    \  #define FORALL_SLOTS(cx, ngslots, gslots, callDepth, code)                    \
1215      JS_BEGIN_MACRO                                                            \      JS_BEGIN_MACRO                                                            \
         FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code);                       \  
1216          FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code);                  \          FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code);                  \
1217            FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code);                       \
1218      JS_END_MACRO      JS_END_MACRO
1219    
1220  /* Calculate the total number of native frame slots we need from this frame  /* Calculate the total number of native frame slots we need from this frame
1221     all the way back to the entry frame, including the current stack usage. */     all the way back to the entry frame, including the current stack usage. */
1222  unsigned  JS_REQUIRES_STACK unsigned
1223  js_NativeStackSlots(JSContext *cx, unsigned callDepth)  js_NativeStackSlots(JSContext *cx, unsigned callDepth)
1224  {  {
1225      JSStackFrame* fp = cx->fp;      JSStackFrame* fp = cx->fp;
# Line 884  Line 1234 
1234              slots += fp->script->nfixed;              slots += fp->script->nfixed;
1235          if (callDepth-- == 0) {          if (callDepth-- == 0) {
1236              if (fp->callee)              if (fp->callee)
1237                  slots += 2/*callee,this*/ + fp->fun->nargs;                  slots += 2/*callee,this*/ + argSlots(fp);
1238  #if defined _DEBUG  #if defined _DEBUG
1239              unsigned int m = 0;              unsigned int m = 0;
1240              FORALL_SLOTS_IN_PENDING_FRAMES(cx, origCallDepth, m++);              FORALL_SLOTS_IN_PENDING_FRAMES(cx, origCallDepth, m++);
# Line 901  Line 1251 
1251      JS_NOT_REACHED("js_NativeStackSlots");      JS_NOT_REACHED("js_NativeStackSlots");
1252  }  }
1253    
1254  /* Capture the type map for the selected slots of the global object. */  /*
1255  void   * Capture the type map for the selected slots of the global object and currently pending
1256  TypeMap::captureGlobalTypes(JSContext* cx, SlotList& slots)   * stack frames.
1257     */
1258    JS_REQUIRES_STACK void
1259    TypeMap::captureTypes(JSContext* cx, SlotList& slots, unsigned callDepth)
1260  {  {
1261      unsigned ngslots = slots.length();      unsigned ngslots = slots.length();
1262      uint16* gslots = slots.data();      uint16* gslots = slots.data();
1263      setLength(ngslots);      setLength(js_NativeStackSlots(cx, callDepth) + ngslots);
1264      uint8* map = data();      uint8* map = data();
1265      uint8* m = map;      uint8* m = map;
1266        FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
1267            uint8 type = getCoercedType(*vp);
1268            if ((type == JSVAL_INT) && oracle.isStackSlotUndemotable(cx, unsigned(m - map)))
1269                type = JSVAL_DOUBLE;
1270            JS_ASSERT(type != JSVAL_BOXED);
1271            debug_only_v(printf("capture stack type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
1272            JS_ASSERT(uintptr_t(m - map) < length());
1273            *m++ = type;
1274        );
1275      FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,      FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
1276          uint8 type = getCoercedType(*vp);          uint8 type = getCoercedType(*vp);
1277          if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx->fp->script, gslots[n]))          if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, gslots[n]))
1278              type = JSVAL_DOUBLE;              type = JSVAL_DOUBLE;
1279          JS_ASSERT(type != JSVAL_BOXED);          JS_ASSERT(type != JSVAL_BOXED);
1280            debug_only_v(printf("capture global type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
1281            JS_ASSERT(uintptr_t(m - map) < length());
1282          *m++ = type;          *m++ = type;
1283      );      );
1284        JS_ASSERT(uintptr_t(m - map) == length());
1285  }  }
1286    
1287  /* Capture the type map for the currently pending stack frames. */  JS_REQUIRES_STACK void
1288  void  TypeMap::captureMissingGlobalTypes(JSContext* cx, SlotList& slots, unsigned stackSlots)
 TypeMap::captureStackTypes(JSContext* cx, unsigned callDepth)  
1289  {  {
1290      setLength(js_NativeStackSlots(cx, callDepth));      unsigned oldSlots = length() - stackSlots;
1291      uint8* map = data();      int diff = slots.length() - oldSlots;
1292        JS_ASSERT(diff >= 0);
1293        unsigned ngslots = slots.length();
1294        uint16* gslots = slots.data();
1295        setLength(length() + diff);
1296        uint8* map = data() + stackSlots;
1297      uint8* m = map;      uint8* m = map;
1298      FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,      FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
1299          uint8 type = getCoercedType(*vp);          if (n >= oldSlots) {
1300          if ((type == JSVAL_INT) &&              uint8 type = getCoercedType(*vp);
1301              oracle.isStackSlotUndemotable(cx->fp->script, cx->fp->regs->pc, unsigned(m - map))) {              if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, gslots[n]))
1302              type = JSVAL_DOUBLE;                  type = JSVAL_DOUBLE;
1303                JS_ASSERT(type != JSVAL_BOXED);
1304                debug_only_v(printf("capture global type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
1305                *m = type;
1306                JS_ASSERT((m > map + oldSlots) || (*m == type));
1307          }          }
1308          debug_only_v(printf("capture %s%d: %d\n", vpname, vpnum, type);)          m++;
         *m++ = type;  
1309      );      );
1310  }  }
1311    
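Both capture functions rely on a single layout invariant: the type map stores all stack-slot types first (in FORALL_SLOTS_IN_PENDING_FRAMES order) followed by the global-slot types, which is why captureMissingGlobalTypes can extend the map simply by writing past data() + stackSlots. A schematic sketch of that layout (hypothetical helper, not the real TypeMap class):

    #include <vector>
    #include <cstdint>

    // Schematic layout of a trace type map: stack types first, globals after.
    struct TypeMapLayout {
        std::vector<uint8_t> types;   // length == stackSlots + globalSlots
        unsigned stackSlots;          // boundary between the two regions

        uint8_t* stackTypes()  { return types.data(); }
        uint8_t* globalTypes() { return types.data() + stackSlots; }

        // Newly discovered globals only grow the tail, so existing stack and
        // global entries keep their positions (cf. captureMissingGlobalTypes).
        void appendGlobalType(uint8_t t) { types.push_back(t); }
    };
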
# Line 959  Line 1331 
1331      *plength = clength;      *plength = clength;
1332  }  }
1333    
1334    /* Specializes a tree to any missing globals, including any dependent trees. */
1335    static JS_REQUIRES_STACK void
1336    specializeTreesToMissingGlobals(JSContext* cx, TreeInfo* root)
1337    {
1338        TreeInfo* ti = root;
1339    
1340        ti->typeMap.captureMissingGlobalTypes(cx, *ti->globalSlots, ti->nStackTypes);
1341        JS_ASSERT(ti->globalSlots->length() == ti->typeMap.length() - ti->nStackTypes);
1342      
1343        for (unsigned i = 0; i < root->dependentTrees.length(); i++) {
1344            ti = (TreeInfo*)root->dependentTrees.data()[i]->vmprivate;
1345            /* ti can be NULL if we hit the recording tree in emitTreeCall; this is harmless. */
1346            if (ti && ti->nGlobalTypes() < ti->globalSlots->length())
1347                specializeTreesToMissingGlobals(cx, ti);
1348        }
1349        for (unsigned i = 0; i < root->linkedTrees.length(); i++) {
1350            ti = (TreeInfo*)root->linkedTrees.data()[i]->vmprivate;
1351            if (ti && ti->nGlobalTypes() < ti->globalSlots->length())
1352                specializeTreesToMissingGlobals(cx, ti);
1353        }
1354    }
1355    
1356  static void  static void
1357  js_TrashTree(JSContext* cx, Fragment* f);  js_TrashTree(JSContext* cx, Fragment* f);
1358    
1359    JS_REQUIRES_STACK
1360  TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _fragment,  TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _fragment,
1361          TreeInfo* ti, unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap,          TreeInfo* ti, unsigned stackSlots, unsigned ngslots, uint8* typeMap,
1362          VMSideExit* innermostNestedGuard, Fragment* outerToBlacklist)          VMSideExit* innermostNestedGuard, jsbytecode* outer, uint32 outerArgc)
1363  {  {
1364      JS_ASSERT(!_fragment->vmprivate && ti);      JS_ASSERT(!_fragment->vmprivate && ti && cx->fp->regs->pc == (jsbytecode*)_fragment->ip);
1365    
1366        /* Reset the fragment state we care about in case we got a recycled fragment. */
1367        _fragment->lastIns = NULL;
1368    
1369      this->cx = cx;      this->cx = cx;
1370      this->traceMonitor = &JS_TRACE_MONITOR(cx);      this->traceMonitor = &JS_TRACE_MONITOR(cx);
1371      this->globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);      this->globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
1372        this->lexicalBlock = cx->fp->blockChain;
1373      this->anchor = _anchor;      this->anchor = _anchor;
1374      this->fragment = _fragment;      this->fragment = _fragment;
1375      this->lirbuf = _fragment->lirbuf;      this->lirbuf = _fragment->lirbuf;
1376      this->treeInfo = ti;      this->treeInfo = ti;
1377      this->callDepth = _anchor ? _anchor->calldepth : 0;      this->callDepth = _anchor ? _anchor->calldepth : 0;
1378      this->atoms = cx->fp->script->atomMap.vector;      this->atoms = FrameAtomBase(cx, cx->fp);
1379      this->deepAborted = false;      this->deepAborted = false;
1380      this->applyingArguments = false;      this->trashSelf = false;
     this->trashTree = false;  
     this->whichTreeToTrash = _fragment->root;  
1381      this->global_dslots = this->globalObj->dslots;      this->global_dslots = this->globalObj->dslots;
1382      this->terminate = false;      this->loop = true; /* default assumption is we are compiling a loop */
     this->outerToBlacklist = outerToBlacklist;  
1383      this->wasRootFragment = _fragment == _fragment->root;      this->wasRootFragment = _fragment == _fragment->root;
1384        this->outer = outer;
1385        this->outerArgc = outerArgc;
1386        this->pendingTraceableNative = NULL;
1387        this->newobj_ins = NULL;
1388        this->generatedTraceableNative = new JSTraceableNative();
1389        JS_ASSERT(generatedTraceableNative);
1390    
1391      debug_only_v(printf("recording starting from %s:%u@%u\n",      debug_only_v(printf("recording starting from %s:%u@%u\n",
1392                          cx->fp->script->filename,                          ti->treeFileName, ti->treeLineNumber, ti->treePCOffset);)
1393                          js_FramePCToLineNumber(cx, cx->fp),      debug_only_v(printf("globalObj=%p, shape=%d\n", (void*)this->globalObj, OBJ_SHAPE(this->globalObj));)
                         FramePCOffset(cx->fp));)  
     debug_only_v(printf("globalObj=%p, shape=%d\n", this->globalObj, OBJ_SHAPE(this->globalObj));)  
1394    
1395      lir = lir_buf_writer = new (&gc) LirBufWriter(lirbuf);      lir = lir_buf_writer = new (&gc) LirBufWriter(lirbuf);
1396  #ifdef DEBUG      debug_only_v(lir = verbose_filter = new (&gc) VerboseWriter(&gc, lir, lirbuf->names);)
1397      if (verbose_debug)      if (nanojit::AvmCore::config.soft_float)
1398          lir = verbose_filter = new (&gc) VerboseWriter(&gc, lir, lirbuf->names);          lir = float_filter = new (&gc) SoftFloatFilter(lir);
1399  #endif      else
1400  #ifdef NJ_SOFTFLOAT          float_filter = 0;
     lir = float_filter = new (&gc) SoftFloatFilter(lir);  
 #endif  
1401      lir = cse_filter = new (&gc) CseFilter(lir, &gc);      lir = cse_filter = new (&gc) CseFilter(lir, &gc);
1402      lir = expr_filter = new (&gc) ExprFilter(lir);      lir = expr_filter = new (&gc) ExprFilter(lir);
1403      lir = func_filter = new (&gc) FuncFilter(lir);      lir = func_filter = new (&gc) FuncFilter(lir);
1404      lir->ins0(LIR_start);      lir->ins0(LIR_start);
1405    
1406      if (!nanojit::AvmCore::config.tree_opt || fragment->root == fragment)      if (!nanojit::AvmCore::config.tree_opt || fragment->root == fragment)
1407          lirbuf->state = addName(lir->insParam(0, 0), "state");          lirbuf->state = addName(lir->insParam(0, 0), "state");
1408    
1409      lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");      lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
1410      lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");      lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
1411      cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");      cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
     gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, gp)), "gp");  
1412      eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");      eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
1413      eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");      eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");
1414    
1415        /* If we came from exit, we might not have enough global types. */
1416        if (ti->globalSlots->length() > ti->nGlobalTypes())
1417            specializeTreesToMissingGlobals(cx, ti);
1418    
1419      /* read into registers all values on the stack and all globals we know so far */      /* read into registers all values on the stack and all globals we know so far */
1420      import(treeInfo, lirbuf->sp, ngslots, callDepth, globalTypeMap, stackTypeMap);      import(treeInfo, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap);
1421    
1422        if (fragment == fragment->root) {
1423            /*
1424             * We poll the operation callback request flag. It is updated asynchronously whenever
1425             * the callback is to be invoked.
1426             */
1427            LIns* x = lir->insLoadi(cx_ins, offsetof(JSContext, operationCallbackFlag));
1428            guard(true, lir->ins_eq0(x), snapshot(TIMEOUT_EXIT));
1429        }
1430    
1431      /* If we are attached to a tree call guard, make sure the guard the inner tree exited from      /* If we are attached to a tree call guard, make sure the guard the inner tree exited from
1432         is what we expect it to be. */         is what we expect it to be. */
1433      if (_anchor && _anchor->exitType == NESTED_EXIT) {      if (_anchor && _anchor->exitType == NESTED_EXIT) {
1434          LIns* nested_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state,          LIns* nested_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state,
1435                                                  offsetof(InterpState, lastTreeExitGuard)),                                                  offsetof(InterpState, lastTreeExitGuard)),
1436                                                  "lastTreeExitGuard");                                                  "lastTreeExitGuard");
1437          guard(true, lir->ins2(LIR_eq, nested_ins, INS_CONSTPTR(innermostNestedGuard)), NESTED_EXIT);          guard(true, lir->ins2(LIR_eq, nested_ins, INS_CONSTPTR(innermostNestedGuard)), NESTED_EXIT);
1438      }      }
# Line 1031  Line 1441 
1441  TreeInfo::~TreeInfo()  TreeInfo::~TreeInfo()
1442  {  {
1443      UnstableExit* temp;      UnstableExit* temp;
1444        
1445      while (unstableExits) {      while (unstableExits) {
1446          temp = unstableExits->next;          temp = unstableExits->next;
1447          delete unstableExits;          delete unstableExits;
# Line 1056  Line 1466 
1466              JS_ASSERT(!fragment->root->vmprivate);              JS_ASSERT(!fragment->root->vmprivate);
1467              delete treeInfo;              delete treeInfo;
1468          }          }
1469          if (trashTree)  
1470              js_TrashTree(cx, whichTreeToTrash);          if (trashSelf)
1471                js_TrashTree(cx, fragment->root);
1472    
1473            for (unsigned int i = 0; i < whichTreesToTrash.length(); i++)
1474                js_TrashTree(cx, whichTreesToTrash.get(i));
1475      } else if (wasRootFragment) {      } else if (wasRootFragment) {
1476          delete treeInfo;          delete treeInfo;
1477      }      }
# Line 1067  Line 1481 
1481      delete cse_filter;      delete cse_filter;
1482      delete expr_filter;      delete expr_filter;
1483      delete func_filter;      delete func_filter;
 #ifdef NJ_SOFTFLOAT  
1484      delete float_filter;      delete float_filter;
 #endif  
1485      delete lir_buf_writer;      delete lir_buf_writer;
1486        delete generatedTraceableNative;
1487  }  }
1488    
1489  void TraceRecorder::removeFragmentoReferences()  void TraceRecorder::removeFragmentoReferences()
# Line 1078  Line 1491 
1491      fragment = NULL;      fragment = NULL;
1492  }  }
1493    
1494    void TraceRecorder::deepAbort()
1495    {
1496        debug_only_v(printf("deep abort");)
1497        deepAborted = true;
1498    }
1499    
1500  /* Add debug information to a LIR instruction as we emit it. */  /* Add debug information to a LIR instruction as we emit it. */
1501  inline LIns*  inline LIns*
1502  TraceRecorder::addName(LIns* ins, const char* name)  TraceRecorder::addName(LIns* ins, const char* name)
1503  {  {
1504  #ifdef DEBUG  #ifdef JS_JIT_SPEW
1505      lirbuf->names->addName(ins, name);      if (js_verboseDebug)
1506            lirbuf->names->addName(ins, name);
1507  #endif  #endif
1508      return ins;      return ins;
1509  }  }
# Line 1101  Line 1521 
1521  {  {
1522      JS_ASSERT(isGlobal(p));      JS_ASSERT(isGlobal(p));
1523      if (size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS)      if (size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS)
1524          return size_t(p - globalObj->fslots) * sizeof(double);          return sizeof(InterpState) + size_t(p - globalObj->fslots) * sizeof(double);
1525      return ((p - globalObj->dslots) + JS_INITIAL_NSLOTS) * sizeof(double);      return sizeof(InterpState) + ((p - globalObj->dslots) + JS_INITIAL_NSLOTS) * sizeof(double);
1526  }  }
1527    
1528  /* Determine whether a value is a global stack slot */  /* Determine whether a value is a global stack slot */
# Line 1114  Line 1534 
1534  }  }
1535    
1536  /* Determine the offset in the native stack for a jsval we track */  /* Determine the offset in the native stack for a jsval we track */
1537  ptrdiff_t  JS_REQUIRES_STACK ptrdiff_t
1538  TraceRecorder::nativeStackOffset(jsval* p) const  TraceRecorder::nativeStackOffset(jsval* p) const
1539  {  {
1540  #ifdef DEBUG  #ifdef DEBUG
# Line 1152  Line 1572 
1572          fp = *fsp;          fp = *fsp;
1573          if (fp->callee) {          if (fp->callee) {
1574              if (fsp == fstack) {              if (fsp == fstack) {
1575                  if (size_t(p - &fp->argv[-2]) < size_t(2/*callee,this*/ + fp->fun->nargs))                  if (size_t(p - &fp->argv[-2]) < size_t(2/*callee,this*/ + argSlots(fp)))
1576                      RETURN(offset + size_t(p - &fp->argv[-2]) * sizeof(double));                      RETURN(offset + size_t(p - &fp->argv[-2]) * sizeof(double));
1577                  offset += (2/*callee,this*/ + fp->fun->nargs) * sizeof(double);                  offset += (2/*callee,this*/ + argSlots(fp)) * sizeof(double);
1578              }              }
1579              if (size_t(p - &fp->slots[0]) < fp->script->nfixed)              if (size_t(p - &fp->slots[0]) < fp->script->nfixed)
1580                  RETURN(offset + size_t(p - &fp->slots[0]) * sizeof(double));                  RETURN(offset + size_t(p - &fp->slots[0]) * sizeof(double));
# Line 1194  Line 1614 
1614          treeInfo->maxNativeStackSlots = slots;          treeInfo->maxNativeStackSlots = slots;
1615  }  }
1616    
1617  /* Unbox a jsval into a slot. Slots are wide enough to hold double values directly (instead of  /* Unbox a jsval into a slot. Slots are wide enough to hold double values directly (instead of
1618     storing a pointer to them). We now assert instead of type checking, the caller must ensure the     storing a pointer to them). We now assert instead of type checking, the caller must ensure the
1619     types are compatible. */     types are compatible. */
1620  static void  static void
1621  ValueToNative(JSContext* cx, jsval v, uint8 type, double* slot)  ValueToNative(JSContext* cx, jsval v, uint8 type, double* slot)
1622  {  {
1623      unsigned tag = JSVAL_TAG(v);      unsigned tag = JSVAL_TAG(v);
1624      switch (type) {      switch (type) {
1625          case JSVAL_OBJECT:
1626            JS_ASSERT(tag == JSVAL_OBJECT);
1627            JS_ASSERT(!JSVAL_IS_NULL(v) && !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v)));
1628            *(JSObject**)slot = JSVAL_TO_OBJECT(v);
1629            debug_only_v(printf("object<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
1630                                JSVAL_IS_NULL(v)
1631                                ? "null"
1632                                : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)
1633            return;
1634        case JSVAL_INT:        case JSVAL_INT:
1635          jsint i;          jsint i;
1636          if (JSVAL_IS_INT(v))          if (JSVAL_IS_INT(v))
# Line 1222  Line 1651 
1651          *(jsdouble*)slot = d;          *(jsdouble*)slot = d;
1652          debug_only_v(printf("double<%g> ", d);)          debug_only_v(printf("double<%g> ", d);)
1653          return;          return;
1654        case JSVAL_BOOLEAN:        case JSVAL_BOXED:
1655          JS_ASSERT(tag == JSVAL_BOOLEAN);          JS_NOT_REACHED("found boxed type in an entry type map");
         *(JSBool*)slot = JSVAL_TO_BOOLEAN(v);  
         debug_only_v(printf("boolean<%d> ", *(JSBool*)slot);)  
1656          return;          return;
1657        case JSVAL_STRING:        case JSVAL_STRING:
1658          JS_ASSERT(tag == JSVAL_STRING);          JS_ASSERT(tag == JSVAL_STRING);
1659          *(JSString**)slot = JSVAL_TO_STRING(v);          *(JSString**)slot = JSVAL_TO_STRING(v);
1660          debug_only_v(printf("string<%p> ", *(JSString**)slot);)          debug_only_v(printf("string<%p> ", (void*)(*(JSString**)slot));)
1661          return;          return;
1662        default:        case JSVAL_TNULL:
         /* Note: we should never see JSVAL_BOXED in an entry type map. */  
         JS_ASSERT(type == JSVAL_OBJECT);  
1663          JS_ASSERT(tag == JSVAL_OBJECT);          JS_ASSERT(tag == JSVAL_OBJECT);
1664          *(JSObject**)slot = JSVAL_TO_OBJECT(v);          *(JSObject**)slot = NULL;
1665          debug_only_v(printf("object<%p:%s> ", JSVAL_TO_OBJECT(v),          debug_only_v(printf("null ");)
1666                              JSVAL_IS_NULL(v)          return;
1667                              ? "null"        case JSVAL_BOOLEAN:
1668                              : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)          /* Watch out for pseudo-booleans. */
1669            JS_ASSERT(tag == JSVAL_BOOLEAN);
1670            *(JSBool*)slot = JSVAL_TO_PSEUDO_BOOLEAN(v);
1671            debug_only_v(printf("boolean<%d> ", *(JSBool*)slot);)
1672            return;
1673          case JSVAL_TFUN: {
1674            JS_ASSERT(tag == JSVAL_OBJECT);
1675            JSObject* obj = JSVAL_TO_OBJECT(v);
1676            *(JSObject**)slot = obj;
1677    #ifdef DEBUG
1678            JSFunction* fun = GET_FUNCTION_PRIVATE(cx, obj);
1679            debug_only_v(printf("function<%p:%s> ", (void*) obj,
1680                                fun->atom
1681                                ? JS_GetStringBytes(ATOM_TO_STRING(fun->atom))
1682                                : "unnamed");)
1683    #endif
1684          return;          return;
1685          }
1686      }      }
1687    
1688        JS_NOT_REACHED("unexpected type");
1689  }  }
1690    
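ValueToNative above writes each value into an 8-byte native slot whose interpretation is fixed by the entry type map. The following stand-alone sketch (Slot, TraceType, and writeSlot are illustrative names, not the engine's) shows the core idea: the slot is always double-wide, and the bits written depend only on the recorded trace type, with the caller guaranteeing that the type matches.

    #include <cstdint>

    typedef double Slot;                            // one native stack slot, 8 bytes wide
    enum TraceType { TT_INT, TT_DOUBLE, TT_OBJECT };

    // Write a value into a slot. As in ValueToNative, the caller must ensure the
    // type is compatible; this sketch asserts nothing and simply stores the bits.
    static void writeSlot(Slot* slot, TraceType t, int32_t i, double d, void* obj)
    {
        switch (t) {
          case TT_INT:    *(int32_t*)slot = i;   break;  // ints stored raw, not as doubles
          case TT_DOUBLE: *(double*)slot  = d;   break;  // doubles stored inline, unboxed
          case TT_OBJECT: *(void**)slot   = obj; break;  // object pointers stored unboxed
        }
    }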
1691  /* We maintain an emergency recovery pool of doubles so we can recover safely if a trace runs  /* We maintain an emergency pool of doubles so we can recover safely if a trace runs
1692     out of memory (doubles or objects). */     out of memory (doubles or objects). */
1693  static jsval  static jsval
1694  AllocateDoubleFromRecoveryPool(JSContext* cx)  AllocateDoubleFromReservedPool(JSContext* cx)
1695  {  {
1696      JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);      JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
1697      JS_ASSERT(tm->recoveryDoublePoolPtr > tm->recoveryDoublePool);      JS_ASSERT(tm->reservedDoublePoolPtr > tm->reservedDoublePool);
1698      return *--tm->recoveryDoublePoolPtr;      return *--tm->reservedDoublePoolPtr;
1699  }  }
1700    
1701  static bool  static bool
1702  js_ReplenishRecoveryPool(JSContext* cx, JSTraceMonitor* tm)  js_ReplenishReservedPool(JSContext* cx, JSTraceMonitor* tm)
1703  {  {
1704      /* We should not be called with a full pool. */      /* We should not be called with a full pool. */
1705      JS_ASSERT((size_t) (tm->recoveryDoublePoolPtr - tm->recoveryDoublePool) <      JS_ASSERT((size_t) (tm->reservedDoublePoolPtr - tm->reservedDoublePool) <
1706                MAX_NATIVE_STACK_SLOTS);                MAX_NATIVE_STACK_SLOTS);
1707    
1708      /*      /*
1709       * When the GC runs in js_NewDoubleInRootedValue, it resets       * When the GC runs in js_NewDoubleInRootedValue, it resets
1710       * tm->recoveryDoublePoolPtr back to tm->recoveryDoublePool.       * tm->reservedDoublePoolPtr back to tm->reservedDoublePool.
1711       */       */
1712      JSRuntime* rt = cx->runtime;      JSRuntime* rt = cx->runtime;
1713      uintN gcNumber = rt->gcNumber;      uintN gcNumber = rt->gcNumber;
1714      jsval* ptr = tm->recoveryDoublePoolPtr;      uintN lastgcNumber = gcNumber;
1715      while (ptr < tm->recoveryDoublePool + MAX_NATIVE_STACK_SLOTS) {      jsval* ptr = tm->reservedDoublePoolPtr;
1716          if (!js_NewDoubleInRootedValue(cx, 0.0, ptr))      while (ptr < tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) {
1717            if (!js_NewDoubleInRootedValue(cx, 0.0, ptr))
1718              goto oom;              goto oom;
1719          if (rt->gcNumber != gcNumber) {  
1720              JS_ASSERT(tm->recoveryDoublePoolPtr == tm->recoveryDoublePool);          /* Check if the last call to js_NewDoubleInRootedValue GC'd. */
1721              ptr = tm->recoveryDoublePool;          if (rt->gcNumber != lastgcNumber) {
1722                lastgcNumber = rt->gcNumber;
1723                JS_ASSERT(tm->reservedDoublePoolPtr == tm->reservedDoublePool);
1724                ptr = tm->reservedDoublePool;
1725    
1726                /*
1727                 * Have we GC'd more than once? We're probably running really
1728                 * low on memory, bail now.
1729                 */
1730              if (uintN(rt->gcNumber - gcNumber) > uintN(1))              if (uintN(rt->gcNumber - gcNumber) > uintN(1))
1731                  goto oom;                  goto oom;
1732              continue;              continue;
1733          }          }
1734          ++ptr;          ++ptr;
1735      }      }
1736      tm->recoveryDoublePoolPtr = ptr;      tm->reservedDoublePoolPtr = ptr;
1737      return true;      return true;
1738    
1739  oom:  oom:
# Line 1289  Line 1741 
1741       * Already massive GC pressure, no need to hold doubles back.       * Already massive GC pressure, no need to hold doubles back.
1742       * We won't run any native code anyway.       * We won't run any native code anyway.
1743       */       */
1744      tm->recoveryDoublePoolPtr = tm->recoveryDoublePool;      tm->reservedDoublePoolPtr = tm->reservedDoublePool;
1745      return false;      return false;
1746  }  }
1747    
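The reserved double pool above exists so that leaving a trace can never fail part-way through: doubles are pre-allocated while it is still safe to fall back to the interpreter, and consumed infallibly during exit. A simplified sketch of that pattern, with hypothetical names (ReservedPool, replenish, take) and without the GC-restart logic of js_ReplenishReservedPool:

    #include <cstddef>
    #include <new>

    struct ReservedPool {
        static const size_t N = 256;      // stands in for MAX_NATIVE_STACK_SLOTS
        double* slots[N];
        size_t  used;

        ReservedPool() : used(0) {}

        // Called before running native code, where failure is still recoverable.
        bool replenish() {
            while (used < N) {
                double* d = new (std::nothrow) double(0.0);
                if (!d)
                    return false;         // caller stays in the interpreter
                slots[used++] = d;
            }
            return true;
        }

        // Called while writing values back after a side exit; must not fail.
        double* take() { return slots[--used]; }
    };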
1748  /* Box a value from the native stack back into the jsval format. Integers  /* Box a value from the native stack back into the jsval format. Integers
1749     that are too large to fit into a jsval are automatically boxed into     that are too large to fit into a jsval are automatically boxed into
1750     heap-allocated doubles. */     heap-allocated doubles. */
1751  static bool  static void
1752  NativeToValue(JSContext* cx, jsval& v, uint8 type, double* slot)  NativeToValue(JSContext* cx, jsval& v, uint8 type, double* slot)
1753  {  {
1754      jsint i;      jsint i;
1755      jsdouble d;      jsdouble d;
1756      switch (type) {      switch (type) {
1757        case JSVAL_BOOLEAN:        case JSVAL_OBJECT:
1758          v = BOOLEAN_TO_JSVAL(*(JSBool*)slot);          v = OBJECT_TO_JSVAL(*(JSObject**)slot);
1759          debug_only_v(printf("boolean<%d> ", *(JSBool*)slot);)          JS_ASSERT(JSVAL_TAG(v) == JSVAL_OBJECT); /* if this fails the pointer was not aligned */
1760            JS_ASSERT(v != JSVAL_ERROR_COOKIE); /* don't leak JSVAL_ERROR_COOKIE */
1761            debug_only_v(printf("object<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
1762                                JSVAL_IS_NULL(v)
1763                                ? "null"
1764                                : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)
1765          break;          break;
1766        case JSVAL_INT:        case JSVAL_INT:
1767          i = *(jsint*)slot;          i = *(jsint*)slot;
# Line 1326  Line 1783 
1783             double boxes. */             double boxes. */
1784          if (cx->doubleFreeList) {          if (cx->doubleFreeList) {
1785  #ifdef DEBUG  #ifdef DEBUG
1786              bool ok =              JSBool ok =
1787  #endif  #endif
1788                  js_NewDoubleInRootedValue(cx, d, &v);                  js_NewDoubleInRootedValue(cx, d, &v);
1789              JS_ASSERT(ok);              JS_ASSERT(ok);
1790              return true;              return;
1791          }          }
1792          v = AllocateDoubleFromRecoveryPool(cx);          v = AllocateDoubleFromReservedPool(cx);
1793          JS_ASSERT(JSVAL_IS_DOUBLE(v) && *JSVAL_TO_DOUBLE(v) == 0.0);          JS_ASSERT(JSVAL_IS_DOUBLE(v) && *JSVAL_TO_DOUBLE(v) == 0.0);
1794          *JSVAL_TO_DOUBLE(v) = d;          *JSVAL_TO_DOUBLE(v) = d;
1795          return true;          return;
1796        }        }
1797          case JSVAL_BOXED:
1798            v = *(jsval*)slot;
1799            JS_ASSERT(v != JSVAL_ERROR_COOKIE); /* don't leak JSVAL_ERROR_COOKIE */
1800            debug_only_v(printf("box<%lx> ", v));
1801            break;
1802        case JSVAL_STRING:        case JSVAL_STRING:
1803          v = STRING_TO_JSVAL(*(JSString**)slot);          v = STRING_TO_JSVAL(*(JSString**)slot);
1804          JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING); /* if this fails the pointer was not aligned */          JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING); /* if this fails the pointer was not aligned */
1805          debug_only_v(printf("string<%p> ", *(JSString**)slot);)          debug_only_v(printf("string<%p> ", *(JSString**)slot);)
1806          break;          break;
1807        case JSVAL_BOXED:        case JSVAL_TNULL:
1808          v = *(jsval*)slot;          JS_ASSERT(*(JSObject**)slot == NULL);
1809          debug_only_v(printf("box<%lx> ", v));          v = JSVAL_NULL;
1810            debug_only_v(printf("null<%p> ", *(JSObject**)slot));
1811          break;          break;
1812        default:        case JSVAL_BOOLEAN:
1813          JS_ASSERT(type == JSVAL_OBJECT);          /* Watch out for pseudo-booleans. */
1814            v = PSEUDO_BOOLEAN_TO_JSVAL(*(JSBool*)slot);
1815            debug_only_v(printf("boolean<%d> ", *(JSBool*)slot);)
1816            break;
1817          case JSVAL_TFUN: {
1818            JS_ASSERT(HAS_FUNCTION_CLASS(*(JSObject**)slot));
1819          v = OBJECT_TO_JSVAL(*(JSObject**)slot);          v = OBJECT_TO_JSVAL(*(JSObject**)slot);
1820          JS_ASSERT(JSVAL_TAG(v) == JSVAL_OBJECT); /* if this fails the pointer was not aligned */  #ifdef DEBUG
1821          debug_only_v(printf("object<%p:%s> ", JSVAL_TO_OBJECT(v),          JSFunction* fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(v));
1822                              JSVAL_IS_NULL(v)          debug_only_v(printf("function<%p:%s> ", (void*)JSVAL_TO_OBJECT(v),
1823                              ? "null"                              fun->atom
1824                              : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)                              ? JS_GetStringBytes(ATOM_TO_STRING(fun->atom))
1825                                : "unnamed");)
1826    #endif
1827          break;          break;
1828          }
1829      }      }
     return true;  
1830  }  }
1831    
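The comment above NativeToValue notes that integers too large for a jsval are re-boxed as heap doubles. The cut-off comes from the tagged representation: a jsval reserves low bits for its tag, so only a sub-range of int32 fits inline. A small illustrative check (the limits shown are stand-ins for JSVAL_INT_MIN/JSVAL_INT_MAX, not quoted from the headers):

    #include <cstdint>

    static const int32_t kTaggedIntMax =  (1 << 30) - 1;  // illustrative inline-int maximum
    static const int32_t kTaggedIntMin = -(1 << 30);      // illustrative inline-int minimum

    // If this returns false, the exit path has to allocate a heap double,
    // which is why the reserved double pool above exists.
    static bool fitsInTaggedInt(int32_t i)
    {
        return i >= kTaggedIntMin && i <= kTaggedIntMax;
    }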
1832  /* Attempt to unbox the given list of interned globals onto the native global frame. */  /* Attempt to unbox the given list of interned globals onto the native global frame. */
1833  static void  static JS_REQUIRES_STACK void
1834  BuildNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)  BuildNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
1835  {  {
1836      debug_only_v(printf("global: ");)      debug_only_v(printf("global: ");)
# Line 1372  Line 1842 
1842  }  }
1843    
1844  /* Attempt to unbox the given JS frame onto a native frame. */  /* Attempt to unbox the given JS frame onto a native frame. */
1845  static void  static JS_REQUIRES_STACK void
1846  BuildNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np)  BuildNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np)
1847  {  {
1848      debug_only_v(printf("stack: ");)      debug_only_v(printf("stack: ");)
# Line 1384  Line 1854 
1854      debug_only_v(printf("\n");)      debug_only_v(printf("\n");)
1855  }  }
1856    
1857  /* Box the given native frame into a JS frame. This only fails due to a hard error  /* Box the given native frame into a JS frame. This is infallible. */
1858     (out of memory for example). */  static JS_REQUIRES_STACK int
 static int  
1859  FlushNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)  FlushNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
1860  {  {
1861      uint8* mp_base = mp;      uint8* mp_base = mp;
1862      FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,      FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
1863          if (!NativeToValue(cx, *vp, *mp, np + gslots[n]))          debug_only_v(printf("%s%u=", vpname, vpnum);)
1864              return -1;          NativeToValue(cx, *vp, *mp, np + gslots[n]);
1865          ++mp;          ++mp;
1866      );      );
1867      debug_only_v(printf("\n");)      debug_only_v(printf("\n");)
1868      return mp - mp_base;      return mp - mp_base;
1869  }  }
1870    
1871    /*
1872     * Generic function to read upvars on trace.
1873     *     T   Traits type parameter. Must provide static functions:
1874     *             interp_get(fp, slot)     Read the value out of an interpreter frame.
1875     *             native_slot(argc, slot)  Return the position of the desired value in the on-trace
1876     *                                      stack frame (with position 0 being callee).
1877     *
1878     *     level       Static level of the function containing the upvar definition.
1879     *     slot        Identifies the value to get. The meaning is defined by the traits type.
1880     *     callDepth   Call depth of current point relative to trace entry
1881     */
1882    template<typename T>
1883    uint32 JS_INLINE
1884    js_GetUpvarOnTrace(JSContext* cx, uint32 level, int32 slot, uint32 callDepth, double* result)
1885    {
1886        InterpState* state = cx->interpState;
1887        FrameInfo** fip = state->rp + callDepth;
1888    
1889        /*
1890         * First search the FrameInfo call stack for an entry containing
1891         * our upvar, namely one with level == upvarLevel.
1892         */
1893        while (--fip >= state->callstackBase) {
1894            FrameInfo* fi = *fip;
1895            JSFunction* fun = GET_FUNCTION_PRIVATE(cx, fi->callee);
1896            uintN calleeLevel = fun->u.i.script->staticLevel;
1897            if (calleeLevel == level) {
1898                /*
1899                 * Now find the upvar's value in the native stack.
1900                 * nativeStackFramePos is the offset of the start of the
1901                 * activation record corresponding to *fip in the native
1902                 * stack.
1903                 */
1904                int32 nativeStackFramePos = state->callstackBase[0]->spoffset;
1905                for (FrameInfo** fip2 = state->callstackBase; fip2 <= fip; fip2++)
1906                    nativeStackFramePos += (*fip2)->spdist;
1907                nativeStackFramePos -= (2 + (*fip)->get_argc());
1908                uint32 native_slot = T::native_slot((*fip)->get_argc(), slot);
1909                *result = state->stackBase[nativeStackFramePos + native_slot];
1910                return fi->get_typemap()[native_slot];
1911            }
1912        }
1913    
1914        // Next search the trace entry frame, which is not in the FrameInfo stack.
1915        if (state->outermostTree->script->staticLevel == level) {
1916            uint32 argc = ((VMFragment*) state->outermostTree->fragment)->argc;
1917            uint32 native_slot = T::native_slot(argc, slot);
1918            *result = state->stackBase[native_slot];
1919            return state->callstackBase[0]->get_typemap()[native_slot];
1920        }
1921    
1922        /*
1923         * If we did not find the upvar in the frames for the active traces,
1924         * then we simply get the value from the interpreter state.
1925         */
1926        JS_ASSERT(level < JS_DISPLAY_SIZE);
1927        JSStackFrame* fp = cx->display[level];
1928        jsval v = T::interp_get(fp, slot);
1929        uint8 type = getCoercedType(v);
1930        ValueToNative(cx, v, type, result);
1931        return type;
1932    }
1933    
1934    // For this traits type, 'slot' is the argument index, which may be -2 for callee.
1935    struct UpvarArgTraits {
1936        static jsval interp_get(JSStackFrame* fp, int32 slot) {
1937            return fp->argv[slot];
1938        }
1939    
1940        static uint32 native_slot(uint32 argc, int32 slot) {
1941            return 2 /*callee,this*/ + slot;
1942        }
1943    };
1944    
1945    uint32 JS_FASTCALL
1946    js_GetUpvarArgOnTrace(JSContext* cx, uint32 staticLevel, int32 slot, uint32 callDepth, double* result)
1947    {
1948        return js_GetUpvarOnTrace<UpvarArgTraits>(cx, staticLevel, slot, callDepth, result);
1949    }
1950    
1951    // For this traits type, 'slot' is an index into the local slots array.
1952    struct UpvarVarTraits {
1953        static jsval interp_get(JSStackFrame* fp, int32 slot) {
1954            return fp->slots[slot];
1955        }
1956    
1957        static uint32 native_slot(uint32 argc, int32 slot) {
1958            return 2 /*callee,this*/ + argc + slot;
1959        }
1960    };
1961    
1962    uint32 JS_FASTCALL
1963    js_GetUpvarVarOnTrace(JSContext* cx, uint32 staticLevel, int32 slot, uint32 callDepth, double* result)
1964    {
1965        return js_GetUpvarOnTrace<UpvarVarTraits>(cx, staticLevel, slot, callDepth, result);
1966    }
1967    
1968    /*
1969     * For this traits type, 'slot' is an index into the stack area (within slots, after nfixed)
1970     * of a frame with no function. (On trace, the top-level frame is the only one that can have
1971     * no function.)
1972     */
1973    struct UpvarStackTraits {
1974        static jsval interp_get(JSStackFrame* fp, int32 slot) {
1975            return fp->slots[slot + fp->script->nfixed];
1976        }
1977    
1978        static uint32 native_slot(uint32 argc, int32 slot) {
1979            /*
1980             * Locals are not imported by the tracer when the frame has no function, so
1981             * we do not add fp->script->nfixed.
1982             */
1983            JS_ASSERT(argc == 0);
1984            return slot;
1985        }
1986    };
1987    
1988    uint32 JS_FASTCALL
1989    js_GetUpvarStackOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDepth, double* result)
1990    {
1991        return js_GetUpvarOnTrace<UpvarStackTraits>(cx, upvarLevel, slot, callDepth, result);
1992    }
1993    
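The upvar readers above all funnel through one template, js_GetUpvarOnTrace, specialized by a traits struct that knows where a particular kind of slot lives. A toy version of that pattern (ArgTraits, LocalTraits, and readSlot are illustrative, and the frame layout is reduced to callee/this followed by args and locals):

    // One generic reader, parameterized by a traits type.
    struct ArgTraits {
        static int nativeSlot(int /*argc*/, int slot) { return 2 /*callee,this*/ + slot; }
    };
    struct LocalTraits {
        static int nativeSlot(int argc, int slot) { return 2 /*callee,this*/ + argc + slot; }
    };

    template <typename Traits>
    static double readSlot(const double* frame, int argc, int slot)
    {
        return frame[Traits::nativeSlot(argc, slot)];
    }

    // Usage: readSlot<ArgTraits>(frame, argc, 0) reads the first argument,
    // readSlot<LocalTraits>(frame, argc, 0) reads the first local.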
1994  /**  /**
1995   * Box the given native stack frame into the virtual machine stack. This fails   * Box the given native stack frame into the virtual machine stack. This
1996   * only due to a hard error (out of memory for example).   * is infallible.
1997   *   *
1998   * @param callDepth the distance between the entry frame into our trace and   * @param callDepth the distance between the entry frame into our trace and
1999   *                  cx->fp when we make this call.  If this is not called as a   *                  cx->fp when we make this call.  If this is not called as a
# Line 1414  Line 2006 
2006   *                  be restored.   *                  be restored.
2007   * @return the number of things we popped off of np.   * @return the number of things we popped off of np.
2008   */   */
2009  static int  static JS_REQUIRES_STACK int
2010  FlushNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np,  FlushNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np,
2011                        JSStackFrame* stopFrame)                        JSStackFrame* stopFrame)
2012  {  {
# Line 1424  Line 2016 
2016      FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,      FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
2017          if (vp == stopAt) goto skip;          if (vp == stopAt) goto skip;
2018          debug_only_v(printf("%s%u=", vpname, vpnum);)          debug_only_v(printf("%s%u=", vpname, vpnum);)
2019          if (!NativeToValue(cx, *vp, *mp, np))          NativeToValue(cx, *vp, *mp, np);
             return -1;  
2020          ++mp; ++np          ++mp; ++np
2021      );      );
2022  skip:  skip:
# Line 1447  Line 2038 
2038          }          }
2039          for (; n != 0; fp = fp->down) {          for (; n != 0; fp = fp->down) {
2040              --n;              --n;
2041              if (fp->callee) { // might not have it if the entry frame is global              if (fp->callee) {
2042                    /*
2043                     * We might return from trace with a different callee object, but it still
2044                     * has to be the same JSFunction (FIXME: bug 471425, eliminate fp->callee).
2045                     */
2046                  JS_ASSERT(JSVAL_IS_OBJECT(fp->argv[-1]));                  JS_ASSERT(JSVAL_IS_OBJECT(fp->argv[-1]));
2047                    JS_ASSERT(HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(fp->argv[-2])));
2048                    JS_ASSERT(GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fp->argv[-2])) ==
2049                              GET_FUNCTION_PRIVATE(cx, fp->callee));
2050                    JS_ASSERT(GET_FUNCTION_PRIVATE(cx, fp->callee) == fp->fun);
2051                    fp->callee = JSVAL_TO_OBJECT(fp->argv[-2]);
2052    
2053                    /*
2054                     * SynthesizeFrame sets scopeChain to NULL, because we can't calculate the
2055                     * correct scope chain until we have the final callee. Calculate the real
2056                     * scope object here.
2057                     */
2058                    if (!fp->scopeChain) {
2059                        fp->scopeChain = OBJ_GET_PARENT(cx, fp->callee);
2060                        if (fp->fun->flags & JSFUN_HEAVYWEIGHT) {
2061                            /*
2062                             * Set hookData to null because the failure case for js_GetCallObject
2063                             * involves it calling the debugger hook.
2064                             *
2065                             * Allocating the Call object must not fail, so use an object
2066                             * previously reserved by js_ExecuteTree if needed.
2067                             */
2068                            void* hookData = ((JSInlineFrame*)fp)->hookData;
2069                            ((JSInlineFrame*)fp)->hookData = NULL;
2070                            JS_ASSERT(!JS_TRACE_MONITOR(cx).useReservedObjects);
2071                            JS_TRACE_MONITOR(cx).useReservedObjects = JS_TRUE;
2072    #ifdef DEBUG
2073                            JSObject *obj =
2074    #endif
2075                                js_GetCallObject(cx, fp);
2076                            JS_ASSERT(obj);
2077                            JS_TRACE_MONITOR(cx).useReservedObjects = JS_FALSE;
2078                            ((JSInlineFrame*)fp)->hookData = hookData;
2079                        }
2080                    }
2081                  fp->thisp = JSVAL_TO_OBJECT(fp->argv[-1]);                  fp->thisp = JSVAL_TO_OBJECT(fp->argv[-1]);
2082                    if (fp->flags & JSFRAME_CONSTRUCTING) // constructors always compute 'this'
2083                        fp->flags |= JSFRAME_COMPUTED_THIS;
2084              }              }
2085          }          }
2086      }      }
# Line 1458  Line 2089 
2089  }  }
2090    
2091  /* Emit load instructions onto the trace that read the initial stack state. */  /* Emit load instructions onto the trace that read the initial stack state. */
2092  void  JS_REQUIRES_STACK void
2093  TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, uint8& t,  TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, uint8 t,
2094                        const char *prefix, uintN index, JSStackFrame *fp)                        const char *prefix, uintN index, JSStackFrame *fp)
2095  {  {
2096      LIns* ins;      LIns* ins;
# Line 1472  Line 2103 
2103          ins = lir->insLoadi(base, offset);          ins = lir->insLoadi(base, offset);
2104          ins = lir->ins1(LIR_i2f, ins);          ins = lir->ins1(LIR_i2f, ins);
2105      } else {      } else {
2106          JS_ASSERT(t == JSVAL_BOXED || isNumber(*p) == (t == JSVAL_DOUBLE));          JS_ASSERT_IF(t != JSVAL_BOXED, isNumber(*p) == (t == JSVAL_DOUBLE));
2107          if (t == JSVAL_DOUBLE) {          if (t == JSVAL_DOUBLE) {
2108              ins = lir->insLoad(LIR_ldq, base, offset);              ins = lir->insLoad(LIR_ldq, base, offset);
2109          } else if (t == JSVAL_BOOLEAN) {          } else if (t == JSVAL_BOOLEAN) {
# Line 1481  Line 2112 
2112              ins = lir->insLoad(LIR_ldp, base, offset);              ins = lir->insLoad(LIR_ldp, base, offset);
2113          }          }
2114      }      }
2115        checkForGlobalObjectReallocation();
2116      tracker.set(p, ins);      tracker.set(p, ins);
2117    
2118  #ifdef DEBUG  #ifdef DEBUG
2119      char name[64];      char name[64];
2120      JS_ASSERT(strlen(prefix) < 10);      JS_ASSERT(strlen(prefix) < 10);
# Line 1490  Line 2123 
2123      const char* funName = NULL;      const char* funName = NULL;
2124      if (*prefix == 'a' || *prefix == 'v') {      if (*prefix == 'a' || *prefix == 'v') {
2125          mark = JS_ARENA_MARK(&cx->tempPool);          mark = JS_ARENA_MARK(&cx->tempPool);
2126          if (JS_GET_LOCAL_NAME_COUNT(fp->fun) != 0)          if (fp->fun->hasLocalNames())
2127              localNames = js_GetLocalNameArray(cx, fp->fun, &cx->tempPool);              localNames = js_GetLocalNameArray(cx, fp->fun, &cx->tempPool);
2128          funName = fp->fun->atom ? js_AtomToPrintableString(cx, fp->fun->atom) : "<anonymous>";          funName = fp->fun->atom ? js_AtomToPrintableString(cx, fp->fun->atom) : "<anonymous>";
2129      }      }
# Line 1513  Line 2146 
2146      addName(ins, name);      addName(ins, name);
2147    
2148      static const char* typestr[] = {      static const char* typestr[] = {
2149          "object", "int", "double", "3", "string", "5", "boolean", "any"          "object", "int", "double", "boxed", "string", "null", "boolean", "function"
2150      };      };
2151      debug_only_v(printf("import vp=%p name=%s type=%s flags=%d\n",      debug_only_v(printf("import vp=%p name=%s type=%s flags=%d\n",
2152                          p, name, typestr[t & 7], t >> 3);)                          (void*)p, name, typestr[t & 7], t >> 3);)
2153  #endif  #endif
2154  }  }
2155    
2156  void  JS_REQUIRES_STACK void
2157  TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned ngslots, unsigned callDepth,  TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned stackSlots, unsigned ngslots,
2158                        uint8* globalTypeMap, uint8* stackTypeMap)                        unsigned callDepth, uint8* typeMap)
2159  {  {
2160      /* If we get a partial list that doesn't have all the types (i.e. recording from a side      /* If we get a partial list that doesn't have all the types (i.e. recording from a side
2161         exit that was recorded but we added more global slots later), merge the missing types         exit that was recorded but we added more global slots later), merge the missing types
# Line 1534  Line 2167 
2167         is if that other trace had at its end a compatible type distribution with the entry         is if that other trace had at its end a compatible type distribution with the entry
2168         map. Since thats exactly what we used to fill in the types our current side exit         map. Since thats exactly what we used to fill in the types our current side exit
2169         didn't provide, this is always safe to do. */         didn't provide, this is always safe to do. */
2170      unsigned length;  
2171      if (ngslots < (length = traceMonitor->globalTypeMap->length()))      uint8* globalTypeMap = typeMap + stackSlots;
2172          mergeTypeMaps(&globalTypeMap, &ngslots,      unsigned length = treeInfo->nGlobalTypes();
2173                        traceMonitor->globalTypeMap->data(), length,  
2174        /*
2175         * This is potentially the typemap of the side exit and thus shorter than the tree's
2176         * global type map.
2177         */
2178        if (ngslots < length) {
2179            mergeTypeMaps(&globalTypeMap/*out param*/, &ngslots/*out param*/,
2180                          treeInfo->globalTypeMap(), length,
2181                        (uint8*)alloca(sizeof(uint8) * length));                        (uint8*)alloca(sizeof(uint8) * length));
2182      JS_ASSERT(ngslots == traceMonitor->globalTypeMap->length());      }
2183        JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
2184    
2185      /* the first time we compile a tree this will be empty as we add entries lazily */      /*
2186      uint16* gslots = traceMonitor->globalSlots->data();       * Check whether there are any values on the stack we have to unbox and do that first
2187      uint8* m = globalTypeMap;       * before we waste any time fetching the state from the stack.
2188         */
2189        ptrdiff_t offset = -treeInfo->nativeStackBase;
2190        uint8* m = typeMap;
2191        FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
2192            if (*m == JSVAL_BOXED) {
2193                import(sp, offset, vp, JSVAL_BOXED, "boxed", vpnum, cx->fp);
2194                LIns* vp_ins = get(vp);
2195                unbox_jsval(*vp, vp_ins, copy(anchor));
2196                set(vp, vp_ins);
2197            }
2198            m++; offset += sizeof(double);
2199        );
2200    
2201        /*
2202         * The first time we compile a tree this will be empty as we add entries lazily.
2203         */
2204        uint16* gslots = treeInfo->globalSlots->data();
2205        m = globalTypeMap;
2206      FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,      FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
2207          import(gp_ins, nativeGlobalOffset(vp), vp, *m, vpname, vpnum, NULL);          JS_ASSERT(*m != JSVAL_BOXED);
2208            import(lirbuf->state, nativeGlobalOffset(vp), vp, *m, vpname, vpnum, NULL);
2209          m++;          m++;
2210      );      );
2211      ptrdiff_t offset = -treeInfo->nativeStackBase;      offset = -treeInfo->nativeStackBase;
2212      m = stackTypeMap;      m = typeMap;
2213      FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,      FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
2214          import(sp, offset, vp, *m, vpname, vpnum, fp);          if (*m != JSVAL_BOXED)
2215                import(sp, offset, vp, *m, vpname, vpnum, fp);
2216          m++; offset += sizeof(double);          m++; offset += sizeof(double);
2217      );      );
2218  }  }
2219    
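The merge described at the top of import() handles side exits whose global type map is shorter than the tree's current one: the exit's types are trusted for the slots it knew about, and the tail is taken from the tree. A compact sketch of that merge (a stand-in for the recorder's mergeTypeMaps, not its exact signature):

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    static void mergeGlobalTypes(const uint8_t* exitTypes, unsigned exitLen,
                                 const uint8_t* treeTypes, unsigned treeLen,
                                 uint8_t* out)
    {
        assert(exitLen <= treeLen);
        std::memcpy(out, exitTypes, exitLen);              // types the exit recorded
        std::memcpy(out + exitLen, treeTypes + exitLen,    // missing tail from the tree
                    treeLen - exitLen);
    }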
2220    JS_REQUIRES_STACK bool
2221    TraceRecorder::isValidSlot(JSScope* scope, JSScopeProperty* sprop)
2222    {
2223        uint32 setflags = (js_CodeSpec[*cx->fp->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR));
2224    
2225        if (setflags) {
2226            if (!SPROP_HAS_STUB_SETTER(sprop))
2227                ABORT_TRACE_RV("non-stub setter", false);
2228            if (sprop->attrs & JSPROP_READONLY)
2229                ABORT_TRACE_RV("writing to a read-only property", false);
2230        }
2231        /* This check applies even when setflags == 0. */
2232        if (setflags != JOF_SET && !SPROP_HAS_STUB_GETTER(sprop))
2233            ABORT_TRACE_RV("non-stub getter", false);
2234    
2235        if (!SPROP_HAS_VALID_SLOT(sprop, scope))
2236            ABORT_TRACE_RV("slotless obj property", false);
2237    
2238        return true;
2239    }
2240    
2241  /* Lazily import a global slot if we don't already have it in the tracker. */  /* Lazily import a global slot if we don't already have it in the tracker. */
2242  bool  JS_REQUIRES_STACK bool
2243  TraceRecorder::lazilyImportGlobalSlot(unsigned slot)  TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
2244  {  {
2245      if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */      if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
2246          return false;          return false;
2247        /*
2248         * If the global object grows too large, alloca in js_ExecuteTree might fail, so
2249         * abort tracing on global objects with unreasonably many slots.
2250         */
2251        if (STOBJ_NSLOTS(globalObj) > MAX_GLOBAL_SLOTS)
2252            return false;
2253      jsval* vp = &STOBJ_GET_SLOT(globalObj, slot);      jsval* vp = &STOBJ_GET_SLOT(globalObj, slot);
2254      if (tracker.has(vp))      if (known(vp))
2255          return true; /* we already have it */          return true; /* we already have it */
2256      unsigned index = traceMonitor->globalSlots->length();      unsigned index = treeInfo->globalSlots->length();
     /* If this the first global we are adding, remember the shape of the global object. */  
     if (index == 0)  
         traceMonitor->globalShape = OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain));  
2257      /* Add the slot to the list of interned global slots. */      /* Add the slot to the list of interned global slots. */
2258      traceMonitor->globalSlots->add(slot);      JS_ASSERT(treeInfo->nGlobalTypes() == treeInfo->globalSlots->length());
2259        treeInfo->globalSlots->add(slot);
2260      uint8 type = getCoercedType(*vp);      uint8 type = getCoercedType(*vp);
2261      if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx->fp->script, slot))      if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, slot))
2262          type = JSVAL_DOUBLE;          type = JSVAL_DOUBLE;
2263      traceMonitor->globalTypeMap->add(type);      treeInfo->typeMap.add(type);
2264      import(gp_ins, slot*sizeof(double), vp, type, "global", index, NULL);      import(lirbuf->state, sizeof(struct InterpState) + slot*sizeof(double),
2265               vp, type, "global", index, NULL);
2266        specializeTreesToMissingGlobals(cx, treeInfo);
2267      return true;      return true;
2268  }  }
2269    
# Line 1592  Line 2280 
2280  }  }
2281    
2282  /* Update the tracker, then issue a write back store. */  /* Update the tracker, then issue a write back store. */
2283  void  JS_REQUIRES_STACK void
2284  TraceRecorder::set(jsval* p, LIns* i, bool initializing)  TraceRecorder::set(jsval* p, LIns* i, bool initializing)
2285  {  {
2286      JS_ASSERT(initializing || tracker.has(p));      JS_ASSERT(i != NULL);
2287        JS_ASSERT(initializing || known(p));
2288        checkForGlobalObjectReallocation();
2289      tracker.set(p, i);      tracker.set(p, i);
2290      /* If we are writing to this location for the first time, calculate the offset into the      /* If we are writing to this location for the first time, calculate the offset into the
2291         native frame manually, otherwise just look up the last load or store associated with         native frame manually, otherwise just look up the last load or store associated with
# Line 1603  Line 2293 
2293      LIns* x = nativeFrameTracker.get(p);      LIns* x = nativeFrameTracker.get(p);
2294      if (!x) {      if (!x) {
2295          if (isGlobal(p))          if (isGlobal(p))
2296              x = writeBack(i, gp_ins, nativeGlobalOffset(p));              x = writeBack(i, lirbuf->state, nativeGlobalOffset(p));
2297          else          else
2298              x = writeBack(i, lirbuf->sp, -treeInfo->nativeStackBase + nativeStackOffset(p));              x = writeBack(i, lirbuf->sp, -treeInfo->nativeStackBase + nativeStackOffset(p));
2299          nativeFrameTracker.set(p, x);          nativeFrameTracker.set(p, x);
2300      } else {      } else {
2301  #define ASSERT_VALID_CACHE_HIT(base, offset)                                  \  #define ASSERT_VALID_CACHE_HIT(base, offset)                                  \
2302      JS_ASSERT(base == lirbuf->sp || base == gp_ins);                          \      JS_ASSERT(base == lirbuf->sp || base == lirbuf->state);                   \
2303      JS_ASSERT(offset == ((base == lirbuf->sp)                                 \      JS_ASSERT(offset == ((base == lirbuf->sp)                                 \
2304          ? -treeInfo->nativeStackBase + nativeStackOffset(p)                   \          ? -treeInfo->nativeStackBase + nativeStackOffset(p)                   \
2305          : nativeGlobalOffset(p)));                                            \          : nativeGlobalOffset(p)));                                            \
# Line 1626  Line 2316 
2316  #undef ASSERT_VALID_CACHE_HIT  #undef ASSERT_VALID_CACHE_HIT
2317  }  }
2318    
2319  LIns*  JS_REQUIRES_STACK LIns*
2320  TraceRecorder::get(jsval* p) const  TraceRecorder::get(jsval* p)
2321  {  {
2322        checkForGlobalObjectReallocation();
2323      return tracker.get(p);      return tracker.get(p);
2324  }  }
2325    
2326  /* Determine whether the current branch instruction terminates the loop. */  JS_REQUIRES_STACK bool
2327  static bool  TraceRecorder::known(jsval* p)
 js_IsLoopExit(jsbytecode* pc, jsbytecode* header)  
2328  {  {
2329      switch (*pc) {      checkForGlobalObjectReallocation();
2330        case JSOP_LT:      return tracker.has(p);
2331        case JSOP_GT:  }
       case JSOP_LE:  
       case JSOP_GE:  
       case JSOP_NE:  
       case JSOP_EQ:  
         /* These ops try to dispatch a JSOP_IFEQ or JSOP_IFNE that follows. */  
         JS_ASSERT(js_CodeSpec[*pc].length == 1);  
         pc++;  
         break;  
   
       default:  
         for (;;) {  
             if (*pc == JSOP_AND || *pc == JSOP_OR)  
                 pc += GET_JUMP_OFFSET(pc);  
             else if (*pc == JSOP_ANDX || *pc == JSOP_ORX)  
                 pc += GET_JUMPX_OFFSET(pc);  
             else  
                 break;  
         }  
     }  
   
     switch (*pc) {  
       case JSOP_IFEQ:  
       case JSOP_IFNE:  
         /*  
          * Forward jumps are usually intra-branch, but for-in loops jump to the  
          * trailing enditer to clean up, so check for that case here.  
          */  
         if (pc[GET_JUMP_OFFSET(pc)] == JSOP_ENDITER)  
             return true;  
         return pc + GET_JUMP_OFFSET(pc) == header;  
   
       case JSOP_IFEQX:  
       case JSOP_IFNEX:  
         if (pc[GET_JUMPX_OFFSET(pc)] == JSOP_ENDITER)  
             return true;  
         return pc + GET_JUMPX_OFFSET(pc) == header;  
2332    
2333        default:;  /*
2334     * The dslots of the global object are sometimes reallocated by the interpreter.
2335     * This function checks for that condition and re-maps the entries of the tracker
2336     * accordingly.
2337     */
2338    JS_REQUIRES_STACK void
2339    TraceRecorder::checkForGlobalObjectReallocation()
2340    {
2341        if (global_dslots != globalObj->dslots) {
2342            debug_only_v(printf("globalObj->dslots relocated, updating tracker\n");)
2343            jsval* src = global_dslots;
2344            jsval* dst = globalObj->dslots;
2345            jsuint length = globalObj->dslots[-1] - JS_INITIAL_NSLOTS;
2346            LIns** map = (LIns**)alloca(sizeof(LIns*) * length);
2347            for (jsuint n = 0; n < length; ++n) {
2348                map[n] = tracker.get(src);
2349                tracker.set(src++, NULL);
2350            }
2351            for (jsuint n = 0; n < length; ++n)
2352                tracker.set(dst++, map[n]);
2353            global_dslots = globalObj->dslots;
2354      }      }
     return false;  
2355  }  }
2356    
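checkForGlobalObjectReallocation re-keys the tracker when the interpreter moves the global object's dslots array. The sketch below shows the same remapping on a plain map (Tracker here is a std::map standing in for the LIns* tracker): entries keyed by addresses in the old array are moved to the corresponding addresses in the new one.

    #include <cstddef>
    #include <map>

    typedef std::map<double*, int> Tracker;   // illustrative stand-in

    static void remapTracker(Tracker& t, double* oldBase, double* newBase, size_t length)
    {
        for (size_t i = 0; i < length; ++i) {
            Tracker::iterator it = t.find(oldBase + i);
            if (it == t.end())
                continue;
            int value = it->second;
            t.erase(it);
            t[newBase + i] = value;            // same value, relocated key
        }
    }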
2357  /* Determine whether the current branch is a loop edge (taken or not taken). */  /* Determine whether the current branch is a loop edge (taken or not taken). */
2358  static bool  static JS_REQUIRES_STACK bool
2359  js_IsLoopEdge(jsbytecode* pc, jsbytecode* header)  js_IsLoopEdge(jsbytecode* pc, jsbytecode* header)
2360  {  {
2361      switch (*pc) {      switch (*pc) {
# Line 1693  Line 2366 
2366        case JSOP_IFNEX:        case JSOP_IFNEX:
2367          return ((pc + GET_JUMPX_OFFSET(pc)) == header);          return ((pc + GET_JUMPX_OFFSET(pc)) == header);
2368        default:        default:
2369          JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) ||          JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) ||
2370                    (*pc == JSOP_OR) || (*pc == JSOP_ORX));                    (*pc == JSOP_OR) || (*pc == JSOP_ORX));
2371      }      }
2372      return false;      return false;
2373  }  }
2374    
2375  /* Promote slots if necessary to match the called tree' type map and report error if thats  /*
2376     impossible. */   * Promote slots if necessary to match the called tree's type map. This function is
2377  bool   * infallible and must only be called if we are certain that it is possible to
2378  TraceRecorder::adjustCallerTypes(Fragment* f, unsigned* demote_slots, bool& trash)   * reconcile the types for each slot in the inner and outer trees.
2379     */
2380    JS_REQUIRES_STACK void
2381    TraceRecorder::adjustCallerTypes(Fragment* f)
2382  {  {
2383      JSTraceMonitor* tm = traceMonitor;      uint16* gslots = treeInfo->globalSlots->data();
2384      uint8* m = tm->globalTypeMap->data();      unsigned ngslots = treeInfo->globalSlots->length();
2385      uint16* gslots = traceMonitor->globalSlots->data();      JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
2386      unsigned ngslots = traceMonitor->globalSlots->length();      TreeInfo* ti = (TreeInfo*)f->vmprivate;
2387      uint8* map = ((TreeInfo*)f->vmprivate)->stackTypeMap.data();      uint8* map = ti->globalTypeMap();
2388      bool ok = true;      uint8* m = map;
2389      trash = false;      FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
     FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,  
2390          LIns* i = get(vp);          LIns* i = get(vp);
2391          bool isPromote = isPromoteInt(i);          bool isPromote = isPromoteInt(i);
2392          if (isPromote && *m == JSVAL_DOUBLE)          if (isPromote && *m == JSVAL_DOUBLE)
2393              lir->insStorei(get(vp), gp_ins, nativeGlobalOffset(vp));              lir->insStorei(get(vp), lirbuf->state, nativeGlobalOffset(vp));
2394          else if (!isPromote && *m == JSVAL_INT) {          JS_ASSERT(!(!isPromote && *m == JSVAL_INT));
             oracle.markGlobalSlotUndemotable(cx->fp->script, nativeGlobalOffset(vp)/sizeof(double));  
             trash = true;  
             ok = false;  
         }  
2395          ++m;          ++m;
2396      );      );
2397        JS_ASSERT(unsigned(m - map) == ti->nGlobalTypes());
2398        map = ti->stackTypeMap();
2399      m = map;      m = map;
2400      FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,      FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
2401          LIns* i = get(vp);          LIns* i = get(vp);
2402          bool isPromote = isPromoteInt(i);          bool isPromote = isPromoteInt(i);
2403          if (isPromote && *m == JSVAL_DOUBLE) {          if (isPromote && *m == JSVAL_DOUBLE) {
2404              lir->insStorei(get(vp), lirbuf->sp,              lir->insStorei(get(vp), lirbuf->sp,
2405                             -treeInfo->nativeStackBase + nativeStackOffset(vp));                             -treeInfo->nativeStackBase + nativeStackOffset(vp));
2406              /* Aggressively undo speculation so the inner tree will compile if this fails. */              /* Aggressively undo speculation so the inner tree will compile if this fails. */
2407              ADD_UNDEMOTE_SLOT(demote_slots, unsigned(m - map));              oracle.markStackSlotUndemotable(cx, unsigned(m - map));
         } else if (!isPromote && *m == JSVAL_INT) {  
             debug_only_v(printf("adjusting will fail, %s%d, slot %d\n", vpname, vpnum, m - map);)  
             ok = false;  
             ADD_UNDEMOTE_SLOT(demote_slots, unsigned(m - map));  
         } else if (JSVAL_IS_INT(*vp) && *m == JSVAL_DOUBLE) {  
             /* Aggressively undo speculation so the inner tree will compile if this fails. */  
             ADD_UNDEMOTE_SLOT(demote_slots, unsigned(m - map));  
2408          }          }
2409            JS_ASSERT(!(!isPromote && *m == JSVAL_INT));
2410          ++m;          ++m;
2411      );      );
2412      /* If this isn't okay, tell the oracle. */      JS_ASSERT(unsigned(m - map) == ti->nStackTypes);
     if (!ok) {  
         for (unsigned i = 1; i <= NUM_UNDEMOTE_SLOTS(demote_slots); i++)  
             oracle.markStackSlotUndemotable(cx->fp->script, cx->fp->regs->pc, demote_slots[i]);  
     }  
2413      JS_ASSERT(f == f->root);      JS_ASSERT(f == f->root);
     return ok;  
2414  }  }
2415    
2416  uint8  JS_REQUIRES_STACK uint8
2417  TraceRecorder::determineSlotType(jsval* vp) const  TraceRecorder::determineSlotType(jsval* vp)
2418  {  {
2419      uint8 m;      uint8 m;
2420      LIns* i = get(vp);      LIns* i = get(vp);
2421      m = isNumber(*vp)      if (isNumber(*vp)) {
2422          ? (isPromoteInt(i) ? JSVAL_INT : JSVAL_DOUBLE)          m = isPromoteInt(i) ? JSVAL_INT : JSVAL_DOUBLE;
2423          : JSVAL_TAG(*vp);      } else if (JSVAL_IS_OBJECT(*vp)) {
2424            if (JSVAL_IS_NULL(*vp))
2425                m = JSVAL_TNULL;
2426            else if (HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp)))
2427                m = JSVAL_TFUN;
2428            else
2429                m = JSVAL_OBJECT;
2430        } else {
2431            m = JSVAL_TAG(*vp);
2432        }
2433      JS_ASSERT((m != JSVAL_INT) || isInt32(*vp));      JS_ASSERT((m != JSVAL_INT) || isInt32(*vp));
2434      return m;      return m;
2435  }  }
2436    
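determineSlotType refines the jsval tag into the trace-type lattice: numbers split into int or double depending on how the recorded code produced them, and objects split into null, function, or plain object (the JSVAL_TNULL/JSVAL_TFUN pseudo-tags). A compact restatement of that decision with illustrative names:

    enum SlotType { ST_OBJECT, ST_INT, ST_DOUBLE, ST_STRING, ST_NULL, ST_BOOL, ST_FUNCTION };

    static SlotType classify(bool isNumber, bool producedAsInt,
                             bool isObject, bool isNull, bool isFunction,
                             SlotType tagType)
    {
        if (isNumber)
            return producedAsInt ? ST_INT : ST_DOUBLE;   // based on the last store emitted
        if (isObject)
            return isNull ? ST_NULL : (isFunction ? ST_FUNCTION : ST_OBJECT);
        return tagType;                                  // strings and booleans keep their tag
    }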
2437  #define IMACRO_PC_ADJ_BITS   8  JS_REQUIRES_STACK VMSideExit*
 #define SCRIPT_PC_ADJ_BITS   (32 - IMACRO_PC_ADJ_BITS)  
   
 // The stored imacro_pc_adj byte offset is biased by 1.  
 #define IMACRO_PC_ADJ_LIMIT  (JS_BIT(IMACRO_PC_ADJ_BITS) - 1)  
 #define SCRIPT_PC_ADJ_LIMIT  JS_BIT(SCRIPT_PC_ADJ_BITS)  
   
 #define IMACRO_PC_ADJ(ip)    ((uintptr_t)(ip) >> SCRIPT_PC_ADJ_BITS)  
 #define SCRIPT_PC_ADJ(ip)    ((ip) & JS_BITMASK(SCRIPT_PC_ADJ_BITS))  
   
 #define FI_SCRIPT_PC(fi,fp)  ((fp)->script->code + SCRIPT_PC_ADJ((fi).ip_adj))  
   
 #define FI_IMACRO_PC(fi,fp)  (IMACRO_PC_ADJ((fi).ip_adj)                      \  
                               ? imacro_code[*FI_SCRIPT_PC(fi, fp)] +          \  
                                 IMACRO_PC_ADJ((fi).ip_adj)                    \  
                               : NULL)  
   
 #define IMACRO_PC_OK(fp,pc)  JS_ASSERT(uintN((pc)-imacro_code[*(fp)->imacpc]) \  
                                        < JS_BIT(IMACRO_PC_ADJ_BITS))  
 #define ENCODE_IP_ADJ(fp,pc) ((fp)->imacpc                                    \  
                               ? (IMACRO_PC_OK(fp, pc),                        \  
                                  (((pc) - imacro_code[*(fp)->imacpc])         \  
                                   << SCRIPT_PC_ADJ_BITS) +                    \  
                                  (fp)->imacpc - (fp)->script->code)           \  
                               : (pc) - (fp)->script->code)  
   
 #define DECODE_IP_ADJ(ip,fp) (IMACRO_PC_ADJ(ip)                               \  
                               ? (fp)->imacpc = (fp)->script->code +           \  
                                                SCRIPT_PC_ADJ(ip),             \  
                                 (fp)->regs->pc = imacro_code[*(fp)->imacpc] + \  
                                                  IMACRO_PC_ADJ(ip)            \  
                               : (fp)->regs->pc = (fp)->script->code + (ip))  
   
 static jsbytecode* imacro_code[JSOP_LIMIT];  
   
 LIns*  
2438  TraceRecorder::snapshot(ExitType exitType)  TraceRecorder::snapshot(ExitType exitType)
2439  {  {
2440      JSStackFrame* fp = cx->fp;      JSStackFrame* fp = cx->fp;
2441      JSFrameRegs* regs = fp->regs;      JSFrameRegs* regs = fp->regs;
2442      jsbytecode* pc = regs->pc;      jsbytecode* pc = regs->pc;
     if (exitType == BRANCH_EXIT && js_IsLoopExit(pc, (jsbytecode*)fragment->root->ip))  
         exitType = LOOP_EXIT;  
2443    
2444      /* Check for a return-value opcode that needs to restart at the next instruction. */      /* Check for a return-value opcode that needs to restart at the next instruction. */
2445      const JSCodeSpec& cs = js_CodeSpec[*pc];      const JSCodeSpec& cs = js_CodeSpec[*pc];
2446    
2447      /* WARNING: don't return before restoring the original pc if (resumeAfter). */      /*
2448         * When calling a _FAIL native, make the snapshot's pc point to the next
2449         * instruction after the CALL or APPLY. Even on failure, a _FAIL native must not
2450         * be called again from the interpreter.
2451         */
2452      bool resumeAfter = (pendingTraceableNative &&      bool resumeAfter = (pendingTraceableNative &&
2453                          JSTN_ERRTYPE(pendingTraceableNative) == FAIL_JSVAL);                          JSTN_ERRTYPE(pendingTraceableNative) == FAIL_STATUS);
2454      if (resumeAfter) {      if (resumeAfter) {
2455          JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEXTITER);          JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEW);
2456          pc += cs.length;          pc += cs.length;
2457          regs->pc = pc;          regs->pc = pc;
2458          MUST_FLOW_THROUGH("restore_pc");          MUST_FLOW_THROUGH("restore_pc");
# Line 1828  Line 2466 
2466      trackNativeStackUse(stackSlots + 1);      trackNativeStackUse(stackSlots + 1);
2467    
2468      /* Capture the type map into a temporary location. */      /* Capture the type map into a temporary location. */
2469      unsigned ngslots = traceMonitor->globalSlots->length();      unsigned ngslots = treeInfo->globalSlots->length();
2470      unsigned typemap_size = (stackSlots + ngslots) * sizeof(uint8);      unsigned typemap_size = (stackSlots + ngslots) * sizeof(uint8);
2471      uint8* typemap = (uint8*)alloca(typemap_size);      void *mark = JS_ARENA_MARK(&cx->tempPool);
2472        uint8* typemap;
2473        JS_ARENA_ALLOCATE_CAST(typemap, uint8*, &cx->tempPool, typemap_size);
2474      uint8* m = typemap;      uint8* m = typemap;
2475    
2476      /* Determine the type of a store by looking at the current type of the actual value the      /* Determine the type of a store by looking at the current type of the actual value the
2477         interpreter is using. For numbers we have to check what kind of store we used last         interpreter is using. For numbers we have to check what kind of store we used last
2478         (integer or double) to figure out what the side exit should reflect in its typemap. */         (integer or double) to figure out what the side exit should reflect in its typemap. */
2479      FORALL_SLOTS(cx, ngslots, traceMonitor->globalSlots->data(), callDepth,      FORALL_SLOTS(cx, ngslots, treeInfo->globalSlots->data(), callDepth,
2480          *m++ = determineSlotType(vp);          *m++ = determineSlotType(vp);
2481      );      );
2482      JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots);      JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots);
2483    
2484      /* If we are capturing the stack state on a specific instruction, the value on or near      /*
2485         the top of the stack is a boxed value. Either pc[-cs.length] is JSOP_NEXTITER and we       * If we are currently executing a traceable native or we are attaching a second trace
2486         want one below top of stack, or else it's JSOP_CALL and we want top of stack. */       * to it, the value on top of the stack is boxed. Make a note of this in the typemap.
2487      if (resumeAfter) {       */
2488          m[(pc[-cs.length] == JSOP_NEXTITER) ? -2 : -1] = JSVAL_BOXED;      if (pendingTraceableNative && (pendingTraceableNative->flags & JSTN_UNBOX_AFTER))
2489            typemap[stackSlots - 1] = JSVAL_BOXED;
2490    
2491          /* Now restore the the original pc (after which early returns are ok). */      /* Now restore the the original pc (after which early returns are ok). */
2492        if (resumeAfter) {
2493          MUST_FLOW_LABEL(restore_pc);          MUST_FLOW_LABEL(restore_pc);
2494          regs->pc = pc - cs.length;          regs->pc = pc - cs.length;
2495      } else {      } else {
2496          /* If we take a snapshot on a goto, advance to the target address. This avoids inner          /* If we take a snapshot on a goto, advance to the target address. This avoids inner
2497             trees returning on a break goto, which the outer recorder then would confuse with             trees returning on a break goto, which the outer recorder then would confuse with
2498             a break in the outer tree. */             a break in the outer tree. */
2499          if (*pc == JSOP_GOTO)          if (*pc == JSOP_GOTO)
2500              pc += GET_JUMP_OFFSET(pc);              pc += GET_JUMP_OFFSET(pc);
2501          else if (*pc == JSOP_GOTOX)          else if (*pc == JSOP_GOTOX)
2502              pc += GET_JUMPX_OFFSET(pc);              pc += GET_JUMPX_OFFSET(pc);
2503      }      }
     intptr_t ip_adj = ENCODE_IP_ADJ(fp, pc);  
2504    
2505      /* Check if we already have a matching side exit. If so use that side exit structure,      /*
2506         otherwise we have to create our own. */       * Check if we already have a matching side exit; if so we can return that
2507         * side exit instead of creating a new one.
2508         */
2509      VMSideExit** exits = treeInfo->sideExits.data();      VMSideExit** exits = treeInfo->sideExits.data();
2510      unsigned nexits = treeInfo->sideExits.length();      unsigned nexits = treeInfo->sideExits.length();
2511      if (exitType == LOOP_EXIT) {      if (exitType == LOOP_EXIT) {
2512          for (unsigned n = 0; n < nexits; ++n) {          for (unsigned n = 0; n < nexits; ++n) {
2513              VMSideExit* e = exits[n];              VMSideExit* e = exits[n];
2514              if (e->ip_adj == ip_adj &&              if (e->pc == pc && e->imacpc == fp->imacpc &&
2515                  !memcmp(getTypeMap(exits[n]), typemap, typemap_size)) {                  ngslots == e->numGlobalSlots &&
2516                  LIns* data = lir_buf_writer->skip(sizeof(GuardRecord));                  !memcmp(getFullTypeMap(exits[n]), typemap, typemap_size)) {
                 GuardRecord* rec = (GuardRecord*)data->payload();  
                 /* setup guard record structure with shared side exit */  
                 memset(rec, 0, sizeof(GuardRecord));  
                 VMSideExit* exit = exits[n];  
                 rec->exit = exit;  
                 exit->addGuard(rec);  
2517                  AUDIT(mergedLoopExits);                  AUDIT(mergedLoopExits);
2518                  return data;                  JS_ARENA_RELEASE(&cx->tempPool, mark);
2519                    return e;
2520              }              }
2521          }          }
2522      }      }
2523    
2524      /* We couldn't find a matching side exit, so create our own side exit structure. */      if (sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(uint8) >= MAX_SKIP_BYTES) {
2525      LIns* data = lir_buf_writer->skip(sizeof(GuardRecord) +          /*
2526                                        sizeof(VMSideExit) +           * ::snapshot() is infallible in the sense that callers don't
2527                                        (stackSlots + ngslots) * sizeof(uint8));           * expect errors; but this is a trace-aborting error condition. So
2528      GuardRecord* rec = (GuardRecord*)data->payload();           * mangle the request to consume zero slots, and mark the tree as
2529      VMSideExit* exit = (VMSideExit*)(rec + 1);           * to-be-trashed. This should be safe as the trace will be aborted
2530      /* setup guard record structure */           * before assembly or execution due to the call to
2531      memset(rec, 0, sizeof(GuardRecord));           * trackNativeStackUse above.
2532      rec->exit = exit;           */
2533      /* setup side exit structure */          stackSlots = 0;
2534            ngslots = 0;
2535            typemap_size = 0;
2536            trashSelf = true;
2537        }
2538    
2539        /* We couldn't find a matching side exit, so create a new one. */
2540        LIns* data = lir->skip(sizeof(VMSideExit) + (stackSlots + ngslots) * sizeof(uint8));
2541        VMSideExit* exit = (VMSideExit*) data->payload();
2542    
2543        /* Setup side exit structure. */
2544      memset(exit, 0, sizeof(VMSideExit));      memset(exit, 0, sizeof(VMSideExit));
2545      exit->from = fragment;      exit->from = fragment;
2546      exit->calldepth = callDepth;      exit->calldepth = callDepth;
# Line 1902  Line 2550 
2550          ? nativeStackOffset(&cx->fp->argv[-2])/sizeof(double)          ? nativeStackOffset(&cx->fp->argv[-2])/sizeof(double)
2551          : 0;          : 0;
2552      exit->exitType = exitType;      exit->exitType = exitType;
2553      exit->addGuard(rec);      exit->block = fp->blockChain;
2554      exit->ip_adj = ip_adj;      exit->pc = pc;
2555        exit->imacpc = fp->imacpc;
2556      exit->sp_adj = (stackSlots * sizeof(double)) - treeInfo->nativeStackBase;      exit->sp_adj = (stackSlots * sizeof(double)) - treeInfo->nativeStackBase;
2557      exit->rp_adj = exit->calldepth * sizeof(FrameInfo);      exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
2558      memcpy(getTypeMap(exit), typemap, typemap_size);      exit->nativeCalleeWord = 0;
2559        memcpy(getFullTypeMap(exit), typemap, typemap_size);
2560    
2561      /* BIG FAT WARNING: If compilation fails, we currently don't reset the lirbuf so its safe      JS_ARENA_RELEASE(&cx->tempPool, mark);
2562         to keep references to the side exits here. If we ever start rewinding those lirbufs,      return exit;
2563         we have to make sure we purge the side exits that then no longer will be in valid  }
2564         memory. */  
2565      if (exitType == LOOP_EXIT)  JS_REQUIRES_STACK LIns*
2566    TraceRecorder::createGuardRecord(VMSideExit* exit)
2567    {
2568        LIns* guardRec = lir->skip(sizeof(GuardRecord));
2569        GuardRecord* gr = (GuardRecord*) guardRec->payload();
2570    
2571        memset(gr, 0, sizeof(GuardRecord));
2572        gr->exit = exit;
2573        exit->addGuard(gr);
2574    
2575        return guardRec;
2576    }
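
The split above, where snapshot() finds or builds a VMSideExit and createGuardRecord() allocates a fresh GuardRecord per guard and chains it onto that exit through addGuard(), is easiest to see in isolation. The standalone sketch below uses simplified stand-in structs (not the engine's types) to show the ownership direction: many guard records, one shared side exit.

    #include <cstdio>
    #include <vector>

    struct GuardRecordSketch;

    // Stand-in for VMSideExit: one exit may be shared by several guards.
    struct SideExitSketch {
        const void* pc = nullptr;              // exit point identity
        std::vector<unsigned char> typemap;    // recorded slot types
        GuardRecordSketch* guards = nullptr;   // head of the guard chain
        void addGuard(GuardRecordSketch* gr);
    };

    // Stand-in for GuardRecord: one per emitted guard instruction.
    struct GuardRecordSketch {
        SideExitSketch* exit = nullptr;
        GuardRecordSketch* next = nullptr;     // next guard sharing the same exit
    };

    void SideExitSketch::addGuard(GuardRecordSketch* gr) {
        gr->next = guards;
        guards = gr;
    }

    int main() {
        SideExitSketch exit;
        exit.typemap.assign(3, 0);
        GuardRecordSketch a, b;
        a.exit = &exit; exit.addGuard(&a);
        b.exit = &exit; exit.addGuard(&b);     // a second guard reuses the same exit
        int n = 0;
        for (GuardRecordSketch* g = exit.guards; g; g = g->next) ++n;
        std::printf("guards sharing this exit: %d\n", n);   // prints 2
    }
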
2577    
2578    /*
2579     * Emit a guard for condition (cond), expecting to evaluate to boolean result
2580     * (expected) and using the supplied side exit if the condition doesn't hold.
2581     */
2582    JS_REQUIRES_STACK void
2583    TraceRecorder::guard(bool expected, LIns* cond, VMSideExit* exit)
2584    {
2585        LIns* guardRec = createGuardRecord(exit);
2586    
2587        /*
2588         * BIG FAT WARNING: If compilation fails we don't reset the lirbuf, so it's
2589         * safe to keep references to the side exits here. If we ever start
2590         * rewinding those lirbufs, we have to make sure we purge the side exits
2591         * that then no longer will be in valid memory.
2592         */
2593        if (exit->exitType == LOOP_EXIT)
2594          treeInfo->sideExits.add(exit);          treeInfo->sideExits.add(exit);
2595      return data;  
2596        if (!cond->isCond()) {
2597            expected = !expected;
2598            cond = lir->ins_eq0(cond);
2599        }
2600    
2601        LIns* guardIns =
2602            lir->insGuard(expected ? LIR_xf : LIR_xt, cond, guardRec);
2603        if (guardIns) {
2604            debug_only_v(printf("    SideExit=%p exitType=%d\n", (void*)exit, exit->exitType);)
2605        } else {
2606            debug_only_v(printf("    redundant guard, eliminated\n");)
2607        }
2608  }  }
2609    
2610  /* Emit a guard for condition (cond), expecting to evaluate to boolean result (expected)  JS_REQUIRES_STACK VMSideExit*
2611     and using the supplied side exit if the conditon doesn't hold. */  TraceRecorder::copy(VMSideExit* copy)
 LIns*  
 TraceRecorder::guard(bool expected, LIns* cond, LIns* exit)  
2612  {  {
2613      return lir->insGuard(expected ? LIR_xf : LIR_xt, cond, exit);      size_t typemap_size = copy->numGlobalSlots + copy->numStackSlots;
2614        LIns* data = lir->skip(sizeof(VMSideExit) +
2615                               typemap_size * sizeof(uint8));
2616        VMSideExit* exit = (VMSideExit*) data->payload();
2617    
2618        /* Copy side exit structure. */
2619        memcpy(exit, copy, sizeof(VMSideExit) + typemap_size * sizeof(uint8));
2620        exit->guards = NULL;
2621        exit->from = fragment;
2622        exit->target = NULL;
2623    
2624        /*
2625         * BIG FAT WARNING: If compilation fails we don't reset the lirbuf, so it's
2626         * safe to keep references to the side exits here. If we ever start
2627         * rewinding those lirbufs, we have to make sure we purge the side exits
2628         * that then no longer will be in valid memory.
2629         */
2630        if (exit->exitType == LOOP_EXIT)
2631            treeInfo->sideExits.add(exit);
2632        return exit;
2633  }  }
2634    
2635  /* Emit a guard for condition (cond), expecting to evaluate to boolean result (expected)  /* Emit a guard for condition (cond), expecting to evaluate to boolean result (expected)
2636     and generate a side exit with type exitType to jump to if the condition does not hold. */     and generate a side exit with type exitType to jump to if the condition does not hold. */
2637  LIns*  JS_REQUIRES_STACK void
2638  TraceRecorder::guard(bool expected, LIns* cond, ExitType exitType)  TraceRecorder::guard(bool expected, LIns* cond, ExitType exitType)
2639  {  {
2640      return guard(expected, cond, snapshot(exitType));      guard(expected, cond, snapshot(exitType));
2641  }  }
2642    
2643  /* Try to match the type of a slot to type t. checkType is used to verify that the type of  /* Try to match the type of a slot to type t. checkType is used to verify that the type of
# Line 1943  Line 2650 
2650   * @param stage_count   Outparam for set() buffer count.   * @param stage_count   Outparam for set() buffer count.
2651   * @return              True if types are compatible, false otherwise.   * @return              True if types are compatible, false otherwise.
2652   */   */
2653  bool  JS_REQUIRES_STACK bool
2654  TraceRecorder::checkType(jsval& v, uint8 t, jsval*& stage_val, LIns*& stage_ins,  TraceRecorder::checkType(jsval& v, uint8 t, jsval*& stage_val, LIns*& stage_ins,
2655                           unsigned& stage_count)                           unsigned& stage_count)
2656  {  {
2657      if (t == JSVAL_INT) { /* initially all whole numbers cause the slot to be demoted */      if (t == JSVAL_INT) { /* initially all whole numbers cause the slot to be demoted */
2658          debug_only_v(printf("checkType(tag=1, t=%d, isnum=%d, i2f=%d) stage_count=%d\n",          debug_only_v(printf("checkType(tag=1, t=%d, isnum=%d, i2f=%d) stage_count=%d\n",
2659                              t,                              t,
2660                              isNumber(v),                              isNumber(v),
2661                              isPromoteInt(get(&v)),                              isPromoteInt(get(&v)),
# Line 1985  Line 2692 
2692          }          }
2693          return true;          return true;
2694      }      }
2695        if (t == JSVAL_TNULL)
2696            return JSVAL_IS_NULL(v);
2697        if (t == JSVAL_TFUN)
2698            return !JSVAL_IS_PRIMITIVE(v) && HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v));
2699        if (t == JSVAL_OBJECT)
2700            return !JSVAL_IS_PRIMITIVE(v) && !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(v));
2701    
2702      /* for non-number types we expect a precise match of the type */      /* for non-number types we expect a precise match of the type */
2703        uint8 vt = getCoercedType(v);
2704  #ifdef DEBUG  #ifdef DEBUG
2705      if (JSVAL_TAG(v) != t) {      if (vt != t) {
2706          debug_only_v(printf("Type mismatch: val %c, map %c ", typeChar[JSVAL_TAG(v)],          debug_only_v(printf("Type mismatch: val %c, map %c ", typeChar[vt],
2707                              typeChar[t]);)                              typeChar[t]);)
2708      }      }
2709  #endif  #endif
2710      debug_only_v(printf("checkType(tag=%d, t=%d) stage_count=%d\n",      debug_only_v(printf("checkType(vt=%d, t=%d) stage_count=%d\n",
2711                          (int) JSVAL_TAG(v), t, stage_count);)                          (int) vt, t, stage_count);)
2712      return JSVAL_TAG(v) == t;      return vt == t;
2713  }  }
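
For number slots, the rule checkType applies can be restated compactly: a slot recorded as JSVAL_INT stays an int only while the live value is a whole number that still fits a 32-bit int; otherwise the slot must be widened to double and the oracle told not to demote it again. The sketch below is a simplification under that reading, with plain doubles and an out-flag standing in for jsvals and the Oracle.

    #include <cmath>
    #include <cstdio>

    enum SlotType { SLOT_INT, SLOT_DOUBLE };

    // Whole number in signed 32-bit range (rough stand-in for isPromoteInt).
    static bool fitsAsInt(double v) {
        return v == std::floor(v) && v >= -2147483648.0 && v <= 2147483647.0;
    }

    // True if the live value is still compatible with the recorded slot type;
    // sets markUndemotable when an int slot has to be widened to double.
    static bool slotMatches(SlotType recorded, double value, bool& markUndemotable) {
        if (recorded == SLOT_INT && !fitsAsInt(value)) {
            markUndemotable = true;            // stands in for oracle.mark*SlotUndemotable
            return false;
        }
        return true;
    }

    int main() {
        bool undemote = false;
        std::printf("%d\n", slotMatches(SLOT_INT, 42.0, undemote));   // 1: stays int
        std::printf("%d\n", slotMatches(SLOT_INT, 0.5, undemote));    // 0: must widen
        std::printf("undemote=%d\n", undemote);                       // 1
    }
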
2714    
2715  /**  /**
# Line 2003  Line 2718 
2718   *   *
2719   * @param root_peer         First fragment in peer list.   * @param root_peer         First fragment in peer list.
2720   * @param stable_peer       Outparam for first type stable peer.   * @param stable_peer       Outparam for first type stable peer.
2721   * @param trash             Whether to trash the tree (demotion).   * @param demote            True if stability was achieved through demotion.
  * @param demotes           Array to store demotable stack slots.  
2722   * @return                  True if type stable, false otherwise.   * @return                  True if type stable, false otherwise.
2723   */   */
2724  bool  JS_REQUIRES_STACK bool
2725  TraceRecorder::deduceTypeStability(Fragment* root_peer, Fragment** stable_peer, unsigned* demotes)  TraceRecorder::deduceTypeStability(Fragment* root_peer, Fragment** stable_peer, bool& demote)
2726  {  {
2727      uint8* m;      uint8* m;
2728      uint8* typemap;      uint8* typemap;
2729      unsigned ngslots = traceMonitor->globalSlots->length();      unsigned ngslots = treeInfo->globalSlots->length();
2730      uint16* gslots = traceMonitor->globalSlots->data();      uint16* gslots = treeInfo->globalSlots->data();
2731      JS_ASSERT(traceMonitor->globalTypeMap->length() == ngslots);      JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
2732    
2733      if (stable_peer)      if (stable_peer)
2734          *stable_peer = NULL;          *stable_peer = NULL;
2735    
     CLEAR_UNDEMOTE_SLOTLIST(demotes);  
   
2736      /*      /*
2737       * Rather than calculate all of this stuff twice, it gets cached locally.  The "stage" buffers       * Rather than calculate all of this stuff twice, it gets cached locally.  The "stage" buffers
2738       * are for calls to set() that will change the exit types.       * are for calls to set() that will change the exit types.
2739       */       */
2740      bool success;      bool success;
     bool unstable_from_undemotes;  
2741      unsigned stage_count;      unsigned stage_count;
2742      jsval** stage_vals = (jsval**)alloca(sizeof(jsval*) * (ngslots + treeInfo->stackTypeMap.length()));      jsval** stage_vals = (jsval**)alloca(sizeof(jsval*) * (treeInfo->typeMap.length()));
2743      LIns** stage_ins = (LIns**)alloca(sizeof(LIns*) * (ngslots + treeInfo->stackTypeMap.length()));      LIns** stage_ins = (LIns**)alloca(sizeof(LIns*) * (treeInfo->typeMap.length()));
2744    
2745      /* First run through and see if we can close ourselves - best case! */      /* First run through and see if we can close ourselves - best case! */
2746      stage_count = 0;      stage_count = 0;
2747      success = false;      success = false;
     unstable_from_undemotes = false;  
2748    
2749      debug_only_v(printf("Checking type stability against self=%p\n", fragment);)      debug_only_v(printf("Checking type stability against self=%p\n", (void*)fragment);)
2750    
2751      m = typemap = traceMonitor->globalTypeMap->data();      m = typemap = treeInfo->globalTypeMap();
2752      FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,      FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
2753          debug_only_v(printf("%s%d ", vpname, vpnum);)          debug_only_v(printf("%s%d ", vpname, vpnum);)
2754          if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) {          if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) {
2755              /* If the failure was an int->double, tell the oracle. */              /* If the failure was an int->double, tell the oracle. */
2756              if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp)))              if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) {
2757                  oracle.markGlobalSlotUndemotable(cx->fp->script, gslots[n]);                  oracle.markGlobalSlotUndemotable(cx, gslots[n]);
2758              trashTree = true;                  demote = true;
2759              goto checktype_fail_1;              } else {
2760                    goto checktype_fail_1;
2761                }
2762          }          }
2763          ++m;          ++m;
2764      );      );
2765      m = typemap = treeInfo->stackTypeMap.data();      m = typemap = treeInfo->stackTypeMap();
2766      FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,      FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
2767          debug_only_v(printf("%s%d ", vpname, vpnum);)          debug_only_v(printf("%s%d ", vpname, vpnum);)
2768          if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) {          if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) {
2769              if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp)))              if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) {
2770                  ADD_UNDEMOTE_SLOT(demotes, unsigned(m - typemap));                  oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
2771              else                  demote = true;
2772                } else {
2773                  goto checktype_fail_1;                  goto checktype_fail_1;
2774                }
2775          }          }
2776          ++m;          ++m;
2777      );      );
2778    
2779      /*      success = true;
      * If there's an exit that's unstable because of undemotable slots, we want to search for  
      * peers just in case we can make a connection.  
      */  
     if (NUM_UNDEMOTE_SLOTS(demotes))  
         unstable_from_undemotes = true;  
     else  
         success = true;  
2780    
2781  checktype_fail_1:  checktype_fail_1:
2782      /* If we got a success and we don't need to recompile, we should just close here. */      /* If we got a success and we don't need to recompile, we should just close here. */
2783      if (success) {      if (success && !demote) {
2784          for (unsigned i = 0; i < stage_count; i++)          for (unsigned i = 0; i < stage_count; i++)
2785              set(stage_vals[i], stage_ins[i]);              set(stage_vals[i], stage_ins[i]);
2786          return true;          return true;
2787      /* If we need to trash, don't bother checking peers. */      /* If we need to trash, don't bother checking peers. */
2788      } else if (trashTree) {      } else if (trashSelf) {
2789          return false;          return false;
     } else {  
         CLEAR_UNDEMOTE_SLOTLIST(demotes);  
2790      }      }
2791    
2792      /* At this point the tree is about to be incomplete, so let's see if we can connect to any      demote = false;
2793    
2794        /* At this point the tree is about to be incomplete, so let's see if we can connect to any
2795       * peer fragment that is type stable.       * peer fragment that is type stable.
2796       */       */
2797      Fragment* f;      Fragment* f;
2798      TreeInfo* ti;      TreeInfo* ti;
2799      for (f = root_peer; f != NULL; f = f->peer) {      for (f = root_peer; f != NULL; f = f->peer) {
2800          debug_only_v(printf("Checking type stability against peer=%p (code=%p)\n", f, f->code());)          debug_only_v(printf("Checking type stability against peer=%p (code=%p)\n", (void*)f, f->code());)
2801          if (!f->code())          if (!f->code())
2802              continue;              continue;
2803          ti = (TreeInfo*)f->vmprivate;          ti = (TreeInfo*)f->vmprivate;
2804          /* Don't allow varying stack depths */          /* Don't allow varying stack depths */
2805          if (ti->stackTypeMap.length() != treeInfo->stackTypeMap.length())          if ((ti->nStackTypes != treeInfo->nStackTypes) ||
2806                (ti->typeMap.length() != treeInfo->typeMap.length()) ||
2807                (ti->globalSlots->length() != treeInfo->globalSlots->length()))
2808              continue;              continue;
2809          stage_count = 0;          stage_count = 0;
2810          success = false;          success = false;
         m = ti->stackTypeMap.data();  
2811    
2812            m = ti->globalTypeMap();
2813            FORALL_GLOBAL_SLOTS(cx, treeInfo->globalSlots->length(), treeInfo->globalSlots->data(),
2814                    if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))
2815                        goto checktype_fail_2;
2816                    ++m;
2817                );
2818    
2819            m = ti->stackTypeMap();
2820          FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,          FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
2821              if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))                  if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))
2822                  goto checktype_fail_2;                      goto checktype_fail_2;
2823              ++m;                  ++m;
2824          );              );
2825    
2826          success = true;          success = true;
2827    
2828  checktype_fail_2:  checktype_fail_2:
2829          if (success) {          if (success) {
2830              /*              /*
2831               * There was a successful match.  We don't care about restoring the saved staging, but               * There was a successful match.  We don't care about restoring the saved staging, but
2832               * we do need to clear the original undemote list.               * we do need to clear the original undemote list.
2833               */               */
2834              for (unsigned i = 0; i < stage_count; i++)              for (unsigned i = 0; i < stage_count; i++)
2835                  set(stage_vals[i], stage_ins[i]);                  set(stage_vals[i], stage_ins[i]);
2836              if (stable_peer)              if (stable_peer)
2837                  *stable_peer = f;                  *stable_peer = f;
2838                demote = false;
2839              return false;              return false;
2840          }          }
2841      }      }
2842    
     JS_ASSERT(NUM_UNDEMOTE_SLOTS(demotes) == 0);  
   
2843      /*      /*
2844       * If this is a loop trace and it would be stable with demotions, build an undemote list       * If this is a loop trace and it would be stable with demotions, build an undemote list
2845       * and return true.  Our caller should sniff this and trash the tree, recording a new one       * and return true.  Our caller should sniff this and trash the tree, recording a new one
2846       * that will assumedly stabilize.       * that will assumedly stabilize.
2847       */       */
2848      if (unstable_from_undemotes && fragment->kind == LoopTrace) {      if (demote && fragment->kind == LoopTrace) {
2849          typemap = m = treeInfo->stackTypeMap.data();          typemap = m = treeInfo->globalTypeMap();
2850          FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,          FORALL_GLOBAL_SLOTS(cx, treeInfo->globalSlots->length(), treeInfo->globalSlots->data(),
2851              if (*m == JSVAL_INT) {              if (*m == JSVAL_INT) {
2852                  JS_ASSERT(isNumber(*vp));                  JS_ASSERT(isNumber(*vp));
2853                  if (!isPromoteInt(get(vp)))                  if (!isPromoteInt(get(vp)))
2854                      ADD_UNDEMOTE_SLOT(demotes, unsigned(m - typemap));                      oracle.markGlobalSlotUndemotable(cx, gslots[n]);
2855              } else if (*m == JSVAL_DOUBLE) {              } else if (*m == JSVAL_DOUBLE) {
2856                  JS_ASSERT(isNumber(*vp));                  JS_ASSERT(isNumber(*vp));
2857                  ADD_UNDEMOTE_SLOT(demotes, unsigned(m - typemap));                  oracle.markGlobalSlotUndemotable(cx, gslots[n]);
2858              } else {              } else {
2859                  JS_ASSERT(*m == JSVAL_TAG(*vp));                  JS_ASSERT(*m == JSVAL_TAG(*vp));
2860              }              }
2861              m++;              m++;
2862          );          );
2863    
2864            typemap = m = treeInfo->stackTypeMap();
2865            FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
2866                if (*m == JSVAL_INT) {
2867                    JS_ASSERT(isNumber(*vp));
2868                    if (!isPromoteInt(get(vp)))
2869                        oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
2870                } else if (*m == JSVAL_DOUBLE) {
2871                    JS_ASSERT(isNumber(*vp));
2872                    oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
2873                } else {
2874                    JS_ASSERT((*m == JSVAL_TNULL)
2875                              ? JSVAL_IS_NULL(*vp)
2876                              : *m == JSVAL_TFUN
2877                              ? !JSVAL_IS_PRIMITIVE(*vp) && HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))
2878                              : *m == JSVAL_TAG(*vp));
2879                }
2880                m++;
2881            );
2882          return true;          return true;
2883        } else {
2884            demote = false;
2885      }      }
2886    
2887      return false;      return false;
2888  }  }
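
Together with closeLoop below, deduceTypeStability drives a three-way choice: close the loop on itself, patch the unstable exit into a type-compatible peer, or, when the only mismatches are int->double demotions, feed the oracle and let the caller re-record. Purely as an illustration (booleans in place of the typemap walks above):

    #include <cstdio>

    enum StabilityOutcome {
        CLOSE_SELF,                 // type-stable: emit the loop edge back to the header
        LINK_TO_PEER,               // a compatible peer exists: exit jumps into it
        RERECORD_WITH_DEMOTIONS,    // only int->double mismatches: mark oracle, record again
        COMPILE_AND_HOPE            // compile anyway; a future peer may join this exit
    };

    static StabilityOutcome decide(bool selfStable, bool onlyDemotionMismatches, bool haveMatchingPeer) {
        if (selfStable && onlyDemotionMismatches)
            return RERECORD_WITH_DEMOTIONS;
        if (selfStable)
            return CLOSE_SELF;
        if (haveMatchingPeer)
            return LINK_TO_PEER;
        return COMPILE_AND_HOPE;
    }

    int main() {
        std::printf("%d\n", decide(true,  false, false));   // 0: CLOSE_SELF
        std::printf("%d\n", decide(true,  true,  false));   // 2: RERECORD_WITH_DEMOTIONS
        std::printf("%d\n", decide(false, false, true));    // 1: LINK_TO_PEER
    }
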
2889    
2890  /* Check whether the current pc location is the loop header of the loop this recorder records. */  static JS_REQUIRES_STACK void
2891  bool  FlushJITCache(JSContext* cx)
 TraceRecorder::isLoopHeader(JSContext* cx) const  
2892  {  {
2893      return cx->fp->regs->pc == fragment->root->ip;      if (!TRACING_ENABLED(cx))
2894            return;
2895        JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
2896        debug_only_v(printf("Flushing cache.\n");)
2897        if (tm->recorder)
2898            js_AbortRecording(cx, "flush cache");
2899        TraceRecorder* tr;
2900        while ((tr = tm->abortStack) != NULL) {
2901            tr->removeFragmentoReferences();
2902            tr->deepAbort();
2903            tr->popAbortStack();
2904        }
2905        Fragmento* fragmento = tm->fragmento;
2906        if (fragmento) {
2907            if (tm->prohibitFlush) {
2908                debug_only_v(printf("Deferring fragmento flush due to deep bail.\n");)
2909                tm->needFlush = JS_TRUE;
2910                return;
2911            }
2912    
2913            fragmento->clearFrags();
2914    #ifdef DEBUG
2915            JS_ASSERT(fragmento->labels);
2916            fragmento->labels->clear();
2917    #endif
2918            tm->lirbuf->rewind();
2919            for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
2920                VMFragment* f = tm->vmfragments[i];
2921                while (f) {
2922                    VMFragment* next = f->next;
2923                    fragmento->clearFragment(f);
2924                    f = next;
2925                }
2926                tm->vmfragments[i] = NULL;
2927            }
2928            for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
2929                tm->globalStates[i].globalShape = -1;
2930                tm->globalStates[i].globalSlots->clear();
2931            }
2932        }
2933        tm->needFlush = JS_FALSE;
2934  }  }
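
FlushJITCache cannot clear the cache while a deep bail holds prohibitFlush, so the request is parked in needFlush and honoured at the next safe point (compile() checks the flag on entry). A minimal sketch of that deferred-flush pattern, with illustrative fields only:

    #include <cstdio>

    struct CacheSketch {
        bool prohibitFlush = false;   // set while a deep bail is in flight
        bool needFlush = false;       // parked flush request
        int generation = 0;           // bumps when the cache is actually cleared

        void flush() {
            if (prohibitFlush) {      // unsafe right now: defer
                needFlush = true;
                return;
            }
            ++generation;             // stands in for clearing fragments and the lirbuf
            needFlush = false;
        }
        void safePoint() {            // e.g. entry to compile()
            if (needFlush)
                flush();
        }
    };

    int main() {
        CacheSketch c;
        c.prohibitFlush = true;
        c.flush();                                                    // deferred
        std::printf("gen=%d need=%d\n", c.generation, c.needFlush);   // gen=0 need=1
        c.prohibitFlush = false;
        c.safePoint();                                                // now actually flushes
        std::printf("gen=%d need=%d\n", c.generation, c.needFlush);   // gen=1 need=0
    }
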
2935    
2936  /* Compile the current fragment. */  /* Compile the current fragment. */
2937  void  JS_REQUIRES_STACK void
2938  TraceRecorder::compile(Fragmento* fragmento)  TraceRecorder::compile(JSTraceMonitor* tm)
2939  {  {
2940        if (tm->needFlush) {
2941            FlushJITCache(cx);
2942            return;
2943        }
2944        Fragmento* fragmento = tm->fragmento;
2945      if (treeInfo->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {      if (treeInfo->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
2946          debug_only_v(printf("Trace rejected: excessive stack use.\n"));          debug_only_v(printf("Blacklist: excessive stack use.\n"));
2947          js_BlacklistPC(fragmento, fragment);          js_Blacklist((jsbytecode*) fragment->root->ip);
2948          return;          return;
2949      }      }
2950      ++treeInfo->branchCount;      if (anchor && anchor->exitType != CASE_EXIT)
2951      if (lirbuf->outOmem()) {          ++treeInfo->branchCount;
2952        if (lirbuf->outOMem()) {
2953          fragmento->assm()->setError(nanojit::OutOMem);          fragmento->assm()->setError(nanojit::OutOMem);
2954          return;          return;
2955      }      }
2956      ::compile(fragmento->assm(), fragment);      ::compile(fragmento->assm(), fragment);
2957      if (anchor)      if (fragmento->assm()->error() == nanojit::OutOMem)
         fragmento->assm()->patch(anchor);  
     if (fragmento->assm()->error() != nanojit::None)  
2958          return;          return;
2959        if (fragmento->assm()->error() != nanojit::None) {
2960            debug_only_v(printf("Blacklisted: error during compilation\n");)
2961            js_Blacklist((jsbytecode*) fragment->root->ip);
2962            return;
2963        }
2964        js_resetRecordingAttempts(cx, (jsbytecode*) fragment->ip);
2965        js_resetRecordingAttempts(cx, (jsbytecode*) fragment->root->ip);
2966        if (anchor) {
2967    #ifdef NANOJIT_IA32
2968            if (anchor->exitType == CASE_EXIT)
2969                fragmento->assm()->patch(anchor, anchor->switchInfo);
2970            else
2971    #endif
2972                fragmento->assm()->patch(anchor);
2973        }
2974      JS_ASSERT(fragment->code());      JS_ASSERT(fragment->code());
2975      JS_ASSERT(!fragment->vmprivate);      JS_ASSERT(!fragment->vmprivate);
2976      if (fragment == fragment->root)      if (fragment == fragment->root)
# Line 2194  Line 2988 
2988  }  }
2989    
2990  static bool  static bool
2991  js_JoinPeersIfCompatible(Fragmento* frago, Fragment* stableFrag, TreeInfo* stableTree,  js_JoinPeersIfCompatible(Fragmento* frago, Fragment* stableFrag, TreeInfo* stableTree,
2992                           VMSideExit* exit)                           VMSideExit* exit)
2993  {  {
2994      JS_ASSERT(exit->numStackSlots == stableTree->stackTypeMap.length());      JS_ASSERT(exit->numStackSlots == stableTree->nStackTypes);
2995    
2996      /* Must have a matching type unstable exit. */      /* Must have a matching type unstable exit. */
2997      if (memcmp(getTypeMap(exit) + exit->numGlobalSlots,      if ((exit->numGlobalSlots + exit->numStackSlots != stableTree->typeMap.length()) ||
2998                 stableTree->stackTypeMap.data(),          memcmp(getFullTypeMap(exit), stableTree->typeMap.data(), stableTree->typeMap.length())) {
2999                 stableTree->stackTypeMap.length()) != 0) {         return false;
        return false;  
3000      }      }
3001    
3002      exit->target = stableFrag;      exit->target = stableFrag;
3003      frago->assm()->patch(exit);      frago->assm()->patch(exit);
3004    
3005      stableTree->dependentTrees.addUnique(exit->from->root);      stableTree->dependentTrees.addUnique(exit->from->root);
3006        ((TreeInfo*)exit->from->root->vmprivate)->linkedTrees.addUnique(stableFrag);
3007    
3008      return true;      return true;
3009  }  }
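
The compatibility test above reduces to one comparison: an unstable exit may be patched to jump into a peer tree only if the exit's full type map (stack plus globals) is byte-for-byte identical to the peer's entry map. A tiny standalone restatement, with vectors of bytes standing in for the engine's TypeMap:

    #include <cstdio>
    #include <cstring>
    #include <vector>

    static bool canJoin(const std::vector<unsigned char>& exitTypes,
                        const std::vector<unsigned char>& peerEntryTypes) {
        return exitTypes.size() == peerEntryTypes.size() &&
               std::memcmp(exitTypes.data(), peerEntryTypes.data(), exitTypes.size()) == 0;
    }

    int main() {
        std::vector<unsigned char> exitMap  = {1, 1, 2};
        std::vector<unsigned char> peerMap  = {1, 1, 2};
        std::vector<unsigned char> otherMap = {1, 2, 2};
        std::printf("%d %d\n", canJoin(exitMap, peerMap), canJoin(exitMap, otherMap));  // 1 0
    }
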
3010    
3011  /* Complete and compile a trace and link it to the existing tree if appropriate. */  /* Complete and compile a trace and link it to the existing tree if appropriate. */
3012  bool  JS_REQUIRES_STACK void
3013  TraceRecorder::closeLoop(Fragmento* fragmento, bool& demote, unsigned *demotes)  TraceRecorder::closeLoop(JSTraceMonitor* tm, bool& demote)
3014  {  {
3015        /*
3016         * We should have arrived back at the loop header, so we must not be in an imacro
3017         * here, and the opcode should be JSOP_LOOP or, if this loop was blacklisted in the
3018         * meantime, JSOP_NOP.
3019         */
3020        JS_ASSERT((*cx->fp->regs->pc == JSOP_LOOP || *cx->fp->regs->pc == JSOP_NOP) && !cx->fp->imacpc);
3021    
3022      bool stable;      bool stable;
     LIns* exitIns;  
3023      Fragment* peer;      Fragment* peer;
3024      VMSideExit* exit;      VMFragment* peer_root;
3025      Fragment* peer_root;      Fragmento* fragmento = tm->fragmento;
   
     demote = false;  
       
     exitIns = snapshot(UNSTABLE_LOOP_EXIT);  
     exit = (VMSideExit*)((GuardRecord*)exitIns->payload())->exit;  
3026    
3027      if (callDepth != 0) {      if (callDepth != 0) {
3028          debug_only_v(printf("Stack depth mismatch, possible recursion\n");)          debug_only_v(printf("Blacklisted: stack depth mismatch, possible recursion.\n");)
3029          js_BlacklistPC(fragmento, fragment);          js_Blacklist((jsbytecode*) fragment->root->ip);
3030          trashTree = true;          trashSelf = true;
3031          return false;          return;
3032      }      }
3033    
3034      JS_ASSERT(exit->numStackSlots == treeInfo->stackTypeMap.length());      VMSideExit* exit = snapshot(UNSTABLE_LOOP_EXIT);
3035        JS_ASSERT(exit->numStackSlots == treeInfo->nStackTypes);
3036    
3037      peer_root = fragmento->getLoop(fragment->root->ip);      VMFragment* root = (VMFragment*)fragment->root;
3038        peer_root = getLoop(traceMonitor, root->ip, root->globalObj, root->globalShape, root->argc);
3039      JS_ASSERT(peer_root != NULL);      JS_ASSERT(peer_root != NULL);
     stable = deduceTypeStability(peer_root, &peer, demotes);  
3040    
3041      #if DEBUG      stable = deduceTypeStability(peer_root, &peer, demote);
3042      if (!stable || NUM_UNDEMOTE_SLOTS(demotes))  
3043    #if DEBUG
3044        if (!stable)
3045          AUDIT(unstableLoopVariable);          AUDIT(unstableLoopVariable);
3046      #endif  #endif
3047    
3048      if (trashTree) {      if (trashSelf) {
3049          debug_only_v(printf("Trashing tree from type instability.\n");)          debug_only_v(printf("Trashing tree from type instability.\n");)
3050          return false;          return;
3051      }      }
3052    
3053      if (stable && NUM_UNDEMOTE_SLOTS(demotes)) {      if (stable && demote) {
3054          JS_ASSERT(fragment->kind == LoopTrace);          JS_ASSERT(fragment->kind == LoopTrace);
3055          demote = true;          return;
         return false;  
3056      }      }
3057    
3058      if (!stable) {      if (!stable) {
3059          fragment->lastIns = lir->insGuard(LIR_x, lir->insImm(1), exitIns);          fragment->lastIns = lir->insGuard(LIR_x, lir->insImm(1), createGuardRecord(exit));
3060    
3061          /*          /*
3062           * If we didn't find a type stable peer, we compile the loop anyway and           * If we didn't find a type stable peer, we compile the loop anyway and
# Line 2277  Line 3075 
3075              uexit->exit = exit;              uexit->exit = exit;
3076              uexit->next = treeInfo->unstableExits;              uexit->next = treeInfo->unstableExits;
3077              treeInfo->unstableExits = uexit;              treeInfo->unstableExits = uexit;
   
             /*  
              * If we walked out of a loop, this exit is wrong. We need to back  
              * up to the if operation.  
              */  
             if (walkedOutOfLoop())  
                 exit->ip_adj = terminate_ip_adj;  
   
             /* If we were trying to stabilize a promotable tree, trash it. */  
             if (promotedPeer)  
                 js_TrashTree(cx, promotedPeer);  
3078          } else {          } else {
3079              JS_ASSERT(peer->code());              JS_ASSERT(peer->code());
3080              exit->target = peer;              exit->target = peer;
3081              debug_only_v(printf("Joining type-unstable trace to target fragment %p.\n", peer);)              debug_only_v(printf("Joining type-unstable trace to target fragment %p.\n", (void*)peer);)
3082              stable = true;              stable = true;
3083              ((TreeInfo*)peer->vmprivate)->dependentTrees.addUnique(fragment->root);              ((TreeInfo*)peer->vmprivate)->dependentTrees.addUnique(fragment->root);
3084                treeInfo->linkedTrees.addUnique(peer);
3085          }          }
   
         compile(fragmento);  
3086      } else {      } else {
3087          exit->target = fragment->root;          exit->target = fragment->root;
3088  #if defined(JS_HAS_OPERATION_COUNT) && !JS_HAS_OPERATION_COUNT          fragment->lastIns = lir->insGuard(LIR_loop, lir->insImm(1), createGuardRecord(exit));
         exit->exitType = TIMEOUT_EXIT;  
         guard(false,  
               lir->ins_eq0(lir->insLoadi(cx_ins,  
                                          offsetof(JSContext, operationCount))),  
               exitIns);  
 #endif  
         fragment->lastIns = lir->insGuard(LIR_loop, lir->insImm(1), exitIns);  
         compile(fragmento);  
3089      }      }
3090        compile(tm);
3091    
3092      if (fragmento->assm()->error() != nanojit::None)      if (fragmento->assm()->error() != nanojit::None)
3093          return false;          return;
3094    
3095      joinEdgesToEntry(fragmento, peer_root);      joinEdgesToEntry(fragmento, peer_root);
3096    
3097        debug_only_v(printf("updating specializations on dependent and linked trees\n"))
3098        if (fragment->root->vmprivate)
3099            specializeTreesToMissingGlobals(cx, (TreeInfo*)fragment->root->vmprivate);
3100    
3101        /*
3102         * If this is a newly formed tree, and the outer tree has not been compiled yet, we
3103         * should try to compile the outer tree again.
3104         */
3105        if (outer)
3106            js_AttemptCompilation(cx, tm, globalObj, outer, outerArgc);
3107        
3108      debug_only_v(printf("recording completed at %s:%u@%u via closeLoop\n",      debug_only_v(printf("recording completed at %s:%u@%u via closeLoop\n",
3109                          cx->fp->script->filename,                          cx->fp->script->filename,
3110                          js_FramePCToLineNumber(cx, cx->fp),                          js_FramePCToLineNumber(cx, cx->fp),
3111                          FramePCOffset(cx->fp));)                          FramePCOffset(cx->fp));)
     return true;  
3112  }  }
3113    
3114  void  JS_REQUIRES_STACK void
3115  TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, Fragment* peer_root)  TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, VMFragment* peer_root)
3116  {  {
3117      if (fragment->kind == LoopTrace) {      if (fragment->kind == LoopTrace) {
3118          TreeInfo* ti;          TreeInfo* ti;
3119          Fragment* peer;          Fragment* peer;
3120          uint8* t1, *t2;          uint8* t1, *t2;
3121          UnstableExit* uexit, **unext;          UnstableExit* uexit, **unext;
3122            uint32* stackDemotes = (uint32*)alloca(sizeof(uint32) * treeInfo->nStackTypes);
3123            uint32* globalDemotes = (uint32*)alloca(sizeof(uint32) * treeInfo->nGlobalTypes());
3124    
         unsigned* demotes = (unsigned*)alloca(treeInfo->stackTypeMap.length() * sizeof(unsigned));  
3125          for (peer = peer_root; peer != NULL; peer = peer->peer) {          for (peer = peer_root; peer != NULL; peer = peer->peer) {
3126              if (!peer->code())              if (!peer->code())
3127                  continue;                  continue;
# Line 2341  Line 3131 
3131              while (uexit != NULL) {              while (uexit != NULL) {
3132                  bool remove = js_JoinPeersIfCompatible(fragmento, fragment, treeInfo, uexit->exit);                  bool remove = js_JoinPeersIfCompatible(fragmento, fragment, treeInfo, uexit->exit);
3133                  JS_ASSERT(!remove || fragment != peer);                  JS_ASSERT(!remove || fragment != peer);
3134                  debug_only_v(if (remove) {                  debug_only_v(if (remove) {
3135                               printf("Joining type-stable trace to target exit %p->%p.\n",                               printf("Joining type-stable trace to target exit %p->%p.\n",
3136                                      uexit->fragment, uexit->exit); });                                      (void*)uexit->fragment, (void*)uexit->exit); });
3137                  if (!remove) {                  if (!remove) {
3138                      /* See if this exit contains mismatch demotions, which imply trashing a tree.                      /* See if this exit contains mismatch demotions, which imply trashing a tree.
3139                         This is actually faster than trashing the original tree as soon as the                         This is actually faster than trashing the original tree as soon as the
3140                         instability is detected, since we could have compiled a fairly stable                         instability is detected, since we could have compiled a fairly stable
3141                         tree that ran faster with integers. */                         tree that ran faster with integers. */
3142                      unsigned count = 0;                      unsigned stackCount = 0;
3143                      t1 = treeInfo->stackTypeMap.data();                      unsigned globalCount = 0;
3144                      t2 = getTypeMap(uexit->exit) + uexit->exit->numGlobalSlots;                      t1 = treeInfo->stackTypeMap();
3145                        t2 = getStackTypeMap(uexit->exit);
3146                      for (unsigned i = 0; i < uexit->exit->numStackSlots; i++) {                      for (unsigned i = 0; i < uexit->exit->numStackSlots; i++) {
3147                          if (t2[i] == JSVAL_INT && t1[i] == JSVAL_DOUBLE) {                          if (t2[i] == JSVAL_INT && t1[i] == JSVAL_DOUBLE) {
3148                              demotes[count++] = i;                              stackDemotes[stackCount++] = i;
3149                          } else if (t2[i] != t1[i]) {                          } else if (t2[i] != t1[i]) {
3150                              count = 0;                              stackCount = 0;
3151                              break;                              break;
3152                          }                          }
3153                      }                      }
3154                      if (count) {                      t1 = treeInfo->globalTypeMap();
3155                          for (unsigned i = 0; i < count; i++)                      t2 = getGlobalTypeMap(uexit->exit);
3156                              oracle.markStackSlotUndemotable(cx->fp->script,                      for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++) {
3157                                                              cx->fp->regs->pc, demotes[i]);                          if (t2[i] == JSVAL_INT && t1[i] == JSVAL_DOUBLE) {
3158                          js_TrashTree(cx, uexit->fragment->root);                              globalDemotes[globalCount++] = i;
3159                            } else if (t2[i] != t1[i]) {
3160                                globalCount = 0;
3161                                stackCount = 0;
3162                                break;
3163                            }
3164                        }
3165                        if (stackCount || globalCount) {
3166                            for (unsigned i = 0; i < stackCount; i++)
3167                                oracle.markStackSlotUndemotable(cx, stackDemotes[i]);
3168                            for (unsigned i = 0; i < globalCount; i++)
3169                                oracle.markGlobalSlotUndemotable(cx, ti->globalSlots->data()[globalDemotes[i]]);
3170                            JS_ASSERT(peer == uexit->fragment->root);
3171                            if (fragment == peer)
3172                                trashSelf = true;
3173                            else
3174                                whichTreesToTrash.addUnique(uexit->fragment->root);
3175                          break;                          break;
3176                      }                      }
3177                  }                  }
# Line 2376  Line 3183 
3183                      unext = &uexit->next;                      unext = &uexit->next;
3184                      uexit = uexit->next;                      uexit = uexit->next;
3185                  }                  }
3186              }              }
3187          }          }
3188      }      }
3189    
3190      debug_only_v(js_DumpPeerStability(fragmento, peer_root->ip);)      debug_only_v(js_DumpPeerStability(traceMonitor, peer_root->ip, peer_root->globalObj,
3191                                          peer_root->globalShape, peer_root->argc);)
3192  }  }
3193    
3194  /* Emit an always-exit guard and compile the tree (used for break statements). */  /* Emit an always-exit guard and compile the tree (used for break statements). */
3195  void  JS_REQUIRES_STACK void
3196  TraceRecorder::endLoop(Fragmento* fragmento)  TraceRecorder::endLoop(JSTraceMonitor* tm)
3197  {  {
     LIns* exitIns = snapshot(LOOP_EXIT);  
   
3198      if (callDepth != 0) {      if (callDepth != 0) {
3199          debug_only_v(printf("Stack depth mismatch, possible recursion\n");)          debug_only_v(printf("Blacklisted: stack depth mismatch, possible recursion.\n");)
3200          js_BlacklistPC(fragmento, fragment);          js_Blacklist((jsbytecode*) fragment->root->ip);
3201          trashTree = true;          trashSelf = true;
3202          return;          return;
3203      }      }
3204    
3205      fragment->lastIns = lir->insGuard(LIR_x, lir->insImm(1), exitIns);      fragment->lastIns =
3206      compile(fragmento);          lir->insGuard(LIR_x, lir->insImm(1), createGuardRecord(snapshot(LOOP_EXIT)));
3207        compile(tm);
3208    
3209      if (fragmento->assm()->error() != nanojit::None)      if (tm->fragmento->assm()->error() != nanojit::None)
3210          return;          return;
3211    
3212      joinEdgesToEntry(fragmento, fragmento->getLoop(fragment->root->ip));      VMFragment* root = (VMFragment*)fragment->root;
3213        joinEdgesToEntry(tm->fragmento, getLoop(tm, root->ip, root->globalObj, root->globalShape, root->argc));
3214    
3215        /* Note: this must always be done, in case we added new globals on trace and haven't yet
3216           propagated those to linked and dependent trees. */
3217        debug_only_v(printf("updating specializations on dependent and linked trees\n"))
3218        if (fragment->root->vmprivate)
3219            specializeTreesToMissingGlobals(cx, (TreeInfo*)fragment->root->vmprivate);
3220    
3221        /*
3222         * If this is a newly formed tree, and the outer tree has not been compiled yet, we
3223         * should try to compile the outer tree again.
3224         */
3225        if (outer)
3226            js_AttemptCompilation(cx, tm, globalObj, outer, outerArgc);
3227        
3228      debug_only_v(printf("recording completed at %s:%u@%u via endLoop\n",      debug_only_v(printf("recording completed at %s:%u@%u via endLoop\n",
3229                          cx->fp->script->filename,                          cx->fp->script->filename,
3230                          js_FramePCToLineNumber(cx, cx->fp),                          js_FramePCToLineNumber(cx, cx->fp),
# Line 2411  Line 3232 
3232  }  }
3233    
3234  /* Emit code to adjust the stack to match the inner tree's stack expectations. */  /* Emit code to adjust the stack to match the inner tree's stack expectations. */
3235  void  JS_REQUIRES_STACK void
3236  TraceRecorder::prepareTreeCall(Fragment* inner)  TraceRecorder::prepareTreeCall(Fragment* inner)
3237  {  {
3238      TreeInfo* ti = (TreeInfo*)inner->vmprivate;      TreeInfo* ti = (TreeInfo*)inner->vmprivate;
# Line 2425  Line 3246 
3246             any outer frames that the inner tree doesn't expect but the outer tree has. */             any outer frames that the inner tree doesn't expect but the outer tree has. */
3247          ptrdiff_t sp_adj = nativeStackOffset(&cx->fp->argv[-2]);          ptrdiff_t sp_adj = nativeStackOffset(&cx->fp->argv[-2]);
3248          /* Calculate the amount we have to lift the call stack by */          /* Calculate the amount we have to lift the call stack by */
3249          ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo);          ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo*);
3250          /* Guard that we have enough stack space for the tree we are trying to call on top          /* Guard that we have enough stack space for the tree we are trying to call on top
3251             of the new value for sp. */             of the new value for sp. */
3252          debug_only_v(printf("sp_adj=%d outer=%d inner=%d\n",          debug_only_v(printf("sp_adj=%d outer=%d inner=%d\n",
# Line 2437  Line 3258 
3258          guard(true, lir->ins2(LIR_lt, sp_top, eos_ins), OOM_EXIT);          guard(true, lir->ins2(LIR_lt, sp_top, eos_ins), OOM_EXIT);
3259          /* Guard that we have enough call stack space. */          /* Guard that we have enough call stack space. */
3260          LIns* rp_top = lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj +          LIns* rp_top = lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj +
3261                  ti->maxCallDepth * sizeof(FrameInfo));                  ti->maxCallDepth * sizeof(FrameInfo*));
3262          guard(true, lir->ins2(LIR_lt, rp_top, eor_ins), OOM_EXIT);          guard(true, lir->ins2(LIR_lt, rp_top, eor_ins), OOM_EXIT);
3263          /* We have enough space, so adjust sp and rp to their new level. */          /* We have enough space, so adjust sp and rp to their new level. */
3264          lir->insStorei(inner_sp_ins = lir->ins2i(LIR_piadd, lirbuf->sp,          lir->insStorei(inner_sp_ins = lir->ins2i(LIR_piadd, lirbuf->sp,
# Line 2451  Line 3272 
3272  }  }
3273    
3274  /* Record a call to an inner tree. */  /* Record a call to an inner tree. */
3275  void  JS_REQUIRES_STACK void
3276  TraceRecorder::emitTreeCall(Fragment* inner, VMSideExit* exit)  TraceRecorder::emitTreeCall(Fragment* inner, VMSideExit* exit)
3277  {  {
3278      TreeInfo* ti = (TreeInfo*)inner->vmprivate;      TreeInfo* ti = (TreeInfo*)inner->vmprivate;
3279    
3280      /* Invoke the inner tree. */      /* Invoke the inner tree. */
3281      LIns* args[] = { INS_CONSTPTR(inner), lirbuf->state }; /* reverse order */      LIns* args[] = { INS_CONSTPTR(inner), lirbuf->state }; /* reverse order */
3282      LIns* ret = lir->insCall(&js_CallTree_ci, args);      LIns* ret = lir->insCall(&js_CallTree_ci, args);
3283    
3284      /* Read back all registers, in case the called tree changed any of them. */      /* Read back all registers, in case the called tree changed any of them. */
3285      import(ti, inner_sp_ins, exit->numGlobalSlots, exit->calldepth,      JS_ASSERT(!memchr(getGlobalTypeMap(exit), JSVAL_BOXED, exit->numGlobalSlots) &&
3286             getTypeMap(exit), getTypeMap(exit) + exit->numGlobalSlots);                !memchr(getStackTypeMap(exit), JSVAL_BOXED, exit->numStackSlots));
3287        /* bug 502604 - It is illegal to extend from the outer typemap without first extending from the
3288         * inner. Make a new typemap here.
3289         */
3290        TypeMap fullMap;
3291        fullMap.add(getStackTypeMap(exit), exit->numStackSlots);
3292        fullMap.add(getGlobalTypeMap(exit), exit->numGlobalSlots);
3293        TreeInfo* innerTree = (TreeInfo*)exit->from->root->vmprivate;
3294        if (exit->numGlobalSlots < innerTree->nGlobalTypes()) {
3295            fullMap.add(innerTree->globalTypeMap() + exit->numGlobalSlots,
3296                        innerTree->nGlobalTypes() - exit->numGlobalSlots);
3297        }
3298        import(ti, inner_sp_ins, exit->numStackSlots, fullMap.length() - exit->numStackSlots,
3299               exit->calldepth, fullMap.data());
3300    
3301      /* Restore sp and rp to their original values (we still have them in a register). */      /* Restore sp and rp to their original values (we still have them in a register). */
3302      if (callDepth > 0) {      if (callDepth > 0) {
3303          lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));          lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
3304          lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));          lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
3305      }      }
3306      /* Guard that we come out of the inner tree along the same side exit we came out when  
3307         we called the inner tree at recording time. */      /*
3308         * Guard that we come out of the inner tree along the same side exit we came out when
3309         * we called the inner tree at recording time.
3310         */
3311      guard(true, lir->ins2(LIR_eq, ret, INS_CONSTPTR(exit)), NESTED_EXIT);      guard(true, lir->ins2(LIR_eq, ret, INS_CONSTPTR(exit)), NESTED_EXIT);
3312      /* Register us as a dependent tree of the inner tree. */      /* Register us as a dependent tree of the inner tree. */
3313      ((TreeInfo*)inner->vmprivate)->dependentTrees.addUnique(fragment->root);      ((TreeInfo*)inner->vmprivate)->dependentTrees.addUnique(fragment->root);
3314        treeInfo->linkedTrees.addUnique(inner);
3315  }  }
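
The type-map stitching noted for bug 502604 above can be shown on its own: the exit contributes its stack types and the globals it knew about, and if the inner tree has since learned about more globals, their types are appended from the inner tree's own map so import() sees a complete layout. Vectors of bytes below are illustrative stand-ins for TypeMap:

    #include <cstdio>
    #include <vector>

    static std::vector<unsigned char>
    buildFullMap(const std::vector<unsigned char>& exitStack,
                 const std::vector<unsigned char>& exitGlobals,
                 const std::vector<unsigned char>& innerTreeGlobals) {
        std::vector<unsigned char> full(exitStack);
        full.insert(full.end(), exitGlobals.begin(), exitGlobals.end());
        if (exitGlobals.size() < innerTreeGlobals.size())     // inner tree knows extra globals
            full.insert(full.end(),
                        innerTreeGlobals.begin() + exitGlobals.size(),
                        innerTreeGlobals.end());
        return full;
    }

    int main() {
        std::vector<unsigned char> stack = {1, 2};
        std::vector<unsigned char> exitGlobals = {3};
        std::vector<unsigned char> innerGlobals = {3, 4, 5};
        std::vector<unsigned char> full = buildFullMap(stack, exitGlobals, innerGlobals);
        std::printf("full map length: %u\n", (unsigned) full.size());   // 5
    }
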
3316    
3317  /* Add a if/if-else control-flow merge point to the list of known merge points. */  /* Add a if/if-else control-flow merge point to the list of known merge points. */
3318  void  JS_REQUIRES_STACK void
3319  TraceRecorder::trackCfgMerges(jsbytecode* pc)  TraceRecorder::trackCfgMerges(jsbytecode* pc)
3320  {  {
3321      /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */      /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
# Line 2482  Line 3323 
3323      jssrcnote* sn = js_GetSrcNote(cx->fp->script, pc);      jssrcnote* sn = js_GetSrcNote(cx->fp->script, pc);
3324      if (sn != NULL) {      if (sn != NULL) {
3325          if (SN_TYPE(sn) == SRC_IF) {          if (SN_TYPE(sn) == SRC_IF) {
3326              cfgMerges.add((*pc == JSOP_IFEQ)              cfgMerges.add((*pc == JSOP_IFEQ)
3327                            ? pc + GET_JUMP_OFFSET(pc)                            ? pc + GET_JUMP_OFFSET(pc)
3328                            : pc + GET_JUMPX_OFFSET(pc));                            : pc + GET_JUMPX_OFFSET(pc));
3329          } else if (SN_TYPE(sn) == SRC_IF_ELSE)          } else if (SN_TYPE(sn) == SRC_IF_ELSE)
3330              cfgMerges.add(pc + js_GetSrcNoteOffset(sn, 0));              cfgMerges.add(pc + js_GetSrcNoteOffset(sn, 0));
3331      }      }
3332  }  }
3333    
3334  /* Invert the direction of the guard if this is a loop edge that is not  /* Invert the direction of the guard if this is a loop edge that is not
3335     taken (thin loop). */     taken (thin loop). */
3336  void  JS_REQUIRES_STACK void
3337  TraceRecorder::flipIf(jsbytecode* pc, bool& cond)  TraceRecorder::emitIf(jsbytecode* pc, bool cond, LIns* x)
3338  {  {
3339        ExitType exitType;
3340      if (js_IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {      if (js_IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {
3341          switch (*pc) {          exitType = LOOP_EXIT;
3342            case JSOP_IFEQ:  
3343            case JSOP_IFEQX:          /*
3344              if (!cond)           * If we are about to walk out of the loop, generate code for the inverse loop
3345                  return;           * condition, pretending we recorded the case that stays on trace.
3346              break;           */
3347            case JSOP_IFNE:          if ((*pc == JSOP_IFEQ || *pc == JSOP_IFEQX) == cond) {
3348            case JSOP_IFNEX:              JS_ASSERT(*pc == JSOP_IFNE || *pc == JSOP_IFNEX || *pc == JSOP_IFEQ || *pc == JSOP_IFEQX);
3349              if (cond)              debug_only_v(printf("Walking out of the loop, terminating it anyway.\n");)
3350                  return;              cond = !cond;
             break;  
           default:  
             JS_NOT_REACHED("flipIf");  
3351          }          }
3352          /* We are about to walk out of the loop, so terminate it with  
3353             an inverse loop condition. */          /*
3354          debug_only_v(printf("Walking out of the loop, terminating it anyway.\n");)           * Conditional guards do not have to be emitted if the condition is constant. We
3355          cond = !cond;           * make a note whether the loop condition is true or false here, so we later know
3356          terminate = true;           * whether to emit a loop edge or a loop end.
         /* If when we get to closeLoop the tree is decided to be type unstable, we need to  
            reverse this logic because the loop won't be closed after all.  Store the real  
            value of the IP the interpreter expects, so we can use it in our final LIR_x.  
3357           */           */
3358          if (*pc == JSOP_IFEQX || *pc == JSOP_IFNEX)          if (x->isconst()) {
3359              pc += GET_JUMPX_OFFSET(pc);              loop = (x->constval() == cond);
3360          else              return;
3361              pc += GET_JUMP_OFFSET(pc);          }
3362          terminate_ip_adj = ENCODE_IP_ADJ(cx->fp, pc);      } else {
3363            exitType = BRANCH_EXIT;
3364      }      }
3365        if (!x->isconst())
3366            guard(cond, x, exitType);
3367  }  }
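
The branch-direction rule in emitIf can be reduced to one line: on a loop edge, if the branch being recorded would leave the loop, guard on the opposite outcome so the trace keeps looping. A toy restatement with plain booleans (no LIR, names are illustrative):

    #include <cstdio>

    // Returns the condition value the guard should expect to stay on trace.
    static bool guardCondition(bool isLoopEdge, bool branchLeavesLoop, bool recordedCond) {
        if (isLoopEdge && branchLeavesLoop)
            return !recordedCond;    // pretend we recorded the case that stays in the loop
        return recordedCond;
    }

    int main() {
        // Recorded iteration took the exit branch of a loop edge:
        std::printf("%d\n", guardCondition(true, true, false));   // 1: guard the stay-in-loop case
        // Ordinary branch inside the loop body is guarded as recorded:
        std::printf("%d\n", guardCondition(false, false, true));  // 1
    }
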
3368    
3369  /* Emit code for a fused IFEQ/IFNE. */  /* Emit code for a fused IFEQ/IFNE. */
3370  void  JS_REQUIRES_STACK void
3371  TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)  TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)
3372  {  {
3373      if (x->isconst()) // no need to guard if condition is constant      if (*pc == JSOP_IFEQ || *pc == JSOP_IFNE) {
3374          return;          emitIf(pc, cond, x);
3375      if (*pc == JSOP_IFEQ) {          if (*pc == JSOP_IFEQ)
3376          flipIf(pc, cond);              trackCfgMerges(pc);
         guard(cond, x, BRANCH_EXIT);  
         trackCfgMerges(pc);  
     } else if (*pc == JSOP_IFNE) {  
         flipIf(pc, cond);  
         guard(cond, x, BRANCH_EXIT);  
3377      }      }
3378  }  }
3379    
3380    /* Check whether we have reached the end of the trace. */
3381    JS_REQUIRES_STACK JSRecordingStatus
3382    TraceRecorder::checkTraceEnd(jsbytecode *pc)
3383    {
3384        if (js_IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {
3385            /*
3386             * If we compile a loop, the trace should have a zero stack balance at the loop
3387             * edge. Currently we are parked on a comparison op or IFNE/IFEQ, so advance
3388             * pc to the loop header and adjust the stack pointer and pretend we have
3389             * reached the loop header.
3390             */
3391            if (loop) {
3392                JS_ASSERT(!cx->fp->imacpc && (pc == cx->fp->regs->pc || pc == cx->fp->regs->pc + 1));
3393                bool fused = pc != cx->fp->regs->pc;
3394                JSFrameRegs orig = *cx->fp->regs;
3395    
3396                cx->fp->regs->pc = (jsbytecode*)fragment->root->ip;
3397                cx->fp->regs->sp -= fused ? 2 : 1;
3398    
3399                bool demote = false;
3400                closeLoop(traceMonitor, demote);
3401    
3402                *cx->fp->regs = orig;
3403    
3404                /*
3405                 * If compiling this loop generated new oracle information which will likely
3406                 * lead to a different compilation result, immediately trigger another
3407                 * compiler run. This is guaranteed to converge since the oracle only
3408                 * accumulates adverse information but never drops it (except when we
3409                 * flush it during garbage collection).
3410                 */
3411                if (demote)
3412                    js_AttemptCompilation(cx, traceMonitor, globalObj, outer, outerArgc);
3413            } else {
3414                endLoop(traceMonitor);
3415            }
3416            return JSRS_STOP;
3417        }
3418        return JSRS_CONTINUE;
3419    }
3420    
3421  bool  bool
3422  TraceRecorder::hasMethod(JSObject* obj, jsid id)  TraceRecorder::hasMethod(JSObject* obj, jsid id)
3423  {  {
# Line 2566  Line 3441 
3441              if (VALUE_IS_FUNCTION(cx, v)) {              if (VALUE_IS_FUNCTION(cx, v)) {
3442                  found = true;                  found = true;
3443                  if (!SCOPE_IS_BRANDED(scope)) {                  if (!SCOPE_IS_BRANDED(scope)) {
3444                      SCOPE_MAKE_UNIQUE_SHAPE(cx, scope);                      js_MakeScopeShapeUnique(cx, scope);
3445                      SCOPE_SET_BRANDED(scope);                      SCOPE_SET_BRANDED(scope);
3446                  }                  }
3447              }              }
# Line 2577  Line 3452 
3452      return found;      return found;
3453  }  }
3454    
3455  bool  JS_REQUIRES_STACK bool
 TraceRecorder::hasToStringMethod(JSObject* obj)  
 {  
     JS_ASSERT(cx->fp->regs->sp + 1 <= cx->fp->slots + cx->fp->script->nslots);  
   
     return hasMethod(obj, ATOM_TO_JSID(cx->runtime->atomState.toStringAtom));  
 }  
   
 bool  
 TraceRecorder::hasValueOfMethod(JSObject* obj)  
 {  
     JS_ASSERT(cx->fp->regs->sp + 2 <= cx->fp->slots + cx->fp->script->nslots);  
   
     return hasMethod(obj, ATOM_TO_JSID(cx->runtime->atomState.valueOfAtom));  
 }  
   
 bool  
3456  TraceRecorder::hasIteratorMethod(JSObject* obj)  TraceRecorder::hasIteratorMethod(JSObject* obj)
3457  {  {
3458      JS_ASSERT(cx->fp->regs->sp + 2 <= cx->fp->slots + cx->fp->script->nslots);      JS_ASSERT(cx->fp->regs->sp + 2 <= cx->fp->slots + cx->fp->script->nslots);
# Line 2619  Line 3478 
3478    
3479      x = (VMSideExit *)i->record()->exit;      x = (VMSideExit *)i->record()->exit;
3480      sprintf(out,      sprintf(out,
3481              "%s: %s %s -> %lu:%lu sp%+ld rp%+ld",              "%s: %s %s -> pc=%p imacpc=%p sp%+ld rp%+ld",
3482              formatRef(i),              formatRef(i),
3483              lirNames[i->opcode()],              lirNames[i->opcode()],
3484              i->oprnd1()->isCond() ? formatRef(i->oprnd1()) : "",              i->oprnd1()->isCond() ? formatRef(i->oprnd1()) : "",
3485              IMACRO_PC_ADJ(x->ip_adj),              (void *)x->pc,
3486              SCRIPT_PC_ADJ(x->ip_adj),              (void *)x->imacpc,
3487              (long int)x->sp_adj,              (long int)x->sp_adj,
3488              (long int)x->rp_adj);              (long int)x->rp_adj);
3489  }  }
# Line 2633  Line 3492 
3492  void  void
3493  nanojit::Fragment::onDestroy()  nanojit::Fragment::onDestroy()
3494  {  {
     if (root == this) {  
         delete mergeCounts;  
         delete lirbuf;  
     }  
3495      delete (TreeInfo *)vmprivate;      delete (TreeInfo *)vmprivate;
3496  }  }
3497    
3498  void  static JS_REQUIRES_STACK bool
3499  js_DeleteRecorder(JSContext* cx)  js_DeleteRecorder(JSContext* cx)
3500  {  {
3501      JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);      JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3502    
3503      /* Aborting and completing a trace end up here. */      /* Aborting and completing a trace end up here. */
     JS_ASSERT(tm->onTrace);  
     tm->onTrace = false;  
   
3504      delete tm->recorder;      delete tm->recorder;
3505      tm->recorder = NULL;      tm->recorder = NULL;
3506    
3507        /*
3508         * If we ran out of memory, flush the code cache.
3509         */
3510        if (JS_TRACE_MONITOR(cx).fragmento->assm()->error() == OutOMem ||
3511            js_OverfullFragmento(tm, tm->fragmento)) {
3512            FlushJITCache(cx);
3513            return false;
3514        }
3515    
3516        return true;
3517  }  }
3518    
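The boolean returned by js_DeleteRecorder tells callers whether compiled state survived recorder teardown: false means the assembler reported out-of-memory or the fragmento grew too large, and the whole code cache was flushed. A tiny standalone model of that contract (stub function, hypothetical name):

    #include <cassert>

    // Stand-in for the teardown decision: teardown "fails" (and the cache is
    // flushed) when the assembler hit OOM or the fragmento is overfull.
    static bool deleteRecorderStub(bool assemblerOOM, bool fragmentoOverfull)
    {
        return !(assemblerOOM || fragmentoOverfull);
    }

    int main()
    {
        assert(deleteRecorderStub(false, false));   // normal teardown: keep compiled trees
        assert(!deleteRecorderStub(true,  false));  // OOM: cache flushed, caller must bail out
        assert(!deleteRecorderStub(false, true));   // cache grew too large: same outcome
        return 0;
    }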
3519  /**  /**
3520   * Checks whether the shape of the global object has changed.   * Checks whether the shape of the global object has changed.
3521   */   */
3522  static inline bool  static JS_REQUIRES_STACK bool
3523  js_CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj)  CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj,
3524                           uint32 *shape=NULL, SlotList** slots=NULL)
3525  {  {
3526      /* Check the global shape. */      if (tm->needFlush) {
3527      if (OBJ_SHAPE(globalObj) != tm->globalShape) {          FlushJITCache(cx);
         AUDIT(globalShapeMismatchAtEntry);  
         debug_only_v(printf("Global shape mismatch (%u vs. %u), flushing cache.\n",  
                             OBJ_SHAPE(globalObj), tm->globalShape);)  
3528          return false;          return false;
3529      }      }
3530      return true;  
3531        if (STOBJ_NSLOTS(globalObj) > MAX_GLOBAL_SLOTS)
3532            return false;
3533    
3534        uint32 globalShape = OBJ_SHAPE(globalObj);
3535    
3536        if (tm->recorder) {
3537            VMFragment* root = (VMFragment*)tm->recorder->getFragment()->root;
3538            TreeInfo* ti = tm->recorder->getTreeInfo();
3539            /* Check the global shape matches the recorder's treeinfo's shape. */
3540            if (globalObj != root->globalObj || globalShape != root->globalShape) {
3541                AUDIT(globalShapeMismatchAtEntry);
3542                debug_only_v(printf("Global object/shape mismatch (%p/%u vs. %p/%u), flushing cache.\n",
3543                                    (void*)globalObj, globalShape, (void*)root->globalObj,
3544                                    root->globalShape);)
3545                js_Backoff(cx, (jsbytecode*) root->ip);
3546                FlushJITCache(cx);
3547                return false;
3548            }
3549            if (shape)
3550                *shape = globalShape;
3551            if (slots)
3552                *slots = ti->globalSlots;
3553            return true;
3554        }
3555    
3556        /* No recorder; search for a tracked global state (or allocate one). */
3557        for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
3558            GlobalState &state = tm->globalStates[i];
3559    
3560            if (state.globalShape == uint32(-1)) {
3561                state.globalObj = globalObj;
3562                state.globalShape = globalShape;
3563                JS_ASSERT(state.globalSlots);
3564                JS_ASSERT(state.globalSlots->length() == 0);
3565            }
3566    
3567            if (state.globalObj == globalObj && state.globalShape == globalShape) {
3568                if (shape)
3569                    *shape = globalShape;
3570                if (slots)
3571                    *slots = state.globalSlots;
3572                return true;
3573            }
3574        }
3575    
3576        /* No currently-tracked-global found and no room to allocate, abort. */
3577        AUDIT(globalShapeMismatchAtEntry);
3578        debug_only_v(printf("No global slotlist for global shape %u, flushing cache.\n",
3579                            globalShape));
3580        FlushJITCache(cx);
3581        return false;
3582  }  }
3583    
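When no recorder is active, CheckGlobalObjectShape falls back to a small fixed table of tracked (global object, shape) pairs: a free slot is claimed on first sight, a matching entry is reused, and only a full table forces a cache flush. A standalone sketch of that find-or-allocate pattern (hypothetical Entry type; the real table has MONITOR_N_GLOBAL_STATES entries):

    #include <array>
    #include <cassert>
    #include <cstdint>

    // Minimal model of the tracked-global table: an entry is free while its
    // shape is uint32_t(-1); the first miss claims a free slot, later calls
    // with the same (obj, shape) pair hit the same entry.
    struct Entry {
        const void* obj   = nullptr;
        uint32_t    shape = uint32_t(-1);
    };

    static Entry* findOrAllocate(std::array<Entry, 4>& table, const void* obj, uint32_t shape)
    {
        for (Entry& e : table) {
            if (e.shape == uint32_t(-1)) {       // free slot: claim it for this global
                e.obj = obj;
                e.shape = shape;
            }
            if (e.obj == obj && e.shape == shape)
                return &e;
        }
        return nullptr;                          // table full: caller flushes the JIT cache
    }

    int main()
    {
        std::array<Entry, 4> table;
        int g1, g2;
        Entry* a = findOrAllocate(table, &g1, 42);
        Entry* b = findOrAllocate(table, &g1, 42);
        assert(a && a == b);                     // same global/shape maps to the same entry
        assert(findOrAllocate(table, &g2, 7) != a);
        return 0;
    }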
3584  static bool  static JS_REQUIRES_STACK bool
3585  js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,  js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
3586                   unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap,                   unsigned stackSlots, unsigned ngslots, uint8* typeMap,
3587                   VMSideExit* expectedInnerExit, Fragment* outer)                   VMSideExit* expectedInnerExit, jsbytecode* outer, uint32 outerArgc)
3588  {  {
3589      JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);      JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
3590        if (JS_TRACE_MONITOR(cx).needFlush) {
3591            FlushJITCache(cx);
3592            return false;
3593        }
3594    
3595      /*      JS_ASSERT(f->root != f || !cx->fp->imacpc);
      * Emulate on-trace semantics and avoid rooting headaches while recording,  
      * by suppressing last-ditch GC attempts while recording a trace. This does  
      * mean that trace recording must not nest or the following assertion will  
      * botch.  
      */  
     JS_ASSERT(!tm->onTrace);  
     tm->onTrace = true;  
3596    
3597      /* start recording if no exception during construction */      /* start recording if no exception during construction */
3598      tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti,      tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti,
3599                                             ngslots, globalTypeMap, stackTypeMap,                                             stackSlots, ngslots, typeMap,
3600                                             expectedInnerExit, outer);                                             expectedInnerExit, outer, outerArgc);
3601    
3602      if (cx->throwing) {      if (cx->throwing) {
3603          js_AbortRecording(cx, "setting up recorder failed");          js_AbortRecording(cx, "setting up recorder failed");
3604          return false;          return false;
# Line 2715  Line 3625 
3625      unsigned length = ti->dependentTrees.length();      unsigned length = ti->dependentTrees.length();
3626      for (unsigned n = 0; n < length; ++n)      for (unsigned n = 0; n < length; ++n)
3627          js_TrashTree(cx, data[n]);          js_TrashTree(cx, data[n]);
3628        data = ti->linkedTrees.data();
3629        length = ti->linkedTrees.length();
3630        for (unsigned n = 0; n < length; ++n)
3631            js_TrashTree(cx, data[n]);
3632      delete ti;      delete ti;
3633      JS_ASSERT(!f->code() && !f->vmprivate);      JS_ASSERT(!f->code() && !f->vmprivate);
3634  }  }
# Line 2722  Line 3636 
3636  static int  static int
3637  js_SynthesizeFrame(JSContext* cx, const FrameInfo& fi)  js_SynthesizeFrame(JSContext* cx, const FrameInfo& fi)
3638  {  {
3639        VOUCH_DOES_NOT_REQUIRE_STACK();
3640    
3641      JS_ASSERT(HAS_FUNCTION_CLASS(fi.callee));      JS_ASSERT(HAS_FUNCTION_CLASS(fi.callee));
3642    
3643      JSFunction* fun = GET_FUNCTION_PRIVATE(cx, fi.callee);      JSFunction* fun = GET_FUNCTION_PRIVATE(cx, fi.callee);
3644      JS_ASSERT(FUN_INTERPRETED(fun));      JS_ASSERT(FUN_INTERPRETED(fun));
3645    
3646      /* Assert that we have a correct sp distance from cx->fp->slots in fi. */      /* Assert that we have a correct sp distance from cx->fp->slots in fi. */
3647      JS_ASSERT_IF(!FI_IMACRO_PC(fi, cx->fp),      JSStackFrame* fp = cx->fp;
3648                   js_ReconstructStackDepth(cx, cx->fp->script, FI_SCRIPT_PC(fi, cx->fp))      JS_ASSERT_IF(!fi.imacpc,
3649                   == uintN(fi.s.spdist - cx->fp->script->nfixed));                   js_ReconstructStackDepth(cx, fp->script, fi.pc)
3650                     == uintN(fi.spdist - fp->script->nfixed));
3651    
3652      uintN nframeslots = JS_HOWMANY(sizeof(JSInlineFrame), sizeof(jsval));      uintN nframeslots = JS_HOWMANY(sizeof(JSInlineFrame), sizeof(jsval));
3653      JSScript* script = fun->u.i.script;      JSScript* script = fun->u.i.script;
# Line 2739  Line 3656 
3656      /* Code duplicated from inline_call: case in js_Interpret (FIXME). */      /* Code duplicated from inline_call: case in js_Interpret (FIXME). */
3657      JSArena* a = cx->stackPool.current;      JSArena* a = cx->stackPool.current;
3658      void* newmark = (void*) a->avail;      void* newmark = (void*) a->avail;
3659      uintN argc = fi.s.argc & 0x7fff;      uintN argc = fi.get_argc();
3660      jsval* vp = cx->fp->slots + fi.s.spdist - (2 + argc);      jsval* vp = fp->slots + fi.spdist - (2 + argc);
3661      uintN missing = 0;      uintN missing = 0;
3662      jsval* newsp;      jsval* newsp;
3663    
3664      if (fun->nargs > argc) {      if (fun->nargs > argc) {
3665          const JSFrameRegs& regs = *cx->fp->regs;          const JSFrameRegs& regs = *fp->regs;
3666    
3667          newsp = vp + 2 + fun->nargs;          newsp = vp + 2 + fun->nargs;
3668          JS_ASSERT(newsp > regs.sp);          JS_ASSERT(newsp > regs.sp);
# Line 2769  Line 3686 
3686          a->avail += nbytes;          a->avail += nbytes;
3687          JS_ASSERT(missing == 0);          JS_ASSERT(missing == 0);
3688      } else {      } else {
3689            /* This allocation is infallible: js_ExecuteTree reserved enough stack. */
3690          JS_ARENA_ALLOCATE_CAST(newsp, jsval *, &cx->stackPool, nbytes);          JS_ARENA_ALLOCATE_CAST(newsp, jsval *, &cx->stackPool, nbytes);
3691          if (!newsp) {          JS_ASSERT(newsp);
             js_ReportOutOfScriptQuota(cx);  
             return 0;  
         }  
3692    
3693          /*          /*
3694           * Move args if the missing ones overflow arena a, then push           * Move args if the missing ones overflow arena a, then push
# Line 2797  Line 3712 
3712      newifp->frame.argsobj = NULL;      newifp->frame.argsobj = NULL;
3713      newifp->frame.varobj = NULL;      newifp->frame.varobj = NULL;
3714      newifp->frame.script = script;      newifp->frame.script = script;
3715      newifp->frame.callee = fi.callee;      newifp->frame.callee = fi.callee; // Roll with a potentially stale callee for now.
3716      newifp->frame.fun = fun;      newifp->frame.fun = fun;
3717    
3718      bool constructing = fi.s.argc & 0x8000;      bool constructing = fi.is_constructing();
3719      newifp->frame.argc = argc;      newifp->frame.argc = argc;
3720        newifp->callerRegs.pc = fi.pc;
3721        newifp->callerRegs.sp = fp->slots + fi.spdist;
3722        fp->imacpc = fi.imacpc;
3723    
3724      jsbytecode* imacro_pc = FI_IMACRO_PC(fi, cx->fp);  #ifdef DEBUG
3725      jsbytecode* script_pc = FI_SCRIPT_PC(fi, cx->fp);      if (fi.block != fp->blockChain) {
3726      newifp->callerRegs.pc = imacro_pc ? imacro_pc : script_pc;          for (JSObject* obj = fi.block; obj != fp->blockChain; obj = STOBJ_GET_PARENT(obj))
3727      newifp->callerRegs.sp = cx->fp->slots + fi.s.spdist;              JS_ASSERT(obj);
3728      cx->fp->imacpc = imacro_pc ? script_pc : NULL;      }
3729    #endif
3730        fp->blockChain = fi.block;
3731    
3732      newifp->frame.argv = newifp->callerRegs.sp - argc;      newifp->frame.argv = newifp->callerRegs.sp - argc;
3733      JS_ASSERT(newifp->frame.argv);      JS_ASSERT(newifp->frame.argv);
# Line 2816  Line 3736 
3736      // someone forgets to initialize it later.      // someone forgets to initialize it later.
3737      newifp->frame.argv[-1] = JSVAL_HOLE;      newifp->frame.argv[-1] = JSVAL_HOLE;
3738  #endif  #endif
3739      JS_ASSERT(newifp->frame.argv >= StackBase(cx->fp) + 2);      JS_ASSERT(newifp->frame.argv >= StackBase(fp) + 2);
3740    
3741      newifp->frame.rval = JSVAL_VOID;      newifp->frame.rval = JSVAL_VOID;
3742      newifp->frame.down = cx->fp;      newifp->frame.down = fp;
3743      newifp->frame.annotation = NULL;      newifp->frame.annotation = NULL;
3744      newifp->frame.scopeChain = OBJ_GET_PARENT(cx, fi.callee);      newifp->frame.scopeChain = NULL; // will be updated in FlushNativeStackFrame
3745      newifp->frame.sharpDepth = 0;      newifp->frame.sharpDepth = 0;
3746      newifp->frame.sharpArray = NULL;      newifp->frame.sharpArray = NULL;
3747      newifp->frame.flags = constructing ? JSFRAME_CONSTRUCTING : 0;      newifp->frame.flags = constructing ? JSFRAME_CONSTRUCTING : 0;
# Line 2829  Line 3749 
3749      newifp->frame.xmlNamespace = NULL;      newifp->frame.xmlNamespace = NULL;
3750      newifp->frame.blockChain = NULL;      newifp->frame.blockChain = NULL;
3751      newifp->mark = newmark;      newifp->mark = newmark;
3752      newifp->frame.thisp = NULL; // will be set by js_ExecuteTree -> FlushNativeStackFrame      newifp->frame.thisp = NULL; // will be updated in FlushNativeStackFrame
3753    
3754      newifp->frame.regs = cx->fp->regs;      newifp->frame.regs = fp->regs;
3755      newifp->frame.regs->pc = script->code;      newifp->frame.regs->pc = script->code;
3756      newifp->frame.regs->sp = newsp + script->nfixed;      newifp->frame.regs->sp = newsp + script->nfixed;
3757      newifp->frame.imacpc = NULL;      newifp->frame.imacpc = NULL;
3758      newifp->frame.slots = newsp;      newifp->frame.slots = newsp;
3759      if (script->staticDepth < JS_DISPLAY_SIZE) {      if (script->staticLevel < JS_DISPLAY_SIZE) {
3760          JSStackFrame **disp = &cx->display[script->staticDepth];          JSStackFrame **disp = &cx->display[script->staticLevel];
3761          newifp->frame.displaySave = *disp;          newifp->frame.displaySave = *disp;
3762          *disp = &newifp->frame;          *disp = &newifp->frame;
3763      }      }
 #ifdef DEBUG  
     newifp->frame.pcDisabledSave = 0;  
 #endif  
3764    
3765      /*      /*
3766       * Note that cx->fp->script is still the caller's script; set the callee       * Note that fp->script is still the caller's script; set the callee
3767       * inline frame's idea of caller version from its version.       * inline frame's idea of caller version from its version.
3768       */       */
3769      newifp->callerVersion = (JSVersion) cx->fp->script->version;      newifp->callerVersion = (JSVersion) fp->script->version;
   
     cx->fp->regs = &newifp->callerRegs;  
     cx->fp = &newifp->frame;  
3770    
3771      if (fun->flags & JSFUN_HEAVYWEIGHT) {      // After this paragraph, fp and cx->fp point to the newly synthesized frame.
3772          /*      fp->regs = &newifp->callerRegs;
3773           * Set hookData to null because the failure case for js_GetCallObject      fp = cx->fp = &newifp->frame;
          * involves it calling the debugger hook.  
          */  
         newifp->hookData = NULL;  
         if (!js_GetCallObject(cx, &newifp->frame, newifp->frame.scopeChain))  
             return -1;  
     }  
3774    
3775      /*      /*
3776       * If there's a call hook, invoke it to compute the hookData used by       * If there's a call hook, invoke it to compute the hookData used by
# Line 2870  Line 3778 
3778       */       */
3779      JSInterpreterHook hook = cx->debugHooks->callHook;      JSInterpreterHook hook = cx->debugHooks->callHook;
3780      if (hook) {      if (hook) {
3781          newifp->hookData = hook(cx, &newifp->frame, JS_TRUE, 0,          newifp->hookData = hook(cx, fp, JS_TRUE, 0, cx->debugHooks->callHookData);
                                 cx->debugHooks->callHookData);  
3782      } else {      } else {
3783          newifp->hookData = NULL;          newifp->hookData = NULL;
3784      }      }
# Line 2879  Line 3786 
3786      // FIXME? we must count stack slots from caller's operand stack up to (but not including)      // FIXME? we must count stack slots from caller's operand stack up to (but not including)
3787      // callee's, including missing arguments. Could we shift everything down to the caller's      // callee's, including missing arguments. Could we shift everything down to the caller's
3788      // fp->slots (where vars start) and avoid some of the complexity?      // fp->slots (where vars start) and avoid some of the complexity?
3789      return (fi.s.spdist - cx->fp->down->script->nfixed) +      return (fi.spdist - fp->down->script->nfixed) +
3790             ((fun->nargs > cx->fp->argc) ? fun->nargs - cx->fp->argc : 0) +             ((fun->nargs > fp->argc) ? fun->nargs - fp->argc : 0) +
3791             script->nfixed;             script->nfixed;
3792  }  }
3793    
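The value returned by js_SynthesizeFrame is how many native-stack slots the synthesized callee adds on top of the caller's fixed slots: the caller's operand-stack depth above its vars, padding for any missing arguments, and the callee's own fixed slots. A worked example with made-up numbers:

    #include <cassert>

    // Hypothetical numbers plugged into the formula returned above.
    int main()
    {
        unsigned spdist = 9, callerNfixed = 3;   // caller sp is 9 slots past fp->slots, 3 of them vars
        unsigned nargs = 4, argc = 2;            // callee declares 4 formals, only 2 were pushed
        unsigned calleeNfixed = 5;               // callee's own fixed (var) slots

        unsigned growth = (spdist - callerNfixed)
                        + (nargs > argc ? nargs - argc : 0)
                        + calleeNfixed;
        assert(growth == 13);                    // (9 - 3) + (4 - 2) + 5
        return 0;
    }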
3794  bool  static void
3795  js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer, unsigned* demotes)  SynthesizeSlowNativeFrame(JSContext *cx, VMSideExit *exit)
3796  {  {
3797      JS_ASSERT(cx->fp->regs->pc == f->ip && f->root == f);      void *mark;
3798            JSInlineFrame *ifp;
3799      /* Avoid recording loops in overlarge scripts. */  
3800      if (cx->fp->script->length >= SCRIPT_PC_ADJ_LIMIT) {      /* This allocation is infallible: js_ExecuteTree reserved enough stack. */
3801          js_AbortRecording(cx, "script too large");      mark = JS_ARENA_MARK(&cx->stackPool);
3802          return false;      JS_ARENA_ALLOCATE_CAST(ifp, JSInlineFrame *, &cx->stackPool, sizeof(JSInlineFrame));
3803      }      JS_ASSERT(ifp);
3804    
3805        JSStackFrame *fp = &ifp->frame;
3806        fp->regs = NULL;
3807        fp->imacpc = NULL;
3808        fp->slots = NULL;
3809        fp->callobj = NULL;
3810        fp->argsobj = NULL;
3811        fp->varobj = cx->fp->varobj;
3812        fp->callee = exit->nativeCallee();
3813        fp->script = NULL;
3814        fp->fun = GET_FUNCTION_PRIVATE(cx, fp->callee);
3815        // fp->thisp is really a jsval, so reinterpret_cast here, not JSVAL_TO_OBJECT.
3816        fp->thisp = (JSObject *) cx->nativeVp[1];
3817        fp->argc = cx->nativeVpLen - 2;
3818        fp->argv = cx->