Contents of /trunk/js/jstracer.h
Revision 585 - Sun Sep 12 15:13:23 2010 UTC by siliconforks
File MIME type: text/plain
File size: 51367 byte(s)
Update to SpiderMonkey from Firefox 3.6.9.

/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=4 sw=4 et tw=99 ft=cpp:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
 * May 28, 2008.
 *
 * The Initial Developer of the Original Code is
 *   Brendan Eich <brendan@mozilla.org>
 *
 * Contributor(s):
 *   Andreas Gal <gal@mozilla.com>
 *   Mike Shaver <shaver@mozilla.org>
 *   David Anderson <danderson@mozilla.com>
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

#ifndef jstracer_h___
#define jstracer_h___

#ifdef JS_TRACER

#include "jstypes.h"
#include "jsbuiltins.h"
#include "jscntxt.h"
#include "jsinterp.h"
#include "jslock.h"
#include "jsnum.h"
#include "jsvector.h"

#if defined(DEBUG) && !defined(JS_JIT_SPEW)
#define JS_JIT_SPEW
#endif

template <typename T>
class Queue {
    T* _data;
    unsigned _len;
    unsigned _max;
    nanojit::Allocator* alloc;

public:
    void ensure(unsigned size) {
        if (!_max)
            _max = 16;
        while (_max < size)
            _max <<= 1;
        if (alloc) {
            T* tmp = new (*alloc) T[_max];
            memcpy(tmp, _data, _len * sizeof(T));
            _data = tmp;
        } else {
            _data = (T*)realloc(_data, _max * sizeof(T));
        }
#if defined(DEBUG)
        memset(&_data[_len], 0xcd, (_max - _len) * sizeof(T)); /* poison unused capacity; scale by element size */
#endif
    }

    Queue(nanojit::Allocator* alloc)
        : alloc(alloc)
    {
        this->_max =
        this->_len = 0;
        this->_data = NULL;
    }

    ~Queue() {
        if (!alloc)
            free(_data);
    }

    bool contains(T a) {
        for (unsigned n = 0; n < _len; ++n) {
            if (_data[n] == a)
                return true;
        }
        return false;
    }

    void add(T a) {
        ensure(_len + 1);
        JS_ASSERT(_len <= _max);
        _data[_len++] = a;
    }

    void add(T* chunk, unsigned size) {
        ensure(_len + size);
        JS_ASSERT(_len <= _max);
        memcpy(&_data[_len], chunk, size * sizeof(T));
        _len += size;
    }

    void addUnique(T a) {
        if (!contains(a))
            add(a);
    }

    void setLength(unsigned len) {
        ensure(len + 1);
        _len = len;
    }

    void clear() {
        _len = 0;
    }

    T & get(unsigned i) {
        JS_ASSERT(i < length());
        return _data[i];
    }

    const T & get(unsigned i) const {
        JS_ASSERT(i < length());
        return _data[i];
    }

    T & operator [](unsigned i) {
        return get(i);
    }

    const T & operator [](unsigned i) const {
        return get(i);
    }

    unsigned length() const {
        return _len;
    }

    T* data() const {
        return _data;
    }
};
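
/*
 * Illustrative sketch (not part of the original header): how Queue is
 * typically used. ensure() grows capacity geometrically (doubling _max), so
 * a run of add() calls is amortized O(1) per element. The function and
 * allocator names below are hypothetical.
 */
#if 0
static void
QueueExampleUsage(nanojit::Allocator* someAlloc)
{
    Queue<uint16> slots(someAlloc);  // allocator-backed; memory freed with the allocator
    slots.add(3);                    // first add grows capacity to 16
    slots.addUnique(3);              // no-op: 3 is already present
    slots.addUnique(7);
    JS_ASSERT(slots.length() == 2);
    JS_ASSERT(slots[0] == 3 && slots[1] == 7);
}
#endif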

/*
 * Tracker is used to keep track of values being manipulated by the interpreter
 * during trace recording. Note that tracker pages aren't necessarily the same
 * size as OS pages; they are just moderate-sized chunks of memory.
 */
class Tracker {
    struct TrackerPage {
        struct TrackerPage* next;
        jsuword base;
        nanojit::LIns* map[1];
    };
    struct TrackerPage* pagelist;

    jsuword getTrackerPageBase(const void* v) const;
    struct TrackerPage* findTrackerPage(const void* v) const;
    struct TrackerPage* addTrackerPage(const void* v);
public:
    Tracker();
    ~Tracker();

    bool has(const void* v) const;
    nanojit::LIns* get(const void* v) const;
    void set(const void* v, nanojit::LIns* ins);
    void clear();
};
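
/*
 * Illustrative sketch (not part of the original header): the recorder uses a
 * Tracker to map the address of an interpreter value (e.g. a stack slot) to
 * the LIR instruction that currently holds it. The names below are
 * hypothetical.
 */
#if 0
static void
TrackerExampleUsage(Tracker& tracker, jsval* slot, nanojit::LIns* ins)
{
    tracker.set(slot, ins);                      // remember which LIns computes *slot
    if (tracker.has(slot)) {
        nanojit::LIns* cur = tracker.get(slot);  // fetch it back later
        (void) cur;
    }
}
#endif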

#if defined(JS_JIT_SPEW) || defined(MOZ_NO_VARADIC_MACROS)

enum LC_TMBits {
    /*
     * Output control bits for all non-Nanojit code. Only use bits 16 and
     * above, since Nanojit uses 0 .. 15 itself.
     */
    LC_TMMinimal  = 1<<16,
    LC_TMTracer   = 1<<17,
    LC_TMRecorder = 1<<18,
    LC_TMAbort    = 1<<19,
    LC_TMStats    = 1<<20,
    LC_TMRegexp   = 1<<21,
    LC_TMTreeVis  = 1<<22
};

#endif

#ifdef MOZ_NO_VARADIC_MACROS

#define debug_only_stmt(action)            /* */
static void debug_only_printf(int mask, const char *fmt, ...) {}
#define debug_only_print0(mask, str)       /* */

#elif defined(JS_JIT_SPEW)

// Top-level logging controller object.
extern nanojit::LogControl js_LogController;

// Top-level profiling hook, needed to harvest profile info from Fragments
// whose logical lifetime is about to finish.
extern void js_FragProfiling_FragFinalizer(nanojit::Fragment* f, JSTraceMonitor*);

#define debug_only_stmt(stmt) \
    stmt

#define debug_only_printf(mask, fmt, ...)                                     \
    JS_BEGIN_MACRO                                                            \
        if ((js_LogController.lcbits & (mask)) > 0) {                         \
            js_LogController.printf(fmt, __VA_ARGS__);                        \
            fflush(stdout);                                                   \
        }                                                                     \
    JS_END_MACRO

#define debug_only_print0(mask, str)                                          \
    JS_BEGIN_MACRO                                                            \
        if ((js_LogController.lcbits & (mask)) > 0) {                         \
            js_LogController.printf("%s", str);                               \
            fflush(stdout);                                                   \
        }                                                                     \
    JS_END_MACRO

#else

#define debug_only_stmt(action)            /* */
#define debug_only_printf(mask, fmt, ...)  /* */
#define debug_only_print0(mask, str)       /* */

#endif
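
/*
 * Illustrative sketch (not part of the original header): how the spew macros
 * are meant to be used. Output appears only when the corresponding LC_TM*
 * bit is set in js_LogController.lcbits. The |slot| and |type| variables are
 * hypothetical locals of the surrounding recording code.
 */
#if 0
debug_only_print0(LC_TMTracer, "entering trace\n");
debug_only_printf(LC_TMRecorder, "slot %u has type %d\n", slot, type);
debug_only_stmt( unsigned recordedOps = 0; )   // compiled out unless spewing
#endif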

/*
 * The oracle keeps track of hit counts for program counter locations, as
 * well as slots that should not be demoted to int because we know them to
 * overflow or to result in type-unstable traces. We use simple hash tables.
 * Collisions lead to loss of optimization (demotable slots are not demoted,
 * etc.) but have no correctness implications.
 */
#define ORACLE_SIZE 4096

class Oracle {
    avmplus::BitSet _stackDontDemote;
    avmplus::BitSet _globalDontDemote;
    avmplus::BitSet _pcDontDemote;
public:
    Oracle();

    JS_REQUIRES_STACK void markGlobalSlotUndemotable(JSContext* cx, unsigned slot);
    JS_REQUIRES_STACK bool isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const;
    JS_REQUIRES_STACK void markStackSlotUndemotable(JSContext* cx, unsigned slot);
    JS_REQUIRES_STACK bool isStackSlotUndemotable(JSContext* cx, unsigned slot) const;
    void markInstructionUndemotable(jsbytecode* pc);
    bool isInstructionUndemotable(jsbytecode* pc) const;

    void clearDemotability();
    void clear() {
        clearDemotability();
    }
};
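
/*
 * Illustrative sketch (not part of the original header): the mark/is pairs
 * above are used as below. Because keys hash into fixed ORACLE_SIZE bit sets,
 * two keys may collide; a collision only makes an extra slot look
 * undemotable, which costs speed, not correctness. The function name is
 * hypothetical.
 */
#if 0
static void
OracleExampleUsage(Oracle& oracle, jsbytecode* pc)
{
    // Record that the instruction at pc overflowed when demoted to int...
    oracle.markInstructionUndemotable(pc);
    // ...so later recordings of the same (or a colliding) pc use doubles.
    if (oracle.isInstructionUndemotable(pc)) {
        /* emit the slower, overflow-safe double arithmetic */
    }
}
#endif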

#if defined(_MSC_VER) && _MSC_VER >= 1400 || (defined(__GNUC__) && __GNUC__ >= 4)
#define USE_TRACE_TYPE_ENUM
#endif

/*
 * The types of values calculated during tracing, used to specialize operations
 * to the types of those values. These loosely correspond to the values of the
 * JSVAL_* language types, but we add a few further divisions to enable further
 * optimization at execution time. Do not rely on this loose correspondence for
 * correctness without adding static assertions!
 *
 * The ifdefs enforce that this enum occupies only one byte of memory, where
 * possible. If it doesn't, type maps will occupy more space but should
 * otherwise work correctly. A static assertion in jstracer.cpp verifies that
 * this requirement is correctly enforced by these compilers.
 */
enum JSTraceType_
#if defined(_MSC_VER) && _MSC_VER >= 1400
: int8_t
#endif
{
    TT_OBJECT        = 0, /* pointer to JSObject whose class is not js_FunctionClass */
    TT_INT32         = 1, /* 32-bit signed integer */
    TT_DOUBLE        = 2, /* pointer to jsdouble */
    TT_JSVAL         = 3, /* arbitrary jsval */
    TT_STRING        = 4, /* pointer to JSString */
    TT_NULL          = 5, /* null */
    TT_PSEUDOBOOLEAN = 6, /* true, false, or undefined (0, 1, or 2) */
    TT_FUNCTION      = 7  /* pointer to JSObject whose class is js_FunctionClass */
}
#if defined(__GNUC__) && defined(USE_TRACE_TYPE_ENUM)
__attribute__((packed))
#endif
;

#ifdef USE_TRACE_TYPE_ENUM
typedef JSTraceType_ JSTraceType;
#else
typedef int8_t JSTraceType;
#endif
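
/*
 * Illustrative sketch (not part of the original header): the one-byte
 * requirement described above can be checked at compile time; the comment
 * says jstracer.cpp carries such an assertion for compilers that support
 * the packing.
 */
#if 0
JS_STATIC_ASSERT(sizeof(JSTraceType) == 1);
#endif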

/*
 * This indicates an invalid type or error. Note that it should not be used in
 * typemaps, because it is the wrong size. It can only be used as a uint32, for
 * example as the return value from a function that returns a type as a uint32.
 */
const uint32 TT_INVALID = uint32(-1);

typedef Queue<uint16> SlotList;

class TypeMap : public Queue<JSTraceType> {
public:
    TypeMap(nanojit::Allocator* alloc) : Queue<JSTraceType>(alloc) {}
    JS_REQUIRES_STACK void captureTypes(JSContext* cx, JSObject* globalObj, SlotList& slots, unsigned callDepth);
    JS_REQUIRES_STACK void captureMissingGlobalTypes(JSContext* cx, JSObject* globalObj, SlotList& slots,
                                                     unsigned stackSlots);
    bool matches(TypeMap& other) const;
    void fromRaw(JSTraceType* other, unsigned numSlots);
};

#define JS_TM_EXITCODES(_)                                                    \
    /*                                                                        \
     * An exit at a possible branch-point in the trace at which to attach a  \
     * future secondary trace. Therefore the recorder must generate different \
     * code to handle the other outcome of the branch condition from the     \
     * primary trace's outcome.                                              \
     */                                                                       \
    _(BRANCH)                                                                 \
    /*                                                                        \
     * Exit at a tableswitch via a numbered case.                             \
     */                                                                       \
    _(CASE)                                                                   \
    /*                                                                        \
     * Exit at a tableswitch via the default case.                           \
     */                                                                       \
    _(DEFAULT)                                                                \
    _(LOOP)                                                                   \
    _(NESTED)                                                                 \
    /*                                                                        \
     * An exit from a trace because a condition relied upon at recording time \
     * no longer holds, where the alternate path of execution is so rare or  \
     * difficult to address in native code that it is not traced at all, e.g. \
     * negative array index accesses, which differ from positive indexes in  \
     * that they require a string-based property lookup rather than a simple \
     * memory access.                                                         \
     */                                                                       \
    _(MISMATCH)                                                               \
    /*                                                                        \
     * A specialization of MISMATCH_EXIT to handle allocation failures.      \
     */                                                                       \
    _(OOM)                                                                    \
    _(OVERFLOW)                                                               \
    _(UNSTABLE_LOOP)                                                          \
    _(TIMEOUT)                                                                \
    _(DEEP_BAIL)                                                              \
    _(STATUS)

enum ExitType {
#define MAKE_EXIT_CODE(x) x##_EXIT,
    JS_TM_EXITCODES(MAKE_EXIT_CODE)
#undef MAKE_EXIT_CODE
    TOTAL_EXIT_TYPES
};
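
/*
 * Illustrative sketch (not part of the original header): JS_TM_EXITCODES is
 * an X-macro, so the same list can be expanded more than once to keep
 * parallel tables in sync. ExitType above pastes "_EXIT" onto each name; a
 * debug name table can reuse the list the same way.
 */
#if 0
static const char* const exitNames[] = {
#define MAKE_EXIT_STRING(x) #x "_EXIT",
    JS_TM_EXITCODES(MAKE_EXIT_STRING)
#undef MAKE_EXIT_STRING
    NULL
};
#endif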

struct VMSideExit : public nanojit::SideExit
{
    JSObject* block;
    jsbytecode* pc;
    jsbytecode* imacpc;
    intptr_t sp_adj;
    intptr_t rp_adj;
    int32_t calldepth;
    uint32 numGlobalSlots;
    uint32 numStackSlots;
    uint32 numStackSlotsBelowCurrentFrame;
    ExitType exitType;
    uintN lookupFlags;

    /*
     * Ordinarily 0. If a slow native function is atop the stack, the 1 bit is
     * set if constructing and the other bits are a pointer to the funobj.
     */
    uintptr_t nativeCalleeWord;

    JSObject * nativeCallee() {
        return (JSObject *) (nativeCalleeWord & ~1);
    }

    bool constructing() {
        return bool(nativeCalleeWord & 1);
    }

    void setNativeCallee(JSObject *callee, bool constructing) {
        nativeCalleeWord = uintptr_t(callee) | (constructing ? 1 : 0);
    }

    inline JSTraceType* stackTypeMap() {
        return (JSTraceType*)(this + 1);
    }

    inline JSTraceType* globalTypeMap() {
        return (JSTraceType*)(this + 1) + this->numStackSlots;
    }

    inline JSTraceType* fullTypeMap() {
        return stackTypeMap();
    }

    inline VMFragment* root() {
        return (VMFragment*)from->root;
    }
};
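
/*
 * Illustrative sketch (not part of the original header): a VMSideExit is
 * allocated with its type map contiguously after it, which is what the
 * (this + 1) arithmetic above relies on. The layout is numStackSlots stack
 * types followed by numGlobalSlots global types. The function name is
 * hypothetical.
 */
#if 0
static void
WalkExitTypeMap(VMSideExit* exit)
{
    JSTraceType* types = exit->fullTypeMap();   // same address as stackTypeMap()
    for (unsigned i = 0; i < exit->numStackSlots + exit->numGlobalSlots; ++i)
        JS_ASSERT(types[i] <= TT_FUNCTION);     // every entry is a valid type
}
#endif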

class VMAllocator : public nanojit::Allocator
{

public:
    VMAllocator() : mOutOfMemory(false), mSize(0)
    {}

    size_t size() {
        return mSize;
    }

    bool outOfMemory() {
        return mOutOfMemory;
    }

    bool mOutOfMemory;
    size_t mSize;

    /*
     * FIXME: This is the area the LIR spills into if we encounter an OOM
     * midway through compilation; we must check mOutOfMemory before we run
     * out of mReserve, otherwise we're in undefined territory. This area
     * used to be one page, now 16 to be "safer". This is a temporary and
     * quite unsatisfactory approach to handling OOM in Nanojit.
     */
    uintptr_t mReserve[0x10000];
};


struct REHashKey {
    size_t re_length;
    uint16 re_flags;
    const jschar* re_chars;

    REHashKey(size_t re_length, uint16 re_flags, const jschar *re_chars)
        : re_length(re_length)
        , re_flags(re_flags)
        , re_chars(re_chars)
    {}

    bool operator==(const REHashKey& other) const
    {
        return ((this->re_length == other.re_length) &&
                (this->re_flags == other.re_flags) &&
                !memcmp(this->re_chars, other.re_chars,
                        this->re_length * sizeof(jschar)));
    }
};

struct REHashFn {
    static size_t hash(const REHashKey& k) {
        return
            k.re_length +
            k.re_flags +
            nanojit::murmurhash(k.re_chars, k.re_length * sizeof(jschar));
    }
};

struct FrameInfo {
    JSObject* block;    // caller block chain head
    jsbytecode* pc;     // caller fp->regs->pc
    jsbytecode* imacpc; // caller fp->imacpc
    uint32 spdist;      // distance from fp->slots to fp->regs->sp at JSOP_CALL

    /*
     * Bit 16 (0x10000) is a flag that is set if constructing (called through new).
     * Bits 0-15 are the actual argument count. This may be less than fun->nargs.
     * NB: This is argc for the callee, not the caller.
     */
    uint32 argc;

    /*
     * Number of stack slots in the caller, not counting slots pushed when
     * invoking the callee. That is, slots after JSOP_CALL completes but
     * without the return value. This is also equal to the number of slots
     * between fp->down->argv[-2] (calleR fp->callee) and fp->argv[-2]
     * (calleE fp->callee).
     */
    uint32 callerHeight;

    /* argc of the caller */
    uint32 callerArgc;

    // Safer accessors for argc.
    enum { CONSTRUCTING_FLAG = 0x10000 };
    void set_argc(uint16 argc, bool constructing) {
        this->argc = uint32(argc) | (constructing ? CONSTRUCTING_FLAG : 0);
    }
    uint16 get_argc() const { return argc & ~CONSTRUCTING_FLAG; }
    bool is_constructing() const { return (argc & CONSTRUCTING_FLAG) != 0; }

    // The typemap just before the callee is called.
    JSTraceType* get_typemap() { return (JSTraceType*) (this+1); }
};
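
/*
 * Illustrative sketch (not part of the original header): round-tripping the
 * packed argc field. CONSTRUCTING_FLAG (0x10000) sits above the 16 bits of
 * argument count, so the two never collide. The function name is
 * hypothetical.
 */
#if 0
static void
FrameInfoArgcExample(FrameInfo* fi)
{
    fi->set_argc(2, /* constructing = */ true);
    JS_ASSERT(fi->argc == (2 | FrameInfo::CONSTRUCTING_FLAG));
    JS_ASSERT(fi->get_argc() == 2);
    JS_ASSERT(fi->is_constructing());
}
#endif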

struct UnstableExit
{
    nanojit::Fragment* fragment;
    VMSideExit* exit;
    UnstableExit* next;
};

class TreeInfo {
public:
    nanojit::Fragment* const fragment;
    JSScript* script;
    unsigned maxNativeStackSlots;
    ptrdiff_t nativeStackBase;
    unsigned maxCallDepth;
    TypeMap typeMap;
    unsigned nStackTypes;
    SlotList* globalSlots;
    /* Dependent trees must be trashed if this tree dies, and updated on missing global types */
    Queue<nanojit::Fragment*> dependentTrees;
    /* Linked trees must be updated on missing global types, but are not dependent */
    Queue<nanojit::Fragment*> linkedTrees;
    unsigned branchCount;
    Queue<VMSideExit*> sideExits;
    UnstableExit* unstableExits;
    /* All embedded GC things are registered here so the GC can scan them. */
    Queue<jsval> gcthings;
    Queue<JSScopeProperty*> sprops;
#ifdef DEBUG
    const char* treeFileName;
    uintN treeLineNumber;
    uintN treePCOffset;
#endif

    TreeInfo(nanojit::Allocator* alloc,
             nanojit::Fragment* _fragment,
             SlotList* _globalSlots)
        : fragment(_fragment),
          script(NULL),
          maxNativeStackSlots(0),
          nativeStackBase(0),
          maxCallDepth(0),
          typeMap(alloc),
          nStackTypes(0),
          globalSlots(_globalSlots),
          dependentTrees(alloc),
          linkedTrees(alloc),
          branchCount(0),
          sideExits(alloc),
          unstableExits(NULL),
          gcthings(alloc),
          sprops(alloc)
    {}

    inline unsigned nGlobalTypes() {
        return typeMap.length() - nStackTypes;
    }
    inline JSTraceType* globalTypeMap() {
        return typeMap.data() + nStackTypes;
    }
    inline JSTraceType* stackTypeMap() {
        return typeMap.data();
    }

    UnstableExit* removeUnstableExit(VMSideExit* exit);
};
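
/*
 * Illustrative sketch (not part of the original header): typeMap stores the
 * nStackTypes stack types first and the global types after them, which is
 * exactly what the accessors above encode. The function name is hypothetical.
 */
#if 0
static void
TreeInfoTypeMapLayout(TreeInfo* ti)
{
    JS_ASSERT(ti->typeMap.length() == ti->nStackTypes + ti->nGlobalTypes());
    JS_ASSERT(ti->globalTypeMap() == ti->stackTypeMap() + ti->nStackTypes);
}
#endif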

#if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__)))
# define EXECUTE_TREE_TIMER
#endif

typedef enum JSBuiltinStatus {
    JSBUILTIN_BAILED = 1,
    JSBUILTIN_ERROR = 2
} JSBuiltinStatus;

struct InterpState
{
    double *sp;                         // native stack pointer, stack[0] is spbase[0]
    FrameInfo** rp;                     // call stack pointer
    JSContext *cx;                      // current VM context handle
    double *eos;                        // first unusable word after the native stack
    void *eor;                          // first unusable word after the call stack
    VMSideExit* lastTreeExitGuard;      // guard we exited on during a tree call
    VMSideExit* lastTreeCallGuard;      // guard we want to grow from if the tree
                                        // call exit guard mismatched
    void* rpAtLastTreeCall;             // value of rp at innermost tree call guard
    TreeInfo* outermostTree;            // the outermost tree we initially invoked
    double* stackBase;                  // native stack base
    FrameInfo** callstackBase;          // call stack base
    uintN* inlineCallCountp;            // inline call count counter
    VMSideExit** innermostNestedGuardp;
    void* stackMark;
    VMSideExit* innermost;
#ifdef EXECUTE_TREE_TIMER
    uint64 startTime;
#endif
    InterpState* prev;

    // Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
    // JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
    // if an error or exception occurred.
    uint32 builtinStatus;

    // Used to communicate the location of the return value in case of a deep bail.
    double* deepBailSp;

    // Used when calling natives from trace to root the vp vector.
    uintN nativeVpLen;
    jsval *nativeVp;
};

// Arguments objects created on trace have a private value that points to an
// instance of this struct. The struct includes a typemap that is allocated
// as part of the object.
struct js_ArgsPrivateNative {
    double *argv;

    static js_ArgsPrivateNative *create(VMAllocator &alloc, unsigned argc)
    {
        return (js_ArgsPrivateNative*) new (alloc) char[sizeof(js_ArgsPrivateNative) + argc];
    }

    JSTraceType *typemap()
    {
        return (JSTraceType*) (this+1);
    }
};
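
/*
 * Illustrative sketch (not part of the original header): create() uses the
 * same trailing-allocation trick as VMSideExit, placing one JSTraceType per
 * argument directly after the struct, where typemap() finds it. The function
 * name is hypothetical.
 */
#if 0
static void
ArgsPrivateNativeExample(VMAllocator& alloc)
{
    js_ArgsPrivateNative* apn = js_ArgsPrivateNative::create(alloc, 2);
    apn->typemap()[0] = TT_INT32;    // types of the two trailing slots
    apn->typemap()[1] = TT_STRING;
}
#endif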

static JS_INLINE void
js_SetBuiltinError(JSContext *cx)
{
    cx->interpState->builtinStatus |= JSBUILTIN_ERROR;
}

#ifdef DEBUG_JSRS_NOT_BOOL
struct JSRecordingStatus {
    int code;
    bool operator==(JSRecordingStatus &s) { return this->code == s.code; }
    bool operator!=(JSRecordingStatus &s) { return this->code != s.code; }
};
enum JSRScodes {
    JSRS_ERROR_code,
    JSRS_STOP_code,
    JSRS_CONTINUE_code,
    JSRS_IMACRO_code
};
struct JSRecordingStatus JSRS_CONTINUE = { JSRS_CONTINUE_code };
struct JSRecordingStatus JSRS_STOP     = { JSRS_STOP_code };
struct JSRecordingStatus JSRS_IMACRO   = { JSRS_IMACRO_code };
struct JSRecordingStatus JSRS_ERROR    = { JSRS_ERROR_code };
#define STATUS_ABORTS_RECORDING(s) ((s) == JSRS_STOP || (s) == JSRS_ERROR)
#else
enum JSRecordingStatus {
    JSRS_ERROR,        // Error; propagate to interpreter.
    JSRS_STOP,         // Abort recording.
    JSRS_CONTINUE,     // Continue recording.
    JSRS_IMACRO        // Entered imacro; continue recording.
                       // Only JSOP_IS_IMACOP opcodes may return this.
};
#define STATUS_ABORTS_RECORDING(s) ((s) <= JSRS_STOP)
#endif
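
/*
 * Illustrative sketch (not part of the original header): in the non-debug
 * build, STATUS_ABORTS_RECORDING relies on the enum order, since JSRS_ERROR
 * (0) and JSRS_STOP (1) are the only values <= JSRS_STOP.
 */
#if 0
JS_STATIC_ASSERT(JSRS_ERROR < JSRS_STOP && JSRS_STOP < JSRS_CONTINUE);
#endif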

class SlotMap;

/* Results of trying to compare two typemaps together */
enum TypeConsensus
{
    TypeConsensus_Okay,      /* Two typemaps are compatible */
    TypeConsensus_Undemotes, /* Not compatible now, but would be with pending undemotes. */
    TypeConsensus_Bad        /* Typemaps are not compatible */
};

class TraceRecorder {
    VMAllocator& tempAlloc;
    JSContext* cx;
    JSTraceMonitor* traceMonitor;
    JSObject* globalObj;
    JSObject* lexicalBlock;
    Tracker tracker;
    Tracker nativeFrameTracker;
    unsigned callDepth;
    JSAtom** atoms;
    VMSideExit* anchor;
    nanojit::Fragment* fragment;
    TreeInfo* treeInfo;
    nanojit::LirBuffer* lirbuf;
    nanojit::LirWriter* lir;
    nanojit::LirBufWriter* lir_buf_writer;
    nanojit::LirWriter* verbose_filter;
    nanojit::LirWriter* cse_filter;
    nanojit::LirWriter* expr_filter;
    nanojit::LirWriter* func_filter;
    nanojit::LirWriter* float_filter;
#ifdef DEBUG
    nanojit::LirWriter* sanity_filter_1;
    nanojit::LirWriter* sanity_filter_2;
#endif
    nanojit::LIns* cx_ins;
    nanojit::LIns* eos_ins;
    nanojit::LIns* eor_ins;
    nanojit::LIns* rval_ins;
    nanojit::LIns* inner_sp_ins;
    nanojit::LIns* native_rval_ins;
    nanojit::LIns* newobj_ins;
    bool trashSelf;
    Queue<nanojit::Fragment*> whichTreesToTrash;
    Queue<jsbytecode*> cfgMerges;
    jsval* global_dslots;
    JSSpecializedNative generatedSpecializedNative;
    JSSpecializedNative* pendingSpecializedNative;
    jsval* pendingUnboxSlot;
    nanojit::LIns* pendingGuardCondition;
    jsbytecode* outer;     /* outer trace header PC */
    uint32 outerArgc;      /* outer trace deepest frame argc */
    bool loop;
    nanojit::LIns* loopLabel;
    js::Vector<JSTraceType, 256> tempTypeMap;

    nanojit::LIns* insImmVal(jsval val);
    nanojit::LIns* insImmObj(JSObject* obj);
    nanojit::LIns* insImmFun(JSFunction* fun);
    nanojit::LIns* insImmStr(JSString* str);
    nanojit::LIns* insImmSprop(JSScopeProperty* sprop);
    nanojit::LIns* p2i(nanojit::LIns* ins);

    bool isGlobal(jsval* p) const;
    ptrdiff_t nativeGlobalOffset(jsval* p) const;
    JS_REQUIRES_STACK ptrdiff_t nativeStackOffset(jsval* p) const;
    JS_REQUIRES_STACK void import(nanojit::LIns* base, ptrdiff_t offset, jsval* p, JSTraceType t,
                                  const char *prefix, uintN index, JSStackFrame *fp);
    JS_REQUIRES_STACK void import(TreeInfo* treeInfo, nanojit::LIns* sp, unsigned stackSlots,
                                  unsigned callDepth, unsigned ngslots, JSTraceType* typeMap);
    void trackNativeStackUse(unsigned slots);

    JS_REQUIRES_STACK bool isValidSlot(JSScope* scope, JSScopeProperty* sprop);
    JS_REQUIRES_STACK bool lazilyImportGlobalSlot(unsigned slot);

    JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, ExitType exitType);
    JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, VMSideExit* exit);

    nanojit::LIns* addName(nanojit::LIns* ins, const char* name);

    nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset,
                             bool demote);
    JS_REQUIRES_STACK void set(jsval* p, nanojit::LIns* l, bool initializing = false,
                               bool demote = true);
    JS_REQUIRES_STACK nanojit::LIns* get(jsval* p);
    JS_REQUIRES_STACK nanojit::LIns* addr(jsval* p);

    JS_REQUIRES_STACK bool known(jsval* p);
    JS_REQUIRES_STACK void checkForGlobalObjectReallocation();

    JS_REQUIRES_STACK TypeConsensus selfTypeStability(SlotMap& smap);
    JS_REQUIRES_STACK TypeConsensus peerTypeStability(SlotMap& smap, VMFragment** peer);

    JS_REQUIRES_STACK jsval& argval(unsigned n) const;
    JS_REQUIRES_STACK jsval& varval(unsigned n) const;
    JS_REQUIRES_STACK jsval& stackval(int n) const;

    struct NameResult {
        // |tracked| is true iff the result of the name lookup is a variable that
        // is already in the tracker. The rest of the fields are set only if
        // |tracked| is false.
        bool tracked;
        jsval v;                 // current property value
        JSObject *obj;           // Call object where name was found
        nanojit::LIns *obj_ins;  // LIR value for obj
        JSScopeProperty *sprop;  // sprop name was resolved to
    };

    JS_REQUIRES_STACK nanojit::LIns* scopeChain() const;
    JS_REQUIRES_STACK JSStackFrame* frameIfInRange(JSObject* obj, unsigned* depthp = NULL) const;
    JS_REQUIRES_STACK JSRecordingStatus traverseScopeChain(JSObject *obj, nanojit::LIns *obj_ins, JSObject *obj2, nanojit::LIns *&obj2_ins);
    JS_REQUIRES_STACK JSRecordingStatus scopeChainProp(JSObject* obj, jsval*& vp, nanojit::LIns*& ins, NameResult& nr);
    JS_REQUIRES_STACK JSRecordingStatus callProp(JSObject* obj, JSObject* obj2, JSProperty* sprop, jsid id, jsval*& vp, nanojit::LIns*& ins, NameResult& nr);

    JS_REQUIRES_STACK nanojit::LIns* arg(unsigned n);
    JS_REQUIRES_STACK void arg(unsigned n, nanojit::LIns* i);
    JS_REQUIRES_STACK nanojit::LIns* var(unsigned n);
    JS_REQUIRES_STACK void var(unsigned n, nanojit::LIns* i);
    JS_REQUIRES_STACK nanojit::LIns* upvar(JSScript* script, JSUpvarArray* uva, uintN index, jsval& v);
    nanojit::LIns* stackLoad(nanojit::LIns* addr, uint8 type);
    JS_REQUIRES_STACK nanojit::LIns* stack(int n);
    JS_REQUIRES_STACK void stack(int n, nanojit::LIns* i);

    JS_REQUIRES_STACK nanojit::LIns* alu(nanojit::LOpcode op, jsdouble v0, jsdouble v1,
                                         nanojit::LIns* s0, nanojit::LIns* s1);
    nanojit::LIns* f2i(nanojit::LIns* f);
    JS_REQUIRES_STACK nanojit::LIns* makeNumberInt32(nanojit::LIns* f);
    JS_REQUIRES_STACK nanojit::LIns* stringify(jsval& v);

    nanojit::LIns* newArguments();

    JS_REQUIRES_STACK JSRecordingStatus call_imacro(jsbytecode* imacro);

    JS_REQUIRES_STACK JSRecordingStatus ifop();
    JS_REQUIRES_STACK JSRecordingStatus switchop();
#ifdef NANOJIT_IA32
    JS_REQUIRES_STACK JSRecordingStatus tableswitch();
#endif
    JS_REQUIRES_STACK JSRecordingStatus inc(jsval& v, jsint incr, bool pre = true);
    JS_REQUIRES_STACK JSRecordingStatus inc(jsval v, nanojit::LIns*& v_ins, jsint incr,
                                            bool pre = true);
    JS_REQUIRES_STACK JSRecordingStatus incHelper(jsval v, nanojit::LIns* v_ins,
                                                  nanojit::LIns*& v_after, jsint incr);
    JS_REQUIRES_STACK JSRecordingStatus incProp(jsint incr, bool pre = true);
    JS_REQUIRES_STACK JSRecordingStatus incElem(jsint incr, bool pre = true);
    JS_REQUIRES_STACK JSRecordingStatus incName(jsint incr, bool pre = true);

    JS_REQUIRES_STACK void strictEquality(bool equal, bool cmpCase);
    JS_REQUIRES_STACK JSRecordingStatus equality(bool negate, bool tryBranchAfterCond);
    JS_REQUIRES_STACK JSRecordingStatus equalityHelper(jsval l, jsval r,
                                                       nanojit::LIns* l_ins, nanojit::LIns* r_ins,
                                                       bool negate, bool tryBranchAfterCond,
                                                       jsval& rval);
    JS_REQUIRES_STACK JSRecordingStatus relational(nanojit::LOpcode op, bool tryBranchAfterCond);

    JS_REQUIRES_STACK JSRecordingStatus unary(nanojit::LOpcode op);
    JS_REQUIRES_STACK JSRecordingStatus binary(nanojit::LOpcode op);

    void guardShape(nanojit::LIns* obj_ins, JSObject* obj, uint32 shape, const char* guardName,
                    nanojit::LIns* map_ins, VMSideExit* exit);

    inline nanojit::LIns* map(nanojit::LIns *obj_ins);
    JS_REQUIRES_STACK bool map_is_native(JSObjectMap* map, nanojit::LIns* map_ins,
                                         nanojit::LIns*& ops_ins, size_t op_offset = 0);
    JS_REQUIRES_STACK JSRecordingStatus test_property_cache(JSObject* obj, nanojit::LIns* obj_ins,
                                                            JSObject*& obj2, jsuword& pcval);
    JS_REQUIRES_STACK JSRecordingStatus guardNativePropertyOp(JSObject* aobj,
                                                              nanojit::LIns* map_ins);
    JS_REQUIRES_STACK JSRecordingStatus guardPropertyCacheHit(nanojit::LIns* obj_ins,
                                                              nanojit::LIns* map_ins,
                                                              JSObject* aobj,
                                                              JSObject* obj2,
                                                              JSPropCacheEntry* entry,
                                                              jsuword& pcval);

    void stobj_set_fslot(nanojit::LIns *obj_ins, unsigned slot,
                         nanojit::LIns* v_ins);
    void stobj_set_dslot(nanojit::LIns *obj_ins, unsigned slot, nanojit::LIns*& dslots_ins,
                         nanojit::LIns* v_ins);
    void stobj_set_slot(nanojit::LIns* obj_ins, unsigned slot, nanojit::LIns*& dslots_ins,
                        nanojit::LIns* v_ins);

    nanojit::LIns* stobj_get_fslot(nanojit::LIns* obj_ins, unsigned slot);
    nanojit::LIns* stobj_get_dslot(nanojit::LIns* obj_ins, unsigned index,
                                   nanojit::LIns*& dslots_ins);
    nanojit::LIns* stobj_get_slot(nanojit::LIns* obj_ins, unsigned slot,
                                  nanojit::LIns*& dslots_ins);

    nanojit::LIns* stobj_get_private(nanojit::LIns* obj_ins) {
        return stobj_get_fslot(obj_ins, JSSLOT_PRIVATE);
    }

    nanojit::LIns* stobj_get_proto(nanojit::LIns* obj_ins) {
        return stobj_get_fslot(obj_ins, JSSLOT_PROTO);
    }

    nanojit::LIns* stobj_get_parent(nanojit::LIns* obj_ins) {
        return stobj_get_fslot(obj_ins, JSSLOT_PARENT);
    }

    JSRecordingStatus native_get(nanojit::LIns* obj_ins, nanojit::LIns* pobj_ins,
                                 JSScopeProperty* sprop, nanojit::LIns*& dslots_ins,
                                 nanojit::LIns*& v_ins);

    nanojit::LIns* getStringLength(nanojit::LIns* str_ins);

    JS_REQUIRES_STACK JSRecordingStatus name(jsval*& vp, nanojit::LIns*& ins, NameResult& nr);
    JS_REQUIRES_STACK JSRecordingStatus prop(JSObject* obj, nanojit::LIns* obj_ins, uint32 *slotp,
                                             nanojit::LIns** v_insp, jsval* outp);
    JS_REQUIRES_STACK JSRecordingStatus denseArrayElement(jsval& oval, jsval& idx, jsval*& vp,
                                                          nanojit::LIns*& v_ins,
                                                          nanojit::LIns*& addr_ins);
    JS_REQUIRES_STACK JSRecordingStatus getProp(JSObject* obj, nanojit::LIns* obj_ins);
    JS_REQUIRES_STACK JSRecordingStatus getProp(jsval& v);
    JS_REQUIRES_STACK JSRecordingStatus getThis(nanojit::LIns*& this_ins);

    JS_REQUIRES_STACK VMSideExit* enterDeepBailCall();
    JS_REQUIRES_STACK void leaveDeepBailCall();

    JS_REQUIRES_STACK JSRecordingStatus primitiveToStringInPlace(jsval* vp);
    JS_REQUIRES_STACK void finishGetProp(nanojit::LIns* obj_ins, nanojit::LIns* vp_ins,
                                         nanojit::LIns* ok_ins, jsval* outp);
    JS_REQUIRES_STACK JSRecordingStatus getPropertyByName(nanojit::LIns* obj_ins, jsval* idvalp,
                                                          jsval* outp);
    JS_REQUIRES_STACK JSRecordingStatus getPropertyByIndex(nanojit::LIns* obj_ins,
                                                           nanojit::LIns* index_ins, jsval* outp);
    JS_REQUIRES_STACK JSRecordingStatus getPropertyById(nanojit::LIns* obj_ins, jsval* outp);
    JS_REQUIRES_STACK JSRecordingStatus getPropertyWithNativeGetter(nanojit::LIns* obj_ins,
                                                                    JSScopeProperty* sprop,
                                                                    jsval* outp);

    JS_REQUIRES_STACK JSRecordingStatus nativeSet(JSObject* obj, nanojit::LIns* obj_ins,
                                                  JSScopeProperty* sprop,
                                                  jsval v, nanojit::LIns* v_ins);
    JS_REQUIRES_STACK JSRecordingStatus setProp(jsval &l, JSPropCacheEntry* entry,
                                                JSScopeProperty* sprop,
                                                jsval &v, nanojit::LIns*& v_ins);
    JS_REQUIRES_STACK JSRecordingStatus setCallProp(JSObject *callobj, nanojit::LIns *callobj_ins,
                                                    JSScopeProperty *sprop, nanojit::LIns *v_ins,
                                                    jsval v);
    JS_REQUIRES_STACK JSRecordingStatus initOrSetPropertyByName(nanojit::LIns* obj_ins,
                                                                jsval* idvalp, jsval* rvalp,
                                                                bool init);
    JS_REQUIRES_STACK JSRecordingStatus initOrSetPropertyByIndex(nanojit::LIns* obj_ins,
                                                                 nanojit::LIns* index_ins,
                                                                 jsval* rvalp, bool init);

    JS_REQUIRES_STACK nanojit::LIns* box_jsval(jsval v, nanojit::LIns* v_ins);
    JS_REQUIRES_STACK nanojit::LIns* unbox_jsval(jsval v, nanojit::LIns* v_ins, VMSideExit* exit);
    JS_REQUIRES_STACK bool guardClass(JSObject* obj, nanojit::LIns* obj_ins, JSClass* clasp,
                                      VMSideExit* exit);
    JS_REQUIRES_STACK bool guardDenseArray(JSObject* obj, nanojit::LIns* obj_ins,
                                           ExitType exitType = MISMATCH_EXIT);
    JS_REQUIRES_STACK bool guardDenseArray(JSObject* obj, nanojit::LIns* obj_ins,
                                           VMSideExit* exit);
    JS_REQUIRES_STACK bool guardHasPrototype(JSObject* obj, nanojit::LIns* obj_ins,
                                             JSObject** pobj, nanojit::LIns** pobj_ins,
                                             VMSideExit* exit);
    JS_REQUIRES_STACK JSRecordingStatus guardPrototypeHasNoIndexedProperties(JSObject* obj,
                                                                             nanojit::LIns* obj_ins,
                                                                             ExitType exitType);
    JS_REQUIRES_STACK JSRecordingStatus guardNotGlobalObject(JSObject* obj,
                                                             nanojit::LIns* obj_ins);
    void clearFrameSlotsFromCache();
    JS_REQUIRES_STACK void putArguments();
    JS_REQUIRES_STACK JSRecordingStatus guardCallee(jsval& callee);
    JS_REQUIRES_STACK JSStackFrame *guardArguments(JSObject *obj, nanojit::LIns* obj_ins,
                                                   unsigned *depthp);
    JS_REQUIRES_STACK nanojit::LIns* guardArgsLengthNotAssigned(nanojit::LIns* argsobj_ins);
    JS_REQUIRES_STACK JSRecordingStatus getClassPrototype(JSObject* ctor,
                                                          nanojit::LIns*& proto_ins);
    JS_REQUIRES_STACK JSRecordingStatus getClassPrototype(JSProtoKey key,
                                                          nanojit::LIns*& proto_ins);
    JS_REQUIRES_STACK JSRecordingStatus newArray(JSObject* ctor, uint32 argc, jsval* argv,
                                                 jsval* rval);
    JS_REQUIRES_STACK JSRecordingStatus newString(JSObject* ctor, uint32 argc, jsval* argv,
                                                  jsval* rval);
    JS_REQUIRES_STACK JSRecordingStatus interpretedFunctionCall(jsval& fval, JSFunction* fun,
                                                                uintN argc, bool constructing);
    JS_REQUIRES_STACK void propagateFailureToBuiltinStatus(nanojit::LIns *ok_ins,
                                                           nanojit::LIns *&status_ins);
    JS_REQUIRES_STACK JSRecordingStatus emitNativeCall(JSSpecializedNative* sn, uintN argc,
                                                       nanojit::LIns* args[], bool rooted);
    JS_REQUIRES_STACK void emitNativePropertyOp(JSScope* scope,
                                                JSScopeProperty* sprop,
                                                nanojit::LIns* obj_ins,
                                                bool setflag,
                                                nanojit::LIns* boxed_ins);
    JS_REQUIRES_STACK JSRecordingStatus callSpecializedNative(JSNativeTraceInfo* trcinfo, uintN argc,
                                                              bool constructing);
    JS_REQUIRES_STACK JSRecordingStatus callNative(uintN argc, JSOp mode);
    JS_REQUIRES_STACK JSRecordingStatus functionCall(uintN argc, JSOp mode);

    JS_REQUIRES_STACK void trackCfgMerges(jsbytecode* pc);
    JS_REQUIRES_STACK void emitIf(jsbytecode* pc, bool cond, nanojit::LIns* x);
    JS_REQUIRES_STACK void fuseIf(jsbytecode* pc, bool cond, nanojit::LIns* x);
    JS_REQUIRES_STACK JSRecordingStatus checkTraceEnd(jsbytecode* pc);

    bool hasMethod(JSObject* obj, jsid id);
    JS_REQUIRES_STACK bool hasIteratorMethod(JSObject* obj);

    JS_REQUIRES_STACK jsatomid getFullIndex(ptrdiff_t pcoff = 0);

public:

    inline void*
    operator new(size_t size)
    {
        return calloc(1, size);
    }

    inline void
    operator delete(void *p)
    {
        free(p);
    }

    JS_REQUIRES_STACK
    TraceRecorder(JSContext* cx, VMSideExit*, nanojit::Fragment*, TreeInfo*,
                  unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap,
                  VMSideExit* expectedInnerExit, jsbytecode* outerTree,
                  uint32 outerArgc);
    ~TraceRecorder();

    bool outOfMemory();

    static JS_REQUIRES_STACK JSRecordingStatus monitorRecording(JSContext* cx, TraceRecorder* tr,
                                                                JSOp op);

    JS_REQUIRES_STACK JSTraceType determineSlotType(jsval* vp);

    JS_REQUIRES_STACK JSRecordingStatus setUpwardTrackedVar(jsval* stackVp, jsval v,
                                                            nanojit::LIns* v_ins);

    /*
     * Examines current interpreter state to record information suitable for
     * returning to the interpreter through a side exit of the given type.
     */
    JS_REQUIRES_STACK VMSideExit* snapshot(ExitType exitType);

    /*
     * Creates a separate but identical copy of the given side exit, allowing
     * the guards associated with each to be entirely separate even after
     * subsequent patching.
     */
    JS_REQUIRES_STACK VMSideExit* copy(VMSideExit* exit);

    /*
     * Creates an instruction whose payload is a GuardRecord for the given exit.
     * The instruction is suitable for use as the final argument of a single
     * call to LirBuffer::insGuard; do not reuse the returned value.
     */
    JS_REQUIRES_STACK nanojit::GuardRecord* createGuardRecord(VMSideExit* exit);

    nanojit::Fragment* getFragment() const { return fragment; }
    TreeInfo* getTreeInfo() const { return treeInfo; }
    JS_REQUIRES_STACK bool compile(JSTraceMonitor* tm);
    JS_REQUIRES_STACK bool closeLoop(TypeConsensus &consensus);
    JS_REQUIRES_STACK bool closeLoop(SlotMap& slotMap, VMSideExit* exit, TypeConsensus &consensus);
    JS_REQUIRES_STACK void endLoop();
    JS_REQUIRES_STACK void endLoop(VMSideExit* exit);
    JS_REQUIRES_STACK void joinEdgesToEntry(VMFragment* peer_root);
    JS_REQUIRES_STACK void adjustCallerTypes(nanojit::Fragment* f);
    JS_REQUIRES_STACK VMFragment* findNestedCompatiblePeer(VMFragment* f);
    JS_REQUIRES_STACK void prepareTreeCall(VMFragment* inner);
    JS_REQUIRES_STACK void emitTreeCall(VMFragment* inner, VMSideExit* exit);
    unsigned getCallDepth() const;

    JS_REQUIRES_STACK JSRecordingStatus record_EnterFrame();
    JS_REQUIRES_STACK JSRecordingStatus record_LeaveFrame();
    JS_REQUIRES_STACK JSRecordingStatus record_SetPropHit(JSPropCacheEntry* entry,
                                                          JSScopeProperty* sprop);
    JS_REQUIRES_STACK JSRecordingStatus record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
    JS_REQUIRES_STACK JSRecordingStatus record_NativeCallComplete();

#ifdef DEBUG
    void tprint(const char *format, int count, nanojit::LIns *insa[]);
    void tprint(const char *format);
    void tprint(const char *format, nanojit::LIns *ins);
    void tprint(const char *format, nanojit::LIns *ins1, nanojit::LIns *ins2);
    void tprint(const char *format, nanojit::LIns *ins1, nanojit::LIns *ins2, nanojit::LIns *ins3);
    void tprint(const char *format, nanojit::LIns *ins1, nanojit::LIns *ins2, nanojit::LIns *ins3,
                nanojit::LIns *ins4);
    void tprint(const char *format, nanojit::LIns *ins1, nanojit::LIns *ins2, nanojit::LIns *ins3,
                nanojit::LIns *ins4, nanojit::LIns *ins5);
    void tprint(const char *format, nanojit::LIns *ins1, nanojit::LIns *ins2, nanojit::LIns *ins3,
                nanojit::LIns *ins4, nanojit::LIns *ins5, nanojit::LIns *ins6);
#endif

#define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
    JS_REQUIRES_STACK JSRecordingStatus record_##op();
# include "jsopcode.tbl"
#undef OPDEF

    friend class ImportBoxedStackSlotVisitor;
    friend class ImportUnboxedStackSlotVisitor;
    friend class ImportUnboxedFrameSlotVisitor;
    friend class ImportGlobalSlotVisitor;
    friend class AdjustCallerGlobalTypesVisitor;
    friend class AdjustCallerStackTypesVisitor;
    friend class TypeCompatibilityVisitor;
    friend class SlotMap;
    friend class DefaultSlotMap;
    friend jsval *js_ConcatPostImacroStackCleanup(uint32 argc, JSFrameRegs &regs,
                                                  TraceRecorder *recorder);
};

#define TRACING_ENABLED(cx)       ((cx)->jitEnabled)
#define TRACE_RECORDER(cx)        (JS_TRACE_MONITOR(cx).recorder)
#define SET_TRACE_RECORDER(cx,tr) (JS_TRACE_MONITOR(cx).recorder = (tr))

#define JSOP_IN_RANGE(op,lo,hi)   (uintN((op) - (lo)) <= uintN((hi) - (lo)))
#define JSOP_IS_BINARY(op)        JSOP_IN_RANGE(op, JSOP_BITOR, JSOP_MOD)
#define JSOP_IS_UNARY(op)         JSOP_IN_RANGE(op, JSOP_NEG, JSOP_POS)
#define JSOP_IS_EQUALITY(op)      JSOP_IN_RANGE(op, JSOP_EQ, JSOP_NE)

#define TRACE_ARGS_(x,args)                                                   \
    JS_BEGIN_MACRO                                                            \
        if (TraceRecorder* tr_ = TRACE_RECORDER(cx)) {                        \
            JSRecordingStatus status = tr_->record_##x args;                  \
            if (STATUS_ABORTS_RECORDING(status)) {                            \
                if (TRACE_RECORDER(cx))                                       \
                    js_AbortRecording(cx, #x);                                \
                if (status == JSRS_ERROR)                                     \
                    goto error;                                               \
            }                                                                 \
            JS_ASSERT(status != JSRS_IMACRO);                                 \
        }                                                                     \
    JS_END_MACRO

#define TRACE_ARGS(x,args)      TRACE_ARGS_(x, args)
#define TRACE_0(x)              TRACE_ARGS(x, ())
#define TRACE_1(x,a)            TRACE_ARGS(x, (a))
#define TRACE_2(x,a,b)          TRACE_ARGS(x, (a, b))
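
/*
 * Illustrative sketch (not part of the original header): the interpreter
 * invokes these macros after executing an opcode while recording; the first
 * argument selects the record_* method and the rest are forwarded to it.
 * |entry| and |sprop| below stand for locals of the calling interpreter code.
 */
#if 0
TRACE_0(EnterFrame);                 // expands to tr_->record_EnterFrame()
TRACE_2(SetPropHit, entry, sprop);   // expands to tr_->record_SetPropHit(entry, sprop)
#endif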

extern JS_REQUIRES_STACK bool
js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount);

#ifdef DEBUG
# define js_AbortRecording(cx, reason) js_AbortRecordingImpl(cx, reason)
#else
# define js_AbortRecording(cx, reason) js_AbortRecordingImpl(cx)
#endif

extern JS_REQUIRES_STACK void
js_AbortRecording(JSContext* cx, const char* reason);

extern void
js_InitJIT(JSTraceMonitor *tm);

extern void
js_FinishJIT(JSTraceMonitor *tm);

extern void
js_PurgeScriptFragments(JSContext* cx, JSScript* script);

extern bool
js_OverfullJITCache(JSTraceMonitor* tm);

extern void
js_FlushJITCache(JSContext* cx);

extern void
js_PurgeJITOracle();

extern JSObject *
js_GetBuiltinFunction(JSContext *cx, uintN index);

extern void
js_SetMaxCodeCacheBytes(JSContext* cx, uint32 bytes);

extern bool
js_NativeToValue(JSContext* cx, jsval& v, JSTraceType type, double* slot);

#ifdef MOZ_TRACEVIS

extern JS_FRIEND_API(bool)
JS_StartTraceVis(const char* filename);

extern JS_FRIEND_API(JSBool)
js_StartTraceVis(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
                 jsval *rval);

extern JS_FRIEND_API(bool)
JS_StopTraceVis();

extern JS_FRIEND_API(JSBool)
js_StopTraceVis(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
                jsval *rval);

/* Must contain no more than 16 items. */
enum TraceVisState {
    // Special: means we returned from current activity to last
    S_EXITLAST,
    // Activities
    S_INTERP,
    S_MONITOR,
    S_RECORD,
    S_COMPILE,
    S_EXECUTE,
    S_NATIVE,
    // Events: these all have (bit 3) == 1.
    S_RESET = 8
};

/* Reason for an exit to the interpreter. */
enum TraceVisExitReason {
    R_NONE,
    R_ABORT,
    /* Reasons in js_MonitorLoopEdge */
    R_INNER_SIDE_EXIT,
    R_DOUBLES,
    R_CALLBACK_PENDING,
    R_OOM_GETANCHOR,
    R_BACKED_OFF,
    R_COLD,
    R_FAIL_RECORD_TREE,
    R_MAX_PEERS,
    R_FAIL_EXECUTE_TREE,
    R_FAIL_STABILIZE,
    R_FAIL_EXTEND_FLUSH,
    R_FAIL_EXTEND_MAX_BRANCHES,
    R_FAIL_EXTEND_START,
    R_FAIL_EXTEND_COLD,
    R_NO_EXTEND_OUTER,
    R_MISMATCH_EXIT,
    R_OOM_EXIT,
    R_TIMEOUT_EXIT,
    R_DEEP_BAIL_EXIT,
    R_STATUS_EXIT,
    R_OTHER_EXIT
};

enum TraceVisFlushReason {
    FR_DEEP_BAIL,
    FR_OOM,
    FR_GLOBAL_SHAPE_MISMATCH,
    FR_GLOBALS_FULL
};

const unsigned long long MS64_MASK = 0xfull << 60;
const unsigned long long MR64_MASK = 0x1full << 55;
const unsigned long long MT64_MASK = ~(MS64_MASK | MR64_MASK);

extern FILE* traceVisLogFile;
extern JSHashTable *traceVisScriptTable;

extern JS_FRIEND_API(void)
js_StoreTraceVisState(JSContext *cx, TraceVisState s, TraceVisExitReason r);

static inline void
js_LogTraceVisState(JSContext *cx, TraceVisState s, TraceVisExitReason r)
{
    if (traceVisLogFile) {
        unsigned long long sllu = s;
        unsigned long long rllu = r;
        unsigned long long d = (sllu << 60) | (rllu << 55) | (rdtsc() & MT64_MASK);
        fwrite(&d, sizeof(d), 1, traceVisLogFile);
    }
    if (traceVisScriptTable) {
        js_StoreTraceVisState(cx, s, r);
    }
}
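
/*
 * Illustrative sketch (not part of the original header): each log entry is
 * one 64-bit word, with the state in the top 4 bits (MS64_MASK), the exit
 * reason in the next 5 (MR64_MASK), and the low 55 bits of the rdtsc
 * timestamp in the rest (MT64_MASK).
 */
#if 0
// For s = S_RECORD (3) and r = R_ABORT (1), the logged word is
//   (3ull << 60) | (1ull << 55) | (rdtsc() & MT64_MASK).
// Decoding a logged word d back into its fields:
unsigned long long d = /* one word read back from the log */ 0;
unsigned state  = unsigned((d & MS64_MASK) >> 60);
unsigned reason = unsigned((d & MR64_MASK) >> 55);
unsigned long long ticks = d & MT64_MASK;
#endif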

/*
 * Although this runs the same code as js_LogTraceVisState, it is a separate
 * function because the meaning of the log entry is different. Also, the entry
 * formats may diverge someday.
 */
static inline void
js_LogTraceVisEvent(JSContext *cx, TraceVisState s, TraceVisFlushReason r)
{
    js_LogTraceVisState(cx, s, (TraceVisExitReason) r);
}

static inline void
js_EnterTraceVisState(JSContext *cx, TraceVisState s, TraceVisExitReason r)
{
    js_LogTraceVisState(cx, s, r);
}

static inline void
js_ExitTraceVisState(JSContext *cx, TraceVisExitReason r)
{
    js_LogTraceVisState(cx, S_EXITLAST, r);
}

struct TraceVisStateObj {
    TraceVisExitReason r;
    JSContext *mCx;

    inline TraceVisStateObj(JSContext *cx, TraceVisState s) : r(R_NONE)
    {
        js_EnterTraceVisState(cx, s, R_NONE);
        mCx = cx;
    }
    inline ~TraceVisStateObj()
    {
        js_ExitTraceVisState(mCx, r);
    }
};
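
/*
 * Illustrative sketch (not part of the original header): TraceVisStateObj is
 * an RAII helper, entering a state in its constructor and logging the exit
 * reason (updated along the way) when the scope unwinds. The function name
 * and condition are hypothetical.
 */
#if 0
static void
MonitorExample(JSContext* cx)
{
    TraceVisStateObj tvso(cx, S_MONITOR);   // logs S_MONITOR on entry
    if (/* tree execution fails */ true)
        tvso.r = R_FAIL_EXECUTE_TREE;       // recorded at scope exit
}                                           // ~TraceVisStateObj logs S_EXITLAST
#endif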

#endif /* MOZ_TRACEVIS */

extern jsval *
js_ConcatPostImacroStackCleanup(uint32 argc, JSFrameRegs &regs,
                                TraceRecorder *recorder);

#else  /* !JS_TRACER */

#define TRACE_0(x)              ((void)0)
#define TRACE_1(x,a)            ((void)0)
#define TRACE_2(x,a,b)          ((void)0)

#endif /* !JS_TRACER */

#endif /* jstracer_h___ */