
Contents of /trunk/js/jstracer.h



Revision 460
Sat Sep 26 23:15:22 2009 UTC by siliconforks
File MIME type: text/plain
File size: 31255 byte(s)
Upgrade to SpiderMonkey from Firefox 3.5.3.

1 /* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99 ft=cpp:
3 *
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 *
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
11 *
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
16 *
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 * May 28, 2008.
19 *
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
22 *
23 * Contributor(s):
24 * Andreas Gal <gal@mozilla.com>
25 * Mike Shaver <shaver@mozilla.org>
26 * David Anderson <danderson@mozilla.com>
27 *
28 * Alternatively, the contents of this file may be used under the terms of
29 * either of the GNU General Public License Version 2 or later (the "GPL"),
30 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 * in which case the provisions of the GPL or the LGPL are applicable instead
32 * of those above. If you wish to allow use of your version of this file only
33 * under the terms of either the GPL or the LGPL, and not to allow others to
34 * use your version of this file under the terms of the MPL, indicate your
35 * decision by deleting the provisions above and replace them with the notice
36 * and other provisions required by the GPL or the LGPL. If you do not delete
37 * the provisions above, a recipient may use your version of this file under
38 * the terms of any one of the MPL, the GPL or the LGPL.
39 *
40 * ***** END LICENSE BLOCK ***** */
41
42 #ifndef jstracer_h___
43 #define jstracer_h___
44
45 #ifdef JS_TRACER
46
47 #include "jscntxt.h"
48 #include "jsstddef.h"
49 #include "jstypes.h"
50 #include "jslock.h"
51 #include "jsnum.h"
52 #include "jsinterp.h"
53 #include "jsbuiltins.h"
54
55 #if defined(DEBUG) && !defined(JS_JIT_SPEW)
56 #define JS_JIT_SPEW
57 #endif
58
59 template <typename T>
60 class Queue : public avmplus::GCObject {
61 T* _data;
62 unsigned _len;
63 unsigned _max;
64
65 void ensure(unsigned size) {
66 while (_max < size)
67 _max <<= 1;
68 _data = (T*)realloc(_data, _max * sizeof(T));
69 #if defined(DEBUG)
70 memset(&_data[_len], 0xcd, (_max - _len) * sizeof(T));
71 #endif
72 }
73 public:
74 Queue(unsigned max = 16) {
75 this->_max = max;
76 this->_len = 0;
77 this->_data = (T*)malloc(max * sizeof(T));
78 }
79
80 ~Queue() {
81 free(_data);
82 }
83
84 bool contains(T a) {
85 for (unsigned n = 0; n < _len; ++n) {
86 if (_data[n] == a)
87 return true;
88 }
89 return false;
90 }
91
92 void add(T a) {
93 ensure(_len + 1);
94 JS_ASSERT(_len <= _max);
95 _data[_len++] = a;
96 }
97
98 void add(T* chunk, unsigned size) {
99 ensure(_len + size);
100 JS_ASSERT(_len <= _max);
101 memcpy(&_data[_len], chunk, size * sizeof(T));
102 _len += size;
103 }
104
105 void addUnique(T a) {
106 if (!contains(a))
107 add(a);
108 }
109
110 void setLength(unsigned len) {
111 ensure(len + 1);
112 _len = len;
113 }
114
115 void clear() {
116 _len = 0;
117 }
118
119 const T & get(unsigned i) const {
120 return _data[i];
121 }
122
123 unsigned length() const {
124 return _len;
125 }
126
127 T* data() const {
128 return _data;
129 }
130 };
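
/*
 * Illustrative sketch (editorial addition, not part of the original file):
 * typical use of Queue as a growable array, mirroring how the recorder uses
 * Queue<VMSideExit*> and Queue<jsbytecode*> below. All names here are
 * hypothetical.
 */
#if 0
static void
ExampleQueueUsage()
{
    Queue<int> q(4);                    /* initial capacity of 4 elements */
    for (int i = 0; i < 10; ++i)
        q.add(i);                       /* ensure() doubles _max as needed */
    q.addUnique(3);                     /* no-op: 3 is already present */
    JS_ASSERT(q.length() == 10);
    int sum = 0;
    for (unsigned n = 0; n < q.length(); ++n)
        sum += q.get(n);                /* get() returns a const reference */
    q.clear();                          /* resets the length; keeps storage */
}
#endif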
131
132 /*
133 * Tracker is used to keep track of values being manipulated by the interpreter
134 * during trace recording.
135 */
136 class Tracker {
137 struct Page {
138 struct Page* next;
139 jsuword base;
140 nanojit::LIns* map[1];
141 };
142 struct Page* pagelist;
143
144 jsuword getPageBase(const void* v) const;
145 struct Page* findPage(const void* v) const;
146 struct Page* addPage(const void* v);
147 public:
148 Tracker();
149 ~Tracker();
150
151 bool has(const void* v) const;
152 nanojit::LIns* get(const void* v) const;
153 void set(const void* v, nanojit::LIns* ins);
154 void clear();
155 };
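
/*
 * Illustrative sketch (editorial addition, not part of the original file):
 * the recorder pairs the address of an interpreter value with the LIR
 * instruction that computes it. 'vp' and 'ins' are hypothetical.
 */
#if 0
static void
ExampleTrackerUsage(Tracker& tracker, jsval* vp, nanojit::LIns* ins)
{
    tracker.set(vp, ins);               /* remember which LIns holds *vp */
    if (tracker.has(vp))
        JS_ASSERT(tracker.get(vp) == ins);
    tracker.clear();                    /* drop all mappings */
}
#endif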
156
157 #ifdef JS_JIT_SPEW
158 extern bool js_verboseDebug;
159 #define debug_only_v(x) if (js_verboseDebug) { x; fflush(stdout); }
160 #else
161 #define debug_only_v(x)
162 #endif
163
164 /*
165 * The oracle keeps track of hit counts for program counter locations, as
166 * well as slots that should not be demoted to int because we know they
167 * overflow or result in type-unstable traces. We use simple hash tables.
168 * Collisions lead to loss of optimization (demotable slots are not
169 * demoted, etc.) but have no correctness implications.
170 */
171 #define ORACLE_SIZE 4096
172
173 class Oracle {
174 avmplus::BitSet _stackDontDemote;
175 avmplus::BitSet _globalDontDemote;
176 public:
177 Oracle();
178
179 JS_REQUIRES_STACK void markGlobalSlotUndemotable(JSContext* cx, unsigned slot);
180 JS_REQUIRES_STACK bool isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const;
181 JS_REQUIRES_STACK void markStackSlotUndemotable(JSContext* cx, unsigned slot);
182 JS_REQUIRES_STACK bool isStackSlotUndemotable(JSContext* cx, unsigned slot) const;
183 void clearDemotability();
184 void clear() {
185 clearDemotability();
186 }
187 };
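
/*
 * Illustrative sketch (editorial addition, not part of the original file):
 * once a trace observes a global slot overflowing int32 range, it marks the
 * slot undemotable so later recordings keep it as a double. The oracle
 * instance and slot number are hypothetical.
 */
#if 0
static void
ExampleOracleUsage(JSContext* cx, Oracle& oracle, unsigned slot)
{
    oracle.markGlobalSlotUndemotable(cx, slot);
    if (oracle.isGlobalSlotUndemotable(cx, slot)) {
        /* ... keep this slot typed as a double instead of demoting ... */
    }
}
#endif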
188
189 typedef Queue<uint16> SlotList;
190
191 class TypeMap : public Queue<uint8> {
192 public:
193 JS_REQUIRES_STACK void captureTypes(JSContext* cx, SlotList& slots, unsigned callDepth);
194 JS_REQUIRES_STACK void captureMissingGlobalTypes(JSContext* cx,
195 SlotList& slots,
196 unsigned stackSlots);
197 bool matches(TypeMap& other) const;
198 };
199
200 enum ExitType {
201 /*
202 * An exit at a possible branch-point in the trace at which to attach a
203 * future secondary trace. Therefore the recorder must generate different
204 * code to handle the other outcome of the branch condition from the
205 * primary trace's outcome.
206 */
207 BRANCH_EXIT,
208
209 /*
210 * Exit at a tableswitch via a numbered case.
211 */
212 CASE_EXIT,
213
214 /*
215 * Exit at a tableswitch via the default case.
216 */
217 DEFAULT_EXIT,
218
219 LOOP_EXIT,
220 NESTED_EXIT,
221
222 /*
223 * An exit from a trace because a condition relied upon at recording time
224 * no longer holds, where the alternate path of execution is so rare or
225 * difficult to address in native code that it is not traced at all, e.g.
226 * negative array index accesses, which differ from positive indexes in
227 * that they require a string-based property lookup rather than a simple
228 * memory access.
229 */
230 MISMATCH_EXIT,
231
232 /*
233 * A specialization of MISMATCH_EXIT to handle allocation failures.
234 */
235 OOM_EXIT,
236 OVERFLOW_EXIT,
237 UNSTABLE_LOOP_EXIT,
238 TIMEOUT_EXIT,
239 DEEP_BAIL_EXIT,
240 STATUS_EXIT
241 };
242
243 struct VMSideExit : public nanojit::SideExit
244 {
245 JSObject* block;
246 jsbytecode* pc;
247 jsbytecode* imacpc;
248 intptr_t sp_adj;
249 intptr_t rp_adj;
250 int32_t calldepth;
251 uint32 numGlobalSlots;
252 uint32 numStackSlots;
253 uint32 numStackSlotsBelowCurrentFrame;
254 ExitType exitType;
255
256 /*
257 * Ordinarily 0. If a slow native function is atop the stack, the 1 bit is
258 * set if constructing and the other bits are a pointer to the funobj.
259 */
260 uintptr_t nativeCalleeWord;
261
262 JSObject * nativeCallee() {
263 return (JSObject *) (nativeCalleeWord & ~1);
264 }
265
266 bool constructing() {
267 return bool(nativeCalleeWord & 1);
268 }
269
270 void setNativeCallee(JSObject *callee, bool constructing) {
271 nativeCalleeWord = uintptr_t(callee) | (constructing ? 1 : 0);
272 }
273 };
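
/*
 * Illustrative sketch (editorial addition, not part of the original file):
 * the low bit of nativeCalleeWord tags a construct call; the remaining bits
 * hold the callee funobj pointer. 'exit' and 'callee' are hypothetical.
 */
#if 0
static void
ExampleNativeCalleeTagging(VMSideExit* exit, JSObject* callee)
{
    exit->setNativeCallee(callee, /* constructing = */ true);
    JS_ASSERT(exit->nativeCallee() == callee);  /* low bit masked off */
    JS_ASSERT(exit->constructing());
}
#endif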
274
275 static inline uint8* getStackTypeMap(nanojit::SideExit* exit)
276 {
277 return (uint8*)(((VMSideExit*)exit) + 1);
278 }
279
280 static inline uint8* getGlobalTypeMap(nanojit::SideExit* exit)
281 {
282 return getStackTypeMap(exit) + ((VMSideExit*)exit)->numStackSlots;
283 }
284
285 static inline uint8* getFullTypeMap(nanojit::SideExit* exit)
286 {
287 return getStackTypeMap(exit);
288 }
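
/*
 * Illustrative note (editorial addition, not part of the original file):
 * the type map is allocated immediately after the VMSideExit, stack slot
 * types first and global slot types after them, so the accessors above are
 * plain pointer arithmetic. 'exit' is a hypothetical, fully built exit.
 */
#if 0
static void
ExampleTypeMapLayout(VMSideExit* exit)
{
    uint8* stackTypes  = getStackTypeMap(exit);   /* (uint8*)(exit + 1) */
    uint8* globalTypes = getGlobalTypeMap(exit);
    JS_ASSERT(globalTypes == stackTypes + exit->numStackSlots);
    JS_ASSERT(getFullTypeMap(exit) == stackTypes);
}
#endif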
289
290 struct FrameInfo {
291 JSObject* callee; // callee function object
292 JSObject* block; // caller block chain head
293 jsbytecode* pc; // caller fp->regs->pc
294 jsbytecode* imacpc; // caller fp->imacpc
295 uint16 spdist; // distance from fp->slots to fp->regs->sp at JSOP_CALL
296
297 /*
298 * Bit 15 (0x8000) is a flag that is set if constructing (called through new).
299 * Bits 0-14 are the actual argument count. This may be less than fun->nargs.
300 */
301 uint16 argc;
302
303 /*
304 * Stack pointer adjustment needed for navigation of native stack in
305 * js_GetUpvarOnTrace. spoffset is the number of slots in the native
306 * stack frame for the caller *before* the slots covered by spdist.
307 * This may be negative if the caller is the top level script.
308 * The key fact is that if we let 'cpos' be the start of the caller's
309 * native stack frame, then (cpos + spoffset) points to the first
310 * non-argument slot in the callee's native stack frame.
311 */
312 int32 spoffset;
313
314 // Safer accessors for argc.
315 enum { CONSTRUCTING_MASK = 0x8000 };
316 void set_argc(uint16 argc, bool constructing) {
317 this->argc = argc | (constructing ? CONSTRUCTING_MASK : 0);
318 }
319 uint16 get_argc() const { return argc & ~CONSTRUCTING_MASK; }
320 bool is_constructing() const { return (argc & CONSTRUCTING_MASK) != 0; }
321
322 // The typemap just before the callee is called.
323 uint8* get_typemap() { return (uint8*) (this+1); }
324 };
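
/*
 * Illustrative sketch (editorial addition, not part of the original file):
 * argc and the constructing flag share one uint16 via CONSTRUCTING_MASK,
 * and the callee's entry type map lives directly after the struct. 'fi' is
 * hypothetical.
 */
#if 0
static void
ExampleFrameInfoArgc(FrameInfo* fi)
{
    fi->set_argc(2, /* constructing = */ true);   /* stores 0x8002 */
    JS_ASSERT(fi->get_argc() == 2);
    JS_ASSERT(fi->is_constructing());
    uint8* typemap = fi->get_typemap();           /* (uint8*)(fi + 1) */
    (void) typemap;
}
#endif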
325
326 struct UnstableExit
327 {
328 nanojit::Fragment* fragment;
329 VMSideExit* exit;
330 UnstableExit* next;
331 };
332
333 class TreeInfo MMGC_SUBCLASS_DECL {
334 public:
335 nanojit::Fragment* const fragment;
336 JSScript* script;
337 unsigned maxNativeStackSlots;
338 ptrdiff_t nativeStackBase;
339 unsigned maxCallDepth;
340 TypeMap typeMap;
341 unsigned nStackTypes;
342 SlotList* globalSlots;
343 /* Dependent trees must be trashed if this tree dies, and updated on missing global types */
344 Queue<nanojit::Fragment*> dependentTrees;
345 /* Linked trees must be updated on missing global types, but are not dependent */
346 Queue<nanojit::Fragment*> linkedTrees;
347 unsigned branchCount;
348 Queue<VMSideExit*> sideExits;
349 UnstableExit* unstableExits;
350 #ifdef DEBUG
351 const char* treeFileName;
352 uintN treeLineNumber;
353 uintN treePCOffset;
354 #endif
355
356 TreeInfo(nanojit::Fragment* _fragment,
357 SlotList* _globalSlots)
358 : fragment(_fragment),
359 script(NULL),
360 maxNativeStackSlots(0),
361 nativeStackBase(0),
362 maxCallDepth(0),
363 nStackTypes(0),
364 globalSlots(_globalSlots),
365 branchCount(0),
366 unstableExits(NULL)
367 {}
368 ~TreeInfo();
369
370 inline unsigned nGlobalTypes() {
371 return typeMap.length() - nStackTypes;
372 }
373 inline uint8* globalTypeMap() {
374 return typeMap.data() + nStackTypes;
375 }
376 inline uint8* stackTypeMap() {
377 return typeMap.data();
378 }
379 };
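
/*
 * Illustrative note (editorial addition, not part of the original file):
 * TreeInfo::typeMap stores the nStackTypes stack slot types first, followed
 * by the global slot types, which is what the three inline accessors above
 * rely on. 'ti' is a hypothetical, fully recorded tree.
 */
#if 0
static void
ExampleTreeInfoTypeMap(TreeInfo* ti)
{
    JS_ASSERT(ti->typeMap.length() == ti->nStackTypes + ti->nGlobalTypes());
    JS_ASSERT(ti->globalTypeMap() == ti->stackTypeMap() + ti->nStackTypes);
}
#endif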
380
381 #if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__)))
382 # define EXECUTE_TREE_TIMER
383 #endif
384
385 typedef enum JSBuiltinStatus {
386 JSBUILTIN_BAILED = 1,
387 JSBUILTIN_ERROR = 2
388 } JSBuiltinStatus;
389
390 struct InterpState
391 {
392 double *sp; // native stack pointer, stack[0] is spbase[0]
393 FrameInfo** rp; // call stack pointer
394 JSContext *cx; // current VM context handle
395 double *eos; // first unusable word after the native stack
396 void *eor; // first unusable word after the call stack
397 VMSideExit* lastTreeExitGuard; // guard we exited on during a tree call
398 VMSideExit* lastTreeCallGuard; // guard we want to grow from if the tree
399 // call exit guard mismatched
400 void* rpAtLastTreeCall; // value of rp at innermost tree call guard
401 TreeInfo* outermostTree; // the outermost tree we initially invoked
402 double* stackBase; // native stack base
403 FrameInfo** callstackBase; // call stack base
404 uintN* inlineCallCountp; // inline call count counter
405 VMSideExit** innermostNestedGuardp;
406 void* stackMark;
407 VMSideExit* innermost;
408 #ifdef EXECUTE_TREE_TIMER
409 uint64 startTime;
410 #endif
411 InterpState* prev;
412
413 /*
414 * Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
415 * JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
416 * if an error or exception occurred.
417 */
418 uint32 builtinStatus;
419
420 // Used to communicate the location of the return value in case of a deep bail.
421 double* deepBailSp;
422 };
423
424 static JS_INLINE void
425 js_SetBuiltinError(JSContext *cx)
426 {
427 cx->interpState->builtinStatus |= JSBUILTIN_ERROR;
428 }
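
/*
 * Illustrative sketch (editorial addition, not part of the original file):
 * a _FAIL builtin reports failure by calling js_SetBuiltinError(), which ORs
 * JSBUILTIN_ERROR into cx->interpState->builtinStatus; trace code checks the
 * status after the call and side-exits if it is nonzero. The builtin below
 * is hypothetical.
 */
#if 0
static JSBool
ExampleFailingBuiltin(JSContext* cx)
{
    JSBool ok = JS_FALSE;               /* pretend the operation failed */
    if (!ok) {
        js_SetBuiltinError(cx);
        return JS_FALSE;
    }
    return JS_TRUE;
}
#endif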
429
430 #ifdef DEBUG_JSRS_NOT_BOOL
431 struct JSRecordingStatus {
432 int code;
433 bool operator==(JSRecordingStatus &s) { return this->code == s.code; };
434 bool operator!=(JSRecordingStatus &s) { return this->code != s.code; };
435 };
436 enum JSRScodes {
437 JSRS_ERROR_code,
438 JSRS_STOP_code,
439 JSRS_CONTINUE_code,
440 JSRS_IMACRO_code
441 };
442 struct JSRecordingStatus JSRS_CONTINUE = { JSRS_CONTINUE_code };
443 struct JSRecordingStatus JSRS_STOP = { JSRS_STOP_code };
444 struct JSRecordingStatus JSRS_IMACRO = { JSRS_IMACRO_code };
445 struct JSRecordingStatus JSRS_ERROR = { JSRS_ERROR_code };
446 #define STATUS_ABORTS_RECORDING(s) ((s) == JSRS_STOP || (s) == JSRS_ERROR)
447 #else
448 enum JSRecordingStatus {
449 JSRS_ERROR, // Error; propagate to interpreter.
450 JSRS_STOP, // Abort recording.
451 JSRS_CONTINUE, // Continue recording.
452 JSRS_IMACRO // Entered imacro; continue recording.
453 // Only JSOP_IS_IMACOP opcodes may return this.
454 };
455 #define STATUS_ABORTS_RECORDING(s) ((s) <= JSRS_STOP)
456 #endif
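
/*
 * Illustrative note (editorial addition, not part of the original file): in
 * the non-debug variant, STATUS_ABORTS_RECORDING relies on JSRS_ERROR and
 * JSRS_STOP being the two smallest enumerators, so one comparison covers
 * both abort cases.
 */
#if 0
static bool
ExampleStatusCheck(JSRecordingStatus status)
{
    /* Equivalent to (status == JSRS_ERROR || status == JSRS_STOP). */
    return STATUS_ABORTS_RECORDING(status);
}
#endif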
457
458
459
460 class TraceRecorder : public avmplus::GCObject {
461 JSContext* cx;
462 JSTraceMonitor* traceMonitor;
463 JSObject* globalObj;
464 JSObject* lexicalBlock;
465 Tracker tracker;
466 Tracker nativeFrameTracker;
467 char* entryTypeMap;
468 unsigned callDepth;
469 JSAtom** atoms;
470 VMSideExit* anchor;
471 nanojit::Fragment* fragment;
472 TreeInfo* treeInfo;
473 nanojit::LirBuffer* lirbuf;
474 nanojit::LirWriter* lir;
475 nanojit::LirBufWriter* lir_buf_writer;
476 nanojit::LirWriter* verbose_filter;
477 nanojit::LirWriter* cse_filter;
478 nanojit::LirWriter* expr_filter;
479 nanojit::LirWriter* func_filter;
480 nanojit::LirWriter* float_filter;
481 nanojit::LIns* cx_ins;
482 nanojit::LIns* eos_ins;
483 nanojit::LIns* eor_ins;
484 nanojit::LIns* rval_ins;
485 nanojit::LIns* inner_sp_ins;
486 nanojit::LIns* native_rval_ins;
487 nanojit::LIns* newobj_ins;
488 bool deepAborted;
489 bool trashSelf;
490 Queue<nanojit::Fragment*> whichTreesToTrash;
491 Queue<jsbytecode*> cfgMerges;
492 jsval* global_dslots;
493 JSTraceableNative* generatedTraceableNative;
494 JSTraceableNative* pendingTraceableNative;
495 TraceRecorder* nextRecorderToAbort;
496 bool wasRootFragment;
497 jsbytecode* outer; /* outer trace header PC */
498 uint32 outerArgc; /* outer trace deepest frame argc */
499 bool loop;
500
501 bool isGlobal(jsval* p) const;
502 ptrdiff_t nativeGlobalOffset(jsval* p) const;
503 JS_REQUIRES_STACK ptrdiff_t nativeStackOffset(jsval* p) const;
504 JS_REQUIRES_STACK void import(nanojit::LIns* base, ptrdiff_t offset, jsval* p, uint8 t,
505 const char *prefix, uintN index, JSStackFrame *fp);
506 JS_REQUIRES_STACK void import(TreeInfo* treeInfo, nanojit::LIns* sp, unsigned stackSlots,
507 unsigned callDepth, unsigned ngslots, uint8* typeMap);
508 void trackNativeStackUse(unsigned slots);
509
510 JS_REQUIRES_STACK bool isValidSlot(JSScope* scope, JSScopeProperty* sprop);
511 JS_REQUIRES_STACK bool lazilyImportGlobalSlot(unsigned slot);
512
513 JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, ExitType exitType);
514 JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, VMSideExit* exit);
515
516 nanojit::LIns* addName(nanojit::LIns* ins, const char* name);
517
518 nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset);
519 JS_REQUIRES_STACK void set(jsval* p, nanojit::LIns* l, bool initializing = false);
520 JS_REQUIRES_STACK nanojit::LIns* get(jsval* p);
521 JS_REQUIRES_STACK bool known(jsval* p);
522 JS_REQUIRES_STACK void checkForGlobalObjectReallocation();
523
524 JS_REQUIRES_STACK bool checkType(jsval& v, uint8 t, jsval*& stage_val,
525 nanojit::LIns*& stage_ins, unsigned& stage_count);
526 JS_REQUIRES_STACK bool deduceTypeStability(nanojit::Fragment* root_peer,
527 nanojit::Fragment** stable_peer,
528 bool& demote);
529
530 JS_REQUIRES_STACK jsval& argval(unsigned n) const;
531 JS_REQUIRES_STACK jsval& varval(unsigned n) const;
532 JS_REQUIRES_STACK jsval& stackval(int n) const;
533
534 JS_REQUIRES_STACK nanojit::LIns* scopeChain() const;
535 JS_REQUIRES_STACK JSRecordingStatus activeCallOrGlobalSlot(JSObject* obj, jsval*& vp);
536
537 JS_REQUIRES_STACK nanojit::LIns* arg(unsigned n);
538 JS_REQUIRES_STACK void arg(unsigned n, nanojit::LIns* i);
539 JS_REQUIRES_STACK nanojit::LIns* var(unsigned n);
540 JS_REQUIRES_STACK void var(unsigned n, nanojit::LIns* i);
541 JS_REQUIRES_STACK nanojit::LIns* upvar(JSScript* script, JSUpvarArray* uva, uintN index, jsval& v);
542 JS_REQUIRES_STACK nanojit::LIns* stack(int n);
543 JS_REQUIRES_STACK void stack(int n, nanojit::LIns* i);
544
545 JS_REQUIRES_STACK nanojit::LIns* alu(nanojit::LOpcode op, jsdouble v0, jsdouble v1,
546 nanojit::LIns* s0, nanojit::LIns* s1);
547 nanojit::LIns* f2i(nanojit::LIns* f);
548 JS_REQUIRES_STACK nanojit::LIns* makeNumberInt32(nanojit::LIns* f);
549 JS_REQUIRES_STACK nanojit::LIns* stringify(jsval& v);
550
551 JS_REQUIRES_STACK JSRecordingStatus call_imacro(jsbytecode* imacro);
552
553 JS_REQUIRES_STACK JSRecordingStatus ifop();
554 JS_REQUIRES_STACK JSRecordingStatus switchop();
555 #ifdef NANOJIT_IA32
556 JS_REQUIRES_STACK nanojit::LIns* tableswitch();
557 #endif
558 JS_REQUIRES_STACK JSRecordingStatus inc(jsval& v, jsint incr, bool pre = true);
559 JS_REQUIRES_STACK JSRecordingStatus inc(jsval& v, nanojit::LIns*& v_ins, jsint incr,
560 bool pre = true);
561 JS_REQUIRES_STACK JSRecordingStatus incProp(jsint incr, bool pre = true);
562 JS_REQUIRES_STACK JSRecordingStatus incElem(jsint incr, bool pre = true);
563 JS_REQUIRES_STACK JSRecordingStatus incName(jsint incr, bool pre = true);
564
565 JS_REQUIRES_STACK void strictEquality(bool equal, bool cmpCase);
566 JS_REQUIRES_STACK JSRecordingStatus equality(bool negate, bool tryBranchAfterCond);
567 JS_REQUIRES_STACK JSRecordingStatus equalityHelper(jsval l, jsval r,
568 nanojit::LIns* l_ins, nanojit::LIns* r_ins,
569 bool negate, bool tryBranchAfterCond,
570 jsval& rval);
571 JS_REQUIRES_STACK JSRecordingStatus relational(nanojit::LOpcode op, bool tryBranchAfterCond);
572
573 JS_REQUIRES_STACK JSRecordingStatus unary(nanojit::LOpcode op);
574 JS_REQUIRES_STACK JSRecordingStatus binary(nanojit::LOpcode op);
575
576 bool ibinary(nanojit::LOpcode op);
577 bool iunary(nanojit::LOpcode op);
578 bool bbinary(nanojit::LOpcode op);
579 void demote(jsval& v, jsdouble result);
580
581 JS_REQUIRES_STACK bool map_is_native(JSObjectMap* map, nanojit::LIns* map_ins,
582 nanojit::LIns*& ops_ins, size_t op_offset = 0);
583 JS_REQUIRES_STACK JSRecordingStatus test_property_cache(JSObject* obj, nanojit::LIns* obj_ins,
584 JSObject*& obj2, jsuword& pcval);
585 void stobj_set_fslot(nanojit::LIns *obj_ins, unsigned slot,
586 nanojit::LIns* v_ins, const char *name);
587 void stobj_set_dslot(nanojit::LIns *obj_ins, unsigned slot, nanojit::LIns*& dslots_ins,
588 nanojit::LIns* v_ins, const char *name);
589 void stobj_set_slot(nanojit::LIns* obj_ins, unsigned slot, nanojit::LIns*& dslots_ins,
590 nanojit::LIns* v_ins);
591
592 nanojit::LIns* stobj_get_fslot(nanojit::LIns* obj_ins, unsigned slot);
593 nanojit::LIns* stobj_get_dslot(nanojit::LIns* obj_ins, unsigned index,
594 nanojit::LIns*& dslots_ins);
595 nanojit::LIns* stobj_get_slot(nanojit::LIns* obj_ins, unsigned slot,
596 nanojit::LIns*& dslots_ins);
597 JSRecordingStatus native_set(nanojit::LIns* obj_ins, JSScopeProperty* sprop,
598 nanojit::LIns*& dslots_ins, nanojit::LIns* v_ins);
599 JSRecordingStatus native_get(nanojit::LIns* obj_ins, nanojit::LIns* pobj_ins,
600 JSScopeProperty* sprop, nanojit::LIns*& dslots_ins,
601 nanojit::LIns*& v_ins);
602
603 nanojit::LIns* getStringLength(nanojit::LIns* str_ins);
604
605 JS_REQUIRES_STACK JSRecordingStatus name(jsval*& vp);
606 JS_REQUIRES_STACK JSRecordingStatus prop(JSObject* obj, nanojit::LIns* obj_ins, uint32& slot,
607 nanojit::LIns*& v_ins);
608 JS_REQUIRES_STACK JSRecordingStatus denseArrayElement(jsval& oval, jsval& idx, jsval*& vp,
609 nanojit::LIns*& v_ins,
610 nanojit::LIns*& addr_ins);
611 JS_REQUIRES_STACK JSRecordingStatus getProp(JSObject* obj, nanojit::LIns* obj_ins);
612 JS_REQUIRES_STACK JSRecordingStatus getProp(jsval& v);
613 JS_REQUIRES_STACK JSRecordingStatus getThis(nanojit::LIns*& this_ins);
614
615 JS_REQUIRES_STACK void box_jsval(jsval v, nanojit::LIns*& v_ins);
616 JS_REQUIRES_STACK void unbox_jsval(jsval v, nanojit::LIns*& v_ins, VMSideExit* exit);
617 JS_REQUIRES_STACK bool guardClass(JSObject* obj, nanojit::LIns* obj_ins, JSClass* clasp,
618 VMSideExit* exit);
619 JS_REQUIRES_STACK bool guardDenseArray(JSObject* obj, nanojit::LIns* obj_ins,
620 ExitType exitType = MISMATCH_EXIT);
621 JS_REQUIRES_STACK bool guardHasPrototype(JSObject* obj, nanojit::LIns* obj_ins,
622 JSObject** pobj, nanojit::LIns** pobj_ins,
623 VMSideExit* exit);
624 JS_REQUIRES_STACK JSRecordingStatus guardPrototypeHasNoIndexedProperties(JSObject* obj,
625 nanojit::LIns* obj_ins,
626 ExitType exitType);
627 JS_REQUIRES_STACK JSRecordingStatus guardNotGlobalObject(JSObject* obj,
628 nanojit::LIns* obj_ins);
629 void clearFrameSlotsFromCache();
630 JS_REQUIRES_STACK JSRecordingStatus guardCallee(jsval& callee);
631 JS_REQUIRES_STACK JSRecordingStatus getClassPrototype(JSObject* ctor,
632 nanojit::LIns*& proto_ins);
633 JS_REQUIRES_STACK JSRecordingStatus getClassPrototype(JSProtoKey key,
634 nanojit::LIns*& proto_ins);
635 JS_REQUIRES_STACK JSRecordingStatus newArray(JSObject* ctor, uint32 argc, jsval* argv,
636 jsval* rval);
637 JS_REQUIRES_STACK JSRecordingStatus newString(JSObject* ctor, uint32 argc, jsval* argv,
638 jsval* rval);
639 JS_REQUIRES_STACK JSRecordingStatus interpretedFunctionCall(jsval& fval, JSFunction* fun,
640 uintN argc, bool constructing);
641 JS_REQUIRES_STACK JSRecordingStatus emitNativeCall(JSTraceableNative* known, uintN argc,
642 nanojit::LIns* args[]);
643 JS_REQUIRES_STACK JSRecordingStatus callTraceableNative(JSFunction* fun, uintN argc,
644 bool constructing);
645 JS_REQUIRES_STACK JSRecordingStatus callNative(uintN argc, JSOp mode);
646 JS_REQUIRES_STACK JSRecordingStatus functionCall(uintN argc, JSOp mode);
647
648 JS_REQUIRES_STACK void trackCfgMerges(jsbytecode* pc);
649 JS_REQUIRES_STACK void emitIf(jsbytecode* pc, bool cond, nanojit::LIns* x);
650 JS_REQUIRES_STACK void fuseIf(jsbytecode* pc, bool cond, nanojit::LIns* x);
651 JS_REQUIRES_STACK JSRecordingStatus checkTraceEnd(jsbytecode* pc);
652
653 bool hasMethod(JSObject* obj, jsid id);
654 JS_REQUIRES_STACK bool hasIteratorMethod(JSObject* obj);
655
656 JS_REQUIRES_STACK jsatomid getFullIndex(ptrdiff_t pcoff = 0);
657
658 public:
659 JS_REQUIRES_STACK
660 TraceRecorder(JSContext* cx, VMSideExit*, nanojit::Fragment*, TreeInfo*,
661 unsigned stackSlots, unsigned ngslots, uint8* typeMap,
662 VMSideExit* expectedInnerExit, jsbytecode* outerTree,
663 uint32 outerArgc);
664 ~TraceRecorder();
665
666 static JS_REQUIRES_STACK JSRecordingStatus monitorRecording(JSContext* cx, TraceRecorder* tr,
667 JSOp op);
668
669 JS_REQUIRES_STACK uint8 determineSlotType(jsval* vp);
670
671 /*
672 * Examines current interpreter state to record information suitable for
673 * returning to the interpreter through a side exit of the given type.
674 */
675 JS_REQUIRES_STACK VMSideExit* snapshot(ExitType exitType);
676
677 /*
678 * Creates a separate but identical copy of the given side exit, allowing
679 * the guards associated with each to be entirely separate even after
680 * subsequent patching.
681 */
682 JS_REQUIRES_STACK VMSideExit* copy(VMSideExit* exit);
683
684 /*
685 * Creates an instruction whose payload is a GuardRecord for the given exit.
686 * The instruction is suitable for use as the final argument of a single
687 * call to LirBuffer::insGuard; do not reuse the returned value.
688 */
689 JS_REQUIRES_STACK nanojit::LIns* createGuardRecord(VMSideExit* exit);
690
691 nanojit::Fragment* getFragment() const { return fragment; }
692 TreeInfo* getTreeInfo() const { return treeInfo; }
693 JS_REQUIRES_STACK void compile(JSTraceMonitor* tm);
694 JS_REQUIRES_STACK void closeLoop(JSTraceMonitor* tm, bool& demote);
695 JS_REQUIRES_STACK void endLoop(JSTraceMonitor* tm);
696 JS_REQUIRES_STACK void joinEdgesToEntry(nanojit::Fragmento* fragmento,
697 VMFragment* peer_root);
698 void blacklist() { fragment->blacklist(); }
699 JS_REQUIRES_STACK void adjustCallerTypes(nanojit::Fragment* f);
700 JS_REQUIRES_STACK nanojit::Fragment* findNestedCompatiblePeer(nanojit::Fragment* f);
701 JS_REQUIRES_STACK void prepareTreeCall(nanojit::Fragment* inner);
702 JS_REQUIRES_STACK void emitTreeCall(nanojit::Fragment* inner, VMSideExit* exit);
703 unsigned getCallDepth() const;
704 void pushAbortStack();
705 void popAbortStack();
706 void removeFragmentoReferences();
707 void deepAbort();
708
709 JS_REQUIRES_STACK JSRecordingStatus record_EnterFrame();
710 JS_REQUIRES_STACK JSRecordingStatus record_LeaveFrame();
711 JS_REQUIRES_STACK JSRecordingStatus record_SetPropHit(JSPropCacheEntry* entry,
712 JSScopeProperty* sprop);
713 JS_REQUIRES_STACK JSRecordingStatus record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
714 JS_REQUIRES_STACK JSRecordingStatus record_NativeCallComplete();
715
716 bool wasDeepAborted() { return deepAborted; }
717 TreeInfo* getTreeInfo() { return treeInfo; }
718
719 #define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
720 JS_REQUIRES_STACK JSRecordingStatus record_##op();
721 # include "jsopcode.tbl"
722 #undef OPDEF
723 };
724 #define TRACING_ENABLED(cx) JS_HAS_OPTION(cx, JSOPTION_JIT)
725 #define TRACE_RECORDER(cx) (JS_TRACE_MONITOR(cx).recorder)
726 #define SET_TRACE_RECORDER(cx,tr) (JS_TRACE_MONITOR(cx).recorder = (tr))
727
728 #define JSOP_IN_RANGE(op,lo,hi) (uintN((op) - (lo)) <= uintN((hi) - (lo)))
729 #define JSOP_IS_BINARY(op) JSOP_IN_RANGE(op, JSOP_BITOR, JSOP_MOD)
730 #define JSOP_IS_UNARY(op) JSOP_IN_RANGE(op, JSOP_NEG, JSOP_POS)
731 #define JSOP_IS_EQUALITY(op) JSOP_IN_RANGE(op, JSOP_EQ, JSOP_NE)
732
733 #define TRACE_ARGS_(x,args) \
734 JS_BEGIN_MACRO \
735 TraceRecorder* tr_ = TRACE_RECORDER(cx); \
736 if (tr_ && !tr_->wasDeepAborted()) { \
737 JSRecordingStatus status = tr_->record_##x args; \
738 if (STATUS_ABORTS_RECORDING(status)) { \
739 js_AbortRecording(cx, #x); \
740 if (status == JSRS_ERROR) \
741 goto error; \
742 } \
743 JS_ASSERT(status != JSRS_IMACRO); \
744 } \
745 JS_END_MACRO
746
747 #define TRACE_ARGS(x,args) TRACE_ARGS_(x, args)
748 #define TRACE_0(x) TRACE_ARGS(x, ())
749 #define TRACE_1(x,a) TRACE_ARGS(x, (a))
750 #define TRACE_2(x,a,b) TRACE_ARGS(x, (a, b))
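
/*
 * Illustrative sketch (editorial addition, not part of the original file):
 * how the interpreter invokes a recorder hook. TRACE_ARGS_ expects a
 * JSContext* named 'cx' and an 'error:' label in the enclosing function;
 * the function below is hypothetical.
 */
#if 0
static JSBool
ExampleTraceHookCall(JSContext* cx)
{
    /* While recording, forward the operation to the active TraceRecorder. */
    TRACE_0(EnterFrame);                /* calls tr_->record_EnterFrame() */
    return JS_TRUE;

  error:
    return JS_FALSE;
}
#endif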
751
752 extern JS_REQUIRES_STACK bool
753 js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount);
754
755 #ifdef DEBUG
756 # define js_AbortRecording(cx, reason) js_AbortRecordingImpl(cx, reason)
757 #else
758 # define js_AbortRecording(cx, reason) js_AbortRecordingImpl(cx)
759 #endif
760
761 extern JS_REQUIRES_STACK void
762 js_AbortRecording(JSContext* cx, const char* reason);
763
764 extern void
765 js_InitJIT(JSTraceMonitor *tm);
766
767 extern void
768 js_FinishJIT(JSTraceMonitor *tm);
769
770 extern void
771 js_PurgeScriptFragments(JSContext* cx, JSScript* script);
772
773 extern bool
774 js_OverfullFragmento(JSTraceMonitor* tm, nanojit::Fragmento *frago);
775
776 extern void
777 js_PurgeJITOracle();
778
779 extern JSObject *
780 js_GetBuiltinFunction(JSContext *cx, uintN index);
781
782 extern void
783 js_SetMaxCodeCacheBytes(JSContext* cx, uint32 bytes);
784
785 #else /* !JS_TRACER */
786
787 #define TRACE_0(x) ((void)0)
788 #define TRACE_1(x,a) ((void)0)
789 #define TRACE_2(x,a,b) ((void)0)
790
791 #endif /* !JS_TRACER */
792
793 #endif /* jstracer_h___ */
