Annotation of /[jscoverage]/trunk/js/jstracer.h

Revision 332
Thu Oct 23 19:03:33 2008 UTC by siliconforks
File MIME type: text/plain
File size: 15946 byte(s)
Add SpiderMonkey from Firefox 3.1b1.

The following directories and files were removed:
correct/, correct.js
liveconnect/
nanojit/
t/
v8/
vprof/
xpconnect/
all JavaScript files (Y.js, call.js, if.js, math-partial-sums.js, md5.js, perfect.js, trace-test.js, trace.js)


/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=99 ft=cpp:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
 * May 28, 2008.
 *
 * The Initial Developer of the Original Code is
 *   Brendan Eich <brendan@mozilla.org>
 *
 * Contributor(s):
 *   Andreas Gal <gal@mozilla.com>
 *   Mike Shaver <shaver@mozilla.org>
 *   David Anderson <danderson@mozilla.com>
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

#ifndef jstracer_h___
#define jstracer_h___

#ifdef JS_TRACER

#include "jsstddef.h"
#include "jstypes.h"
#include "jslock.h"
#include "jsnum.h"
#include "jsinterp.h"

#include "nanojit/nanojit.h"

/*
 * We use a magic boxed pointer value to represent error conditions that
 * trigger a side exit. The address is so low that it should never actually
 * be in use. If it is, a performance regression occurs, not an actual
 * runtime error.
 */
#define JSVAL_ERROR_COOKIE OBJECT_TO_JSVAL((void*)0x10)

/*
 * We also need a magic unboxed 32-bit integer that signals an error. Again,
 * if this number is hit we experience a performance regression, not a
 * runtime error.
 */
#define INT32_ERROR_COOKIE 0xffffabcd
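
/*
 * Editorial usage sketch (not part of the original header; the builtin name
 * below is hypothetical): a native helper called from trace hands back one
 * of the cookies above instead of reporting an error, and the recorder
 * guards on that value so the trace side-exits rather than failing:
 *
 *   jsval v = sample_traced_builtin(cx, obj);
 *   if (v == JSVAL_ERROR_COOKIE)
 *       ;  // the emitted guard exits the trace here; no runtime error raised
 */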

template <typename T>
class Queue : public GCObject {
    T* _data;
    unsigned _len;
    unsigned _max;

    void ensure(unsigned size) {
        while (_max < size)
            _max <<= 1;
        _data = (T*)realloc(_data, _max * sizeof(T));
    }
public:
    Queue(unsigned max = 16) {
        this->_max = max;
        this->_len = 0;
        this->_data = (T*)malloc(max * sizeof(T));
    }

    ~Queue() {
        free(_data);
    }

    bool contains(T a) {
        for (unsigned n = 0; n < _len; ++n)
            if (_data[n] == a)
                return true;
        return false;
    }

    void add(T a) {
        ensure(_len + 1);
        JS_ASSERT(_len <= _max);
        _data[_len++] = a;
    }

    void add(T* chunk, unsigned size) {
        ensure(_len + size);
        JS_ASSERT(_len <= _max);
        memcpy(&_data[_len], chunk, size * sizeof(T));
        _len += size;
    }

    void addUnique(T a) {
        if (!contains(a))
            add(a);
    }

    void setLength(unsigned len) {
        ensure(len + 1);
        _len = len;
    }

    void clear() {
        _len = 0;
    }

    unsigned length() const {
        return _len;
    }

    T* data() const {
        return _data;
    }
};
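
/*
 * Editorial usage sketch (not part of the original header): Queue is a small
 * growable array whose capacity doubles on demand.
 *
 *   Queue<jsbytecode*> edges;     // room for 16 entries initially
 *   edges.addUnique(pc);          // append pc only if not already present
 *   if (edges.contains(pc))
 *       JS_ASSERT(edges.length() == 1);
 *   edges.clear();                // reset the length, keep the storage
 */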

/*
 * Tracker is used to keep track of values being manipulated by the
 * interpreter during trace recording.
 */
class Tracker {
    struct Page {
        struct Page* next;
        jsuword base;
        nanojit::LIns* map[1];
    };
    struct Page* pagelist;

    jsuword getPageBase(const void* v) const;
    struct Page* findPage(const void* v) const;
    struct Page* addPage(const void* v);
public:
    Tracker();
    ~Tracker();

    bool has(const void* v) const;
    nanojit::LIns* get(const void* v) const;
    void set(const void* v, nanojit::LIns* ins);
    void clear();
};
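
/*
 * Editorial usage sketch (not part of the original header; variable names
 * are hypothetical): the recorder maps the address of an interpreter value
 * to the LIR instruction that computes it.
 *
 *   tracker.set(vp, ins);                 // remember which LIns produced *vp
 *   if (tracker.has(vp)) {
 *       nanojit::LIns* known = tracker.get(vp);   // same instruction back
 *   }
 *   tracker.clear();                      // drop all mappings
 */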

/*
 * The oracle keeps track of slots that should not be demoted to int because
 * we know they overflow or because they result in type-unstable traces. We
 * use a simple hash table. Collisions lead to loss of optimization
 * (demotable slots are not demoted) but have no correctness implications.
 */
#define ORACLE_SIZE 4096

class Oracle {
    avmplus::BitSet _dontDemote;
public:
    void markGlobalSlotUndemotable(JSScript* script, unsigned slot);
    bool isGlobalSlotUndemotable(JSScript* script, unsigned slot) const;
    void markStackSlotUndemotable(JSScript* script, jsbytecode* ip, unsigned slot);
    bool isStackSlotUndemotable(JSScript* script, jsbytecode* ip, unsigned slot) const;
    void clear();
};
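
/*
 * Editorial usage sketch (not part of the original header): when a slot is
 * seen to overflow or to destabilize trace types, the recorder marks it so
 * later recordings keep it as a double.
 *
 *   oracle.markStackSlotUndemotable(script, pc, slot);
 *   if (!oracle.isStackSlotUndemotable(script, pc, slot)) {
 *       // safe to record this slot as an integer
 *   }
 *   // A hash collision can only make a demotable slot look undemotable,
 *   // costing speed but never correctness.
 */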

typedef Queue<uint16> SlotList;

class TypeMap : public Queue<uint8> {
public:
    void captureGlobalTypes(JSContext* cx, SlotList& slots);
    void captureStackTypes(JSContext* cx, unsigned callDepth);
    bool matches(TypeMap& other) const;
};

class TreeInfo MMGC_SUBCLASS_DECL {
    nanojit::Fragment* fragment;
public:
    JSScript* script;
    unsigned maxNativeStackSlots;
    ptrdiff_t nativeStackBase;
    unsigned maxCallDepth;
    TypeMap stackTypeMap;
    unsigned mismatchCount;
    Queue<nanojit::Fragment*> dependentTrees;
    unsigned branchCount;

    TreeInfo(nanojit::Fragment* _fragment) {
        fragment = _fragment;
    }
};

extern struct nanojit::CallInfo builtins[];

class TraceRecorder : public GCObject {
    JSContext* cx;
    JSTraceMonitor* traceMonitor;
    JSObject* globalObj;
    Tracker tracker;
    Tracker nativeFrameTracker;
    char* entryTypeMap;
    unsigned callDepth;
    JSAtom** atoms;
    nanojit::GuardRecord* anchor;
    nanojit::Fragment* fragment;
    TreeInfo* treeInfo;
    nanojit::LirBuffer* lirbuf;
    nanojit::LirWriter* lir;
    nanojit::LirBufWriter* lir_buf_writer;
    nanojit::LirWriter* verbose_filter;
    nanojit::LirWriter* cse_filter;
    nanojit::LirWriter* expr_filter;
    nanojit::LirWriter* func_filter;
#ifdef NJ_SOFTFLOAT
    nanojit::LirWriter* float_filter;
#endif
    nanojit::LIns* cx_ins;
    nanojit::LIns* gp_ins;
    nanojit::LIns* eos_ins;
    nanojit::LIns* eor_ins;
    nanojit::LIns* rval_ins;
    nanojit::LIns* inner_sp_ins;
    nanojit::SideExit exit;
    bool deepAborted;
    bool applyingArguments;
    bool trashTree;
    nanojit::Fragment* whichTreeToTrash;
    Queue<jsbytecode*> inlinedLoopEdges;
    Queue<jsbytecode*> cfgMerges;
    jsval* global_dslots;

    bool isGlobal(jsval* p) const;
    ptrdiff_t nativeGlobalOffset(jsval* p) const;
    ptrdiff_t nativeStackOffset(jsval* p) const;
    void import(nanojit::LIns* base, ptrdiff_t offset, jsval* p, uint8& t,
                const char *prefix, uintN index, JSStackFrame *fp);
    void import(TreeInfo* treeInfo, nanojit::LIns* sp, unsigned ngslots, unsigned callDepth,
                uint8* globalTypeMap, uint8* stackTypeMap);
    void trackNativeStackUse(unsigned slots);

    bool lazilyImportGlobalSlot(unsigned slot);

    nanojit::LIns* guard(bool expected, nanojit::LIns* cond, nanojit::ExitType exitType);
    nanojit::LIns* addName(nanojit::LIns* ins, const char* name);

    nanojit::LIns* get(jsval* p) const;
    nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset);
    void set(jsval* p, nanojit::LIns* l, bool initializing = false);

    bool checkType(jsval& v, uint8 type, bool& recompile);
    bool verifyTypeStability();

    jsval& argval(unsigned n) const;
    jsval& varval(unsigned n) const;
    jsval& stackval(int n) const;

    nanojit::LIns* scopeChain() const;
    bool activeCallOrGlobalSlot(JSObject* obj, jsval*& vp);

    nanojit::LIns* arg(unsigned n);
    void arg(unsigned n, nanojit::LIns* i);
    nanojit::LIns* var(unsigned n);
    void var(unsigned n, nanojit::LIns* i);
    nanojit::LIns* stack(int n);
    void stack(int n, nanojit::LIns* i);

    nanojit::LIns* f2i(nanojit::LIns* f);
    nanojit::LIns* makeNumberInt32(nanojit::LIns* f);

    bool ifop();
    bool switchop();
    bool inc(jsval& v, jsint incr, bool pre = true);
    bool inc(jsval& v, nanojit::LIns*& v_ins, jsint incr, bool pre = true);
    bool incProp(jsint incr, bool pre = true);
    bool incElem(jsint incr, bool pre = true);
    bool incName(jsint incr, bool pre = true);

    enum { CMP_NEGATE = 1, CMP_TRY_BRANCH_AFTER_COND = 2, CMP_CASE = 4, CMP_STRICT = 8 };
    bool cmp(nanojit::LOpcode op, int flags = 0);

    bool unary(nanojit::LOpcode op);
    bool binary(nanojit::LOpcode op);

    bool ibinary(nanojit::LOpcode op);
    bool iunary(nanojit::LOpcode op);
    bool bbinary(nanojit::LOpcode op);
    void demote(jsval& v, jsdouble result);

    bool map_is_native(JSObjectMap* map, nanojit::LIns* map_ins, nanojit::LIns*& ops_ins,
                       size_t op_offset = 0);
    bool test_property_cache(JSObject* obj, nanojit::LIns* obj_ins, JSObject*& obj2,
                             jsuword& pcval);
    bool test_property_cache_direct_slot(JSObject* obj, nanojit::LIns* obj_ins, uint32& slot);
    void stobj_set_slot(nanojit::LIns* obj_ins, unsigned slot,
                        nanojit::LIns*& dslots_ins, nanojit::LIns* v_ins);
    nanojit::LIns* stobj_get_fslot(nanojit::LIns* obj_ins, unsigned slot);
    nanojit::LIns* stobj_get_slot(nanojit::LIns* obj_ins, unsigned slot,
                                  nanojit::LIns*& dslots_ins);
    bool native_set(nanojit::LIns* obj_ins, JSScopeProperty* sprop,
                    nanojit::LIns*& dslots_ins, nanojit::LIns* v_ins);
    bool native_get(nanojit::LIns* obj_ins, nanojit::LIns* pobj_ins, JSScopeProperty* sprop,
                    nanojit::LIns*& dslots_ins, nanojit::LIns*& v_ins);

    bool name(jsval*& vp);
    bool prop(JSObject* obj, nanojit::LIns* obj_ins, uint32& slot, nanojit::LIns*& v_ins);
    bool elem(jsval& oval, jsval& idx, jsval*& vp, nanojit::LIns*& v_ins, nanojit::LIns*& addr_ins);

    bool getProp(JSObject* obj, nanojit::LIns* obj_ins);
    bool getProp(jsval& v);
    bool getThis(nanojit::LIns*& this_ins);

    bool box_jsval(jsval v, nanojit::LIns*& v_ins);
    bool unbox_jsval(jsval v, nanojit::LIns*& v_ins);
    bool guardClass(JSObject* obj, nanojit::LIns* obj_ins, JSClass* clasp);
    bool guardDenseArray(JSObject* obj, nanojit::LIns* obj_ins);
    bool guardDenseArrayIndex(JSObject* obj, jsint idx, nanojit::LIns* obj_ins,
                              nanojit::LIns* dslots_ins, nanojit::LIns* idx_ins);
    bool guardElemOp(JSObject* obj, nanojit::LIns* obj_ins, jsid id, size_t op_offset, jsval* vp);
    void clearFrameSlotsFromCache();
    bool guardShapelessCallee(jsval& callee);
    bool interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc, bool constructing);
    bool forInLoop(jsval* vp);

    void trackCfgMerges(jsbytecode* pc);
    void fuseIf(jsbytecode* pc, bool cond, nanojit::LIns* x);

public:
    friend bool js_MonitorRecording(TraceRecorder* tr);

    TraceRecorder(JSContext* cx, nanojit::GuardRecord*, nanojit::Fragment*, TreeInfo*,
                  unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap,
                  nanojit::GuardRecord* expectedInnerExit);
    ~TraceRecorder();

    uint8 determineSlotType(jsval* vp) const;
    nanojit::SideExit* snapshot(nanojit::ExitType exitType);
    nanojit::Fragment* getFragment() const { return fragment; }
    bool isLoopHeader(JSContext* cx) const;
    void compile(nanojit::Fragmento* fragmento);
    void closeLoop(nanojit::Fragmento* fragmento);
    void endLoop(nanojit::Fragmento* fragmento);
    void blacklist() { fragment->blacklist(); }
    bool adjustCallerTypes(nanojit::Fragment* f);
    bool selectCallablePeerFragment(nanojit::Fragment** first);
    void prepareTreeCall(nanojit::Fragment* inner);
    void emitTreeCall(nanojit::Fragment* inner, nanojit::GuardRecord* lr);
    unsigned getCallDepth() const;
    bool trackLoopEdges();

    bool record_EnterFrame();
    bool record_LeaveFrame();
    bool record_SetPropHit(JSPropCacheEntry* entry, JSScopeProperty* sprop);
    bool record_SetPropMiss(JSPropCacheEntry* entry);
    bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);

    void deepAbort() { deepAborted = true; }
    bool wasDeepAborted() { return deepAborted; }

#define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format)               \
    bool record_##op();
# include "jsopcode.tbl"
#undef OPDEF
};

#define TRACING_ENABLED(cx)       JS_HAS_OPTION(cx, JSOPTION_JIT)
#define TRACE_RECORDER(cx)        (JS_TRACE_MONITOR(cx).recorder)
#define SET_TRACE_RECORDER(cx,tr) (JS_TRACE_MONITOR(cx).recorder = (tr))

// See jsinterp.cpp for the ENABLE_TRACER definition.
#define RECORD_ARGS(x,args)                                                   \
    JS_BEGIN_MACRO                                                            \
        TraceRecorder* tr_ = TRACE_RECORDER(cx);                              \
        if (!js_MonitorRecording(tr_))                                        \
            ENABLE_TRACER(0);                                                 \
        else                                                                  \
            TRACE_ARGS_(tr_,x,args);                                          \
    JS_END_MACRO

#define TRACE_ARGS_(tr,x,args)                                                \
    JS_BEGIN_MACRO                                                            \
        if (!tr->record_##x args) {                                           \
            js_AbortRecording(cx, NULL, #x);                                  \
            ENABLE_TRACER(0);                                                 \
        }                                                                     \
    JS_END_MACRO

#define TRACE_ARGS(x,args)                                                    \
    JS_BEGIN_MACRO                                                            \
        TraceRecorder* tr_ = TRACE_RECORDER(cx);                              \
        if (tr_)                                                              \
            TRACE_ARGS_(tr_, x, args);                                        \
    JS_END_MACRO

#define RECORD(x) RECORD_ARGS(x, ())
#define TRACE_0(x) TRACE_ARGS(x, ())
#define TRACE_1(x,a) TRACE_ARGS(x, (a))
#define TRACE_2(x,a,b) TRACE_ARGS(x, (a, b))
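
/*
 * Editorial usage sketch (not part of the original header; the real call
 * sites live in jsinterp.cpp): for an opcode such as JSOP_NOT,
 *
 *   RECORD(JSOP_NOT);
 *
 * first asks js_MonitorRecording whether recording should continue; if so,
 * TRACE_ARGS_ calls record_JSOP_NOT() on the active TraceRecorder. A false
 * return from the recorder hook aborts recording via js_AbortRecording and
 * switches dispatch back with ENABLE_TRACER(0).
 */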

extern bool
js_MonitorLoopEdge(JSContext* cx, jsbytecode* oldpc, uintN& inlineCallCount);

extern bool
js_MonitorRecording(TraceRecorder *tr);

extern void
js_AbortRecording(JSContext* cx, jsbytecode* abortpc, const char* reason);

extern void
js_InitJIT(JSTraceMonitor *tm);

extern void
js_FinishJIT(JSTraceMonitor *tm);

extern void
js_FlushJITCache(JSContext* cx);

extern void
js_FlushJITOracle(JSContext* cx);

#else /* !JS_TRACER */

#define RECORD(x) ((void)0)
#define TRACE_0(x) ((void)0)
#define TRACE_1(x,a) ((void)0)
#define TRACE_2(x,a,b) ((void)0)

#endif /* !JS_TRACER */

#endif /* jstracer_h___ */
