1 |
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
2 |
* vim: set ts=8 sw=4 et tw=79: |
3 |
* |
4 |
* ***** BEGIN LICENSE BLOCK ***** |
5 |
* Version: MPL 1.1/GPL 2.0/LGPL 2.1 |
6 |
* |
7 |
* The contents of this file are subject to the Mozilla Public License Version |
8 |
* 1.1 (the "License"); you may not use this file except in compliance with |
9 |
* the License. You may obtain a copy of the License at |
10 |
* http://www.mozilla.org/MPL/ |
11 |
* |
12 |
* Software distributed under the License is distributed on an "AS IS" basis, |
13 |
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License |
14 |
* for the specific language governing rights and limitations under the |
15 |
* License. |
16 |
* |
17 |
* The Original Code is Mozilla Communicator client code, released |
18 |
* March 31, 1998. |
19 |
* |
20 |
* The Initial Developer of the Original Code is |
21 |
* Netscape Communications Corporation. |
22 |
* Portions created by the Initial Developer are Copyright (C) 1998 |
23 |
* the Initial Developer. All Rights Reserved. |
24 |
* |
25 |
* Contributor(s): |
26 |
* |
27 |
* Alternatively, the contents of this file may be used under the terms of |
28 |
* either of the GNU General Public License Version 2 or later (the "GPL"), |
29 |
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), |
30 |
* in which case the provisions of the GPL or the LGPL are applicable instead |
31 |
* of those above. If you wish to allow use of your version of this file only |
32 |
* under the terms of either the GPL or the LGPL, and not to allow others to |
33 |
* use your version of this file under the terms of the MPL, indicate your |
34 |
* decision by deleting the provisions above and replace them with the notice |
35 |
* and other provisions required by the GPL or the LGPL. If you do not delete |
36 |
* the provisions above, a recipient may use your version of this file under |
37 |
* the terms of any one of the MPL, the GPL or the LGPL. |
38 |
* |
39 |
* ***** END LICENSE BLOCK ***** */ |
40 |
|
41 |
/* |
42 |
* JavaScript bytecode interpreter. |
43 |
*/ |
44 |
#include <stdio.h> |
45 |
#include <string.h> |
46 |
#include <math.h> |
47 |
#include "jstypes.h" |
48 |
#include "jsstdint.h" |
49 |
#include "jsarena.h" /* Added by JSIFY */ |
50 |
#include "jsutil.h" /* Added by JSIFY */ |
51 |
#include "jsprf.h" |
52 |
#include "jsapi.h" |
53 |
#include "jsarray.h" |
54 |
#include "jsatom.h" |
55 |
#include "jsbool.h" |
56 |
#include "jscntxt.h" |
57 |
#include "jsdate.h" |
58 |
#include "jsversion.h" |
59 |
#include "jsdbgapi.h" |
60 |
#include "jsfun.h" |
61 |
#include "jsgc.h" |
62 |
#include "jsinterp.h" |
63 |
#include "jsiter.h" |
64 |
#include "jslock.h" |
65 |
#include "jsnum.h" |
66 |
#include "jsobj.h" |
67 |
#include "jsopcode.h" |
68 |
#include "jsscan.h" |
69 |
#include "jsscope.h" |
70 |
#include "jsscript.h" |
71 |
#include "jsstr.h" |
72 |
#include "jsstaticcheck.h" |
73 |
#include "jstracer.h" |
74 |
#include "jslibmath.h" |
75 |
#include "jsvector.h" |
76 |
#include "jsstrinlines.h" |
77 |
|
78 |
#ifdef INCLUDE_MOZILLA_DTRACE |
79 |
#include "jsdtracef.h" |
80 |
#endif |
81 |
|
82 |
#if JS_HAS_XML_SUPPORT |
83 |
#include "jsxml.h" |
84 |
#endif |
85 |
|
86 |
#include "jsatominlines.h" |
87 |
#include "jsscriptinlines.h" |
88 |
|
89 |
#include "jsautooplen.h" |
90 |
|
91 |
/* jsinvoke_cpp___ indicates inclusion from jsinvoke.cpp. */ |
92 |
#if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___ |
93 |
|
94 |
/*
 * Fill the per-thread property cache with an entry for the property lookup
 * that just completed at the current pc.
 *
 *   obj        - object where the lookup started.
 *   scopeIndex - parent-chain hops from obj to the object owning the scope.
 *   protoIndex - proto-chain hops from there to pobj (re-counted below when
 *                non-zero, since hooks may have changed the proto chain).
 *   pobj       - object whose scope actually contains sprop.
 *   sprop      - the property found; must still be in pobj's scope.
 *   adding     - true when the caller has just added sprop to pobj's scope.
 *
 * Returns the entry written, or JS_NO_PROP_CACHE_FILL when the result is not
 * cacheable (cache disabled, eval frame, stale sprop, non-native or overlong
 * chains, shape-generation overflow).
 */
JS_REQUIRES_STACK JSPropCacheEntry *
js_FillPropertyCache(JSContext *cx, JSObject *obj,
                     uintN scopeIndex, uintN protoIndex, JSObject *pobj,
                     JSScopeProperty *sprop, JSBool adding)
{
    JSPropertyCache *cache;
    jsbytecode *pc;
    JSScope *scope;
    jsuword kshape, vshape, khash;
    JSOp op;
    const JSCodeSpec *cs;
    jsuword vword;
    ptrdiff_t pcoff;
    JSAtom *atom;
    JSPropCacheEntry *entry;

    JS_ASSERT(!cx->runtime->gcRunning);
    cache = &JS_PROPERTY_CACHE(cx);

    /* FIXME bug 489098: consider enabling the property cache for eval. */
    if (js_IsPropertyCacheDisabled(cx) || (cx->fp->flags & JSFRAME_EVAL)) {
        PCMETER(cache->disfills++);
        return JS_NO_PROP_CACHE_FILL;
    }

    /*
     * Check for fill from js_SetPropertyHelper where the setter removed sprop
     * from pobj's scope (via unwatch or delete, e.g.).
     */
    scope = OBJ_SCOPE(pobj);
    if (!scope->has(sprop)) {
        PCMETER(cache->oddfills++);
        return JS_NO_PROP_CACHE_FILL;
    }

    /*
     * Check for overdeep scope and prototype chain. Because resolve, getter,
     * and setter hooks can change the prototype chain using JS_SetPrototype
     * after js_LookupPropertyWithFlags has returned the nominal protoIndex,
     * we have to validate protoIndex if it is non-zero. If it is zero, then
     * we know thanks to the scope->has test above, combined with the fact that
     * obj == pobj, that protoIndex is invariant.
     *
     * The scopeIndex can't be wrong. We require JS_SetParent calls to happen
     * before any running script might consult a parent-linked scope chain. If
     * this requirement is not satisfied, the fill in progress will never hit,
     * but vcap vs. scope shape tests ensure nothing malfunctions.
     */
    JS_ASSERT_IF(scopeIndex == 0 && protoIndex == 0, obj == pobj);

    if (protoIndex != 0) {
        JSObject *tmp = obj;

        /* Hop to the scope holder, then re-count protoIndex from scratch. */
        for (uintN i = 0; i != scopeIndex; i++)
            tmp = OBJ_GET_PARENT(cx, tmp);
        JS_ASSERT(tmp != pobj);

        protoIndex = 1;
        for (;;) {
            tmp = OBJ_GET_PROTO(cx, tmp);

            /*
             * We cannot cache properties coming from native objects behind
             * non-native ones on the prototype chain. The non-natives can
             * mutate in arbitrary way without changing any shapes.
             */
            if (!tmp || !OBJ_IS_NATIVE(tmp)) {
                PCMETER(cache->noprotos++);
                return JS_NO_PROP_CACHE_FILL;
            }
            if (tmp == pobj)
                break;
            ++protoIndex;
        }
    }

    /* Both indexes must fit in their vcap bit fields; give up otherwise. */
    if (scopeIndex > PCVCAP_SCOPEMASK || protoIndex > PCVCAP_PROTOMASK) {
        PCMETER(cache->longchains++);
        return JS_NO_PROP_CACHE_FILL;
    }

    /*
     * Optimize the cached vword based on our parameters and the current pc's
     * opcode format flags.
     */
    pc = cx->fp->regs->pc;
    op = js_GetOpcode(cx, cx->fp->script, pc);
    cs = &js_CodeSpec[op];

    /* kshape == 0 below means "not yet chosen"; see the fallback after the loop. */
    kshape = 0;

    do {
        /*
         * Check for a prototype "plain old method" callee computation. What
         * is a plain old method? It's a function-valued property with stub
         * getter, so get of a function is idempotent.
         */
        if ((cs->format & JOF_CALLOP) &&
            SPROP_HAS_STUB_GETTER(sprop) &&
            SPROP_HAS_VALID_SLOT(sprop, scope)) {
            jsval v;

            v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot);
            if (VALUE_IS_FUNCTION(cx, v)) {
                /*
                 * Great, we have a function-valued prototype property where
                 * the getter is JS_PropertyStub. The type id in pobj's scope
                 * does not evolve with changes to property values, however.
                 *
                 * So here, on first cache fill for this method, we brand the
                 * scope with a new shape and set the SCOPE_BRANDED flag. Once
                 * this scope flag is set, any write to a function-valued plain
                 * old property in pobj will result in shape being regenerated.
                 */
                if (!scope->branded()) {
                    PCMETER(cache->brandfills++);
#ifdef DEBUG_notme
                    fprintf(stderr,
                            "branding %p (%s) for funobj %p (%s), shape %lu\n",
                            pobj, pobj->getClass()->name,
                            JSVAL_TO_OBJECT(v),
                            JS_GetFunctionName(GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(v))),
                            OBJ_SHAPE(obj));
#endif
                    scope->brandingShapeChange(cx, sprop->slot, v);
                    if (js_IsPropertyCacheDisabled(cx))  /* check for rt->shapeGen overflow */
                        return JS_NO_PROP_CACHE_FILL;
                    scope->setBranded();
                }
                vword = JSVAL_OBJECT_TO_PCVAL(v);
                break;
            }
        }

        /* If getting a value via a stub getter, we can cache the slot. */
        if (!(cs->format & (JOF_SET | JOF_INCDEC | JOF_FOR)) &&
            SPROP_HAS_STUB_GETTER(sprop) &&
            SPROP_HAS_VALID_SLOT(sprop, scope)) {
            /* Great, let's cache sprop's slot and use it on cache hit. */
            vword = SLOT_TO_PCVAL(sprop->slot);
        } else {
            /* Best we can do is to cache sprop (still a nice speedup). */
            vword = SPROP_TO_PCVAL(sprop);
            if (adding &&
                sprop == scope->lastProp &&
                scope->shape == sprop->shape) {
                /*
                 * Our caller added a new property. We also know that a setter
                 * that js_NativeSet could have run has not mutated the scope
                 * so the added property is still the last one added and the
                 * scope is not branded.
                 *
                 * We want to cache under scope's shape before the property
                 * addition to bias for the case when the mutator opcode
                 * always adds the same property. It allows to optimize
                 * periodic execution of object initializers or explicit
                 * initialization sequences like
                 *
                 *   obj = {}; obj.x = 1; obj.y = 2;
                 *
                 * We assume that on average the win from this optimization is
                 * bigger than the cost of an extra mismatch per loop due to
                 * the bias for the following case:
                 *
                 *   obj = {}; ... for (...) { ... obj.x = ... }
                 *
                 * On the first iteration JSOP_SETPROP fills the cache with
                 * the shape of newly created object, not the shape after
                 * obj.x is assigned. That mismatches obj's shape on the
                 * second iteration. Note that on third and the following
                 * iterations the cache will be hit since the shape no longer
                 * mutates.
                 */
                JS_ASSERT(scope->owned());
                if (sprop->parent) {
                    kshape = sprop->parent->shape;
                } else {
                    /*
                     * If obj had its own empty scope before, with a unique
                     * shape, that is lost. Here we only attempt to find a
                     * matching empty scope. In unusual cases involving
                     * __proto__ assignment we may not find one.
                     */
                    JSObject *proto = STOBJ_GET_PROTO(obj);
                    if (!proto || !OBJ_IS_NATIVE(proto))
                        return JS_NO_PROP_CACHE_FILL;
                    JSScope *protoscope = OBJ_SCOPE(proto);
                    if (!protoscope->emptyScope ||
                        protoscope->emptyScope->clasp != obj->getClass()) {
                        return JS_NO_PROP_CACHE_FILL;
                    }
                    kshape = protoscope->emptyScope->shape;
                }

                /*
                 * When adding we predict no prototype object will later gain a
                 * readonly property or setter.
                 */
                vshape = cx->runtime->protoHazardShape;
            }
        }
    } while (0);

    /*
     * Default key/value shapes. Note the invariant: kshape is non-zero only
     * when the "adding" branch above ran, and that branch also set vshape, so
     * vshape is never read uninitialized here.
     */
    if (kshape == 0) {
        kshape = OBJ_SHAPE(obj);
        vshape = scope->shape;
    }

    khash = PROPERTY_CACHE_HASH_PC(pc, kshape);
    if (obj == pobj) {
        JS_ASSERT(scopeIndex == 0 && protoIndex == 0);
    } else {
        if (op == JSOP_LENGTH) {
            atom = cx->runtime->atomState.lengthAtom;
        } else {
            pcoff = (JOF_TYPE(cs->format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
            GET_ATOM_FROM_BYTECODE(cx->fp->script, pc, pcoff, atom);
        }

#ifdef DEBUG
        if (scopeIndex == 0) {
            JS_ASSERT(protoIndex != 0);
            JS_ASSERT((protoIndex == 1) == (OBJ_GET_PROTO(cx, obj) == pobj));
        }
#endif

        if (scopeIndex != 0 || protoIndex != 1) {
            /*
             * Deep hit: key the entry by (atom, obj) instead of (pc, shape).
             * The kpc and kshape fields are reused to hold the atom and the
             * object pointer, matching the probe in js_FullTestPropertyCache.
             */
            khash = PROPERTY_CACHE_HASH_ATOM(atom, obj);
            PCMETER(if (PCVCAP_TAG(cache->table[khash].vcap) <= 1)
                        cache->pcrecycles++);
            pc = (jsbytecode *) atom;
            kshape = (jsuword) obj;

            /*
             * Make sure that a later shadowing assignment will enter
             * PurgeProtoChain and invalidate this entry, bug 479198.
             *
             * This is thread-safe even though obj is not locked. Only the
             * DELEGATE bit of obj->classword can change at runtime, given that
             * obj is native; and the bit is only set, never cleared. And on
             * platforms where another CPU can fail to see this write, it's OK
             * because the property cache and JIT cache are thread-local.
             */
            obj->setDelegate();
        }
    }

    /* Write the entry; overwriting a live entry counts as a recycle. */
    entry = &cache->table[khash];
    PCMETER(PCVAL_IS_NULL(entry->vword) || cache->recycles++);
    entry->kpc = pc;
    entry->kshape = kshape;
    entry->vcap = PCVCAP_MAKE(vshape, scopeIndex, protoIndex);
    entry->vword = vword;

    cache->empty = JS_FALSE;
    PCMETER(cache->fills++);

    /*
     * The modfills counter is not exact. It increases if a getter or setter
     * recurse into the interpreter.
     */
    PCMETER(entry == cache->pctestentry || cache->modfills++);
    PCMETER(cache->pctestentry = NULL);
    return entry;
}
358 |
|
359 |
/*
 * Slow-path test of a property cache entry, used after the inline (pc, shape)
 * probe misses: re-probe by (atom, obj) to catch entries filled for deep
 * scope/proto hits (see js_FillPropertyCache, which stores the atom in kpc
 * and the object pointer in kshape for such entries).
 *
 * On a hit, walks pobj up the parent chain (for name ops) and proto chain as
 * prescribed by the entry's vcap, locks pobj if its shape still matches, and
 * returns NULL with *objp/*pobjp/*entryp set. On any miss, returns the atom
 * being looked up so the caller can fall back to a full lookup.
 */
JS_REQUIRES_STACK JSAtom *
js_FullTestPropertyCache(JSContext *cx, jsbytecode *pc,
                         JSObject **objp, JSObject **pobjp,
                         JSPropCacheEntry **entryp)
{
    JSOp op;
    const JSCodeSpec *cs;
    ptrdiff_t pcoff;
    JSAtom *atom;
    JSObject *obj, *pobj, *tmp;
    JSPropCacheEntry *entry;
    uint32 vcap;

    /* pc (or the outer pc when in an imacro) must lie inside the script. */
    JS_ASSERT(uintN((cx->fp->imacpc ? cx->fp->imacpc : pc) - cx->fp->script->code)
              < cx->fp->script->length);

    op = js_GetOpcode(cx, cx->fp->script, pc);
    cs = &js_CodeSpec[op];
    if (op == JSOP_LENGTH) {
        atom = cx->runtime->atomState.lengthAtom;
    } else {
        pcoff = (JOF_TYPE(cs->format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
        GET_ATOM_FROM_BYTECODE(cx->fp->script, pc, pcoff, atom);
    }

    obj = *objp;
    JS_ASSERT(OBJ_IS_NATIVE(obj));
    entry = &JS_PROPERTY_CACHE(cx).table[PROPERTY_CACHE_HASH_ATOM(atom, obj)];
    *entryp = entry;
    vcap = entry->vcap;

    /* Deep-hit entries store the atom pointer in kpc; mismatch => id miss. */
    if (entry->kpc != (jsbytecode *) atom) {
        PCMETER(JS_PROPERTY_CACHE(cx).idmisses++);

#ifdef DEBUG_notme
        entry = &JS_PROPERTY_CACHE(cx).table[PROPERTY_CACHE_HASH_PC(pc, OBJ_SHAPE(obj))];
        fprintf(stderr,
                "id miss for %s from %s:%u"
                " (pc %u, kpc %u, kshape %u, shape %u)\n",
                js_AtomToPrintableString(cx, atom),
                cx->fp->script->filename,
                js_PCToLineNumber(cx, cx->fp->script, pc),
                pc - cx->fp->script->code,
                entry->kpc - cx->fp->script->code,
                entry->kshape,
                OBJ_SHAPE(obj));
                js_Disassemble1(cx, cx->fp->script, pc,
                                pc - cx->fp->script->code,
                                JS_FALSE, stderr);
#endif

        return atom;
    }

    /* Likewise, kshape here holds the object pointer, not a shape number. */
    if (entry->kshape != (jsuword) obj) {
        PCMETER(JS_PROPERTY_CACHE(cx).komisses++);
        return atom;
    }

    pobj = obj;

    if (JOF_MODE(cs->format) == JOF_NAME) {
        /* Consume the scope-hop count packed above PCVCAP_PROTOBITS. */
        while (vcap & (PCVCAP_SCOPEMASK << PCVCAP_PROTOBITS)) {
            tmp = OBJ_GET_PARENT(cx, pobj);
            if (!tmp || !OBJ_IS_NATIVE(tmp))
                break;
            pobj = tmp;
            vcap -= PCVCAP_PROTOSIZE;
        }

        *objp = pobj;
    }

    /* Then consume the proto-hop count in the low bits. */
    while (vcap & PCVCAP_PROTOMASK) {
        tmp = OBJ_GET_PROTO(cx, pobj);
        if (!tmp || !OBJ_IS_NATIVE(tmp))
            break;
        pobj = tmp;
        --vcap;
    }

    /* Final validation: lock pobj only if its shape still matches vcap's. */
    if (JS_LOCK_OBJ_IF_SHAPE(cx, pobj, PCVCAP_SHAPE(vcap))) {
#ifdef DEBUG
        jsid id = ATOM_TO_JSID(atom);

        id = js_CheckForStringIndex(id);
        JS_ASSERT(OBJ_SCOPE(pobj)->lookup(id));
        JS_ASSERT_IF(OBJ_SCOPE(pobj)->object, OBJ_SCOPE(pobj)->object == pobj);
#endif
        *pobjp = pobj;
        return NULL;
    }

    PCMETER(JS_PROPERTY_CACHE(cx).vcmisses++);
    return atom;
}
455 |
|
456 |
#ifdef DEBUG
/*
 * Debug-only check that a property cache flagged empty really has every
 * entry zeroed. Compiles to nothing in release builds.
 */
#define ASSERT_CACHE_IS_EMPTY(cache)                                          \
    JS_BEGIN_MACRO                                                            \
        JSPropertyCache *cache_ = (cache);                                    \
        uintN i_;                                                             \
        JS_ASSERT(cache_->empty);                                             \
        for (i_ = 0; i_ < PROPERTY_CACHE_SIZE; i_++) {                        \
            JS_ASSERT(!cache_->table[i_].kpc);                                \
            JS_ASSERT(!cache_->table[i_].kshape);                             \
            JS_ASSERT(!cache_->table[i_].vcap);                               \
            JS_ASSERT(!cache_->table[i_].vword);                              \
        }                                                                     \
    JS_END_MACRO
#else
#define ASSERT_CACHE_IS_EMPTY(cache) ((void)0)
#endif

/* memset(0) in js_PurgePropertyCache relies on the null pcval being all-zero. */
JS_STATIC_ASSERT(PCVAL_NULL == 0);
474 |
|
475 |
/*
 * Flush every entry in cache. No-op (beyond a debug consistency check) when
 * the cache is already empty. With JS_PROPERTY_CACHE_METERING defined, also
 * appends the accumulated counters to /tmp/propcache.stats; the static FILE*
 * is opened once and deliberately never closed (flushed on each purge).
 */
void
js_PurgePropertyCache(JSContext *cx, JSPropertyCache *cache)
{
    if (cache->empty) {
        ASSERT_CACHE_IS_EMPTY(cache);
        return;
    }

    /* Valid because PCVAL_NULL == 0 (static-asserted above). */
    memset(cache->table, 0, sizeof cache->table);
    cache->empty = JS_TRUE;

#ifdef JS_PROPERTY_CACHE_METERING
  { static FILE *fp;
    if (!fp)
        fp = fopen("/tmp/propcache.stats", "w");
    if (fp) {
        fputs("Property cache stats for ", fp);
#ifdef JS_THREADSAFE
        fprintf(fp, "thread %lu, ", (unsigned long) cx->thread->id);
#endif
        fprintf(fp, "GC %u\n", cx->runtime->gcNumber);

# define P(mem) fprintf(fp, "%11s %10lu\n", #mem, (unsigned long)cache->mem)
        P(fills);
        P(nofills);
        P(rofills);
        P(disfills);
        P(oddfills);
        P(modfills);
        P(brandfills);
        P(noprotos);
        P(longchains);
        P(recycles);
        P(pcrecycles);
        P(tests);
        P(pchits);
        P(protopchits);
        P(initests);
        P(inipchits);
        P(inipcmisses);
        P(settests);
        P(addpchits);
        P(setpchits);
        P(setpcmisses);
        P(slotchanges);
        P(setmisses);
        P(idmisses);
        P(komisses);
        P(vcmisses);
        P(misses);
        P(flushes);
        P(pcpurges);
# undef P

        fprintf(fp, "hit rates: pc %g%% (proto %g%%), set %g%%, ini %g%%, full %g%%\n",
                (100. * cache->pchits) / cache->tests,
                (100. * cache->protopchits) / cache->tests,
                (100. * (cache->addpchits + cache->setpchits))
                / cache->settests,
                (100. * cache->inipchits) / cache->initests,
                (100. * (cache->tests - cache->misses)) / cache->tests);
        fflush(fp);
    }
  }
#endif

    PCMETER(cache->flushes++);
}
543 |
|
544 |
void |
545 |
js_PurgePropertyCacheForScript(JSContext *cx, JSScript *script) |
546 |
{ |
547 |
JSPropertyCache *cache; |
548 |
JSPropCacheEntry *entry; |
549 |
|
550 |
cache = &JS_PROPERTY_CACHE(cx); |
551 |
for (entry = cache->table; entry < cache->table + PROPERTY_CACHE_SIZE; |
552 |
entry++) { |
553 |
if (JS_UPTRDIFF(entry->kpc, script->code) < script->length) { |
554 |
entry->kpc = NULL; |
555 |
entry->kshape = 0; |
556 |
#ifdef DEBUG |
557 |
entry->vcap = entry->vword = 0; |
558 |
#endif |
559 |
} |
560 |
} |
561 |
} |
562 |
|
563 |
/*
 * Check if the current arena has enough space to fit nslots after sp and, if
 * so, reserve the necessary space.
 *
 * sp must lie within the current stackPool arena (asserted). Returns JS_TRUE
 * when the nslots values following sp are usable, JS_FALSE when the arena
 * cannot hold them (caller must fall back to another allocation path).
 */
static JS_REQUIRES_STACK JSBool
AllocateAfterSP(JSContext *cx, jsval *sp, uintN nslots)
{
    uintN surplus;
    jsval *sp2;

    JS_ASSERT((jsval *) cx->stackPool.current->base <= sp);
    JS_ASSERT(sp <= (jsval *) cx->stackPool.current->avail);

    /* Slots already allocated (avail) beyond sp that we can reuse for free. */
    surplus = (jsval *) cx->stackPool.current->avail - sp;
    if (nslots <= surplus)
        return JS_TRUE;

    /*
     * No room before current->avail, check if the arena has enough space to
     * fit the missing slots before the limit.
     */
    if (nslots > (size_t) ((jsval *) cx->stackPool.current->limit - sp))
        return JS_FALSE;

    /* Bump avail by exactly the shortfall; allocation must be contiguous. */
    JS_ARENA_ALLOCATE_CAST(sp2, jsval *, &cx->stackPool,
                           (nslots - surplus) * sizeof(jsval));
    JS_ASSERT(sp2 == sp + surplus);
    return JS_TRUE;
}
591 |
|
592 |
/*
 * Allocate nslots raw jsval slots from cx->stackPool. On first use of the
 * pool, an int64 timestamp is allocated and stamped at the pool's base
 * (presumably for stack-age diagnostics — the consumer is not visible here).
 * If markp is non-null, *markp receives an arena mark taken before the slot
 * allocation, suitable for js_FreeRawStack. Returns NULL (after reporting
 * over-quota) on failure.
 */
JS_STATIC_INTERPRET JS_REQUIRES_STACK jsval *
js_AllocRawStack(JSContext *cx, uintN nslots, void **markp)
{
    jsval *sp;

    JS_ASSERT(nslots != 0);
    JS_ASSERT_NOT_ON_TRACE(cx);

    if (!cx->stackPool.first.next) {
        int64 *timestamp;

        JS_ARENA_ALLOCATE_CAST(timestamp, int64 *,
                               &cx->stackPool, sizeof *timestamp);
        if (!timestamp) {
            js_ReportOutOfScriptQuota(cx);
            return NULL;
        }
        *timestamp = JS_Now();
    }

    /* Take the mark before allocating so the release rewinds past our slots. */
    if (markp)
        *markp = JS_ARENA_MARK(&cx->stackPool);
    JS_ARENA_ALLOCATE_CAST(sp, jsval *, &cx->stackPool, nslots * sizeof(jsval));
    if (!sp)
        js_ReportOutOfScriptQuota(cx);
    return sp;
}
619 |
|
620 |
/*
 * Release raw stack space back to cx->stackPool, rewinding to the mark that
 * js_AllocRawStack returned. Must balance a prior js_AllocRawStack call.
 */
JS_STATIC_INTERPRET JS_REQUIRES_STACK void
js_FreeRawStack(JSContext *cx, void *mark)
{
    JS_ARENA_RELEASE(&cx->stackPool, mark);
}
625 |
|
626 |
/*
 * Allocate nslots GC-scannable stack slots, maintaining the cx->stackHeaders
 * segment list so the GC can find live segments. The returned slots are
 * zeroed (JSVAL_NULL). *markp receives the value to pass to js_FreeStack;
 * for nslots == 0 that mark is NULL and js_FreeStack treats it as a no-op.
 * Returns NULL on over-quota failure.
 */
JS_REQUIRES_STACK JS_FRIEND_API(jsval *)
js_AllocStack(JSContext *cx, uintN nslots, void **markp)
{
    jsval *sp;
    JSArena *a;
    JSStackHeader *sh;

    /* Callers don't check for zero nslots: we do to avoid empty segments. */
    if (nslots == 0) {
        *markp = NULL;
        return (jsval *) JS_ARENA_MARK(&cx->stackPool);
    }

    /* Allocate 2 extra slots for the stack segment header we'll likely need. */
    sp = js_AllocRawStack(cx, 2 + nslots, markp);
    if (!sp)
        return NULL;

    /* Try to avoid another header if we can piggyback on the last segment. */
    a = cx->stackPool.current;
    sh = cx->stackHeaders;
    if (sh && JS_STACK_SEGMENT(sh) + sh->nslots == sp) {
        /* Extend the last stack segment, give back the 2 header slots. */
        sh->nslots += nslots;
        a->avail -= 2 * sizeof(jsval);
    } else {
        /*
         * Need a new stack segment, so allocate and push a stack segment
         * header from the 2 extra slots.
         */
        sh = (JSStackHeader *)sp;
        sh->nslots = nslots;
        sh->down = cx->stackHeaders;
        cx->stackHeaders = sh;
        sp += 2;
    }

    /*
     * Store JSVAL_NULL using memset, to let compilers optimize as they see
     * fit, in case a caller allocates and pushes GC-things one by one, which
     * could nest a last-ditch GC that will scan this segment.
     */
    memset(sp, 0, nslots * sizeof(jsval));
    return sp;
}
671 |
|
672 |
/*
 * Free stack slots allocated by js_AllocStack, restoring the segment-header
 * list and releasing arena space back to the mark. mark == NULL corresponds
 * to the nslots == 0 case of js_AllocStack and is a no-op.
 */
JS_REQUIRES_STACK JS_FRIEND_API(void)
js_FreeStack(JSContext *cx, void *mark)
{
    JSStackHeader *sh;
    jsuword slotdiff;

    /* Check for zero nslots allocation special case. */
    if (!mark)
        return;

    /* We can assert because js_FreeStack always balances js_AllocStack. */
    sh = cx->stackHeaders;
    JS_ASSERT(sh);

    /* If mark is in the current segment, reduce sh->nslots, else pop sh. */
    slotdiff = JS_UPTRDIFF(mark, JS_STACK_SEGMENT(sh)) / sizeof(jsval);
    if (slotdiff < (jsuword)sh->nslots)
        sh->nslots = slotdiff;
    else
        cx->stackHeaders = sh->down;

    /* Release the stackPool space allocated since mark was set. */
    JS_ARENA_RELEASE(&cx->stackPool, mark);
}
696 |
|
697 |
/*
 * Return the scope chain for fp, reflecting any compiler-allocated blocks
 * (fp->blockChain) into real, cloned block objects linked onto
 * fp->scopeChain. With no blockChain, fp->scopeChain is returned unchanged.
 * May create fp's call object for heavyweight-style frames. Returns NULL on
 * OOM. Cannot run on trace (calls js_LeaveTrace).
 */
JSObject *
js_GetScopeChain(JSContext *cx, JSStackFrame *fp)
{
    JSObject *sharedBlock = fp->blockChain;

    if (!sharedBlock) {
        /*
         * Don't force a call object for a lightweight function call, but do
         * insist that there is a call object for a heavyweight function call.
         */
        JS_ASSERT(!fp->fun ||
                  !(fp->fun->flags & JSFUN_HEAVYWEIGHT) ||
                  fp->callobj);
        JS_ASSERT(fp->scopeChain);
        return fp->scopeChain;
    }

    /* We don't handle cloning blocks on trace. */
    js_LeaveTrace(cx);

    /*
     * We have one or more lexical scopes to reflect into fp->scopeChain, so
     * make sure there's a call object at the current head of the scope chain,
     * if this frame is a call frame.
     *
     * Also, identify the innermost compiler-allocated block we needn't clone.
     */
    JSObject *limitBlock, *limitClone;
    if (fp->fun && !fp->callobj) {
        JS_ASSERT(OBJ_GET_CLASS(cx, fp->scopeChain) != &js_BlockClass ||
                  fp->scopeChain->getPrivate() != fp);
        if (!js_GetCallObject(cx, fp))
            return NULL;

        /* We know we must clone everything on blockChain. */
        limitBlock = limitClone = NULL;
    } else {
        /*
         * scopeChain includes all blocks whose static scope we're within that
         * have already been cloned. Find the innermost such block. Its
         * prototype should appear on blockChain; we'll clone blockChain up
         * to, but not including, that prototype.
         */
        limitClone = fp->scopeChain;
        while (OBJ_GET_CLASS(cx, limitClone) == &js_WithClass)
            limitClone = OBJ_GET_PARENT(cx, limitClone);
        JS_ASSERT(limitClone);

        /*
         * It may seem like we don't know enough about limitClone to be able
         * to just grab its prototype as we do here, but it's actually okay.
         *
         * If limitClone is a block object belonging to this frame, then its
         * prototype is the innermost entry in blockChain that we have already
         * cloned, and is thus the place to stop when we clone below.
         *
         * Otherwise, there are no blocks for this frame on scopeChain, and we
         * need to clone the whole blockChain. In this case, limitBlock can
         * point to any object known not to be on blockChain, since we simply
         * loop until we hit limitBlock or NULL. If limitClone is a block, it
         * isn't a block from this function, since blocks can't be nested
         * within themselves on scopeChain (recursion is dynamic nesting, not
         * static nesting). If limitClone isn't a block, its prototype won't
         * be a block either. So we can just grab limitClone's prototype here
         * regardless of its type or which frame it belongs to.
         */
        limitBlock = OBJ_GET_PROTO(cx, limitClone);

        /* If the innermost block has already been cloned, we are done. */
        if (limitBlock == sharedBlock)
            return fp->scopeChain;
    }

    /*
     * Special-case cloning the innermost block; this doesn't have enough in
     * common with subsequent steps to include in the loop.
     *
     * js_CloneBlockObject leaves the clone's parent slot uninitialized. We
     * populate it below.
     */
    JSObject *innermostNewChild = js_CloneBlockObject(cx, sharedBlock, fp);
    if (!innermostNewChild)
        return NULL;
    /* Root the clone: subsequent js_CloneBlockObject calls can trigger GC. */
    JSAutoTempValueRooter tvr(cx, innermostNewChild);

    /*
     * Clone our way towards outer scopes until we reach the innermost
     * enclosing function, or the innermost block we've already cloned.
     */
    JSObject *newChild = innermostNewChild;
    for (;;) {
        JS_ASSERT(OBJ_GET_PROTO(cx, newChild) == sharedBlock);
        sharedBlock = OBJ_GET_PARENT(cx, sharedBlock);

        /* Sometimes limitBlock will be NULL, so check that first. */
        if (sharedBlock == limitBlock || !sharedBlock)
            break;

        /* As in the call above, we don't know the real parent yet. */
        JSObject *clone
            = js_CloneBlockObject(cx, sharedBlock, fp);
        if (!clone)
            return NULL;

        /*
         * Avoid OBJ_SET_PARENT overhead as newChild cannot escape to
         * other threads.
         */
        STOBJ_SET_PARENT(newChild, clone);
        newChild = clone;
    }
    STOBJ_SET_PARENT(newChild, fp->scopeChain);


    /*
     * If we found a limit block belonging to this frame, then we should have
     * found it in blockChain.
     */
    JS_ASSERT_IF(limitBlock &&
                 OBJ_GET_CLASS(cx, limitBlock) == &js_BlockClass &&
                 limitClone->getPrivate() == fp,
                 sharedBlock);

    /* Place our newly cloned blocks at the head of the scope chain. */
    fp->scopeChain = innermostNewChild;
    return fp->scopeChain;
}
824 |
|
825 |
/*
 * Fetch the primitive 'this' value for a native method call on a wrapper of
 * class clasp. vp[1] holds the call's 'this' jsval. When it is an object,
 * verify the object is an instance of clasp (JS_InstanceOf reports the error
 * using vp + 2 as argv) and read the wrapped primitive out of its
 * JSSLOT_PRIMITIVE_THIS fast slot. On success, stores the primitive in
 * *thisvp and returns JS_TRUE; returns JS_FALSE on a class mismatch.
 */
JSBool
js_GetPrimitiveThis(JSContext *cx, jsval *vp, JSClass *clasp, jsval *thisvp)
{
    jsval self = vp[1];

    if (JSVAL_IS_OBJECT(self)) {
        JSObject *wrapper = JS_THIS_OBJECT(cx, vp);
        if (!JS_InstanceOf(cx, wrapper, clasp, vp + 2))
            return JS_FALSE;
        self = wrapper->fslots[JSSLOT_PRIMITIVE_THIS];
    }

    *thisvp = self;
    return JS_TRUE;
}
841 |
|
842 |
/* Some objects (e.g., With) delegate 'this' to another object. */ |
843 |
static inline JSObject * |
844 |
CallThisObjectHook(JSContext *cx, JSObject *obj, jsval *argv) |
845 |
{ |
846 |
JSObject *thisp = obj->thisObject(cx); |
847 |
if (!thisp) |
848 |
return NULL; |
849 |
argv[-1] = OBJECT_TO_JSVAL(thisp); |
850 |
return thisp; |
851 |
} |
852 |
|
853 |
/*
 * ECMA requires "the global object", but in embeddings such as the browser,
 * which have multiple top-level objects (windows, frames, etc. in the DOM),
 * we prefer fun's parent. An example that causes this code to run:
 *
 *   // in window w1
 *   function f() { return this }
 *   function g() { return f }
 *
 *   // in window w2
 *   var h = w1.g()
 *   alert(h() == w1)
 *
 * The alert should display "true".
 */
JS_STATIC_INTERPRET JSObject *
js_ComputeGlobalThis(JSContext *cx, JSBool lazy, jsval *argv)
{
    JSObject *thisp;

    /* argv[-2] is the callee; a parentless callee means use the global. */
    if (JSVAL_IS_PRIMITIVE(argv[-2]) ||
        !OBJ_GET_PARENT(cx, JSVAL_TO_OBJECT(argv[-2]))) {
        thisp = cx->globalObject;
    } else {
        JSStackFrame *fp;
        jsid id;
        jsval v;
        uintN attrs;
        JSBool ok;
        JSObject *parent;

        /*
         * Walk up the parent chain, first checking that the running script
         * has access to the callee's parent object. Note that if lazy, the
         * running script whose principals we want to check is the script
         * associated with fp->down, not with fp.
         *
         * FIXME: 417851 -- this access check should not be required, as it
         * imposes a performance penalty on all js_ComputeGlobalThis calls,
         * and it represents a maintenance hazard.
         */
        fp = js_GetTopStackFrame(cx);   /* quell GCC overwarning */
        if (lazy) {
            /*
             * Temporarily unlink fp so the access check below runs under
             * fp->down's principals; fp is parked on dormantFrameChain to
             * keep it reachable. Relinked symmetrically after the check.
             */
            JS_ASSERT(fp->argv == argv);
            fp->dormantNext = cx->dormantFrameChain;
            cx->dormantFrameChain = fp;
            cx->fp = fp->down;
            fp->down = NULL;
        }
        thisp = JSVAL_TO_OBJECT(argv[-2]);
        id = ATOM_TO_JSID(cx->runtime->atomState.parentAtom);

        ok = thisp->checkAccess(cx, id, JSACC_PARENT, &v, &attrs);
        if (lazy) {
            cx->dormantFrameChain = fp->dormantNext;
            fp->dormantNext = NULL;
            fp->down = cx->fp;
            cx->fp = fp;
        }
        if (!ok)
            return NULL;

        /* Climb to the topmost parent — the callee's top-level object. */
        thisp = JSVAL_IS_VOID(v)
                ? OBJ_GET_PARENT(cx, thisp)
                : JSVAL_TO_OBJECT(v);
        while ((parent = OBJ_GET_PARENT(cx, thisp)) != NULL)
            thisp = parent;
    }

    return CallThisObjectHook(cx, thisp, argv);
}
924 |
|
925 |
/*
 * Resolve a non-null |this| value stored in argv[-1]. Primitives are boxed
 * (the wrapper is written back into argv[-1]); Call and Block objects are
 * internal scope objects that must never escape as |this|, so they fall
 * back to the global-this computation.
 */
static JSObject *
ComputeThis(JSContext *cx, JSBool lazy, jsval *argv)
{
    jsval thisv = argv[-1];

    JS_ASSERT(!JSVAL_IS_NULL(thisv));
    if (!JSVAL_IS_OBJECT(thisv)) {
        /* Box the primitive and keep the wrapper rooted via argv[-1]. */
        if (!js_PrimitiveToObject(cx, &argv[-1]))
            return NULL;
        return JSVAL_TO_OBJECT(argv[-1]);
    }

    JSObject *obj = JSVAL_TO_OBJECT(thisv);
    JSClass *clasp = OBJ_GET_CLASS(cx, obj);
    if (clasp == &js_CallClass || clasp == &js_BlockClass)
        return js_ComputeGlobalThis(cx, lazy, argv);

    return CallThisObjectHook(cx, obj, argv);
}
944 |
|
945 |
/*
 * Public entry point for computing |this| (argv[-1]) before a call: a null
 * |this| selects the callee's global; anything else goes through
 * ComputeThis for boxing and scope-object handling.
 */
JSObject *
js_ComputeThis(JSContext *cx, JSBool lazy, jsval *argv)
{
    jsval thisv = argv[-1];

    JS_ASSERT(thisv != JSVAL_HOLE);  // check for SynthesizeFrame poisoning
    return JSVAL_IS_NULL(thisv)
           ? js_ComputeGlobalThis(cx, lazy, argv)
           : ComputeThis(cx, lazy, argv);
}
953 |
|
954 |
#if JS_HAS_NO_SUCH_METHOD |
955 |
|
956 |
/* Reserved slots of a NoSuchMethod object (see js_OnUnknownMethod). */
const uint32 JSSLOT_FOUND_FUNCTION = JSSLOT_PRIVATE;      /* the __noSuchMethod__ hook */
const uint32 JSSLOT_SAVED_ID = JSSLOT_PRIVATE + 1;        /* the originally requested id */
958 |
|
959 |
/*
 * Class of the internal carrier object that transports a found
 * __noSuchMethod__ function plus the originally requested property id
 * (in its two reserved slots) from js_OnUnknownMethod, through the callee
 * stack slot, to NoSuchMethod.
 */
JSClass js_NoSuchMethodClass = {
    "NoSuchMethod",
    JSCLASS_HAS_RESERVED_SLOTS(2) | JSCLASS_IS_ANONYMOUS,
    JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_PropertyStub,
    JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
    NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL
};
966 |
|
967 |
/* |
968 |
* When JSOP_CALLPROP or JSOP_CALLELEM does not find the method property of |
969 |
* the base object, we search for the __noSuchMethod__ method in the base. |
970 |
* If it exists, we store the method and the property's id into an object of |
971 |
* NoSuchMethod class and store this object into the callee's stack slot. |
972 |
* Later, js_Invoke will recognise such an object and transfer control to |
973 |
* NoSuchMethod that invokes the method like: |
974 |
* |
975 |
* this.__noSuchMethod__(id, args) |
976 |
* |
977 |
* where id is the name of the method that this invocation attempted to |
978 |
* call by name, and args is an Array containing this invocation's actual |
979 |
* parameters. |
980 |
*/ |
981 |
/*
 * Handle a method lookup miss on the base object in vp[1]: look up its
 * __noSuchMethod__ hook and, if found, replace the callee slot vp[0] with
 * a NoSuchMethod-class carrier object that js_Invoke will later unpack.
 * Returns false only on error.
 */
JS_STATIC_INTERPRET JSBool
js_OnUnknownMethod(JSContext *cx, jsval *vp)
{
    JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));

    JSObject *obj = JSVAL_TO_OBJECT(vp[1]);
    jsid id = ATOM_TO_JSID(cx->runtime->atomState.noSuchMethodAtom);
    /* Root the looked-up method across the allocations below. */
    JSAutoTempValueRooter tvr(cx, JSVAL_NULL);
    if (!js_GetMethod(cx, obj, id, false, tvr.addr()))
        return false;
    if (JSVAL_IS_PRIMITIVE(tvr.value())) {
        /*
         * No callable hook: leave the primitive in the callee slot so the
         * subsequent js_Invoke reports "not a function".
         */
        vp[0] = tvr.value();
    } else {
#if JS_HAS_XML_SUPPORT
        /* Extract the function name from function::name qname. */
        if (!JSVAL_IS_PRIMITIVE(vp[0])) {
            obj = JSVAL_TO_OBJECT(vp[0]);
            if (!js_IsFunctionQName(cx, obj, &id))
                return false;
            if (id != 0)
                vp[0] = ID_TO_VALUE(id);
        }
#endif
        obj = js_NewObjectWithGivenProto(cx, &js_NoSuchMethodClass,
                                         NULL, NULL);
        if (!obj)
            return false;
        /* Stash the hook and the original id for NoSuchMethod to unpack. */
        obj->fslots[JSSLOT_FOUND_FUNCTION] = tvr.value();
        obj->fslots[JSSLOT_SAVED_ID] = vp[0];
        vp[0] = OBJECT_TO_JSVAL(obj);
    }
    return true;
}
1014 |
|
1015 |
/*
 * Invoke this.__noSuchMethod__(id, args) for a callee of NoSuchMethod class
 * (built by js_OnUnknownMethod). The carrier object's reserved slots supply
 * the hook function and the originally requested id; the actual arguments
 * are packed into a fresh Array so the hook always receives two parameters.
 * The result is stored back into vp[0].
 */
static JS_REQUIRES_STACK JSBool
NoSuchMethod(JSContext *cx, uintN argc, jsval *vp, uint32 flags)
{
    jsval *invokevp;
    void *mark;
    JSBool ok;
    JSObject *obj, *argsobj;

    /* 2 slots for callee/|this| plus 2 arguments: (id, args). */
    invokevp = js_AllocStack(cx, 2 + 2, &mark);
    if (!invokevp)
        return JS_FALSE;

    JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[0]));
    JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
    obj = JSVAL_TO_OBJECT(vp[0]);
    JS_ASSERT(STOBJ_GET_CLASS(obj) == &js_NoSuchMethodClass);

    /* Unpack the carrier: hook becomes the callee, original |this| stays. */
    invokevp[0] = obj->fslots[JSSLOT_FOUND_FUNCTION];
    invokevp[1] = vp[1];
    invokevp[2] = obj->fslots[JSSLOT_SAVED_ID];
    argsobj = js_NewArrayObject(cx, argc, vp + 2);
    if (!argsobj) {
        ok = JS_FALSE;
    } else {
        invokevp[3] = OBJECT_TO_JSVAL(argsobj);
        ok = (flags & JSINVOKE_CONSTRUCT)
             ? js_InvokeConstructor(cx, 2, JS_TRUE, invokevp)
             : js_Invoke(cx, 2, invokevp, flags);
        vp[0] = invokevp[0];
    }
    js_FreeStack(cx, mark);
    return ok;
}
1048 |
|
1049 |
#endif /* JS_HAS_NO_SUCH_METHOD */ |
1050 |
|
1051 |
/* |
1052 |
* We check if the function accepts a primitive value as |this|. For that we |
1053 |
* use a table that maps value's tag into the corresponding function flag. |
1054 |
*/ |
1055 |
/* These asserts pin the jsval tag encoding the table below depends on. */
JS_STATIC_ASSERT(JSVAL_INT == 1);
JS_STATIC_ASSERT(JSVAL_DOUBLE == 2);
JS_STATIC_ASSERT(JSVAL_STRING == 4);
JS_STATIC_ASSERT(JSVAL_SPECIAL == 6);

/*
 * Maps a primitive jsval's tag to the JSFUN_THISP_* flag the function must
 * carry to accept that primitive as |this| unboxed. Odd tag values denote
 * int-tagged jsvals, hence the interleaved NUMBER entries; presumably the
 * table is indexed by (tag - 1) — confirm against PRIMITIVE_THIS_TEST.
 */
const uint16 js_PrimitiveTestFlags[] = {
    JSFUN_THISP_NUMBER,     /* INT     */
    JSFUN_THISP_NUMBER,     /* DOUBLE  */
    JSFUN_THISP_NUMBER,     /* INT     */
    JSFUN_THISP_STRING,     /* STRING  */
    JSFUN_THISP_NUMBER,     /* INT     */
    JSFUN_THISP_BOOLEAN,    /* BOOLEAN */
    JSFUN_THISP_NUMBER      /* INT     */
};
1069 |
|
1070 |
/* |
1071 |
* Find a function reference and its 'this' object implicit first parameter |
1072 |
* under argc arguments on cx's stack, and call the function. Push missing |
1073 |
* required arguments, allocate declared local variables, and pop everything |
1074 |
* when done. Then push the return value. |
1075 |
*/ |
1076 |
JS_REQUIRES_STACK JS_FRIEND_API(JSBool)
js_Invoke(JSContext *cx, uintN argc, jsval *vp, uintN flags)
{
    void *mark;
    JSStackFrame frame;
    jsval *sp, *argv, *newvp;
    jsval v;
    JSObject *funobj, *parent;
    JSBool ok;
    JSClass *clasp;
    const JSObjectOps *ops;
    JSNative native;
    JSFunction *fun;
    JSScript *script;
    uintN nslots, i;
    uint32 rootedArgsFlag;
    JSInterpreterHook hook;
    void *hookData;

    JS_ASSERT(argc <= JS_ARGS_LENGTH_MAX);

    /* [vp .. vp + 2 + argc) must belong to the last JS stack arena. */
    JS_ASSERT((jsval *) cx->stackPool.current->base <= vp);
    JS_ASSERT(vp + 2 + argc <= (jsval *) cx->stackPool.current->avail);

    /* Mark the top of stack and load frequently-used registers. */
    mark = JS_ARENA_MARK(&cx->stackPool);
    MUST_FLOW_THROUGH("out2");
    v = *vp;

    /* The callee (vp[0]) must be an object; anything else is "not a function". */
    if (JSVAL_IS_PRIMITIVE(v))
        goto bad;

    funobj = JSVAL_TO_OBJECT(v);
    parent = OBJ_GET_PARENT(cx, funobj);
    clasp = OBJ_GET_CLASS(cx, funobj);
    if (clasp != &js_FunctionClass) {
#if JS_HAS_NO_SUCH_METHOD
        /* A NoSuchMethod carrier (see js_OnUnknownMethod) is dispatched whole. */
        if (clasp == &js_NoSuchMethodClass) {
            ok = NoSuchMethod(cx, argc, vp, flags);
            goto out2;
        }
#endif

        /* Function is inlined, all other classes use object ops. */
        ops = funobj->map->ops;

        /*
         * XXX this makes no sense -- why convert to function if clasp->call?
         * XXX better to call that hook without converting
         *
         * FIXME bug 408416: try converting to function, for API compatibility
         * if there is a call op defined.
         */
        if ((ops == &js_ObjectOps) ? clasp->call : ops->call) {
            ok = clasp->convert(cx, funobj, JSTYPE_FUNCTION, &v);
            if (!ok)
                goto out2;

            if (VALUE_IS_FUNCTION(cx, v)) {
                /* Make vp refer to funobj to keep it available as argv[-2]. */
                *vp = v;
                funobj = JSVAL_TO_OBJECT(v);
                parent = OBJ_GET_PARENT(cx, funobj);
                goto have_fun;
            }
        }
        fun = NULL;
        script = NULL;
        nslots = 0;

        /* Try a call or construct native object op. */
        if (flags & JSINVOKE_CONSTRUCT) {
            if (!JSVAL_IS_OBJECT(vp[1])) {
                ok = js_PrimitiveToObject(cx, &vp[1]);
                if (!ok)
                    goto out2;
            }
            native = ops->construct;
        } else {
            native = ops->call;
        }
        if (!native)
            goto bad;
    } else {
  have_fun:
        /* Get private data and set derived locals from it. */
        fun = GET_FUNCTION_PRIVATE(cx, funobj);
        /* nslots = number of missing required args to pad with undefined. */
        nslots = FUN_MINARGS(fun);
        nslots = (nslots > argc) ? nslots - argc : 0;
        if (FUN_INTERPRETED(fun)) {
            native = NULL;
            script = fun->u.i.script;
            JS_ASSERT(script);
        } else {
            native = fun->u.n.native;
            script = NULL;
            nslots += fun->u.n.extra;
        }

        if (JSFUN_BOUND_METHOD_TEST(fun->flags)) {
            /* Handle bound method special case: |this| is the callee's parent. */
            vp[1] = OBJECT_TO_JSVAL(parent);
        } else if (!JSVAL_IS_OBJECT(vp[1])) {
            JS_ASSERT(!(flags & JSINVOKE_CONSTRUCT));
            /* Natives flagged THISP_* accept a primitive |this| unboxed. */
            if (PRIMITIVE_THIS_TEST(fun, vp[1]))
                goto start_call;
        }
    }

    if (flags & JSINVOKE_CONSTRUCT) {
        JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
    } else {
        /*
         * We must call js_ComputeThis in case we are not called from the
         * interpreter, where a prior bytecode has computed an appropriate
         * |this| already.
         *
         * But we need to compute |this| eagerly only for so-called "slow"
         * (i.e., not fast) native functions. Fast natives must use either
         * JS_THIS or JS_THIS_OBJECT, and scripted functions will go through
         * the appropriate this-computing bytecode, e.g., JSOP_THIS.
         */
        if (native && (!fun || !(fun->flags & JSFUN_FAST_NATIVE))) {
            if (!js_ComputeThis(cx, JS_FALSE, vp + 2)) {
                ok = JS_FALSE;
                goto out2;
            }
            flags |= JSFRAME_COMPUTED_THIS;
        }
    }

  start_call:
    /* Fast natives take (cx, argc, vp) directly -- no frame is pushed. */
    if (native && fun && (fun->flags & JSFUN_FAST_NATIVE)) {
#ifdef DEBUG_NOT_THROWING
        JSBool alreadyThrowing = cx->throwing;
#endif
        JS_ASSERT(nslots == 0);
#if JS_HAS_LVALUE_RETURN
        /* Set by JS_SetCallReturnValue2, used to return reference types. */
        cx->rval2set = JS_FALSE;
#endif
        ok = ((JSFastNative) native)(cx, argc, vp);
        JS_RUNTIME_METER(cx->runtime, nativeCalls);
#ifdef DEBUG_NOT_THROWING
        if (ok && !alreadyThrowing)
            ASSERT_NOT_THROWING(cx);
#endif
        goto out2;
    }

    argv = vp + 2;
    sp = argv + argc;

    rootedArgsFlag = JSFRAME_ROOTED_ARGV;
    if (nslots != 0) {
        /*
         * The extra slots required by the function continue with argument
         * slots. Thus, when the last stack pool arena does not have room to
         * fit nslots right after sp and AllocateAfterSP fails, we have to copy
         * [vp..vp+2+argc) slots and clear rootedArgsFlag to root the copy.
         */
        if (!AllocateAfterSP(cx, sp, nslots)) {
            rootedArgsFlag = 0;
            newvp = js_AllocRawStack(cx, 2 + argc + nslots, NULL);
            if (!newvp) {
                ok = JS_FALSE;
                goto out2;
            }
            memcpy(newvp, vp, (2 + argc) * sizeof(jsval));
            argv = newvp + 2;
            sp = argv + argc;
        }

        /* Push void to initialize missing args. */
        i = nslots;
        do {
            *sp++ = JSVAL_VOID;
        } while (--i != 0);
    }

    /* Allocate space for local variables and stack of interpreted function. */
    if (script && script->nslots != 0) {
        if (!AllocateAfterSP(cx, sp, script->nslots)) {
            /* NB: Discontinuity between argv and slots, stack slots. */
            sp = js_AllocRawStack(cx, script->nslots, NULL);
            if (!sp) {
                ok = JS_FALSE;
                goto out2;
            }
        }

        /* Push void to initialize local variables. */
        for (jsval *end = sp + fun->u.i.nvars; sp != end; ++sp)
            *sp = JSVAL_VOID;
    }

    /*
     * Initialize the frame.
     *
     * To set thisp we use an explicit cast and not JSVAL_TO_OBJECT, as vp[1]
     * can be a primitive value here for those native functions specified with
     * JSFUN_THISP_(NUMBER|STRING|BOOLEAN) flags.
     */
    frame.thisp = (JSObject *)vp[1];
    frame.varobj = NULL;
    frame.callobj = NULL;
    frame.argsobj = NULL;
    frame.script = script;
    frame.fun = fun;
    frame.argc = argc;
    frame.argv = argv;

    /* Default return value for a constructor is the new object. */
    frame.rval = (flags & JSINVOKE_CONSTRUCT) ? vp[1] : JSVAL_VOID;
    frame.down = cx->fp;
    frame.annotation = NULL;
    frame.scopeChain = NULL;    /* set below for real, after cx->fp is set */
    frame.blockChain = NULL;
    frame.regs = NULL;
    frame.imacpc = NULL;
    frame.slots = NULL;
    frame.sharpDepth = 0;
    frame.sharpArray = NULL;
    frame.flags = flags | rootedArgsFlag;
    frame.dormantNext = NULL;
    frame.displaySave = NULL;

    MUST_FLOW_THROUGH("out");
    cx->fp = &frame;

    /* Init these now in case we goto out before first hook call. */
    hook = cx->debugHooks->callHook;
    hookData = NULL;

    if (native) {
        /* If native, use caller varobj and scopeChain for eval. */
        JS_ASSERT(!frame.varobj);
        JS_ASSERT(!frame.scopeChain);
        if (frame.down) {
            frame.varobj = frame.down->varobj;
            frame.scopeChain = frame.down->scopeChain;
        }

        /* But ensure that we have a scope chain. */
        if (!frame.scopeChain)
            frame.scopeChain = parent;
    } else {
        /* Use parent scope so js_GetCallObject can find the right "Call". */
        frame.scopeChain = parent;
        if (JSFUN_HEAVYWEIGHT_TEST(fun->flags)) {
            /* Scope with a call object parented by the callee's parent. */
            if (!js_GetCallObject(cx, &frame)) {
                ok = JS_FALSE;
                goto out;
            }
        }
        frame.slots = sp - fun->u.i.nvars;
    }

    /* Call the hook if present after we fully initialized the frame. */
    if (hook)
        hookData = hook(cx, &frame, JS_TRUE, 0, cx->debugHooks->callHookData);

#ifdef INCLUDE_MOZILLA_DTRACE
    /* DTrace function entry, non-inlines */
    if (JAVASCRIPT_FUNCTION_ENTRY_ENABLED())
        jsdtrace_function_entry(cx, &frame, fun);
    if (JAVASCRIPT_FUNCTION_INFO_ENABLED())
        jsdtrace_function_info(cx, &frame, frame.down, fun);
    if (JAVASCRIPT_FUNCTION_ARGS_ENABLED())
        jsdtrace_function_args(cx, &frame, fun, frame.argc, frame.argv);
#endif

    /* Call the function, either a native method or an interpreted script. */
    if (native) {
#ifdef DEBUG_NOT_THROWING
        JSBool alreadyThrowing = cx->throwing;
#endif

#if JS_HAS_LVALUE_RETURN
        /* Set by JS_SetCallReturnValue2, used to return reference types. */
        cx->rval2set = JS_FALSE;
#endif
        ok = native(cx, frame.thisp, argc, frame.argv, &frame.rval);
        JS_RUNTIME_METER(cx->runtime, nativeCalls);
#ifdef DEBUG_NOT_THROWING
        if (ok && !alreadyThrowing)
            ASSERT_NOT_THROWING(cx);
#endif
    } else {
        JS_ASSERT(script);
        ok = js_Interpret(cx);
    }

#ifdef INCLUDE_MOZILLA_DTRACE
    /* DTrace function return, non-inlines */
    if (JAVASCRIPT_FUNCTION_RVAL_ENABLED())
        jsdtrace_function_rval(cx, &frame, fun, &frame.rval);
    if (JAVASCRIPT_FUNCTION_RETURN_ENABLED())
        jsdtrace_function_return(cx, &frame, fun);
#endif

  out:
    /* Frame-teardown path: call hook, release activation objects, pop frame. */
    if (hookData) {
        hook = cx->debugHooks->callHook;
        if (hook)
            hook(cx, &frame, JS_FALSE, &ok, hookData);
    }

    frame.putActivationObjects(cx);

    *vp = frame.rval;

    /* Restore cx->fp now that we're done releasing frame objects. */
    cx->fp = frame.down;

  out2:
    /* Pop everything we may have allocated off the stack. */
    JS_ARENA_RELEASE(&cx->stackPool, mark);
    if (!ok)
        *vp = JSVAL_NULL;
    return ok;

  bad:
    js_ReportIsNotFunction(cx, vp, flags & JSINVOKE_FUNFLAGS);
    ok = JS_FALSE;
    goto out2;
}
1405 |
|
1406 |
/*
 * Call fval as a function with |obj| as |this| and argc arguments from argv,
 * storing the result in *rval. Allocates the invocation slots from the JS
 * stack pool, so callers need not reserve stack space themselves.
 */
JSBool
js_InternalInvoke(JSContext *cx, JSObject *obj, jsval fval, uintN flags,
                  uintN argc, jsval *argv, jsval *rval)
{
    jsval *invokevp;
    void *mark;
    JSBool ok;

    js_LeaveTrace(cx);
    /* 2 extra slots hold the callee and |this| ahead of the arguments. */
    invokevp = js_AllocStack(cx, 2 + argc, &mark);
    if (!invokevp)
        return JS_FALSE;

    invokevp[0] = fval;
    invokevp[1] = OBJECT_TO_JSVAL(obj);
    memcpy(invokevp + 2, argv, argc * sizeof *argv);

    ok = js_Invoke(cx, argc, invokevp, flags);
    if (ok) {
        /*
         * Store *rval in a scoped local root if a scope is open, else in
         * the lastInternalResult pigeon-hole GC root, solely so users of
         * js_InternalInvoke and its direct and indirect (js_ValueToString for
         * example) callers do not need to manage roots for local, temporary
         * references to such results.
         */
        *rval = *invokevp;
        if (JSVAL_IS_GCTHING(*rval) && *rval != JSVAL_NULL) {
            if (cx->localRootStack) {
                if (js_PushLocalRoot(cx, cx->localRootStack, *rval) < 0)
                    ok = JS_FALSE;
            } else {
                cx->weakRoots.lastInternalResult = *rval;
            }
        }
    }

    js_FreeStack(cx, mark);
    return ok;
}
1446 |
|
1447 |
/*
 * Invoke a getter or setter function fval for obj.id, performing the
 * general access check first when the accessor is a scripted function
 * (native accessors are expected to perform their own checks).
 */
JSBool
js_InternalGetOrSet(JSContext *cx, JSObject *obj, jsid id, jsval fval,
                    JSAccessMode mode, uintN argc, jsval *argv, jsval *rval)
{
    JSSecurityCallbacks *callbacks;

    js_LeaveTrace(cx);

    /*
     * js_InternalInvoke could result in another try to get or set the same id
     * again, see bug 355497.
     */
    JS_CHECK_RECURSION(cx, return JS_FALSE);

    /*
     * Check general (not object-ops/class-specific) access from the running
     * script to obj.id only if id has a scripted getter or setter that we're
     * about to invoke. If we don't check this case, nothing else will -- no
     * other native code has the chance to check.
     *
     * Contrast this non-native (scripted) case with native getter and setter
     * accesses, where the native itself must do an access check, if security
     * policies requires it. We make a checkAccess or checkObjectAccess call
     * back to the embedding program only in those cases where we're not going
     * to call an embedding-defined native function, getter, setter, or class
     * hook anyway. Where we do call such a native, there's no need for the
     * engine to impose a separate access check callback on all embeddings --
     * many embeddings have no security policy at all.
     */
    JS_ASSERT(mode == JSACC_READ || mode == JSACC_WRITE);
    callbacks = JS_GetSecurityCallbacks(cx);
    if (callbacks &&
        callbacks->checkObjectAccess &&
        VALUE_IS_FUNCTION(cx, fval) &&
        FUN_INTERPRETED(GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fval))) &&
        !callbacks->checkObjectAccess(cx, obj, ID_TO_VALUE(id), mode, &fval)) {
        return JS_FALSE;
    }

    return js_InternalCall(cx, obj, fval, argc, argv, rval);
}
1488 |
|
1489 |
/*
 * Execute |script| with scope chain |chain|, optionally continuing an
 * enclosing frame |down| (eval and debugger API), storing the completion
 * value in *result when non-null.
 *
 * Bug fix vs. the original: the two failure paths inside the |!down| block
 * (inner-object substitution failing, thisObject failing) previously either
 * returned directly or jumped to a label that skipped cleanup, leaving
 * cx->fp pointing at this function's dying local frame, the dormant frame
 * chain corrupted, and the raw-stack mark unreleased. Both now route
 * through the common |out2| cleanup.
 */
JSBool
js_Execute(JSContext *cx, JSObject *chain, JSScript *script,
           JSStackFrame *down, uintN flags, jsval *result)
{
    JSInterpreterHook hook;
    void *hookData, *mark;
    JSStackFrame *oldfp, frame;
    JSObject *obj, *tmp;
    JSBool ok;

    js_LeaveTrace(cx);

#ifdef INCLUDE_MOZILLA_DTRACE
    if (JAVASCRIPT_EXECUTE_START_ENABLED())
        jsdtrace_execute_start(script);
#endif

    hook = cx->debugHooks->executeHook;
    hookData = mark = NULL;
    oldfp = js_GetTopStackFrame(cx);
    frame.script = script;
    if (down) {
        /* Propagate arg state for eval and the debugger API. */
        frame.callobj = down->callobj;
        frame.argsobj = down->argsobj;
        frame.varobj = down->varobj;
        frame.fun = down->fun;
        frame.thisp = down->thisp;
        if (down->flags & JSFRAME_COMPUTED_THIS)
            flags |= JSFRAME_COMPUTED_THIS;
        frame.argc = down->argc;
        frame.argv = down->argv;
        frame.annotation = down->annotation;
        frame.sharpArray = down->sharpArray;
        JS_ASSERT(script->nfixed == 0);
    } else {
        frame.callobj = NULL;
        frame.argsobj = NULL;
        obj = chain;
        if (cx->options & JSOPTION_VAROBJFIX) {
            /* With VAROBJFIX, vars go on the end of the parent chain. */
            while ((tmp = OBJ_GET_PARENT(cx, obj)) != NULL)
                obj = tmp;
        }
        frame.varobj = obj;
        frame.fun = NULL;
        frame.thisp = chain;
        frame.argc = 0;
        frame.argv = NULL;
        frame.annotation = NULL;
        frame.sharpArray = NULL;
    }

    frame.imacpc = NULL;
    if (script->nslots != 0) {
        frame.slots = js_AllocRawStack(cx, script->nslots, &mark);
        if (!frame.slots) {
            ok = JS_FALSE;
            goto out;
        }
        memset(frame.slots, 0, script->nfixed * sizeof(jsval));
    } else {
        frame.slots = NULL;
    }

    frame.rval = JSVAL_VOID;
    frame.down = down;
    frame.scopeChain = chain;
    frame.regs = NULL;
    frame.sharpDepth = 0;
    frame.flags = flags;
    frame.dormantNext = NULL;
    frame.blockChain = NULL;

    /*
     * Here we wrap the call to js_Interpret with code to (conditionally)
     * save and restore the old stack frame chain into a chain of 'dormant'
     * frame chains. Since we are replacing cx->fp, we were running into
     * the problem that if GC was called under this frame, some of the GC
     * things associated with the old frame chain (available here only in
     * the C variable 'oldfp') were not rooted and were being collected.
     *
     * So, now we preserve the links to these 'dormant' frame chains in cx
     * before calling js_Interpret and cleanup afterwards. The GC walks
     * these dormant chains and marks objects in the same way that it marks
     * objects in the primary cx->fp chain.
     */
    if (oldfp && oldfp != down) {
        JS_ASSERT(!oldfp->dormantNext);
        oldfp->dormantNext = cx->dormantFrameChain;
        cx->dormantFrameChain = oldfp;
    }

    cx->fp = &frame;
    if (!down) {
        OBJ_TO_INNER_OBJECT(cx, chain);
        if (!chain) {
            /* Was a bare |return JS_FALSE| -- must unwind via out2. */
            ok = JS_FALSE;
            goto out2;
        }
        frame.scopeChain = chain;

        frame.thisp = frame.thisp->thisObject(cx);
        if (!frame.thisp) {
            ok = JS_FALSE;
            goto out2;
        }
        frame.flags |= JSFRAME_COMPUTED_THIS;
    }

    if (hook) {
        hookData = hook(cx, &frame, JS_TRUE, 0,
                        cx->debugHooks->executeHookData);
    }

    ok = js_Interpret(cx);
    if (result)
        *result = frame.rval;

    if (hookData) {
        hook = cx->debugHooks->executeHook;
        if (hook)
            hook(cx, &frame, JS_FALSE, &ok, hookData);
    }

  out2:
    /* Release the slots arena, pop our frame, and unlink the dormant chain. */
    if (mark)
        js_FreeRawStack(cx, mark);
    cx->fp = oldfp;

    if (oldfp && oldfp != down) {
        JS_ASSERT(cx->dormantFrameChain == oldfp);
        cx->dormantFrameChain = oldfp->dormantNext;
        oldfp->dormantNext = NULL;
    }

  out:
#ifdef INCLUDE_MOZILLA_DTRACE
    if (JAVASCRIPT_EXECUTE_DONE_ENABLED())
        jsdtrace_execute_done(script);
#endif
    return ok;
}
1629 |
|
1630 |
/*
 * Check whether defining obj.id with attributes attrs conflicts with an
 * existing property. Returns JS_TRUE when (re)declaration is allowed;
 * reports an error (or a strict warning for JSPROP_INITIALIZER) and
 * returns its result otherwise. On success with propp given, *objp/*propp
 * receive the found holder and property (caller must drop it).
 */
JSBool
js_CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs,
                      JSObject **objp, JSProperty **propp)
{
    JSObject *obj2;
    JSProperty *prop;
    uintN oldAttrs, report;
    bool isFunction;
    jsval value;
    const char *type, *name;

    /*
     * Both objp and propp must be either null or given. When given, *propp
     * must be null. This way we avoid an extra "if (propp) *propp = NULL" for
     * the common case of a non-existing property.
     */
    JS_ASSERT(!objp == !propp);
    JS_ASSERT_IF(propp, !*propp);

    /* The JSPROP_INITIALIZER case below may generate a warning. Since we must
     * drop the property before reporting it, we insist on !propp to avoid
     * looking up the property again after the reporting is done.
     */
    JS_ASSERT_IF(attrs & JSPROP_INITIALIZER, attrs == JSPROP_INITIALIZER);
    JS_ASSERT_IF(attrs == JSPROP_INITIALIZER, !propp);

    if (!obj->lookupProperty(cx, id, &obj2, &prop))
        return JS_FALSE;
    if (!prop)
        return JS_TRUE;

    /* Use prop as a speedup hint to obj->getAttributes. */
    if (!obj2->getAttributes(cx, id, prop, &oldAttrs)) {
        obj2->dropProperty(cx, prop);
        return JS_FALSE;
    }

    /*
     * If our caller doesn't want prop, drop it (we don't need it any longer).
     */
    if (!propp) {
        obj2->dropProperty(cx, prop);
        prop = NULL;
    } else {
        *objp = obj2;
        *propp = prop;
    }

    if (attrs == JSPROP_INITIALIZER) {
        /* Allow the new object to override properties. */
        if (obj2 != obj)
            return JS_TRUE;

        /* The property must be dropped already. */
        JS_ASSERT(!prop);
        report = JSREPORT_WARNING | JSREPORT_STRICT;

#ifdef __GNUC__
        isFunction = false;     /* suppress bogus gcc warnings */
#endif
    } else {
        /* We allow redeclaring some non-readonly properties. */
        if (((oldAttrs | attrs) & JSPROP_READONLY) == 0) {
            /* Allow redeclaration of variables and functions. */
            if (!(attrs & (JSPROP_GETTER | JSPROP_SETTER)))
                return JS_TRUE;

            /*
             * Allow adding a getter only if a property already has a setter
             * but no getter and similarly for adding a setter. That is, we
             * allow only the following transitions:
             *
             *   no-property --> getter --> getter + setter
             *   no-property --> setter --> getter + setter
             */
            if ((~(oldAttrs ^ attrs) & (JSPROP_GETTER | JSPROP_SETTER)) == 0)
                return JS_TRUE;

            /*
             * Allow redeclaration of an impermanent property (in which case
             * anyone could delete it and redefine it, willy-nilly).
             */
            if (!(oldAttrs & JSPROP_PERMANENT))
                return JS_TRUE;
        }
        if (prop)
            obj2->dropProperty(cx, prop);

        report = JSREPORT_ERROR;
        isFunction = (oldAttrs & (JSPROP_GETTER | JSPROP_SETTER)) != 0;
        if (!isFunction) {
            /* Fetch the value only to pick the right word for the message. */
            if (!obj->getProperty(cx, id, &value))
                return JS_FALSE;
            isFunction = VALUE_IS_FUNCTION(cx, value);
        }
    }

    /* Choose the noun for the "redeclared variable" diagnostic. */
    type = (attrs == JSPROP_INITIALIZER)
           ? "property"
           : (oldAttrs & attrs & JSPROP_GETTER)
           ? js_getter_str
           : (oldAttrs & attrs & JSPROP_SETTER)
           ? js_setter_str
           : (oldAttrs & JSPROP_READONLY)
           ? js_const_str
           : isFunction
           ? js_function_str
           : js_var_str;
    name = js_ValueToPrintableString(cx, ID_TO_VALUE(id));
    if (!name)
        return JS_FALSE;
    return JS_ReportErrorFlagsAndNumber(cx, report,
                                        js_GetErrorMessage, NULL,
                                        JSMSG_REDECLARED_VAR,
                                        type, name);
}
1746 |
|
1747 |
/*
 * ES3 strict equality (===): no type coercion except numeric comparison
 * across int/double representations; object comparison unwraps any
 * engine-level wrapper objects first.
 */
JSBool
js_StrictlyEqual(JSContext *cx, jsval lval, jsval rval)
{
    jsval ltag = JSVAL_TAG(lval), rtag = JSVAL_TAG(rval);
    jsdouble ld, rd;

    if (ltag == rtag) {
        if (ltag == JSVAL_STRING) {
            JSString *lstr = JSVAL_TO_STRING(lval),
                     *rstr = JSVAL_TO_STRING(rval);
            return js_EqualStrings(lstr, rstr);
        }
        if (ltag == JSVAL_DOUBLE) {
            ld = *JSVAL_TO_DOUBLE(lval);
            rd = *JSVAL_TO_DOUBLE(rval);
            /* NOTE(review): last arg is presumably the NaN result -- JS_FALSE. */
            return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
        }
        if (ltag == JSVAL_OBJECT &&
            lval != rval &&
            !JSVAL_IS_NULL(lval) &&
            !JSVAL_IS_NULL(rval)) {
            JSObject *lobj, *robj;

            /* Compare wrapped identities, not the wrappers themselves. */
            lobj = js_GetWrappedObject(cx, JSVAL_TO_OBJECT(lval));
            robj = js_GetWrappedObject(cx, JSVAL_TO_OBJECT(rval));
            lval = OBJECT_TO_JSVAL(lobj);
            rval = OBJECT_TO_JSVAL(robj);
        }
        /* Same tag: identity comparison (ints, booleans, objects, null). */
        return lval == rval;
    }
    /* Mixed int/double numeric comparisons. */
    if (ltag == JSVAL_DOUBLE && JSVAL_IS_INT(rval)) {
        ld = *JSVAL_TO_DOUBLE(lval);
        rd = JSVAL_TO_INT(rval);
        return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
    }
    if (JSVAL_IS_INT(lval) && rtag == JSVAL_DOUBLE) {
        ld = JSVAL_TO_INT(lval);
        rd = *JSVAL_TO_DOUBLE(rval);
        return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
    }
    /* Different tags, not both numeric: never equal. */
    return lval == rval;
}
1789 |
|
1790 |
static inline bool |
1791 |
IsNegativeZero(jsval v) |
1792 |
{ |
1793 |
return JSVAL_IS_DOUBLE(v) && JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v)); |
1794 |
} |
1795 |
|
1796 |
static inline bool |
1797 |
IsNaN(jsval v) |
1798 |
{ |
1799 |
return JSVAL_IS_DOUBLE(v) && JSDOUBLE_IS_NaN(*JSVAL_TO_DOUBLE(v)); |
1800 |
} |
1801 |
|
1802 |
/*
 * SameValue(v1, v2): like strict equality, except that -0 is distinguished
 * from +0 and NaN is considered equal to itself.
 */
JSBool
js_SameValue(jsval v1, jsval v2, JSContext *cx)
{
    const bool negZero1 = IsNegativeZero(v1);
    const bool negZero2 = IsNegativeZero(v2);

    /* -0 is SameValue only to -0, never to +0 or anything else. */
    if (negZero1 || negZero2)
        return negZero1 && negZero2;

    /* NaN is SameValue to NaN, unlike ===. */
    if (IsNaN(v1) && IsNaN(v2))
        return JS_TRUE;

    return js_StrictlyEqual(cx, v1, v2);
}
1813 |
|
1814 |
/*
 * Implement |new|: vp[0] is the constructor, vp[1] the nominal |this| slot
 * (reused here as a scratch root), vp[2..2+argc-1] the arguments.  On
 * success *vp holds the construction result.  When clampReturn is true, a
 * primitive return from a scripted constructor is replaced by the newly
 * created object; a primitive return from a native [[Construct]] reports
 * an error.
 */
JS_REQUIRES_STACK JSBool
js_InvokeConstructor(JSContext *cx, uintN argc, JSBool clampReturn, jsval *vp)
{
    JSFunction *fun, *fun2;
    JSObject *obj, *obj2, *proto, *parent;
    jsval lval, rval;
    JSClass *clasp;

    fun = NULL;
    obj2 = NULL;
    lval = *vp;
    /* Unless the callee is a non-function object with its own construct
       hook, coerce it to a function (reporting if that fails). */
    if (!JSVAL_IS_OBJECT(lval) ||
        (obj2 = JSVAL_TO_OBJECT(lval)) == NULL ||
        /* XXX clean up to avoid special cases above ObjectOps layer */
        OBJ_GET_CLASS(cx, obj2) == &js_FunctionClass ||
        !obj2->map->ops->construct)
    {
        fun = js_ValueToFunction(cx, vp, JSV2F_CONSTRUCT);
        if (!fun)
            return JS_FALSE;
    }

    clasp = &js_ObjectClass;
    if (!obj2) {
        proto = parent = NULL;
        fun = NULL;
    } else {
        /*
         * Get the constructor prototype object for this function.
         * Use the nominal 'this' parameter slot, vp[1], as a local
         * root to protect this prototype, in case it has no other
         * strong refs.
         */
        if (!obj2->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom),
                               &vp[1])) {
            return JS_FALSE;
        }
        rval = vp[1];
        proto = JSVAL_IS_OBJECT(rval) ? JSVAL_TO_OBJECT(rval) : NULL;
        parent = OBJ_GET_PARENT(cx, obj2);

        /* A native function may dictate the class of the new object. */
        if (OBJ_GET_CLASS(cx, obj2) == &js_FunctionClass) {
            fun2 = GET_FUNCTION_PRIVATE(cx, obj2);
            if (!FUN_INTERPRETED(fun2) && fun2->u.n.clasp)
                clasp = fun2->u.n.clasp;
        }
    }
    obj = js_NewObject(cx, clasp, proto, parent);
    if (!obj)
        return JS_FALSE;

    /* Keep obj alive across the invocation below. */
    JSAutoTempValueRooter tvr(cx, obj);

    /* Now we have an object with a constructor method; call it. */
    vp[1] = OBJECT_TO_JSVAL(obj);
    if (!js_Invoke(cx, argc, vp, JSINVOKE_CONSTRUCT))
        return JS_FALSE;

    /* Check the return value and if it's primitive, force it to be obj. */
    rval = *vp;
    if (clampReturn && JSVAL_IS_PRIMITIVE(rval)) {
        if (!fun) {
            /* native [[Construct]] returning primitive is error */
            JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                                 JSMSG_BAD_NEW_RESULT,
                                 js_ValueToPrintableString(cx, rval));
            return JS_FALSE;
        }
        *vp = OBJECT_TO_JSVAL(obj);
    }

    JS_RUNTIME_METER(cx->runtime, constructs);
    return JS_TRUE;
}
1888 |
|
1889 |
/*
 * Convert the non-int value idval into a jsid suitable for property access
 * on obj, interning a string id in the common case.  With XML support, XML
 * objects may use an object value directly as an id, and a function QName
 * object may map to an id via js_IsFunctionQName.
 */
JSBool
js_InternNonIntElementId(JSContext *cx, JSObject *obj, jsval idval, jsid *idp)
{
    JS_ASSERT(!JSVAL_IS_INT(idval));

#if JS_HAS_XML_SUPPORT
    if (!JSVAL_IS_PRIMITIVE(idval)) {
        if (OBJECT_IS_XML(cx, obj)) {
            /* XML ids carry the object itself. */
            *idp = OBJECT_JSVAL_TO_JSID(idval);
            return JS_TRUE;
        }
        if (!js_IsFunctionQName(cx, JSVAL_TO_OBJECT(idval), idp))
            return JS_FALSE;
        /* Nonzero *idp means the QName resolved to a usable id. */
        if (*idp != 0)
            return JS_TRUE;
    }
#endif

    /* Fallback: stringify idval and intern the result as the id. */
    return js_ValueToStringId(cx, idval, idp);
}
1909 |
|
1910 |
/*
 * Enter a new |with| scope using the object at sp[-1] and associate the
 * depth of the with block (sp + stackIndex relative to StackBase(fp),
 * stackIndex < 0) with the new With object, pushing it on fp's scope chain.
 */
JS_STATIC_INTERPRET JS_REQUIRES_STACK JSBool
js_EnterWith(JSContext *cx, jsint stackIndex)
{
    JSStackFrame *fp;
    jsval *sp;
    JSObject *obj, *parent, *withobj;

    fp = cx->fp;
    sp = fp->regs->sp;
    JS_ASSERT(stackIndex < 0);
    JS_ASSERT(StackBase(fp) <= sp + stackIndex);

    if (!JSVAL_IS_PRIMITIVE(sp[-1])) {
        obj = JSVAL_TO_OBJECT(sp[-1]);
    } else {
        /* Box the primitive; store it back so the object stays rooted. */
        obj = js_ValueToNonNullObject(cx, sp[-1]);
        if (!obj)
            return JS_FALSE;
        sp[-1] = OBJECT_TO_JSVAL(obj);
    }

    parent = js_GetScopeChain(cx, fp);
    if (!parent)
        return JS_FALSE;

    /* May substitute an inner object for obj and nulls it on failure. */
    OBJ_TO_INNER_OBJECT(cx, obj);
    if (!obj)
        return JS_FALSE;

    withobj = js_NewWithObject(cx, obj, parent,
                               sp + stackIndex - StackBase(fp));
    if (!withobj)
        return JS_FALSE;

    fp->scopeChain = withobj;
    return JS_TRUE;
}
1951 |
|
1952 |
/*
 * Pop the innermost With object from the current frame's scope chain,
 * undoing js_EnterWith.
 */
JS_STATIC_INTERPRET JS_REQUIRES_STACK void
js_LeaveWith(JSContext *cx)
{
    JSObject *withobj;

    withobj = cx->fp->scopeChain;
    JS_ASSERT(OBJ_GET_CLASS(cx, withobj) == &js_WithClass);
    JS_ASSERT(withobj->getPrivate() == cx->fp);
    JS_ASSERT(OBJ_BLOCK_DEPTH(cx, withobj) >= 0);
    cx->fp->scopeChain = OBJ_GET_PARENT(cx, withobj);
    /* Clear the frame link so the object no longer counts as active
       (cf. the getPrivate() == cx->fp test in js_IsActiveWithOrBlock). */
    withobj->setPrivate(NULL);
}
1964 |
|
1965 |
/*
 * If obj is a With or Block object that belongs to the current frame and
 * whose recorded block depth is at or above stackDepth, return its class;
 * otherwise return NULL.
 */
JS_REQUIRES_STACK JSClass *
js_IsActiveWithOrBlock(JSContext *cx, JSObject *obj, int stackDepth)
{
    JSClass *objClass = OBJ_GET_CLASS(cx, obj);

    if (objClass != &js_WithClass && objClass != &js_BlockClass)
        return NULL;
    if (obj->getPrivate() != cx->fp)
        return NULL;
    if (OBJ_BLOCK_DEPTH(cx, obj) < stackDepth)
        return NULL;
    return objClass;
}
1978 |
|
1979 |
/*
 * Unwind block and scope chains to match the given depth.  On return
 * fp->regs->sp is reset to StackBase(fp) + stackDepth.  Returns
 * normalUnwind, possibly cleared to false if flushing a block object's
 * slots failed.
 */
JS_REQUIRES_STACK JSBool
js_UnwindScope(JSContext *cx, JSStackFrame *fp, jsint stackDepth,
               JSBool normalUnwind)
{
    JSObject *obj;
    JSClass *clasp;

    JS_ASSERT(stackDepth >= 0);
    JS_ASSERT(StackBase(fp) + stackDepth <= fp->regs->sp);

    /* Trim the compile-time block chain to blocks below stackDepth. */
    for (obj = fp->blockChain; obj; obj = OBJ_GET_PARENT(cx, obj)) {
        JS_ASSERT(OBJ_GET_CLASS(cx, obj) == &js_BlockClass);
        if (OBJ_BLOCK_DEPTH(cx, obj) < stackDepth)
            break;
    }
    fp->blockChain = obj;

    /* Pop active With/Block objects from the runtime scope chain. */
    for (;;) {
        obj = fp->scopeChain;
        clasp = js_IsActiveWithOrBlock(cx, obj, stackDepth);
        if (!clasp)
            break;
        if (clasp == &js_BlockClass) {
            /* Don't fail until after we've updated all stacks. */
            normalUnwind &= js_PutBlockObject(cx, normalUnwind);
        } else {
            js_LeaveWith(cx);
        }
    }

    fp->regs->sp = StackBase(fp) + stackDepth;
    return normalUnwind;
}
2016 |
|
2017 |
/*
 * Shared helper for the increment/decrement opcodes.  *vp holds the operand
 * and, on return, the value of the expression (for postfix forms the
 * original value, for prefix forms the new value); *vp2 receives the new
 * (incremented/decremented) value.  cs describes the opcode: JOF_INC
 * selects ++ over --, JOF_POST marks a postfix form.
 */
JS_STATIC_INTERPRET JSBool
js_DoIncDec(JSContext *cx, const JSCodeSpec *cs, jsval *vp, jsval *vp2)
{
    jsval v;
    jsdouble d;

    v = *vp;
    if (JSVAL_IS_DOUBLE(v)) {
        d = *JSVAL_TO_DOUBLE(v);
    } else if (JSVAL_IS_INT(v)) {
        d = JSVAL_TO_INT(v);
    } else {
        /* Non-number operand: convert, failing if *vp was nulled. */
        d = js_ValueToNumber(cx, vp);
        if (JSVAL_IS_NULL(*vp))
            return JS_FALSE;
        /* NOTE(review): *vp == JSVAL_TRUE apparently signals that the
           converted number was not written back into *vp -- confirm
           against js_ValueToNumber's contract. */
        JS_ASSERT(JSVAL_IS_NUMBER(*vp) || *vp == JSVAL_TRUE);

        /* Store the result of v conversion back in vp for post increments. */
        if ((cs->format & JOF_POST) &&
            *vp == JSVAL_TRUE
            && !js_NewNumberInRootedValue(cx, d, vp)) {
            return JS_FALSE;
        }
    }

    (cs->format & JOF_INC) ? d++ : d--;
    if (!js_NewNumberInRootedValue(cx, d, vp2))
        return JS_FALSE;

    /* For prefix forms the expression value is the new value. */
    if (!(cs->format & JOF_POST))
        *vp = *vp2;
    return JS_TRUE;
}
2050 |
|
2051 |
/*
 * Return a reference to the value of the upvar addressed by (level, cookie).
 * The cookie packs a frame-skip count and a slot number; cx->display maps
 * static levels to active stack frames.
 */
jsval&
js_GetUpvar(JSContext *cx, uintN level, uintN cookie)
{
    level -= UPVAR_FRAME_SKIP(cookie);
    JS_ASSERT(level < JS_DISPLAY_SIZE);

    JSStackFrame *fp = cx->display[level];
    JS_ASSERT(fp->script);

    uintN slot = UPVAR_FRAME_SLOT(cookie);
    jsval *vp;

    if (!fp->fun) {
        /* Non-function (global/eval) frame: slots follow the fixed slots.
           NOTE(review): presumably these are block-scoped locals -- confirm. */
        vp = fp->slots + fp->script->nfixed;
    } else if (slot < fp->fun->nargs) {
        /* The upvar is a formal argument of the target frame. */
        vp = fp->argv;
    } else if (slot == CALLEE_UPVAR_SLOT) {
        /* The upvar is the callee itself, stored at argv[-2]. */
        vp = &fp->argv[-2];
        slot = 0;
    } else {
        /* Otherwise a local variable slot, biased past the formals. */
        slot -= fp->fun->nargs;
        JS_ASSERT(slot < fp->script->nslots);
        vp = fp->slots;
    }

    return vp[slot];
}
2078 |
|
2079 |
#ifdef DEBUG |
2080 |
|
2081 |
/*
 * DEBUG-only bytecode tracer, invoked when cx->tracefp is set.  Prints the
 * previous opcode's output values, the current operand stack, the
 * disassembled current instruction, and its input values to cx->tracefp.
 */
JS_STATIC_INTERPRET JS_REQUIRES_STACK void
js_TraceOpcode(JSContext *cx)
{
    FILE *tracefp;
    JSStackFrame *fp;
    JSFrameRegs *regs;
    intN ndefs, n, nuses;
    jsval *siter;
    JSString *str;
    JSOp op;

    tracefp = (FILE *) cx->tracefp;
    JS_ASSERT(tracefp);
    fp = cx->fp;
    regs = fp->regs;

    /*
     * Operations in prologues don't produce interesting values, and
     * js_DecompileValueGenerator isn't set up to handle them anyway.
     */
    if (cx->tracePrevPc && regs->pc >= fp->script->main) {
        JSOp tracePrevOp = JSOp(*cx->tracePrevPc);
        ndefs = js_GetStackDefs(cx, &js_CodeSpec[tracePrevOp], tracePrevOp,
                                fp->script, cx->tracePrevPc);

        /*
         * If there aren't that many elements on the stack, then
         * we have probably entered a new frame, and printing output
         * would just be misleading.
         */
        if (ndefs != 0 &&
            ndefs < regs->sp - fp->slots) {
            /* Print the previous opcode's defs, comma-separated. */
            for (n = -ndefs; n < 0; n++) {
                char *bytes = js_DecompileValueGenerator(cx, n, regs->sp[n],
                                                         NULL);
                if (bytes) {
                    fprintf(tracefp, "%s %s",
                            (n == -ndefs) ? " output:" : ",",
                            bytes);
                    cx->free(bytes);
                }
            }
            fprintf(tracefp, " @ %u\n", (uintN) (regs->sp - StackBase(fp)));
        }
        /* Dump the whole operand stack as escaped strings. */
        fprintf(tracefp, " stack: ");
        for (siter = StackBase(fp); siter < regs->sp; siter++) {
            str = js_ValueToString(cx, *siter);
            if (!str)
                fputs("<null>", tracefp);
            else
                js_FileEscapedString(tracefp, str, 0);
            fputc(' ', tracefp);
        }
        fputc('\n', tracefp);
    }

    /* Line number and disassembly of the instruction about to execute. */
    fprintf(tracefp, "%4u: ",
            js_PCToLineNumber(cx, fp->script, fp->imacpc ? fp->imacpc : regs->pc));
    js_Disassemble1(cx, fp->script, regs->pc,
                    regs->pc - fp->script->code,
                    JS_FALSE, tracefp);
    op = (JSOp) *regs->pc;
    nuses = js_GetStackUses(&js_CodeSpec[op], op, regs->pc);
    if (nuses != 0) {
        /* Print the current opcode's uses, comma-separated. */
        for (n = -nuses; n < 0; n++) {
            char *bytes = js_DecompileValueGenerator(cx, n, regs->sp[n],
                                                     NULL);
            if (bytes) {
                fprintf(tracefp, "%s %s",
                        (n == -nuses) ? " inputs:" : ",",
                        bytes);
                cx->free(bytes);
            }
        }
        fprintf(tracefp, " @ %u\n", (uintN) (regs->sp - StackBase(fp)));
    }
    cx->tracePrevPc = regs->pc;

    /* It's nice to have complete traces when debugging a crash. */
    fflush(tracefp);
}
2162 |
|
2163 |
#endif /* DEBUG */ |
2164 |
|
2165 |
#ifdef JS_OPMETER |
2166 |
|
2167 |
# include <stdlib.h> |
2168 |
|
2169 |
# define HIST_NSLOTS 8 |
2170 |
|
2171 |
/* |
2172 |
* The second dimension is hardcoded at 256 because we know that many bits fit |
2173 |
* in a byte, and mainly to optimize away multiplying by JSOP_LIMIT to address |
2174 |
* any particular row. |
2175 |
*/ |
2176 |
static uint32 succeeds[JSOP_LIMIT][256]; |
2177 |
static uint32 slot_ops[JSOP_LIMIT][HIST_NSLOTS]; |
2178 |
|
2179 |
/*
 * Count one execution of op2 immediately following op1.  JSOP_STOP serves
 * as the "no predecessor" sentinel (see METER_OP_INIT) and is not counted.
 */
JS_STATIC_INTERPRET void
js_MeterOpcodePair(JSOp op1, JSOp op2)
{
    if (op1 == JSOP_STOP)
        return;
    succeeds[op1][op2] += 1;
}
2185 |
|
2186 |
/*
 * Histogram slot-addressing opcode op against its slot number; slots at or
 * beyond HIST_NSLOTS are ignored.
 */
JS_STATIC_INTERPRET void
js_MeterSlotOpcode(JSOp op, uint32 slot)
{
    if (slot >= HIST_NSLOTS)
        return;
    slot_ops[op][slot] += 1;
}
2192 |
|
2193 |
typedef struct Edge { |
2194 |
const char *from; |
2195 |
const char *to; |
2196 |
uint32 count; |
2197 |
} Edge; |
2198 |
|
2199 |
static int |
2200 |
compare_edges(const void *a, const void *b) |
2201 |
{ |
2202 |
const Edge *ea = (const Edge *) a; |
2203 |
const Edge *eb = (const Edge *) b; |
2204 |
|
2205 |
return (int32)eb->count - (int32)ea->count; |
2206 |
} |
2207 |
|
2208 |
void |
2209 |
js_DumpOpMeters() |
2210 |
{ |
2211 |
const char *name, *from, *style; |
2212 |
FILE *fp; |
2213 |
uint32 total, count; |
2214 |
uint32 i, j, nedges; |
2215 |
Edge *graph; |
2216 |
|
2217 |
name = getenv("JS_OPMETER_FILE"); |
2218 |
if (!name) |
2219 |
name = "/tmp/ops.dot"; |
2220 |
fp = fopen(name, "w"); |
2221 |
if (!fp) { |
2222 |
perror(name); |
2223 |
return; |
2224 |
} |
2225 |
|
2226 |
total = nedges = 0; |
2227 |
for (i = 0; i < JSOP_LIMIT; i++) { |
2228 |
for (j = 0; j < JSOP_LIMIT; j++) { |
2229 |
count = succeeds[i][j]; |
2230 |
if (count != 0) { |
2231 |
total += count; |
2232 |
++nedges; |
2233 |
} |
2234 |
} |
2235 |
} |
2236 |
|
2237 |
# define SIGNIFICANT(count,total) (200. * (count) >= (total)) |
2238 |
|
2239 |
graph = (Edge *) js_calloc(nedges * sizeof graph[0]); |
2240 |
for (i = nedges = 0; i < JSOP_LIMIT; i++) { |
2241 |
from = js_CodeName[i]; |
2242 |
for (j = 0; j < JSOP_LIMIT; j++) { |
2243 |
count = succeeds[i][j]; |
2244 |
if (count != 0 && SIGNIFICANT(count, total)) { |
2245 |
graph[nedges].from = from; |
2246 |
graph[nedges].to = js_CodeName[j]; |
2247 |
graph[nedges].count = count; |
2248 |
++nedges; |
2249 |
} |
2250 |
} |
2251 |
} |
2252 |
qsort(graph, nedges, sizeof(Edge), compare_edges); |
2253 |
|
2254 |
# undef SIGNIFICANT |
2255 |
|
2256 |
fputs("digraph {\n", fp); |
2257 |
for (i = 0, style = NULL; i < nedges; i++) { |
2258 |
JS_ASSERT(i == 0 || graph[i-1].count >= graph[i].count); |
2259 |
if (!style || graph[i-1].count != graph[i].count) { |
2260 |
style = (i > nedges * .75) ? "dotted" : |
2261 |
(i > nedges * .50) ? "dashed" : |
2262 |
(i > nedges * .25) ? "solid" : "bold"; |
2263 |
} |
2264 |
fprintf(fp, " %s -> %s [label=\"%lu\" style=%s]\n", |
2265 |
graph[i].from, graph[i].to, |
2266 |
(unsigned long)graph[i].count, style); |
2267 |
} |
2268 |
js_free(graph); |
2269 |
fputs("}\n", fp); |
2270 |
fclose(fp); |
2271 |
|
2272 |
name = getenv("JS_OPMETER_HIST"); |
2273 |
if (!name) |
2274 |
name = "/tmp/ops.hist"; |
2275 |
fp = fopen(name, "w"); |
2276 |
if (!fp) { |
2277 |
perror(name); |
2278 |
return; |
2279 |
} |
2280 |
fputs("bytecode", fp); |
2281 |
for (j = 0; j < HIST_NSLOTS; j++) |
2282 |
fprintf(fp, " slot %1u", (unsigned)j); |
2283 |
putc('\n', fp); |
2284 |
fputs("========", fp); |
2285 |
for (j = 0; j < HIST_NSLOTS; j++) |
2286 |
fputs(" =======", fp); |
2287 |
putc('\n', fp); |
2288 |
for (i = 0; i < JSOP_LIMIT; i++) { |
2289 |
for (j = 0; j < HIST_NSLOTS; j++) { |
2290 |
if (slot_ops[i][j] != 0) { |
2291 |
/* Reuse j in the next loop, since we break after. */ |
2292 |
fprintf(fp, "%-8.8s", js_CodeName[i]); |
2293 |
for (j = 0; j < HIST_NSLOTS; j++) |
2294 |
fprintf(fp, " %7lu", (unsigned long)slot_ops[i][j]); |
2295 |
putc('\n', fp); |
2296 |
break; |
2297 |
} |
2298 |
} |
2299 |
} |
2300 |
fclose(fp); |
2301 |
} |
2302 |
|
2303 |
#endif /* JS_OPMETER */
2304 |
|
2305 |
#endif /* !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___ */ |
2306 |
|
2307 |
#ifndef jsinvoke_cpp___ |
2308 |
|
2309 |
/*
 * Operand-stack helpers over the interpreter's register set |regs|.
 * The *_OPND variants are historical aliases of the plain forms.
 */
#define PUSH(v)         (*regs.sp++ = (v))
#define PUSH_OPND(v)    PUSH(v)
#define STORE_OPND(n,v) (regs.sp[n] = (v))
#define POP()           (*--regs.sp)
#define POP_OPND()      POP()
#define FETCH_OPND(n)   (regs.sp[n])

/*
 * Push the jsdouble d using sp from the lexical environment. Try to convert d
 * to a jsint that fits in a jsval, otherwise GC-alloc space for it and push a
 * reference.
 */
#define STORE_NUMBER(cx, n, d)                                                \
    JS_BEGIN_MACRO                                                            \
        jsint i_;                                                             \
                                                                              \
        if (JSDOUBLE_IS_INT(d, i_) && INT_FITS_IN_JSVAL(i_))                  \
            regs.sp[n] = INT_TO_JSVAL(i_);                                    \
        else if (!js_NewDoubleInRootedValue(cx, d, &regs.sp[n]))              \
            goto error;                                                       \
    JS_END_MACRO

/* Store jsint i at slot n: an int jsval if it fits, else a GC'ed double. */
#define STORE_INT(cx, n, i)                                                   \
    JS_BEGIN_MACRO                                                            \
        if (INT_FITS_IN_JSVAL(i))                                             \
            regs.sp[n] = INT_TO_JSVAL(i);                                     \
        else if (!js_NewDoubleInRootedValue(cx, (jsdouble) (i), &regs.sp[n])) \
            goto error;                                                       \
    JS_END_MACRO

/* As STORE_INT, but for an unsigned value (only the upper bound matters). */
#define STORE_UINT(cx, n, u)                                                  \
    JS_BEGIN_MACRO                                                            \
        if ((u) <= JSVAL_INT_MAX)                                             \
            regs.sp[n] = INT_TO_JSVAL(u);                                     \
        else if (!js_NewDoubleInRootedValue(cx, (jsdouble) (u), &regs.sp[n])) \
            goto error;                                                       \
    JS_END_MACRO

/* Read slot n and convert it in place to the jsdouble d. */
#define FETCH_NUMBER(cx, n, d)                                                \
    JS_BEGIN_MACRO                                                            \
        jsval v_;                                                             \
                                                                              \
        v_ = FETCH_OPND(n);                                                   \
        VALUE_TO_NUMBER(cx, n, v_, d);                                        \
    JS_END_MACRO

/* Read slot n as an ECMA int32 i, converting (and rooting) if necessary. */
#define FETCH_INT(cx, n, i)                                                   \
    JS_BEGIN_MACRO                                                            \
        jsval v_;                                                             \
                                                                              \
        v_= FETCH_OPND(n);                                                    \
        if (JSVAL_IS_INT(v_)) {                                               \
            i = JSVAL_TO_INT(v_);                                             \
        } else {                                                              \
            i = js_ValueToECMAInt32(cx, &regs.sp[n]);                         \
            if (JSVAL_IS_NULL(regs.sp[n]))                                    \
                goto error;                                                   \
        }                                                                     \
    JS_END_MACRO

/* Read slot n as an ECMA uint32 ui, converting (and rooting) if necessary. */
#define FETCH_UINT(cx, n, ui)                                                 \
    JS_BEGIN_MACRO                                                            \
        jsval v_;                                                             \
                                                                              \
        v_= FETCH_OPND(n);                                                    \
        if (JSVAL_IS_INT(v_)) {                                               \
            ui = (uint32) JSVAL_TO_INT(v_);                                   \
        } else {                                                              \
            ui = js_ValueToECMAUint32(cx, &regs.sp[n]);                       \
            if (JSVAL_IS_NULL(regs.sp[n]))                                    \
                goto error;                                                   \
        }                                                                     \
    JS_END_MACRO
2382 |
|
2383 |
/*
 * Optimized conversion macros that test for the desired type in v before
 * homing sp and calling a conversion function.
 */
/* Convert v (== regs.sp[n]) to the jsdouble d, converting in place. */
#define VALUE_TO_NUMBER(cx, n, v, d)                                          \
    JS_BEGIN_MACRO                                                            \
        JS_ASSERT(v == regs.sp[n]);                                           \
        if (JSVAL_IS_INT(v)) {                                                \
            d = (jsdouble)JSVAL_TO_INT(v);                                    \
        } else if (JSVAL_IS_DOUBLE(v)) {                                      \
            d = *JSVAL_TO_DOUBLE(v);                                          \
        } else {                                                              \
            d = js_ValueToNumber(cx, &regs.sp[n]);                            \
            if (JSVAL_IS_NULL(regs.sp[n]))                                    \
                goto error;                                                   \
            JS_ASSERT(JSVAL_IS_NUMBER(regs.sp[n]) ||                          \
                      regs.sp[n] == JSVAL_TRUE);                              \
        }                                                                     \
    JS_END_MACRO

/* Pop the top of stack into JSBool b; JSVAL_NULL converts to false. */
#define POP_BOOLEAN(cx, v, b)                                                 \
    JS_BEGIN_MACRO                                                            \
        v = FETCH_OPND(-1);                                                   \
        if (v == JSVAL_NULL) {                                                \
            b = JS_FALSE;                                                     \
        } else if (JSVAL_IS_BOOLEAN(v)) {                                     \
            b = JSVAL_TO_BOOLEAN(v);                                          \
        } else {                                                              \
            b = js_ValueToBoolean(v);                                         \
        }                                                                     \
        regs.sp--;                                                            \
    JS_END_MACRO

/* Convert v to an object, storing the boxed result back at slot n. */
#define VALUE_TO_OBJECT(cx, n, v, obj)                                        \
    JS_BEGIN_MACRO                                                            \
        if (!JSVAL_IS_PRIMITIVE(v)) {                                         \
            obj = JSVAL_TO_OBJECT(v);                                         \
        } else {                                                              \
            obj = js_ValueToNonNullObject(cx, v);                             \
            if (!obj)                                                         \
                goto error;                                                   \
            STORE_OPND(n, OBJECT_TO_JSVAL(obj));                              \
        }                                                                     \
    JS_END_MACRO

/* Fetch slot n into v and convert it to an object. */
#define FETCH_OBJECT(cx, n, v, obj)                                           \
    JS_BEGIN_MACRO                                                            \
        v = FETCH_OPND(n);                                                    \
        VALUE_TO_OBJECT(cx, n, v, obj);                                       \
    JS_END_MACRO

/* Replace the object v at slot n with its default value for the hint. */
#define DEFAULT_VALUE(cx, n, hint, v)                                         \
    JS_BEGIN_MACRO                                                            \
        JS_ASSERT(!JSVAL_IS_PRIMITIVE(v));                                    \
        JS_ASSERT(v == regs.sp[n]);                                           \
        if (!JSVAL_TO_OBJECT(v)->defaultValue(cx, hint, &regs.sp[n]))         \
            goto error;                                                       \
        v = regs.sp[n];                                                       \
    JS_END_MACRO
2442 |
|
2443 |
/*
 * Quickly test if v is an int from the [-2**29, 2**29) range, that is, when
 * the lowest bit of v is 1 and the bits 30 and 31 are both either 0 or 1. For
 * such v we can do increment or decrement via adding or subtracting two
 * without checking that the result overflows JSVAL_INT_MIN or JSVAL_INT_MAX.
 */
#define CAN_DO_FAST_INC_DEC(v)     (((((v) << 1) ^ v) & 0x80000001) == 1)

/* Verify the tag layout the fast path above relies on. */
JS_STATIC_ASSERT(JSVAL_INT == 1);
JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(INT_TO_JSVAL_CONSTEXPR(JSVAL_INT_MIN)));
JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(INT_TO_JSVAL_CONSTEXPR(JSVAL_INT_MAX)));

/*
 * Conditional assert to detect failure to clear a pending exception that is
 * suppressed (or unintentional suppression of a wanted exception).
 */
#if defined DEBUG_brendan || defined DEBUG_mrbkap || defined DEBUG_shaver
# define DEBUG_NOT_THROWING 1
#endif

#ifdef DEBUG_NOT_THROWING
# define ASSERT_NOT_THROWING(cx) JS_ASSERT(!(cx)->throwing)
#else
# define ASSERT_NOT_THROWING(cx) /* nothing */
#endif
2468 |
|
2469 |
/*
 * Define JS_OPMETER to instrument bytecode succession, generating a .dot file
 * on shutdown that shows the graph of significant predecessor/successor pairs
 * executed, where the edge labels give the succession counts. The .dot file
 * is named by the JS_OPMETER_FILE envariable, and defaults to /tmp/ops.dot.
 *
 * Bonus feature: JS_OPMETER also enables counters for stack-addressing ops
 * such as JSOP_GETLOCAL, JSOP_INCARG, via METER_SLOT_OP. The resulting counts
 * are written to JS_OPMETER_HIST, defaulting to /tmp/ops.hist.
 */
#ifndef JS_OPMETER
# define METER_OP_INIT(op)      /* nothing */
# define METER_OP_PAIR(op1,op2) /* nothing */
# define METER_SLOT_OP(op,slot) /* nothing */
#else

/*
 * The second dimension is hardcoded at 256 because we know that many bits fit
 * in a byte, and mainly to optimize away multiplying by JSOP_LIMIT to address
 * any particular row.
 *
 * NOTE(review): this paragraph appears to describe the succeeds[][] table
 * defined earlier in the file rather than the macros below -- it looks
 * duplicated/misplaced; confirm before relying on it here.
 */
# define METER_OP_INIT(op)      ((op) = JSOP_STOP)
# define METER_OP_PAIR(op1,op2) (js_MeterOpcodePair(op1, op2))
# define METER_SLOT_OP(op,slot) (js_MeterSlotOpcode(op, slot))

#endif

/* Bound on inlineCallCount in js_Interpret (see its declaration below);
   exceeding it presumably reports over-recursion -- confirm at the use. */
#define MAX_INLINE_CALL_COUNT 3000
2497 |
|
2498 |
/*
 * Threaded interpretation via computed goto appears to be well-supported by
 * GCC 3 and higher. IBM's C compiler when run with the right options (e.g.,
 * -qlanglvl=extended) also supports threading. Ditto the SunPro C compiler.
 * Currently it's broken for JS_VERSION < 160, though this isn't worth fixing.
 * Add your compiler support macros here.
 *
 * JS_THREADED_INTERP may also be predefined (e.g. on the compiler command
 * line) to force a choice; the #ifndef guard below honors that.
 */
#ifndef JS_THREADED_INTERP
# if JS_VERSION >= 160 && (                                                   \
    __GNUC__ >= 3 ||                                                          \
    (__IBMC__ >= 700 && defined __IBM_COMPUTED_GOTO) ||                       \
    __SUNPRO_C >= 0x570)
#  define JS_THREADED_INTERP 1
# else
#  define JS_THREADED_INTERP 0
# endif
#endif
2515 |
|
2516 |
/*
 * Deadlocks or else bad races are likely if JS_THREADSAFE, so we must rely on
 * single-thread DEBUG js shell testing to verify property cache hits.
 */
#if defined DEBUG && !defined JS_THREADSAFE

/* Re-resolve the property a cache hit claims to name and cross-check it. */
# define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry)                \
    JS_BEGIN_MACRO                                                            \
        if (!AssertValidPropertyCacheHit(cx, script, regs, pcoff, obj, pobj, \
                                         entry)) {                            \
            goto error;                                                       \
        }                                                                     \
    JS_END_MACRO

/*
 * Slow-path validation of a property cache hit: look the property up again
 * (by the atom at pcoff in the current bytecode, or 'length' when
 * pcoff < 0) and assert that the cache entry agrees with the result.
 * Returns false only if the lookup itself failed.
 */
static bool
AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, JSFrameRegs& regs,
                            ptrdiff_t pcoff, JSObject *start, JSObject *found,
                            JSPropCacheEntry *entry)
{
    /* Sample the GC number so we can detect a GC during the lookup. */
    uint32 sample = cx->runtime->gcNumber;

    JSAtom *atom;
    if (pcoff >= 0)
        GET_ATOM_FROM_BYTECODE(script, regs.pc, pcoff, atom);
    else
        atom = cx->runtime->atomState.lengthAtom;

    JSObject *obj, *pobj;
    JSProperty *prop;
    bool ok;

    /* Name ops search the scope chain; others start from the given object. */
    if (JOF_OPMODE(*regs.pc) == JOF_NAME) {
        ok = js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &pobj, &prop);
    } else {
        obj = start;
        ok = js_LookupProperty(cx, obj, ATOM_TO_JSID(atom), &pobj, &prop);
    }
    if (!ok)
        return false;
    if (!prop)
        return true;
    /* A GC or shape change during lookup makes the check inconclusive. */
    if (cx->runtime->gcNumber != sample ||
        PCVCAP_SHAPE(entry->vcap) != OBJ_SHAPE(pobj)) {
        pobj->dropProperty(cx, prop);
        return true;
    }
    JS_ASSERT(prop);
    JS_ASSERT(pobj == found);

    /* Cross-check the entry's value word against the fresh lookup. */
    JSScopeProperty *sprop = (JSScopeProperty *) prop;
    if (PCVAL_IS_SLOT(entry->vword)) {
        JS_ASSERT(PCVAL_TO_SLOT(entry->vword) == sprop->slot);
    } else if (PCVAL_IS_SPROP(entry->vword)) {
        JS_ASSERT(PCVAL_TO_SPROP(entry->vword) == sprop);
    } else {
        jsval v;
        JS_ASSERT(PCVAL_IS_OBJECT(entry->vword));
        JS_ASSERT(entry->vword != PCVAL_NULL);
        JS_ASSERT(OBJ_SCOPE(pobj)->branded());
        JS_ASSERT(SPROP_HAS_STUB_GETTER(sprop));
        JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(pobj)));
        v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot);
        JS_ASSERT(VALUE_IS_FUNCTION(cx, v));
        JS_ASSERT(PCVAL_TO_OBJECT(entry->vword) == JSVAL_TO_OBJECT(v));
    }

    pobj->dropProperty(cx, prop);
    return true;
}

#else
# define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) ((void) 0)
#endif
2589 |
|
2590 |
/*
 * Ensure that the interpreter switch can close call-bytecode cases in the
 * same way as non-call bytecodes.
 */
JS_STATIC_ASSERT(JSOP_NAME_LENGTH == JSOP_CALLNAME_LENGTH);
JS_STATIC_ASSERT(JSOP_GETGVAR_LENGTH == JSOP_CALLGVAR_LENGTH);
JS_STATIC_ASSERT(JSOP_GETUPVAR_LENGTH == JSOP_CALLUPVAR_LENGTH);
JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_CALLUPVAR_DBG_LENGTH);
JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_GETUPVAR_LENGTH);
JS_STATIC_ASSERT(JSOP_GETDSLOT_LENGTH == JSOP_CALLDSLOT_LENGTH);
JS_STATIC_ASSERT(JSOP_GETARG_LENGTH == JSOP_CALLARG_LENGTH);
JS_STATIC_ASSERT(JSOP_GETLOCAL_LENGTH == JSOP_CALLLOCAL_LENGTH);
JS_STATIC_ASSERT(JSOP_XMLNAME_LENGTH == JSOP_CALLXMLNAME_LENGTH);

/*
 * Same for debuggable flat closures defined at top level in another function
 * or program fragment.
 */
JS_STATIC_ASSERT(JSOP_DEFFUN_FC_LENGTH == JSOP_DEFFUN_DBGFC_LENGTH);

/*
 * Same for JSOP_SETNAME and JSOP_SETPROP, which differ only slightly but
 * remain distinct for the decompiler.
 */
JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH);

/* See TRY_BRANCH_AFTER_COND. */
JS_STATIC_ASSERT(JSOP_IFNE_LENGTH == JSOP_IFEQ_LENGTH);
JS_STATIC_ASSERT(JSOP_IFNE == JSOP_IFEQ + 1);

/* For the fastest case under JSOP_INCNAME, etc. */
JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_DECNAME_LENGTH);
JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_NAMEINC_LENGTH);
JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_NAMEDEC_LENGTH);
2624 |
|
2625 |
/* Abort any active trace recording (no-op in non-tracing builds). */
#ifdef JS_TRACER
# define ABORT_RECORDING(cx, reason)                                          \
    JS_BEGIN_MACRO                                                            \
        if (TRACE_RECORDER(cx))                                               \
            js_AbortRecording(cx, reason);                                    \
    JS_END_MACRO
#else
# define ABORT_RECORDING(cx, reason)    ((void) 0)
#endif
2634 |
|
2635 |
JS_REQUIRES_STACK JSBool |
2636 |
js_Interpret(JSContext *cx) |
2637 |
{ |
2638 |
#ifdef MOZ_TRACEVIS |
2639 |
TraceVisStateObj tvso(cx, S_INTERP); |
2640 |
#endif |
2641 |
|
2642 |
JSRuntime *rt; |
2643 |
JSStackFrame *fp; |
2644 |
JSScript *script; |
2645 |
uintN inlineCallCount; |
2646 |
JSAtom **atoms; |
2647 |
JSVersion currentVersion, originalVersion; |
2648 |
JSFrameRegs regs; |
2649 |
JSObject *obj, *obj2, *parent; |
2650 |
JSBool ok, cond; |
2651 |
jsint len; |
2652 |
jsbytecode *endpc, *pc2; |
2653 |
JSOp op, op2; |
2654 |
jsatomid index; |
2655 |
JSAtom *atom; |
2656 |
uintN argc, attrs, flags; |
2657 |
uint32 slot; |
2658 |
jsval *vp, lval, rval, ltmp, rtmp; |
2659 |
jsid id; |
2660 |
JSProperty *prop; |
2661 |
JSScopeProperty *sprop; |
2662 |
JSString *str, *str2; |
2663 |
jsint i, j; |
2664 |
jsdouble d, d2; |
2665 |
JSClass *clasp; |
2666 |
JSFunction *fun; |
2667 |
JSType type; |
2668 |
jsint low, high, off, npairs; |
2669 |
JSBool match; |
2670 |
#if JS_HAS_GETTER_SETTER |
2671 |
JSPropertyOp getter, setter; |
2672 |
#endif |
2673 |
JSAutoResolveFlags rf(cx, JSRESOLVE_INFER); |
2674 |
|
2675 |
#ifdef __GNUC__ |
2676 |
# define JS_EXTENSION __extension__ |
2677 |
# define JS_EXTENSION_(s) __extension__ ({ s; }) |
2678 |
#else |
2679 |
# define JS_EXTENSION |
2680 |
# define JS_EXTENSION_(s) s |
2681 |
#endif |
2682 |
|
2683 |
# ifdef DEBUG |
2684 |
/* |
2685 |
* We call this macro from BEGIN_CASE in threaded interpreters, |
2686 |
* and before entering the switch in non-threaded interpreters. |
2687 |
* However, reaching such points doesn't mean we've actually |
2688 |
* fetched an OP from the instruction stream: some opcodes use |
2689 |
* 'op=x; DO_OP()' to let another opcode's implementation finish |
2690 |
* their work, and many opcodes share entry points with a run of |
2691 |
* consecutive BEGIN_CASEs. |
2692 |
* |
2693 |
* Take care to trace OP only when it is the opcode fetched from |
2694 |
* the instruction stream, so the trace matches what one would |
2695 |
* expect from looking at the code. (We do omit POPs after SETs; |
2696 |
* unfortunate, but not worth fixing.) |
2697 |
*/ |
2698 |
# define TRACE_OPCODE(OP) JS_BEGIN_MACRO \ |
2699 |
if (JS_UNLIKELY(cx->tracefp != NULL) && \ |
2700 |
(OP) == *regs.pc) \ |
2701 |
js_TraceOpcode(cx); \ |
2702 |
JS_END_MACRO |
2703 |
# else |
2704 |
# define TRACE_OPCODE(OP) ((void) 0) |
2705 |
# endif |
2706 |
|
2707 |
#if JS_THREADED_INTERP |
2708 |
static void *const normalJumpTable[] = { |
2709 |
# define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \ |
2710 |
JS_EXTENSION &&L_##op, |
2711 |
# include "jsopcode.tbl" |
2712 |
# undef OPDEF |
2713 |
}; |
2714 |
|
2715 |
static void *const interruptJumpTable[] = { |
2716 |
# define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \ |
2717 |
JS_EXTENSION &&interrupt, |
2718 |
# include "jsopcode.tbl" |
2719 |
# undef OPDEF |
2720 |
}; |
2721 |
|
2722 |
register void * const *jumpTable = normalJumpTable; |
2723 |
|
2724 |
METER_OP_INIT(op); /* to nullify first METER_OP_PAIR */ |
2725 |
|
2726 |
# define ENABLE_INTERRUPTS() ((void) (jumpTable = interruptJumpTable)) |
2727 |
|
2728 |
# ifdef JS_TRACER |
2729 |
# define CHECK_RECORDER() \ |
2730 |
JS_ASSERT_IF(TRACE_RECORDER(cx), jumpTable == interruptJumpTable) |
2731 |
# else |
2732 |
# define CHECK_RECORDER() ((void)0) |
2733 |
# endif |
2734 |
|
2735 |
# define DO_OP() JS_BEGIN_MACRO \ |
2736 |
CHECK_RECORDER(); \ |
2737 |
JS_EXTENSION_(goto *jumpTable[op]); \ |
2738 |
JS_END_MACRO |
2739 |
# define DO_NEXT_OP(n) JS_BEGIN_MACRO \ |
2740 |
METER_OP_PAIR(op, regs.pc[n]); \ |
2741 |
op = (JSOp) *(regs.pc += (n)); \ |
2742 |
DO_OP(); \ |
2743 |
JS_END_MACRO |
2744 |
|
2745 |
# define BEGIN_CASE(OP) L_##OP: TRACE_OPCODE(OP); CHECK_RECORDER(); |
2746 |
# define END_CASE(OP) DO_NEXT_OP(OP##_LENGTH); |
2747 |
# define END_VARLEN_CASE DO_NEXT_OP(len); |
2748 |
# define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP) \ |
2749 |
JS_ASSERT(js_CodeSpec[OP].length == 1); \ |
2750 |
op = (JSOp) *++regs.pc; \ |
2751 |
DO_OP(); |
2752 |
|
2753 |
# define END_EMPTY_CASES |
2754 |
|
2755 |
#else /* !JS_THREADED_INTERP */ |
2756 |
|
2757 |
register intN switchMask = 0; |
2758 |
intN switchOp; |
2759 |
|
2760 |
# define ENABLE_INTERRUPTS() ((void) (switchMask = -1)) |
2761 |
|
2762 |
# ifdef JS_TRACER |
2763 |
# define CHECK_RECORDER() \ |
2764 |
JS_ASSERT_IF(TRACE_RECORDER(cx), switchMask == -1) |
2765 |
# else |
2766 |
# define CHECK_RECORDER() ((void)0) |
2767 |
# endif |
2768 |
|
2769 |
# define DO_OP() goto do_op |
2770 |
# define DO_NEXT_OP(n) JS_BEGIN_MACRO \ |
2771 |
JS_ASSERT((n) == len); \ |
2772 |
goto advance_pc; \ |
2773 |
JS_END_MACRO |
2774 |
|
2775 |
# define BEGIN_CASE(OP) case OP: CHECK_RECORDER(); |
2776 |
# define END_CASE(OP) END_CASE_LEN(OP##_LENGTH) |
2777 |
# define END_CASE_LEN(n) END_CASE_LENX(n) |
2778 |
# define END_CASE_LENX(n) END_CASE_LEN##n |
2779 |
|
2780 |
/* |
2781 |
* To share the code for all len == 1 cases we use the specialized label with |
2782 |
* code that falls through to advance_pc: . |
2783 |
*/ |
2784 |
# define END_CASE_LEN1 goto advance_pc_by_one; |
2785 |
# define END_CASE_LEN2 len = 2; goto advance_pc; |
2786 |
# define END_CASE_LEN3 len = 3; goto advance_pc; |
2787 |
# define END_CASE_LEN4 len = 4; goto advance_pc; |
2788 |
# define END_CASE_LEN5 len = 5; goto advance_pc; |
2789 |
# define END_VARLEN_CASE goto advance_pc; |
2790 |
# define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP) |
2791 |
# define END_EMPTY_CASES goto advance_pc_by_one; |
2792 |
|
2793 |
#endif /* !JS_THREADED_INTERP */ |
2794 |
|
2795 |
#ifdef JS_TRACER |
2796 |
/* We cannot reenter the interpreter while recording. */ |
2797 |
if (TRACE_RECORDER(cx)) |
2798 |
js_AbortRecording(cx, "attempt to reenter interpreter while recording"); |
2799 |
#endif |
2800 |
|
2801 |
/* Check for too deep of a native thread stack. */ |
2802 |
JS_CHECK_RECURSION(cx, return JS_FALSE); |
2803 |
|
2804 |
rt = cx->runtime; |
2805 |
|
2806 |
/* Set registerized frame pointer and derived script pointer. */ |
2807 |
fp = cx->fp; |
2808 |
script = fp->script; |
2809 |
JS_ASSERT(script->length != 0); |
2810 |
|
2811 |
/* Count of JS function calls that nest in this C js_Interpret frame. */ |
2812 |
inlineCallCount = 0; |
2813 |
|
2814 |
/* |
2815 |
* Initialize the index segment register used by LOAD_ATOM and |
2816 |
* GET_FULL_INDEX macros below. As a register we use a pointer based on |
2817 |
* the atom map to turn frequently executed LOAD_ATOM into simple array |
2818 |
* access. For less frequent object and regexp loads we have to recover |
2819 |
* the segment from atoms pointer first. |
2820 |
*/ |
2821 |
atoms = script->atomMap.vector; |
2822 |
|
2823 |
#define LOAD_ATOM(PCOFF) \ |
2824 |
JS_BEGIN_MACRO \ |
2825 |
JS_ASSERT(fp->imacpc \ |
2826 |
? atoms == COMMON_ATOMS_START(&rt->atomState) && \ |
2827 |
GET_INDEX(regs.pc + PCOFF) < js_common_atom_count \ |
2828 |
: (size_t)(atoms - script->atomMap.vector) < \ |
2829 |
(size_t)(script->atomMap.length - \ |
2830 |
GET_INDEX(regs.pc + PCOFF))); \ |
2831 |
atom = atoms[GET_INDEX(regs.pc + PCOFF)]; \ |
2832 |
JS_END_MACRO |
2833 |
|
2834 |
#define GET_FULL_INDEX(PCOFF) \ |
2835 |
(atoms - script->atomMap.vector + GET_INDEX(regs.pc + PCOFF)) |
2836 |
|
2837 |
#define LOAD_OBJECT(PCOFF) \ |
2838 |
(obj = script->getObject(GET_FULL_INDEX(PCOFF))) |
2839 |
|
2840 |
#define LOAD_FUNCTION(PCOFF) \ |
2841 |
(fun = script->getFunction(GET_FULL_INDEX(PCOFF))) |
2842 |
|
2843 |
#ifdef JS_TRACER |
2844 |
|
2845 |
#ifdef MOZ_TRACEVIS |
2846 |
#if JS_THREADED_INTERP |
2847 |
#define MONITOR_BRANCH_TRACEVIS \ |
2848 |
JS_BEGIN_MACRO \ |
2849 |
if (jumpTable != interruptJumpTable) \ |
2850 |
js_EnterTraceVisState(cx, S_RECORD, R_NONE); \ |
2851 |
JS_END_MACRO |
2852 |
#else /* !JS_THREADED_INTERP */ |
2853 |
#define MONITOR_BRANCH_TRACEVIS \ |
2854 |
JS_BEGIN_MACRO \ |
2855 |
js_EnterTraceVisState(cx, S_RECORD, R_NONE); \ |
2856 |
JS_END_MACRO |
2857 |
#endif |
2858 |
#else |
2859 |
#define MONITOR_BRANCH_TRACEVIS |
2860 |
#endif |
2861 |
|
2862 |
#define MONITOR_BRANCH() \ |
2863 |
JS_BEGIN_MACRO \ |
2864 |
if (TRACING_ENABLED(cx)) { \ |
2865 |
if (js_MonitorLoopEdge(cx, inlineCallCount)) { \ |
2866 |
JS_ASSERT(TRACE_RECORDER(cx)); \ |
2867 |
MONITOR_BRANCH_TRACEVIS; \ |
2868 |
ENABLE_INTERRUPTS(); \ |
2869 |
} \ |
2870 |
fp = cx->fp; \ |
2871 |
script = fp->script; \ |
2872 |
atoms = FrameAtomBase(cx, fp); \ |
2873 |
currentVersion = (JSVersion) script->version; \ |
2874 |
JS_ASSERT(fp->regs == ®s); \ |
2875 |
if (cx->throwing) \ |
2876 |
goto error; \ |
2877 |
} \ |
2878 |
JS_END_MACRO |
2879 |
|
2880 |
#else /* !JS_TRACER */ |
2881 |
|
2882 |
#define MONITOR_BRANCH() ((void) 0) |
2883 |
|
2884 |
#endif /* !JS_TRACER */ |
2885 |
|
2886 |
/* |
2887 |
* Prepare to call a user-supplied branch handler, and abort the script |
2888 |
* if it returns false. |
2889 |
*/ |
2890 |
#define CHECK_BRANCH() \ |
2891 |
JS_BEGIN_MACRO \ |
2892 |
if (!JS_CHECK_OPERATION_LIMIT(cx)) \ |
2893 |
goto error; \ |
2894 |
JS_END_MACRO |
2895 |
|
2896 |
#ifndef TRACE_RECORDER |
2897 |
#define TRACE_RECORDER(cx) (false) |
2898 |
#endif |
2899 |
|
2900 |
#define BRANCH(n) \ |
2901 |
JS_BEGIN_MACRO \ |
2902 |
regs.pc += (n); \ |
2903 |
op = (JSOp) *regs.pc; \ |
2904 |
if ((n) <= 0) { \ |
2905 |
CHECK_BRANCH(); \ |
2906 |
if (op == JSOP_NOP) { \ |
2907 |
if (TRACE_RECORDER(cx)) { \ |
2908 |
MONITOR_BRANCH(); \ |
2909 |
op = (JSOp) *regs.pc; \ |
2910 |
} else { \ |
2911 |
op = (JSOp) *++regs.pc; \ |
2912 |
} \ |
2913 |
} else if (op == JSOP_TRACE) { \ |
2914 |
MONITOR_BRANCH(); \ |
2915 |
op = (JSOp) *regs.pc; \ |
2916 |
} \ |
2917 |
} \ |
2918 |
DO_OP(); \ |
2919 |
JS_END_MACRO |
2920 |
|
2921 |
MUST_FLOW_THROUGH("exit"); |
2922 |
++cx->interpLevel; |
2923 |
|
2924 |
/* |
2925 |
* Optimized Get and SetVersion for proper script language versioning. |
2926 |
* |
2927 |
* If any native method or JSClass/JSObjectOps hook calls js_SetVersion |
2928 |
* and changes cx->version, the effect will "stick" and we will stop |
2929 |
* maintaining currentVersion. This is relied upon by testsuites, for |
2930 |
* the most part -- web browsers select version before compiling and not |
2931 |
* at run-time. |
2932 |
*/ |
2933 |
currentVersion = (JSVersion) script->version; |
2934 |
originalVersion = (JSVersion) cx->version; |
2935 |
if (currentVersion != originalVersion) |
2936 |
js_SetVersion(cx, currentVersion); |
2937 |
|
2938 |
/* Update the static-link display. */ |
2939 |
if (script->staticLevel < JS_DISPLAY_SIZE) { |
2940 |
JSStackFrame **disp = &cx->display[script->staticLevel]; |
2941 |
fp->displaySave = *disp; |
2942 |
*disp = fp; |
2943 |
} |
2944 |
|
2945 |
# define CHECK_INTERRUPT_HANDLER() \ |
2946 |
JS_BEGIN_MACRO \ |
2947 |
if (cx->debugHooks->interruptHandler) \ |
2948 |
ENABLE_INTERRUPTS(); \ |
2949 |
JS_END_MACRO |
2950 |
|
2951 |
/* |
2952 |
* Load the debugger's interrupt hook here and after calling out to native |
2953 |
* functions (but not to getters, setters, or other native hooks), so we do |
2954 |
* not have to reload it each time through the interpreter loop -- we hope |
2955 |
* the compiler can keep it in a register when it is non-null. |
2956 |
*/ |
2957 |
CHECK_INTERRUPT_HANDLER(); |
2958 |
|
2959 |
#if !JS_HAS_GENERATORS |
2960 |
JS_ASSERT(!fp->regs); |
2961 |
#else |
2962 |
/* Initialize the pc and sp registers unless we're resuming a generator. */ |
2963 |
if (JS_LIKELY(!fp->regs)) { |
2964 |
#endif |
2965 |
ASSERT_NOT_THROWING(cx); |
2966 |
regs.pc = script->code; |
2967 |
regs.sp = StackBase(fp); |
2968 |
fp->regs = ®s; |
2969 |
#if JS_HAS_GENERATORS |
2970 |
} else { |
2971 |
JSGenerator *gen; |
2972 |
|
2973 |
JS_ASSERT(fp->flags & JSFRAME_GENERATOR); |
2974 |
gen = FRAME_TO_GENERATOR(fp); |
2975 |
JS_ASSERT(fp->regs == &gen->savedRegs); |
2976 |
regs = gen->savedRegs; |
2977 |
fp->regs = ®s; |
2978 |
JS_ASSERT((size_t) (regs.pc - script->code) <= script->length); |
2979 |
JS_ASSERT((size_t) (regs.sp - StackBase(fp)) <= StackDepth(script)); |
2980 |
|
2981 |
/* |
2982 |
* To support generator_throw and to catch ignored exceptions, |
2983 |
* fail if cx->throwing is set. |
2984 |
*/ |
2985 |
if (cx->throwing) { |
2986 |
#ifdef DEBUG_NOT_THROWING |
2987 |
if (cx->exception != JSVAL_ARETURN) { |
2988 |
printf("JS INTERPRETER CALLED WITH PENDING EXCEPTION %lx\n", |
2989 |
(unsigned long) cx->exception); |
2990 |
} |
2991 |
#endif |
2992 |
goto error; |
2993 |
} |
2994 |
} |
2995 |
#endif /* JS_HAS_GENERATORS */ |
2996 |
|
2997 |
/* |
2998 |
* It is important that "op" be initialized before calling DO_OP because |
2999 |
* it is possible for "op" to be specially assigned during the normal |
3000 |
* processing of an opcode while looping. We rely on DO_NEXT_OP to manage |
3001 |
* "op" correctly in all other cases. |
3002 |
*/ |
3003 |
len = 0; |
3004 |
DO_NEXT_OP(len); |
3005 |
|
3006 |
#if JS_THREADED_INTERP |
3007 |
/* |
3008 |
* This is a loop, but it does not look like a loop. The loop-closing |
3009 |
* jump is distributed throughout goto *jumpTable[op] inside of DO_OP. |
3010 |
* When interrupts are enabled, jumpTable is set to interruptJumpTable |
3011 |
* where all jumps point to the interrupt label. The latter, after |
3012 |
* calling the interrupt handler, dispatches through normalJumpTable to |
3013 |
* continue the normal bytecode processing. |
3014 |
*/ |
3015 |
|
3016 |
#else /* !JS_THREADED_INTERP */ |
3017 |
for (;;) { |
3018 |
advance_pc_by_one: |
3019 |
JS_ASSERT(js_CodeSpec[op].length == 1); |
3020 |
len = 1; |
3021 |
advance_pc: |
3022 |
regs.pc += len; |
3023 |
op = (JSOp) *regs.pc; |
3024 |
|
3025 |
do_op: |
3026 |
CHECK_RECORDER(); |
3027 |
TRACE_OPCODE(op); |
3028 |
switchOp = intN(op) | switchMask; |
3029 |
do_switch: |
3030 |
switch (switchOp) { |
3031 |
#endif |
3032 |
|
3033 |
/********************** Here we include the operations ***********************/ |
3034 |
#include "jsops.cpp" |
3035 |
/*****************************************************************************/ |
3036 |
|
3037 |
#if !JS_THREADED_INTERP |
3038 |
default: |
3039 |
#endif |
3040 |
#ifndef JS_TRACER |
3041 |
bad_opcode: |
3042 |
#endif |
3043 |
{ |
3044 |
char numBuf[12]; |
3045 |
JS_snprintf(numBuf, sizeof numBuf, "%d", op); |
3046 |
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, |
3047 |
JSMSG_BAD_BYTECODE, numBuf); |
3048 |
goto error; |
3049 |
} |
3050 |
|
3051 |
#if !JS_THREADED_INTERP |
3052 |
} /* switch (op) */ |
3053 |
} /* for (;;) */ |
3054 |
#endif /* !JS_THREADED_INTERP */ |
3055 |
|
3056 |
error: |
3057 |
if (fp->imacpc && cx->throwing) { |
3058 |
// To keep things simple, we hard-code imacro exception handlers here. |
3059 |
if (*fp->imacpc == JSOP_NEXTITER && js_ValueIsStopIteration(cx->exception)) { |
3060 |
// pc may point to JSOP_DUP here due to bug 474854. |
3061 |
JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP || *regs.pc == JSOP_TRUE); |
3062 |
cx->throwing = JS_FALSE; |
3063 |
cx->exception = JSVAL_VOID; |
3064 |
regs.sp[-1] = JSVAL_HOLE; |
3065 |
PUSH(JSVAL_FALSE); |
3066 |
goto end_imacro; |
3067 |
} |
3068 |
|
3069 |
// Handle other exceptions as if they came from the imacro-calling pc. |
3070 |
regs.pc = fp->imacpc; |
3071 |
fp->imacpc = NULL; |
3072 |
atoms = script->atomMap.vector; |
3073 |
} |
3074 |
|
3075 |
JS_ASSERT((size_t)((fp->imacpc ? fp->imacpc : regs.pc) - script->code) < script->length); |
3076 |
|
3077 |
#ifdef JS_TRACER |
3078 |
/* |
3079 |
* This abort could be weakened to permit tracing through exceptions that |
3080 |
* are thrown and caught within a loop, with the co-operation of the tracer. |
3081 |
* For now just bail on any sign of trouble. |
3082 |
*/ |
3083 |
if (TRACE_RECORDER(cx)) |
3084 |
js_AbortRecording(cx, "error or exception while recording"); |
3085 |
#endif |
3086 |
|
3087 |
if (!cx->throwing) { |
3088 |
/* This is an error, not a catchable exception, quit the frame ASAP. */ |
3089 |
ok = JS_FALSE; |
3090 |
} else { |
3091 |
JSTrapHandler handler; |
3092 |
JSTryNote *tn, *tnlimit; |
3093 |
uint32 offset; |
3094 |
|
3095 |
/* Call debugger throw hook if set. */ |
3096 |
handler = cx->debugHooks->throwHook; |
3097 |
if (handler) { |
3098 |
switch (handler(cx, script, regs.pc, &rval, |
3099 |
cx->debugHooks->throwHookData)) { |
3100 |
case JSTRAP_ERROR: |
3101 |
cx->throwing = JS_FALSE; |
3102 |
goto error; |
3103 |
case JSTRAP_RETURN: |
3104 |
cx->throwing = JS_FALSE; |
3105 |
fp->rval = rval; |
3106 |
ok = JS_TRUE; |
3107 |
goto forced_return; |
3108 |
case JSTRAP_THROW: |
3109 |
cx->exception = rval; |
3110 |
case JSTRAP_CONTINUE: |
3111 |
default:; |
3112 |
} |
3113 |
CHECK_INTERRUPT_HANDLER(); |
3114 |
} |
3115 |
|
3116 |
/* |
3117 |
* Look for a try block in script that can catch this exception. |
3118 |
*/ |
3119 |
if (script->trynotesOffset == 0) |
3120 |
goto no_catch; |
3121 |
|
3122 |
offset = (uint32)(regs.pc - script->main); |
3123 |
tn = script->trynotes()->vector; |
3124 |
tnlimit = tn + script->trynotes()->length; |
3125 |
do { |
3126 |
if (offset - tn->start >= tn->length) |
3127 |
continue; |
3128 |
|
3129 |
/* |
3130 |
* We have a note that covers the exception pc but we must check |
3131 |
* whether the interpreter has already executed the corresponding |
3132 |
* handler. This is possible when the executed bytecode |
3133 |
* implements break or return from inside a for-in loop. |
3134 |
* |
3135 |
* In this case the emitter generates additional [enditer] and |
3136 |
* [gosub] opcodes to close all outstanding iterators and execute |
3137 |
* the finally blocks. If such an [enditer] throws an exception, |
3138 |
* its pc can still be inside several nested for-in loops and |
3139 |
* try-finally statements even if we have already closed the |
3140 |
* corresponding iterators and invoked the finally blocks. |
3141 |
* |
3142 |
* To address this, we make [enditer] always decrease the stack |
3143 |
* even when its implementation throws an exception. Thus already |
3144 |
* executed [enditer] and [gosub] opcodes will have try notes |
3145 |
* with the stack depth exceeding the current one and this |
3146 |
* condition is what we use to filter them out. |
3147 |
*/ |
3148 |
if (tn->stackDepth > regs.sp - StackBase(fp)) |
3149 |
continue; |
3150 |
|
3151 |
/* |
3152 |
             * Set pc to the first bytecode after the try note to point
3153 |
* to the beginning of catch or finally or to [enditer] closing |
3154 |
* the for-in loop. |
3155 |
*/ |
3156 |
regs.pc = (script)->main + tn->start + tn->length; |
3157 |
|
3158 |
ok = js_UnwindScope(cx, fp, tn->stackDepth, JS_TRUE); |
3159 |
JS_ASSERT(fp->regs->sp == StackBase(fp) + tn->stackDepth); |
3160 |
if (!ok) { |
3161 |
/* |
3162 |
* Restart the handler search with updated pc and stack depth |
3163 |
* to properly notify the debugger. |
3164 |
*/ |
3165 |
goto error; |
3166 |
} |
3167 |
|
3168 |
switch (tn->kind) { |
3169 |
case JSTRY_CATCH: |
3170 |
JS_ASSERT(js_GetOpcode(cx, fp->script, regs.pc) == JSOP_ENTERBLOCK); |
3171 |
|
3172 |
#if JS_HAS_GENERATORS |
3173 |
/* Catch cannot intercept the closing of a generator. */ |
3174 |
if (JS_UNLIKELY(cx->exception == JSVAL_ARETURN)) |
3175 |
break; |
3176 |
#endif |
3177 |
|
3178 |
/* |
3179 |
* Don't clear cx->throwing to save cx->exception from GC |
3180 |
* until it is pushed to the stack via [exception] in the |
3181 |
* catch block. |
3182 |
*/ |
3183 |
len = 0; |
3184 |
DO_NEXT_OP(len); |
3185 |
|
3186 |
case JSTRY_FINALLY: |
3187 |
/* |
3188 |
* Push (true, exception) pair for finally to indicate that |
3189 |
* [retsub] should rethrow the exception. |
3190 |
*/ |
3191 |
PUSH(JSVAL_TRUE); |
3192 |
PUSH(cx->exception); |
3193 |
cx->throwing = JS_FALSE; |
3194 |
len = 0; |
3195 |
DO_NEXT_OP(len); |
3196 |
|
3197 |
case JSTRY_ITER: |
3198 |
/* |
3199 |
* This is similar to JSOP_ENDITER in the interpreter loop, |
3200 |
* except the code now uses the stack slot normally used by |
3201 |
* JSOP_NEXTITER, namely regs.sp[-1] before the regs.sp -= 2 |
3202 |
* adjustment and regs.sp[1] after, to save and restore the |
3203 |
* pending exception. |
3204 |
*/ |
3205 |
JS_ASSERT(js_GetOpcode(cx, fp->script, regs.pc) == JSOP_ENDITER); |
3206 |
regs.sp[-1] = cx->exception; |
3207 |
cx->throwing = JS_FALSE; |
3208 |
ok = js_CloseIterator(cx, regs.sp[-2]); |
3209 |
regs.sp -= 2; |
3210 |
if (!ok) |
3211 |
goto error; |
3212 |
cx->throwing = JS_TRUE; |
3213 |
cx->exception = regs.sp[1]; |
3214 |
} |
3215 |
} while (++tn != tnlimit); |
3216 |
|
3217 |
no_catch: |
3218 |
/* |
3219 |
* Propagate the exception or error to the caller unless the exception |
3220 |
* is an asynchronous return from a generator. |
3221 |
*/ |
3222 |
ok = JS_FALSE; |
3223 |
#if JS_HAS_GENERATORS |
3224 |
if (JS_UNLIKELY(cx->throwing && cx->exception == JSVAL_ARETURN)) { |
3225 |
cx->throwing = JS_FALSE; |
3226 |
ok = JS_TRUE; |
3227 |
fp->rval = JSVAL_VOID; |
3228 |
} |
3229 |
#endif |
3230 |
} |
3231 |
|
3232 |
forced_return: |
3233 |
/* |
3234 |
* Unwind the scope making sure that ok stays false even when UnwindScope |
3235 |
* returns true. |
3236 |
* |
3237 |
* When a trap handler returns JSTRAP_RETURN, we jump here with ok set to |
3238 |
* true bypassing any finally blocks. |
3239 |
*/ |
3240 |
ok &= js_UnwindScope(cx, fp, 0, ok || cx->throwing); |
3241 |
JS_ASSERT(regs.sp == StackBase(fp)); |
3242 |
|
3243 |
#ifdef DEBUG |
3244 |
cx->tracePrevPc = NULL; |
3245 |
#endif |
3246 |
|
3247 |
if (inlineCallCount) |
3248 |
goto inline_return; |
3249 |
|
3250 |
exit: |
3251 |
/* |
3252 |
* At this point we are inevitably leaving an interpreted function or a |
3253 |
* top-level script, and returning to one of: |
3254 |
* (a) an "out of line" call made through js_Invoke; |
3255 |
* (b) a js_Execute activation; |
3256 |
* (c) a generator (SendToGenerator, jsiter.c). |
3257 |
* |
3258 |
* We must not be in an inline frame. The check above ensures that for the |
3259 |
* error case and for a normal return, the code jumps directly to parent's |
3260 |
* frame pc. |
3261 |
*/ |
3262 |
JS_ASSERT(inlineCallCount == 0); |
3263 |
JS_ASSERT(fp->regs == ®s); |
3264 |
#ifdef JS_TRACER |
3265 |
if (TRACE_RECORDER(cx)) |
3266 |
js_AbortRecording(cx, "recording out of js_Interpret"); |
3267 |
#endif |
3268 |
#if JS_HAS_GENERATORS |
3269 |
if (JS_UNLIKELY(fp->flags & JSFRAME_YIELDING)) { |
3270 |
JSGenerator *gen; |
3271 |
|
3272 |
gen = FRAME_TO_GENERATOR(fp); |
3273 |
gen->savedRegs = regs; |
3274 |
gen->frame.regs = &gen->savedRegs; |
3275 |
} else |
3276 |
#endif /* JS_HAS_GENERATORS */ |
3277 |
{ |
3278 |
JS_ASSERT(!fp->blockChain); |
3279 |
JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->scopeChain, 0)); |
3280 |
fp->regs = NULL; |
3281 |
} |
3282 |
|
3283 |
/* Undo the remaining effects committed on entry to js_Interpret. */ |
3284 |
if (script->staticLevel < JS_DISPLAY_SIZE) |
3285 |
cx->display[script->staticLevel] = fp->displaySave; |
3286 |
if (cx->version == currentVersion && currentVersion != originalVersion) |
3287 |
js_SetVersion(cx, originalVersion); |
3288 |
--cx->interpLevel; |
3289 |
|
3290 |
return ok; |
3291 |
|
3292 |
atom_not_defined: |
3293 |
{ |
3294 |
const char *printable; |
3295 |
|
3296 |
printable = js_AtomToPrintableString(cx, atom); |
3297 |
if (printable) |
3298 |
js_ReportIsNotDefined(cx, printable); |
3299 |
goto error; |
3300 |
} |
3301 |
} |
3302 |
|
3303 |
#endif /* !defined jsinvoke_cpp___ */ |