Revision log: Upgrade to SpiderMonkey from Firefox 3.5.3.
1 | /* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
2 | * vim: set ts=8 sw=4 et tw=79: |
3 | * |
4 | * ***** BEGIN LICENSE BLOCK ***** |
5 | * Version: MPL 1.1/GPL 2.0/LGPL 2.1 |
6 | * |
7 | * The contents of this file are subject to the Mozilla Public License Version |
8 | * 1.1 (the "License"); you may not use this file except in compliance with |
9 | * the License. You may obtain a copy of the License at |
10 | * http://www.mozilla.org/MPL/ |
11 | * |
12 | * Software distributed under the License is distributed on an "AS IS" basis, |
13 | * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License |
14 | * for the specific language governing rights and limitations under the |
15 | * License. |
16 | * |
17 | * The Original Code is Mozilla Communicator client code, released |
18 | * March 31, 1998. |
19 | * |
20 | * The Initial Developer of the Original Code is |
21 | * Netscape Communications Corporation. |
22 | * Portions created by the Initial Developer are Copyright (C) 1998 |
23 | * the Initial Developer. All Rights Reserved. |
24 | * |
25 | * Contributor(s): |
26 | * |
27 | * Alternatively, the contents of this file may be used under the terms of |
28 | * either of the GNU General Public License Version 2 or later (the "GPL"), |
29 | * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), |
30 | * in which case the provisions of the GPL or the LGPL are applicable instead |
31 | * of those above. If you wish to allow use of your version of this file only |
32 | * under the terms of either the GPL or the LGPL, and not to allow others to |
33 | * use your version of this file under the terms of the MPL, indicate your |
34 | * decision by deleting the provisions above and replace them with the notice |
35 | * and other provisions required by the GPL or the LGPL. If you do not delete |
36 | * the provisions above, a recipient may use your version of this file under |
37 | * the terms of any one of the MPL, the GPL or the LGPL. |
38 | * |
39 | * ***** END LICENSE BLOCK ***** */ |
40 | |
41 | /* |
42 | * JavaScript bytecode interpreter. |
43 | */ |
44 | #include "jsstddef.h" |
45 | #include <stdio.h> |
46 | #include <string.h> |
47 | #include <math.h> |
48 | #include "jstypes.h" |
49 | #include "jsarena.h" /* Added by JSIFY */ |
50 | #include "jsutil.h" /* Added by JSIFY */ |
51 | #include "jsprf.h" |
52 | #include "jsapi.h" |
53 | #include "jsarray.h" |
54 | #include "jsatom.h" |
55 | #include "jsbool.h" |
56 | #include "jscntxt.h" |
57 | #include "jsdate.h" |
58 | #include "jsversion.h" |
59 | #include "jsdbgapi.h" |
60 | #include "jsfun.h" |
61 | #include "jsgc.h" |
62 | #include "jsinterp.h" |
63 | #include "jsiter.h" |
64 | #include "jslock.h" |
65 | #include "jsnum.h" |
66 | #include "jsobj.h" |
67 | #include "jsopcode.h" |
68 | #include "jsscan.h" |
69 | #include "jsscope.h" |
70 | #include "jsscript.h" |
71 | #include "jsstr.h" |
72 | #include "jsstaticcheck.h" |
73 | #include "jstracer.h" |
74 | |
75 | #ifdef INCLUDE_MOZILLA_DTRACE |
76 | #include "jsdtracef.h" |
77 | #endif |
78 | |
79 | #if JS_HAS_XML_SUPPORT |
80 | #include "jsxml.h" |
81 | #endif |
82 | |
83 | #include "jsautooplen.h" |
84 | |
85 | /* jsinvoke_cpp___ indicates inclusion from jsinvoke.cpp. */ |
86 | #if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___ |
87 | |
88 | uint32 |
89 | js_GenerateShape(JSContext *cx, JSBool gcLocked) |
90 | { |
91 | JSRuntime *rt; |
92 | uint32 shape; |
93 | |
94 | rt = cx->runtime; |
95 | shape = JS_ATOMIC_INCREMENT(&rt->shapeGen); |
96 | JS_ASSERT(shape != 0); |
97 | if (shape >= SHAPE_OVERFLOW_BIT) { |
98 | /* |
99 | * FIXME bug 440834: The shape id space has overflowed. Currently we |
100 | * cope badly with this and schedule the GC on every call. But |
101 | * first we make sure that increments from other threads cannot |
102 | * wrap shapeGen around to zero. |
103 | */ |
104 | rt->shapeGen = SHAPE_OVERFLOW_BIT; |
105 | js_TriggerGC(cx, gcLocked); |
106 | } |
107 | return shape; |
108 | } |
109 | |
110 | JS_REQUIRES_STACK JSPropCacheEntry * |
111 | js_FillPropertyCache(JSContext *cx, JSObject *obj, |
112 | uintN scopeIndex, uintN protoIndex, JSObject *pobj, |
113 | JSScopeProperty *sprop, JSBool adding) |
114 | { |
115 | JSPropertyCache *cache; |
116 | jsbytecode *pc; |
117 | JSScope *scope; |
118 | jsuword kshape, vshape, khash; |
119 | JSOp op; |
120 | const JSCodeSpec *cs; |
121 | jsuword vword; |
122 | ptrdiff_t pcoff; |
123 | JSAtom *atom; |
124 | JSPropCacheEntry *entry; |
125 | |
126 | JS_ASSERT(!cx->runtime->gcRunning); |
127 | cache = &JS_PROPERTY_CACHE(cx); |
128 | |
129 | /* FIXME bug 489098: consider enabling the property cache for eval. */ |
130 | if (js_IsPropertyCacheDisabled(cx) || (cx->fp->flags & JSFRAME_EVAL)) { |
131 | PCMETER(cache->disfills++); |
132 | return JS_NO_PROP_CACHE_FILL; |
133 | } |
134 | |
135 | /* |
136 | * Check for fill from js_SetPropertyHelper where the setter removed sprop |
137 | * from pobj's scope (e.g., via unwatch or delete). |
138 | */ |
139 | scope = OBJ_SCOPE(pobj); |
140 | JS_ASSERT(scope->object == pobj); |
141 | if (!SCOPE_HAS_PROPERTY(scope, sprop)) { |
142 | PCMETER(cache->oddfills++); |
143 | return JS_NO_PROP_CACHE_FILL; |
144 | } |
145 | |
146 | /* |
147 | * Check for overdeep scope and prototype chain. Because resolve, getter, |
148 | * and setter hooks can change the prototype chain using JS_SetPrototype |
149 | * after js_LookupPropertyWithFlags has returned the nominal protoIndex, |
150 | * we have to validate protoIndex if it is non-zero. If it is zero, then |
151 | * we know thanks to the SCOPE_HAS_PROPERTY test above, and from the fact |
152 | * that obj == pobj, that protoIndex is invariant. |
153 | * |
154 | * The scopeIndex can't be wrong. We require JS_SetParent calls to happen |
155 | * before any running script might consult a parent-linked scope chain. If |
156 | * this requirement is not satisfied, the fill in progress will never hit, |
157 | * but vcap vs. scope shape tests ensure nothing malfunctions. |
158 | */ |
159 | JS_ASSERT_IF(scopeIndex == 0 && protoIndex == 0, obj == pobj); |
160 | |
161 | if (protoIndex != 0) { |
162 | JSObject *tmp = obj; |
163 | |
164 | for (uintN i = 0; i != scopeIndex; i++) |
165 | tmp = OBJ_GET_PARENT(cx, tmp); |
166 | JS_ASSERT(tmp != pobj); |
167 | |
168 | protoIndex = 1; |
169 | for (;;) { |
170 | tmp = OBJ_GET_PROTO(cx, tmp); |
171 | |
172 | /* |
173 | * We cannot cache properties coming from native objects behind |
174 | * non-native ones on the prototype chain. The non-natives can |
175 | * mutate in arbitrary ways without changing any shapes. |
176 | */ |
177 | if (!tmp || !OBJ_IS_NATIVE(tmp)) { |
178 | PCMETER(cache->noprotos++); |
179 | return JS_NO_PROP_CACHE_FILL; |
180 | } |
181 | if (tmp == pobj) |
182 | break; |
183 | ++protoIndex; |
184 | } |
185 | } |
186 | |
187 | if (scopeIndex > PCVCAP_SCOPEMASK || protoIndex > PCVCAP_PROTOMASK) { |
188 | PCMETER(cache->longchains++); |
189 | return JS_NO_PROP_CACHE_FILL; |
190 | } |
191 | |
192 | /* |
193 | * Optimize the cached vword based on our parameters and the current pc's |
194 | * opcode format flags. |
195 | */ |
196 | pc = cx->fp->regs->pc; |
197 | op = js_GetOpcode(cx, cx->fp->script, pc); |
198 | cs = &js_CodeSpec[op]; |
199 | kshape = 0; |
200 | |
201 | do { |
202 | /* |
203 | * Check for a prototype "plain old method" callee computation. What |
204 | * is a plain old method? It's a function-valued property with stub |
205 | * getter and setter, so getting the function is idempotent and setting is |
206 | * transparent. |
207 | */ |
208 | if (cs->format & JOF_CALLOP) { |
209 | if (SPROP_HAS_STUB_GETTER(sprop) && |
210 | SPROP_HAS_VALID_SLOT(sprop, scope)) { |
211 | jsval v; |
212 | |
213 | v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot); |
214 | if (VALUE_IS_FUNCTION(cx, v)) { |
215 | /* |
216 | * Great, we have a function-valued prototype property |
217 | * where the getter is JS_PropertyStub. The type id in |
218 | * pobj's scope does not evolve with changes to property |
219 | * values, however. |
220 | * |
221 | * So here, on first cache fill for this method, we brand |
222 | * the scope with a new shape and set the SCOPE_BRANDED |
223 | * flag. Once this scope flag is set, any write that adds |
224 | * or deletes a function-valued plain old property in |
225 | * scope->object will result in the shape being regenerated. |
226 | */ |
227 | if (!SCOPE_IS_BRANDED(scope)) { |
228 | PCMETER(cache->brandfills++); |
229 | #ifdef DEBUG_notme |
230 | fprintf(stderr, |
231 | "branding %p (%s) for funobj %p (%s), shape %lu\n", |
232 | pobj, LOCKED_OBJ_GET_CLASS(pobj)->name, |
233 | JSVAL_TO_OBJECT(v), |
234 | JS_GetFunctionName(GET_FUNCTION_PRIVATE(cx, |
235 | JSVAL_TO_OBJECT(v))), |
236 | OBJ_SHAPE(obj)); |
237 | #endif |
238 | js_MakeScopeShapeUnique(cx, scope); |
239 | if (js_IsPropertyCacheDisabled(cx)) { |
240 | /* |
241 | * js_GenerateShape could not recover from |
242 | * rt->shapeGen's overflow. |
243 | */ |
244 | return JS_NO_PROP_CACHE_FILL; |
245 | } |
246 | SCOPE_SET_BRANDED(scope); |
247 | } |
248 | vword = JSVAL_OBJECT_TO_PCVAL(v); |
249 | break; |
250 | } |
251 | } |
252 | } |
253 | |
254 | /* If getting a value via a stub getter, we can cache the slot. */ |
255 | if (!(cs->format & (JOF_SET | JOF_INCDEC | JOF_FOR)) && |
256 | SPROP_HAS_STUB_GETTER(sprop) && |
257 | SPROP_HAS_VALID_SLOT(sprop, scope)) { |
258 | /* Great, let's cache sprop's slot and use it on cache hit. */ |
259 | vword = SLOT_TO_PCVAL(sprop->slot); |
260 | } else { |
261 | /* Best we can do is to cache sprop (still a nice speedup). */ |
262 | vword = SPROP_TO_PCVAL(sprop); |
263 | if (adding && |
264 | sprop == scope->lastProp && |
265 | scope->shape == sprop->shape) { |
266 | /* |
267 | * Our caller added a new property. We also know that a setter |
268 | * that js_NativeSet could have run has not mutated the scope |
269 | * so the added property is still the last one added and the |
270 | * scope is not branded. |
271 | * |
272 | * We want to cache under scope's shape before the property |
273 | * addition to bias for the case when the mutator opcode |
274 | * always adds the same property. This optimizes repeated |
275 | * execution of object initializers or explicit |
276 | * initialization sequences like |
277 | * |
278 | * obj = {}; obj.x = 1; obj.y = 2; |
279 | * |
280 | * We assume that on average the win from this optimization is |
281 | * bigger than the cost of an extra mismatch per loop due to |
282 | * the bias for the following case: |
283 | * |
284 | * obj = {}; ... for (...) { ... obj.x = ... } |
285 | * |
286 | * On the first iteration JSOP_SETPROP fills the cache with |
287 | * the shape of the newly created object, not the shape after |
288 | * obj.x is assigned. That mismatches obj's shape on the |
289 | * second iteration. Note that on the third and following |
290 | * iterations the cache will be hit since the shape no longer |
291 | * mutates. |
292 | */ |
293 | JS_ASSERT(scope->object == obj); |
294 | if (sprop->parent) { |
295 | kshape = sprop->parent->shape; |
296 | } else { |
297 | JSObject *proto = STOBJ_GET_PROTO(obj); |
298 | if (proto && OBJ_IS_NATIVE(proto)) |
299 | kshape = OBJ_SHAPE(proto); |
300 | } |
301 | |
302 | /* |
303 | * When adding, we predict that no prototype object will later gain a |
304 | * readonly property or setter. |
305 | */ |
306 | vshape = cx->runtime->protoHazardShape; |
307 | } |
308 | } |
309 | } while (0); |
310 | |
311 | if (kshape == 0) { |
312 | kshape = OBJ_SHAPE(obj); |
313 | vshape = scope->shape; |
314 | } |
315 | |
316 | khash = PROPERTY_CACHE_HASH_PC(pc, kshape); |
317 | if (obj == pobj) { |
318 | JS_ASSERT(scopeIndex == 0 && protoIndex == 0); |
319 | JS_ASSERT(OBJ_SCOPE(obj)->object == obj); |
320 | JS_ASSERT(kshape != 0); |
321 | } else { |
322 | if (op == JSOP_LENGTH) { |
323 | atom = cx->runtime->atomState.lengthAtom; |
324 | } else { |
325 | pcoff = (JOF_TYPE(cs->format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0; |
326 | GET_ATOM_FROM_BYTECODE(cx->fp->script, pc, pcoff, atom); |
327 | } |
328 | |
329 | #ifdef DEBUG |
330 | if (scopeIndex == 0) { |
331 | JS_ASSERT(protoIndex != 0); |
332 | JS_ASSERT((protoIndex == 1) == (OBJ_GET_PROTO(cx, obj) == pobj)); |
333 | } |
334 | #endif |
335 | |
336 | if (scopeIndex != 0 || protoIndex != 1) { |
337 | khash = PROPERTY_CACHE_HASH_ATOM(atom, obj, pobj); |
338 | PCMETER(if (PCVCAP_TAG(cache->table[khash].vcap) <= 1) |
339 | cache->pcrecycles++); |
340 | pc = (jsbytecode *) atom; |
341 | kshape = (jsuword) obj; |
342 | |
343 | /* |
344 | * Make sure that a later shadowing assignment will enter |
345 | * PurgeProtoChain and invalidate this entry, bug 479198. |
346 | * |
347 | * This is thread-safe even though obj is not locked. Only the |
348 | * DELEGATE bit of obj->classword can change at runtime, given that |
349 | * obj is native; and the bit is only set, never cleared. And on |
350 | * platforms where another CPU can fail to see this write, it's OK |
351 | * because the property cache and JIT cache are thread-local. |
352 | */ |
353 | OBJ_SET_DELEGATE(cx, obj); |
354 | } |
355 | } |
356 | |
357 | entry = &cache->table[khash]; |
358 | PCMETER(PCVAL_IS_NULL(entry->vword) || cache->recycles++); |
359 | entry->kpc = pc; |
360 | entry->kshape = kshape; |
361 | entry->vcap = PCVCAP_MAKE(vshape, scopeIndex, protoIndex); |
362 | entry->vword = vword; |
363 | |
364 | cache->empty = JS_FALSE; |
365 | PCMETER(cache->fills++); |
366 | |
367 | /* |
368 | * The modfills counter is not exact. It increases if a getter or setter |
369 | * recurses into the interpreter. |
370 | */ |
371 | PCMETER(entry == cache->pctestentry || cache->modfills++); |
372 | PCMETER(cache->pctestentry = NULL); |
373 | return entry; |
374 | } |
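A filled entry is consumed by the interpreter's fast path before it falls back to js_FullTestPropertyCache below. The following is a minimal sketch of that consumption, using only names that appear in this file; the hypothetical ExampleTestPropertyCache stands in for the real PROPERTY_CACHE_TEST fast path in jsinterp.h, which additionally validates vcap against the scope and prototype chain.

static JSPropCacheEntry *
ExampleTestPropertyCache(JSContext *cx, jsbytecode *pc, JSObject *obj)
{
    jsuword kshape;
    JSPropCacheEntry *entry;

    /* Hash on the current pc and obj's shape, exactly as the fill above did. */
    kshape = OBJ_SHAPE(obj);
    entry = &JS_PROPERTY_CACHE(cx).table[PROPERTY_CACHE_HASH_PC(pc, kshape)];

    /*
     * A hit requires both keys to match; entry->vword then holds a slot
     * number, a cached sprop, or a branded function object (see the fill
     * cases above). Entries keyed by atom rather than by pc and shape are
     * only found via js_FullTestPropertyCache.
     */
    if (entry->kpc == pc && entry->kshape == kshape)
        return entry;
    return NULL;
}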
375 | |
376 | JS_REQUIRES_STACK JSAtom * |
377 | js_FullTestPropertyCache(JSContext *cx, jsbytecode *pc, |
378 | JSObject **objp, JSObject **pobjp, |
379 | JSPropCacheEntry **entryp) |
380 | { |
381 | JSOp op; |
382 | const JSCodeSpec *cs; |
383 | ptrdiff_t pcoff; |
384 | JSAtom *atom; |
385 | JSObject *obj, *pobj, *tmp; |
386 | JSPropCacheEntry *entry; |
387 | uint32 vcap; |
388 | |
389 | JS_ASSERT(uintN((cx->fp->imacpc ? cx->fp->imacpc : pc) - cx->fp->script->code) |
390 | < cx->fp->script->length); |
391 | |
392 | op = js_GetOpcode(cx, cx->fp->script, pc); |
393 | cs = &js_CodeSpec[op]; |
394 | if (op == JSOP_LENGTH) { |
395 | atom = cx->runtime->atomState.lengthAtom; |
396 | } else { |
397 | pcoff = (JOF_TYPE(cs->format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0; |
398 | GET_ATOM_FROM_BYTECODE(cx->fp->script, pc, pcoff, atom); |
399 | } |
400 | |
401 | obj = *objp; |
402 | JS_ASSERT(OBJ_IS_NATIVE(obj)); |
403 | entry = &JS_PROPERTY_CACHE(cx).table[PROPERTY_CACHE_HASH_ATOM(atom, obj, NULL)]; |
404 | *entryp = entry; |
405 | vcap = entry->vcap; |
406 | |
407 | if (entry->kpc != (jsbytecode *) atom) { |
408 | PCMETER(JS_PROPERTY_CACHE(cx).idmisses++); |
409 | |
410 | #ifdef DEBUG_notme |
411 | entry = &JS_PROPERTY_CACHE(cx).table[PROPERTY_CACHE_HASH_PC(pc, OBJ_SHAPE(obj))]; |
412 | fprintf(stderr, |
413 | "id miss for %s from %s:%u" |
414 | " (pc %u, kpc %u, kshape %u, shape %u)\n", |
415 | js_AtomToPrintableString(cx, atom), |
416 | cx->fp->script->filename, |
417 | js_PCToLineNumber(cx, cx->fp->script, pc), |
418 | pc - cx->fp->script->code, |
419 | entry->kpc - cx->fp->script->code, |
420 | entry->kshape, |
421 | OBJ_SHAPE(obj)); |
422 | js_Disassemble1(cx, cx->fp->script, pc, |
423 | PTRDIFF(pc, cx->fp->script->code, jsbytecode), |
424 | JS_FALSE, stderr); |
425 | #endif |
426 | |
427 | return atom; |
428 | } |
429 | |
430 | if (entry->kshape != (jsuword) obj) { |
431 | PCMETER(JS_PROPERTY_CACHE(cx).komisses++); |
432 | return atom; |
433 | } |
434 | |
435 | pobj = obj; |
436 | JS_LOCK_OBJ(cx, pobj); |
437 | |
438 | if (JOF_MODE(cs->format) == JOF_NAME) { |
439 | while (vcap & (PCVCAP_SCOPEMASK << PCVCAP_PROTOBITS)) { |
440 | tmp = LOCKED_OBJ_GET_PARENT(pobj); |
441 | if (!tmp || !OBJ_IS_NATIVE(tmp)) |
442 | break; |
443 | JS_UNLOCK_OBJ(cx, pobj); |
444 | pobj = tmp; |
445 | JS_LOCK_OBJ(cx, pobj); |
446 | vcap -= PCVCAP_PROTOSIZE; |
447 | } |
448 | |
449 | *objp = pobj; |
450 | } |
451 | |
452 | while (vcap & PCVCAP_PROTOMASK) { |
453 | tmp = LOCKED_OBJ_GET_PROTO(pobj); |
454 | if (!tmp || !OBJ_IS_NATIVE(tmp)) |
455 | break; |
456 | JS_UNLOCK_OBJ(cx, pobj); |
457 | pobj = tmp; |
458 | JS_LOCK_OBJ(cx, pobj); |
459 | --vcap; |
460 | } |
461 | |
462 | if (PCVCAP_SHAPE(vcap) == OBJ_SHAPE(pobj)) { |
463 | #ifdef DEBUG |
464 | jsid id = ATOM_TO_JSID(atom); |
465 | |
466 | CHECK_FOR_STRING_INDEX(id); |
467 | JS_ASSERT(SCOPE_GET_PROPERTY(OBJ_SCOPE(pobj), id)); |
468 | JS_ASSERT(OBJ_SCOPE(pobj)->object == pobj); |
469 | #endif |
470 | *pobjp = pobj; |
471 | return NULL; |
472 | } |
473 | |
474 | PCMETER(JS_PROPERTY_CACHE(cx).vcmisses++); |
475 | JS_UNLOCK_OBJ(cx, pobj); |
476 | return atom; |
477 | } |
478 | |
479 | #ifdef DEBUG |
480 | #define ASSERT_CACHE_IS_EMPTY(cache) \ |
481 | JS_BEGIN_MACRO \ |
482 | JSPropertyCache *cache_ = (cache); \ |
483 | uintN i_; \ |
484 | JS_ASSERT(cache_->empty); \ |
485 | for (i_ = 0; i_ < PROPERTY_CACHE_SIZE; i_++) { \ |
486 | JS_ASSERT(!cache_->table[i_].kpc); \ |
487 | JS_ASSERT(!cache_->table[i_].kshape); \ |
488 | JS_ASSERT(!cache_->table[i_].vcap); \ |
489 | JS_ASSERT(!cache_->table[i_].vword); \ |
490 | } \ |
491 | JS_END_MACRO |
492 | #else |
493 | #define ASSERT_CACHE_IS_EMPTY(cache) ((void)0) |
494 | #endif |
495 | |
496 | JS_STATIC_ASSERT(PCVAL_NULL == 0); |
497 | |
498 | void |
499 | js_PurgePropertyCache(JSContext *cx, JSPropertyCache *cache) |
500 | { |
501 | if (cache->empty) { |
502 | ASSERT_CACHE_IS_EMPTY(cache); |
503 | return; |
504 | } |
505 | |
506 | memset(cache->table, 0, sizeof cache->table); |
507 | cache->empty = JS_TRUE; |
508 | |
509 | #ifdef JS_PROPERTY_CACHE_METERING |
510 | { static FILE *fp; |
511 | if (!fp) |
512 | fp = fopen("/tmp/propcache.stats", "w"); |
513 | if (fp) { |
514 | fputs("Property cache stats for ", fp); |
515 | #ifdef JS_THREADSAFE |
516 | fprintf(fp, "thread %lu, ", (unsigned long) cx->thread->id); |
517 | #endif |
518 | fprintf(fp, "GC %u\n", cx->runtime->gcNumber); |
519 | |
520 | # define P(mem) fprintf(fp, "%11s %10lu\n", #mem, (unsigned long)cache->mem) |
521 | P(fills); |
522 | P(nofills); |
523 | P(rofills); |
524 | P(disfills); |
525 | P(oddfills); |
526 | P(modfills); |
527 | P(brandfills); |
528 | P(noprotos); |
529 | P(longchains); |
530 | P(recycles); |
531 | P(pcrecycles); |
532 | P(tests); |
533 | P(pchits); |
534 | P(protopchits); |
535 | P(initests); |
536 | P(inipchits); |
537 | P(inipcmisses); |
538 | P(settests); |
539 | P(addpchits); |
540 | P(setpchits); |
541 | P(setpcmisses); |
542 | P(slotchanges); |
543 | P(setmisses); |
544 | P(idmisses); |
545 | P(komisses); |
546 | P(vcmisses); |
547 | P(misses); |
548 | P(flushes); |
549 | P(pcpurges); |
550 | # undef P |
551 | |
552 | fprintf(fp, "hit rates: pc %g%% (proto %g%%), set %g%%, ini %g%%, full %g%%\n", |
553 | (100. * cache->pchits) / cache->tests, |
554 | (100. * cache->protopchits) / cache->tests, |
555 | (100. * (cache->addpchits + cache->setpchits)) |
556 | / cache->settests, |
557 | (100. * cache->inipchits) / cache->initests, |
558 | (100. * (cache->tests - cache->misses)) / cache->tests); |
559 | fflush(fp); |
560 | } |
561 | } |
562 | #endif |
563 | |
564 | PCMETER(cache->flushes++); |
565 | } |
566 | |
567 | void |
568 | js_PurgePropertyCacheForScript(JSContext *cx, JSScript *script) |
569 | { |
570 | JSPropertyCache *cache; |
571 | JSPropCacheEntry *entry; |
572 | |
573 | cache = &JS_PROPERTY_CACHE(cx); |
574 | for (entry = cache->table; entry < cache->table + PROPERTY_CACHE_SIZE; |
575 | entry++) { |
576 | if (JS_UPTRDIFF(entry->kpc, script->code) < script->length) { |
577 | entry->kpc = NULL; |
578 | entry->kshape = 0; |
579 | #ifdef DEBUG |
580 | entry->vcap = entry->vword = 0; |
581 | #endif |
582 | } |
583 | } |
584 | } |
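Note on the range test above: JS_UPTRDIFF takes an unsigned pointer difference, so any kpc that does not point into [script->code, script->code + script->length), including a NULL kpc, produces a huge unsigned value and fails the comparison. Only entries keyed by this script's bytecode are therefore cleared.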
585 | |
586 | /* |
587 | * Check if the current arena has enough space to fit nslots after sp and, if |
588 | * so, reserve the necessary space. |
589 | */ |
590 | static JS_REQUIRES_STACK JSBool |
591 | AllocateAfterSP(JSContext *cx, jsval *sp, uintN nslots) |
592 | { |
593 | uintN surplus; |
594 | jsval *sp2; |
595 | |
596 | JS_ASSERT((jsval *) cx->stackPool.current->base <= sp); |
597 | JS_ASSERT(sp <= (jsval *) cx->stackPool.current->avail); |
598 | surplus = (jsval *) cx->stackPool.current->avail - sp; |
599 | if (nslots <= surplus) |
600 | return JS_TRUE; |
601 | |
602 | /* |
603 | * No room before current->avail; check whether the arena has enough space to |
604 | * fit the missing slots before the limit. |
605 | */ |
606 | if (nslots > (size_t) ((jsval *) cx->stackPool.current->limit - sp)) |
607 | return JS_FALSE; |
608 | |
609 | JS_ARENA_ALLOCATE_CAST(sp2, jsval *, &cx->stackPool, |
610 | (nslots - surplus) * sizeof(jsval)); |
611 | JS_ASSERT(sp2 == sp + surplus); |
612 | return JS_TRUE; |
613 | } |
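A worked example of the arithmetic above: if sp leaves surplus == 3 free slots before current->avail and the caller asks for nslots == 5, only the 2 missing slots (nslots - surplus) are allocated from the arena, provided sp + 5 does not pass current->limit. The final assertion then checks that the newly allocated space begins exactly at sp + surplus, i.e. that the reservation really extends the same arena run.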
614 | |
615 | JS_STATIC_INTERPRET JS_REQUIRES_STACK jsval * |
616 | js_AllocRawStack(JSContext *cx, uintN nslots, void **markp) |
617 | { |
618 | jsval *sp; |
619 | |
620 | JS_ASSERT(nslots != 0); |
621 | js_LeaveTrace(cx); |
622 | |
623 | if (!cx->stackPool.first.next) { |
624 | int64 *timestamp; |
625 | |
626 | JS_ARENA_ALLOCATE_CAST(timestamp, int64 *, |
627 | &cx->stackPool, sizeof *timestamp); |
628 | if (!timestamp) { |
629 | js_ReportOutOfScriptQuota(cx); |
630 | return NULL; |
631 | } |
632 | *timestamp = JS_Now(); |
633 | } |
634 | |
635 | if (markp) |
636 | *markp = JS_ARENA_MARK(&cx->stackPool); |
637 | JS_ARENA_ALLOCATE_CAST(sp, jsval *, &cx->stackPool, nslots * sizeof(jsval)); |
638 | if (!sp) |
639 | js_ReportOutOfScriptQuota(cx); |
640 | return sp; |
641 | } |
642 | |
643 | JS_STATIC_INTERPRET JS_REQUIRES_STACK void |
644 | js_FreeRawStack(JSContext *cx, void *mark) |
645 | { |
646 | JS_ARENA_RELEASE(&cx->stackPool, mark); |
647 | } |
648 | |
649 | JS_REQUIRES_STACK JS_FRIEND_API(jsval *) |
650 | js_AllocStack(JSContext *cx, uintN nslots, void **markp) |
651 | { |
652 | jsval *sp; |
653 | JSArena *a; |
654 | JSStackHeader *sh; |
655 | |
656 | /* Callers don't check for zero nslots: we do to avoid empty segments. */ |
657 | if (nslots == 0) { |
658 | *markp = NULL; |
659 | return (jsval *) JS_ARENA_MARK(&cx->stackPool); |
660 | } |
661 | |
662 | /* Allocate 2 extra slots for the stack segment header we'll likely need. */ |
663 | sp = js_AllocRawStack(cx, 2 + nslots, markp); |
664 | if (!sp) |
665 | return NULL; |
666 | |
667 | /* Try to avoid another header if we can piggyback on the last segment. */ |
668 | a = cx->stackPool.current; |
669 | sh = cx->stackHeaders; |
670 | if (sh && JS_STACK_SEGMENT(sh) + sh->nslots == sp) { |
671 | /* Extend the last stack segment, give back the 2 header slots. */ |
672 | sh->nslots += nslots; |
673 | a->avail -= 2 * sizeof(jsval); |
674 | } else { |
675 | /* |
676 | * Need a new stack segment, so allocate and push a stack segment |
677 | * header from the 2 extra slots. |
678 | */ |
679 | sh = (JSStackHeader *)sp; |
680 | sh->nslots = nslots; |
681 | sh->down = cx->stackHeaders; |
682 | cx->stackHeaders = sh; |
683 | sp += 2; |
684 | } |
685 | |
686 | /* |
687 | * Store JSVAL_NULL using memset, to let compilers optimize as they see |
688 | * fit, in case a caller allocates and pushes GC-things one by one, which |
689 | * could nest a last-ditch GC that will scan this segment. |
690 | */ |
691 | memset(sp, 0, nslots * sizeof(jsval)); |
692 | return sp; |
693 | } |
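For reference, the segment layout the code above produces (two jsval-sized slots hold the JSStackHeader, then come the caller's slots):

/*
 *   [ sh->nslots | sh->down ]  [ slot 0 | slot 1 | ... | slot nslots-1 ]
 *   ^ JSStackHeader             ^ returned sp, memset so each slot is JSVAL_NULL
 *
 * When a later js_AllocStack lands immediately after the current segment,
 * the piggyback branch above grows sh->nslots in place and returns the two
 * would-be header slots to the arena instead of pushing a new header.
 */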
694 | |
695 | JS_REQUIRES_STACK JS_FRIEND_API(void) |
696 | js_FreeStack(JSContext *cx, void *mark) |
697 | { |
698 | JSStackHeader *sh; |
699 | jsuword slotdiff; |
700 | |
701 | /* Check for zero nslots allocation special case. */ |
702 | if (!mark) |
703 | return; |
704 | |
705 | /* We can assert because js_FreeStack always balances js_AllocStack. */ |
706 | sh = cx->stackHeaders; |
707 | JS_ASSERT(sh); |
708 | |
709 | /* If mark is in the current segment, reduce sh->nslots, else pop sh. */ |
710 | slotdiff = JS_UPTRDIFF(mark, JS_STACK_SEGMENT(sh)) / sizeof(jsval); |
711 | if (slotdiff < (jsuword)sh->nslots) |
712 | sh->nslots = slotdiff; |
713 | else |
714 | cx->stackHeaders = sh->down; |
715 | |
716 | /* Release the stackPool space allocated since mark was set. */ |
717 | JS_ARENA_RELEASE(&cx->stackPool, mark); |
718 | } |
719 | |
720 | JSObject * |
721 | js_GetScopeChain(JSContext *cx, JSStackFrame *fp) |
722 | { |
723 | JSObject *sharedBlock = fp->blockChain; |
724 | |
725 | if (!sharedBlock) { |
726 | /* |
727 | * Don't force a call object for a lightweight function call, but do |
728 | * insist that there is a call object for a heavyweight function call. |
729 | */ |
730 | JS_ASSERT(!fp->fun || |
731 | !(fp->fun->flags & JSFUN_HEAVYWEIGHT) || |
732 | fp->callobj); |
733 | JS_ASSERT(fp->scopeChain); |
734 | return fp->scopeChain; |
735 | } |
736 | |
737 | /* We don't handle cloning blocks on trace. */ |
738 | js_LeaveTrace(cx); |
739 | |
740 | /* |
741 | * We have one or more lexical scopes to reflect into fp->scopeChain, so |
742 | * make sure there's a call object at the current head of the scope chain, |
743 | * if this frame is a call frame. |
744 | * |
745 | * Also, identify the innermost compiler-allocated block we needn't clone. |
746 | */ |
747 | JSObject *limitBlock, *limitClone; |
748 | if (fp->fun && !fp->callobj) { |
749 | JS_ASSERT(OBJ_GET_CLASS(cx, fp->scopeChain) != &js_BlockClass || |
750 | OBJ_GET_PRIVATE(cx, fp->scopeChain) != fp); |
751 | if (!js_GetCallObject(cx, fp)) |
752 | return NULL; |
753 | |
754 | /* We know we must clone everything on blockChain. */ |
755 | limitBlock = limitClone = NULL; |
756 | } else { |
757 | /* |
758 | * scopeChain includes all blocks whose static scope we're within that |
759 | * have already been cloned. Find the innermost such block. Its |
760 | * prototype should appear on blockChain; we'll clone blockChain up |
761 | * to, but not including, that prototype. |
762 | */ |
763 | limitClone = fp->scopeChain; |
764 | while (OBJ_GET_CLASS(cx, limitClone) == &js_WithClass) |
765 | limitClone = OBJ_GET_PARENT(cx, limitClone); |
766 | JS_ASSERT(limitClone); |
767 | |
768 | /* |
769 | * It may seem like we don't know enough about limitClone to be able |
770 | * to just grab its prototype as we do here, but it's actually okay. |
771 | * |
772 | * If limitClone is a block object belonging to this frame, then its |
773 | * prototype is the innermost entry in blockChain that we have already |
774 | * cloned, and is thus the place to stop when we clone below. |
775 | * |
776 | * Otherwise, there are no blocks for this frame on scopeChain, and we |
777 | * need to clone the whole blockChain. In this case, limitBlock can |
778 | * point to any object known not to be on blockChain, since we simply |
779 | * loop until we hit limitBlock or NULL. If limitClone is a block, it |
780 | * isn't a block from this function, since blocks can't be nested |
781 | * within themselves on scopeChain (recursion is dynamic nesting, not |
782 | * static nesting). If limitClone isn't a block, its prototype won't |
783 | * be a block either. So we can just grab limitClone's prototype here |
784 | * regardless of its type or which frame it belongs to. |
785 | */ |
786 | limitBlock = OBJ_GET_PROTO(cx, limitClone); |
787 | |
788 | /* If the innermost block has already been cloned, we are done. */ |
789 | if (limitBlock == sharedBlock) |
790 | return fp->scopeChain; |
791 | } |
792 | |
793 | /* |
794 | * Special-case cloning the innermost block; this doesn't have enough in |
795 | * common with subsequent steps to include in the loop. |
796 | * |
797 | * We pass fp->scopeChain rather than null even though we may override the |
798 | * parent slot later, because passing null triggers useless computation of |
799 | * the slot's value in js_NewObject, which js_CloneBlockObject calls. |
800 | */ |
801 | JSObject *innermostNewChild |
802 | = js_CloneBlockObject(cx, sharedBlock, fp->scopeChain, fp); |
803 | if (!innermostNewChild) |
804 | return NULL; |
805 | JSAutoTempValueRooter tvr(cx, innermostNewChild); |
806 | |
807 | /* |
808 | * Clone our way towards outer scopes until we reach the innermost |
809 | * enclosing function, or the innermost block we've already cloned. |
810 | */ |
811 | JSObject *newChild = innermostNewChild; |
812 | for (;;) { |
813 | JS_ASSERT(OBJ_GET_PROTO(cx, newChild) == sharedBlock); |
814 | sharedBlock = OBJ_GET_PARENT(cx, sharedBlock); |
815 | |
816 | /* Sometimes limitBlock will be NULL, so check that first. */ |
817 | if (sharedBlock == limitBlock || !sharedBlock) |
818 | break; |
819 | |
820 | /* As in the call above, we don't know the real parent yet. */ |
821 | JSObject *clone |
822 | = js_CloneBlockObject(cx, sharedBlock, fp->scopeChain, fp); |
823 | if (!clone) |
824 | return NULL; |
825 | |
826 | /* |
827 | * Avoid OBJ_SET_PARENT overhead as newChild cannot escape to |
828 | * other threads. |
829 | */ |
830 | STOBJ_SET_PARENT(newChild, clone); |
831 | newChild = clone; |
832 | } |
833 | |
834 | /* |
835 | * If we found a limit block belonging to this frame, then we should have |
836 | * found it in blockChain. |
837 | */ |
838 | JS_ASSERT_IF(limitBlock && |
839 | OBJ_GET_CLASS(cx, limitBlock) == &js_BlockClass && |
840 | OBJ_GET_PRIVATE(cx, limitClone) == fp, |
841 | sharedBlock); |
842 | |
843 | /* Place our newly cloned blocks at the head of the scope chain. */ |
844 | fp->scopeChain = innermostNewChild; |
845 | return fp->scopeChain; |
846 | } |
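An illustrative script (assuming JS1.7-style let blocks are enabled) for which the cloning above matters: the lambda below needs the let block reflected as an object on its scope chain, so the compiler-allocated block on fp->blockChain is cloned onto fp->scopeChain when the scope chain is requested.

/*
 *   function f() {
 *       let (x = 1) {
 *           return function () { return ++x; };   // captures the let block
 *       }
 *   }
 */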
847 | |
848 | JSBool |
849 | js_GetPrimitiveThis(JSContext *cx, jsval *vp, JSClass *clasp, jsval *thisvp) |
850 | { |
851 | jsval v; |
852 | JSObject *obj; |
853 | |
854 | v = vp[1]; |
855 | if (JSVAL_IS_OBJECT(v)) { |
856 | obj = JS_THIS_OBJECT(cx, vp); |
857 | if (!JS_InstanceOf(cx, obj, clasp, vp + 2)) |
858 | return JS_FALSE; |
859 | v = OBJ_GET_SLOT(cx, obj, JSSLOT_PRIVATE); |
860 | } |
861 | *thisvp = v; |
862 | return JS_TRUE; |
863 | } |
864 | |
865 | /* |
866 | * ECMA requires "the global object", but in embeddings such as the browser, |
867 | * which have multiple top-level objects (windows, frames, etc. in the DOM), |
868 | * we prefer fun's parent. An example that causes this code to run: |
869 | * |
870 | * // in window w1 |
871 | * function f() { return this } |
872 | * function g() { return f } |
873 | * |
874 | * // in window w2 |
875 | * var h = w1.g() |
876 | * alert(h() == w1) |
877 | * |
878 | * The alert should display "true". |
879 | */ |
880 | JS_STATIC_INTERPRET JSObject * |
881 | js_ComputeGlobalThis(JSContext *cx, JSBool lazy, jsval *argv) |
882 | { |
883 | JSObject *thisp; |
884 | |
885 | if (JSVAL_IS_PRIMITIVE(argv[-2]) || |
886 | !OBJ_GET_PARENT(cx, JSVAL_TO_OBJECT(argv[-2]))) { |
887 | thisp = cx->globalObject; |
888 | } else { |
889 | JSStackFrame *fp; |
890 | jsid id; |
891 | jsval v; |
892 | uintN attrs; |
893 | JSBool ok; |
894 | JSObject *parent; |
895 | |
896 | /* |
897 | * Walk up the parent chain, first checking that the running script |
898 | * has access to the callee's parent object. Note that if lazy, the |
899 | * running script whose principals we want to check is the script |
900 | * associated with fp->down, not with fp. |
901 | * |
902 | * FIXME: 417851 -- this access check should not be required, as it |
903 | * imposes a performance penalty on all js_ComputeGlobalThis calls, |
904 | * and it represents a maintenance hazard. |
905 | */ |
906 | fp = js_GetTopStackFrame(cx); /* quell GCC overwarning */ |
907 | if (lazy) { |
908 | JS_ASSERT(fp->argv == argv); |
909 | fp->dormantNext = cx->dormantFrameChain; |
910 | cx->dormantFrameChain = fp; |
911 | cx->fp = fp->down; |
912 | fp->down = NULL; |
913 | } |
914 | thisp = JSVAL_TO_OBJECT(argv[-2]); |
915 | id = ATOM_TO_JSID(cx->runtime->atomState.parentAtom); |
916 | |
917 | ok = OBJ_CHECK_ACCESS(cx, thisp, id, JSACC_PARENT, &v, &attrs); |
918 | if (lazy) { |
919 | cx->dormantFrameChain = fp->dormantNext; |
920 | fp->dormantNext = NULL; |
921 | fp->down = cx->fp; |
922 | cx->fp = fp; |
923 | } |
924 | if (!ok) |
925 | return NULL; |
926 | |
927 | thisp = JSVAL_IS_VOID(v) |
928 | ? OBJ_GET_PARENT(cx, thisp) |
929 | : JSVAL_TO_OBJECT(v); |
930 | while ((parent = OBJ_GET_PARENT(cx, thisp)) != NULL) |
931 | thisp = parent; |
932 | } |
933 | |
934 | /* Some objects (e.g., With) delegate 'this' to another object. */ |
935 | thisp = OBJ_THIS_OBJECT(cx, thisp); |
936 | if (!thisp) |
937 | return NULL; |
938 | argv[-1] = OBJECT_TO_JSVAL(thisp); |
939 | return thisp; |
940 | } |
941 | |
942 | static JSObject * |
943 | ComputeThis(JSContext *cx, JSBool lazy, jsval *argv) |
944 | { |
945 | JSObject *thisp; |
946 | |
947 | JS_ASSERT(!JSVAL_IS_NULL(argv[-1])); |
948 | if (!JSVAL_IS_OBJECT(argv[-1])) { |
949 | if (!js_PrimitiveToObject(cx, &argv[-1])) |
950 | return NULL; |
951 | thisp = JSVAL_TO_OBJECT(argv[-1]); |
952 | } else { |
953 | thisp = JSVAL_TO_OBJECT(argv[-1]); |
954 | if (OBJ_GET_CLASS(cx, thisp) == &js_CallClass || |
955 | OBJ_GET_CLASS(cx, thisp) == &js_BlockClass) { |
956 | return js_ComputeGlobalThis(cx, lazy, argv); |
957 | } |
958 | |
959 | /* Some objects (e.g., With) delegate 'this' to another object. */ |
960 | thisp = OBJ_THIS_OBJECT(cx, thisp); |
961 | if (!thisp) |
962 | return NULL; |
963 | argv[-1] = OBJECT_TO_JSVAL(thisp); |
964 | } |
965 | return thisp; |
966 | } |
967 | |
968 | JSObject * |
969 | js_ComputeThis(JSContext *cx, JSBool lazy, jsval *argv) |
970 | { |
971 | if (JSVAL_IS_NULL(argv[-1])) |
972 | return js_ComputeGlobalThis(cx, lazy, argv); |
973 | return ComputeThis(cx, lazy, argv); |
974 | } |
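To summarize the dispatch above: a null argv[-1] (a call with no base object) computes the global |this|; a primitive argv[-1] is boxed in place via js_PrimitiveToObject, so for example a string primitive becomes a String object before the callee sees it; and Call or Block objects, which must not leak to scripts as |this|, also fall back to js_ComputeGlobalThis.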
975 | |
976 | #if JS_HAS_NO_SUCH_METHOD |
977 | |
978 | #define JSSLOT_FOUND_FUNCTION JSSLOT_PRIVATE |
979 | #define JSSLOT_SAVED_ID (JSSLOT_PRIVATE + 1) |
980 | |
981 | JSClass js_NoSuchMethodClass = { |
982 | "NoSuchMethod", |
983 | JSCLASS_HAS_RESERVED_SLOTS(2) | JSCLASS_IS_ANONYMOUS, |
984 | JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, |
985 | JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, JS_FinalizeStub, |
986 | NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL |
987 | }; |
988 | |
989 | /* |
990 | * When JSOP_CALLPROP or JSOP_CALLELEM does not find the method property of |
991 | * the base object, we search for the __noSuchMethod__ method in the base. |
992 | * If it exists, we store the method and the property's id into an object of |
993 | * the NoSuchMethod class and store this object in the callee's stack slot. |
994 | * Later, js_Invoke will recognise such an object and transfer control to |
995 | * NoSuchMethod, which invokes the method as: |
996 | * |
997 | * this.__noSuchMethod__(id, args) |
998 | * |
999 | * where id is the name of the method that this invocation attempted to |
1000 | * call by name, and args is an Array containing this invocation's actual |
1001 | * parameters. |
1002 | */ |
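/*
 * For example, in an illustrative script (not part of this file):
 *
 *   var obj = {
 *       __noSuchMethod__: function (id, args) { return id + "/" + args.length; }
 *   };
 *   obj.foo(1, 2);   // no obj.foo exists, so this evaluates to "foo/2"
 */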
1003 | JS_STATIC_INTERPRET JSBool |
1004 | js_OnUnknownMethod(JSContext *cx, jsval *vp) |
1005 | { |
1006 | JSObject *obj; |
1007 | jsid id; |
1008 | JSTempValueRooter tvr; |
1009 | JSBool ok; |
1010 | |
1011 | JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1])); |
1012 | obj = JSVAL_TO_OBJECT(vp[1]); |
1013 | JS_PUSH_SINGLE_TEMP_ROOT(cx, JSVAL_NULL, &tvr); |
1014 | |
1015 | MUST_FLOW_THROUGH("out"); |
1016 | id = ATOM_TO_JSID(cx->runtime->atomState.noSuchMethodAtom); |
1017 | ok = js_GetMethod(cx, obj, id, false, &tvr.u.value); |
1018 | if (!ok) |
1019 | goto out; |
1020 | if (JSVAL_IS_PRIMITIVE(tvr.u.value)) { |
1021 | vp[0] = tvr.u.value; |
1022 | } else { |
1023 | #if JS_HAS_XML_SUPPORT |
1024 | /* Extract the function name from function::name qname. */ |
1025 | if (!JSVAL_IS_PRIMITIVE(vp[0])) { |
1026 | obj = JSVAL_TO_OBJECT(vp[0]); |
1027 | ok = js_IsFunctionQName(cx, obj, &id); |
1028 | if (!ok) |
1029 | goto out; |
1030 | if (id != 0) |
1031 | vp[0] = ID_TO_VALUE(id); |
1032 | } |
1033 | #endif |
1034 | obj = js_NewObjectWithGivenProto(cx, &js_NoSuchMethodClass, |
1035 | NULL, NULL, 0); |
1036 | if (!obj) { |
1037 | ok = JS_FALSE; |
1038 | goto out; |
1039 | } |
1040 | obj->fslots[JSSLOT_FOUND_FUNCTION] = tvr.u.value; |
1041 | obj->fslots[JSSLOT_SAVED_ID] = vp[0]; |
1042 | vp[0] = OBJECT_TO_JSVAL(obj); |
1043 | } |
1044 | ok = JS_TRUE; |
1045 | |
1046 | out: |
1047 | JS_POP_TEMP_ROOT(cx, &tvr); |
1048 | return ok; |
1049 | } |
1050 | |
1051 | static JS_REQUIRES_STACK JSBool |
1052 | NoSuchMethod(JSContext *cx, uintN argc, jsval *vp, uint32 flags) |
1053 | { |
1054 | jsval *invokevp; |
1055 | void *mark; |
1056 | JSBool ok; |
1057 | JSObject *obj, *argsobj; |
1058 | |
1059 | invokevp = js_AllocStack(cx, 2 + 2, &mark); |
1060 | if (!invokevp) |
1061 | return JS_FALSE; |
1062 | |
1063 | JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[0])); |
1064 | JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1])); |
1065 | obj = JSVAL_TO_OBJECT(vp[0]); |
1066 | JS_ASSERT(STOBJ_GET_CLASS(obj) == &js_NoSuchMethodClass); |
1067 | |
1068 | invokevp[0] = obj->fslots[JSSLOT_FOUND_FUNCTION]; |
1069 | invokevp[1] = vp[1]; |
1070 | invokevp[2] = obj->fslots[JSSLOT_SAVED_ID]; |
1071 | argsobj = js_NewArrayObject(cx, argc, vp + 2); |
1072 | if (!argsobj) { |
1073 | ok = JS_FALSE; |
1074 | } else { |
1075 | invokevp[3] = OBJECT_TO_JSVAL(argsobj); |
1076 | ok = (flags & JSINVOKE_CONSTRUCT) |
1077 | ? js_InvokeConstructor(cx, 2, JS_TRUE, invokevp) |
1078 | : js_Invoke(cx, 2, invokevp, flags); |
1079 | vp[0] = invokevp[0]; |
1080 | } |
1081 | js_FreeStack(cx, mark); |
1082 | return ok; |
1083 | } |
1084 | |
1085 | #endif /* JS_HAS_NO_SUCH_METHOD */ |
1086 | |
1087 | /* |
1088 | * We check whether the function accepts a primitive value as |this|. For that |
1089 | * we use a table that maps the value's tag to the corresponding function flag. |
1090 | */ |
1091 | JS_STATIC_ASSERT(JSVAL_INT == 1); |
1092 | JS_STATIC_ASSERT(JSVAL_DOUBLE == 2); |
1093 | JS_STATIC_ASSERT(JSVAL_STRING == 4); |
1094 | JS_STATIC_ASSERT(JSVAL_BOOLEAN == 6); |
1095 | |
1096 | const uint16 js_PrimitiveTestFlags[] = { |
1097 | JSFUN_THISP_NUMBER, /* INT */ |
1098 | JSFUN_THISP_NUMBER, /* DOUBLE */ |
1099 | JSFUN_THISP_NUMBER, /* INT */ |
1100 | JSFUN_THISP_STRING, /* STRING */ |
1101 | JSFUN_THISP_NUMBER, /* INT */ |
1102 | JSFUN_THISP_BOOLEAN, /* BOOLEAN */ |
1103 | JSFUN_THISP_NUMBER /* INT */ |
1104 | }; |
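The table is indexed by the value's tag minus one, which the static asserts above pin down and which explains the repeated JSFUN_THISP_NUMBER padding rows. The PRIMITIVE_THIS_TEST check used in js_Invoke below is therefore roughly the following (a sketch, not the exact macro from jsinterp.h):

/*
 *   fun->flags & js_PrimitiveTestFlags[JSVAL_TAG(thisv) - 1]
 *
 * so, for example, a string primitive |this| (tag JSVAL_STRING == 4) is
 * accepted only by natives declared with JSFUN_THISP_STRING.
 */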
1105 | |
1106 | /* |
1107 | * Find a function reference and its 'this' object, the implicit first |
1108 | * parameter, under argc arguments on cx's stack, and call the function. |
1109 | * Push missing required arguments, allocate declared local variables, and |
1110 | * pop everything when done. Then push the return value. |
1111 | */ |
1112 | JS_REQUIRES_STACK JS_FRIEND_API(JSBool) |
1113 | js_Invoke(JSContext *cx, uintN argc, jsval *vp, uintN flags) |
1114 | { |
1115 | void *mark; |
1116 | JSStackFrame frame; |
1117 | jsval *sp, *argv, *newvp; |
1118 | jsval v; |
1119 | JSObject *funobj, *parent; |
1120 | JSBool ok; |
1121 | JSClass *clasp; |
1122 | JSObjectOps *ops; |
1123 | JSNative native; |
1124 | JSFunction *fun; |
1125 | JSScript *script; |
1126 | uintN nslots, i; |
1127 | uint32 rootedArgsFlag; |
1128 | JSInterpreterHook hook; |
1129 | void *hookData; |
1130 | |
1131 | /* [vp .. vp + 2 + argc) must belong to the last JS stack arena. */ |
1132 | JS_ASSERT((jsval *) cx->stackPool.current->base <= vp); |
1133 | JS_ASSERT(vp + 2 + argc <= (jsval *) cx->stackPool.current->avail); |
1134 | |
1135 | /* Mark the top of stack and load frequently-used registers. */ |
1136 | mark = JS_ARENA_MARK(&cx->stackPool); |
1137 | MUST_FLOW_THROUGH("out2"); |
1138 | v = *vp; |
1139 | |
1140 | if (JSVAL_IS_PRIMITIVE(v)) |
1141 | goto bad; |
1142 | |
1143 | funobj = JSVAL_TO_OBJECT(v); |
1144 | parent = OBJ_GET_PARENT(cx, funobj); |
1145 | clasp = OBJ_GET_CLASS(cx, funobj); |
1146 | if (clasp != &js_FunctionClass) { |
1147 | #if JS_HAS_NO_SUCH_METHOD |
1148 | if (clasp == &js_NoSuchMethodClass) { |
1149 | ok = NoSuchMethod(cx, argc, vp, flags); |
1150 | goto out2; |
1151 | } |
1152 | #endif |
1153 | |
1154 | /* Function is inlined; all other classes use object ops. */ |
1155 | ops = funobj->map->ops; |
1156 | |
1157 | /* |
1158 | * XXX this makes no sense -- why convert to function if clasp->call? |
1159 | * XXX better to call that hook without converting |
1160 | * XXX the only thing that needs fixing is liveconnect |
1161 | * |
1162 | * FIXME bug 408416: try converting to function, for API compatibility |
1163 | * if there is a call op defined. |
1164 | */ |
1165 | if ((ops == &js_ObjectOps) ? clasp->call : ops->call) { |
1166 | ok = clasp->convert(cx, funobj, JSTYPE_FUNCTION, &v); |
1167 | if (!ok) |
1168 | goto out2; |
1169 | |
1170 | if (VALUE_IS_FUNCTION(cx, v)) { |
1171 | /* Make vp refer to funobj to keep it available as argv[-2]. */ |
1172 | *vp = v; |
1173 | funobj = JSVAL_TO_OBJECT(v); |
1174 | parent = OBJ_GET_PARENT(cx, funobj); |
1175 | goto have_fun; |
1176 | } |
1177 | } |
1178 | fun = NULL; |
1179 | script = NULL; |
1180 | nslots = 0; |
1181 | |
1182 | /* Try a call or construct native object op. */ |
1183 | if (flags & JSINVOKE_CONSTRUCT) { |
1184 | if (!JSVAL_IS_OBJECT(vp[1])) { |
1185 | ok = js_PrimitiveToObject(cx, &vp[1]); |
1186 | if (!ok) |
1187 | goto out2; |
1188 | } |
1189 | native = ops->construct; |
1190 | } else { |
1191 | native = ops->call; |
1192 | } |
1193 | if (!native) |
1194 | goto bad; |
1195 | } else { |
1196 | have_fun: |
1197 | /* Get private data and set derived locals from it. */ |
1198 | fun = GET_FUNCTION_PRIVATE(cx, funobj); |
1199 | nslots = FUN_MINARGS(fun); |
1200 | nslots = (nslots > argc) ? nslots - argc : 0; |
1201 | if (FUN_INTERPRETED(fun)) { |
1202 | native = NULL; |
1203 | script = fun->u.i.script; |
1204 | JS_ASSERT(script); |
1205 | } else { |
1206 | native = fun->u.n.native; |
1207 | script = NULL; |
1208 | nslots += fun->u.n.extra; |
1209 | } |
1210 | |
1211 | if (JSFUN_BOUND_METHOD_TEST(fun->flags)) { |
1212 | /* Handle bound method special case. */ |
1213 | vp[1] = OBJECT_TO_JSVAL(parent); |
1214 | } else if (!JSVAL_IS_OBJECT(vp[1])) { |
1215 | JS_ASSERT(!(flags & JSINVOKE_CONSTRUCT)); |
1216 | if (PRIMITIVE_THIS_TEST(fun, vp[1])) |
1217 | goto start_call; |
1218 | } |
1219 | } |
1220 | |
1221 | if (flags & JSINVOKE_CONSTRUCT) { |
1222 | JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1])); |
1223 | } else { |
1224 | /* |
1225 | * We must call js_ComputeThis in case we are not called from the |
1226 | * interpreter, where a prior bytecode has computed an appropriate |
1227 | * |this| already. |
1228 | * |
1229 | * But we need to compute |this| eagerly only for so-called "slow" |
1230 | * (i.e., not fast) native functions. Fast natives must use either |
1231 | * JS_THIS or JS_THIS_OBJECT, and scripted functions will go through |
1232 | * the appropriate this-computing bytecode, e.g., JSOP_THIS. |
1233 | */ |
1234 | if (native && (!fun || !(fun->flags & JSFUN_FAST_NATIVE))) { |
1235 | if (!js_ComputeThis(cx, JS_FALSE, vp + 2)) { |
1236 | ok = JS_FALSE; |
1237 | goto out2; |
1238 | } |
1239 | flags |= JSFRAME_COMPUTED_THIS; |
1240 | } |
1241 | } |
1242 | |
1243 | start_call: |
1244 | if (native && fun && (fun->flags & JSFUN_FAST_NATIVE)) { |
1245 | #ifdef DEBUG_NOT_THROWING |
1246 | JSBool alreadyThrowing = cx->throwing; |
1247 | #endif |
1248 | JS_ASSERT(nslots == 0); |
1249 | #if JS_HAS_LVALUE_RETURN |
1250 | /* Set by JS_SetCallReturnValue2, used to return reference types. */ |
1251 | cx->rval2set = JS_FALSE; |
1252 | #endif |
1253 | ok = ((JSFastNative) native)(cx, argc, vp); |
1254 | JS_RUNTIME_METER(cx->runtime, nativeCalls); |
1255 | #ifdef DEBUG_NOT_THROWING |
1256 | if (ok && !alreadyThrowing) |
1257 | ASSERT_NOT_THROWING(cx); |
1258 | #endif |
1259 | goto out2; |
1260 | } |
1261 | |
1262 | argv = vp + 2; |
1263 | sp = argv + argc; |
1264 | |
1265 | rootedArgsFlag = JSFRAME_ROOTED_ARGV; |
1266 | if (nslots != 0) { |
1267 | /* |
1268 | * The extra slots required by the function are contiguous with the argument |
1269 | * slots. Thus, when the last stack pool arena does not have room to |
1270 | * fit nslots right after sp and AllocateAfterSP fails, we have to copy |
1271 | * [vp..vp+2+argc) slots and clear rootedArgsFlag to root the copy. |
1272 | */ |
1273 | if (!AllocateAfterSP(cx, sp, nslots)) { |
1274 | rootedArgsFlag = 0; |
1275 | newvp = js_AllocRawStack(cx, 2 + argc + nslots, NULL); |
1276 | if (!newvp) { |
1277 | ok = JS_FALSE; |
1278 | goto out2; |
1279 | } |
1280 | memcpy(newvp, vp, (2 + argc) * sizeof(jsval)); |
1281 | argv = newvp + 2; |
1282 | sp = argv + argc; |
1283 | } |
1284 | |
1285 | /* Push void to initialize missing args. */ |
1286 | i = nslots; |
1287 | do { |
1288 | *sp++ = JSVAL_VOID; |
1289 | } while (--i != 0); |
1290 | } |
1291 | |
1292 | /* Allocate space for local variables and stack of interpreted function. */ |
1293 | if (script && script->nslots != 0) { |
1294 | if (!AllocateAfterSP(cx, sp, script->nslots)) { |
1295 | /* NB: Discontinuity between argv and slots, stack slots. */ |
1296 | sp = js_AllocRawStack(cx, script->nslots, NULL); |
1297 | if (!sp) { |
1298 | ok = JS_FALSE; |
1299 | goto out2; |
1300 | } |
1301 | } |
1302 | |
1303 | /* Push void to initialize local variables. */ |
1304 | for (jsval *end = sp + fun->u.i.nvars; sp != end; ++sp) |
1305 | *sp = JSVAL_VOID; |
1306 | } |
1307 | |
1308 | /* |
1309 | * Initialize the frame. |
1310 | * |
1311 | * To set thisp we use an explicit cast and not JSVAL_TO_OBJECT, as vp[1] |
1312 | * can be a primitive value here for those native functions specified with |
1313 | * JSFUN_THISP_(NUMBER|STRING|BOOLEAN) flags. |
1314 | */ |
1315 | frame.thisp = (JSObject *)vp[1]; |
1316 | frame.varobj = NULL; |
1317 | frame.callobj = frame.argsobj = NULL; |
1318 | frame.script = script; |
1319 | frame.callee = funobj; |
1320 | frame.fun = fun; |
1321 | frame.argc = argc; |
1322 | frame.argv = argv; |
1323 | |
1324 | /* Default return value for a constructor is the new object. */ |
1325 | frame.rval = (flags & JSINVOKE_CONSTRUCT) ? vp[1] : JSVAL_VOID; |
1326 | frame.down = cx->fp; |
1327 | frame.annotation = NULL; |
1328 | frame.scopeChain = NULL; /* set below for real, after cx->fp is set */ |
1329 | frame.blockChain = NULL; |
1330 | frame.regs = NULL; |
1331 | frame.imacpc = NULL; |
1332 | frame.slots = NULL; |
1333 | frame.sharpDepth = 0; |
1334 | frame.sharpArray = NULL; |
1335 | frame.flags = flags | rootedArgsFlag; |
1336 | frame.dormantNext = NULL; |
1337 | frame.xmlNamespace = NULL; |
1338 | frame.displaySave = NULL; |
1339 | |
1340 | MUST_FLOW_THROUGH("out"); |
1341 | cx->fp = &frame; |
1342 | |
1343 | /* Init these now in case we goto out before first hook call. */ |
1344 | hook = cx->debugHooks->callHook; |
1345 | hookData = NULL; |
1346 | |
1347 | if (native) { |
1348 | /* If native, use caller varobj and scopeChain for eval. */ |
1349 | JS_ASSERT(!frame.varobj); |
1350 | JS_ASSERT(!frame.scopeChain); |
1351 | if (frame.down) { |
1352 | frame.varobj = frame.down->varobj; |
1353 | frame.scopeChain = frame.down->scopeChain; |
1354 | } |
1355 | |
1356 | /* But ensure that we have a scope chain. */ |
1357 | if (!frame.scopeChain) |
1358 | frame.scopeChain = parent; |
1359 | } else { |
1360 | /* Use parent scope so js_GetCallObject can find the right "Call". */ |
1361 | frame.scopeChain = parent; |
1362 | if (JSFUN_HEAVYWEIGHT_TEST(fun->flags)) { |
1363 | /* Scope with a call object parented by the callee's parent. */ |
1364 | if (!js_GetCallObject(cx, &frame)) { |
1365 | ok = JS_FALSE; |
1366 | goto out; |
1367 | } |
1368 | } |
1369 | frame.slots = sp - fun->u.i.nvars; |
1370 | } |
1371 | |
1372 | /* Call the hook if present after we fully initialized the frame. */ |
1373 | if (hook) |
1374 | hookData = hook(cx, &frame, JS_TRUE, 0, cx->debugHooks->callHookData); |
1375 | |
1376 | /* Call the function, either a native method or an interpreted script. */ |
1377 | if (native) { |
1378 | #ifdef DEBUG_NOT_THROWING |
1379 | JSBool alreadyThrowing = cx->throwing; |
1380 | #endif |
1381 | |
1382 | #if JS_HAS_LVALUE_RETURN |
1383 | /* Set by JS_SetCallReturnValue2, used to return reference types. */ |
1384 | cx->rval2set = JS_FALSE; |
1385 | #endif |
1386 | ok = native(cx, frame.thisp, argc, frame.argv, &frame.rval); |
1387 | JS_RUNTIME_METER(cx->runtime, nativeCalls); |
1388 | #ifdef DEBUG_NOT_THROWING |
1389 | if (ok && !alreadyThrowing) |
1390 | ASSERT_NOT_THROWING(cx); |
1391 | #endif |
1392 | } else { |
1393 | JS_ASSERT(script); |
1394 | ok = js_Interpret(cx); |
1395 | } |
1396 | |
1397 | out: |
1398 | if (hookData) { |
1399 | hook = cx->debugHooks->callHook; |
1400 | if (hook) |
1401 | hook(cx, &frame, JS_FALSE, &ok, hookData); |
1402 | } |
1403 | |
1404 | /* If frame has a call object, sync values and clear back-pointer. */ |
1405 | if (frame.callobj) |
1406 | ok &= js_PutCallObject(cx, &frame); |
1407 | |
1408 | /* If frame has an arguments object, sync values and clear back-pointer. */ |
1409 | if (frame.argsobj) |
1410 | ok &= js_PutArgsObject(cx, &frame); |
1411 | |
1412 | *vp = frame.rval; |
1413 | |
1414 | /* Restore cx->fp now that we're done releasing frame objects. */ |
1415 | cx->fp = frame.down; |
1416 | |
1417 | out2: |
1418 | /* Pop everything we may have allocated off the stack. */ |
1419 | JS_ARENA_RELEASE(&cx->stackPool, mark); |
1420 | if (!ok) |
1421 | *vp = JSVAL_NULL; |
1422 | return ok; |
1423 | |
1424 | bad: |
1425 | js_ReportIsNotFunction(cx, vp, flags & JSINVOKE_FUNFLAGS); |
1426 | ok = JS_FALSE; |
1427 | goto out2; |
1428 | } |
1429 | |
1430 | JSBool |
1431 | js_InternalInvoke(JSContext *cx, JSObject *obj, jsval fval, uintN flags, |
1432 | uintN argc, jsval *argv, jsval *rval) |
1433 | { |
1434 | jsval *invokevp; |
1435 | void *mark; |
1436 | JSBool ok; |
1437 | |
1438 | js_LeaveTrace(cx); |
1439 | invokevp = js_AllocStack(cx, 2 + argc, &mark); |
1440 | if (!invokevp) |
1441 | return JS_FALSE; |
1442 | |
1443 | invokevp[0] = fval; |
1444 | invokevp[1] = OBJECT_TO_JSVAL(obj); |
1445 | memcpy(invokevp + 2, argv, argc * sizeof *argv); |
1446 | |
1447 | ok = js_Invoke(cx, argc, invokevp, flags); |
1448 | if (ok) { |
1449 | /* |
1450 | * Store *rval in a scoped local root if a scope is open, else in |
1451 | * the lastInternalResult pigeon-hole GC root, solely so users of |
1452 | * js_InternalInvoke and its direct and indirect (js_ValueToString for |
1453 | * example) callers do not need to manage roots for local, temporary |
1454 | * references to such results. |
1455 | */ |
1456 | *rval = *invokevp; |
1457 | if (JSVAL_IS_GCTHING(*rval) && *rval != JSVAL_NULL) { |
1458 | if (cx->localRootStack) { |
1459 | if (js_PushLocalRoot(cx, cx->localRootStack, *rval) < 0) |
1460 | ok = JS_FALSE; |
1461 | } else { |
1462 | cx->weakRoots.lastInternalResult = *rval; |
1463 | } |
1464 | } |
1465 | } |
1466 | |
1467 | js_FreeStack(cx, mark); |
1468 | return ok; |
1469 | } |
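A minimal usage sketch from native code; the function and its arguments are hypothetical, and js_InternalCall, used below in js_InternalGetOrSet, is presumably the flags == 0 form that forwards here:

static JSBool
ExampleCallFunctionValue(JSContext *cx, JSObject *obj, jsval fval, jsval arg0,
                         jsval *rval)
{
    jsval argv[1];

    argv[0] = arg0;

    /*
     * The result is stored in *rval and, per the comment above, also kept
     * alive by a scoped local root or the lastInternalResult weak root.
     */
    return js_InternalCall(cx, obj, fval, 1, argv, rval);
}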
1470 | |
1471 | JSBool |
1472 | js_InternalGetOrSet(JSContext *cx, JSObject *obj, jsid id, jsval fval, |
1473 | JSAccessMode mode, uintN argc, jsval *argv, jsval *rval) |
1474 | { |
1475 | JSSecurityCallbacks *callbacks; |
1476 | |
1477 | js_LeaveTrace(cx); |
1478 | |
1479 | /* |
1480 | * js_InternalInvoke could result in another attempt to get or set the same |
1481 | * id; see bug 355497. |
1482 | */ |
1483 | JS_CHECK_RECURSION(cx, return JS_FALSE); |
1484 | |
1485 | /* |
1486 | * Check general (not object-ops/class-specific) access from the running |
1487 | * script to obj.id only if id has a scripted getter or setter that we're |
1488 | * about to invoke. If we don't check this case, nothing else will -- no |
1489 | * other native code has the chance to check. |
1490 | * |
1491 | * Contrast this non-native (scripted) case with native getter and setter |
1492 | * accesses, where the native itself must do an access check, if security |
1493 | * policies require it. We make a checkAccess or checkObjectAccess call |
1494 | * back to the embedding program only in those cases where we're not going |
1495 | * to call an embedding-defined native function, getter, setter, or class |
1496 | * hook anyway. Where we do call such a native, there's no need for the |
1497 | * engine to impose a separate access check callback on all embeddings -- |
1498 | * many embeddings have no security policy at all. |
1499 | */ |
1500 | JS_ASSERT(mode == JSACC_READ || mode == JSACC_WRITE); |
1501 | callbacks = JS_GetSecurityCallbacks(cx); |
1502 | if (callbacks && |
1503 | callbacks->checkObjectAccess && |
1504 | VALUE_IS_FUNCTION(cx, fval) && |
1505 | FUN_INTERPRETED(GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fval))) && |
1506 | !callbacks->checkObjectAccess(cx, obj, ID_TO_VALUE(id), mode, &fval)) { |
1507 | return JS_FALSE; |
1508 | } |
1509 | |
1510 | return js_InternalCall(cx, obj, fval, argc, argv, rval); |
1511 | } |
1512 | |
1513 | JSBool |
1514 | js_Execute(JSContext *cx, JSObject *chain, JSScript *script, |
1515 | JSStackFrame *down, uintN flags, jsval *result) |
1516 | { |
1517 | JSInterpreterHook hook; |
1518 | void *hookData, *mark; |
1519 | JSStackFrame *oldfp, frame; |
1520 | JSObject *obj, *tmp; |
1521 | JSBool ok; |
1522 | |
1523 | js_LeaveTrace(cx); |
1524 | |
1525 | #ifdef INCLUDE_MOZILLA_DTRACE |
1526 | if (JAVASCRIPT_EXECUTE_START_ENABLED()) |
1527 | jsdtrace_execute_start(script); |
1528 | #endif |
1529 | |
1530 | hook = cx->debugHooks->executeHook; |
1531 | hookData = mark = NULL; |
1532 | oldfp = js_GetTopStackFrame(cx); |
1533 | frame.script = script; |
1534 | if (down) { |
1535 | /* Propagate arg state for eval and the debugger API. */ |
1536 | frame.callobj = down->callobj; |
1537 | frame.argsobj = down->argsobj; |
1538 | frame.varobj = down->varobj; |
1539 | frame.callee = down->callee; |
1540 | frame.fun = down->fun; |
1541 | frame.thisp = down->thisp; |
1542 | if (down->flags & JSFRAME_COMPUTED_THIS) |
1543 | flags |= JSFRAME_COMPUTED_THIS; |
1544 | frame.argc = down->argc; |
1545 | frame.argv = down->argv; |
1546 | frame.annotation = down->annotation; |
1547 | frame.sharpArray = down->sharpArray; |
1548 | JS_ASSERT(script->nfixed == 0); |
1549 | } else { |
1550 | frame.callobj = frame.argsobj = NULL; |
1551 | obj = chain; |
1552 | if (cx->options & JSOPTION_VAROBJFIX) { |
1553 | while ((tmp = OBJ_GET_PARENT(cx, obj)) != NULL) |
1554 | obj = tmp; |
1555 | } |
1556 | frame.varobj = obj; |
1557 | frame.callee = NULL; |
1558 | frame.fun = NULL; |
1559 | frame.thisp = chain; |
1560 | frame.argc = 0; |
1561 | frame.argv = NULL; |
1562 | frame.annotation = NULL; |
1563 | frame.sharpArray = NULL; |
1564 | } |
1565 | |
1566 | frame.imacpc = NULL; |
1567 | if (script->nslots != 0) { |
1568 | frame.slots = js_AllocRawStack(cx, script->nslots, &mark); |
1569 | if (!frame.slots) { |
1570 | ok = JS_FALSE; |
1571 | goto out; |
1572 | } |
1573 | memset(frame.slots, 0, script->nfixed * sizeof(jsval)); |
1574 | } else { |
1575 | frame.slots = NULL; |
1576 | } |
1577 | |
1578 | frame.rval = JSVAL_VOID; |
1579 | frame.down = down; |
1580 | frame.scopeChain = chain; |
1581 | frame.regs = NULL; |
1582 | frame.sharpDepth = 0; |
1583 | frame.flags = flags; |
1584 | frame.dormantNext = NULL; |
1585 | frame.xmlNamespace = NULL; |
1586 | frame.blockChain = NULL; |
1587 | |
1588 | /* |
1589 | * Here we wrap the call to js_Interpret with code to (conditionally) |
1590 | * save and restore the old stack frame chain into a chain of 'dormant' |
1591 | * frame chains. Since we are replacing cx->fp, we were running into |
1592 | * the problem that if GC was called under this frame, some of the GC |
1593 | * things associated with the old frame chain (available here only in |
1594 | * the C variable 'oldfp') were not rooted and were being collected. |
1595 | * |
1596 | * So, now we preserve the links to these 'dormant' frame chains in cx |
1597 | * before calling js_Interpret and clean up afterwards. The GC walks |
1598 | * these dormant chains and marks objects in the same way that it marks |
1599 | * objects in the primary cx->fp chain. |
1600 | */ |
1601 | if (oldfp && oldfp != down) { |
1602 | JS_ASSERT(!oldfp->dormantNext); |
1603 | oldfp->dormantNext = cx->dormantFrameChain; |
1604 | cx->dormantFrameChain = oldfp; |
1605 | } |
1606 | |
1607 | cx->fp = &frame; |
1608 | if (!down) { |
1609 | frame.thisp = OBJ_THIS_OBJECT(cx, frame.thisp); |
1610 | if (!frame.thisp) { |
1611 | ok = JS_FALSE; |
1612 | goto out2; |
1613 | } |
1614 | frame.flags |= JSFRAME_COMPUTED_THIS; |
1615 | } |
1616 | |
1617 | if (hook) { |
1618 | hookData = hook(cx, &frame, JS_TRUE, 0, |
1619 | cx->debugHooks->executeHookData); |
1620 | } |
1621 | |
1622 | ok = js_Interpret(cx); |
1623 | if (result) |
1624 | *result = frame.rval; |
1625 | |
1626 | if (hookData) { |
1627 | hook = cx->debugHooks->executeHook; |
1628 | if (hook) |
1629 | hook(cx, &frame, JS_FALSE, &ok, hookData); |
1630 | } |
1631 | |
1632 | out2: |
1633 | if (mark) |
1634 | js_FreeRawStack(cx, mark); |
1635 | cx->fp = oldfp; |
1636 | |
1637 | if (oldfp && oldfp != down) { |
1638 | JS_ASSERT(cx->dormantFrameChain == oldfp); |
1639 | cx->dormantFrameChain = oldfp->dormantNext; |
1640 | oldfp->dormantNext = NULL; |
1641 | } |
1642 | |
1643 | out: |
1644 | #ifdef INCLUDE_MOZILLA_DTRACE |
1645 | if (JAVASCRIPT_EXECUTE_DONE_ENABLED()) |
1646 | jsdtrace_execute_done(script); |
1647 | #endif |
1648 | return ok; |
1649 | } |
1650 | |
1651 | JSBool |
1652 | js_CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs, |
1653 | JSObject **objp, JSProperty **propp) |
1654 | { |
1655 | JSObject *obj2; |
1656 | JSProperty *prop; |
1657 | uintN oldAttrs, report; |
1658 | JSBool isFunction; |
1659 | jsval value; |
1660 | const char *type, *name; |
1661 | |
1662 | /* |
1663 | * Both objp and propp must be either null or given. When given, *propp |
1664 | * must be null. This way we avoid an extra "if (propp) *propp = NULL" for |
1665 | * the common case of a non-existing property. |
1666 | */ |
1667 | JS_ASSERT(!objp == !propp); |
1668 | JS_ASSERT_IF(propp, !*propp); |
1669 | |
1670 | /* The JSPROP_INITIALIZER case below may generate a warning. Since we must |
1671 |      * drop the property before reporting the warning, we insist on !propp |
1672 |      * to avoid looking up the property again after the report is issued. |
1673 | */ |
1674 | JS_ASSERT_IF(attrs & JSPROP_INITIALIZER, attrs == JSPROP_INITIALIZER); |
1675 | JS_ASSERT_IF(attrs == JSPROP_INITIALIZER, !propp); |
1676 | |
1677 | if (!OBJ_LOOKUP_PROPERTY(cx, obj, id, &obj2, &prop)) |
1678 | return JS_FALSE; |
1679 | if (!prop) |
1680 | return JS_TRUE; |
1681 | |
1682 | /* Use prop as a speedup hint to OBJ_GET_ATTRIBUTES. */ |
1683 | if (!OBJ_GET_ATTRIBUTES(cx, obj2, id, prop, &oldAttrs)) { |
1684 | OBJ_DROP_PROPERTY(cx, obj2, prop); |
1685 | return JS_FALSE; |
1686 | } |
1687 | |
1688 | /* |
1689 | * If our caller doesn't want prop, drop it (we don't need it any longer). |
1690 | */ |
1691 | if (!propp) { |
1692 | OBJ_DROP_PROPERTY(cx, obj2, prop); |
1693 | prop = NULL; |
1694 | } else { |
1695 | *objp = obj2; |
1696 | *propp = prop; |
1697 | } |
1698 | |
1699 | if (attrs == JSPROP_INITIALIZER) { |
1700 | /* Allow the new object to override properties. */ |
1701 | if (obj2 != obj) |
1702 | return JS_TRUE; |
1703 | |
1704 | /* The property must be dropped already. */ |
1705 | JS_ASSERT(!prop); |
1706 | report = JSREPORT_WARNING | JSREPORT_STRICT; |
1707 | } else { |
1708 | /* We allow redeclaring some non-readonly properties. */ |
1709 | if (((oldAttrs | attrs) & JSPROP_READONLY) == 0) { |
1710 | /* Allow redeclaration of variables and functions. */ |
1711 | if (!(attrs & (JSPROP_GETTER | JSPROP_SETTER))) |
1712 | return JS_TRUE; |
1713 | |
1714 | /* |
1715 | * Allow adding a getter only if a property already has a setter |
1716 | * but no getter and similarly for adding a setter. That is, we |
1717 | * allow only the following transitions: |
1718 | * |
1719 | * no-property --> getter --> getter + setter |
1720 | * no-property --> setter --> getter + setter |
1721 | */ |
1722 | if ((~(oldAttrs ^ attrs) & (JSPROP_GETTER | JSPROP_SETTER)) == 0) |
1723 | return JS_TRUE; |
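            /*
             * A worked instance of the test above (illustrative only):
             *
             *   oldAttrs = JSPROP_SETTER, attrs = JSPROP_GETTER:
             *     oldAttrs ^ attrs has both the GETTER and SETTER bits set,
             *     so ~(oldAttrs ^ attrs) has neither, the masked value is 0,
             *     and the setter --> getter + setter transition is allowed.
             *
             *   oldAttrs = JSPROP_GETTER, attrs = JSPROP_GETTER:
             *     the XOR clears both bits, the complement keeps both, the
             *     masked value is nonzero, and redefining an existing getter
             *     is rejected here (though it may still be permitted by the
             *     JSPROP_PERMANENT check below).
             */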
1724 | |
1725 | /* |
1726 | * Allow redeclaration of an impermanent property (in which case |
1727 | * anyone could delete it and redefine it, willy-nilly). |
1728 | */ |
1729 | if (!(oldAttrs & JSPROP_PERMANENT)) |
1730 | return JS_TRUE; |
1731 | } |
1732 | if (prop) |
1733 | OBJ_DROP_PROPERTY(cx, obj2, prop); |
1734 | |
1735 | report = JSREPORT_ERROR; |
1736 | isFunction = (oldAttrs & (JSPROP_GETTER | JSPROP_SETTER)) != 0; |
1737 | if (!isFunction) { |
1738 | if (!OBJ_GET_PROPERTY(cx, obj, id, &value)) |
1739 | return JS_FALSE; |
1740 | isFunction = VALUE_IS_FUNCTION(cx, value); |
1741 | } |
1742 | } |
1743 | |
1744 | type = (attrs == JSPROP_INITIALIZER) |
1745 | ? "property" |
1746 | : (oldAttrs & attrs & JSPROP_GETTER) |
1747 | ? js_getter_str |
1748 | : (oldAttrs & attrs & JSPROP_SETTER) |
1749 | ? js_setter_str |
1750 | : (oldAttrs & JSPROP_READONLY) |
1751 | ? js_const_str |
1752 | : isFunction |
1753 | ? js_function_str |
1754 | : js_var_str; |
1755 | name = js_ValueToPrintableString(cx, ID_TO_VALUE(id)); |
1756 | if (!name) |
1757 | return JS_FALSE; |
1758 | return JS_ReportErrorFlagsAndNumber(cx, report, |
1759 | js_GetErrorMessage, NULL, |
1760 | JSMSG_REDECLARED_VAR, |
1761 | type, name); |
1762 | } |
1763 | |
1764 | JSBool |
1765 | js_StrictlyEqual(JSContext *cx, jsval lval, jsval rval) |
1766 | { |
1767 | jsval ltag = JSVAL_TAG(lval), rtag = JSVAL_TAG(rval); |
1768 | jsdouble ld, rd; |
1769 | |
1770 | if (ltag == rtag) { |
1771 | if (ltag == JSVAL_STRING) { |
1772 | JSString *lstr = JSVAL_TO_STRING(lval), |
1773 | *rstr = JSVAL_TO_STRING(rval); |
1774 | return js_EqualStrings(lstr, rstr); |
1775 | } |
1776 | if (ltag == JSVAL_DOUBLE) { |
1777 | ld = *JSVAL_TO_DOUBLE(lval); |
1778 | rd = *JSVAL_TO_DOUBLE(rval); |
1779 | return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE); |
1780 | } |
1781 | if (ltag == JSVAL_OBJECT && |
1782 | lval != rval && |
1783 | !JSVAL_IS_NULL(lval) && |
1784 | !JSVAL_IS_NULL(rval)) { |
1785 | JSObject *lobj, *robj; |
1786 | |
1787 | lobj = js_GetWrappedObject(cx, JSVAL_TO_OBJECT(lval)); |
1788 | robj = js_GetWrappedObject(cx, JSVAL_TO_OBJECT(rval)); |
1789 | lval = OBJECT_TO_JSVAL(lobj); |
1790 | rval = OBJECT_TO_JSVAL(robj); |
1791 | } |
1792 | return lval == rval; |
1793 | } |
1794 | if (ltag == JSVAL_DOUBLE && JSVAL_IS_INT(rval)) { |
1795 | ld = *JSVAL_TO_DOUBLE(lval); |
1796 | rd = JSVAL_TO_INT(rval); |
1797 | return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE); |
1798 | } |
1799 | if (JSVAL_IS_INT(lval) && rtag == JSVAL_DOUBLE) { |
1800 | ld = JSVAL_TO_INT(lval); |
1801 | rd = *JSVAL_TO_DOUBLE(rval); |
1802 | return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE); |
1803 | } |
1804 | return lval == rval; |
1805 | } |
1806 | |
1807 | JS_REQUIRES_STACK JSBool |
1808 | js_InvokeConstructor(JSContext *cx, uintN argc, JSBool clampReturn, jsval *vp) |
1809 | { |
1810 | JSFunction *fun, *fun2; |
1811 | JSObject *obj, *obj2, *proto, *parent; |
1812 | jsval lval, rval; |
1813 | JSClass *clasp; |
1814 | |
1815 | fun = NULL; |
1816 | obj2 = NULL; |
1817 | lval = *vp; |
1818 | if (!JSVAL_IS_OBJECT(lval) || |
1819 | (obj2 = JSVAL_TO_OBJECT(lval)) == NULL || |
1820 | /* XXX clean up to avoid special cases above ObjectOps layer */ |
1821 | OBJ_GET_CLASS(cx, obj2) == &js_FunctionClass || |
1822 | !obj2->map->ops->construct) |
1823 | { |
1824 | fun = js_ValueToFunction(cx, vp, JSV2F_CONSTRUCT); |
1825 | if (!fun) |
1826 | return JS_FALSE; |
1827 | } |
1828 | |
1829 | clasp = &js_ObjectClass; |
1830 | if (!obj2) { |
1831 | proto = parent = NULL; |
1832 | fun = NULL; |
1833 | } else { |
1834 | /* |
1835 | * Get the constructor prototype object for this function. |
1836 | * Use the nominal 'this' parameter slot, vp[1], as a local |
1837 | * root to protect this prototype, in case it has no other |
1838 | * strong refs. |
1839 | */ |
1840 | if (!OBJ_GET_PROPERTY(cx, obj2, |
1841 | ATOM_TO_JSID(cx->runtime->atomState |
1842 | .classPrototypeAtom), |
1843 | &vp[1])) { |
1844 | return JS_FALSE; |
1845 | } |
1846 | rval = vp[1]; |
1847 | proto = JSVAL_IS_OBJECT(rval) ? JSVAL_TO_OBJECT(rval) : NULL; |
1848 | parent = OBJ_GET_PARENT(cx, obj2); |
1849 | |
1850 | if (OBJ_GET_CLASS(cx, obj2) == &js_FunctionClass) { |
1851 | fun2 = GET_FUNCTION_PRIVATE(cx, obj2); |
1852 | if (!FUN_INTERPRETED(fun2) && fun2->u.n.clasp) |
1853 | clasp = fun2->u.n.clasp; |
1854 | } |
1855 | } |
1856 | obj = js_NewObject(cx, clasp, proto, parent, 0); |
1857 | if (!obj) |
1858 | return JS_FALSE; |
1859 | |
1860 | /* Now we have an object with a constructor method; call it. */ |
1861 | vp[1] = OBJECT_TO_JSVAL(obj); |
1862 | if (!js_Invoke(cx, argc, vp, JSINVOKE_CONSTRUCT)) { |
1863 | cx->weakRoots.newborn[GCX_OBJECT] = NULL; |
1864 | return JS_FALSE; |
1865 | } |
1866 | |
1867 | /* Check the return value and if it's primitive, force it to be obj. */ |
1868 | rval = *vp; |
1869 | if (clampReturn && JSVAL_IS_PRIMITIVE(rval)) { |
1870 | if (!fun) { |
1871 | /* native [[Construct]] returning primitive is error */ |
1872 | JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, |
1873 | JSMSG_BAD_NEW_RESULT, |
1874 | js_ValueToPrintableString(cx, rval)); |
1875 | return JS_FALSE; |
1876 | } |
1877 | *vp = OBJECT_TO_JSVAL(obj); |
1878 | } |
1879 | |
1880 | JS_RUNTIME_METER(cx->runtime, constructs); |
1881 | return JS_TRUE; |
1882 | } |
1883 | |
1884 | JSBool |
1885 | js_InternNonIntElementId(JSContext *cx, JSObject *obj, jsval idval, jsid *idp) |
1886 | { |
1887 | JS_ASSERT(!JSVAL_IS_INT(idval)); |
1888 | |
1889 | #if JS_HAS_XML_SUPPORT |
1890 | if (!JSVAL_IS_PRIMITIVE(idval)) { |
1891 | if (OBJECT_IS_XML(cx, obj)) { |
1892 | *idp = OBJECT_JSVAL_TO_JSID(idval); |
1893 | return JS_TRUE; |
1894 | } |
1895 | if (!js_IsFunctionQName(cx, JSVAL_TO_OBJECT(idval), idp)) |
1896 | return JS_FALSE; |
1897 | if (*idp != 0) |
1898 | return JS_TRUE; |
1899 | } |
1900 | #endif |
1901 | |
1902 | return js_ValueToStringId(cx, idval, idp); |
1903 | } |
1904 | |
1905 | /* |
1906 | * Enter the new with scope using an object at sp[-1] and associate the depth |
1907 | * of the with block with sp + stackIndex. |
1908 | */ |
1909 | JS_STATIC_INTERPRET JS_REQUIRES_STACK JSBool |
1910 | js_EnterWith(JSContext *cx, jsint stackIndex) |
1911 | { |
1912 | JSStackFrame *fp; |
1913 | jsval *sp; |
1914 | JSObject *obj, *parent, *withobj; |
1915 | |
1916 | fp = cx->fp; |
1917 | sp = fp->regs->sp; |
1918 | JS_ASSERT(stackIndex < 0); |
1919 | JS_ASSERT(StackBase(fp) <= sp + stackIndex); |
1920 | |
1921 | if (!JSVAL_IS_PRIMITIVE(sp[-1])) { |
1922 | obj = JSVAL_TO_OBJECT(sp[-1]); |
1923 | } else { |
1924 | obj = js_ValueToNonNullObject(cx, sp[-1]); |
1925 | if (!obj) |
1926 | return JS_FALSE; |
1927 | sp[-1] = OBJECT_TO_JSVAL(obj); |
1928 | } |
1929 | |
1930 | parent = js_GetScopeChain(cx, fp); |
1931 | if (!parent) |
1932 | return JS_FALSE; |
1933 | |
1934 | OBJ_TO_INNER_OBJECT(cx, obj); |
1935 | if (!obj) |
1936 | return JS_FALSE; |
1937 | |
1938 | withobj = js_NewWithObject(cx, obj, parent, |
1939 | sp + stackIndex - StackBase(fp)); |
1940 | if (!withobj) |
1941 | return JS_FALSE; |
1942 | |
1943 | fp->scopeChain = withobj; |
1944 | return JS_TRUE; |
1945 | } |
1946 | |
1947 | JS_STATIC_INTERPRET JS_REQUIRES_STACK void |
1948 | js_LeaveWith(JSContext *cx) |
1949 | { |
1950 | JSObject *withobj; |
1951 | |
1952 | withobj = cx->fp->scopeChain; |
1953 | JS_ASSERT(OBJ_GET_CLASS(cx, withobj) == &js_WithClass); |
1954 | JS_ASSERT(OBJ_GET_PRIVATE(cx, withobj) == cx->fp); |
1955 | JS_ASSERT(OBJ_BLOCK_DEPTH(cx, withobj) >= 0); |
1956 | cx->fp->scopeChain = OBJ_GET_PARENT(cx, withobj); |
1957 | JS_SetPrivate(cx, withobj, NULL); |
1958 | } |
1959 | |
1960 | JS_REQUIRES_STACK JSClass * |
1961 | js_IsActiveWithOrBlock(JSContext *cx, JSObject *obj, int stackDepth) |
1962 | { |
1963 | JSClass *clasp; |
1964 | |
1965 | clasp = OBJ_GET_CLASS(cx, obj); |
1966 | if ((clasp == &js_WithClass || clasp == &js_BlockClass) && |
1967 | OBJ_GET_PRIVATE(cx, obj) == cx->fp && |
1968 | OBJ_BLOCK_DEPTH(cx, obj) >= stackDepth) { |
1969 | return clasp; |
1970 | } |
1971 | return NULL; |
1972 | } |
1973 | |
1974 | /* |
1975 | * Unwind block and scope chains to match the given depth. The function sets |
1976 |  * fp->regs->sp on return to StackBase(fp) + stackDepth. |
1977 | */ |
1978 | JS_REQUIRES_STACK JSBool |
1979 | js_UnwindScope(JSContext *cx, JSStackFrame *fp, jsint stackDepth, |
1980 | JSBool normalUnwind) |
1981 | { |
1982 | JSObject *obj; |
1983 | JSClass *clasp; |
1984 | |
1985 | JS_ASSERT(stackDepth >= 0); |
1986 | JS_ASSERT(StackBase(fp) + stackDepth <= fp->regs->sp); |
1987 | |
1988 | for (obj = fp->blockChain; obj; obj = OBJ_GET_PARENT(cx, obj)) { |
1989 | JS_ASSERT(OBJ_GET_CLASS(cx, obj) == &js_BlockClass); |
1990 | if (OBJ_BLOCK_DEPTH(cx, obj) < stackDepth) |
1991 | break; |
1992 | } |
1993 | fp->blockChain = obj; |
1994 | |
1995 | for (;;) { |
1996 | obj = fp->scopeChain; |
1997 | clasp = js_IsActiveWithOrBlock(cx, obj, stackDepth); |
1998 | if (!clasp) |
1999 | break; |
2000 | if (clasp == &js_BlockClass) { |
2001 | /* Don't fail until after we've updated all stacks. */ |
2002 | normalUnwind &= js_PutBlockObject(cx, normalUnwind); |
2003 | } else { |
2004 | js_LeaveWith(cx); |
2005 | } |
2006 | } |
2007 | |
2008 | fp->regs->sp = StackBase(fp) + stackDepth; |
2009 | return normalUnwind; |
2010 | } |
2011 | |
2012 | JS_STATIC_INTERPRET JSBool |
2013 | js_DoIncDec(JSContext *cx, const JSCodeSpec *cs, jsval *vp, jsval *vp2) |
2014 | { |
2015 | jsval v; |
2016 | jsdouble d; |
2017 | |
2018 | v = *vp; |
2019 | if (JSVAL_IS_DOUBLE(v)) { |
2020 | d = *JSVAL_TO_DOUBLE(v); |
2021 | } else if (JSVAL_IS_INT(v)) { |
2022 | d = JSVAL_TO_INT(v); |
2023 | } else { |
2024 | d = js_ValueToNumber(cx, vp); |
2025 | if (JSVAL_IS_NULL(*vp)) |
2026 | return JS_FALSE; |
2027 | JS_ASSERT(JSVAL_IS_NUMBER(*vp) || *vp == JSVAL_TRUE); |
2028 | |
2029 | /* Store the result of v conversion back in vp for post increments. */ |
2030 |         if ((cs->format & JOF_POST) && |
2031 |             *vp == JSVAL_TRUE && |
2032 |             !js_NewNumberInRootedValue(cx, d, vp)) { |
2033 | return JS_FALSE; |
2034 | } |
2035 | } |
2036 | |
2037 | (cs->format & JOF_INC) ? d++ : d--; |
2038 | if (!js_NewNumberInRootedValue(cx, d, vp2)) |
2039 | return JS_FALSE; |
2040 | |
2041 | if (!(cs->format & JOF_POST)) |
2042 | *vp = *vp2; |
2043 | return JS_TRUE; |
2044 | } |
2045 | |
2046 | jsval& |
2047 | js_GetUpvar(JSContext *cx, uintN level, uintN cookie) |
2048 | { |
2049 | level -= UPVAR_FRAME_SKIP(cookie); |
2050 | JS_ASSERT(level < JS_DISPLAY_SIZE); |
2051 | |
2052 | JSStackFrame *fp = cx->display[level]; |
2053 | JS_ASSERT(fp->script); |
2054 | |
2055 | uintN slot = UPVAR_FRAME_SLOT(cookie); |
2056 | jsval *vp; |
2057 | |
2058 | if (!fp->fun) { |
2059 | vp = fp->slots + fp->script->nfixed; |
2060 | } else if (slot < fp->fun->nargs) { |
2061 | vp = fp->argv; |
2062 | } else if (slot == CALLEE_UPVAR_SLOT) { |
2063 | vp = &fp->argv[-2]; |
2064 | slot = 0; |
2065 | } else { |
2066 | slot -= fp->fun->nargs; |
2067 | JS_ASSERT(slot < fp->script->nslots); |
2068 | vp = fp->slots; |
2069 | } |
2070 | |
2071 | return vp[slot]; |
2072 | } |
2073 | |
2074 | #ifdef DEBUG |
2075 | |
2076 | JS_STATIC_INTERPRET JS_REQUIRES_STACK void |
2077 | js_TraceOpcode(JSContext *cx) |
2078 | { |
2079 | FILE *tracefp; |
2080 | JSStackFrame *fp; |
2081 | JSFrameRegs *regs; |
2082 | intN ndefs, n, nuses; |
2083 | jsval *siter; |
2084 | JSString *str; |
2085 | JSOp op; |
2086 | |
2087 | tracefp = (FILE *) cx->tracefp; |
2088 | JS_ASSERT(tracefp); |
2089 | fp = cx->fp; |
2090 | regs = fp->regs; |
2091 | |
2092 | /* |
2093 | * Operations in prologues don't produce interesting values, and |
2094 | * js_DecompileValueGenerator isn't set up to handle them anyway. |
2095 | */ |
2096 | if (cx->tracePrevPc && regs->pc >= fp->script->main) { |
2097 | JSOp tracePrevOp = JSOp(*cx->tracePrevPc); |
2098 | ndefs = js_GetStackDefs(cx, &js_CodeSpec[tracePrevOp], tracePrevOp, |
2099 | fp->script, cx->tracePrevPc); |
2100 | |
2101 | /* |
2102 | * If there aren't that many elements on the stack, then |
2103 | * we have probably entered a new frame, and printing output |
2104 | * would just be misleading. |
2105 | */ |
2106 | if (ndefs != 0 && |
2107 | ndefs < regs->sp - fp->slots) { |
2108 | for (n = -ndefs; n < 0; n++) { |
2109 | char *bytes = js_DecompileValueGenerator(cx, n, regs->sp[n], |
2110 | NULL); |
2111 | if (bytes) { |
2112 | fprintf(tracefp, "%s %s", |
2113 | (n == -ndefs) ? " output:" : ",", |
2114 | bytes); |
2115 | JS_free(cx, bytes); |
2116 | } |
2117 | } |
2118 | fprintf(tracefp, " @ %u\n", (uintN) (regs->sp - StackBase(fp))); |
2119 | } |
2120 | fprintf(tracefp, " stack: "); |
2121 | for (siter = StackBase(fp); siter < regs->sp; siter++) { |
2122 | str = js_ValueToString(cx, *siter); |
2123 | if (!str) |
2124 | fputs("<null>", tracefp); |
2125 | else |
2126 | js_FileEscapedString(tracefp, str, 0); |
2127 | fputc(' ', tracefp); |
2128 | } |
2129 | fputc('\n', tracefp); |
2130 | } |
2131 | |
2132 | fprintf(tracefp, "%4u: ", |
2133 | js_PCToLineNumber(cx, fp->script, fp->imacpc ? fp->imacpc : regs->pc)); |
2134 | js_Disassemble1(cx, fp->script, regs->pc, |
2135 | PTRDIFF(regs->pc, fp->script->code, jsbytecode), |
2136 | JS_FALSE, tracefp); |
2137 | op = (JSOp) *regs->pc; |
2138 | nuses = js_GetStackUses(&js_CodeSpec[op], op, regs->pc); |
2139 | if (nuses != 0) { |
2140 | for (n = -nuses; n < 0; n++) { |
2141 | char *bytes = js_DecompileValueGenerator(cx, n, regs->sp[n], |
2142 | NULL); |
2143 | if (bytes) { |
2144 | fprintf(tracefp, "%s %s", |
2145 | (n == -nuses) ? " inputs:" : ",", |
2146 | bytes); |
2147 | JS_free(cx, bytes); |
2148 | } |
2149 | } |
2150 | fprintf(tracefp, " @ %u\n", (uintN) (regs->sp - StackBase(fp))); |
2151 | } |
2152 | cx->tracePrevPc = regs->pc; |
2153 | |
2154 | /* It's nice to have complete traces when debugging a crash. */ |
2155 | fflush(tracefp); |
2156 | } |
2157 | |
2158 | #endif /* DEBUG */ |
2159 | |
2160 | #ifdef JS_OPMETER |
2161 | |
2162 | # include <stdlib.h> |
2163 | |
2164 | # define HIST_NSLOTS 8 |
2165 | |
2166 | /* |
2167 | * The second dimension is hardcoded at 256 because we know that many bits fit |
2168 | * in a byte, and mainly to optimize away multiplying by JSOP_LIMIT to address |
2169 | * any particular row. |
2170 | */ |
2171 | static uint32 succeeds[JSOP_LIMIT][256]; |
2172 | static uint32 slot_ops[JSOP_LIMIT][HIST_NSLOTS]; |
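/*
 * Illustrative addressing arithmetic (not used by the code, just the reason
 * for the 256): with a second dimension of 256,
 *
 *   &succeeds[op1][op2] == (uint32 *) succeeds + (op1 << 8) + op2
 *
 * so indexing needs only a shift rather than a multiplication by JSOP_LIMIT,
 * which is not a power of two.
 */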
2173 | |
2174 | JS_STATIC_INTERPRET void |
2175 | js_MeterOpcodePair(JSOp op1, JSOp op2) |
2176 | { |
2177 | if (op1 != JSOP_STOP) |
2178 | ++succeeds[op1][op2]; |
2179 | } |
2180 | |
2181 | JS_STATIC_INTERPRET void |
2182 | js_MeterSlotOpcode(JSOp op, uint32 slot) |
2183 | { |
2184 | if (slot < HIST_NSLOTS) |
2185 | ++slot_ops[op][slot]; |
2186 | } |
2187 | |
2188 | typedef struct Edge { |
2189 | const char *from; |
2190 | const char *to; |
2191 | uint32 count; |
2192 | } Edge; |
2193 | |
2194 | static int |
2195 | compare_edges(const void *a, const void *b) |
2196 | { |
2197 | const Edge *ea = (const Edge *) a; |
2198 | const Edge *eb = (const Edge *) b; |
2199 | |
2200 | return (int32)eb->count - (int32)ea->count; |
2201 | } |
2202 | |
2203 | void |
2204 | js_DumpOpMeters() |
2205 | { |
2206 | const char *name, *from, *style; |
2207 | FILE *fp; |
2208 | uint32 total, count; |
2209 | uint32 i, j, nedges; |
2210 | Edge *graph; |
2211 | |
2212 | name = getenv("JS_OPMETER_FILE"); |
2213 | if (!name) |
2214 | name = "/tmp/ops.dot"; |
2215 | fp = fopen(name, "w"); |
2216 | if (!fp) { |
2217 | perror(name); |
2218 | return; |
2219 | } |
2220 | |
2221 | total = nedges = 0; |
2222 | for (i = 0; i < JSOP_LIMIT; i++) { |
2223 | for (j = 0; j < JSOP_LIMIT; j++) { |
2224 | count = succeeds[i][j]; |
2225 | if (count != 0) { |
2226 | total += count; |
2227 | ++nedges; |
2228 | } |
2229 | } |
2230 | } |
2231 | |
2232 | # define SIGNIFICANT(count,total) (200. * (count) >= (total)) |
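    /*
     * Equivalently: keep an edge only when count / total >= 1/200, i.e. the
     * opcode pair accounts for at least 0.5% of all metered transitions.
     */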
2233 | |
2234 | graph = (Edge *) calloc(nedges, sizeof graph[0]); |
2235 | for (i = nedges = 0; i < JSOP_LIMIT; i++) { |
2236 | from = js_CodeName[i]; |
2237 | for (j = 0; j < JSOP_LIMIT; j++) { |
2238 | count = succeeds[i][j]; |
2239 | if (count != 0 && SIGNIFICANT(count, total)) { |
2240 | graph[nedges].from = from; |
2241 | graph[nedges].to = js_CodeName[j]; |
2242 | graph[nedges].count = count; |
2243 | ++nedges; |
2244 | } |
2245 | } |
2246 | } |
2247 | qsort(graph, nedges, sizeof(Edge), compare_edges); |
2248 | |
2249 | # undef SIGNIFICANT |
2250 | |
2251 | fputs("digraph {\n", fp); |
2252 | for (i = 0, style = NULL; i < nedges; i++) { |
2253 | JS_ASSERT(i == 0 || graph[i-1].count >= graph[i].count); |
2254 | if (!style || graph[i-1].count != graph[i].count) { |
2255 | style = (i > nedges * .75) ? "dotted" : |
2256 | (i > nedges * .50) ? "dashed" : |
2257 | (i > nedges * .25) ? "solid" : "bold"; |
2258 | } |
2259 | fprintf(fp, " %s -> %s [label=\"%lu\" style=%s]\n", |
2260 | graph[i].from, graph[i].to, |
2261 | (unsigned long)graph[i].count, style); |
2262 | } |
2263 | free(graph); |
2264 | fputs("}\n", fp); |
2265 | fclose(fp); |
2266 | |
2267 | name = getenv("JS_OPMETER_HIST"); |
2268 | if (!name) |
2269 | name = "/tmp/ops.hist"; |
2270 | fp = fopen(name, "w"); |
2271 | if (!fp) { |
2272 | perror(name); |
2273 | return; |
2274 | } |
2275 | fputs("bytecode", fp); |
2276 | for (j = 0; j < HIST_NSLOTS; j++) |
2277 | fprintf(fp, " slot %1u", (unsigned)j); |
2278 | putc('\n', fp); |
2279 | fputs("========", fp); |
2280 | for (j = 0; j < HIST_NSLOTS; j++) |
2281 | fputs(" =======", fp); |
2282 | putc('\n', fp); |
2283 | for (i = 0; i < JSOP_LIMIT; i++) { |
2284 | for (j = 0; j < HIST_NSLOTS; j++) { |
2285 | if (slot_ops[i][j] != 0) { |
2286 | /* Reuse j in the next loop, since we break after. */ |
2287 | fprintf(fp, "%-8.8s", js_CodeName[i]); |
2288 | for (j = 0; j < HIST_NSLOTS; j++) |
2289 | fprintf(fp, " %7lu", (unsigned long)slot_ops[i][j]); |
2290 | putc('\n', fp); |
2291 | break; |
2292 | } |
2293 | } |
2294 | } |
2295 | fclose(fp); |
2296 | } |
2297 | |
2298 | #endif /* JS_OPMETER */ |
2299 | |
2300 | #endif /* !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___ */ |
2301 | |
2302 | #ifndef jsinvoke_cpp___ |
2303 | |
2304 | #define PUSH(v) (*regs.sp++ = (v)) |
2305 | #define PUSH_OPND(v) PUSH(v) |
2306 | #define STORE_OPND(n,v) (regs.sp[n] = (v)) |
2307 | #define POP() (*--regs.sp) |
2308 | #define POP_OPND() POP() |
2309 | #define FETCH_OPND(n) (regs.sp[n]) |
2310 | |
2311 | /* |
2312 |  * Store the jsdouble d at regs.sp[n], using regs from the lexical |
2313 |  * environment. Try to convert d to a jsint that fits in a jsval; otherwise |
2314 |  * GC-alloc space for it and store a reference. |
2315 | */ |
2316 | #define STORE_NUMBER(cx, n, d) \ |
2317 | JS_BEGIN_MACRO \ |
2318 | jsint i_; \ |
2319 | \ |
2320 | if (JSDOUBLE_IS_INT(d, i_) && INT_FITS_IN_JSVAL(i_)) \ |
2321 | regs.sp[n] = INT_TO_JSVAL(i_); \ |
2322 | else if (!js_NewDoubleInRootedValue(cx, d, ®s.sp[n])) \ |
2323 | goto error; \ |
2324 | JS_END_MACRO |
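/*
 * Typical use, mirroring the pattern of the binary arithmetic opcode cases
 * later in js_Interpret (sketch only; FETCH_NUMBER is defined just below,
 * and the real JSOP_ADD case must also handle string operands):
 *
 *   FETCH_NUMBER(cx, -1, d2);
 *   FETCH_NUMBER(cx, -2, d);
 *   d += d2;
 *   regs.sp--;
 *   STORE_NUMBER(cx, -1, d);
 *
 * The sum replaces the two operands at the new top of stack, either as a
 * tagged int or as a freshly allocated, rooted double.
 */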
2325 | |
2326 | #define STORE_INT(cx, n, i) \ |
2327 | JS_BEGIN_MACRO \ |
2328 | if (INT_FITS_IN_JSVAL(i)) \ |
2329 | regs.sp[n] = INT_TO_JSVAL(i); \ |
2330 | else if (!js_NewDoubleInRootedValue(cx, (jsdouble) (i), ®s.sp[n])) \ |
2331 | goto error; \ |
2332 | JS_END_MACRO |
2333 | |
2334 | #define STORE_UINT(cx, n, u) \ |
2335 | JS_BEGIN_MACRO \ |
2336 | if ((u) <= JSVAL_INT_MAX) \ |
2337 | regs.sp[n] = INT_TO_JSVAL(u); \ |
2338 | else if (!js_NewDoubleInRootedValue(cx, (jsdouble) (u), ®s.sp[n])) \ |
2339 | goto error; \ |
2340 | JS_END_MACRO |
2341 | |
2342 | #define FETCH_NUMBER(cx, n, d) \ |
2343 | JS_BEGIN_MACRO \ |
2344 | jsval v_; \ |
2345 | \ |
2346 | v_ = FETCH_OPND(n); \ |
2347 | VALUE_TO_NUMBER(cx, n, v_, d); \ |
2348 | JS_END_MACRO |
2349 | |
2350 | #define FETCH_INT(cx, n, i) \ |
2351 | JS_BEGIN_MACRO \ |
2352 | jsval v_; \ |
2353 | \ |
2354 | v_= FETCH_OPND(n); \ |
2355 | if (JSVAL_IS_INT(v_)) { \ |
2356 | i = JSVAL_TO_INT(v_); \ |
2357 | } else { \ |
2358 | i = js_ValueToECMAInt32(cx, ®s.sp[n]); \ |
2359 | if (JSVAL_IS_NULL(regs.sp[n])) \ |
2360 | goto error; \ |
2361 | } \ |
2362 | JS_END_MACRO |
2363 | |
2364 | #define FETCH_UINT(cx, n, ui) \ |
2365 | JS_BEGIN_MACRO \ |
2366 | jsval v_; \ |
2367 | \ |
2368 | v_= FETCH_OPND(n); \ |
2369 | if (JSVAL_IS_INT(v_)) { \ |
2370 | ui = (uint32) JSVAL_TO_INT(v_); \ |
2371 | } else { \ |
2372 | ui = js_ValueToECMAUint32(cx, ®s.sp[n]); \ |
2373 | if (JSVAL_IS_NULL(regs.sp[n])) \ |
2374 | goto error; \ |
2375 | } \ |
2376 | JS_END_MACRO |
2377 | |
2378 | /* |
2379 | * Optimized conversion macros that test for the desired type in v before |
2380 | * homing sp and calling a conversion function. |
2381 | */ |
2382 | #define VALUE_TO_NUMBER(cx, n, v, d) \ |
2383 | JS_BEGIN_MACRO \ |
2384 | JS_ASSERT(v == regs.sp[n]); \ |
2385 | if (JSVAL_IS_INT(v)) { \ |
2386 | d = (jsdouble)JSVAL_TO_INT(v); \ |
2387 | } else if (JSVAL_IS_DOUBLE(v)) { \ |
2388 | d = *JSVAL_TO_DOUBLE(v); \ |
2389 | } else { \ |
2390 | d = js_ValueToNumber(cx, ®s.sp[n]); \ |
2391 | if (JSVAL_IS_NULL(regs.sp[n])) \ |
2392 | goto error; \ |
2393 | JS_ASSERT(JSVAL_IS_NUMBER(regs.sp[n]) || \ |
2394 | regs.sp[n] == JSVAL_TRUE); \ |
2395 | } \ |
2396 | JS_END_MACRO |
2397 | |
2398 | #define POP_BOOLEAN(cx, v, b) \ |
2399 | JS_BEGIN_MACRO \ |
2400 | v = FETCH_OPND(-1); \ |
2401 | if (v == JSVAL_NULL) { \ |
2402 | b = JS_FALSE; \ |
2403 | } else if (JSVAL_IS_BOOLEAN(v)) { \ |
2404 | b = JSVAL_TO_BOOLEAN(v); \ |
2405 | } else { \ |
2406 | b = js_ValueToBoolean(v); \ |
2407 | } \ |
2408 | regs.sp--; \ |
2409 | JS_END_MACRO |
2410 | |
2411 | #define VALUE_TO_OBJECT(cx, n, v, obj) \ |
2412 | JS_BEGIN_MACRO \ |
2413 | if (!JSVAL_IS_PRIMITIVE(v)) { \ |
2414 | obj = JSVAL_TO_OBJECT(v); \ |
2415 | } else { \ |
2416 | obj = js_ValueToNonNullObject(cx, v); \ |
2417 | if (!obj) \ |
2418 | goto error; \ |
2419 | STORE_OPND(n, OBJECT_TO_JSVAL(obj)); \ |
2420 | } \ |
2421 | JS_END_MACRO |
2422 | |
2423 | #define FETCH_OBJECT(cx, n, v, obj) \ |
2424 | JS_BEGIN_MACRO \ |
2425 | v = FETCH_OPND(n); \ |
2426 | VALUE_TO_OBJECT(cx, n, v, obj); \ |
2427 | JS_END_MACRO |
2428 | |
2429 | #define DEFAULT_VALUE(cx, n, hint, v) \ |
2430 | JS_BEGIN_MACRO \ |
2431 | JS_ASSERT(!JSVAL_IS_PRIMITIVE(v)); \ |
2432 | JS_ASSERT(v == regs.sp[n]); \ |
2433 | if (!OBJ_DEFAULT_VALUE(cx, JSVAL_TO_OBJECT(v), hint, ®s.sp[n])) \ |
2434 | goto error; \ |
2435 | v = regs.sp[n]; \ |
2436 | JS_END_MACRO |
2437 | |
2438 | /* |
2439 | * Quickly test if v is an int from the [-2**29, 2**29) range, that is, when |
2440 | * the lowest bit of v is 1 and the bits 30 and 31 are both either 0 or 1. For |
2441 | * such v we can do increment or decrement via adding or subtracting two |
2442 | * without checking that the result overflows JSVAL_INT_MIN or JSVAL_INT_MAX. |
2443 | */ |
2444 | #define CAN_DO_FAST_INC_DEC(v) (((((v) << 1) ^ v) & 0x80000001) == 1) |
2445 | |
2446 | JS_STATIC_ASSERT(JSVAL_INT == 1); |
2447 | JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(INT_TO_JSVAL(JSVAL_INT_MIN))); |
2448 | JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(INT_TO_JSVAL(JSVAL_INT_MAX))); |
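/*
 * Worked examples of the test, using the int-jsval encoding v = (i << 1) | 1
 * (illustration only):
 *
 *   i = 5:                v = 0xB, ((v << 1) ^ v) & 0x80000001 == 1, so the
 *                         fast path applies and v + 2 == INT_TO_JSVAL(6).
 *
 *   i = JSVAL_INT_MAX:    v = 0x7FFFFFFF, bits 30 and 31 of v differ, the
 *                         masked value is 0x80000001 != 1, and the opcode
 *                         falls back to the general number path.
 */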
2449 | |
2450 | /* |
2451 | * Conditional assert to detect failure to clear a pending exception that is |
2452 | * suppressed (or unintentional suppression of a wanted exception). |
2453 | */ |
2454 | #if defined DEBUG_brendan || defined DEBUG_mrbkap || defined DEBUG_shaver |
2455 | # define DEBUG_NOT_THROWING 1 |
2456 | #endif |
2457 | |
2458 | #ifdef DEBUG_NOT_THROWING |
2459 | # define ASSERT_NOT_THROWING(cx) JS_ASSERT(!(cx)->throwing) |
2460 | #else |
2461 | # define ASSERT_NOT_THROWING(cx) /* nothing */ |
2462 | #endif |
2463 | |
2464 | /* |
2465 | * Define JS_OPMETER to instrument bytecode succession, generating a .dot file |
2466 | * on shutdown that shows the graph of significant predecessor/successor pairs |
2467 | * executed, where the edge labels give the succession counts. The .dot file |
2468 | * is named by the JS_OPMETER_FILE envariable, and defaults to /tmp/ops.dot. |
2469 | * |
2470 | * Bonus feature: JS_OPMETER also enables counters for stack-addressing ops |
2471 | * such as JSOP_GETLOCAL, JSOP_INCARG, via METER_SLOT_OP. The resulting counts |
2472 | * are written to JS_OPMETER_HIST, defaulting to /tmp/ops.hist. |
2473 | */ |
2474 | #ifndef JS_OPMETER |
2475 | # define METER_OP_INIT(op) /* nothing */ |
2476 | # define METER_OP_PAIR(op1,op2) /* nothing */ |
2477 | # define METER_SLOT_OP(op,slot) /* nothing */ |
2478 | #else |
2479 | |
2480 | /* |
2481 | * The second dimension is hardcoded at 256 because we know that many bits fit |
2482 | * in a byte, and mainly to optimize away multiplying by JSOP_LIMIT to address |
2483 | * any particular row. |
2484 | */ |
2485 | # define METER_OP_INIT(op) ((op) = JSOP_STOP) |
2486 | # define METER_OP_PAIR(op1,op2) (js_MeterOpcodePair(op1, op2)) |
2487 | # define METER_SLOT_OP(op,slot) (js_MeterSlotOpcode(op, slot)) |
2488 | |
2489 | #endif |
2490 | |
2491 | #define MAX_INLINE_CALL_COUNT 3000 |
2492 | |
2493 | /* |
2494 | * Threaded interpretation via computed goto appears to be well-supported by |
2495 | * GCC 3 and higher. IBM's C compiler when run with the right options (e.g., |
2496 | * -qlanglvl=extended) also supports threading. Ditto the SunPro C compiler. |
2497 | * Currently it's broken for JS_VERSION < 160, though this isn't worth fixing. |
2498 | * Add your compiler support macros here. |
2499 | */ |
2500 | #ifndef JS_THREADED_INTERP |
2501 | # if JS_VERSION >= 160 && ( \ |
2502 | __GNUC__ >= 3 || \ |
2503 | (__IBMC__ >= 700 && defined __IBM_COMPUTED_GOTO) || \ |
2504 | __SUNPRO_C >= 0x570) |
2505 | # define JS_THREADED_INTERP 1 |
2506 | # else |
2507 | # define JS_THREADED_INTERP 0 |
2508 | # endif |
2509 | #endif |
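/*
 * For readers new to the technique, a minimal standalone sketch of
 * computed-goto dispatch (a GCC extension) follows. It is illustrative only
 * -- the opcode set and function name are invented -- and it is compiled
 * out.
 */
#if 0
static int
threaded_dispatch_sketch(const unsigned char *pc)
{
    /* Opcodes: 0 = INC, 1 = DEC, 2 = HALT. */
    static void *const dispatch[] = { &&op_inc, &&op_dec, &&op_halt };
    int acc = 0;

# define DISPATCH() goto *dispatch[*pc++]
    DISPATCH();             /* each handler jumps straight to the next one */
  op_inc:  ++acc; DISPATCH();
  op_dec:  --acc; DISPATCH();
  op_halt: return acc;
# undef DISPATCH
}
#endif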
2510 | |
2511 | /* |
2512 | * Deadlocks or else bad races are likely if JS_THREADSAFE, so we must rely on |
2513 | * single-thread DEBUG js shell testing to verify property cache hits. |
2514 | */ |
2515 | #if defined DEBUG && !defined JS_THREADSAFE |
2516 | |
2517 | # define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) \ |
2518 | JS_BEGIN_MACRO \ |
2519 | if (!AssertValidPropertyCacheHit(cx, script, regs, pcoff, obj, pobj, \ |
2520 | entry)) { \ |
2521 | goto error; \ |
2522 | } \ |
2523 | JS_END_MACRO |
2524 | |
2525 | static bool |
2526 | AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, JSFrameRegs& regs, |
2527 | ptrdiff_t pcoff, JSObject *start, JSObject *found, |
2528 | JSPropCacheEntry *entry) |
2529 | { |
2530 | uint32 sample = cx->runtime->gcNumber; |
2531 | |
2532 | JSAtom *atom; |
2533 | if (pcoff >= 0) |
2534 | GET_ATOM_FROM_BYTECODE(script, regs.pc, pcoff, atom); |
2535 | else |
2536 | atom = cx->runtime->atomState.lengthAtom; |
2537 | |
2538 | JSObject *obj, *pobj; |
2539 | JSProperty *prop; |
2540 | bool ok; |
2541 | |
2542 | if (JOF_OPMODE(*regs.pc) == JOF_NAME) { |
2543 | ok = js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &pobj, &prop); |
2544 | } else { |
2545 | obj = start; |
2546 | ok = js_LookupProperty(cx, obj, ATOM_TO_JSID(atom), &pobj, &prop); |
2547 | } |
2548 | if (!ok) |
2549 | return false; |
2550 | if (!prop) |
2551 | return true; |
2552 | if (cx->runtime->gcNumber != sample || |
2553 | PCVCAP_SHAPE(entry->vcap) != OBJ_SHAPE(pobj)) { |
2554 | OBJ_DROP_PROPERTY(cx, pobj, prop); |
2555 | return true; |
2556 | } |
2557 | JS_ASSERT(prop); |
2558 | JS_ASSERT(pobj == found); |
2559 | |
2560 | JSScopeProperty *sprop = (JSScopeProperty *) prop; |
2561 | if (PCVAL_IS_SLOT(entry->vword)) { |
2562 | JS_ASSERT(PCVAL_TO_SLOT(entry->vword) == sprop->slot); |
2563 | } else if (PCVAL_IS_SPROP(entry->vword)) { |
2564 | JS_ASSERT(PCVAL_TO_SPROP(entry->vword) == sprop); |
2565 | } else { |
2566 | jsval v; |
2567 | JS_ASSERT(PCVAL_IS_OBJECT(entry->vword)); |
2568 | JS_ASSERT(entry->vword != PCVAL_NULL); |
2569 | JS_ASSERT(SCOPE_IS_BRANDED(OBJ_SCOPE(pobj))); |
2570 | JS_ASSERT(SPROP_HAS_STUB_GETTER(sprop)); |
2571 | JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(pobj))); |
2572 | v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot); |
2573 | JS_ASSERT(VALUE_IS_FUNCTION(cx, v)); |
2574 | JS_ASSERT(PCVAL_TO_OBJECT(entry->vword) == JSVAL_TO_OBJECT(v)); |
2575 | } |
2576 | |
2577 | OBJ_DROP_PROPERTY(cx, pobj, prop); |
2578 | return true; |
2579 | } |
2580 | |
2581 | #else |
2582 | # define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) ((void) 0) |
2583 | #endif |
2584 | |
2585 | /* |
2586 |  * Ensure that the interpreter switch can close call-bytecode cases in the |
2587 | * same way as non-call bytecodes. |
2588 | */ |
2589 | JS_STATIC_ASSERT(JSOP_NAME_LENGTH == JSOP_CALLNAME_LENGTH); |
2590 | JS_STATIC_ASSERT(JSOP_GETGVAR_LENGTH == JSOP_CALLGVAR_LENGTH); |
2591 | JS_STATIC_ASSERT(JSOP_GETUPVAR_LENGTH == JSOP_CALLUPVAR_LENGTH); |
2592 | JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_CALLUPVAR_DBG_LENGTH); |
2593 | JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_GETUPVAR_LENGTH); |
2594 | JS_STATIC_ASSERT(JSOP_GETDSLOT_LENGTH == JSOP_CALLDSLOT_LENGTH); |
2595 | JS_STATIC_ASSERT(JSOP_GETARG_LENGTH == JSOP_CALLARG_LENGTH); |
2596 | JS_STATIC_ASSERT(JSOP_GETLOCAL_LENGTH == JSOP_CALLLOCAL_LENGTH); |
2597 | JS_STATIC_ASSERT(JSOP_XMLNAME_LENGTH == JSOP_CALLXMLNAME_LENGTH); |
2598 | |
2599 | /* |
2600 | * Same for debuggable flat closures defined at top level in another function |
2601 | * or program fragment. |
2602 | */ |
2603 | JS_STATIC_ASSERT(JSOP_DEFFUN_FC_LENGTH == JSOP_DEFFUN_DBGFC_LENGTH); |
2604 | |
2605 | /* |
2606 | * Same for JSOP_SETNAME and JSOP_SETPROP, which differ only slightly but |
2607 | * remain distinct for the decompiler. |
2608 | */ |
2609 | JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH); |
2610 | |
2611 | /* See TRY_BRANCH_AFTER_COND. */ |
2612 | JS_STATIC_ASSERT(JSOP_IFNE_LENGTH == JSOP_IFEQ_LENGTH); |
2613 | JS_STATIC_ASSERT(JSOP_IFNE == JSOP_IFEQ + 1); |
2614 | |
2615 | /* For the fastest case under JSOP_INCNAME, etc. */ |
2616 | JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_DECNAME_LENGTH); |
2617 | JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_NAMEINC_LENGTH); |
2618 | JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_NAMEDEC_LENGTH); |
2619 | |
2620 | #ifdef JS_TRACER |
2621 | # define ABORT_RECORDING(cx, reason) \ |
2622 | JS_BEGIN_MACRO \ |
2623 | if (TRACE_RECORDER(cx)) \ |
2624 | js_AbortRecording(cx, reason); \ |
2625 | JS_END_MACRO |
2626 | #else |
2627 | # define ABORT_RECORDING(cx, reason) ((void) 0) |
2628 | #endif |
2629 | |
2630 | JS_REQUIRES_STACK JSBool |
2631 | js_Interpret(JSContext *cx) |
2632 | { |
2633 | JSRuntime *rt; |
2634 | JSStackFrame *fp; |
2635 | JSScript *script; |
2636 | uintN inlineCallCount; |
2637 | JSAtom **atoms; |
2638 | JSVersion currentVersion, originalVersion; |
2639 | JSFrameRegs regs; |
2640 | JSObject *obj, *obj2, *parent; |
2641 | JSBool ok, cond; |
2642 | jsint len; |
2643 | jsbytecode *endpc, *pc2; |
2644 | JSOp op, op2; |
2645 | jsatomid index; |
2646 | JSAtom *atom; |
2647 | uintN argc, attrs, flags; |
2648 | uint32 slot; |
2649 | jsval *vp, lval, rval, ltmp, rtmp; |
2650 | jsid id; |
2651 | JSProperty *prop; |
2652 | JSScopeProperty *sprop; |
2653 | JSString *str, *str2; |
2654 | jsint i, j; |
2655 | jsdouble d, d2; |
2656 | JSClass *clasp; |
2657 | JSFunction *fun; |
2658 | JSType type; |
2659 | jsint low, high, off, npairs; |
2660 | JSBool match; |
2661 | #if JS_HAS_GETTER_SETTER |
2662 | JSPropertyOp getter, setter; |
2663 | #endif |
2664 | JSAutoResolveFlags rf(cx, JSRESOLVE_INFER); |
2665 | |
2666 | #ifdef __GNUC__ |
2667 | # define JS_EXTENSION __extension__ |
2668 | # define JS_EXTENSION_(s) __extension__ ({ s; }) |
2669 | #else |
2670 | # define JS_EXTENSION |
2671 | # define JS_EXTENSION_(s) s |
2672 | #endif |
2673 | |
2674 | # ifdef DEBUG |
2675 | /* |
2676 | * We call this macro from BEGIN_CASE in threaded interpreters, |
2677 | * and before entering the switch in non-threaded interpreters. |
2678 | * However, reaching such points doesn't mean we've actually |
2679 | * fetched an OP from the instruction stream: some opcodes use |
2680 | * 'op=x; DO_OP()' to let another opcode's implementation finish |
2681 | * their work, and many opcodes share entry points with a run of |
2682 | * consecutive BEGIN_CASEs. |
2683 | * |
2684 | * Take care to trace OP only when it is the opcode fetched from |
2685 | * the instruction stream, so the trace matches what one would |
2686 | * expect from looking at the code. (We do omit POPs after SETs; |
2687 | * unfortunate, but not worth fixing.) |
2688 | */ |
2689 | # define TRACE_OPCODE(OP) JS_BEGIN_MACRO \ |
2690 | if (JS_UNLIKELY(cx->tracefp != NULL) && \ |
2691 | (OP) == *regs.pc) \ |
2692 | js_TraceOpcode(cx); \ |
2693 | JS_END_MACRO |
2694 | # else |
2695 | # define TRACE_OPCODE(OP) ((void) 0) |
2696 | # endif |
2697 | |
2698 | #if JS_THREADED_INTERP |
2699 | static void *const normalJumpTable[] = { |
2700 | # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \ |
2701 | JS_EXTENSION &&L_##op, |
2702 | # include "jsopcode.tbl" |
2703 | # undef OPDEF |
2704 | }; |
2705 | |
2706 | static void *const interruptJumpTable[] = { |
2707 | # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \ |
2708 | JS_EXTENSION &&interrupt, |
2709 | # include "jsopcode.tbl" |
2710 | # undef OPDEF |
2711 | }; |
2712 | |
2713 | register void * const *jumpTable = normalJumpTable; |
2714 | |
2715 | METER_OP_INIT(op); /* to nullify first METER_OP_PAIR */ |
2716 | |
2717 | # define ENABLE_INTERRUPTS() ((void) (jumpTable = interruptJumpTable)) |
2718 | |
2719 | # ifdef JS_TRACER |
2720 | # define CHECK_RECORDER() \ |
2721 | JS_ASSERT_IF(TRACE_RECORDER(cx), jumpTable == interruptJumpTable) |
2722 | # else |
2723 | # define CHECK_RECORDER() ((void)0) |
2724 | # endif |
2725 | |
2726 | # define DO_OP() JS_BEGIN_MACRO \ |
2727 | CHECK_RECORDER(); \ |
2728 | JS_EXTENSION_(goto *jumpTable[op]); \ |
2729 | JS_END_MACRO |
2730 | # define DO_NEXT_OP(n) JS_BEGIN_MACRO \ |
2731 | METER_OP_PAIR(op, regs.pc[n]); \ |
2732 | op = (JSOp) *(regs.pc += (n)); \ |
2733 | DO_OP(); \ |
2734 | JS_END_MACRO |
2735 | |
2736 | # define BEGIN_CASE(OP) L_##OP: TRACE_OPCODE(OP); CHECK_RECORDER(); |
2737 | # define END_CASE(OP) DO_NEXT_OP(OP##_LENGTH); |
2738 | # define END_VARLEN_CASE DO_NEXT_OP(len); |
2739 | # define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP) \ |
2740 | JS_ASSERT(js_CodeSpec[OP].length == 1); \ |
2741 | op = (JSOp) *++regs.pc; \ |
2742 | DO_OP(); |
2743 | |
2744 | # define END_EMPTY_CASES |
2745 | |
2746 | #else /* !JS_THREADED_INTERP */ |
2747 | |
2748 | register intN switchMask = 0; |
2749 | intN switchOp; |
2750 | |
2751 | # define ENABLE_INTERRUPTS() ((void) (switchMask = -1)) |
2752 | |
2753 | # ifdef JS_TRACER |
2754 | # define CHECK_RECORDER() \ |
2755 | JS_ASSERT_IF(TRACE_RECORDER(cx), switchMask == -1) |
2756 | # else |
2757 | # define CHECK_RECORDER() ((void)0) |
2758 | # endif |
2759 | |
2760 | # define DO_OP() goto do_op |
2761 | # define DO_NEXT_OP(n) JS_BEGIN_MACRO \ |
2762 | JS_ASSERT((n) == len); \ |
2763 | goto advance_pc; \ |
2764 | JS_END_MACRO |
2765 | |
2766 | # define BEGIN_CASE(OP) case OP: CHECK_RECORDER(); |
2767 | # define END_CASE(OP) END_CASE_LEN(OP##_LENGTH) |
2768 | # define END_CASE_LEN(n) END_CASE_LENX(n) |
2769 | # define END_CASE_LENX(n) END_CASE_LEN##n |
2770 | |
2771 | /* |
2772 | * To share the code for all len == 1 cases we use the specialized label with |
2773 | * code that falls through to advance_pc: . |
2774 | */ |
2775 | # define END_CASE_LEN1 goto advance_pc_by_one; |
2776 | # define END_CASE_LEN2 len = 2; goto advance_pc; |
2777 | # define END_CASE_LEN3 len = 3; goto advance_pc; |
2778 | # define END_CASE_LEN4 len = 4; goto advance_pc; |
2779 | # define END_CASE_LEN5 len = 5; goto advance_pc; |
2780 | # define END_VARLEN_CASE goto advance_pc; |
2781 | # define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP) |
2782 | # define END_EMPTY_CASES goto advance_pc_by_one; |
2783 | |
2784 | #endif /* !JS_THREADED_INTERP */ |
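/*
 * Expansion walk-through for the non-threaded END_CASE (illustrative):
 *
 *   END_CASE(JSOP_POP)
 *     -> END_CASE_LEN(JSOP_POP_LENGTH)
 *     -> END_CASE_LENX(1)            (the argument expands before pasting)
 *     -> END_CASE_LEN1
 *     -> goto advance_pc_by_one;
 *
 * The END_CASE_LENX level exists so that JSOP_POP_LENGTH is macro-expanded
 * to its numeric value before the ## pasting takes place.
 */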
2785 | |
2786 | #ifdef JS_TRACER |
2787 | /* We had better not be entering the interpreter from JIT-compiled code. */ |
2788 | TraceRecorder *tr = TRACE_RECORDER(cx); |
2789 | SET_TRACE_RECORDER(cx, NULL); |
2790 | |
2791 |     /* If a recorder is pending and we try to re-enter the interpreter, flag |
2792 |      * the recorder to be destroyed when we return. */ |
2793 | if (tr) { |
2794 | if (tr->wasDeepAborted()) |
2795 | tr->removeFragmentoReferences(); |
2796 | else |
2797 | tr->pushAbortStack(); |
2798 | } |
2799 | #endif |
2800 | |
2801 | /* Check for too deep of a native thread stack. */ |
2802 | JS_CHECK_RECURSION(cx, return JS_FALSE); |
2803 | |
2804 | rt = cx->runtime; |
2805 | |
2806 | /* Set registerized frame pointer and derived script pointer. */ |
2807 | fp = cx->fp; |
2808 | script = fp->script; |
2809 | JS_ASSERT(script->length != 0); |
2810 | |
2811 | /* Count of JS function calls that nest in this C js_Interpret frame. */ |
2812 | inlineCallCount = 0; |
2813 | |
2814 | /* |
2815 | * Initialize the index segment register used by LOAD_ATOM and |
2816 | * GET_FULL_INDEX macros below. As a register we use a pointer based on |
2817 | * the atom map to turn frequently executed LOAD_ATOM into simple array |
2818 | * access. For less frequent object and regexp loads we have to recover |
2819 | * the segment from atoms pointer first. |
2820 | */ |
2821 | atoms = script->atomMap.vector; |
2822 | |
2823 | #define LOAD_ATOM(PCOFF) \ |
2824 | JS_BEGIN_MACRO \ |
2825 | JS_ASSERT(fp->imacpc \ |
2826 | ? atoms == COMMON_ATOMS_START(&rt->atomState) && \ |
2827 | GET_INDEX(regs.pc + PCOFF) < js_common_atom_count \ |
2828 | : (size_t)(atoms - script->atomMap.vector) < \ |
2829 | (size_t)(script->atomMap.length - \ |
2830 | GET_INDEX(regs.pc + PCOFF))); \ |
2831 | atom = atoms[GET_INDEX(regs.pc + PCOFF)]; \ |
2832 | JS_END_MACRO |
2833 | |
2834 | #define GET_FULL_INDEX(PCOFF) \ |
2835 | (atoms - script->atomMap.vector + GET_INDEX(regs.pc + PCOFF)) |
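/*
 * In other words (illustrative arithmetic only): if the current atom segment
 * starts at script->atomMap.vector + base, then for an opcode with immediate
 * index i,
 *
 *   LOAD_ATOM      reads   atoms[i]             == vector[base + i]
 *   GET_FULL_INDEX yields  (atoms - vector) + i == base + i
 *
 * so object and regexp loads can recover the script-wide index even though
 * LOAD_ATOM never computes it.
 */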
2836 | |
2837 | #define LOAD_OBJECT(PCOFF) \ |
2838 | JS_GET_SCRIPT_OBJECT(script, GET_FULL_INDEX(PCOFF), obj) |
2839 | |
2840 | #define LOAD_FUNCTION(PCOFF) \ |
2841 | JS_GET_SCRIPT_FUNCTION(script, GET_FULL_INDEX(PCOFF), fun) |
2842 | |
2843 | #ifdef JS_TRACER |
2844 | |
2845 | #define MONITOR_BRANCH() \ |
2846 | JS_BEGIN_MACRO \ |
2847 | if (TRACING_ENABLED(cx)) { \ |
2848 | if (js_MonitorLoopEdge(cx, inlineCallCount)) { \ |
2849 | JS_ASSERT(TRACE_RECORDER(cx)); \ |
2850 | ENABLE_INTERRUPTS(); \ |
2851 | } \ |
2852 | fp = cx->fp; \ |
2853 | script = fp->script; \ |
2854 | atoms = FrameAtomBase(cx, fp); \ |
2855 | currentVersion = (JSVersion) script->version; \ |
2856 | JS_ASSERT(fp->regs == ®s); \ |
2857 | if (cx->throwing) \ |
2858 | goto error; \ |
2859 | } \ |
2860 | JS_END_MACRO |
2861 | |
2862 | #else /* !JS_TRACER */ |
2863 | |
2864 | #define MONITOR_BRANCH() ((void) 0) |
2865 | |
2866 | #endif /* !JS_TRACER */ |
2867 | |
2868 | /* |
2869 | * Prepare to call a user-supplied branch handler, and abort the script |
2870 | * if it returns false. |
2871 | */ |
2872 | #define CHECK_BRANCH() \ |
2873 | JS_BEGIN_MACRO \ |
2874 | if (!JS_CHECK_OPERATION_LIMIT(cx)) \ |
2875 | goto error; \ |
2876 | JS_END_MACRO |
2877 | |
2878 | #ifndef TRACE_RECORDER |
2879 | #define TRACE_RECORDER(cx) (false) |
2880 | #endif |
2881 | |
2882 | #define BRANCH(n) \ |
2883 | JS_BEGIN_MACRO \ |
2884 | regs.pc += (n); \ |
2885 | op = (JSOp) *regs.pc; \ |
2886 | if ((n) <= 0) { \ |
2887 | CHECK_BRANCH(); \ |
2888 | if (op == JSOP_NOP) { \ |
2889 | if (TRACE_RECORDER(cx)) { \ |
2890 | MONITOR_BRANCH(); \ |
2891 | op = (JSOp) *regs.pc; \ |
2892 | } else { \ |
2893 | op = (JSOp) *++regs.pc; \ |
2894 | } \ |
2895 | } else if (op == JSOP_LOOP) { \ |
2896 | MONITOR_BRANCH(); \ |
2897 | op = (JSOp) *regs.pc; \ |
2898 | } \ |
2899 | } \ |
2900 | DO_OP(); \ |
2901 | JS_END_MACRO |
2902 | |
2903 | MUST_FLOW_THROUGH("exit"); |
2904 | ++cx->interpLevel; |
2905 | |
2906 | /* |
2907 | * Optimized Get and SetVersion for proper script language versioning. |
2908 | * |
2909 | * If any native method or JSClass/JSObjectOps hook calls js_SetVersion |
2910 | * and changes cx->version, the effect will "stick" and we will stop |
2911 | * maintaining currentVersion. This is relied upon by testsuites, for |
2912 | * the most part -- web browsers select version before compiling and not |
2913 | * at run-time. |
2914 | */ |
2915 | currentVersion = (JSVersion) script->version; |
2916 | originalVersion = (JSVersion) cx->version; |
2917 | if (currentVersion != originalVersion) |
2918 | js_SetVersion(cx, currentVersion); |
2919 | |
2920 | /* Update the static-link display. */ |
2921 | if (script->staticLevel < JS_DISPLAY_SIZE) { |
2922 | JSStackFrame **disp = &cx->display[script->staticLevel]; |
2923 | fp->displaySave = *disp; |
2924 | *disp = fp; |
2925 | } |
2926 | |
2927 | # define CHECK_INTERRUPT_HANDLER() \ |
2928 | JS_BEGIN_MACRO \ |
2929 | if (cx->debugHooks->interruptHandler) \ |
2930 | ENABLE_INTERRUPTS(); \ |
2931 | JS_END_MACRO |
2932 | |
2933 | /* |
2934 | * Load the debugger's interrupt hook here and after calling out to native |
2935 | * functions (but not to getters, setters, or other native hooks), so we do |
2936 | * not have to reload it each time through the interpreter loop -- we hope |
2937 | * the compiler can keep it in a register when it is non-null. |
2938 | */ |
2939 | CHECK_INTERRUPT_HANDLER(); |
2940 | |
2941 | #if !JS_HAS_GENERATORS |
2942 | JS_ASSERT(!fp->regs); |
2943 | #else |
2944 | /* Initialize the pc and sp registers unless we're resuming a generator. */ |
2945 | if (JS_LIKELY(!fp->regs)) { |
2946 | #endif |
2947 | ASSERT_NOT_THROWING(cx); |
2948 | regs.pc = script->code; |
2949 | regs.sp = StackBase(fp); |
2950 | fp->regs = ®s; |
2951 | #if JS_HAS_GENERATORS |
2952 | } else { |
2953 | JSGenerator *gen; |
2954 | |
2955 | JS_ASSERT(fp->flags & JSFRAME_GENERATOR); |
2956 | gen = FRAME_TO_GENERATOR(fp); |
2957 | JS_ASSERT(fp->regs == &gen->savedRegs); |
2958 | regs = gen->savedRegs; |
2959 | fp->regs = ®s; |
2960 | JS_ASSERT((size_t) (regs.pc - script->code) <= script->length); |
2961 | JS_ASSERT((size_t) (regs.sp - StackBase(fp)) <= StackDepth(script)); |
2962 | |
2963 | /* |
2964 | * To support generator_throw and to catch ignored exceptions, |
2965 | * fail if cx->throwing is set. |
2966 | */ |
2967 | if (cx->throwing) { |
2968 | #ifdef DEBUG_NOT_THROWING |
2969 | if (cx->exception != JSVAL_ARETURN) { |
2970 | printf("JS INTERPRETER CALLED WITH PENDING EXCEPTION %lx\n", |
2971 | (unsigned long) cx->exception); |
2972 | } |
2973 | #endif |
2974 | goto error; |
2975 | } |
2976 | } |
2977 | #endif /* JS_HAS_GENERATORS */ |
2978 | |
2979 | /* |
2980 | * It is important that "op" be initialized before calling DO_OP because |
2981 | * it is possible for "op" to be specially assigned during the normal |
2982 | * processing of an opcode while looping. We rely on DO_NEXT_OP to manage |
2983 | * "op" correctly in all other cases. |
2984 | */ |
2985 | len = 0; |
2986 | DO_NEXT_OP(len); |
2987 | |
2988 | #if JS_THREADED_INTERP |
2989 | /* |
2990 | * This is a loop, but it does not look like a loop. The loop-closing |
2991 | * jump is distributed throughout goto *jumpTable[op] inside of DO_OP. |
2992 | * When interrupts are enabled, jumpTable is set to interruptJumpTable |
2993 | * where all jumps point to the interrupt label. The latter, after |
2994 | * calling the interrupt handler, dispatches through normalJumpTable to |
2995 | * continue the normal bytecode processing. |
2996 | */ |
2997 | interrupt: |
2998 | #else /* !JS_THREADED_INTERP */ |
2999 | for (;;) { |
3000 | advance_pc_by_one: |
3001 | JS_ASSERT(js_CodeSpec[op].length == 1); |
3002 | len = 1; |
3003 | advance_pc: |
3004 | regs.pc += len; |
3005 | op = (JSOp) *regs.pc; |
3006 | |
3007 | do_op: |
3008 | CHECK_RECORDER(); |
3009 | TRACE_OPCODE(op); |
3010 | switchOp = intN(op) | switchMask; |
3011 | do_switch: |
3012 | switch (switchOp) { |
3013 | case -1: |
3014 | JS_ASSERT(switchMask == -1); |
3015 | #endif /* !JS_THREADED_INTERP */ |
3016 | { |
3017 | bool moreInterrupts = false; |
3018 | JSTrapHandler handler = cx->debugHooks->interruptHandler; |
3019 | if (handler) { |
3020 | #ifdef JS_TRACER |
3021 | if (TRACE_RECORDER(cx)) |
3022 | js_AbortRecording(cx, "interrupt handler"); |
3023 | #endif |
3024 | switch (handler(cx, script, regs.pc, &rval, |
3025 | cx->debugHooks->interruptHandlerData)) { |
3026 | case JSTRAP_ERROR: |
3027 | goto error; |
3028 | case JSTRAP_CONTINUE: |
3029 | break; |
3030 | case JSTRAP_RETURN: |
3031 | fp->rval = rval; |
3032 | ok = JS_TRUE; |
3033 | goto forced_return; |
3034 | case JSTRAP_THROW: |
3035 | cx->throwing = JS_TRUE; |
3036 | cx->exception = rval; |
3037 | goto error; |
3038 | default:; |
3039 | } |
3040 | moreInterrupts = true; |
3041 | } |
3042 | |
3043 | #ifdef JS_TRACER |
3044 | TraceRecorder* tr = TRACE_RECORDER(cx); |
3045 | if (tr) { |
3046 | JSRecordingStatus status = TraceRecorder::monitorRecording(cx, tr, op); |
3047 | switch (status) { |
3048 | case JSRS_CONTINUE: |
3049 | moreInterrupts = true; |
3050 | break; |
3051 | case JSRS_IMACRO: |
3052 | atoms = COMMON_ATOMS_START(&rt->atomState); |
3053 | op = JSOp(*regs.pc); |
3054 | DO_OP(); /* keep interrupting for op. */ |
3055 | break; |
3056 | case JSRS_ERROR: |
3057 | // The code at 'error:' aborts the recording. |
3058 | goto error; |
3059 | case JSRS_STOP: |
3060 | break; |
3061 | default: |
3062 | JS_NOT_REACHED("Bad recording status"); |
3063 | } |
3064 | } |
3065 | #endif /* JS_TRACER */ |
3066 | |
3067 | #if JS_THREADED_INTERP |
3068 | jumpTable = moreInterrupts ? interruptJumpTable : normalJumpTable; |
3069 | JS_EXTENSION_(goto *normalJumpTable[op]); |
3070 | #else |
3071 | switchMask = moreInterrupts ? -1 : 0; |
3072 | switchOp = intN(op); |
3073 | goto do_switch; |
3074 | #endif |
3075 | } |
3076 | |
3077 | /* No-ops for ease of decompilation. */ |
3078 | ADD_EMPTY_CASE(JSOP_NOP) |
3079 | ADD_EMPTY_CASE(JSOP_CONDSWITCH) |
3080 | ADD_EMPTY_CASE(JSOP_TRY) |
3081 | #if JS_HAS_XML_SUPPORT |
3082 | ADD_EMPTY_CASE(JSOP_STARTXML) |
3083 | ADD_EMPTY_CASE(JSOP_STARTXMLEXPR) |
3084 | #endif |
3085 | END_EMPTY_CASES |
3086 | |
3087 | /* ADD_EMPTY_CASE is not used here as JSOP_LINENO_LENGTH == 3. */ |
3088 | BEGIN_CASE(JSOP_LINENO) |
3089 | END_CASE(JSOP_LINENO) |
3090 | |
3091 | BEGIN_CASE(JSOP_PUSH) |
3092 | PUSH_OPND(JSVAL_VOID); |
3093 | END_CASE(JSOP_PUSH) |
3094 | |
3095 | BEGIN_CASE(JSOP_POP) |
3096 | regs.sp--; |
3097 | END_CASE(JSOP_POP) |
3098 | |
3099 | BEGIN_CASE(JSOP_POPN) |
3100 | regs.sp -= GET_UINT16(regs.pc); |
3101 | #ifdef DEBUG |
3102 | JS_ASSERT(StackBase(fp) <= regs.sp); |
3103 | obj = fp->blockChain; |
3104 | JS_ASSERT_IF(obj, |
3105 | OBJ_BLOCK_DEPTH(cx, obj) + OBJ_BLOCK_COUNT(cx, obj) |
3106 | <= (size_t) (regs.sp - StackBase(fp))); |
3107 | for (obj = fp->scopeChain; obj; obj = OBJ_GET_PARENT(cx, obj)) { |
3108 | clasp = OBJ_GET_CLASS(cx, obj); |
3109 | if (clasp != &js_BlockClass && clasp != &js_WithClass) |
3110 | continue; |
3111 | if (OBJ_GET_PRIVATE(cx, obj) != fp) |
3112 | break; |
3113 | JS_ASSERT(StackBase(fp) + OBJ_BLOCK_DEPTH(cx, obj) |
3114 | + ((clasp == &js_BlockClass) |
3115 | ? OBJ_BLOCK_COUNT(cx, obj) |
3116 | : 1) |
3117 | <= regs.sp); |
3118 | } |
3119 | #endif |
3120 | END_CASE(JSOP_POPN) |
3121 | |
3122 | BEGIN_CASE(JSOP_SETRVAL) |
3123 | BEGIN_CASE(JSOP_POPV) |
3124 | ASSERT_NOT_THROWING(cx); |
3125 | fp->rval = POP_OPND(); |
3126 | END_CASE(JSOP_POPV) |
3127 | |
3128 | BEGIN_CASE(JSOP_ENTERWITH) |
3129 | if (!js_EnterWith(cx, -1)) |
3130 | goto error; |
3131 | |
3132 | /* |
3133 | * We must ensure that different "with" blocks have different |
3134 | * stack depth associated with them. This allows the try handler |
3135 | * search to properly recover the scope chain. Thus we must keep |
3136 | * the stack at least at the current level. |
3137 | * |
3138 | * We set sp[-1] to the current "with" object to help asserting |
3139 |          * the enter/leave balance in JSOP_LEAVEWITH. |
3140 | */ |
3141 | regs.sp[-1] = OBJECT_TO_JSVAL(fp->scopeChain); |
3142 | END_CASE(JSOP_ENTERWITH) |
3143 | |
3144 | BEGIN_CASE(JSOP_LEAVEWITH) |
3145 | JS_ASSERT(regs.sp[-1] == OBJECT_TO_JSVAL(fp->scopeChain)); |
3146 | regs.sp--; |
3147 | js_LeaveWith(cx); |
3148 | END_CASE(JSOP_LEAVEWITH) |
3149 | |
3150 | BEGIN_CASE(JSOP_RETURN) |
3151 | fp->rval = POP_OPND(); |
3152 | /* FALL THROUGH */ |
3153 | |
3154 | BEGIN_CASE(JSOP_RETRVAL) /* fp->rval already set */ |
3155 | BEGIN_CASE(JSOP_STOP) |
3156 | /* |
3157 | * When the inlined frame exits with an exception or an error, ok |
3158 | * will be false after the inline_return label. |
3159 | */ |
3160 | ASSERT_NOT_THROWING(cx); |
3161 | CHECK_BRANCH(); |
3162 | |
3163 | if (fp->imacpc) { |
3164 | /* |
3165 | * If we are at the end of an imacro, return to its caller in |
3166 | * the current frame. |
3167 | */ |
3168 | JS_ASSERT(op == JSOP_STOP); |
3169 | |
3170 | end_imacro: |
3171 | JS_ASSERT((uintN)(regs.sp - fp->slots) <= script->nslots); |
3172 | regs.pc = fp->imacpc + js_CodeSpec[*fp->imacpc].length; |
3173 | fp->imacpc = NULL; |
3174 | atoms = script->atomMap.vector; |
3175 | op = JSOp(*regs.pc); |
3176 | DO_OP(); |
3177 | } |
3178 | |
3179 | JS_ASSERT(regs.sp == StackBase(fp)); |
3180 | if ((fp->flags & JSFRAME_CONSTRUCTING) && |
3181 | JSVAL_IS_PRIMITIVE(fp->rval)) { |
3182 | if (!fp->fun) { |
3183 | JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, |
3184 | JSMSG_BAD_NEW_RESULT, |
3185 | js_ValueToPrintableString(cx, rval)); |
3186 | goto error; |
3187 | } |
3188 | fp->rval = OBJECT_TO_JSVAL(fp->thisp); |
3189 | } |
3190 | ok = JS_TRUE; |
3191 | if (inlineCallCount) |
3192 | inline_return: |
3193 | { |
3194 | JSInlineFrame *ifp = (JSInlineFrame *) fp; |
3195 | void *hookData = ifp->hookData; |
3196 | |
3197 | JS_ASSERT(!fp->blockChain); |
3198 | JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->scopeChain, 0)); |
3199 | |
3200 | if (script->staticLevel < JS_DISPLAY_SIZE) |
3201 | cx->display[script->staticLevel] = fp->displaySave; |
3202 | |
3203 | if (hookData) { |
3204 | JSInterpreterHook hook; |
3205 | JSBool status; |
3206 | |
3207 | hook = cx->debugHooks->callHook; |
3208 | if (hook) { |
3209 | /* |
3210 | * Do not pass &ok directly as exposing the address |
3211 | * inhibits optimizations and uninitialised warnings. |
3212 | */ |
3213 | status = ok; |
3214 | hook(cx, fp, JS_FALSE, &status, hookData); |
3215 | ok = status; |
3216 | CHECK_INTERRUPT_HANDLER(); |
3217 | } |
3218 | } |
3219 | |
3220 | /* |
3221 | * If fp has a call object, sync values and clear the back- |
3222 | * pointer. This can happen for a lightweight function if it |
3223 | * calls eval unexpectedly (in a way that is hidden from the |
3224 | * compiler). See bug 325540. |
3225 | */ |
3226 | if (fp->callobj) |
3227 | ok &= js_PutCallObject(cx, fp); |
3228 | |
3229 | if (fp->argsobj) |
3230 | ok &= js_PutArgsObject(cx, fp); |
3231 | |
3232 | #ifdef INCLUDE_MOZILLA_DTRACE |
3233 | /* DTrace function return, inlines */ |
3234 | if (JAVASCRIPT_FUNCTION_RVAL_ENABLED()) |
3235 | jsdtrace_function_rval(cx, fp, fp->fun); |
3236 | if (JAVASCRIPT_FUNCTION_RETURN_ENABLED()) |
3237 | jsdtrace_function_return(cx, fp, fp->fun); |
3238 | #endif |
3239 | |
3240 | /* Restore context version only if callee hasn't set version. */ |
3241 | if (JS_LIKELY(cx->version == currentVersion)) { |
3242 | currentVersion = ifp->callerVersion; |
3243 | if (currentVersion != cx->version) |
3244 | js_SetVersion(cx, currentVersion); |
3245 | } |
3246 | |
3247 | /* |
3248 | * If inline-constructing, replace primitive rval with the new |
3249 | * object passed in via |this|, and instrument this constructor |
3250 |                  * invocation. |
3251 | */ |
3252 | if (fp->flags & JSFRAME_CONSTRUCTING) { |
3253 | if (JSVAL_IS_PRIMITIVE(fp->rval)) |
3254 | fp->rval = OBJECT_TO_JSVAL(fp->thisp); |
3255 | JS_RUNTIME_METER(cx->runtime, constructs); |
3256 | } |
3257 | |
3258 | /* Restore caller's registers. */ |
3259 | regs = ifp->callerRegs; |
3260 | |
3261 | /* Store the return value in the caller's operand frame. */ |
3262 | regs.sp -= 1 + (size_t) ifp->frame.argc; |
3263 | regs.sp[-1] = fp->rval; |
3264 | |
3265 | /* Restore cx->fp and release the inline frame's space. */ |
3266 | cx->fp = fp = fp->down; |
3267 | JS_ASSERT(fp->regs == &ifp->callerRegs); |
3268 | fp->regs = ®s; |
3269 | JS_ARENA_RELEASE(&cx->stackPool, ifp->mark); |
3270 | |
3271 | /* Restore the calling script's interpreter registers. */ |
3272 | script = fp->script; |
3273 | atoms = FrameAtomBase(cx, fp); |
3274 | |
3275 | /* Resume execution in the calling frame. */ |
3276 | inlineCallCount--; |
3277 | if (JS_LIKELY(ok)) { |
3278 | TRACE_0(LeaveFrame); |
3279 | JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, script, regs.pc)].length |
3280 | == JSOP_CALL_LENGTH); |
3281 | len = JSOP_CALL_LENGTH; |
3282 | DO_NEXT_OP(len); |
3283 | } |
3284 | goto error; |
3285 | } |
3286 | goto exit; |
3287 | |
3288 | BEGIN_CASE(JSOP_DEFAULT) |
3289 | (void) POP(); |
3290 | /* FALL THROUGH */ |
3291 | BEGIN_CASE(JSOP_GOTO) |
3292 | len = GET_JUMP_OFFSET(regs.pc); |
3293 | BRANCH(len); |
3294 | END_CASE(JSOP_GOTO) |
3295 | |
3296 | BEGIN_CASE(JSOP_IFEQ) |
3297 | POP_BOOLEAN(cx, rval, cond); |
3298 | if (cond == JS_FALSE) { |
3299 | len = GET_JUMP_OFFSET(regs.pc); |
3300 | BRANCH(len); |
3301 | } |
3302 | END_CASE(JSOP_IFEQ) |
3303 | |
3304 | BEGIN_CASE(JSOP_IFNE) |
3305 | POP_BOOLEAN(cx, rval, cond); |
3306 | if (cond != JS_FALSE) { |
3307 | len = GET_JUMP_OFFSET(regs.pc); |
3308 | BRANCH(len); |
3309 | } |
3310 | END_CASE(JSOP_IFNE) |
3311 | |
3312 | BEGIN_CASE(JSOP_OR) |
3313 | POP_BOOLEAN(cx, rval, cond); |
3314 | if (cond == JS_TRUE) { |
3315 | len = GET_JUMP_OFFSET(regs.pc); |
3316 | PUSH_OPND(rval); |
3317 | DO_NEXT_OP(len); |
3318 | } |
3319 | END_CASE(JSOP_OR) |
3320 | |
3321 | BEGIN_CASE(JSOP_AND) |
3322 | POP_BOOLEAN(cx, rval, cond); |
3323 | if (cond == JS_FALSE) { |
3324 | len = GET_JUMP_OFFSET(regs.pc); |
3325 | PUSH_OPND(rval); |
3326 | DO_NEXT_OP(len); |
3327 | } |
3328 | END_CASE(JSOP_AND) |
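/*
 * Annotation (added): JSOP_OR and JSOP_AND implement JavaScript's
 * short-circuit, value-preserving semantics. POP_BOOLEAN pops the left
 * operand and computes its truthiness; when the short circuit applies
 * (truthy for ||, falsy for &&) the original operand is pushed back with
 * PUSH_OPND and the jump skips the right-hand side, so |a || b| yields a
 * itself rather than a boolean. When no jump is taken the operand stays
 * popped and the right-hand side's bytecode produces the result.
 */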
3329 | |
3330 | BEGIN_CASE(JSOP_DEFAULTX) |
3331 | (void) POP(); |
3332 | /* FALL THROUGH */ |
3333 | BEGIN_CASE(JSOP_GOTOX) |
3334 | len = GET_JUMPX_OFFSET(regs.pc); |
3335 | BRANCH(len); |
3336 | END_CASE(JSOP_GOTOX) |
3337 | |
3338 | BEGIN_CASE(JSOP_IFEQX) |
3339 | POP_BOOLEAN(cx, rval, cond); |
3340 | if (cond == JS_FALSE) { |
3341 | len = GET_JUMPX_OFFSET(regs.pc); |
3342 | BRANCH(len); |
3343 | } |
3344 | END_CASE(JSOP_IFEQX) |
3345 | |
3346 | BEGIN_CASE(JSOP_IFNEX) |
3347 | POP_BOOLEAN(cx, rval, cond); |
3348 | if (cond != JS_FALSE) { |
3349 | len = GET_JUMPX_OFFSET(regs.pc); |
3350 | BRANCH(len); |
3351 | } |
3352 | END_CASE(JSOP_IFNEX) |
3353 | |
3354 | BEGIN_CASE(JSOP_ORX) |
3355 | POP_BOOLEAN(cx, rval, cond); |
3356 | if (cond == JS_TRUE) { |
3357 | len = GET_JUMPX_OFFSET(regs.pc); |
3358 | PUSH_OPND(rval); |
3359 | DO_NEXT_OP(len); |
3360 | } |
3361 | END_CASE(JSOP_ORX) |
3362 | |
3363 | BEGIN_CASE(JSOP_ANDX) |
3364 | POP_BOOLEAN(cx, rval, cond); |
3365 | if (cond == JS_FALSE) { |
3366 | len = GET_JUMPX_OFFSET(regs.pc); |
3367 | PUSH_OPND(rval); |
3368 | DO_NEXT_OP(len); |
3369 | } |
3370 | END_CASE(JSOP_ANDX) |
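/*
 * Annotation (added): the *X opcodes above are the extended-offset forms
 * of the same jumps, presumably emitted when a branch target does not fit
 * in the normal 16-bit jump immediate; GET_JUMPX_OFFSET reads a wider
 * (32-bit) signed offset, but the control flow is otherwise identical to
 * the non-X cases.
 */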
3371 | |
3372 | /* |
3373 | * If the index value at sp[n] is not an int that fits in a jsval, it could |
3374 | * be an object (an XML QName, AttributeName, or AnyName), but only if we are |
3375 | * compiling with JS_HAS_XML_SUPPORT. Otherwise convert the index value to a |
3376 | * string atom id. |
3377 | */ |
3378 | #define FETCH_ELEMENT_ID(obj, n, id) \ |
3379 | JS_BEGIN_MACRO \ |
3380 | jsval idval_ = FETCH_OPND(n); \ |
3381 | if (JSVAL_IS_INT(idval_)) { \ |
3382 | id = INT_JSVAL_TO_JSID(idval_); \ |
3383 | } else { \ |
3384 | if (!js_InternNonIntElementId(cx, obj, idval_, &id)) \ |
3385 | goto error; \ |
3386 | regs.sp[n] = ID_TO_VALUE(id); \ |
3387 | } \ |
3388 | JS_END_MACRO |
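/*
 * Annotation (added): FETCH_ELEMENT_ID turns the value at sp[n] into a
 * property id. Int-tagged jsvals convert to int jsids directly; anything
 * else goes through js_InternNonIntElementId, which (per the comment
 * above) atomizes the value to a string id or accepts an XML name object.
 * The canonical id is written back into the stack slot, presumably so the
 * freshly interned atom stays rooted via the operand stack.
 */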
3389 | |
3390 | #define TRY_BRANCH_AFTER_COND(cond,spdec) \ |
3391 | JS_BEGIN_MACRO \ |
3392 | uintN diff_; \ |
3393 | JS_ASSERT(js_CodeSpec[op].length == 1); \ |
3394 | diff_ = (uintN) regs.pc[1] - (uintN) JSOP_IFEQ; \ |
3395 | if (diff_ <= 1) { \ |
3396 | regs.sp -= spdec; \ |
3397 | if (cond == (diff_ != 0)) { \ |
3398 | ++regs.pc; \ |
3399 | len = GET_JUMP_OFFSET(regs.pc); \ |
3400 | BRANCH(len); \ |
3401 | } \ |
3402 | len = 1 + JSOP_IFEQ_LENGTH; \ |
3403 | DO_NEXT_OP(len); \ |
3404 | } \ |
3405 | JS_END_MACRO |
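/*
 * Annotation (added): TRY_BRANCH_AFTER_COND fuses a one-byte comparison
 * opcode with an immediately following conditional jump. It presumes that
 * JSOP_IFNE has the opcode value JSOP_IFEQ + 1, so diff_ is 0 for IFEQ
 * (branch when the condition is false) and 1 for IFNE (branch when it is
 * true), making |cond == (diff_ != 0)| exactly "the branch is taken".
 * Either way the boolean result never reaches the operand stack, and the
 * combined length 1 + JSOP_IFEQ_LENGTH steps over both the comparison and
 * the jump when the branch is not taken.
 */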
3406 | |
3407 | BEGIN_CASE(JSOP_IN) |
3408 | rval = FETCH_OPND(-1); |
3409 | if (JSVAL_IS_PRIMITIVE(rval)) { |
3410 | js_ReportValueError(cx, JSMSG_IN_NOT_OBJECT, -1, rval, NULL); |
3411 | goto error; |
3412 | } |
3413 | obj = JSVAL_TO_OBJECT(rval); |
3414 | FETCH_ELEMENT_ID(obj, -2, id); |
3415 | if (!OBJ_LOOKUP_PROPERTY(cx, obj, id, &obj2, &prop)) |
3416 | goto error; |
3417 | cond = prop != NULL; |
3418 | if (prop) |
3419 | OBJ_DROP_PROPERTY(cx, obj2, prop); |
3420 | TRY_BRANCH_AFTER_COND(cond, 2); |
3421 | regs.sp--; |
3422 | STORE_OPND(-1, BOOLEAN_TO_JSVAL(cond)); |
3423 | END_CASE(JSOP_IN) |
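/*
 * Annotation (added): this is the relational |in| operator, for example
 * |"length" in []| or |2 in [1, 2, 3]|, both of which are true. The right
 * operand must be an object, the left operand is converted to a property
 * id, and OBJ_LOOKUP_PROPERTY searches the prototype chain, so inherited
 * properties count. TRY_BRANCH_AFTER_COND lets a following IFEQ/IFNE
 * consume the result without a boolean ever being pushed.
 */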
3424 | |
3425 | BEGIN_CASE(JSOP_ITER) |
3426 | JS_ASSERT(regs.sp > StackBase(fp)); |
3427 | flags = regs.pc[1]; |
3428 | if (!js_ValueToIterator(cx, flags, &regs.sp[-1])) |
3429 | goto error; |
3430 | CHECK_INTERRUPT_HANDLER(); |
3431 | JS_ASSERT(!JSVAL_IS_PRIMITIVE(regs.sp[-1])); |
3432 | PUSH(JSVAL_VOID); |
3433 | END_CASE(JSOP_ITER) |
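/*
 * Annotation (added): JSOP_ITER converts the value at sp[-1] into an
 * iterator object in place; the immediate byte at pc[1] carries iteration
 * flags (for example whether a for-in loop wants keys only or key/value
 * pairs). The JSVAL_VOID pushed here reserves the second stack slot that
 * JSOP_NEXTITER fills with each produced value.
 */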
3434 | |
3435 | BEGIN_CASE(JSOP_NEXTITER) |
3436 | JS_ASSERT(regs.sp - 2 >= StackBase(fp)); |
3437 | JS_ASSERT(!JSVAL_IS_PRIMITIVE(regs.sp[-2])); |
3438 | if (!js_CallIteratorNext(cx, JSVAL_TO_OBJECT(regs.sp[-2]), &regs.sp[-1])) |
3439 | goto error; |
3440 | CHECK_INTERRUPT_HANDLER(); |
3441 | rval = BOOLEAN_TO_JSVAL(regs.sp[-1] != JSVAL_HOLE); |
3442 | PUSH(rval); |
3443 | END_CASE(JSOP_NEXTITER) |
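/*
 * Annotation (added): JSOP_NEXTITER advances the iterator at sp[-2] and
 * stores the next value into sp[-1]; JSVAL_HOLE in that slot is the
 * sentinel for "iteration exhausted", so the boolean pushed on top is
 * false exactly when there are no more values. A for-in loop's
 * conditional branch (typically IFNE/IFNEX) then pops that boolean to
 * decide whether to execute another pass of the loop body.
 */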
3444 | |
3445 | BEGIN_CASE(JSOP_ENDITER) |
3446 | /* |
3447 | * Decrease the stack pointer even when !ok -- see comments in the |
3448 | * exception capturing code for details. |
3449 | */ |
3450 | JS_ASSERT(regs.sp - 2 >= StackBase(fp)); |
3451 | ok = js_CloseIterator(cx, regs.sp[-2]); |