
Contents of /trunk/js/jsinterp.cpp


Revision 507
Sun Jan 10 07:23:34 2010 UTC by siliconforks
File size: 114288 bytes
Update SpiderMonkey from Firefox 3.6rc1.

1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=79:
3 *
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 *
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
11 *
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
16 *
17 * The Original Code is Mozilla Communicator client code, released
18 * March 31, 1998.
19 *
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
24 *
25 * Contributor(s):
26 *
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
38 *
39 * ***** END LICENSE BLOCK ***** */
40
41 /*
42 * JavaScript bytecode interpreter.
43 */
44 #include <stdio.h>
45 #include <string.h>
46 #include <math.h>
47 #include "jstypes.h"
48 #include "jsstdint.h"
49 #include "jsarena.h" /* Added by JSIFY */
50 #include "jsutil.h" /* Added by JSIFY */
51 #include "jsprf.h"
52 #include "jsapi.h"
53 #include "jsarray.h"
54 #include "jsatom.h"
55 #include "jsbool.h"
56 #include "jscntxt.h"
57 #include "jsdate.h"
58 #include "jsversion.h"
59 #include "jsdbgapi.h"
60 #include "jsfun.h"
61 #include "jsgc.h"
62 #include "jsinterp.h"
63 #include "jsiter.h"
64 #include "jslock.h"
65 #include "jsnum.h"
66 #include "jsobj.h"
67 #include "jsopcode.h"
68 #include "jsscan.h"
69 #include "jsscope.h"
70 #include "jsscript.h"
71 #include "jsstr.h"
72 #include "jsstaticcheck.h"
73 #include "jstracer.h"
74 #include "jslibmath.h"
75 #include "jsvector.h"
76 #include "jsstrinlines.h"
77
78 #ifdef INCLUDE_MOZILLA_DTRACE
79 #include "jsdtracef.h"
80 #endif
81
82 #if JS_HAS_XML_SUPPORT
83 #include "jsxml.h"
84 #endif
85
86 #include "jsatominlines.h"
87 #include "jsscriptinlines.h"
88
89 #include "jsautooplen.h"
90
91 /* jsinvoke_cpp___ indicates inclusion from jsinvoke.cpp. */
92 #if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___
93
94 JS_REQUIRES_STACK JSPropCacheEntry *
95 js_FillPropertyCache(JSContext *cx, JSObject *obj,
96 uintN scopeIndex, uintN protoIndex, JSObject *pobj,
97 JSScopeProperty *sprop, JSBool adding)
98 {
99 JSPropertyCache *cache;
100 jsbytecode *pc;
101 JSScope *scope;
102 jsuword kshape, vshape, khash;
103 JSOp op;
104 const JSCodeSpec *cs;
105 jsuword vword;
106 ptrdiff_t pcoff;
107 JSAtom *atom;
108 JSPropCacheEntry *entry;
109
110 JS_ASSERT(!cx->runtime->gcRunning);
111 cache = &JS_PROPERTY_CACHE(cx);
112
113 /* FIXME bug 489098: consider enabling the property cache for eval. */
114 if (js_IsPropertyCacheDisabled(cx) || (cx->fp->flags & JSFRAME_EVAL)) {
115 PCMETER(cache->disfills++);
116 return JS_NO_PROP_CACHE_FILL;
117 }
118
119 /*
120 * Check for fill from js_SetPropertyHelper where the setter removed sprop
121 * from pobj's scope (via unwatch or delete, e.g.).
122 */
123 scope = OBJ_SCOPE(pobj);
124 if (!scope->has(sprop)) {
125 PCMETER(cache->oddfills++);
126 return JS_NO_PROP_CACHE_FILL;
127 }
128
129 /*
130 * Check for overdeep scope and prototype chain. Because resolve, getter,
131 * and setter hooks can change the prototype chain using JS_SetPrototype
132 * after js_LookupPropertyWithFlags has returned the nominal protoIndex,
133 * we have to validate protoIndex if it is non-zero. If it is zero, then
134 * we know thanks to the scope->has test above, combined with the fact that
135 * obj == pobj, that protoIndex is invariant.
136 *
137 * The scopeIndex can't be wrong. We require JS_SetParent calls to happen
138 * before any running script might consult a parent-linked scope chain. If
139 * this requirement is not satisfied, the fill in progress will never hit,
140 * but vcap vs. scope shape tests ensure nothing malfunctions.
141 */
142 JS_ASSERT_IF(scopeIndex == 0 && protoIndex == 0, obj == pobj);
143
144 if (protoIndex != 0) {
145 JSObject *tmp = obj;
146
147 for (uintN i = 0; i != scopeIndex; i++)
148 tmp = OBJ_GET_PARENT(cx, tmp);
149 JS_ASSERT(tmp != pobj);
150
151 protoIndex = 1;
152 for (;;) {
153 tmp = OBJ_GET_PROTO(cx, tmp);
154
155 /*
156 * We cannot cache properties coming from native objects behind
157 * non-native ones on the prototype chain. The non-natives can
158 * mutate in arbitrary ways without changing any shapes.
159 */
160 if (!tmp || !OBJ_IS_NATIVE(tmp)) {
161 PCMETER(cache->noprotos++);
162 return JS_NO_PROP_CACHE_FILL;
163 }
164 if (tmp == pobj)
165 break;
166 ++protoIndex;
167 }
168 }
169
170 if (scopeIndex > PCVCAP_SCOPEMASK || protoIndex > PCVCAP_PROTOMASK) {
171 PCMETER(cache->longchains++);
172 return JS_NO_PROP_CACHE_FILL;
173 }
174
175 /*
176 * Optimize the cached vword based on our parameters and the current pc's
177 * opcode format flags.
178 */
179 pc = cx->fp->regs->pc;
180 op = js_GetOpcode(cx, cx->fp->script, pc);
181 cs = &js_CodeSpec[op];
182 kshape = 0;
183
184 do {
185 /*
186 * Check for a prototype "plain old method" callee computation. What
187 * is a plain old method? It's a function-valued property with stub
188 * getter, so get of a function is idempotent.
189 */
190 if ((cs->format & JOF_CALLOP) &&
191 SPROP_HAS_STUB_GETTER(sprop) &&
192 SPROP_HAS_VALID_SLOT(sprop, scope)) {
193 jsval v;
194
195 v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot);
196 if (VALUE_IS_FUNCTION(cx, v)) {
197 /*
198 * Great, we have a function-valued prototype property where
199 * the getter is JS_PropertyStub. The type id in pobj's scope
200 * does not evolve with changes to property values, however.
201 *
202 * So here, on first cache fill for this method, we brand the
203 * scope with a new shape and set the SCOPE_BRANDED flag. Once
204 * this scope flag is set, any write to a function-valued plain
205 * old property in pobj will result in shape being regenerated.
206 */
207 if (!scope->branded()) {
208 PCMETER(cache->brandfills++);
209 #ifdef DEBUG_notme
210 fprintf(stderr,
211 "branding %p (%s) for funobj %p (%s), shape %lu\n",
212 pobj, pobj->getClass()->name,
213 JSVAL_TO_OBJECT(v),
214 JS_GetFunctionName(GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(v))),
215 OBJ_SHAPE(obj));
216 #endif
217 scope->brandingShapeChange(cx, sprop->slot, v);
218 if (js_IsPropertyCacheDisabled(cx)) /* check for rt->shapeGen overflow */
219 return JS_NO_PROP_CACHE_FILL;
220 scope->setBranded();
221 }
222 vword = JSVAL_OBJECT_TO_PCVAL(v);
223 break;
224 }
225 }
226
227 /* If getting a value via a stub getter, we can cache the slot. */
228 if (!(cs->format & (JOF_SET | JOF_INCDEC | JOF_FOR)) &&
229 SPROP_HAS_STUB_GETTER(sprop) &&
230 SPROP_HAS_VALID_SLOT(sprop, scope)) {
231 /* Great, let's cache sprop's slot and use it on cache hit. */
232 vword = SLOT_TO_PCVAL(sprop->slot);
233 } else {
234 /* Best we can do is to cache sprop (still a nice speedup). */
235 vword = SPROP_TO_PCVAL(sprop);
236 if (adding &&
237 sprop == scope->lastProp &&
238 scope->shape == sprop->shape) {
239 /*
240 * Our caller added a new property. We also know that a setter
241 * that js_NativeSet could have run has not mutated the scope
242 * so the added property is still the last one added and the
243 * scope is not branded.
244 *
245 * We want to cache under scope's shape before the property
246 * addition to bias for the case when the mutator opcode
247 * always adds the same property. It allows us to optimize
248 * periodic execution of object initializers or explicit
249 * initialization sequences like
250 *
251 * obj = {}; obj.x = 1; obj.y = 2;
252 *
253 * We assume that on average the win from this optimization is
254 * bigger than the cost of an extra mismatch per loop due to
255 * the bias for the following case:
256 *
257 * obj = {}; ... for (...) { ... obj.x = ... }
258 *
259 * On the first iteration JSOP_SETPROP fills the cache with
260 * the shape of the newly created object, not the shape after
261 * obj.x is assigned. That mismatches obj's shape on the
262 * second iteration. Note that on the third and following
263 * iterations the cache will be hit since the shape no longer
264 * mutates.
265 */
266 JS_ASSERT(scope->owned());
267 if (sprop->parent) {
268 kshape = sprop->parent->shape;
269 } else {
270 /*
271 * If obj had its own empty scope before, with a unique
272 * shape, that is lost. Here we only attempt to find a
273 * matching empty scope. In unusual cases involving
274 * __proto__ assignment we may not find one.
275 */
276 JSObject *proto = STOBJ_GET_PROTO(obj);
277 if (!proto || !OBJ_IS_NATIVE(proto))
278 return JS_NO_PROP_CACHE_FILL;
279 JSScope *protoscope = OBJ_SCOPE(proto);
280 if (!protoscope->emptyScope ||
281 protoscope->emptyScope->clasp != obj->getClass()) {
282 return JS_NO_PROP_CACHE_FILL;
283 }
284 kshape = protoscope->emptyScope->shape;
285 }
286
287 /*
288 * When adding we predict no prototype object will later gain a
289 * readonly property or setter.
290 */
291 vshape = cx->runtime->protoHazardShape;
292 }
293 }
294 } while (0);
295
296 if (kshape == 0) {
297 kshape = OBJ_SHAPE(obj);
298 vshape = scope->shape;
299 }
300
301 khash = PROPERTY_CACHE_HASH_PC(pc, kshape);
302 if (obj == pobj) {
303 JS_ASSERT(scopeIndex == 0 && protoIndex == 0);
304 } else {
305 if (op == JSOP_LENGTH) {
306 atom = cx->runtime->atomState.lengthAtom;
307 } else {
308 pcoff = (JOF_TYPE(cs->format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
309 GET_ATOM_FROM_BYTECODE(cx->fp->script, pc, pcoff, atom);
310 }
311
312 #ifdef DEBUG
313 if (scopeIndex == 0) {
314 JS_ASSERT(protoIndex != 0);
315 JS_ASSERT((protoIndex == 1) == (OBJ_GET_PROTO(cx, obj) == pobj));
316 }
317 #endif
318
319 if (scopeIndex != 0 || protoIndex != 1) {
320 khash = PROPERTY_CACHE_HASH_ATOM(atom, obj);
321 PCMETER(if (PCVCAP_TAG(cache->table[khash].vcap) <= 1)
322 cache->pcrecycles++);
323 pc = (jsbytecode *) atom;
324 kshape = (jsuword) obj;
325
326 /*
327 * Make sure that a later shadowing assignment will enter
328 * PurgeProtoChain and invalidate this entry, bug 479198.
329 *
330 * This is thread-safe even though obj is not locked. Only the
331 * DELEGATE bit of obj->classword can change at runtime, given that
332 * obj is native; and the bit is only set, never cleared. And on
333 * platforms where another CPU can fail to see this write, it's OK
334 * because the property cache and JIT cache are thread-local.
335 */
336 obj->setDelegate();
337 }
338 }
339
340 entry = &cache->table[khash];
341 PCMETER(PCVAL_IS_NULL(entry->vword) || cache->recycles++);
342 entry->kpc = pc;
343 entry->kshape = kshape;
344 entry->vcap = PCVCAP_MAKE(vshape, scopeIndex, protoIndex);
345 entry->vword = vword;
346
347 cache->empty = JS_FALSE;
348 PCMETER(cache->fills++);
349
350 /*
351 * The modfills counter is not exact. It increases if a getter or setter
352 * recurses into the interpreter.
353 */
354 PCMETER(entry == cache->pctestentry || cache->modfills++);
355 PCMETER(cache->pctestentry = NULL);
356 return entry;
357 }
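/*
 * Illustrative JS-level sketch of the "plain old method" branding case
 * handled above (the constructor and method names are hypothetical):
 *
 *   function Point() {}
 *   Point.prototype.norm = function () { return 0; };  // function-valued,
 *                                                      // stub getter
 *   (new Point()).norm();  // a JOF_CALLOP fetch of norm from Point.prototype
 *                          // fills the cache and brands the prototype's scope
 */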
358
359 JS_REQUIRES_STACK JSAtom *
360 js_FullTestPropertyCache(JSContext *cx, jsbytecode *pc,
361 JSObject **objp, JSObject **pobjp,
362 JSPropCacheEntry **entryp)
363 {
364 JSOp op;
365 const JSCodeSpec *cs;
366 ptrdiff_t pcoff;
367 JSAtom *atom;
368 JSObject *obj, *pobj, *tmp;
369 JSPropCacheEntry *entry;
370 uint32 vcap;
371
372 JS_ASSERT(uintN((cx->fp->imacpc ? cx->fp->imacpc : pc) - cx->fp->script->code)
373 < cx->fp->script->length);
374
375 op = js_GetOpcode(cx, cx->fp->script, pc);
376 cs = &js_CodeSpec[op];
377 if (op == JSOP_LENGTH) {
378 atom = cx->runtime->atomState.lengthAtom;
379 } else {
380 pcoff = (JOF_TYPE(cs->format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
381 GET_ATOM_FROM_BYTECODE(cx->fp->script, pc, pcoff, atom);
382 }
383
384 obj = *objp;
385 JS_ASSERT(OBJ_IS_NATIVE(obj));
386 entry = &JS_PROPERTY_CACHE(cx).table[PROPERTY_CACHE_HASH_ATOM(atom, obj)];
387 *entryp = entry;
388 vcap = entry->vcap;
389
390 if (entry->kpc != (jsbytecode *) atom) {
391 PCMETER(JS_PROPERTY_CACHE(cx).idmisses++);
392
393 #ifdef DEBUG_notme
394 entry = &JS_PROPERTY_CACHE(cx).table[PROPERTY_CACHE_HASH_PC(pc, OBJ_SHAPE(obj))];
395 fprintf(stderr,
396 "id miss for %s from %s:%u"
397 " (pc %u, kpc %u, kshape %u, shape %u)\n",
398 js_AtomToPrintableString(cx, atom),
399 cx->fp->script->filename,
400 js_PCToLineNumber(cx, cx->fp->script, pc),
401 pc - cx->fp->script->code,
402 entry->kpc - cx->fp->script->code,
403 entry->kshape,
404 OBJ_SHAPE(obj));
405 js_Disassemble1(cx, cx->fp->script, pc,
406 pc - cx->fp->script->code,
407 JS_FALSE, stderr);
408 #endif
409
410 return atom;
411 }
412
413 if (entry->kshape != (jsuword) obj) {
414 PCMETER(JS_PROPERTY_CACHE(cx).komisses++);
415 return atom;
416 }
417
418 pobj = obj;
419
420 if (JOF_MODE(cs->format) == JOF_NAME) {
421 while (vcap & (PCVCAP_SCOPEMASK << PCVCAP_PROTOBITS)) {
422 tmp = OBJ_GET_PARENT(cx, pobj);
423 if (!tmp || !OBJ_IS_NATIVE(tmp))
424 break;
425 pobj = tmp;
426 vcap -= PCVCAP_PROTOSIZE;
427 }
428
429 *objp = pobj;
430 }
431
432 while (vcap & PCVCAP_PROTOMASK) {
433 tmp = OBJ_GET_PROTO(cx, pobj);
434 if (!tmp || !OBJ_IS_NATIVE(tmp))
435 break;
436 pobj = tmp;
437 --vcap;
438 }
439
440 if (JS_LOCK_OBJ_IF_SHAPE(cx, pobj, PCVCAP_SHAPE(vcap))) {
441 #ifdef DEBUG
442 jsid id = ATOM_TO_JSID(atom);
443
444 id = js_CheckForStringIndex(id);
445 JS_ASSERT(OBJ_SCOPE(pobj)->lookup(id));
446 JS_ASSERT_IF(OBJ_SCOPE(pobj)->object, OBJ_SCOPE(pobj)->object == pobj);
447 #endif
448 *pobjp = pobj;
449 return NULL;
450 }
451
452 PCMETER(JS_PROPERTY_CACHE(cx).vcmisses++);
453 return atom;
454 }
455
456 #ifdef DEBUG
457 #define ASSERT_CACHE_IS_EMPTY(cache) \
458 JS_BEGIN_MACRO \
459 JSPropertyCache *cache_ = (cache); \
460 uintN i_; \
461 JS_ASSERT(cache_->empty); \
462 for (i_ = 0; i_ < PROPERTY_CACHE_SIZE; i_++) { \
463 JS_ASSERT(!cache_->table[i_].kpc); \
464 JS_ASSERT(!cache_->table[i_].kshape); \
465 JS_ASSERT(!cache_->table[i_].vcap); \
466 JS_ASSERT(!cache_->table[i_].vword); \
467 } \
468 JS_END_MACRO
469 #else
470 #define ASSERT_CACHE_IS_EMPTY(cache) ((void)0)
471 #endif
472
473 JS_STATIC_ASSERT(PCVAL_NULL == 0);
474
475 void
476 js_PurgePropertyCache(JSContext *cx, JSPropertyCache *cache)
477 {
478 if (cache->empty) {
479 ASSERT_CACHE_IS_EMPTY(cache);
480 return;
481 }
482
483 memset(cache->table, 0, sizeof cache->table);
484 cache->empty = JS_TRUE;
485
486 #ifdef JS_PROPERTY_CACHE_METERING
487 { static FILE *fp;
488 if (!fp)
489 fp = fopen("/tmp/propcache.stats", "w");
490 if (fp) {
491 fputs("Property cache stats for ", fp);
492 #ifdef JS_THREADSAFE
493 fprintf(fp, "thread %lu, ", (unsigned long) cx->thread->id);
494 #endif
495 fprintf(fp, "GC %u\n", cx->runtime->gcNumber);
496
497 # define P(mem) fprintf(fp, "%11s %10lu\n", #mem, (unsigned long)cache->mem)
498 P(fills);
499 P(nofills);
500 P(rofills);
501 P(disfills);
502 P(oddfills);
503 P(modfills);
504 P(brandfills);
505 P(noprotos);
506 P(longchains);
507 P(recycles);
508 P(pcrecycles);
509 P(tests);
510 P(pchits);
511 P(protopchits);
512 P(initests);
513 P(inipchits);
514 P(inipcmisses);
515 P(settests);
516 P(addpchits);
517 P(setpchits);
518 P(setpcmisses);
519 P(slotchanges);
520 P(setmisses);
521 P(idmisses);
522 P(komisses);
523 P(vcmisses);
524 P(misses);
525 P(flushes);
526 P(pcpurges);
527 # undef P
528
529 fprintf(fp, "hit rates: pc %g%% (proto %g%%), set %g%%, ini %g%%, full %g%%\n",
530 (100. * cache->pchits) / cache->tests,
531 (100. * cache->protopchits) / cache->tests,
532 (100. * (cache->addpchits + cache->setpchits))
533 / cache->settests,
534 (100. * cache->inipchits) / cache->initests,
535 (100. * (cache->tests - cache->misses)) / cache->tests);
536 fflush(fp);
537 }
538 }
539 #endif
540
541 PCMETER(cache->flushes++);
542 }
543
544 void
545 js_PurgePropertyCacheForScript(JSContext *cx, JSScript *script)
546 {
547 JSPropertyCache *cache;
548 JSPropCacheEntry *entry;
549
550 cache = &JS_PROPERTY_CACHE(cx);
551 for (entry = cache->table; entry < cache->table + PROPERTY_CACHE_SIZE;
552 entry++) {
553 if (JS_UPTRDIFF(entry->kpc, script->code) < script->length) {
554 entry->kpc = NULL;
555 entry->kshape = 0;
556 #ifdef DEBUG
557 entry->vcap = entry->vword = 0;
558 #endif
559 }
560 }
561 }
562
563 /*
564 * Check if the current arena has enough space to fit nslots after sp and, if
565 * so, reserve the necessary space.
566 */
567 static JS_REQUIRES_STACK JSBool
568 AllocateAfterSP(JSContext *cx, jsval *sp, uintN nslots)
569 {
570 uintN surplus;
571 jsval *sp2;
572
573 JS_ASSERT((jsval *) cx->stackPool.current->base <= sp);
574 JS_ASSERT(sp <= (jsval *) cx->stackPool.current->avail);
575 surplus = (jsval *) cx->stackPool.current->avail - sp;
576 if (nslots <= surplus)
577 return JS_TRUE;
578
579 /*
580 * No room before current->avail, check if the arena has enough space to
581 * fit the missing slots before the limit.
582 */
583 if (nslots > (size_t) ((jsval *) cx->stackPool.current->limit - sp))
584 return JS_FALSE;
585
586 JS_ARENA_ALLOCATE_CAST(sp2, jsval *, &cx->stackPool,
587 (nslots - surplus) * sizeof(jsval));
588 JS_ASSERT(sp2 == sp + surplus);
589 return JS_TRUE;
590 }
591
592 JS_STATIC_INTERPRET JS_REQUIRES_STACK jsval *
593 js_AllocRawStack(JSContext *cx, uintN nslots, void **markp)
594 {
595 jsval *sp;
596
597 JS_ASSERT(nslots != 0);
598 JS_ASSERT_NOT_ON_TRACE(cx);
599
600 if (!cx->stackPool.first.next) {
601 int64 *timestamp;
602
603 JS_ARENA_ALLOCATE_CAST(timestamp, int64 *,
604 &cx->stackPool, sizeof *timestamp);
605 if (!timestamp) {
606 js_ReportOutOfScriptQuota(cx);
607 return NULL;
608 }
609 *timestamp = JS_Now();
610 }
611
612 if (markp)
613 *markp = JS_ARENA_MARK(&cx->stackPool);
614 JS_ARENA_ALLOCATE_CAST(sp, jsval *, &cx->stackPool, nslots * sizeof(jsval));
615 if (!sp)
616 js_ReportOutOfScriptQuota(cx);
617 return sp;
618 }
619
620 JS_STATIC_INTERPRET JS_REQUIRES_STACK void
621 js_FreeRawStack(JSContext *cx, void *mark)
622 {
623 JS_ARENA_RELEASE(&cx->stackPool, mark);
624 }
625
626 JS_REQUIRES_STACK JS_FRIEND_API(jsval *)
627 js_AllocStack(JSContext *cx, uintN nslots, void **markp)
628 {
629 jsval *sp;
630 JSArena *a;
631 JSStackHeader *sh;
632
633 /* Callers don't check for zero nslots: we do to avoid empty segments. */
634 if (nslots == 0) {
635 *markp = NULL;
636 return (jsval *) JS_ARENA_MARK(&cx->stackPool);
637 }
638
639 /* Allocate 2 extra slots for the stack segment header we'll likely need. */
640 sp = js_AllocRawStack(cx, 2 + nslots, markp);
641 if (!sp)
642 return NULL;
643
644 /* Try to avoid another header if we can piggyback on the last segment. */
645 a = cx->stackPool.current;
646 sh = cx->stackHeaders;
647 if (sh && JS_STACK_SEGMENT(sh) + sh->nslots == sp) {
648 /* Extend the last stack segment, give back the 2 header slots. */
649 sh->nslots += nslots;
650 a->avail -= 2 * sizeof(jsval);
651 } else {
652 /*
653 * Need a new stack segment, so allocate and push a stack segment
654 * header from the 2 extra slots.
655 */
656 sh = (JSStackHeader *)sp;
657 sh->nslots = nslots;
658 sh->down = cx->stackHeaders;
659 cx->stackHeaders = sh;
660 sp += 2;
661 }
662
663 /*
664 * Store JSVAL_NULL using memset, to let compilers optimize as they see
665 * fit, in case a caller allocates and pushes GC-things one by one, which
666 * could nest a last-ditch GC that will scan this segment.
667 */
668 memset(sp, 0, nslots * sizeof(jsval));
669 return sp;
670 }
671
672 JS_REQUIRES_STACK JS_FRIEND_API(void)
673 js_FreeStack(JSContext *cx, void *mark)
674 {
675 JSStackHeader *sh;
676 jsuword slotdiff;
677
678 /* Check for zero nslots allocation special case. */
679 if (!mark)
680 return;
681
682 /* We can assert because js_FreeStack always balances js_AllocStack. */
683 sh = cx->stackHeaders;
684 JS_ASSERT(sh);
685
686 /* If mark is in the current segment, reduce sh->nslots, else pop sh. */
687 slotdiff = JS_UPTRDIFF(mark, JS_STACK_SEGMENT(sh)) / sizeof(jsval);
688 if (slotdiff < (jsuword)sh->nslots)
689 sh->nslots = slotdiff;
690 else
691 cx->stackHeaders = sh->down;
692
693 /* Release the stackPool space allocated since mark was set. */
694 JS_ARENA_RELEASE(&cx->stackPool, mark);
695 }
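/*
 * Illustrative usage sketch of the js_AllocStack/js_FreeStack pair (a
 * minimal version of what js_InternalInvoke does further below; the two
 * extra slots hold the callee and |this|):
 *
 *   void *mark;
 *   jsval *invokevp = js_AllocStack(cx, 2 + argc, &mark);
 *   if (!invokevp)
 *       return JS_FALSE;
 *   invokevp[0] = fval;                    // callee
 *   invokevp[1] = OBJECT_TO_JSVAL(obj);    // |this|
 *   memcpy(invokevp + 2, argv, argc * sizeof *argv);
 *   ok = js_Invoke(cx, argc, invokevp, flags);
 *   js_FreeStack(cx, mark);
 */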
696
697 JSObject *
698 js_GetScopeChain(JSContext *cx, JSStackFrame *fp)
699 {
700 JSObject *sharedBlock = fp->blockChain;
701
702 if (!sharedBlock) {
703 /*
704 * Don't force a call object for a lightweight function call, but do
705 * insist that there is a call object for a heavyweight function call.
706 */
707 JS_ASSERT(!fp->fun ||
708 !(fp->fun->flags & JSFUN_HEAVYWEIGHT) ||
709 fp->callobj);
710 JS_ASSERT(fp->scopeChain);
711 return fp->scopeChain;
712 }
713
714 /* We don't handle cloning blocks on trace. */
715 js_LeaveTrace(cx);
716
717 /*
718 * We have one or more lexical scopes to reflect into fp->scopeChain, so
719 * make sure there's a call object at the current head of the scope chain,
720 * if this frame is a call frame.
721 *
722 * Also, identify the innermost compiler-allocated block we needn't clone.
723 */
724 JSObject *limitBlock, *limitClone;
725 if (fp->fun && !fp->callobj) {
726 JS_ASSERT(OBJ_GET_CLASS(cx, fp->scopeChain) != &js_BlockClass ||
727 fp->scopeChain->getPrivate() != fp);
728 if (!js_GetCallObject(cx, fp))
729 return NULL;
730
731 /* We know we must clone everything on blockChain. */
732 limitBlock = limitClone = NULL;
733 } else {
734 /*
735 * scopeChain includes all blocks whose static scope we're within that
736 * have already been cloned. Find the innermost such block. Its
737 * prototype should appear on blockChain; we'll clone blockChain up
738 * to, but not including, that prototype.
739 */
740 limitClone = fp->scopeChain;
741 while (OBJ_GET_CLASS(cx, limitClone) == &js_WithClass)
742 limitClone = OBJ_GET_PARENT(cx, limitClone);
743 JS_ASSERT(limitClone);
744
745 /*
746 * It may seem like we don't know enough about limitClone to be able
747 * to just grab its prototype as we do here, but it's actually okay.
748 *
749 * If limitClone is a block object belonging to this frame, then its
750 * prototype is the innermost entry in blockChain that we have already
751 * cloned, and is thus the place to stop when we clone below.
752 *
753 * Otherwise, there are no blocks for this frame on scopeChain, and we
754 * need to clone the whole blockChain. In this case, limitBlock can
755 * point to any object known not to be on blockChain, since we simply
756 * loop until we hit limitBlock or NULL. If limitClone is a block, it
757 * isn't a block from this function, since blocks can't be nested
758 * within themselves on scopeChain (recursion is dynamic nesting, not
759 * static nesting). If limitClone isn't a block, its prototype won't
760 * be a block either. So we can just grab limitClone's prototype here
761 * regardless of its type or which frame it belongs to.
762 */
763 limitBlock = OBJ_GET_PROTO(cx, limitClone);
764
765 /* If the innermost block has already been cloned, we are done. */
766 if (limitBlock == sharedBlock)
767 return fp->scopeChain;
768 }
769
770 /*
771 * Special-case cloning the innermost block; this doesn't have enough in
772 * common with subsequent steps to include in the loop.
773 *
774 * js_CloneBlockObject leaves the clone's parent slot uninitialized. We
775 * populate it below.
776 */
777 JSObject *innermostNewChild = js_CloneBlockObject(cx, sharedBlock, fp);
778 if (!innermostNewChild)
779 return NULL;
780 JSAutoTempValueRooter tvr(cx, innermostNewChild);
781
782 /*
783 * Clone our way towards outer scopes until we reach the innermost
784 * enclosing function, or the innermost block we've already cloned.
785 */
786 JSObject *newChild = innermostNewChild;
787 for (;;) {
788 JS_ASSERT(OBJ_GET_PROTO(cx, newChild) == sharedBlock);
789 sharedBlock = OBJ_GET_PARENT(cx, sharedBlock);
790
791 /* Sometimes limitBlock will be NULL, so check that first. */
792 if (sharedBlock == limitBlock || !sharedBlock)
793 break;
794
795 /* As in the call above, we don't know the real parent yet. */
796 JSObject *clone
797 = js_CloneBlockObject(cx, sharedBlock, fp);
798 if (!clone)
799 return NULL;
800
801 /*
802 * Avoid OBJ_SET_PARENT overhead as newChild cannot escape to
803 * other threads.
804 */
805 STOBJ_SET_PARENT(newChild, clone);
806 newChild = clone;
807 }
808 STOBJ_SET_PARENT(newChild, fp->scopeChain);
809
810
811 /*
812 * If we found a limit block belonging to this frame, then we should have
813 * found it in blockChain.
814 */
815 JS_ASSERT_IF(limitBlock &&
816 OBJ_GET_CLASS(cx, limitBlock) == &js_BlockClass &&
817 limitClone->getPrivate() == fp,
818 sharedBlock);
819
820 /* Place our newly cloned blocks at the head of the scope chain. */
821 fp->scopeChain = innermostNewChild;
822 return fp->scopeChain;
823 }
824
825 JSBool
826 js_GetPrimitiveThis(JSContext *cx, jsval *vp, JSClass *clasp, jsval *thisvp)
827 {
828 jsval v;
829 JSObject *obj;
830
831 v = vp[1];
832 if (JSVAL_IS_OBJECT(v)) {
833 obj = JS_THIS_OBJECT(cx, vp);
834 if (!JS_InstanceOf(cx, obj, clasp, vp + 2))
835 return JS_FALSE;
836 v = obj->fslots[JSSLOT_PRIMITIVE_THIS];
837 }
838 *thisvp = v;
839 return JS_TRUE;
840 }
841
842 /* Some objects (e.g., With) delegate 'this' to another object. */
843 static inline JSObject *
844 CallThisObjectHook(JSContext *cx, JSObject *obj, jsval *argv)
845 {
846 JSObject *thisp = obj->thisObject(cx);
847 if (!thisp)
848 return NULL;
849 argv[-1] = OBJECT_TO_JSVAL(thisp);
850 return thisp;
851 }
852
853 /*
854 * ECMA requires "the global object", but in embeddings such as the browser,
855 * which have multiple top-level objects (windows, frames, etc. in the DOM),
856 * we prefer fun's parent. An example that causes this code to run:
857 *
858 * // in window w1
859 * function f() { return this }
860 * function g() { return f }
861 *
862 * // in window w2
863 * var h = w1.g()
864 * alert(h() == w1)
865 *
866 * The alert should display "true".
867 */
868 JS_STATIC_INTERPRET JSObject *
869 js_ComputeGlobalThis(JSContext *cx, JSBool lazy, jsval *argv)
870 {
871 JSObject *thisp;
872
873 if (JSVAL_IS_PRIMITIVE(argv[-2]) ||
874 !OBJ_GET_PARENT(cx, JSVAL_TO_OBJECT(argv[-2]))) {
875 thisp = cx->globalObject;
876 } else {
877 JSStackFrame *fp;
878 jsid id;
879 jsval v;
880 uintN attrs;
881 JSBool ok;
882 JSObject *parent;
883
884 /*
885 * Walk up the parent chain, first checking that the running script
886 * has access to the callee's parent object. Note that if lazy, the
887 * running script whose principals we want to check is the script
888 * associated with fp->down, not with fp.
889 *
890 * FIXME: 417851 -- this access check should not be required, as it
891 * imposes a performance penalty on all js_ComputeGlobalThis calls,
892 * and it represents a maintenance hazard.
893 */
894 fp = js_GetTopStackFrame(cx); /* quell GCC overwarning */
895 if (lazy) {
896 JS_ASSERT(fp->argv == argv);
897 fp->dormantNext = cx->dormantFrameChain;
898 cx->dormantFrameChain = fp;
899 cx->fp = fp->down;
900 fp->down = NULL;
901 }
902 thisp = JSVAL_TO_OBJECT(argv[-2]);
903 id = ATOM_TO_JSID(cx->runtime->atomState.parentAtom);
904
905 ok = thisp->checkAccess(cx, id, JSACC_PARENT, &v, &attrs);
906 if (lazy) {
907 cx->dormantFrameChain = fp->dormantNext;
908 fp->dormantNext = NULL;
909 fp->down = cx->fp;
910 cx->fp = fp;
911 }
912 if (!ok)
913 return NULL;
914
915 thisp = JSVAL_IS_VOID(v)
916 ? OBJ_GET_PARENT(cx, thisp)
917 : JSVAL_TO_OBJECT(v);
918 while ((parent = OBJ_GET_PARENT(cx, thisp)) != NULL)
919 thisp = parent;
920 }
921
922 return CallThisObjectHook(cx, thisp, argv);
923 }
924
925 static JSObject *
926 ComputeThis(JSContext *cx, JSBool lazy, jsval *argv)
927 {
928 JSObject *thisp;
929
930 JS_ASSERT(!JSVAL_IS_NULL(argv[-1]));
931 if (!JSVAL_IS_OBJECT(argv[-1])) {
932 if (!js_PrimitiveToObject(cx, &argv[-1]))
933 return NULL;
934 thisp = JSVAL_TO_OBJECT(argv[-1]);
935 return thisp;
936 }
937
938 thisp = JSVAL_TO_OBJECT(argv[-1]);
939 if (OBJ_GET_CLASS(cx, thisp) == &js_CallClass || OBJ_GET_CLASS(cx, thisp) == &js_BlockClass)
940 return js_ComputeGlobalThis(cx, lazy, argv);
941
942 return CallThisObjectHook(cx, thisp, argv);
943 }
944
945 JSObject *
946 js_ComputeThis(JSContext *cx, JSBool lazy, jsval *argv)
947 {
948 JS_ASSERT(argv[-1] != JSVAL_HOLE); // check for SynthesizeFrame poisoning
949 if (JSVAL_IS_NULL(argv[-1]))
950 return js_ComputeGlobalThis(cx, lazy, argv);
951 return ComputeThis(cx, lazy, argv);
952 }
953
954 #if JS_HAS_NO_SUCH_METHOD
955
956 const uint32 JSSLOT_FOUND_FUNCTION = JSSLOT_PRIVATE;
957 const uint32 JSSLOT_SAVED_ID = JSSLOT_PRIVATE + 1;
958
959 JSClass js_NoSuchMethodClass = {
960 "NoSuchMethod",
961 JSCLASS_HAS_RESERVED_SLOTS(2) | JSCLASS_IS_ANONYMOUS,
962 JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_PropertyStub,
963 JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
964 NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL
965 };
966
967 /*
968 * When JSOP_CALLPROP or JSOP_CALLELEM does not find the method property of
969 * the base object, we search for the __noSuchMethod__ method in the base.
970 * If it exists, we store the method and the property's id into an object of
971 * the NoSuchMethod class and store this object into the callee's stack slot.
972 * Later, js_Invoke will recognise such an object and transfer control to
973 * NoSuchMethod, which invokes the method like:
974 *
975 * this.__noSuchMethod__(id, args)
976 *
977 * where id is the name of the method that this invocation attempted to
978 * call by name, and args is an Array containing this invocation's actual
979 * parameters.
980 */
981 JS_STATIC_INTERPRET JSBool
982 js_OnUnknownMethod(JSContext *cx, jsval *vp)
983 {
984 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
985
986 JSObject *obj = JSVAL_TO_OBJECT(vp[1]);
987 jsid id = ATOM_TO_JSID(cx->runtime->atomState.noSuchMethodAtom);
988 JSAutoTempValueRooter tvr(cx, JSVAL_NULL);
989 if (!js_GetMethod(cx, obj, id, false, tvr.addr()))
990 return false;
991 if (JSVAL_IS_PRIMITIVE(tvr.value())) {
992 vp[0] = tvr.value();
993 } else {
994 #if JS_HAS_XML_SUPPORT
995 /* Extract the function name from function::name qname. */
996 if (!JSVAL_IS_PRIMITIVE(vp[0])) {
997 obj = JSVAL_TO_OBJECT(vp[0]);
998 if (!js_IsFunctionQName(cx, obj, &id))
999 return false;
1000 if (id != 0)
1001 vp[0] = ID_TO_VALUE(id);
1002 }
1003 #endif
1004 obj = js_NewObjectWithGivenProto(cx, &js_NoSuchMethodClass,
1005 NULL, NULL);
1006 if (!obj)
1007 return false;
1008 obj->fslots[JSSLOT_FOUND_FUNCTION] = tvr.value();
1009 obj->fslots[JSSLOT_SAVED_ID] = vp[0];
1010 vp[0] = OBJECT_TO_JSVAL(obj);
1011 }
1012 return true;
1013 }
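/*
 * Illustrative script-level sketch of the __noSuchMethod__ protocol
 * implemented above (the object and method names are hypothetical):
 *
 *   var logger = {
 *     __noSuchMethod__: function (id, args) {
 *       return id + " called with " + args.length + " arguments";
 *     }
 *   };
 *   logger.frob(1, 2, 3);  // "frob called with 3 arguments"
 */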
1014
1015 static JS_REQUIRES_STACK JSBool
1016 NoSuchMethod(JSContext *cx, uintN argc, jsval *vp, uint32 flags)
1017 {
1018 jsval *invokevp;
1019 void *mark;
1020 JSBool ok;
1021 JSObject *obj, *argsobj;
1022
1023 invokevp = js_AllocStack(cx, 2 + 2, &mark);
1024 if (!invokevp)
1025 return JS_FALSE;
1026
1027 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[0]));
1028 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
1029 obj = JSVAL_TO_OBJECT(vp[0]);
1030 JS_ASSERT(STOBJ_GET_CLASS(obj) == &js_NoSuchMethodClass);
1031
1032 invokevp[0] = obj->fslots[JSSLOT_FOUND_FUNCTION];
1033 invokevp[1] = vp[1];
1034 invokevp[2] = obj->fslots[JSSLOT_SAVED_ID];
1035 argsobj = js_NewArrayObject(cx, argc, vp + 2);
1036 if (!argsobj) {
1037 ok = JS_FALSE;
1038 } else {
1039 invokevp[3] = OBJECT_TO_JSVAL(argsobj);
1040 ok = (flags & JSINVOKE_CONSTRUCT)
1041 ? js_InvokeConstructor(cx, 2, JS_TRUE, invokevp)
1042 : js_Invoke(cx, 2, invokevp, flags);
1043 vp[0] = invokevp[0];
1044 }
1045 js_FreeStack(cx, mark);
1046 return ok;
1047 }
1048
1049 #endif /* JS_HAS_NO_SUCH_METHOD */
1050
1051 /*
1052 * We check if the function accepts a primitive value as |this|. For that we
1053 * use a table that maps value's tag into the corresponding function flag.
1054 */
1055 JS_STATIC_ASSERT(JSVAL_INT == 1);
1056 JS_STATIC_ASSERT(JSVAL_DOUBLE == 2);
1057 JS_STATIC_ASSERT(JSVAL_STRING == 4);
1058 JS_STATIC_ASSERT(JSVAL_SPECIAL == 6);
1059
1060 const uint16 js_PrimitiveTestFlags[] = {
1061 JSFUN_THISP_NUMBER, /* INT */
1062 JSFUN_THISP_NUMBER, /* DOUBLE */
1063 JSFUN_THISP_NUMBER, /* INT */
1064 JSFUN_THISP_STRING, /* STRING */
1065 JSFUN_THISP_NUMBER, /* INT */
1066 JSFUN_THISP_BOOLEAN, /* BOOLEAN */
1067 JSFUN_THISP_NUMBER /* INT */
1068 };
1069
1070 /*
1071 * Find a function reference and its 'this' object implicit first parameter
1072 * under argc arguments on cx's stack, and call the function. Push missing
1073 * required arguments, allocate declared local variables, and pop everything
1074 * when done. Then push the return value.
1075 */
1076 JS_REQUIRES_STACK JS_FRIEND_API(JSBool)
1077 js_Invoke(JSContext *cx, uintN argc, jsval *vp, uintN flags)
1078 {
1079 void *mark;
1080 JSStackFrame frame;
1081 jsval *sp, *argv, *newvp;
1082 jsval v;
1083 JSObject *funobj, *parent;
1084 JSBool ok;
1085 JSClass *clasp;
1086 const JSObjectOps *ops;
1087 JSNative native;
1088 JSFunction *fun;
1089 JSScript *script;
1090 uintN nslots, i;
1091 uint32 rootedArgsFlag;
1092 JSInterpreterHook hook;
1093 void *hookData;
1094
1095 JS_ASSERT(argc <= JS_ARGS_LENGTH_MAX);
1096
1097 /* [vp .. vp + 2 + argc) must belong to the last JS stack arena. */
1098 JS_ASSERT((jsval *) cx->stackPool.current->base <= vp);
1099 JS_ASSERT(vp + 2 + argc <= (jsval *) cx->stackPool.current->avail);
1100
1101 /* Mark the top of stack and load frequently-used registers. */
1102 mark = JS_ARENA_MARK(&cx->stackPool);
1103 MUST_FLOW_THROUGH("out2");
1104 v = *vp;
1105
1106 if (JSVAL_IS_PRIMITIVE(v))
1107 goto bad;
1108
1109 funobj = JSVAL_TO_OBJECT(v);
1110 parent = OBJ_GET_PARENT(cx, funobj);
1111 clasp = OBJ_GET_CLASS(cx, funobj);
1112 if (clasp != &js_FunctionClass) {
1113 #if JS_HAS_NO_SUCH_METHOD
1114 if (clasp == &js_NoSuchMethodClass) {
1115 ok = NoSuchMethod(cx, argc, vp, flags);
1116 goto out2;
1117 }
1118 #endif
1119
1120 /* Function is inlined, all other classes use object ops. */
1121 ops = funobj->map->ops;
1122
1123 /*
1124 * XXX this makes no sense -- why convert to function if clasp->call?
1125 * XXX better to call that hook without converting
1126 *
1127 * FIXME bug 408416: try converting to function, for API compatibility
1128 * if there is a call op defined.
1129 */
1130 if ((ops == &js_ObjectOps) ? clasp->call : ops->call) {
1131 ok = clasp->convert(cx, funobj, JSTYPE_FUNCTION, &v);
1132 if (!ok)
1133 goto out2;
1134
1135 if (VALUE_IS_FUNCTION(cx, v)) {
1136 /* Make vp refer to funobj to keep it available as argv[-2]. */
1137 *vp = v;
1138 funobj = JSVAL_TO_OBJECT(v);
1139 parent = OBJ_GET_PARENT(cx, funobj);
1140 goto have_fun;
1141 }
1142 }
1143 fun = NULL;
1144 script = NULL;
1145 nslots = 0;
1146
1147 /* Try a call or construct native object op. */
1148 if (flags & JSINVOKE_CONSTRUCT) {
1149 if (!JSVAL_IS_OBJECT(vp[1])) {
1150 ok = js_PrimitiveToObject(cx, &vp[1]);
1151 if (!ok)
1152 goto out2;
1153 }
1154 native = ops->construct;
1155 } else {
1156 native = ops->call;
1157 }
1158 if (!native)
1159 goto bad;
1160 } else {
1161 have_fun:
1162 /* Get private data and set derived locals from it. */
1163 fun = GET_FUNCTION_PRIVATE(cx, funobj);
1164 nslots = FUN_MINARGS(fun);
1165 nslots = (nslots > argc) ? nslots - argc : 0;
1166 if (FUN_INTERPRETED(fun)) {
1167 native = NULL;
1168 script = fun->u.i.script;
1169 JS_ASSERT(script);
1170 } else {
1171 native = fun->u.n.native;
1172 script = NULL;
1173 nslots += fun->u.n.extra;
1174 }
1175
1176 if (JSFUN_BOUND_METHOD_TEST(fun->flags)) {
1177 /* Handle bound method special case. */
1178 vp[1] = OBJECT_TO_JSVAL(parent);
1179 } else if (!JSVAL_IS_OBJECT(vp[1])) {
1180 JS_ASSERT(!(flags & JSINVOKE_CONSTRUCT));
1181 if (PRIMITIVE_THIS_TEST(fun, vp[1]))
1182 goto start_call;
1183 }
1184 }
1185
1186 if (flags & JSINVOKE_CONSTRUCT) {
1187 JS_ASSERT(!JSVAL_IS_PRIMITIVE(vp[1]));
1188 } else {
1189 /*
1190 * We must call js_ComputeThis in case we are not called from the
1191 * interpreter, where a prior bytecode has computed an appropriate
1192 * |this| already.
1193 *
1194 * But we need to compute |this| eagerly only for so-called "slow"
1195 * (i.e., not fast) native functions. Fast natives must use either
1196 * JS_THIS or JS_THIS_OBJECT, and scripted functions will go through
1197 * the appropriate this-computing bytecode, e.g., JSOP_THIS.
1198 */
1199 if (native && (!fun || !(fun->flags & JSFUN_FAST_NATIVE))) {
1200 if (!js_ComputeThis(cx, JS_FALSE, vp + 2)) {
1201 ok = JS_FALSE;
1202 goto out2;
1203 }
1204 flags |= JSFRAME_COMPUTED_THIS;
1205 }
1206 }
1207
1208 start_call:
1209 if (native && fun && (fun->flags & JSFUN_FAST_NATIVE)) {
1210 #ifdef DEBUG_NOT_THROWING
1211 JSBool alreadyThrowing = cx->throwing;
1212 #endif
1213 JS_ASSERT(nslots == 0);
1214 #if JS_HAS_LVALUE_RETURN
1215 /* Set by JS_SetCallReturnValue2, used to return reference types. */
1216 cx->rval2set = JS_FALSE;
1217 #endif
1218 ok = ((JSFastNative) native)(cx, argc, vp);
1219 JS_RUNTIME_METER(cx->runtime, nativeCalls);
1220 #ifdef DEBUG_NOT_THROWING
1221 if (ok && !alreadyThrowing)
1222 ASSERT_NOT_THROWING(cx);
1223 #endif
1224 goto out2;
1225 }
1226
1227 argv = vp + 2;
1228 sp = argv + argc;
1229
1230 rootedArgsFlag = JSFRAME_ROOTED_ARGV;
1231 if (nslots != 0) {
1232 /*
1233 * The extra slots required by the function are contiguous with the argument
1234 * slots. Thus, when the last stack pool arena does not have room to
1235 * fit nslots right after sp and AllocateAfterSP fails, we have to copy
1236 * [vp..vp+2+argc) slots and clear rootedArgsFlag to root the copy.
1237 */
1238 if (!AllocateAfterSP(cx, sp, nslots)) {
1239 rootedArgsFlag = 0;
1240 newvp = js_AllocRawStack(cx, 2 + argc + nslots, NULL);
1241 if (!newvp) {
1242 ok = JS_FALSE;
1243 goto out2;
1244 }
1245 memcpy(newvp, vp, (2 + argc) * sizeof(jsval));
1246 argv = newvp + 2;
1247 sp = argv + argc;
1248 }
1249
1250 /* Push void to initialize missing args. */
1251 i = nslots;
1252 do {
1253 *sp++ = JSVAL_VOID;
1254 } while (--i != 0);
1255 }
1256
1257 /* Allocate space for local variables and stack of interpreted function. */
1258 if (script && script->nslots != 0) {
1259 if (!AllocateAfterSP(cx, sp, script->nslots)) {
1260 /* NB: Discontinuity between argv and slots, stack slots. */
1261 sp = js_AllocRawStack(cx, script->nslots, NULL);
1262 if (!sp) {
1263 ok = JS_FALSE;
1264 goto out2;
1265 }
1266 }
1267
1268 /* Push void to initialize local variables. */
1269 for (jsval *end = sp + fun->u.i.nvars; sp != end; ++sp)
1270 *sp = JSVAL_VOID;
1271 }
1272
1273 /*
1274 * Initialize the frame.
1275 *
1276 * To set thisp we use an explicit cast and not JSVAL_TO_OBJECT, as vp[1]
1277 * can be a primitive value here for those native functions specified with
1278 * JSFUN_THISP_(NUMBER|STRING|BOOLEAN) flags.
1279 */
1280 frame.thisp = (JSObject *)vp[1];
1281 frame.varobj = NULL;
1282 frame.callobj = NULL;
1283 frame.argsobj = NULL;
1284 frame.script = script;
1285 frame.fun = fun;
1286 frame.argc = argc;
1287 frame.argv = argv;
1288
1289 /* Default return value for a constructor is the new object. */
1290 frame.rval = (flags & JSINVOKE_CONSTRUCT) ? vp[1] : JSVAL_VOID;
1291 frame.down = cx->fp;
1292 frame.annotation = NULL;
1293 frame.scopeChain = NULL; /* set below for real, after cx->fp is set */
1294 frame.blockChain = NULL;
1295 frame.regs = NULL;
1296 frame.imacpc = NULL;
1297 frame.slots = NULL;
1298 frame.sharpDepth = 0;
1299 frame.sharpArray = NULL;
1300 frame.flags = flags | rootedArgsFlag;
1301 frame.dormantNext = NULL;
1302 frame.displaySave = NULL;
1303
1304 MUST_FLOW_THROUGH("out");
1305 cx->fp = &frame;
1306
1307 /* Init these now in case we goto out before first hook call. */
1308 hook = cx->debugHooks->callHook;
1309 hookData = NULL;
1310
1311 if (native) {
1312 /* If native, use caller varobj and scopeChain for eval. */
1313 JS_ASSERT(!frame.varobj);
1314 JS_ASSERT(!frame.scopeChain);
1315 if (frame.down) {
1316 frame.varobj = frame.down->varobj;
1317 frame.scopeChain = frame.down->scopeChain;
1318 }
1319
1320 /* But ensure that we have a scope chain. */
1321 if (!frame.scopeChain)
1322 frame.scopeChain = parent;
1323 } else {
1324 /* Use parent scope so js_GetCallObject can find the right "Call". */
1325 frame.scopeChain = parent;
1326 if (JSFUN_HEAVYWEIGHT_TEST(fun->flags)) {
1327 /* Scope with a call object parented by the callee's parent. */
1328 if (!js_GetCallObject(cx, &frame)) {
1329 ok = JS_FALSE;
1330 goto out;
1331 }
1332 }
1333 frame.slots = sp - fun->u.i.nvars;
1334 }
1335
1336 /* Call the hook if present after we fully initialized the frame. */
1337 if (hook)
1338 hookData = hook(cx, &frame, JS_TRUE, 0, cx->debugHooks->callHookData);
1339
1340 #ifdef INCLUDE_MOZILLA_DTRACE
1341 /* DTrace function entry, non-inlines */
1342 if (JAVASCRIPT_FUNCTION_ENTRY_ENABLED())
1343 jsdtrace_function_entry(cx, &frame, fun);
1344 if (JAVASCRIPT_FUNCTION_INFO_ENABLED())
1345 jsdtrace_function_info(cx, &frame, frame.down, fun);
1346 if (JAVASCRIPT_FUNCTION_ARGS_ENABLED())
1347 jsdtrace_function_args(cx, &frame, fun, frame.argc, frame.argv);
1348 #endif
1349
1350 /* Call the function, either a native method or an interpreted script. */
1351 if (native) {
1352 #ifdef DEBUG_NOT_THROWING
1353 JSBool alreadyThrowing = cx->throwing;
1354 #endif
1355
1356 #if JS_HAS_LVALUE_RETURN
1357 /* Set by JS_SetCallReturnValue2, used to return reference types. */
1358 cx->rval2set = JS_FALSE;
1359 #endif
1360 ok = native(cx, frame.thisp, argc, frame.argv, &frame.rval);
1361 JS_RUNTIME_METER(cx->runtime, nativeCalls);
1362 #ifdef DEBUG_NOT_THROWING
1363 if (ok && !alreadyThrowing)
1364 ASSERT_NOT_THROWING(cx);
1365 #endif
1366 } else {
1367 JS_ASSERT(script);
1368 ok = js_Interpret(cx);
1369 }
1370
1371 #ifdef INCLUDE_MOZILLA_DTRACE
1372 /* DTrace function return, non-inlines */
1373 if (JAVASCRIPT_FUNCTION_RVAL_ENABLED())
1374 jsdtrace_function_rval(cx, &frame, fun, &frame.rval);
1375 if (JAVASCRIPT_FUNCTION_RETURN_ENABLED())
1376 jsdtrace_function_return(cx, &frame, fun);
1377 #endif
1378
1379 out:
1380 if (hookData) {
1381 hook = cx->debugHooks->callHook;
1382 if (hook)
1383 hook(cx, &frame, JS_FALSE, &ok, hookData);
1384 }
1385
1386 frame.putActivationObjects(cx);
1387
1388 *vp = frame.rval;
1389
1390 /* Restore cx->fp now that we're done releasing frame objects. */
1391 cx->fp = frame.down;
1392
1393 out2:
1394 /* Pop everything we may have allocated off the stack. */
1395 JS_ARENA_RELEASE(&cx->stackPool, mark);
1396 if (!ok)
1397 *vp = JSVAL_NULL;
1398 return ok;
1399
1400 bad:
1401 js_ReportIsNotFunction(cx, vp, flags & JSINVOKE_FUNFLAGS);
1402 ok = JS_FALSE;
1403 goto out2;
1404 }
1405
1406 JSBool
1407 js_InternalInvoke(JSContext *cx, JSObject *obj, jsval fval, uintN flags,
1408 uintN argc, jsval *argv, jsval *rval)
1409 {
1410 jsval *invokevp;
1411 void *mark;
1412 JSBool ok;
1413
1414 js_LeaveTrace(cx);
1415 invokevp = js_AllocStack(cx, 2 + argc, &mark);
1416 if (!invokevp)
1417 return JS_FALSE;
1418
1419 invokevp[0] = fval;
1420 invokevp[1] = OBJECT_TO_JSVAL(obj);
1421 memcpy(invokevp + 2, argv, argc * sizeof *argv);
1422
1423 ok = js_Invoke(cx, argc, invokevp, flags);
1424 if (ok) {
1425 /*
1426 * Store *rval in a scoped local root if a scope is open, else in
1427 * the lastInternalResult pigeon-hole GC root, solely so users of
1428 * js_InternalInvoke and its direct and indirect (js_ValueToString for
1429 * example) callers do not need to manage roots for local, temporary
1430 * references to such results.
1431 */
1432 *rval = *invokevp;
1433 if (JSVAL_IS_GCTHING(*rval) && *rval != JSVAL_NULL) {
1434 if (cx->localRootStack) {
1435 if (js_PushLocalRoot(cx, cx->localRootStack, *rval) < 0)
1436 ok = JS_FALSE;
1437 } else {
1438 cx->weakRoots.lastInternalResult = *rval;
1439 }
1440 }
1441 }
1442
1443 js_FreeStack(cx, mark);
1444 return ok;
1445 }
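/*
 * Illustrative embedding-level sketch: the classic JSAPI entry point
 * JS_CallFunctionValue is the usual route into this path (assumed here;
 * cx, obj and fval are whatever the embedding already holds):
 *
 *   jsval argv[1] = { INT_TO_JSVAL(42) };
 *   jsval rval;
 *   if (!JS_CallFunctionValue(cx, obj, fval, 1, argv, &rval))
 *       return JS_FALSE;
 */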
1446
1447 JSBool
1448 js_InternalGetOrSet(JSContext *cx, JSObject *obj, jsid id, jsval fval,
1449 JSAccessMode mode, uintN argc, jsval *argv, jsval *rval)
1450 {
1451 JSSecurityCallbacks *callbacks;
1452
1453 js_LeaveTrace(cx);
1454
1455 /*
1456 * js_InternalInvoke could result in another try to get or set the same id
1457 * again, see bug 355497.
1458 */
1459 JS_CHECK_RECURSION(cx, return JS_FALSE);
1460
1461 /*
1462 * Check general (not object-ops/class-specific) access from the running
1463 * script to obj.id only if id has a scripted getter or setter that we're
1464 * about to invoke. If we don't check this case, nothing else will -- no
1465 * other native code has the chance to check.
1466 *
1467 * Contrast this non-native (scripted) case with native getter and setter
1468 * accesses, where the native itself must do an access check, if security
1469 * policies require it. We make a checkAccess or checkObjectAccess call
1470 * back to the embedding program only in those cases where we're not going
1471 * to call an embedding-defined native function, getter, setter, or class
1472 * hook anyway. Where we do call such a native, there's no need for the
1473 * engine to impose a separate access check callback on all embeddings --
1474 * many embeddings have no security policy at all.
1475 */
1476 JS_ASSERT(mode == JSACC_READ || mode == JSACC_WRITE);
1477 callbacks = JS_GetSecurityCallbacks(cx);
1478 if (callbacks &&
1479 callbacks->checkObjectAccess &&
1480 VALUE_IS_FUNCTION(cx, fval) &&
1481 FUN_INTERPRETED(GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fval))) &&
1482 !callbacks->checkObjectAccess(cx, obj, ID_TO_VALUE(id), mode, &fval)) {
1483 return JS_FALSE;
1484 }
1485
1486 return js_InternalCall(cx, obj, fval, argc, argv, rval);
1487 }
1488
1489 JSBool
1490 js_Execute(JSContext *cx, JSObject *chain, JSScript *script,
1491 JSStackFrame *down, uintN flags, jsval *result)
1492 {
1493 JSInterpreterHook hook;
1494 void *hookData, *mark;
1495 JSStackFrame *oldfp, frame;
1496 JSObject *obj, *tmp;
1497 JSBool ok;
1498
1499 js_LeaveTrace(cx);
1500
1501 #ifdef INCLUDE_MOZILLA_DTRACE
1502 if (JAVASCRIPT_EXECUTE_START_ENABLED())
1503 jsdtrace_execute_start(script);
1504 #endif
1505
1506 hook = cx->debugHooks->executeHook;
1507 hookData = mark = NULL;
1508 oldfp = js_GetTopStackFrame(cx);
1509 frame.script = script;
1510 if (down) {
1511 /* Propagate arg state for eval and the debugger API. */
1512 frame.callobj = down->callobj;
1513 frame.argsobj = down->argsobj;
1514 frame.varobj = down->varobj;
1515 frame.fun = down->fun;
1516 frame.thisp = down->thisp;
1517 if (down->flags & JSFRAME_COMPUTED_THIS)
1518 flags |= JSFRAME_COMPUTED_THIS;
1519 frame.argc = down->argc;
1520 frame.argv = down->argv;
1521 frame.annotation = down->annotation;
1522 frame.sharpArray = down->sharpArray;
1523 JS_ASSERT(script->nfixed == 0);
1524 } else {
1525 frame.callobj = NULL;
1526 frame.argsobj = NULL;
1527 obj = chain;
1528 if (cx->options & JSOPTION_VAROBJFIX) {
1529 while ((tmp = OBJ_GET_PARENT(cx, obj)) != NULL)
1530 obj = tmp;
1531 }
1532 frame.varobj = obj;
1533 frame.fun = NULL;
1534 frame.thisp = chain;
1535 frame.argc = 0;
1536 frame.argv = NULL;
1537 frame.annotation = NULL;
1538 frame.sharpArray = NULL;
1539 }
1540
1541 frame.imacpc = NULL;
1542 if (script->nslots != 0) {
1543 frame.slots = js_AllocRawStack(cx, script->nslots, &mark);
1544 if (!frame.slots) {
1545 ok = JS_FALSE;
1546 goto out;
1547 }
1548 memset(frame.slots, 0, script->nfixed * sizeof(jsval));
1549 } else {
1550 frame.slots = NULL;
1551 }
1552
1553 frame.rval = JSVAL_VOID;
1554 frame.down = down;
1555 frame.scopeChain = chain;
1556 frame.regs = NULL;
1557 frame.sharpDepth = 0;
1558 frame.flags = flags;
1559 frame.dormantNext = NULL;
1560 frame.blockChain = NULL;
1561
1562 /*
1563 * Here we wrap the call to js_Interpret with code to (conditionally)
1564 * save and restore the old stack frame chain into a chain of 'dormant'
1565 * frame chains. Since we are replacing cx->fp, we were running into
1566 * the problem that if GC was called under this frame, some of the GC
1567 * things associated with the old frame chain (available here only in
1568 * the C variable 'oldfp') were not rooted and were being collected.
1569 *
1570 * So, now we preserve the links to these 'dormant' frame chains in cx
1571 * before calling js_Interpret and cleanup afterwards. The GC walks
1572 * these dormant chains and marks objects in the same way that it marks
1573 * objects in the primary cx->fp chain.
1574 */
1575 if (oldfp && oldfp != down) {
1576 JS_ASSERT(!oldfp->dormantNext);
1577 oldfp->dormantNext = cx->dormantFrameChain;
1578 cx->dormantFrameChain = oldfp;
1579 }
1580
1581 cx->fp = &frame;
1582 if (!down) {
1583 OBJ_TO_INNER_OBJECT(cx, chain);
1584 if (!chain)
1585 return JS_FALSE;
1586 frame.scopeChain = chain;
1587
1588 frame.thisp = frame.thisp->thisObject(cx);
1589 if (!frame.thisp) {
1590 ok = JS_FALSE;
1591 goto out2;
1592 }
1593 frame.flags |= JSFRAME_COMPUTED_THIS;
1594 }
1595
1596 if (hook) {
1597 hookData = hook(cx, &frame, JS_TRUE, 0,
1598 cx->debugHooks->executeHookData);
1599 }
1600
1601 ok = js_Interpret(cx);
1602 if (result)
1603 *result = frame.rval;
1604
1605 if (hookData) {
1606 hook = cx->debugHooks->executeHook;
1607 if (hook)
1608 hook(cx, &frame, JS_FALSE, &ok, hookData);
1609 }
1610
1611 out2:
1612 if (mark)
1613 js_FreeRawStack(cx, mark);
1614 cx->fp = oldfp;
1615
1616 if (oldfp && oldfp != down) {
1617 JS_ASSERT(cx->dormantFrameChain == oldfp);
1618 cx->dormantFrameChain = oldfp->dormantNext;
1619 oldfp->dormantNext = NULL;
1620 }
1621
1622 out:
1623 #ifdef INCLUDE_MOZILLA_DTRACE
1624 if (JAVASCRIPT_EXECUTE_DONE_ENABLED())
1625 jsdtrace_execute_done(script);
1626 #endif
1627 return ok;
1628 }
1629
1630 JSBool
1631 js_CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs,
1632 JSObject **objp, JSProperty **propp)
1633 {
1634 JSObject *obj2;
1635 JSProperty *prop;
1636 uintN oldAttrs, report;
1637 bool isFunction;
1638 jsval value;
1639 const char *type, *name;
1640
1641 /*
1642 * Both objp and propp must be either null or given. When given, *propp
1643 * must be null. This way we avoid an extra "if (propp) *propp = NULL" for
1644 * the common case of a non-existing property.
1645 */
1646 JS_ASSERT(!objp == !propp);
1647 JS_ASSERT_IF(propp, !*propp);
1648
1649 /* The JSPROP_INITIALIZER case below may generate a warning. Since we must
1650 * drop the property before reporting it, we insist on !propp to avoid
1651 * looking up the property again after the reporting is done.
1652 */
1653 JS_ASSERT_IF(attrs & JSPROP_INITIALIZER, attrs == JSPROP_INITIALIZER);
1654 JS_ASSERT_IF(attrs == JSPROP_INITIALIZER, !propp);
1655
1656 if (!obj->lookupProperty(cx, id, &obj2, &prop))
1657 return JS_FALSE;
1658 if (!prop)
1659 return JS_TRUE;
1660
1661 /* Use prop as a speedup hint to obj->getAttributes. */
1662 if (!obj2->getAttributes(cx, id, prop, &oldAttrs)) {
1663 obj2->dropProperty(cx, prop);
1664 return JS_FALSE;
1665 }
1666
1667 /*
1668 * If our caller doesn't want prop, drop it (we don't need it any longer).
1669 */
1670 if (!propp) {
1671 obj2->dropProperty(cx, prop);
1672 prop = NULL;
1673 } else {
1674 *objp = obj2;
1675 *propp = prop;
1676 }
1677
1678 if (attrs == JSPROP_INITIALIZER) {
1679 /* Allow the new object to override properties. */
1680 if (obj2 != obj)
1681 return JS_TRUE;
1682
1683 /* The property must be dropped already. */
1684 JS_ASSERT(!prop);
1685 report = JSREPORT_WARNING | JSREPORT_STRICT;
1686
1687 #ifdef __GNUC__
1688 isFunction = false; /* suppress bogus gcc warnings */
1689 #endif
1690 } else {
1691 /* We allow redeclaring some non-readonly properties. */
1692 if (((oldAttrs | attrs) & JSPROP_READONLY) == 0) {
1693 /* Allow redeclaration of variables and functions. */
1694 if (!(attrs & (JSPROP_GETTER | JSPROP_SETTER)))
1695 return JS_TRUE;
1696
1697 /*
1698 * Allow adding a getter only if a property already has a setter
1699 * but no getter and similarly for adding a setter. That is, we
1700 * allow only the following transitions:
1701 *
1702 * no-property --> getter --> getter + setter
1703 * no-property --> setter --> getter + setter
1704 */
1705 if ((~(oldAttrs ^ attrs) & (JSPROP_GETTER | JSPROP_SETTER)) == 0)
1706 return JS_TRUE;
1707
1708 /*
1709 * Allow redeclaration of an impermanent property (in which case
1710 * anyone could delete it and redefine it, willy-nilly).
1711 */
1712 if (!(oldAttrs & JSPROP_PERMANENT))
1713 return JS_TRUE;
1714 }
1715 if (prop)
1716 obj2->dropProperty(cx, prop);
1717
1718 report = JSREPORT_ERROR;
1719 isFunction = (oldAttrs & (JSPROP_GETTER | JSPROP_SETTER)) != 0;
1720 if (!isFunction) {
1721 if (!obj->getProperty(cx, id, &value))
1722 return JS_FALSE;
1723 isFunction = VALUE_IS_FUNCTION(cx, value);
1724 }
1725 }
1726
1727 type = (attrs == JSPROP_INITIALIZER)
1728 ? "property"
1729 : (oldAttrs & attrs & JSPROP_GETTER)
1730 ? js_getter_str
1731 : (oldAttrs & attrs & JSPROP_SETTER)
1732 ? js_setter_str
1733 : (oldAttrs & JSPROP_READONLY)
1734 ? js_const_str
1735 : isFunction
1736 ? js_function_str
1737 : js_var_str;
1738 name = js_ValueToPrintableString(cx, ID_TO_VALUE(id));
1739 if (!name)
1740 return JS_FALSE;
1741 return JS_ReportErrorFlagsAndNumber(cx, report,
1742 js_GetErrorMessage, NULL,
1743 JSMSG_REDECLARED_VAR,
1744 type, name);
1745 }
1746
1747 JSBool
1748 js_StrictlyEqual(JSContext *cx, jsval lval, jsval rval)
1749 {
1750 jsval ltag = JSVAL_TAG(lval), rtag = JSVAL_TAG(rval);
1751 jsdouble ld, rd;
1752
1753 if (ltag == rtag) {
1754 if (ltag == JSVAL_STRING) {
1755 JSString *lstr = JSVAL_TO_STRING(lval),
1756 *rstr = JSVAL_TO_STRING(rval);
1757 return js_EqualStrings(lstr, rstr);
1758 }
1759 if (ltag == JSVAL_DOUBLE) {
1760 ld = *JSVAL_TO_DOUBLE(lval);
1761 rd = *JSVAL_TO_DOUBLE(rval);
1762 return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
1763 }
1764 if (ltag == JSVAL_OBJECT &&
1765 lval != rval &&
1766 !JSVAL_IS_NULL(lval) &&
1767 !JSVAL_IS_NULL(rval)) {
1768 JSObject *lobj, *robj;
1769
1770 lobj = js_GetWrappedObject(cx, JSVAL_TO_OBJECT(lval));
1771 robj = js_GetWrappedObject(cx, JSVAL_TO_OBJECT(rval));
1772 lval = OBJECT_TO_JSVAL(lobj);
1773 rval = OBJECT_TO_JSVAL(robj);
1774 }
1775 return lval == rval;
1776 }
1777 if (ltag == JSVAL_DOUBLE && JSVAL_IS_INT(rval)) {
1778 ld = *JSVAL_TO_DOUBLE(lval);
1779 rd = JSVAL_TO_INT(rval);
1780 return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
1781 }
1782 if (JSVAL_IS_INT(lval) && rtag == JSVAL_DOUBLE) {
1783 ld = JSVAL_TO_INT(lval);
1784 rd = *JSVAL_TO_DOUBLE(rval);
1785 return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
1786 }
1787 return lval == rval;
1788 }
1789
1790 static inline bool
1791 IsNegativeZero(jsval v)
1792 {
1793 return JSVAL_IS_DOUBLE(v) && JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v));
1794 }
1795
1796 static inline bool
1797 IsNaN(jsval v)
1798 {
1799 return JSVAL_IS_DOUBLE(v) && JSDOUBLE_IS_NaN(*JSVAL_TO_DOUBLE(v));
1800 }
1801
1802 JSBool
1803 js_SameValue(jsval v1, jsval v2, JSContext *cx)
1804 {
1805 if (IsNegativeZero(v1))
1806 return IsNegativeZero(v2);
1807 if (IsNegativeZero(v2))
1808 return JS_FALSE;
1809 if (IsNaN(v1) && IsNaN(v2))
1810 return JS_TRUE;
1811 return js_StrictlyEqual(cx, v1, v2);
1812 }
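/*
 * Illustrative consequences of js_SameValue, per the checks above (v1 and
 * v2 denote jsvals boxing the given numbers):
 *
 *   v1 = -0,  v2 = +0   ->  JS_FALSE   (signed zeros are distinguished)
 *   v1 = NaN, v2 = NaN  ->  JS_TRUE    (unlike ===)
 *   everything else falls through to js_StrictlyEqual.
 */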
1813
1814 JS_REQUIRES_STACK JSBool
1815 js_InvokeConstructor(JSContext *cx, uintN argc, JSBool clampReturn, jsval *vp)
1816 {
1817 JSFunction *fun, *fun2;
1818 JSObject *obj, *obj2, *proto, *parent;
1819 jsval lval, rval;
1820 JSClass *clasp;
1821
1822 fun = NULL;
1823 obj2 = NULL;
1824 lval = *vp;
1825 if (!JSVAL_IS_OBJECT(lval) ||
1826 (obj2 = JSVAL_TO_OBJECT(lval)) == NULL ||
1827 /* XXX clean up to avoid special cases above ObjectOps layer */
1828 OBJ_GET_CLASS(cx, obj2) == &js_FunctionClass ||
1829 !obj2->map->ops->construct)
1830 {
1831 fun = js_ValueToFunction(cx, vp, JSV2F_CONSTRUCT);
1832 if (!fun)
1833 return JS_FALSE;
1834 }
1835
1836 clasp = &js_ObjectClass;
1837 if (!obj2) {
1838 proto = parent = NULL;
1839 fun = NULL;
1840 } else {
1841 /*
1842 * Get the constructor prototype object for this function.
1843 * Use the nominal 'this' parameter slot, vp[1], as a local
1844 * root to protect this prototype, in case it has no other
1845 * strong refs.
1846 */
1847 if (!obj2->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom),
1848 &vp[1])) {
1849 return JS_FALSE;
1850 }
1851 rval = vp[1];
1852 proto = JSVAL_IS_OBJECT(rval) ? JSVAL_TO_OBJECT(rval) : NULL;
1853 parent = OBJ_GET_PARENT(cx, obj2);
1854
1855 if (OBJ_GET_CLASS(cx, obj2) == &js_FunctionClass) {
1856 fun2 = GET_FUNCTION_PRIVATE(cx, obj2);
1857 if (!FUN_INTERPRETED(fun2) && fun2->u.n.clasp)
1858 clasp = fun2->u.n.clasp;
1859 }
1860 }
1861 obj = js_NewObject(cx, clasp, proto, parent);
1862 if (!obj)
1863 return JS_FALSE;
1864
1865 /* Now we have an object with a constructor method; call it. */
1866 vp[1] = OBJECT_TO_JSVAL(obj);
1867 if (!js_Invoke(cx, argc, vp, JSINVOKE_CONSTRUCT))
1868 return JS_FALSE;
1869
1870 /* Check the return value and if it's primitive, force it to be obj. */
1871 rval = *vp;
1872 if (clampReturn && JSVAL_IS_PRIMITIVE(rval)) {
1873 if (!fun) {
1874 /* a native [[Construct]] returning a primitive is an error */

1875 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
1876 JSMSG_BAD_NEW_RESULT,
1877 js_ValueToPrintableString(cx, rval));
1878 return JS_FALSE;
1879 }
1880 *vp = OBJECT_TO_JSVAL(obj);
1881 }
1882
1883 JS_RUNTIME_METER(cx->runtime, constructs);
1884 return JS_TRUE;
1885 }
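/*
 * For illustration, the clampReturn handling above corresponds to the usual
 * script-level [[Construct]] behaviour:
 *
 *   function F() { return 42; }        // primitive return is ignored, so
 *   (new F()) instanceof F             // ... this is true
 *
 *   function G() { return {x: 1}; }    // object return wins, so
 *   (new G()).x                        // ... this is 1
 *
 * whereas a native [[Construct]] hook that returns a primitive reports
 * JSMSG_BAD_NEW_RESULT instead.
 */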
1886
1887 JSBool
1888 js_InternNonIntElementId(JSContext *cx, JSObject *obj, jsval idval, jsid *idp)
1889 {
1890 JS_ASSERT(!JSVAL_IS_INT(idval));
1891
1892 #if JS_HAS_XML_SUPPORT
1893 if (!JSVAL_IS_PRIMITIVE(idval)) {
1894 if (OBJECT_IS_XML(cx, obj)) {
1895 *idp = OBJECT_JSVAL_TO_JSID(idval);
1896 return JS_TRUE;
1897 }
1898 if (!js_IsFunctionQName(cx, JSVAL_TO_OBJECT(idval), idp))
1899 return JS_FALSE;
1900 if (*idp != 0)
1901 return JS_TRUE;
1902 }
1903 #endif
1904
1905 return js_ValueToStringId(cx, idval, idp);
1906 }
1907
1908 /*
1909 * Enter the new with scope using an object at sp[-1] and associate the depth
1910 * of the with block with sp + stackIndex.
1911 */
1912 JS_STATIC_INTERPRET JS_REQUIRES_STACK JSBool
1913 js_EnterWith(JSContext *cx, jsint stackIndex)
1914 {
1915 JSStackFrame *fp;
1916 jsval *sp;
1917 JSObject *obj, *parent, *withobj;
1918
1919 fp = cx->fp;
1920 sp = fp->regs->sp;
1921 JS_ASSERT(stackIndex < 0);
1922 JS_ASSERT(StackBase(fp) <= sp + stackIndex);
1923
1924 if (!JSVAL_IS_PRIMITIVE(sp[-1])) {
1925 obj = JSVAL_TO_OBJECT(sp[-1]);
1926 } else {
1927 obj = js_ValueToNonNullObject(cx, sp[-1]);
1928 if (!obj)
1929 return JS_FALSE;
1930 sp[-1] = OBJECT_TO_JSVAL(obj);
1931 }
1932
1933 parent = js_GetScopeChain(cx, fp);
1934 if (!parent)
1935 return JS_FALSE;
1936
1937 OBJ_TO_INNER_OBJECT(cx, obj);
1938 if (!obj)
1939 return JS_FALSE;
1940
1941 withobj = js_NewWithObject(cx, obj, parent,
1942 sp + stackIndex - StackBase(fp));
1943 if (!withobj)
1944 return JS_FALSE;
1945
1946 fp->scopeChain = withobj;
1947 return JS_TRUE;
1948 }
1949
1950 JS_STATIC_INTERPRET JS_REQUIRES_STACK void
1951 js_LeaveWith(JSContext *cx)
1952 {
1953 JSObject *withobj;
1954
1955 withobj = cx->fp->scopeChain;
1956 JS_ASSERT(OBJ_GET_CLASS(cx, withobj) == &js_WithClass);
1957 JS_ASSERT(withobj->getPrivate() == cx->fp);
1958 JS_ASSERT(OBJ_BLOCK_DEPTH(cx, withobj) >= 0);
1959 cx->fp->scopeChain = OBJ_GET_PARENT(cx, withobj);
1960 withobj->setPrivate(NULL);
1961 }
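/*
 * For illustration, a fragment such as
 *
 *   with (o) { x = 1; }
 *
 * makes js_EnterWith push a With object wrapping (the inner object of) o onto
 * fp->scopeChain, tagged with the with block's stack depth, and js_LeaveWith
 * pops it again, clearing its private frame pointer so js_IsActiveWithOrBlock
 * no longer treats it as active.
 */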
1962
1963 JS_REQUIRES_STACK JSClass *
1964 js_IsActiveWithOrBlock(JSContext *cx, JSObject *obj, int stackDepth)
1965 {
1966 JSClass *clasp;
1967
1968 clasp = OBJ_GET_CLASS(cx, obj);
1969 if ((clasp == &js_WithClass || clasp == &js_BlockClass) &&
1970 obj->getPrivate() == cx->fp &&
1971 OBJ_BLOCK_DEPTH(cx, obj) >= stackDepth) {
1972 return clasp;
1973 }
1974 return NULL;
1975 }
1976
1977 /*
1978 * Unwind block and scope chains to match the given depth. On return the
1979 * function sets fp->regs->sp to StackBase(fp) + stackDepth.
1980 */
1981 JS_REQUIRES_STACK JSBool
1982 js_UnwindScope(JSContext *cx, JSStackFrame *fp, jsint stackDepth,
1983 JSBool normalUnwind)
1984 {
1985 JSObject *obj;
1986 JSClass *clasp;
1987
1988 JS_ASSERT(stackDepth >= 0);
1989 JS_ASSERT(StackBase(fp) + stackDepth <= fp->regs->sp);
1990
1991 for (obj = fp->blockChain; obj; obj = OBJ_GET_PARENT(cx, obj)) {
1992 JS_ASSERT(OBJ_GET_CLASS(cx, obj) == &js_BlockClass);
1993 if (OBJ_BLOCK_DEPTH(cx, obj) < stackDepth)
1994 break;
1995 }
1996 fp->blockChain = obj;
1997
1998 for (;;) {
1999 obj = fp->scopeChain;
2000 clasp = js_IsActiveWithOrBlock(cx, obj, stackDepth);
2001 if (!clasp)
2002 break;
2003 if (clasp == &js_BlockClass) {
2004 /* Don't fail until after we've updated all stacks. */
2005 normalUnwind &= js_PutBlockObject(cx, normalUnwind);
2006 } else {
2007 js_LeaveWith(cx);
2008 }
2009 }
2010
2011 fp->regs->sp = StackBase(fp) + stackDepth;
2012 return normalUnwind;
2013 }
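/*
 * For illustration, an exception thrown from inside nested scopes, e.g.
 *
 *   try { with (o) { throw e; } } catch (e2) { ... }
 *
 * reaches the error path with a With object still on fp->scopeChain; the
 * loops above flush cloned block objects via js_PutBlockObject and pop active
 * With objects via js_LeaveWith before fp->regs->sp is reset to
 * StackBase(fp) + stackDepth.
 */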
2014
2015 JS_STATIC_INTERPRET JSBool
2016 js_DoIncDec(JSContext *cx, const JSCodeSpec *cs, jsval *vp, jsval *vp2)
2017 {
2018 jsval v;
2019 jsdouble d;
2020
2021 v = *vp;
2022 if (JSVAL_IS_DOUBLE(v)) {
2023 d = *JSVAL_TO_DOUBLE(v);
2024 } else if (JSVAL_IS_INT(v)) {
2025 d = JSVAL_TO_INT(v);
2026 } else {
2027 d = js_ValueToNumber(cx, vp);
2028 if (JSVAL_IS_NULL(*vp))
2029 return JS_FALSE;
2030 JS_ASSERT(JSVAL_IS_NUMBER(*vp) || *vp == JSVAL_TRUE);
2031
2032 /* Store the result of converting v back in *vp for post-increments. */
2033 if ((cs->format & JOF_POST) &&
2034 *vp == JSVAL_TRUE
2035 && !js_NewNumberInRootedValue(cx, d, vp)) {
2036 return JS_FALSE;
2037 }
2038 }
2039
2040 (cs->format & JOF_INC) ? d++ : d--;
2041 if (!js_NewNumberInRootedValue(cx, d, vp2))
2042 return JS_FALSE;
2043
2044 if (!(cs->format & JOF_POST))
2045 *vp = *vp2;
2046 return JS_TRUE;
2047 }
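/*
 * For illustration, if *vp initially holds the string "4" then on success:
 *
 *   pre-increment  (no JOF_POST):  *vp == *vp2 == 5
 *   post-increment (JOF_POST set): *vp == 4 (converted operand), *vp2 == 5
 *
 * i.e. *vp2 always receives the new number, while the post forms keep the
 * numeric value of the original operand in *vp; the decrement forms produce
 * 3 analogously.
 */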
2048
2049 jsval&
2050 js_GetUpvar(JSContext *cx, uintN level, uintN cookie)
2051 {
2052 level -= UPVAR_FRAME_SKIP(cookie);
2053 JS_ASSERT(level < JS_DISPLAY_SIZE);
2054
2055 JSStackFrame *fp = cx->display[level];
2056 JS_ASSERT(fp->script);
2057
2058 uintN slot = UPVAR_FRAME_SLOT(cookie);
2059 jsval *vp;
2060
2061 if (!fp->fun) {
2062 vp = fp->slots + fp->script->nfixed;
2063 } else if (slot < fp->fun->nargs) {
2064 vp = fp->argv;
2065 } else if (slot == CALLEE_UPVAR_SLOT) {
2066 vp = &fp->argv[-2];
2067 slot = 0;
2068 } else {
2069 slot -= fp->fun->nargs;
2070 JS_ASSERT(slot < fp->script->nslots);
2071 vp = fp->slots;
2072 }
2073
2074 return vp[slot];
2075 }
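/*
 * For illustration, the upvar cookie packs a static-level skip count and a
 * slot number (see UPVAR_FRAME_SKIP/UPVAR_FRAME_SLOT): level minus the skip
 * indexes cx->display to find the defining frame, and the slot then selects
 * an argument (slot < fp->fun->nargs), the callee (CALLEE_UPVAR_SLOT, read
 * from fp->argv[-2]), or a local in fp->slots, exactly as the branches above
 * distinguish.
 */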
2076
2077 #ifdef DEBUG
2078
2079 JS_STATIC_INTERPRET JS_REQUIRES_STACK void
2080 js_TraceOpcode(JSContext *cx)
2081 {
2082 FILE *tracefp;
2083 JSStackFrame *fp;
2084 JSFrameRegs *regs;
2085 intN ndefs, n, nuses;
2086 jsval *siter;
2087 JSString *str;
2088 JSOp op;
2089
2090 tracefp = (FILE *) cx->tracefp;
2091 JS_ASSERT(tracefp);
2092 fp = cx->fp;
2093 regs = fp->regs;
2094
2095 /*
2096 * Operations in prologues don't produce interesting values, and
2097 * js_DecompileValueGenerator isn't set up to handle them anyway.
2098 */
2099 if (cx->tracePrevPc && regs->pc >= fp->script->main) {
2100 JSOp tracePrevOp = JSOp(*cx->tracePrevPc);
2101 ndefs = js_GetStackDefs(cx, &js_CodeSpec[tracePrevOp], tracePrevOp,
2102 fp->script, cx->tracePrevPc);
2103
2104 /*
2105 * If there aren't that many elements on the stack, then
2106 * we have probably entered a new frame, and printing output
2107 * would just be misleading.
2108 */
2109 if (ndefs != 0 &&
2110 ndefs < regs->sp - fp->slots) {
2111 for (n = -ndefs; n < 0; n++) {
2112 char *bytes = js_DecompileValueGenerator(cx, n, regs->sp[n],
2113 NULL);
2114 if (bytes) {
2115 fprintf(tracefp, "%s %s",
2116 (n == -ndefs) ? " output:" : ",",
2117 bytes);
2118 cx->free(bytes);
2119 }
2120 }
2121 fprintf(tracefp, " @ %u\n", (uintN) (regs->sp - StackBase(fp)));
2122 }
2123 fprintf(tracefp, " stack: ");
2124 for (siter = StackBase(fp); siter < regs->sp; siter++) {
2125 str = js_ValueToString(cx, *siter);
2126 if (!str)
2127 fputs("<null>", tracefp);
2128 else
2129 js_FileEscapedString(tracefp, str, 0);
2130 fputc(' ', tracefp);
2131 }
2132 fputc('\n', tracefp);
2133 }
2134
2135 fprintf(tracefp, "%4u: ",
2136 js_PCToLineNumber(cx, fp->script, fp->imacpc ? fp->imacpc : regs->pc));
2137 js_Disassemble1(cx, fp->script, regs->pc,
2138 regs->pc - fp->script->code,
2139 JS_FALSE, tracefp);
2140 op = (JSOp) *regs->pc;
2141 nuses = js_GetStackUses(&js_CodeSpec[op], op, regs->pc);
2142 if (nuses != 0) {
2143 for (n = -nuses; n < 0; n++) {
2144 char *bytes = js_DecompileValueGenerator(cx, n, regs->sp[n],
2145 NULL);
2146 if (bytes) {
2147 fprintf(tracefp, "%s %s",
2148 (n == -nuses) ? " inputs:" : ",",
2149 bytes);
2150 cx->free(bytes);
2151 }
2152 }
2153 fprintf(tracefp, " @ %u\n", (uintN) (regs->sp - StackBase(fp)));
2154 }
2155 cx->tracePrevPc = regs->pc;
2156
2157 /* It's nice to have complete traces when debugging a crash. */
2158 fflush(tracefp);
2159 }
2160
2161 #endif /* DEBUG */
2162
2163 #ifdef JS_OPMETER
2164
2165 # include <stdlib.h>
2166
2167 # define HIST_NSLOTS 8
2168
2169 /*
2170 * The second dimension is hardcoded at 256 because that many opcode values fit
2171 * in a byte, and mainly to optimize away multiplying by JSOP_LIMIT to address
2172 * any particular row.
2173 */
2174 static uint32 succeeds[JSOP_LIMIT][256];
2175 static uint32 slot_ops[JSOP_LIMIT][HIST_NSLOTS];
2176
2177 JS_STATIC_INTERPRET void
2178 js_MeterOpcodePair(JSOp op1, JSOp op2)
2179 {
2180 if (op1 != JSOP_STOP)
2181 ++succeeds[op1][op2];
2182 }
2183
2184 JS_STATIC_INTERPRET void
2185 js_MeterSlotOpcode(JSOp op, uint32 slot)
2186 {
2187 if (slot < HIST_NSLOTS)
2188 ++slot_ops[op][slot];
2189 }
2190
2191 typedef struct Edge {
2192 const char *from;
2193 const char *to;
2194 uint32 count;
2195 } Edge;
2196
2197 static int
2198 compare_edges(const void *a, const void *b)
2199 {
2200 const Edge *ea = (const Edge *) a;
2201 const Edge *eb = (const Edge *) b;
2202
2203 return (int32)eb->count - (int32)ea->count;
2204 }
2205
2206 void
2207 js_DumpOpMeters()
2208 {
2209 const char *name, *from, *style;
2210 FILE *fp;
2211 uint32 total, count;
2212 uint32 i, j, nedges;
2213 Edge *graph;
2214
2215 name = getenv("JS_OPMETER_FILE");
2216 if (!name)
2217 name = "/tmp/ops.dot";
2218 fp = fopen(name, "w");
2219 if (!fp) {
2220 perror(name);
2221 return;
2222 }
2223
2224 total = nedges = 0;
2225 for (i = 0; i < JSOP_LIMIT; i++) {
2226 for (j = 0; j < JSOP_LIMIT; j++) {
2227 count = succeeds[i][j];
2228 if (count != 0) {
2229 total += count;
2230 ++nedges;
2231 }
2232 }
2233 }
2234
2235 # define SIGNIFICANT(count,total) (200. * (count) >= (total))
2236
2237 graph = (Edge *) js_calloc(nedges * sizeof graph[0]);
2238 for (i = nedges = 0; i < JSOP_LIMIT; i++) {
2239 from = js_CodeName[i];
2240 for (j = 0; j < JSOP_LIMIT; j++) {
2241 count = succeeds[i][j];
2242 if (count != 0 && SIGNIFICANT(count, total)) {
2243 graph[nedges].from = from;
2244 graph[nedges].to = js_CodeName[j];
2245 graph[nedges].count = count;
2246 ++nedges;
2247 }
2248 }
2249 }
2250 qsort(graph, nedges, sizeof(Edge), compare_edges);
2251
2252 # undef SIGNIFICANT
2253
2254 fputs("digraph {\n", fp);
2255 for (i = 0, style = NULL; i < nedges; i++) {
2256 JS_ASSERT(i == 0 || graph[i-1].count >= graph[i].count);
2257 if (!style || graph[i-1].count != graph[i].count) {
2258 style = (i > nedges * .75) ? "dotted" :
2259 (i > nedges * .50) ? "dashed" :
2260 (i > nedges * .25) ? "solid" : "bold";
2261 }
2262 fprintf(fp, " %s -> %s [label=\"%lu\" style=%s]\n",
2263 graph[i].from, graph[i].to,
2264 (unsigned long)graph[i].count, style);
2265 }
2266 js_free(graph);
2267 fputs("}\n", fp);
2268 fclose(fp);
2269
2270 name = getenv("JS_OPMETER_HIST");
2271 if (!name)
2272 name = "/tmp/ops.hist";
2273 fp = fopen(name, "w");
2274 if (!fp) {
2275 perror(name);
2276 return;
2277 }
2278 fputs("bytecode", fp);
2279 for (j = 0; j < HIST_NSLOTS; j++)
2280 fprintf(fp, " slot %1u", (unsigned)j);
2281 putc('\n', fp);
2282 fputs("========", fp);
2283 for (j = 0; j < HIST_NSLOTS; j++)
2284 fputs(" =======", fp);
2285 putc('\n', fp);
2286 for (i = 0; i < JSOP_LIMIT; i++) {
2287 for (j = 0; j < HIST_NSLOTS; j++) {
2288 if (slot_ops[i][j] != 0) {
2289 /* Reuse j in the next loop, since we break after. */
2290 fprintf(fp, "%-8.8s", js_CodeName[i]);
2291 for (j = 0; j < HIST_NSLOTS; j++)
2292 fprintf(fp, " %7lu", (unsigned long)slot_ops[i][j]);
2293 putc('\n', fp);
2294 break;
2295 }
2296 }
2297 }
2298 fclose(fp);
2299 }
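/*
 * For illustration, the .dot file written above consists of edges like
 *
 *   JSOP_GETLOCAL -> JSOP_SETLOCAL [label="12345" style=bold]
 *
 * (names from js_CodeName, counts from the succeeds[][] table), and the
 * histogram file is a fixed-width table of per-slot counts for the
 * stack-addressing opcodes recorded by js_MeterSlotOpcode.
 */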
2300
2301 #endif /* JS_OPMETER */
2302
2303 #endif /* !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___ */
2304
2305 #ifndef jsinvoke_cpp___
2306
2307 #define PUSH(v) (*regs.sp++ = (v))
2308 #define PUSH_OPND(v) PUSH(v)
2309 #define STORE_OPND(n,v) (regs.sp[n] = (v))
2310 #define POP() (*--regs.sp)
2311 #define POP_OPND() POP()
2312 #define FETCH_OPND(n) (regs.sp[n])
2313
2314 /*
2315 * Store the jsdouble d at regs.sp[n], using regs from the lexical environment.
2316 * Try to convert d to a jsint that fits in a jsval, otherwise GC-alloc space
2317 * for it and store a tagged reference.
2318 */
2319 #define STORE_NUMBER(cx, n, d) \
2320 JS_BEGIN_MACRO \
2321 jsint i_; \
2322 \
2323 if (JSDOUBLE_IS_INT(d, i_) && INT_FITS_IN_JSVAL(i_)) \
2324 regs.sp[n] = INT_TO_JSVAL(i_); \
2325 else if (!js_NewDoubleInRootedValue(cx, d, &regs.sp[n])) \
2326 goto error; \
2327 JS_END_MACRO
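/*
 * For illustration, STORE_NUMBER(cx, -1, 3.0) overwrites the top of stack
 * with INT_TO_JSVAL(3), whereas STORE_NUMBER(cx, -1, 3.5) GC-allocates a
 * jsdouble and stores a tagged reference to it, jumping to the error label if
 * allocation fails.
 */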
2328
2329 #define STORE_INT(cx, n, i) \
2330 JS_BEGIN_MACRO \
2331 if (INT_FITS_IN_JSVAL(i)) \
2332 regs.sp[n] = INT_TO_JSVAL(i); \
2333 else if (!js_NewDoubleInRootedValue(cx, (jsdouble) (i), &regs.sp[n])) \
2334 goto error; \
2335 JS_END_MACRO
2336
2337 #define STORE_UINT(cx, n, u) \
2338 JS_BEGIN_MACRO \
2339 if ((u) <= JSVAL_INT_MAX) \
2340 regs.sp[n] = INT_TO_JSVAL(u); \
2341 else if (!js_NewDoubleInRootedValue(cx, (jsdouble) (u), &regs.sp[n])) \
2342 goto error; \
2343 JS_END_MACRO
2344
2345 #define FETCH_NUMBER(cx, n, d) \
2346 JS_BEGIN_MACRO \
2347 jsval v_; \
2348 \
2349 v_ = FETCH_OPND(n); \
2350 VALUE_TO_NUMBER(cx, n, v_, d); \
2351 JS_END_MACRO
2352
2353 #define FETCH_INT(cx, n, i) \
2354 JS_BEGIN_MACRO \
2355 jsval v_; \
2356 \
2357 v_= FETCH_OPND(n); \
2358 if (JSVAL_IS_INT(v_)) { \
2359 i = JSVAL_TO_INT(v_); \
2360 } else { \
2361 i = js_ValueToECMAInt32(cx, &regs.sp[n]); \
2362 if (JSVAL_IS_NULL(regs.sp[n])) \
2363 goto error; \
2364 } \
2365 JS_END_MACRO
2366
2367 #define FETCH_UINT(cx, n, ui) \
2368 JS_BEGIN_MACRO \
2369 jsval v_; \
2370 \
2371 v_= FETCH_OPND(n); \
2372 if (JSVAL_IS_INT(v_)) { \
2373 ui = (uint32) JSVAL_TO_INT(v_); \
2374 } else { \
2375 ui = js_ValueToECMAUint32(cx, &regs.sp[n]); \
2376 if (JSVAL_IS_NULL(regs.sp[n])) \
2377 goto error; \
2378 } \
2379 JS_END_MACRO
2380
2381 /*
2382 * Optimized conversion macros that test for the desired type in v before
2383 * homing sp and calling a conversion function.
2384 */
2385 #define VALUE_TO_NUMBER(cx, n, v, d) \
2386 JS_BEGIN_MACRO \
2387 JS_ASSERT(v == regs.sp[n]); \
2388 if (JSVAL_IS_INT(v)) { \
2389 d = (jsdouble)JSVAL_TO_INT(v); \
2390 } else if (JSVAL_IS_DOUBLE(v)) { \
2391 d = *JSVAL_TO_DOUBLE(v); \
2392 } else { \
2393 d = js_ValueToNumber(cx, &regs.sp[n]); \
2394 if (JSVAL_IS_NULL(regs.sp[n])) \
2395 goto error; \
2396 JS_ASSERT(JSVAL_IS_NUMBER(regs.sp[n]) || \
2397 regs.sp[n] == JSVAL_TRUE); \
2398 } \
2399 JS_END_MACRO
2400
2401 #define POP_BOOLEAN(cx, v, b) \
2402 JS_BEGIN_MACRO \
2403 v = FETCH_OPND(-1); \
2404 if (v == JSVAL_NULL) { \
2405 b = JS_FALSE; \
2406 } else if (JSVAL_IS_BOOLEAN(v)) { \
2407 b = JSVAL_TO_BOOLEAN(v); \
2408 } else { \
2409 b = js_ValueToBoolean(v); \
2410 } \
2411 regs.sp--; \
2412 JS_END_MACRO
2413
2414 #define VALUE_TO_OBJECT(cx, n, v, obj) \
2415 JS_BEGIN_MACRO \
2416 if (!JSVAL_IS_PRIMITIVE(v)) { \
2417 obj = JSVAL_TO_OBJECT(v); \
2418 } else { \
2419 obj = js_ValueToNonNullObject(cx, v); \
2420 if (!obj) \
2421 goto error; \
2422 STORE_OPND(n, OBJECT_TO_JSVAL(obj)); \
2423 } \
2424 JS_END_MACRO
2425
2426 #define FETCH_OBJECT(cx, n, v, obj) \
2427 JS_BEGIN_MACRO \
2428 v = FETCH_OPND(n); \
2429 VALUE_TO_OBJECT(cx, n, v, obj); \
2430 JS_END_MACRO
2431
2432 #define DEFAULT_VALUE(cx, n, hint, v) \
2433 JS_BEGIN_MACRO \
2434 JS_ASSERT(!JSVAL_IS_PRIMITIVE(v)); \
2435 JS_ASSERT(v == regs.sp[n]); \
2436 if (!JSVAL_TO_OBJECT(v)->defaultValue(cx, hint, &regs.sp[n])) \
2437 goto error; \
2438 v = regs.sp[n]; \
2439 JS_END_MACRO
2440
2441 /*
2442 * Quickly test if v is an int from the [-2**29, 2**29) range, that is, when
2443 * the lowest bit of v is 1 and bits 30 and 31 are either both 0 or both 1. For
2444 * such v we can increment or decrement it by adding or subtracting two
2445 * without checking whether the result overflows JSVAL_INT_MIN or JSVAL_INT_MAX.
2446 */
2447 #define CAN_DO_FAST_INC_DEC(v) (((((v) << 1) ^ v) & 0x80000001) == 1)
2448
2449 JS_STATIC_ASSERT(JSVAL_INT == 1);
2450 JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(INT_TO_JSVAL_CONSTEXPR(JSVAL_INT_MIN)));
2451 JS_STATIC_ASSERT(!CAN_DO_FAST_INC_DEC(INT_TO_JSVAL_CONSTEXPR(JSVAL_INT_MAX)));
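/*
 * Worked example: with JSVAL_INT tagging an int i is stored as (i << 1) | 1,
 * so adding or subtracting 2 to the jsval changes i by exactly 1.
 *
 *   i = 5:          v = 0x0000000B, ((v << 1) ^ v) & 0x80000001 == 1  -> fast
 *   i = 2^30 - 1:   v = 0x7FFFFFFF, bit 31 clear but bit 30 set       -> slow
 *
 * consistent with the static asserts above for JSVAL_INT_MIN and
 * JSVAL_INT_MAX.
 */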
2452
2453 /*
2454 * Conditional assert to detect failure to clear a pending exception that is
2455 * suppressed (or unintentional suppression of a wanted exception).
2456 */
2457 #if defined DEBUG_brendan || defined DEBUG_mrbkap || defined DEBUG_shaver
2458 # define DEBUG_NOT_THROWING 1
2459 #endif
2460
2461 #ifdef DEBUG_NOT_THROWING
2462 # define ASSERT_NOT_THROWING(cx) JS_ASSERT(!(cx)->throwing)
2463 #else
2464 # define ASSERT_NOT_THROWING(cx) /* nothing */
2465 #endif
2466
2467 /*
2468 * Define JS_OPMETER to instrument bytecode succession, generating a .dot file
2469 * on shutdown that shows the graph of significant predecessor/successor pairs
2470 * executed, where the edge labels give the succession counts. The .dot file
2471 * is named by the JS_OPMETER_FILE environment variable and defaults to /tmp/ops.dot.
2472 *
2473 * Bonus feature: JS_OPMETER also enables counters for stack-addressing ops
2474 * such as JSOP_GETLOCAL, JSOP_INCARG, via METER_SLOT_OP. The resulting counts
2475 * are written to JS_OPMETER_HIST, defaulting to /tmp/ops.hist.
2476 */
2477 #ifndef JS_OPMETER
2478 # define METER_OP_INIT(op) /* nothing */
2479 # define METER_OP_PAIR(op1,op2) /* nothing */
2480 # define METER_SLOT_OP(op,slot) /* nothing */
2481 #else
2482
2483 /*
2484 * The second dimension is hardcoded at 256 because that many opcode values fit
2485 * in a byte, and mainly to optimize away multiplying by JSOP_LIMIT to address
2486 * any particular row.
2487 */
2488 # define METER_OP_INIT(op) ((op) = JSOP_STOP)
2489 # define METER_OP_PAIR(op1,op2) (js_MeterOpcodePair(op1, op2))
2490 # define METER_SLOT_OP(op,slot) (js_MeterSlotOpcode(op, slot))
2491
2492 #endif
2493
2494 #define MAX_INLINE_CALL_COUNT 3000
2495
2496 /*
2497 * Threaded interpretation via computed goto appears to be well-supported by
2498 * GCC 3 and higher. IBM's C compiler when run with the right options (e.g.,
2499 * -qlanglvl=extended) also supports threading. Ditto the SunPro C compiler.
2500 * Currently threading is broken for JS_VERSION < 160, though this isn't worth fixing.
2501 * Add your compiler support macros here.
2502 */
2503 #ifndef JS_THREADED_INTERP
2504 # if JS_VERSION >= 160 && ( \
2505 __GNUC__ >= 3 || \
2506 (__IBMC__ >= 700 && defined __IBM_COMPUTED_GOTO) || \
2507 __SUNPRO_C >= 0x570)
2508 # define JS_THREADED_INTERP 1
2509 # else
2510 # define JS_THREADED_INTERP 0
2511 # endif
2512 #endif
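/*
 * Sketch, for illustration: "threaded" interpretation replaces the switch
 * dispatch with per-opcode labels and a table of label addresses, roughly
 *
 *   static void *const table[] = { &&L_JSOP_NOP, ... };
 *   goto *table[op];                    // computed-goto extension
 *   L_JSOP_NOP: op = *++pc; goto *table[op];
 *
 * which is essentially what the jump tables and the DO_OP/BEGIN_CASE macros
 * inside js_Interpret expand to when JS_THREADED_INTERP is 1.
 */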
2513
2514 /*
2515 * Deadlocks or else bad races are likely if JS_THREADSAFE, so we must rely on
2516 * single-thread DEBUG js shell testing to verify property cache hits.
2517 */
2518 #if defined DEBUG && !defined JS_THREADSAFE
2519
2520 # define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) \
2521 JS_BEGIN_MACRO \
2522 if (!AssertValidPropertyCacheHit(cx, script, regs, pcoff, obj, pobj, \
2523 entry)) { \
2524 goto error; \
2525 } \
2526 JS_END_MACRO
2527
2528 static bool
2529 AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, JSFrameRegs& regs,
2530 ptrdiff_t pcoff, JSObject *start, JSObject *found,
2531 JSPropCacheEntry *entry)
2532 {
2533 uint32 sample = cx->runtime->gcNumber;
2534
2535 JSAtom *atom;
2536 if (pcoff >= 0)
2537 GET_ATOM_FROM_BYTECODE(script, regs.pc, pcoff, atom);
2538 else
2539 atom = cx->runtime->atomState.lengthAtom;
2540
2541 JSObject *obj, *pobj;
2542 JSProperty *prop;
2543 bool ok;
2544
2545 if (JOF_OPMODE(*regs.pc) == JOF_NAME) {
2546 ok = js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &pobj, &prop);
2547 } else {
2548 obj = start;
2549 ok = js_LookupProperty(cx, obj, ATOM_TO_JSID(atom), &pobj, &prop);
2550 }
2551 if (!ok)
2552 return false;
2553 if (!prop)
2554 return true;
2555 if (cx->runtime->gcNumber != sample ||
2556 PCVCAP_SHAPE(entry->vcap) != OBJ_SHAPE(pobj)) {
2557 pobj->dropProperty(cx, prop);
2558 return true;
2559 }
2560 JS_ASSERT(prop);
2561 JS_ASSERT(pobj == found);
2562
2563 JSScopeProperty *sprop = (JSScopeProperty *) prop;
2564 if (PCVAL_IS_SLOT(entry->vword)) {
2565 JS_ASSERT(PCVAL_TO_SLOT(entry->vword) == sprop->slot);
2566 } else if (PCVAL_IS_SPROP(entry->vword)) {
2567 JS_ASSERT(PCVAL_TO_SPROP(entry->vword) == sprop);
2568 } else {
2569 jsval v;
2570 JS_ASSERT(PCVAL_IS_OBJECT(entry->vword));
2571 JS_ASSERT(entry->vword != PCVAL_NULL);
2572 JS_ASSERT(OBJ_SCOPE(pobj)->branded());
2573 JS_ASSERT(SPROP_HAS_STUB_GETTER(sprop));
2574 JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(pobj)));
2575 v = LOCKED_OBJ_GET_SLOT(pobj, sprop->slot);
2576 JS_ASSERT(VALUE_IS_FUNCTION(cx, v));
2577 JS_ASSERT(PCVAL_TO_OBJECT(entry->vword) == JSVAL_TO_OBJECT(v));
2578 }
2579
2580 pobj->dropProperty(cx, prop);
2581 return true;
2582 }
2583
2584 #else
2585 # define ASSERT_VALID_PROPERTY_CACHE_HIT(pcoff,obj,pobj,entry) ((void) 0)
2586 #endif
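/*
 * For illustration, AssertValidPropertyCacheHit re-runs the uncached lookup
 * (js_FindProperty for JOF_NAME ops, js_LookupProperty otherwise) and checks
 * that the cache entry's payload still names the same slot, sprop, or branded
 * method object, skipping the check when a GC has run (gcNumber changed) or
 * the cached shape no longer matches the found object.
 */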
2587
2588 /*
2589 * Ensure that the interpreter switch can close call-bytecode cases in the
2590 * same way as non-call bytecodes.
2591 */
2592 JS_STATIC_ASSERT(JSOP_NAME_LENGTH == JSOP_CALLNAME_LENGTH);
2593 JS_STATIC_ASSERT(JSOP_GETGVAR_LENGTH == JSOP_CALLGVAR_LENGTH);
2594 JS_STATIC_ASSERT(JSOP_GETUPVAR_LENGTH == JSOP_CALLUPVAR_LENGTH);
2595 JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_CALLUPVAR_DBG_LENGTH);
2596 JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_GETUPVAR_LENGTH);
2597 JS_STATIC_ASSERT(JSOP_GETDSLOT_LENGTH == JSOP_CALLDSLOT_LENGTH);
2598 JS_STATIC_ASSERT(JSOP_GETARG_LENGTH == JSOP_CALLARG_LENGTH);
2599 JS_STATIC_ASSERT(JSOP_GETLOCAL_LENGTH == JSOP_CALLLOCAL_LENGTH);
2600 JS_STATIC_ASSERT(JSOP_XMLNAME_LENGTH == JSOP_CALLXMLNAME_LENGTH);
2601
2602 /*
2603 * Same for debuggable flat closures defined at top level in another function
2604 * or program fragment.
2605 */
2606 JS_STATIC_ASSERT(JSOP_DEFFUN_FC_LENGTH == JSOP_DEFFUN_DBGFC_LENGTH);
2607
2608 /*
2609 * Same for JSOP_SETNAME and JSOP_SETPROP, which differ only slightly but
2610 * remain distinct for the decompiler.
2611 */
2612 JS_STATIC_ASSERT(JSOP_SETNAME_LENGTH == JSOP_SETPROP_LENGTH);
2613
2614 /* See TRY_BRANCH_AFTER_COND. */
2615 JS_STATIC_ASSERT(JSOP_IFNE_LENGTH == JSOP_IFEQ_LENGTH);
2616 JS_STATIC_ASSERT(JSOP_IFNE == JSOP_IFEQ + 1);
2617
2618 /* For the fastest case under JSOP_INCNAME, etc. */
2619 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_DECNAME_LENGTH);
2620 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_NAMEINC_LENGTH);
2621 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_NAMEDEC_LENGTH);
2622
2623 #ifdef JS_TRACER
2624 # define ABORT_RECORDING(cx, reason) \
2625 JS_BEGIN_MACRO \
2626 if (TRACE_RECORDER(cx)) \
2627 js_AbortRecording(cx, reason); \
2628 JS_END_MACRO
2629 #else
2630 # define ABORT_RECORDING(cx, reason) ((void) 0)
2631 #endif
2632
2633 JS_REQUIRES_STACK JSBool
2634 js_Interpret(JSContext *cx)
2635 {
2636 #ifdef MOZ_TRACEVIS
2637 TraceVisStateObj tvso(cx, S_INTERP);
2638 #endif
2639
2640 JSRuntime *rt;
2641 JSStackFrame *fp;
2642 JSScript *script;
2643 uintN inlineCallCount;
2644 JSAtom **atoms;
2645 JSVersion currentVersion, originalVersion;
2646 JSFrameRegs regs;
2647 JSObject *obj, *obj2, *parent;
2648 JSBool ok, cond;
2649 jsint len;
2650 jsbytecode *endpc, *pc2;
2651 JSOp op, op2;
2652 jsatomid index;
2653 JSAtom *atom;
2654 uintN argc, attrs, flags;
2655 uint32 slot;
2656 jsval *vp, lval, rval, ltmp, rtmp;
2657 jsid id;
2658 JSProperty *prop;
2659 JSScopeProperty *sprop;
2660 JSString *str, *str2;
2661 jsint i, j;
2662 jsdouble d, d2;
2663 JSClass *clasp;
2664 JSFunction *fun;
2665 JSType type;
2666 jsint low, high, off, npairs;
2667 JSBool match;
2668 #if JS_HAS_GETTER_SETTER
2669 JSPropertyOp getter, setter;
2670 #endif
2671 JSAutoResolveFlags rf(cx, JSRESOLVE_INFER);
2672
2673 #ifdef __GNUC__
2674 # define JS_EXTENSION __extension__
2675 # define JS_EXTENSION_(s) __extension__ ({ s; })
2676 #else
2677 # define JS_EXTENSION
2678 # define JS_EXTENSION_(s) s
2679 #endif
2680
2681 # ifdef DEBUG
2682 /*
2683 * We call this macro from BEGIN_CASE in threaded interpreters,
2684 * and before entering the switch in non-threaded interpreters.
2685 * However, reaching such points doesn't mean we've actually
2686 * fetched an OP from the instruction stream: some opcodes use
2687 * 'op=x; DO_OP()' to let another opcode's implementation finish
2688 * their work, and many opcodes share entry points with a run of
2689 * consecutive BEGIN_CASEs.
2690 *
2691 * Take care to trace OP only when it is the opcode fetched from
2692 * the instruction stream, so the trace matches what one would
2693 * expect from looking at the code. (We do omit POPs after SETs;
2694 * unfortunate, but not worth fixing.)
2695 */
2696 # define TRACE_OPCODE(OP) JS_BEGIN_MACRO \
2697 if (JS_UNLIKELY(cx->tracefp != NULL) && \
2698 (OP) == *regs.pc) \
2699 js_TraceOpcode(cx); \
2700 JS_END_MACRO
2701 # else
2702 # define TRACE_OPCODE(OP) ((void) 0)
2703 # endif
2704
2705 #if JS_THREADED_INTERP
2706 static void *const normalJumpTable[] = {
2707 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
2708 JS_EXTENSION &&L_##op,
2709 # include "jsopcode.tbl"
2710 # undef OPDEF
2711 };
2712
2713 static void *const interruptJumpTable[] = {
2714 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
2715 JS_EXTENSION &&interrupt,
2716 # include "jsopcode.tbl"
2717 # undef OPDEF
2718 };
2719
2720 register void * const *jumpTable = normalJumpTable;
2721
2722 METER_OP_INIT(op); /* to nullify first METER_OP_PAIR */
2723
2724 # define ENABLE_INTERRUPTS() ((void) (jumpTable = interruptJumpTable))
2725
2726 # ifdef JS_TRACER
2727 # define CHECK_RECORDER() \
2728 JS_ASSERT_IF(TRACE_RECORDER(cx), jumpTable == interruptJumpTable)
2729 # else
2730 # define CHECK_RECORDER() ((void)0)
2731 # endif
2732
2733 # define DO_OP() JS_BEGIN_MACRO \
2734 CHECK_RECORDER(); \
2735 JS_EXTENSION_(goto *jumpTable[op]); \
2736 JS_END_MACRO
2737 # define DO_NEXT_OP(n) JS_BEGIN_MACRO \
2738 METER_OP_PAIR(op, regs.pc[n]); \
2739 op = (JSOp) *(regs.pc += (n)); \
2740 DO_OP(); \
2741 JS_END_MACRO
2742
2743 # define BEGIN_CASE(OP) L_##OP: TRACE_OPCODE(OP); CHECK_RECORDER();
2744 # define END_CASE(OP) DO_NEXT_OP(OP##_LENGTH);
2745 # define END_VARLEN_CASE DO_NEXT_OP(len);
2746 # define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP) \
2747 JS_ASSERT(js_CodeSpec[OP].length == 1); \
2748 op = (JSOp) *++regs.pc; \
2749 DO_OP();
2750
2751 # define END_EMPTY_CASES
2752
2753 #else /* !JS_THREADED_INTERP */
2754
2755 register intN switchMask = 0;
2756 intN switchOp;
2757
2758 # define ENABLE_INTERRUPTS() ((void) (switchMask = -1))
2759
2760 # ifdef JS_TRACER
2761 # define CHECK_RECORDER() \
2762 JS_ASSERT_IF(TRACE_RECORDER(cx), switchMask == -1)
2763 # else
2764 # define CHECK_RECORDER() ((void)0)
2765 # endif
2766
2767 # define DO_OP() goto do_op
2768 # define DO_NEXT_OP(n) JS_BEGIN_MACRO \
2769 JS_ASSERT((n) == len); \
2770 goto advance_pc; \
2771 JS_END_MACRO
2772
2773 # define BEGIN_CASE(OP) case OP: CHECK_RECORDER();
2774 # define END_CASE(OP) END_CASE_LEN(OP##_LENGTH)
2775 # define END_CASE_LEN(n) END_CASE_LENX(n)
2776 # define END_CASE_LENX(n) END_CASE_LEN##n
2777
2778 /*
2779 * To share the code for all len == 1 cases we use the specialized label with
2780 * code that falls through to advance_pc: .
2781 */
2782 # define END_CASE_LEN1 goto advance_pc_by_one;
2783 # define END_CASE_LEN2 len = 2; goto advance_pc;
2784 # define END_CASE_LEN3 len = 3; goto advance_pc;
2785 # define END_CASE_LEN4 len = 4; goto advance_pc;
2786 # define END_CASE_LEN5 len = 5; goto advance_pc;
2787 # define END_VARLEN_CASE goto advance_pc;
2788 # define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP)
2789 # define END_EMPTY_CASES goto advance_pc_by_one;
2790
2791 #endif /* !JS_THREADED_INTERP */
2792
2793 #ifdef JS_TRACER
2794 /* We cannot reenter the interpreter while recording. */
2795 if (TRACE_RECORDER(cx))
2796 js_AbortRecording(cx, "attempt to reenter interpreter while recording");
2797 #endif
2798
2799 /* Check for too deep of a native thread stack. */
2800 JS_CHECK_RECURSION(cx, return JS_FALSE);
2801
2802 rt = cx->runtime;
2803
2804 /* Set registerized frame pointer and derived script pointer. */
2805 fp = cx->fp;
2806 script = fp->script;
2807 JS_ASSERT(script->length != 0);
2808
2809 /* Count of JS function calls that nest in this C js_Interpret frame. */
2810 inlineCallCount = 0;
2811
2812 /*
2813 * Initialize the index segment register used by LOAD_ATOM and
2814 * GET_FULL_INDEX macros below. As a register we use a pointer based on
2815 * the atom map to turn frequently executed LOAD_ATOM into simple array
2816 * access. For less frequent object and regexp loads we have to recover
2817 * the segment from atoms pointer first.
2818 */
2819 atoms = script->atomMap.vector;
2820
2821 #define LOAD_ATOM(PCOFF) \
2822 JS_BEGIN_MACRO \
2823 JS_ASSERT(fp->imacpc \
2824 ? atoms == COMMON_ATOMS_START(&rt->atomState) && \
2825 GET_INDEX(regs.pc + PCOFF) < js_common_atom_count \
2826 : (size_t)(atoms - script->atomMap.vector) < \
2827 (size_t)(script->atomMap.length - \
2828 GET_INDEX(regs.pc + PCOFF))); \
2829 atom = atoms[GET_INDEX(regs.pc + PCOFF)]; \
2830 JS_END_MACRO
2831
2832 #define GET_FULL_INDEX(PCOFF) \
2833 (atoms - script->atomMap.vector + GET_INDEX(regs.pc + PCOFF))
2834
2835 #define LOAD_OBJECT(PCOFF) \
2836 (obj = script->getObject(GET_FULL_INDEX(PCOFF)))
2837
2838 #define LOAD_FUNCTION(PCOFF) \
2839 (fun = script->getFunction(GET_FULL_INDEX(PCOFF)))
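/*
 * For illustration, for an opcode whose immediate is an atom index,
 * LOAD_ATOM(0) is a single indexed load, atoms[GET_INDEX(regs.pc)], once
 * 'atoms' has been pointed at the right segment; only the less frequent
 * object, regexp, and function loads go through GET_FULL_INDEX to rebase the
 * index against script->atomMap.vector.
 */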
2840
2841 #ifdef JS_TRACER
2842
2843 #ifdef MOZ_TRACEVIS
2844 #if JS_THREADED_INTERP
2845 #define MONITOR_BRANCH_TRACEVIS \
2846 JS_BEGIN_MACRO \
2847 if (jumpTable != interruptJumpTable) \
2848 js_EnterTraceVisState(cx, S_RECORD, R_NONE); \
2849 JS_END_MACRO
2850 #else /* !JS_THREADED_INTERP */
2851 #define MONITOR_BRANCH_TRACEVIS \
2852 JS_BEGIN_MACRO \
2853 js_EnterTraceVisState(cx, S_RECORD, R_NONE); \
2854 JS_END_MACRO
2855 #endif
2856 #else
2857 #define MONITOR_BRANCH_TRACEVIS
2858 #endif
2859
2860 #define MONITOR_BRANCH() \
2861 JS_BEGIN_MACRO \
2862 if (TRACING_ENABLED(cx)) { \
2863 if (js_MonitorLoopEdge(cx, inlineCallCount)) { \
2864 JS_ASSERT(TRACE_RECORDER(cx)); \
2865 MONITOR_BRANCH_TRACEVIS; \
2866 ENABLE_INTERRUPTS(); \
2867 } \
2868 fp = cx->fp; \
2869 script = fp->script; \
2870 atoms = FrameAtomBase(cx, fp); \
2871 currentVersion = (JSVersion) script->version; \
2872 JS_ASSERT(fp->regs == &regs); \
2873 if (cx->throwing) \
2874 goto error; \
2875 } \
2876 JS_END_MACRO
2877
2878 #else /* !JS_TRACER */
2879
2880 #define MONITOR_BRANCH() ((void) 0)
2881
2882 #endif /* !JS_TRACER */
2883
2884 /*
2885 * Prepare to call a user-supplied branch handler, and abort the script
2886 * if it returns false.
2887 */
2888 #define CHECK_BRANCH() \
2889 JS_BEGIN_MACRO \
2890 if (!JS_CHECK_OPERATION_LIMIT(cx)) \
2891 goto error; \
2892 JS_END_MACRO
2893
2894 #ifndef TRACE_RECORDER
2895 #define TRACE_RECORDER(cx) (false)
2896 #endif
2897
2898 #define BRANCH(n) \
2899 JS_BEGIN_MACRO \
2900 regs.pc += (n); \
2901 op = (JSOp) *regs.pc; \
2902 if ((n) <= 0) { \
2903 CHECK_BRANCH(); \
2904 if (op == JSOP_NOP) { \
2905 if (TRACE_RECORDER(cx)) { \
2906 MONITOR_BRANCH(); \
2907 op = (JSOp) *regs.pc; \
2908 } else { \
2909 op = (JSOp) *++regs.pc; \
2910 } \
2911 } else if (op == JSOP_TRACE) { \
2912 MONITOR_BRANCH(); \
2913 op = (JSOp) *regs.pc; \
2914 } \
2915 } \
2916 DO_OP(); \
2917 JS_END_MACRO
2918
2919 MUST_FLOW_THROUGH("exit");
2920 ++cx->interpLevel;
2921
2922 /*
2923 * Optimized Get and SetVersion for proper script language versioning.
2924 *
2925 * If any native method or JSClass/JSObjectOps hook calls js_SetVersion
2926 * and changes cx->version, the effect will "stick" and we will stop
2927 * maintaining currentVersion. This is relied upon by testsuites, for
2928 * the most part -- web browsers select version before compiling and not
2929 * at run-time.
2930 */
2931 currentVersion = (JSVersion) script->version;
2932 originalVersion = (JSVersion) cx->version;
2933 if (currentVersion != originalVersion)
2934 js_SetVersion(cx, currentVersion);
2935
2936 /* Update the static-link display. */
2937 if (script->staticLevel < JS_DISPLAY_SIZE) {
2938 JSStackFrame **disp = &cx->display[script->staticLevel];
2939 fp->displaySave = *disp;
2940 *disp = fp;
2941 }
2942
2943 # define CHECK_INTERRUPT_HANDLER() \
2944 JS_BEGIN_MACRO \
2945 if (cx->debugHooks->interruptHandler) \
2946 ENABLE_INTERRUPTS(); \
2947 JS_END_MACRO
2948
2949 /*
2950 * Load the debugger's interrupt hook here and after calling out to native
2951 * functions (but not to getters, setters, or other native hooks), so we do
2952 * not have to reload it each time through the interpreter loop -- we hope
2953 * the compiler can keep it in a register when it is non-null.
2954 */
2955 CHECK_INTERRUPT_HANDLER();
2956
2957 #if !JS_HAS_GENERATORS
2958 JS_ASSERT(!fp->regs);
2959 #else
2960 /* Initialize the pc and sp registers unless we're resuming a generator. */
2961 if (JS_LIKELY(!fp->regs)) {
2962 #endif
2963 ASSERT_NOT_THROWING(cx);
2964 regs.pc = script->code;
2965 regs.sp = StackBase(fp);
2966 fp->regs = &regs;
2967 #if JS_HAS_GENERATORS
2968 } else {
2969 JSGenerator *gen;
2970
2971 JS_ASSERT(fp->flags & JSFRAME_GENERATOR);
2972 gen = FRAME_TO_GENERATOR(fp);
2973 JS_ASSERT(fp->regs == &gen->savedRegs);
2974 regs = gen->savedRegs;
2975 fp->regs = &regs;
2976 JS_ASSERT((size_t) (regs.pc - script->code) <= script->length);
2977 JS_ASSERT((size_t) (regs.sp - StackBase(fp)) <= StackDepth(script));
2978
2979 /*
2980 * To support generator_throw and to catch ignored exceptions,
2981 * fail if cx->throwing is set.
2982 */
2983 if (cx->throwing) {
2984 #ifdef DEBUG_NOT_THROWING
2985 if (cx->exception != JSVAL_ARETURN) {
2986 printf("JS INTERPRETER CALLED WITH PENDING EXCEPTION %lx\n",
2987 (unsigned long) cx->exception);
2988 }
2989 #endif
2990 goto error;
2991 }
2992 }
2993 #endif /* JS_HAS_GENERATORS */
2994
2995 /*
2996 * It is important that "op" be initialized before calling DO_OP because
2997 * it is possible for "op" to be specially assigned during the normal
2998 * processing of an opcode while looping. We rely on DO_NEXT_OP to manage
2999 * "op" correctly in all other cases.
3000 */
3001 len = 0;
3002 DO_NEXT_OP(len);
3003
3004 #if JS_THREADED_INTERP
3005 /*
3006 * This is a loop, but it does not look like a loop. The loop-closing
3007 * jump is distributed throughout goto *jumpTable[op] inside of DO_OP.
3008 * When interrupts are enabled, jumpTable is set to interruptJumpTable
3009 * where all jumps point to the interrupt label. The latter, after
3010 * calling the interrupt handler, dispatches through normalJumpTable to
3011 * continue the normal bytecode processing.
3012 */
3013
3014 #else /* !JS_THREADED_INTERP */
3015 for (;;) {
3016 advance_pc_by_one:
3017 JS_ASSERT(js_CodeSpec[op].length == 1);
3018 len = 1;
3019 advance_pc:
3020 regs.pc += len;
3021 op = (JSOp) *regs.pc;
3022
3023 do_op:
3024 CHECK_RECORDER();
3025 TRACE_OPCODE(op);
3026 switchOp = intN(op) | switchMask;
3027 do_switch:
3028 switch (switchOp) {
3029 #endif
3030
3031 /********************** Here we include the operations ***********************/
3032 #include "jsops.cpp"
3033 /*****************************************************************************/
3034
3035 #if !JS_THREADED_INTERP
3036 default:
3037 #endif
3038 #ifndef JS_TRACER
3039 bad_opcode:
3040 #endif
3041 {
3042 char numBuf[12];
3043 JS_snprintf(numBuf, sizeof numBuf, "%d", op);
3044 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
3045 JSMSG_BAD_BYTECODE, numBuf);
3046 goto error;
3047 }
3048
3049 #if !JS_THREADED_INTERP
3050 } /* switch (op) */
3051 } /* for (;;) */
3052 #endif /* !JS_THREADED_INTERP */
3053
3054 error:
3055 if (fp->imacpc && cx->throwing) {
3056 // To keep things simple, we hard-code imacro exception handlers here.
3057 if (*fp->imacpc == JSOP_NEXTITER && js_ValueIsStopIteration(cx->exception)) {
3058 // pc may point to JSOP_DUP here due to bug 474854.
3059 JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP || *regs.pc == JSOP_TRUE);
3060 cx->throwing = JS_FALSE;
3061 cx->exception = JSVAL_VOID;
3062 regs.sp[-1] = JSVAL_HOLE;
3063 PUSH(JSVAL_FALSE);
3064 goto end_imacro;
3065 }
3066
3067 // Handle other exceptions as if they came from the imacro-calling pc.
3068 regs.pc = fp->imacpc;
3069 fp->imacpc = NULL;
3070 atoms = script->atomMap.vector;
3071 }
3072
3073 JS_ASSERT((size_t)((fp->imacpc ? fp->imacpc : regs.pc) - script->code) < script->length);
3074
3075 #ifdef JS_TRACER
3076 /*
3077 * This abort could be weakened to permit tracing through exceptions that
3078 * are thrown and caught within a loop, with the co-operation of the tracer.
3079 * For now just bail on any sign of trouble.
3080 */
3081 if (TRACE_RECORDER(cx))
3082 js_AbortRecording(cx, "error or exception while recording");
3083 #endif
3084
3085 if (!cx->throwing) {
3086 /* This is an error, not a catchable exception; quit the frame ASAP. */
3087 ok = JS_FALSE;
3088 } else {
3089 JSTrapHandler handler;
3090 JSTryNote *tn, *tnlimit;
3091 uint32 offset;
3092
3093 /* Call debugger throw hook if set. */
3094 handler = cx->debugHooks->throwHook;
3095 if (handler) {
3096 switch (handler(cx, script, regs.pc, &rval,
3097 cx->debugHooks->throwHookData)) {
3098 case JSTRAP_ERROR:
3099 cx->throwing = JS_FALSE;
3100 goto error;
3101 case JSTRAP_RETURN:
3102 cx->throwing = JS_FALSE;
3103 fp->rval = rval;
3104 ok = JS_TRUE;
3105 goto forced_return;
3106 case JSTRAP_THROW:
3107 cx->exception = rval;
3108 case JSTRAP_CONTINUE:
3109 default:;
3110 }
3111 CHECK_INTERRUPT_HANDLER();
3112 }
3113
3114 /*
3115 * Look for a try block in script that can catch this exception.
3116 */
3117 if (script->trynotesOffset == 0)
3118 goto no_catch;
3119
3120 offset = (uint32)(regs.pc - script->main);
3121 tn = script->trynotes()->vector;
3122 tnlimit = tn + script->trynotes()->length;
3123 do {
3124 if (offset - tn->start >= tn->length)
3125 continue;
3126
3127 /*
3128 * We have a note that covers the exception pc but we must check
3129 * whether the interpreter has already executed the corresponding
3130 * handler. This is possible when the executed bytecode
3131 * implements break or return from inside a for-in loop.
3132 *
3133 * In this case the emitter generates additional [enditer] and
3134 * [gosub] opcodes to close all outstanding iterators and execute
3135 * the finally blocks. If such an [enditer] throws an exception,
3136 * its pc can still be inside several nested for-in loops and
3137 * try-finally statements even if we have already closed the
3138 * corresponding iterators and invoked the finally blocks.
3139 *
3140 * To address this, we make [enditer] always decrease the stack
3141 * even when its implementation throws an exception. Thus already
3142 * executed [enditer] and [gosub] opcodes will have try notes
3143 * with the stack depth exceeding the current one and this
3144 * condition is what we use to filter them out.
3145 */
3146 if (tn->stackDepth > regs.sp - StackBase(fp))
3147 continue;
3148
3149 /*
3150 * Set pc to the first bytecode after the try note to point
3151 * to the beginning of catch or finally or to [enditer] closing
3152 * the for-in loop.
3153 */
3154 regs.pc = (script)->main + tn->start + tn->length;
3155
3156 ok = js_UnwindScope(cx, fp, tn->stackDepth, JS_TRUE);
3157 JS_ASSERT(fp->regs->sp == StackBase(fp) + tn->stackDepth);
3158 if (!ok) {
3159 /*
3160 * Restart the handler search with updated pc and stack depth
3161 * to properly notify the debugger.
3162 */
3163 goto error;
3164 }
3165
3166 switch (tn->kind) {
3167 case JSTRY_CATCH:
3168 JS_ASSERT(js_GetOpcode(cx, fp->script, regs.pc) == JSOP_ENTERBLOCK);
3169
3170 #if JS_HAS_GENERATORS
3171 /* Catch cannot intercept the closing of a generator. */
3172 if (JS_UNLIKELY(cx->exception == JSVAL_ARETURN))
3173 break;
3174 #endif
3175
3176 /*
3177 * Don't clear cx->throwing to save cx->exception from GC
3178 * until it is pushed to the stack via [exception] in the
3179 * catch block.
3180 */
3181 len = 0;
3182 DO_NEXT_OP(len);
3183
3184 case JSTRY_FINALLY:
3185 /*
3186 * Push (true, exception) pair for finally to indicate that
3187 * [retsub] should rethrow the exception.
3188 */
3189 PUSH(JSVAL_TRUE);
3190 PUSH(cx->exception);
3191 cx->throwing = JS_FALSE;
3192 len = 0;
3193 DO_NEXT_OP(len);
3194
3195 case JSTRY_ITER:
3196 /*
3197 * This is similar to JSOP_ENDITER in the interpreter loop,
3198 * except the code now uses the stack slot normally used by
3199 * JSOP_NEXTITER, namely regs.sp[-1] before the regs.sp -= 2
3200 * adjustment and regs.sp[1] after, to save and restore the
3201 * pending exception.
3202 */
3203 JS_ASSERT(js_GetOpcode(cx, fp->script, regs.pc) == JSOP_ENDITER);
3204 regs.sp[-1] = cx->exception;
3205 cx->throwing = JS_FALSE;
3206 ok = js_CloseIterator(cx, regs.sp[-2]);
3207 regs.sp -= 2;
3208 if (!ok)
3209 goto error;
3210 cx->throwing = JS_TRUE;
3211 cx->exception = regs.sp[1];
3212 }
3213 } while (++tn != tnlimit);
3214
3215 no_catch:
3216 /*
3217 * Propagate the exception or error to the caller unless the exception
3218 * is an asynchronous return from a generator.
3219 */
3220 ok = JS_FALSE;
3221 #if JS_HAS_GENERATORS
3222 if (JS_UNLIKELY(cx->throwing && cx->exception == JSVAL_ARETURN)) {
3223 cx->throwing = JS_FALSE;
3224 ok = JS_TRUE;
3225 fp->rval = JSVAL_VOID;
3226 }
3227 #endif
3228 }
3229
3230 forced_return:
3231 /*
3232 * Unwind the scope making sure that ok stays false even when UnwindScope
3233 * returns true.
3234 *
3235 * When a trap handler returns JSTRAP_RETURN, we jump here with ok set to
3236 * true bypassing any finally blocks.
3237 */
3238 ok &= js_UnwindScope(cx, fp, 0, ok || cx->throwing);
3239 JS_ASSERT(regs.sp == StackBase(fp));
3240
3241 #ifdef DEBUG
3242 cx->tracePrevPc = NULL;
3243 #endif
3244
3245 if (inlineCallCount)
3246 goto inline_return;
3247
3248 exit:
3249 /*
3250 * At this point we are inevitably leaving an interpreted function or a
3251 * top-level script, and returning to one of:
3252 * (a) an "out of line" call made through js_Invoke;
3253 * (b) a js_Execute activation;
3254 * (c) a generator (SendToGenerator, jsiter.c).
3255 *
3256 * We must not be in an inline frame. The check above ensures that for the
3257 * error case and for a normal return, the code jumps directly to the parent's
3258 * frame pc.
3259 */
3260 JS_ASSERT(inlineCallCount == 0);
3261 JS_ASSERT(fp->regs == &regs);
3262 #ifdef JS_TRACER
3263 if (TRACE_RECORDER(cx))
3264 js_AbortRecording(cx, "recording out of js_Interpret");
3265 #endif
3266 #if JS_HAS_GENERATORS
3267 if (JS_UNLIKELY(fp->flags & JSFRAME_YIELDING)) {
3268 JSGenerator *gen;
3269
3270 gen = FRAME_TO_GENERATOR(fp);
3271 gen->savedRegs = regs;
3272 gen->frame.regs = &gen->savedRegs;
3273 } else
3274 #endif /* JS_HAS_GENERATORS */
3275 {
3276 JS_ASSERT(!fp->blockChain);
3277 JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->scopeChain, 0));
3278 fp->regs = NULL;
3279 }
3280
3281 /* Undo the remaining effects committed on entry to js_Interpret. */
3282 if (script->staticLevel < JS_DISPLAY_SIZE)
3283 cx->display[script->staticLevel] = fp->displaySave;
3284 if (cx->version == currentVersion && currentVersion != originalVersion)
3285 js_SetVersion(cx, originalVersion);
3286 --cx->interpLevel;
3287
3288 return ok;
3289
3290 atom_not_defined:
3291 {
3292 const char *printable;
3293
3294 printable = js_AtomToPrintableString(cx, atom);
3295 if (printable)
3296 js_ReportIsNotDefined(cx, printable);
3297 goto error;
3298 }
3299 }
3300
3301 #endif /* !defined jsinvoke_cpp___ */
