Contents of /trunk/js/jsemit.cpp



Revision 507
Sun Jan 10 07:23:34 2010 UTC by siliconforks
File size: 256029 bytes
Update SpiderMonkey from Firefox 3.6rc1.

1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sw=4 et tw=99:
3 *
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
6 *
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
11 *
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
16 *
17 * The Original Code is Mozilla Communicator client code, released
18 * March 31, 1998.
19 *
20 * The Initial Developer of the Original Code is
21 * Netscape Communications Corporation.
22 * Portions created by the Initial Developer are Copyright (C) 1998
23 * the Initial Developer. All Rights Reserved.
24 *
25 * Contributor(s):
26 *
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
38 *
39 * ***** END LICENSE BLOCK ***** */
40
41 /*
42 * JS bytecode generation.
43 */
44 #ifdef HAVE_MEMORY_H
45 #include <memory.h>
46 #endif
47 #include <new>
48 #include <string.h>
49 #include "jstypes.h"
50 #include "jsstdint.h"
51 #include "jsarena.h" /* Added by JSIFY */
52 #include "jsutil.h" /* Added by JSIFY */
53 #include "jsbit.h"
54 #include "jsprf.h"
55 #include "jsapi.h"
56 #include "jsatom.h"
57 #include "jsbool.h"
58 #include "jscntxt.h"
59 #include "jsversion.h"
60 #include "jsemit.h"
61 #include "jsfun.h"
62 #include "jsnum.h"
63 #include "jsopcode.h"
64 #include "jsparse.h"
65 #include "jsregexp.h"
66 #include "jsscan.h"
67 #include "jsscope.h"
68 #include "jsscript.h"
69 #include "jsautooplen.h"
70 #include "jsstaticcheck.h"
71
72 /* Allocation chunk counts, must be powers of two in general. */
73 #define BYTECODE_CHUNK 256 /* code allocation increment */
74 #define SRCNOTE_CHUNK 64 /* initial srcnote allocation increment */
75 #define TRYNOTE_CHUNK 64 /* trynote allocation increment */
76
77 /* Macros to compute byte sizes from typed element counts. */
78 #define BYTECODE_SIZE(n) ((n) * sizeof(jsbytecode))
79 #define SRCNOTE_SIZE(n) ((n) * sizeof(jssrcnote))
80 #define TRYNOTE_SIZE(n) ((n) * sizeof(JSTryNote))
81
82 static JSBool
83 NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
84 uintN stackDepth, size_t start, size_t end);
85
86 JSCodeGenerator::JSCodeGenerator(JSCompiler *jsc,
87 JSArenaPool *cpool, JSArenaPool *npool,
88 uintN lineno)
89 : JSTreeContext(jsc),
90 codePool(cpool), notePool(npool),
91 codeMark(JS_ARENA_MARK(cpool)), noteMark(JS_ARENA_MARK(npool)),
92 stackDepth(0), maxStackDepth(0),
93 ntrynotes(0), lastTryNode(NULL),
94 spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
95 numSpanDeps(0), numJumpTargets(0), spanDepTodo(0),
96 arrayCompDepth(0),
97 emitLevel(0)
98 {
99 flags = TCF_COMPILING;
100 memset(&prolog, 0, sizeof prolog);
101 memset(&main, 0, sizeof main);
102 current = &main;
103 firstLine = prolog.currentLine = main.currentLine = lineno;
104 prolog.noteMask = main.noteMask = SRCNOTE_CHUNK - 1;
105 memset(&upvarMap, 0, sizeof upvarMap);
106 }
107
108 JSCodeGenerator::~JSCodeGenerator()
109 {
110 JS_ARENA_RELEASE(codePool, codeMark);
111 JS_ARENA_RELEASE(notePool, noteMark);
112
113 /* NB: non-null only after OOM. */
114 if (spanDeps)
115 compiler->context->free(spanDeps);
116
117 if (upvarMap.vector)
118 compiler->context->free(upvarMap.vector);
119 }
120
121 static ptrdiff_t
122 EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
123 {
124 jsbytecode *base, *limit, *next;
125 ptrdiff_t offset, length;
126 size_t incr, size;
127
128 base = CG_BASE(cg);
129 next = CG_NEXT(cg);
130 limit = CG_LIMIT(cg);
131 offset = next - base;
132 if (next + delta > limit) {
133 length = offset + delta;
134 length = (length <= BYTECODE_CHUNK)
135 ? BYTECODE_CHUNK
136 : JS_BIT(JS_CeilingLog2(length));
137 incr = BYTECODE_SIZE(length);
138 if (!base) {
139 JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
140 } else {
141 size = BYTECODE_SIZE(limit - base);
142 incr -= size;
143 JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
144 }
145 if (!base) {
146 js_ReportOutOfScriptQuota(cx);
147 return -1;
148 }
149 CG_BASE(cg) = base;
150 CG_LIMIT(cg) = base + length;
151 CG_NEXT(cg) = base + offset;
152 }
153 return offset;
154 }
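/*
 * [Annotation, not in the original file] Worked example of EmitCheck's
 * growth policy above: with offset 250 and delta 10, length is 260, which
 * exceeds BYTECODE_CHUNK (256), so the buffer is rounded up to
 * JS_BIT(JS_CeilingLog2(260)) == 512 bytecodes before the arena is grown.
 */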
155
156 static void
157 UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
158 {
159 jsbytecode *pc;
160 JSOp op;
161 const JSCodeSpec *cs;
162 uintN extra, depth, nuses;
163 intN ndefs;
164
165 pc = CG_CODE(cg, target);
166 op = (JSOp) *pc;
167 cs = &js_CodeSpec[op];
168 #ifdef JS_TRACER
169 extern uint8 js_opcode2extra[];
170 extra = js_opcode2extra[op];
171 #else
172 extra = 0;
173 #endif
174 if ((cs->format & JOF_TMPSLOT_MASK) || extra) {
175 depth = (uintN) cg->stackDepth +
176 ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT) +
177 extra;
178 if (depth > cg->maxStackDepth)
179 cg->maxStackDepth = depth;
180 }
181
182 nuses = js_GetStackUses(cs, op, pc);
183 cg->stackDepth -= nuses;
184 JS_ASSERT(cg->stackDepth >= 0);
185 if (cg->stackDepth < 0) {
186 char numBuf[12];
187 JSTokenStream *ts;
188
189 JS_snprintf(numBuf, sizeof numBuf, "%d", target);
190 ts = &cg->compiler->tokenStream;
191 JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
192 js_GetErrorMessage, NULL,
193 JSMSG_STACK_UNDERFLOW,
194 ts->filename ? ts->filename : "stdin",
195 numBuf);
196 }
197 ndefs = cs->ndefs;
198 if (ndefs < 0) {
199 JSObject *blockObj;
200
201 /* We just executed IndexParsedObject */
202 JS_ASSERT(op == JSOP_ENTERBLOCK);
203 JS_ASSERT(nuses == 0);
204 blockObj = cg->objectList.lastbox->object;
205 JS_ASSERT(STOBJ_GET_CLASS(blockObj) == &js_BlockClass);
206 JS_ASSERT(JSVAL_IS_VOID(blockObj->fslots[JSSLOT_BLOCK_DEPTH]));
207
208 OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
209 ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
210 }
211 cg->stackDepth += ndefs;
212 if ((uintN)cg->stackDepth > cg->maxStackDepth)
213 cg->maxStackDepth = cg->stackDepth;
214 }
215
216 ptrdiff_t
217 js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
218 {
219 ptrdiff_t offset = EmitCheck(cx, cg, op, 1);
220
221 if (offset >= 0) {
222 *CG_NEXT(cg)++ = (jsbytecode)op;
223 UpdateDepth(cx, cg, offset);
224 }
225 return offset;
226 }
227
228 ptrdiff_t
229 js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
230 {
231 ptrdiff_t offset = EmitCheck(cx, cg, op, 2);
232
233 if (offset >= 0) {
234 jsbytecode *next = CG_NEXT(cg);
235 next[0] = (jsbytecode)op;
236 next[1] = op1;
237 CG_NEXT(cg) = next + 2;
238 UpdateDepth(cx, cg, offset);
239 }
240 return offset;
241 }
242
243 ptrdiff_t
244 js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
245 jsbytecode op2)
246 {
247 ptrdiff_t offset = EmitCheck(cx, cg, op, 3);
248
249 if (offset >= 0) {
250 jsbytecode *next = CG_NEXT(cg);
251 next[0] = (jsbytecode)op;
252 next[1] = op1;
253 next[2] = op2;
254 CG_NEXT(cg) = next + 3;
255 UpdateDepth(cx, cg, offset);
256 }
257 return offset;
258 }
259
260 ptrdiff_t
261 js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
262 {
263 ptrdiff_t length = 1 + (ptrdiff_t)extra;
264 ptrdiff_t offset = EmitCheck(cx, cg, op, length);
265
266 if (offset >= 0) {
267 jsbytecode *next = CG_NEXT(cg);
268 *next = (jsbytecode)op;
269 memset(next + 1, 0, BYTECODE_SIZE(extra));
270 CG_NEXT(cg) = next + length;
271
272 /*
273 * Don't UpdateDepth if op's use-count comes from the immediate
274 * operand yet to be stored in the extra bytes after op.
275 */
276 if (js_CodeSpec[op].nuses >= 0)
277 UpdateDepth(cx, cg, offset);
278 }
279 return offset;
280 }
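/*
 * [Annotation, not in the original file] Example of the guard above: ops
 * such as JSOP_CALL have cs->nuses < 0 because their use count depends on
 * an argc immediate. When emitted via js_EmitN the immediate bytes are
 * still zeroed at this point, so computing uses now would be wrong; the
 * caller must store the operand first and adjust cg->stackDepth itself.
 */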
281
282 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
283 const char js_with_statement_str[] = "with statement";
284 const char js_finally_block_str[] = "finally block";
285 const char js_script_str[] = "script";
286
287 static const char *statementName[] = {
288 "label statement", /* LABEL */
289 "if statement", /* IF */
290 "else statement", /* ELSE */
291 "destructuring body", /* BODY */
292 "switch statement", /* SWITCH */
293 "block", /* BLOCK */
294 js_with_statement_str, /* WITH */
295 "catch block", /* CATCH */
296 "try block", /* TRY */
297 js_finally_block_str, /* FINALLY */
298 js_finally_block_str, /* SUBROUTINE */
299 "do loop", /* DO_LOOP */
300 "for loop", /* FOR_LOOP */
301 "for/in loop", /* FOR_IN_LOOP */
302 "while loop", /* WHILE_LOOP */
303 };
304
305 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);
306
307 static const char *
308 StatementName(JSCodeGenerator *cg)
309 {
310 if (!cg->topStmt)
311 return js_script_str;
312 return statementName[cg->topStmt->type];
313 }
314
315 static void
316 ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
317 {
318 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
319 StatementName(cg));
320 }
321
322 /**
323 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
324 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
325 into unconditional (gotos and gosubs), and conditional jumps or branches
326 (which pop a value, test it, and jump depending on its value). Most jumps
327 have just one immediate operand, a signed offset from the jump opcode's pc
328 to the target bytecode. The lookup and table switch opcodes may contain
329 many jump offsets.
330
331 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
332 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
333 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
334 the extended form of the JSOP_OR branch opcode). The unextended or short
335 formats have 16-bit signed immediate offset operands, the extended or long
336 formats have 32-bit signed immediates. The span-dependency problem consists
337 of selecting as few long instructions as possible, or about as few -- since
338 jumps can span other jumps, extending one jump may cause another to need to
339 be extended.
340
341 Most JS scripts are short, so need no extended jumps. We optimize for this
342 case by generating short jumps until we know a long jump is needed. After
343 that point, we keep generating short jumps, but each jump's 16-bit immediate
344 offset operand is actually an unsigned index into cg->spanDeps, an array of
345 JSSpanDep structs. Each struct tells the top offset in the script of the
346 opcode, the "before" offset of the jump (which will be the same as top for
347 simplex jumps, but which will index further into the bytecode array for a
348 non-initial jump offset in a lookup or table switch), the after "offset"
349 adjusted during span-dependent instruction selection (initially the same
350 value as the "before" offset), and the jump target (more below).
351
352 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
353 ensure that all bytecode generated so far can be inspected to discover where
354 the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
355 that we generate span-dependency records sorted by their offsets, so we can
356 binary-search when trying to find a JSSpanDep for a given bytecode offset,
357 or the nearest JSSpanDep at or above a given pc.
358
359 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
360 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
361 tells us that we need to binary-search for the cg->spanDeps entry by the
362 jump opcode's bytecode offset (sd->before).
363
364 Jump targets need to be maintained in a data structure that lets us look
365 up an already-known target by its address (jumps may have a common target),
366 and that also lets us update the addresses (script-relative, a.k.a. absolute
367 offsets) of targets that come after a jump target (for when a jump below
368 that target needs to be extended). We use an AVL tree, implemented using
369 recursion, but with some tricky optimizations to its height-balancing code
370 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).
371
372 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
373 positive sign, even though they link "backward" (i.e., toward lower bytecode
374 address). We don't want to waste space and search time in the AVL tree for
375 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
376 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
377 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
378 target, or is still awaiting backpatching.
379
380 Note that backpatch chains would present a problem for BuildSpanDepTable,
381 which inspects bytecode to build cg->spanDeps on demand, when the first
382 short jump offset overflows. To solve this temporary problem, we emit a
383 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
384 nuses/ndefs counts help keep the stack balanced, but whose opcode format
385 distinguishes its backpatch delta immediate operand from a normal jump
386 offset.
387 */
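/*
 * [Annotation, not in the original file] A minimal sketch of the one-bit
 * wildcard scheme described above, using hypothetical helper names; the
 * real encoding lives in the JT_* and SD_* macros in jsemit.h and may
 * differ in detail (e.g., which sense of the bit means "tagged").
 */
#if 0
static JSBool
SketchIsKnownTarget(jsuword targetWord)
{
    /* Tag bit set: targetWord is a tagged JSJumpTarget pointer. */
    return (targetWord & 1) != 0;
}

static ptrdiff_t
SketchBackpatchDelta(jsuword targetWord)
{
    /* Tag bit clear: targetWord encodes a positive backpatch delta. */
    JS_ASSERT((targetWord & 1) == 0);
    return (ptrdiff_t) (targetWord >> 1);
}
#endif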
388 static int
389 BalanceJumpTargets(JSJumpTarget **jtp)
390 {
391 JSJumpTarget *jt, *jt2, *root;
392 int dir, otherDir, heightChanged;
393 JSBool doubleRotate;
394
395 jt = *jtp;
396 JS_ASSERT(jt->balance != 0);
397
398 if (jt->balance < -1) {
399 dir = JT_RIGHT;
400 doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
401 } else if (jt->balance > 1) {
402 dir = JT_LEFT;
403 doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
404 } else {
405 return 0;
406 }
407
408 otherDir = JT_OTHER_DIR(dir);
409 if (doubleRotate) {
410 jt2 = jt->kids[otherDir];
411 *jtp = root = jt2->kids[dir];
412
413 jt->kids[otherDir] = root->kids[dir];
414 root->kids[dir] = jt;
415
416 jt2->kids[dir] = root->kids[otherDir];
417 root->kids[otherDir] = jt2;
418
419 heightChanged = 1;
420 root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
421 root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
422 root->balance = 0;
423 } else {
424 *jtp = root = jt->kids[otherDir];
425 jt->kids[otherDir] = root->kids[dir];
426 root->kids[dir] = jt;
427
428 heightChanged = (root->balance != 0);
429 jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
430 }
431
432 return heightChanged;
433 }
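/*
 * [Annotation, not in the original file] Worked example, assuming the
 * balance convention checked by AVLCheck below (balance == right height
 * minus left height): inserting offsets 30, 20, 10 in that order leaves
 * the node for 30 with balance -2 and its left kid (20) with balance -1,
 * so dir is JT_RIGHT and doubleRotate is false; a single rotation makes
 * 20 the root, with both balances reset to 0.
 */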
434
435 typedef struct AddJumpTargetArgs {
436 JSContext *cx;
437 JSCodeGenerator *cg;
438 ptrdiff_t offset;
439 JSJumpTarget *node;
440 } AddJumpTargetArgs;
441
442 static int
443 AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
444 {
445 JSJumpTarget *jt;
446 int balanceDelta;
447
448 jt = *jtp;
449 if (!jt) {
450 JSCodeGenerator *cg = args->cg;
451
452 jt = cg->jtFreeList;
453 if (jt) {
454 cg->jtFreeList = jt->kids[JT_LEFT];
455 } else {
456 JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
457 sizeof *jt);
458 if (!jt) {
459 js_ReportOutOfScriptQuota(args->cx);
460 return 0;
461 }
462 }
463 jt->offset = args->offset;
464 jt->balance = 0;
465 jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
466 cg->numJumpTargets++;
467 args->node = jt;
468 *jtp = jt;
469 return 1;
470 }
471
472 if (jt->offset == args->offset) {
473 args->node = jt;
474 return 0;
475 }
476
477 if (args->offset < jt->offset)
478 balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
479 else
480 balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
481 if (!args->node)
482 return 0;
483
484 jt->balance += balanceDelta;
485 return (balanceDelta && jt->balance)
486 ? 1 - BalanceJumpTargets(jtp)
487 : 0;
488 }
489
490 #ifdef DEBUG_brendan
491 static int AVLCheck(JSJumpTarget *jt)
492 {
493 int lh, rh;
494
495 if (!jt) return 0;
496 JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
497 lh = AVLCheck(jt->kids[JT_LEFT]);
498 rh = AVLCheck(jt->kids[JT_RIGHT]);
499 JS_ASSERT(jt->balance == rh - lh);
500 return 1 + JS_MAX(lh, rh);
501 }
502 #endif
503
504 static JSBool
505 SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
506 ptrdiff_t off)
507 {
508 AddJumpTargetArgs args;
509
510 if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
511 ReportStatementTooLarge(cx, cg);
512 return JS_FALSE;
513 }
514
515 args.cx = cx;
516 args.cg = cg;
517 args.offset = sd->top + off;
518 args.node = NULL;
519 AddJumpTarget(&args, &cg->jumpTargets);
520 if (!args.node)
521 return JS_FALSE;
522
523 #ifdef DEBUG_brendan
524 AVLCheck(cg->jumpTargets);
525 #endif
526
527 SD_SET_TARGET(sd, args.node);
528 return JS_TRUE;
529 }
530
531 #define SPANDEPS_MIN 256
532 #define SPANDEPS_SIZE(n) ((n) * sizeof(JSSpanDep))
533 #define SPANDEPS_SIZE_MIN SPANDEPS_SIZE(SPANDEPS_MIN)
534
535 static JSBool
536 AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
537 ptrdiff_t off)
538 {
539 uintN index;
540 JSSpanDep *sdbase, *sd;
541 size_t size;
542
543 index = cg->numSpanDeps;
544 if (index + 1 == 0) {
545 ReportStatementTooLarge(cx, cg);
546 return JS_FALSE;
547 }
548
549 if ((index & (index - 1)) == 0 &&
550 (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
551 size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
552 sdbase = (JSSpanDep *) cx->realloc(sdbase, size + size);
553 if (!sdbase)
554 return JS_FALSE;
555 cg->spanDeps = sdbase;
556 }
557
558 cg->numSpanDeps = index + 1;
559 sd = cg->spanDeps + index;
560 sd->top = pc - CG_BASE(cg);
561 sd->offset = sd->before = pc2 - CG_BASE(cg);
562
563 if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
564 /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
565 if (off != 0) {
566 JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
567 if (off > BPDELTA_MAX) {
568 ReportStatementTooLarge(cx, cg);
569 return JS_FALSE;
570 }
571 }
572 SD_SET_BPDELTA(sd, off);
573 } else if (off == 0) {
574 /* Jump offset will be patched directly, without backpatch chaining. */
575 SD_SET_TARGET(sd, 0);
576 } else {
577 /* The jump offset in off is non-zero, therefore it's already known. */
578 if (!SetSpanDepTarget(cx, cg, sd, off))
579 return JS_FALSE;
580 }
581
582 if (index > SPANDEP_INDEX_MAX)
583 index = SPANDEP_INDEX_HUGE;
584 SET_SPANDEP_INDEX(pc2, index);
585 return JS_TRUE;
586 }
587
588 static jsbytecode *
589 AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
590 {
591 JSOp op;
592 jsbytecode *pc2;
593 ptrdiff_t off;
594 jsint low, high;
595 uintN njumps, indexlen;
596
597 op = (JSOp) *pc;
598 JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
599 pc2 = pc;
600 off = GET_JUMP_OFFSET(pc2);
601 if (!AddSpanDep(cx, cg, pc, pc2, off))
602 return NULL;
603 pc2 += JUMP_OFFSET_LEN;
604 if (op == JSOP_TABLESWITCH) {
605 low = GET_JUMP_OFFSET(pc2);
606 pc2 += JUMP_OFFSET_LEN;
607 high = GET_JUMP_OFFSET(pc2);
608 pc2 += JUMP_OFFSET_LEN;
609 njumps = (uintN) (high - low + 1);
610 indexlen = 0;
611 } else {
612 njumps = GET_UINT16(pc2);
613 pc2 += UINT16_LEN;
614 indexlen = INDEX_LEN;
615 }
616 while (njumps) {
617 --njumps;
618 pc2 += indexlen;
619 off = GET_JUMP_OFFSET(pc2);
620 if (!AddSpanDep(cx, cg, pc, pc2, off))
621 return NULL;
622 pc2 += JUMP_OFFSET_LEN;
623 }
624 return 1 + pc2;
625 }
626
627 static JSBool
628 BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
629 {
630 jsbytecode *pc, *end;
631 JSOp op;
632 const JSCodeSpec *cs;
633 ptrdiff_t off;
634
635 pc = CG_BASE(cg) + cg->spanDepTodo;
636 end = CG_NEXT(cg);
637 while (pc != end) {
638 JS_ASSERT(pc < end);
639 op = (JSOp)*pc;
640 cs = &js_CodeSpec[op];
641
642 switch (JOF_TYPE(cs->format)) {
643 case JOF_TABLESWITCH:
644 case JOF_LOOKUPSWITCH:
645 pc = AddSwitchSpanDeps(cx, cg, pc);
646 if (!pc)
647 return JS_FALSE;
648 break;
649
650 case JOF_JUMP:
651 off = GET_JUMP_OFFSET(pc);
652 if (!AddSpanDep(cx, cg, pc, pc, off))
653 return JS_FALSE;
654 /* FALL THROUGH */
655 default:
656 pc += cs->length;
657 break;
658 }
659 }
660
661 return JS_TRUE;
662 }
663
664 static JSSpanDep *
665 GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
666 {
667 uintN index;
668 ptrdiff_t offset;
669 int lo, hi, mid;
670 JSSpanDep *sd;
671
672 index = GET_SPANDEP_INDEX(pc);
673 if (index != SPANDEP_INDEX_HUGE)
674 return cg->spanDeps + index;
675
676 offset = pc - CG_BASE(cg);
677 lo = 0;
678 hi = cg->numSpanDeps - 1;
679 while (lo <= hi) {
680 mid = (lo + hi) / 2;
681 sd = cg->spanDeps + mid;
682 if (sd->before == offset)
683 return sd;
684 if (sd->before < offset)
685 lo = mid + 1;
686 else
687 hi = mid - 1;
688 }
689
690 JS_ASSERT(0);
691 return NULL;
692 }
693
694 static JSBool
695 SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
696 ptrdiff_t delta)
697 {
698 JSSpanDep *sd;
699
700 JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
701 if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
702 SET_JUMP_OFFSET(pc, delta);
703 return JS_TRUE;
704 }
705
706 if (delta > BPDELTA_MAX) {
707 ReportStatementTooLarge(cx, cg);
708 return JS_FALSE;
709 }
710
711 if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
712 return JS_FALSE;
713
714 sd = GetSpanDep(cg, pc);
715 JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
716 SD_SET_BPDELTA(sd, delta);
717 return JS_TRUE;
718 }
719
720 static void
721 UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
722 {
723 if (jt->offset > pivot) {
724 jt->offset += delta;
725 if (jt->kids[JT_LEFT])
726 UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
727 }
728 if (jt->kids[JT_RIGHT])
729 UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
730 }
731
732 static JSSpanDep *
733 FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
734 JSSpanDep *guard)
735 {
736 int num, hi, mid;
737 JSSpanDep *sdbase, *sd;
738
739 num = cg->numSpanDeps;
740 JS_ASSERT(num > 0);
741 hi = num - 1;
742 sdbase = cg->spanDeps;
743 while (lo <= hi) {
744 mid = (lo + hi) / 2;
745 sd = sdbase + mid;
746 if (sd->before == offset)
747 return sd;
748 if (sd->before < offset)
749 lo = mid + 1;
750 else
751 hi = mid - 1;
752 }
753 if (lo == num)
754 return guard;
755 sd = sdbase + lo;
756 JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
757 return sd;
758 }
759
760 static void
761 FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
762 {
763 if (jt->kids[JT_LEFT])
764 FreeJumpTargets(cg, jt->kids[JT_LEFT]);
765 if (jt->kids[JT_RIGHT])
766 FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
767 jt->kids[JT_LEFT] = cg->jtFreeList;
768 cg->jtFreeList = jt;
769 }
770
771 static JSBool
772 OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
773 {
774 jsbytecode *pc, *oldpc, *base, *limit, *next;
775 JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
776 ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
777 JSBool done;
778 JSOp op;
779 uint32 type;
780 size_t size, incr;
781 jssrcnote *sn, *snlimit;
782 JSSrcNoteSpec *spec;
783 uintN i, n, noteIndex;
784 JSTryNode *tryNode;
785 #ifdef DEBUG_brendan
786 int passes = 0;
787 #endif
788
789 base = CG_BASE(cg);
790 sdbase = cg->spanDeps;
791 sdlimit = sdbase + cg->numSpanDeps;
792 offset = CG_OFFSET(cg);
793 growth = 0;
794
795 do {
796 done = JS_TRUE;
797 delta = 0;
798 top = pivot = -1;
799 sdtop = NULL;
800 pc = NULL;
801 op = JSOP_NOP;
802 type = 0;
803 #ifdef DEBUG_brendan
804 passes++;
805 #endif
806
807 for (sd = sdbase; sd < sdlimit; sd++) {
808 JS_ASSERT(JT_HAS_TAG(sd->target));
809 sd->offset += delta;
810
811 if (sd->top != top) {
812 sdtop = sd;
813 top = sd->top;
814 JS_ASSERT(top == sd->before);
815 pivot = sd->offset;
816 pc = base + top;
817 op = (JSOp) *pc;
818 type = JOF_OPTYPE(op);
819 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
820 /*
821 * We already extended all the jump offset operands for
822 * the opcode at sd->top. Jumps and branches have only
823 * one jump offset operand, but switches have many, all
824 * of which are adjacent in cg->spanDeps.
825 */
826 continue;
827 }
828
829 JS_ASSERT(type == JOF_JUMP ||
830 type == JOF_TABLESWITCH ||
831 type == JOF_LOOKUPSWITCH);
832 }
833
834 if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
835 span = SD_SPAN(sd, pivot);
836 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
837 ptrdiff_t deltaFromTop = 0;
838
839 done = JS_FALSE;
840
841 switch (op) {
842 case JSOP_GOTO: op = JSOP_GOTOX; break;
843 case JSOP_IFEQ: op = JSOP_IFEQX; break;
844 case JSOP_IFNE: op = JSOP_IFNEX; break;
845 case JSOP_OR: op = JSOP_ORX; break;
846 case JSOP_AND: op = JSOP_ANDX; break;
847 case JSOP_GOSUB: op = JSOP_GOSUBX; break;
848 case JSOP_CASE: op = JSOP_CASEX; break;
849 case JSOP_DEFAULT: op = JSOP_DEFAULTX; break;
850 case JSOP_TABLESWITCH: op = JSOP_TABLESWITCHX; break;
851 case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
852 default:
853 ReportStatementTooLarge(cx, cg);
854 return JS_FALSE;
855 }
856 *pc = (jsbytecode) op;
857
858 for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
859 if (sd2 <= sd) {
860 /*
861 * sd2->offset already includes delta as it stood
862 * before we entered this loop, but it must also
863 * include the delta relative to top due to all the
864 * extended jump offset immediates for the opcode
865 * starting at top, which we extend in this loop.
866 *
867 * If there is only one extended jump offset, then
868 * sd2->offset won't change and this for loop will
869 * iterate once only.
870 */
871 sd2->offset += deltaFromTop;
872 deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
873 } else {
874 /*
875 * sd2 comes after sd, and won't be revisited by
876 * the outer for loop, so we have to increase its
877 * offset by delta, not merely by deltaFromTop.
878 */
879 sd2->offset += delta;
880 }
881
882 delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
883 UpdateJumpTargets(cg->jumpTargets, sd2->offset,
884 JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
885 }
886 sd = sd2 - 1;
887 }
888 }
889 }
890
891 growth += delta;
892 } while (!done);
893
894 if (growth) {
895 #ifdef DEBUG_brendan
896 JSTokenStream *ts = &cg->compiler->tokenStream;
897
898 printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
899 ts->filename ? ts->filename : "stdin", cg->firstLine,
900 growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
901 passes, offset + growth, offset, growth);
902 #endif
903
904 /*
905 * Ensure that we have room for the extended jumps, but don't round up
906 * to a power of two -- we're done generating code, so we cut to fit.
907 */
908 limit = CG_LIMIT(cg);
909 length = offset + growth;
910 next = base + length;
911 if (next > limit) {
912 JS_ASSERT(length > BYTECODE_CHUNK);
913 size = BYTECODE_SIZE(limit - base);
914 incr = BYTECODE_SIZE(length) - size;
915 JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
916 if (!base) {
917 js_ReportOutOfScriptQuota(cx);
918 return JS_FALSE;
919 }
920 CG_BASE(cg) = base;
921 CG_LIMIT(cg) = next = base + length;
922 }
923 CG_NEXT(cg) = next;
924
925 /*
926 * Set up a fake span dependency record to guard the end of the code
927 * being generated. This guard record is returned as a fencepost by
928 * FindNearestSpanDep if there is no real spandep at or above a given
929 * unextended code offset.
930 */
931 guard.top = -1;
932 guard.offset = offset + growth;
933 guard.before = offset;
934 guard.target = NULL;
935 }
936
937 /*
938 * Now work backwards through the span dependencies, copying chunks of
939 * bytecode between each extended jump toward the end of the grown code
940 * space, and restoring immediate offset operands for all jump bytecodes.
941 * The first chunk of bytecodes, starting at base and ending at the first
942 * extended jump offset (NB: this chunk includes the operation bytecode
943 * just before that immediate jump offset), doesn't need to be copied.
944 */
945 JS_ASSERT(sd == sdlimit);
946 top = -1;
947 while (--sd >= sdbase) {
948 if (sd->top != top) {
949 top = sd->top;
950 op = (JSOp) base[top];
951 type = JOF_OPTYPE(op);
952
953 for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
954 continue;
955 sd2++;
956 pivot = sd2->offset;
957 JS_ASSERT(top == sd2->before);
958 }
959
960 oldpc = base + sd->before;
961 span = SD_SPAN(sd, pivot);
962
963 /*
964 * If this jump didn't need to be extended, restore its span immediate
965 * offset operand now, overwriting the index of sd within cg->spanDeps
966 * that was stored temporarily after *pc when BuildSpanDepTable ran.
967 *
968 * Note that span might fit in 16 bits even for an extended jump op,
969 * if the op has multiple span operands, not all of which overflowed
970 * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
971 * range for a short jump, but others are not).
972 */
973 if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
974 JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
975 SET_JUMP_OFFSET(oldpc, span);
976 continue;
977 }
978
979 /*
980 * Set up parameters needed to copy the next run of bytecode starting
981 * at offset (which is a cursor into the unextended, original bytecode
982 * vector), down to sd->before (a cursor of the same scale as offset,
983 * it's the index of the original jump pc). Reuse delta to count the
984 * nominal number of bytes to copy.
985 */
986 pc = base + sd->offset;
987 delta = offset - sd->before;
988 JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
989
990 /*
991 * Don't bother copying the jump offset we're about to reset, but do
992 * copy the bytecode at oldpc (which comes just before its immediate
993 * jump offset operand), on the next iteration through the loop, by
994 * including it in offset's new value.
995 */
996 offset = sd->before + 1;
997 size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
998 if (size) {
999 memmove(pc + 1 + JUMPX_OFFSET_LEN,
1000 oldpc + 1 + JUMP_OFFSET_LEN,
1001 size);
1002 }
1003
1004 SET_JUMPX_OFFSET(pc, span);
1005 }
1006
1007 if (growth) {
1008 /*
1009 * Fix source note deltas. Don't hardwire the delta fixup adjustment,
1010 * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
1011 * at each sd that moved. The future may bring different offset sizes
1012 * for span-dependent instruction operands. However, we fix only main
1013 * notes here, not prolog notes -- we know that prolog opcodes are not
1014 * span-dependent, and aren't likely ever to be.
1015 */
1016 offset = growth = 0;
1017 sd = sdbase;
1018 for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
1019 sn < snlimit;
1020 sn = SN_NEXT(sn)) {
1021 /*
1022 * Recall that the offset of a given note includes its delta, and
1023 * tells the offset of the annotated bytecode from the main entry
1024 * point of the script.
1025 */
1026 offset += SN_DELTA(sn);
1027 while (sd < sdlimit && sd->before < offset) {
1028 /*
1029 * To compute the delta to add to sn, we need to look at the
1030 * spandep after sd, whose offset - (before + growth) tells by
1031 * how many bytes sd's instruction grew.
1032 */
1033 sd2 = sd + 1;
1034 if (sd2 == sdlimit)
1035 sd2 = &guard;
1036 delta = sd2->offset - (sd2->before + growth);
1037 if (delta > 0) {
1038 JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
1039 sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
1040 if (!sn)
1041 return JS_FALSE;
1042 snlimit = cg->main.notes + cg->main.noteCount;
1043 growth += delta;
1044 }
1045 sd++;
1046 }
1047
1048 /*
1049 * If sn has span-dependent offset operands, check whether each
1050 * covers further span-dependencies, and increase those operands
1051 * accordingly. Some source notes measure offset not from the
1052 * annotated pc, but from that pc plus some small bias. NB: we
1053 * assume that spec->offsetBias can't itself span span-dependent
1054 * instructions!
1055 */
1056 spec = &js_SrcNoteSpec[SN_TYPE(sn)];
1057 if (spec->isSpanDep) {
1058 pivot = offset + spec->offsetBias;
1059 n = spec->arity;
1060 for (i = 0; i < n; i++) {
1061 span = js_GetSrcNoteOffset(sn, i);
1062 if (span == 0)
1063 continue;
1064 target = pivot + span * spec->isSpanDep;
1065 sd2 = FindNearestSpanDep(cg, target,
1066 (target >= pivot)
1067 ? sd - sdbase
1068 : 0,
1069 &guard);
1070
1071 /*
1072 * Increase target by sd2's before-vs-after offset delta,
1073 * which is absolute (i.e., relative to start of script,
1074 * as is target). Recompute the span by subtracting its
1075 * adjusted pivot from target.
1076 */
1077 target += sd2->offset - sd2->before;
1078 span = target - (pivot + growth);
1079 span *= spec->isSpanDep;
1080 noteIndex = sn - cg->main.notes;
1081 if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
1082 return JS_FALSE;
1083 sn = cg->main.notes + noteIndex;
1084 snlimit = cg->main.notes + cg->main.noteCount;
1085 }
1086 }
1087 }
1088 cg->main.lastNoteOffset += growth;
1089
1090 /*
1091 * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
1092 * not clear how we can beat that).
1093 */
1094 for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
1095 /*
1096 * First, look for the nearest span dependency at/above tn->start.
1097 * There may not be any such spandep, in which case the guard will
1098 * be returned.
1099 */
1100 offset = tryNode->note.start;
1101 sd = FindNearestSpanDep(cg, offset, 0, &guard);
1102 delta = sd->offset - sd->before;
1103 tryNode->note.start = offset + delta;
1104
1105 /*
1106 * Next, find the nearest spandep at/above tn->start + tn->length.
1107 * Use its delta minus tn->start's delta to increase tn->length.
1108 */
1109 length = tryNode->note.length;
1110 sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
1111 if (sd2 != sd) {
1112 tryNode->note.length =
1113 length + sd2->offset - sd2->before - delta;
1114 }
1115 }
1116 }
1117
1118 #ifdef DEBUG_brendan
1119 {
1120 uintN bigspans = 0;
1121 top = -1;
1122 for (sd = sdbase; sd < sdlimit; sd++) {
1123 offset = sd->offset;
1124
1125 /* NB: sd->top cursors into the original, unextended bytecode vector. */
1126 if (sd->top != top) {
1127 JS_ASSERT(top == -1 ||
1128 !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
1129 bigspans != 0);
1130 bigspans = 0;
1131 top = sd->top;
1132 JS_ASSERT(top == sd->before);
1133 op = (JSOp) base[offset];
1134 type = JOF_OPTYPE(op);
1135 JS_ASSERT(type == JOF_JUMP ||
1136 type == JOF_JUMPX ||
1137 type == JOF_TABLESWITCH ||
1138 type == JOF_TABLESWITCHX ||
1139 type == JOF_LOOKUPSWITCH ||
1140 type == JOF_LOOKUPSWITCHX);
1141 pivot = offset;
1142 }
1143
1144 pc = base + offset;
1145 if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
1146 span = GET_JUMPX_OFFSET(pc);
1147 if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
1148 bigspans++;
1149 } else {
1150 JS_ASSERT(type == JOF_TABLESWITCHX ||
1151 type == JOF_LOOKUPSWITCHX);
1152 }
1153 } else {
1154 span = GET_JUMP_OFFSET(pc);
1155 }
1156 JS_ASSERT(SD_SPAN(sd, pivot) == span);
1157 }
1158 JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
1159 }
1160 #endif
1161
1162 /*
1163 * Reset so we optimize at most once -- cg may be used for further code
1164 * generation of successive, independent, top-level statements. No jump
1165 * can span top-level statements, because JS lacks goto.
1166 */
1167 size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
1168 cx->free(cg->spanDeps);
1169 cg->spanDeps = NULL;
1170 FreeJumpTargets(cg, cg->jumpTargets);
1171 cg->jumpTargets = NULL;
1172 cg->numSpanDeps = cg->numJumpTargets = 0;
1173 cg->spanDepTodo = CG_OFFSET(cg);
1174 return JS_TRUE;
1175 }
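/*
 * [Annotation, not in the original file] Concrete effect of the pass
 * above: a JSOP_GOTO whose target lies, say, 40000 bytes away cannot fit
 * in JUMP_OFFSET_MAX (a signed 16-bit immediate), so it is rewritten as
 * JSOP_GOTOX with a 32-bit immediate. Each extension grows the script by
 * JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN bytes, and the fixup loops above
 * shift later span deps, source notes, and try notes by the same amount.
 */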
1176
1177 static ptrdiff_t
1178 EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
1179 {
1180 JSBool extend;
1181 ptrdiff_t jmp;
1182 jsbytecode *pc;
1183
1184 extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
1185 if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
1186 return -1;
1187
1188 jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
1189 if (jmp >= 0 && (extend || cg->spanDeps)) {
1190 pc = CG_CODE(cg, jmp);
1191 if (!AddSpanDep(cx, cg, pc, pc, off))
1192 return -1;
1193 }
1194 return jmp;
1195 }
1196
1197 static ptrdiff_t
1198 GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
1199 {
1200 JSSpanDep *sd;
1201 JSJumpTarget *jt;
1202 ptrdiff_t top;
1203
1204 if (!cg->spanDeps)
1205 return GET_JUMP_OFFSET(pc);
1206
1207 sd = GetSpanDep(cg, pc);
1208 jt = sd->target;
1209 if (!JT_HAS_TAG(jt))
1210 return JT_TO_BPDELTA(jt);
1211
1212 top = sd->top;
1213 while (--sd >= cg->spanDeps && sd->top == top)
1214 continue;
1215 sd++;
1216 return JT_CLR_TAG(jt)->offset - sd->offset;
1217 }
1218
1219 JSBool
1220 js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
1221 ptrdiff_t off)
1222 {
1223 if (!cg->spanDeps) {
1224 if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
1225 SET_JUMP_OFFSET(pc, off);
1226 return JS_TRUE;
1227 }
1228
1229 if (!BuildSpanDepTable(cx, cg))
1230 return JS_FALSE;
1231 }
1232
1233 return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
1234 }
1235
1236 bool
1237 JSTreeContext::inStatement(JSStmtType type)
1238 {
1239 for (JSStmtInfo *stmt = topStmt; stmt; stmt = stmt->down) {
1240 if (stmt->type == type)
1241 return true;
1242 }
1243 return false;
1244 }
1245
1246 void
1247 js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
1248 ptrdiff_t top)
1249 {
1250 stmt->type = type;
1251 stmt->flags = 0;
1252 stmt->blockid = tc->blockid();
1253 SET_STATEMENT_TOP(stmt, top);
1254 stmt->label = NULL;
1255 JS_ASSERT(!stmt->blockObj);
1256 stmt->down = tc->topStmt;
1257 tc->topStmt = stmt;
1258 if (STMT_LINKS_SCOPE(stmt)) {
1259 stmt->downScope = tc->topScopeStmt;
1260 tc->topScopeStmt = stmt;
1261 } else {
1262 stmt->downScope = NULL;
1263 }
1264 }
1265
1266 void
1267 js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObject *blockObj,
1268 ptrdiff_t top)
1269 {
1270 js_PushStatement(tc, stmt, STMT_BLOCK, top);
1271 stmt->flags |= SIF_SCOPE;
1272 STOBJ_SET_PARENT(blockObj, tc->blockChain);
1273 stmt->downScope = tc->topScopeStmt;
1274 tc->topScopeStmt = stmt;
1275 tc->blockChain = blockObj;
1276 stmt->blockObj = blockObj;
1277 }
1278
1279 /*
1280 * Emit a backpatch op with offset pointing to the previous jump of this type,
1281 * so that we can walk back up the chain fixing up the op and jump offset.
1282 */
1283 static ptrdiff_t
1284 EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
1285 {
1286 ptrdiff_t offset, delta;
1287
1288 offset = CG_OFFSET(cg);
1289 delta = offset - *lastp;
1290 *lastp = offset;
1291 JS_ASSERT(delta > 0);
1292 return EmitJump(cx, cg, op, delta);
1293 }
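/*
 * [Annotation, not in the original file] Worked example: with *lastp
 * starting at -1 (how stmt->breaks and stmt->continues are initialized),
 * a break emitted at offset 10 stores delta 11, and a second break at
 * offset 25 stores delta 15. BackPatch (below) walks from the last op:
 * 25 - 15 = 10, then 10 - 11 = -1, the sentinel that stops the walk,
 * rewriting each op and its real jump offset along the way.
 */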
1294
1295 /*
1296 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
1297 * big-endian order, used for arg and var numbers as well as for atomIndexes.
1298 * NB: We use cx and cg from our caller's lexical environment, and return
1299 * false on error.
1300 */
1301 #define EMIT_UINT16_IMM_OP(op, i) \
1302 JS_BEGIN_MACRO \
1303 if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
1304 return JS_FALSE; \
1305 JS_END_MACRO
1306
1307 static JSBool
1308 FlushPops(JSContext *cx, JSCodeGenerator *cg, intN *npops)
1309 {
1310 JS_ASSERT(*npops != 0);
1311 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1312 return JS_FALSE;
1313 EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
1314 *npops = 0;
1315 return JS_TRUE;
1316 }
1317
1318 /*
1319 * Emit additional bytecode(s) for non-local jumps.
1320 */
1321 static JSBool
1322 EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt)
1323 {
1324 intN depth, npops;
1325 JSStmtInfo *stmt;
1326
1327 /*
1328 * The non-local jump fixup we emit will unbalance cg->stackDepth, because
1329 * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
1330 * end of a with statement, so we save cg->stackDepth here and restore it
1331 * just before a successful return.
1332 */
1333 depth = cg->stackDepth;
1334 npops = 0;
1335
1336 #define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE
1337
1338 for (stmt = cg->topStmt; stmt != toStmt; stmt = stmt->down) {
1339 switch (stmt->type) {
1340 case STMT_FINALLY:
1341 FLUSH_POPS();
1342 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1343 return JS_FALSE;
1344 if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
1345 return JS_FALSE;
1346 break;
1347
1348 case STMT_WITH:
1349 /* There's a With object on the stack that we need to pop. */
1350 FLUSH_POPS();
1351 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1352 return JS_FALSE;
1353 if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
1354 return JS_FALSE;
1355 break;
1356
1357 case STMT_FOR_IN_LOOP:
1358 /*
1359 * The iterator and the object being iterated need to be popped.
1360 */
1361 FLUSH_POPS();
1362 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1363 return JS_FALSE;
1364 if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
1365 return JS_FALSE;
1366 break;
1367
1368 case STMT_SUBROUTINE:
1369 /*
1370 * There's an [exception or hole, retsub pc-index] pair on the
1371 * stack that we need to pop.
1372 */
1373 npops += 2;
1374 break;
1375
1376 default:;
1377 }
1378
1379 if (stmt->flags & SIF_SCOPE) {
1380 uintN i;
1381
1382 /* There is a Block object with locals on the stack to pop. */
1383 FLUSH_POPS();
1384 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
1385 return JS_FALSE;
1386 i = OBJ_BLOCK_COUNT(cx, stmt->blockObj);
1387 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i);
1388 }
1389 }
1390
1391 FLUSH_POPS();
1392 cg->stackDepth = depth;
1393 return JS_TRUE;
1394
1395 #undef FLUSH_POPS
1396 }
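/*
 * [Annotation, not in the original file] Example: for
 *
 *   for (;;) { with (o) break; }
 *
 * the break jumps from inside the with statement to the loop's end, so
 * the fixup above emits a hidden JSOP_LEAVEWITH before the backpatched
 * goto, keeping the With object from leaking past the jump.
 */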
1397
1398 static ptrdiff_t
1399 EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
1400 ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
1401 {
1402 intN index;
1403
1404 if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
1405 return -1;
1406
1407 if (label)
1408 index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
1409 else if (noteType != SRC_NULL)
1410 index = js_NewSrcNote(cx, cg, noteType);
1411 else
1412 index = 0;
1413 if (index < 0)
1414 return -1;
1415
1416 return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
1417 }
1418
1419 static JSBool
1420 BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
1421 jsbytecode *target, jsbytecode op)
1422 {
1423 jsbytecode *pc, *stop;
1424 ptrdiff_t delta, span;
1425
1426 pc = CG_CODE(cg, last);
1427 stop = CG_CODE(cg, -1);
1428 while (pc != stop) {
1429 delta = GetJumpOffset(cg, pc);
1430 span = target - pc;
1431 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);
1432
1433 /*
1434 * Set *pc after jump offset in case bpdelta didn't overflow, but span
1435 * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
1436 * and need to see the JSOP_BACKPATCH* op at *pc).
1437 */
1438 *pc = op;
1439 pc -= delta;
1440 }
1441 return JS_TRUE;
1442 }
1443
1444 void
1445 js_PopStatement(JSTreeContext *tc)
1446 {
1447 JSStmtInfo *stmt;
1448
1449 stmt = tc->topStmt;
1450 tc->topStmt = stmt->down;
1451 if (STMT_LINKS_SCOPE(stmt)) {
1452 tc->topScopeStmt = stmt->downScope;
1453 if (stmt->flags & SIF_SCOPE) {
1454 tc->blockChain = STOBJ_GET_PARENT(stmt->blockObj);
1455 JS_SCOPE_DEPTH_METERING(--tc->scopeDepth);
1456 }
1457 }
1458 }
1459
1460 JSBool
1461 js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
1462 {
1463 JSStmtInfo *stmt;
1464
1465 stmt = cg->topStmt;
1466 if (!STMT_IS_TRYING(stmt) &&
1467 (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
1468 !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
1469 JSOP_GOTO))) {
1470 return JS_FALSE;
1471 }
1472 js_PopStatement(cg);
1473 return JS_TRUE;
1474 }
1475
1476 JSBool
1477 js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1478 JSParseNode *pn)
1479 {
1480 jsdouble dval;
1481 jsint ival;
1482 JSAtom *valueAtom;
1483 jsval v;
1484 JSAtomListElement *ale;
1485
1486 /* XXX just do numbers for now */
1487 if (pn->pn_type == TOK_NUMBER) {
1488 dval = pn->pn_dval;
1489 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
1490 v = INT_TO_JSVAL(ival);
1491 } else {
1492 /*
1493 * We atomize double to root a jsdouble instance that we wrap as
1494 * jsval and store in cg->constList. This works because atoms are
1495 * protected from GC during compilation.
1496 */
1497 valueAtom = js_AtomizeDouble(cx, dval);
1498 if (!valueAtom)
1499 return JS_FALSE;
1500 v = ATOM_KEY(valueAtom);
1501 }
1502 ale = cg->constList.add(cg->compiler, atom);
1503 if (!ale)
1504 return JS_FALSE;
1505 ALE_SET_VALUE(ale, v);
1506 }
1507 return JS_TRUE;
1508 }
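/*
 * [Annotation, not in the original file] Example: compiling
 *
 *   const K = 2;
 *   switch (x) { case K: ... }
 *
 * records K -> 2 in cg->constList here, so LookupCompileTimeConstant
 * (below) can fold the case value at compile time.
 */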
1509
1510 JSStmtInfo *
1511 js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt)
1512 {
1513 JSObject *obj;
1514 JSScope *scope;
1515 JSScopeProperty *sprop;
1516
1517 if (!stmt)
1518 stmt = tc->topScopeStmt;
1519 for (; stmt; stmt = stmt->downScope) {
1520 if (stmt->type == STMT_WITH)
1521 break;
1522
1523 /* Skip "maybe scope" statements that don't contain let bindings. */
1524 if (!(stmt->flags & SIF_SCOPE))
1525 continue;
1526
1527 obj = stmt->blockObj;
1528 JS_ASSERT(obj->getClass() == &js_BlockClass);
1529 scope = OBJ_SCOPE(obj);
1530 sprop = scope->lookup(ATOM_TO_JSID(atom));
1531 if (sprop) {
1532 JS_ASSERT(sprop->flags & SPROP_HAS_SHORTID);
1533
1534 if (slotp) {
1535 JS_ASSERT(JSVAL_IS_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]));
1536 *slotp = JSVAL_TO_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]) +
1537 sprop->shortid;
1538 }
1539 return stmt;
1540 }
1541 }
1542
1543 if (slotp)
1544 *slotp = -1;
1545 return stmt;
1546 }
1547
1548 /*
1549 * Check if the attributes describe a property holding a compile-time constant
1550 * or a permanent, read-only property without a getter.
1551 */
1552 #define IS_CONSTANT_PROPERTY(attrs) \
1553 (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) == \
1554 (JSPROP_READONLY | JSPROP_PERMANENT))
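/*
 * [Annotation, not in the original file] Sketch of embedding code that
 * would define a property satisfying IS_CONSTANT_PROPERTY; the name
 * "LIMIT" and the global object variable are hypothetical.
 */
#if 0
    JS_DefineProperty(cx, global, "LIMIT", INT_TO_JSVAL(100),
                      NULL, NULL, /* no getter or setter */
                      JSPROP_READONLY | JSPROP_PERMANENT);
#endif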
1555
1556 /*
1557 * The function sets vp to JSVAL_HOLE when the atom does not correspond to a
1558 * name defining a constant.
1559 */
1560 static JSBool
1561 LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
1562 jsval *vp)
1563 {
1564 JSBool ok;
1565 JSStmtInfo *stmt;
1566 JSAtomListElement *ale;
1567 JSObject *obj, *objbox;
1568 JSProperty *prop;
1569 uintN attrs;
1570
1571 /*
1572 * Chase down the cg stack, but only until we reach the outermost cg.
1573 * This enables propagating consts from top-level into switch cases in a
1574 * function compiled along with the top-level script.
1575 */
1576 *vp = JSVAL_HOLE;
1577 do {
1578 if (cg->flags & (TCF_IN_FUNCTION | TCF_COMPILE_N_GO)) {
1579 /* XXX this will need revising if 'const' becomes block-scoped. */
1580 stmt = js_LexicalLookup(cg, atom, NULL);
1581 if (stmt)
1582 return JS_TRUE;
1583
1584 ale = cg->constList.lookup(atom);
1585 if (ale) {
1586 JS_ASSERT(ALE_VALUE(ale) != JSVAL_HOLE);
1587 *vp = ALE_VALUE(ale);
1588 return JS_TRUE;
1589 }
1590
1591 /*
1592 * Try looking in the variable object for a direct property that
1593 * is readonly and permanent. We know such a property can't be
1594 * shadowed by another property on obj's prototype chain, or a
1595 * with object or catch variable; nor can prop's value be changed,
1596 * nor can prop be deleted.
1597 */
1598 if (cg->flags & TCF_IN_FUNCTION) {
1599 if (js_LookupLocal(cx, cg->fun, atom, NULL) != JSLOCAL_NONE)
1600 break;
1601 } else {
1602 JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
1603 obj = cg->scopeChain;
1604 ok = obj->lookupProperty(cx, ATOM_TO_JSID(atom), &objbox, &prop);
1605 if (!ok)
1606 return JS_FALSE;
1607 if (objbox == obj) {
1608 /*
1609 * We're compiling code that will be executed immediately,
1610 * not re-executed against a different scope chain and/or
1611 * variable object. Therefore we can get constant values
1612 * from our variable object here.
1613 */
1614 ok = obj->getAttributes(cx, ATOM_TO_JSID(atom), prop, &attrs);
1615 if (ok && IS_CONSTANT_PROPERTY(attrs)) {
1616 ok = obj->getProperty(cx, ATOM_TO_JSID(atom), vp);
1617 JS_ASSERT_IF(ok, *vp != JSVAL_HOLE);
1618 }
1619 }
1620 if (prop)
1621 objbox->dropProperty(cx, prop);
1622 if (!ok)
1623 return JS_FALSE;
1624 if (prop)
1625 break;
1626 }
1627 }
1628 } while ((cg = (JSCodeGenerator *) cg->parent) != NULL);
1629 return JS_TRUE;
1630 }
1631
1632 /*
1633 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
1634 * reset instruction is necessary, JSOP_FALSE to indicate an error, or either
1635 * JSOP_RESETBASE0 or JSOP_RESETBASE to indicate the reset bytecode to issue
1636 * after the main bytecode sequence.
1637 */
1638 static JSOp
1639 EmitBigIndexPrefix(JSContext *cx, JSCodeGenerator *cg, uintN index)
1640 {
1641 uintN indexBase;
1642
1643 /*
1644 * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
1645 * for big indexes.
1646 */
1647 JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24));
1648 JS_STATIC_ASSERT(INDEX_LIMIT >=
1649 (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16);
1650
1651 if (index < JS_BIT(16))
1652 return JSOP_NOP;
1653 indexBase = index >> 16;
1654 if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) {
1655 if (js_Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
1656 return JSOP_FALSE;
1657 return JSOP_RESETBASE0;
1658 }
1659
1660 if (index >= INDEX_LIMIT) {
1661 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
1662 JSMSG_TOO_MANY_LITERALS);
1663 return JSOP_FALSE;
1664 }
1665
1666 if (js_Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
1667 return JSOP_FALSE;
1668 return JSOP_RESETBASE;
1669 }
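/*
 * [Annotation, not in the original file] Worked example: for index
 * 0x12345, indexBase is 1, so we emit JSOP_INDEXBASE1; the prefixed op
 * then carries the low 16 bits (0x2345), and the caller appends the
 * returned JSOP_RESETBASE0 to restore segment 0. The interpreter
 * reconstructs the full index as (1 << 16) | 0x2345.
 */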
1670
1671 /*
1672 * Emit a bytecode and its 2-byte constant index immediate operand. If the
1673 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
1674 * operand effectively extends the 16-bit immediate of the prefixed opcode,
1675 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
1676 * with single-byte JSOP_INDEXBASE[123] codes.
1677 *
1678 * Such prefixing currently requires a suffix to restore the "zero segment"
1679 * register setting, but this could be optimized further.
1680 */
1681 static JSBool
1682 EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg)
1683 {
1684 JSOp bigSuffix;
1685
1686 bigSuffix = EmitBigIndexPrefix(cx, cg, index);
1687 if (bigSuffix == JSOP_FALSE)
1688 return JS_FALSE;
1689 EMIT_UINT16_IMM_OP(op, index);
1690 return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
1691 }
1692
1693 /*
1694 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
1695 * caller's lexical environment, and embedding a false return on error.
1696 */
1697 #define EMIT_INDEX_OP(op, index) \
1698 JS_BEGIN_MACRO \
1699 if (!EmitIndexOp(cx, op, index, cg)) \
1700 return JS_FALSE; \
1701 JS_END_MACRO
1702
1703 static JSBool
1704 EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
1705 {
1706 JSAtomListElement *ale;
1707
1708 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
1709 if (op == JSOP_GETPROP &&
1710 pn->pn_atom == cx->runtime->atomState.lengthAtom) {
1711 return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
1712 }
1713 ale = cg->atomList.add(cg->compiler, pn->pn_atom);
1714 if (!ale)
1715 return JS_FALSE;
1716 return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
1717 }
1718
1719 static JSBool
1720 EmitObjectOp(JSContext *cx, JSObjectBox *objbox, JSOp op,
1721 JSCodeGenerator *cg)
1722 {
1723 JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
1724 return EmitIndexOp(cx, op, cg->objectList.index(objbox), cg);
1725 }
1726
1727 /*
1728 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
1729 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
1730 * other parts of the code there's no necessary relationship between the two.
1731 * The abstraction cracks here in order to share EmitSlotIndexOp code among
1732 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
1733 */
1734 JS_STATIC_ASSERT(ARGNO_LEN == 2);
1735 JS_STATIC_ASSERT(SLOTNO_LEN == 2);
1736
1737 static JSBool
1738 EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index,
1739 JSCodeGenerator *cg)
1740 {
1741 JSOp bigSuffix;
1742 ptrdiff_t off;
1743 jsbytecode *pc;
1744
1745 JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM ||
1746 JOF_OPTYPE(op) == JOF_SLOTOBJECT);
1747 bigSuffix = EmitBigIndexPrefix(cx, cg, index);
1748 if (bigSuffix == JSOP_FALSE)
1749 return JS_FALSE;
1750
1751 /* Emit [op, slot, index]. */
1752 off = js_EmitN(cx, cg, op, 2 + INDEX_LEN);
1753 if (off < 0)
1754 return JS_FALSE;
1755 pc = CG_CODE(cg, off);
1756 SET_UINT16(pc, slot);
1757 pc += 2;
1758 SET_INDEX(pc, index);
1759 return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
1760 }
1761
1762 /*
1763 * Adjust the slot for a block local to account for the number of variables
1764 * that share the same index space with locals. Due to the incremental code
1765 * generation for top-level script, we do the adjustment via code patching in
1766 * JSCompiler::compileScript; see comments there.
1767 *
1768 * The function returns -1 on failures.
1769 */
1770 static jsint
1771 AdjustBlockSlot(JSContext *cx, JSCodeGenerator *cg, jsint slot)
1772 {
1773 JS_ASSERT((jsuint) slot < cg->maxStackDepth);
1774 if (cg->flags & TCF_IN_FUNCTION) {
1775 slot += cg->fun->u.i.nvars;
1776 if ((uintN) slot >= SLOTNO_LIMIT) {
1777 js_ReportCompileErrorNumber(cx, CG_TS(cg), NULL,
1778 JSREPORT_ERROR,
1779 JSMSG_TOO_MANY_LOCALS);
1780 slot = -1;
1781 }
1782 }
1783 return slot;
1784 }
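/*
 * [Annotation, not in the original file] Example: in a function with
 * fun->u.i.nvars == 2, a block local at stack-depth slot 3 is addressed
 * as local slot 5 by JSOP_GETLOCAL and friends, per the adjustment above.
 */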
1785
1786 static bool
1787 EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg)
1788 {
1789 JS_ASSERT(PN_TYPE(pn) == TOK_LEXICALSCOPE);
1790 if (!EmitObjectOp(cx, pn->pn_objbox, JSOP_ENTERBLOCK, cg))
1791 return false;
1792
1793 JSObject *blockObj = pn->pn_objbox->object;
1794 jsint depth = AdjustBlockSlot(cx, cg, OBJ_BLOCK_DEPTH(cx, blockObj));
1795 if (depth < 0)
1796 return false;
1797
1798 for (uintN slot = JSSLOT_FREE(&js_BlockClass),
1799 limit = slot + OBJ_BLOCK_COUNT(cx, blockObj);
1800 slot < limit; slot++) {
1801 jsval v = STOBJ_GET_SLOT(blockObj, slot);
1802
1803 /* Beware the empty destructuring dummy. */
1804 if (JSVAL_IS_VOID(v)) {
1805 JS_ASSERT(slot + 1 <= limit);
1806 continue;
1807 }
1808
1809 JSDefinition *dn = (JSDefinition *) JSVAL_TO_PRIVATE(v);
1810 JS_ASSERT(dn->pn_defn);
1811 JS_ASSERT(uintN(dn->frameSlot() + depth) < JS_BIT(16));
1812 dn->pn_cookie += depth;
1813 #ifdef DEBUG
1814 for (JSParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
1815 JS_ASSERT(pnu->pn_lexdef == dn);
1816 JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
1817 JS_ASSERT(pnu->pn_cookie == FREE_UPVAR_COOKIE);
1818 }
1819 #endif
1820 }
1821
1822 OBJ_SCOPE(blockObj)->freeslot = JSSLOT_FREE(&js_BlockClass);
1823 return js_GrowSlots(cx, blockObj, JSSLOT_FREE(&js_BlockClass));
1824 }
1825
1826 /*
1827 * When eval is called from a function, the eval code or function code it
1828 * compiles may reference upvars that live in the eval-calling function. The
1829 * eval-invoked compiler does not have explicit definitions for these upvars
1830 * and we do not attempt to create them a-priori (by inspecting the function's
1831 * args and vars) -- we could, but we'd take an avoidable penalty for each
1832 * function local not referenced by any upvar. Instead, we map such upvars
1833 * lazily, growing upvarMap.vector by powers of two.
1834 *
1835 * This function knows that it is called with pn pointing to a PN_NAME-arity
1836 * node, and cg->compiler->callerFrame having a non-null fun member, and the
1837 * static level of cg at least one greater than the eval-calling function's
1838 * static level.
1839 */
1840 static bool
1841 MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg)
1842 {
1843 JSContext *cx = cg->compiler->context;
1844 JSFunction *fun = cg->compiler->callerFrame->fun;
1845 uintN upvarLevel = fun->u.i.script->staticLevel;
1846
1847 JSFunctionBox *funbox = cg->funbox;
1848 if (funbox) {
1849 /*
1850 * Treat top-level function definitions as escaping (i.e., as funargs),
1851 * required since we compile each such top level function or statement
1852 * and throw away the AST, so we can't yet see all funarg uses of this
1853 * function being compiled (cg->funbox->object). See bug 493177.
1854 */
1855 if (funbox->level == fun->u.i.script->staticLevel + 1U &&
1856 !(((JSFunction *) funbox->object)->flags & JSFUN_LAMBDA)) {
1857 JS_ASSERT_IF(cx->options & JSOPTION_ANONFUNFIX,
1858 ((JSFunction *) funbox->object)->atom);
1859 return true;
1860 }
1861
1862 while (funbox->level >= upvarLevel) {
1863 if (funbox->node->pn_dflags & PND_FUNARG)
1864 return true;
1865 funbox = funbox->parent;
1866 if (!funbox)
1867 break;
1868 }
1869 }
1870
1871 JSAtom *atom = pn->pn_atom;
1872
1873 uintN index;
1874 JSLocalKind localKind = js_LookupLocal(cx, fun, atom, &index);
1875 if (localKind == JSLOCAL_NONE)
1876 return true;
1877
1878 JS_ASSERT(cg->staticLevel > upvarLevel);
1879 if (cg->staticLevel >= JS_DISPLAY_SIZE || upvarLevel >= JS_DISPLAY_SIZE)
1880 return true;
1881
1882 JSAtomListElement *ale = cg->upvarList.lookup(atom);
1883 if (!ale) {
1884 if ((cg->flags & TCF_IN_FUNCTION) &&
1885 !js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR)) {
1886 return false;
1887 }
1888
1889 ale = cg->upvarList.add(cg->compiler, atom);
1890 if (!ale)
1891 return false;
1892 JS_ASSERT(ALE_INDEX(ale) == cg->upvarList.count - 1);
1893
1894 uint32 *vector = cg->upvarMap.vector;
1895 uint32 length = cg->upvarMap.length;
1896
1897 JS_ASSERT(ALE_INDEX(ale) <= length);
1898 if (ALE_INDEX(ale) == length) {
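/* Grow the map by doubling, starting at four entries: 4, 8, 16, ... */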
1899 length = 2 * JS_MAX(2, length);
1900 vector = (uint32 *) cx->realloc(vector, length * sizeof *vector);
1901 if (!vector)
1902 return false;
1903 cg->upvarMap.vector = vector;
1904 cg->upvarMap.length = length;
1905 }
1906
1907 if (localKind != JSLOCAL_ARG)
1908 index += fun->nargs;
1909 JS_ASSERT(index < JS_BIT(16));
1910
1911 uintN skip = cg->staticLevel - upvarLevel;
1912 vector[ALE_INDEX(ale)] = MAKE_UPVAR_COOKIE(skip, index);
1913 }
1914
1915 pn->pn_op = JSOP_GETUPVAR;
1916 pn->pn_cookie = MAKE_UPVAR_COOKIE(cg->staticLevel, ALE_INDEX(ale));
1917 pn->pn_dflags |= PND_BOUND;
1918 return true;
1919 }
1920
1921 /*
1922 * BindNameToSlot attempts to optimize name gets and sets to stack slot loads
1923 * and stores, given the compile-time information in cg and a TOK_NAME node pn.
1924 * It returns false on error, true on success.
1925 *
1926 * The caller can inspect pn->pn_cookie: a value other than FREE_UPVAR_COOKIE
1927 * means optimization occurred, in which case BindNameToSlot also updated
1928 * pn->pn_op. If pn->pn_cookie is still FREE_UPVAR_COOKIE on return, pn->pn_op
1929 * may still have been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether
1930 * or not pn->pn_op was modified, if this function finds an argument or local
1931 * variable name, PND_CONST will be set in pn_dflags for read-only properties
1932 * after a successful return.
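 *
 * For example, in |function f(a) { var v; return a + v; }| the uses of a
 * and v are rewritten from JSOP_NAME to JSOP_GETARG and JSOP_GETLOCAL
 * respectively, with pn_cookie holding the corresponding slot numbers.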
1933 *
1934 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
1935 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
1936 * in js_EmitTree.
1937 */
1938 static JSBool
1939 BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
1940 {
1941 JSDefinition *dn;
1942 JSOp op;
1943 JSAtom *atom;
1944 uint32 cookie;
1945 JSDefinition::Kind dn_kind;
1946 JSAtomListElement *ale;
1947 uintN index;
1948
1949 JS_ASSERT(pn->pn_type == TOK_NAME);
1950
1951 /* Idempotency tests come first, since we may be called more than once. */
1952 if (pn->pn_dflags & PND_BOUND)
1953 return JS_TRUE;
1954
1955 /* No cookie is initialized for these two; they're pre-bound by definition. */
1956 JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE);
1957
1958 /*
1959 * The parser linked all uses (including forward references) to their
1960 * definitions, unless a with statement or direct eval intervened.
1961 */
1962 if (pn->pn_used) {
1963 JS_ASSERT(pn->pn_cookie == FREE_UPVAR_COOKIE);
1964 dn = pn->pn_lexdef;
1965 JS_ASSERT(dn->pn_defn);
1966 pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
1967 } else {
1968 if (!pn->pn_defn)
1969 return JS_TRUE;
1970 dn = (JSDefinition *) pn;
1971 }
1972
1973 op = PN_OP(pn);
1974 if (op == JSOP_NOP)
1975 return JS_TRUE;
1976
1977 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
1978 atom = pn->pn_atom;
1979 cookie = dn->pn_cookie;
1980 dn_kind = dn->kind();
1981
1982 /*
1983 * Turn attempts to mutate const-declared bindings into get ops (for
1984 * pre-increment and pre-decrement ops, our caller will have to emit
1985 * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
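 *
 * For example, given |const k = 2;|, the statement |k++;| compiles to a
 * get of k plus arithmetic; the binding itself is left unchanged.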
1986 *
1987 * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
1988 * bindings visible to the compiler are permanent in JS unless the
1989 * declaration originates in eval code. We detect eval code by testing
1990 * cg->compiler->callerFrame, which is set only by eval or a debugger
1991 * equivalent.
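 *
 * For example, at top level |var x; delete x;| compiles the delete to
 * JSOP_FALSE, because the var binding is permanent.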
1992 *
1993 * Note that this callerFrame non-null test must be qualified by testing
1994 * !cg->funbox to exclude function code nested in eval code, which is not
1995 * subject to the deletable binding exception.
1996 */
1997 switch (op) {
1998 case JSOP_NAME:
1999 case JSOP_SETCONST:
2000 break;
2001 case JSOP_DELNAME:
2002 if (dn_kind != JSDefinition::UNKNOWN) {
2003 if (cg->compiler->callerFrame && !cg->funbox)
2004 JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
2005 else
2006 pn->pn_op = JSOP_FALSE;
2007 pn->pn_dflags |= PND_BOUND;
2008 return JS_TRUE;
2009 }
2010 break;
2011 default:
2012 if (pn->isConst())
2013 pn->pn_op = op = JSOP_NAME;
2014 }
2015
2016 if (cookie == FREE_UPVAR_COOKIE) {
2017 JSStackFrame *caller = cg->compiler->callerFrame;
2018 if (caller) {
2019 JS_ASSERT(cg->flags & TCF_COMPILE_N_GO);
2020
2021 /*
2022 * Don't generate upvars on the left side of a for loop. See
2023 * bug 470758.
2024 */
2025 if (cg->flags & TCF_IN_FOR_INIT)
2026 return JS_TRUE;
2027
2028 JS_ASSERT(caller->script);
2029 if (!caller->fun)
2030 return JS_TRUE;
2031
2032 /*
2033 * Make sure the variable object used by the compiler to initialize
2034 * parent links matches the caller's varobj. Compile-n-go compiler-
2035 * created function objects have the top-level cg's scopeChain set
2036 * as their parent by JSCompiler::newFunction.
2037 */
2038 JSObject *scopeobj = (cg->flags & TCF_IN_FUNCTION)
2039 ? STOBJ_GET_PARENT(FUN_OBJECT(cg->fun))
2040 : cg->scopeChain;
2041 if (scopeobj != caller->varobj)
2042 return JS_TRUE;
2043
2044 /*
2045 * We are compiling eval or debug script inside a function frame
2046 * and the scope chain matches the function's variable object.
2047 * Optimize access to the function's arguments, variables, and
2048 * the arguments object.
2049 */
2050 if (op != JSOP_NAME)
2051 return JS_TRUE;
2052
2053 return MakeUpvarForEval(pn, cg);
2054 }
2055 return JS_TRUE;
2056 }
2057
2058 if (dn->pn_dflags & PND_GVAR) {
2059 /*
2060 * If this is a global reference from within a function, leave pn_op as
2061 * JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function code if
2062 * only we could depend on the global frame's slots being valid for all
2063 * calls to the function.
2064 */
2065 if (cg->flags & TCF_IN_FUNCTION)
2066 return JS_TRUE;
2067
2068 /*
2069 * We are optimizing global variables and there may be no pre-existing
2070 * global property named atom when this global script runs. If atom was
2071 * declared via const or var, optimize pn to access fp->vars using the
2072 * appropriate JSOP_*GVAR op.
2073 *
2074 * FIXME: should be able to optimize global function access too.
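 *
 * For example, at top level |var g; g = 1;| compiles the assignment with
 * JSOP_SETGVAR rather than the generic JSOP_SETNAME.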
2075 */
2076 JS_ASSERT(dn_kind == JSDefinition::VAR || dn_kind == JSDefinition::CONST);
2077
2078 switch (op) {
2079 case JSOP_NAME: op = JSOP_GETGVAR; break;
2080 case JSOP_SETNAME: op = JSOP_SETGVAR; break;
2081 case JSOP_SETCONST: /* NB: no change */ break;
2082 case JSOP_INCNAME: op = JSOP_INCGVAR; break;
2083 case JSOP_NAMEINC: op = JSOP_GVARINC; break;
2084 case JSOP_DECNAME: op = JSOP_DECGVAR; break;
2085 case JSOP_NAMEDEC: op = JSOP_GVARDEC; break;
2086 case JSOP_FORNAME: /* NB: no change */ break;
2087 case JSOP_DELNAME: /* NB: no change */ break;
2088 default: JS_NOT_REACHED("gvar");
2089 }
2090 pn->pn_op = op;
2091 pn->pn_cookie = cookie;
2092 pn->pn_dflags |= PND_BOUND;
2093 return JS_TRUE;
2094 }
2095
2096 uintN level = UPVAR_FRAME_SKIP(cookie);
2097 JS_ASSERT(cg->staticLevel >= level);
2098
2099 /*
2100 * A JSDefinition witnessed as a declaration by the parser cannot be an
2101 * upvar, unless it is the degenerate kind of upvar selected above (in the
2102 * code before the PND_GVAR test) for the special case of compile-and-go
2103 * code generated from eval called from a function, where the eval code
2104 * uses local vars defined in the function. We detect this upvar-for-eval
2105 * case by checking dn's op.
2106 */
2107 if (PN_OP(dn) == JSOP_GETUPVAR) {
2108 JS_ASSERT(cg->staticLevel >= level);
2109 if (op != JSOP_NAME)
2110 return JS_TRUE;
2111
2112 JSStackFrame *caller = cg->compiler->callerFrame;
2113 JS_ASSERT(caller);
2114 JS_ASSERT(caller->script);
2115
2116 JSTreeContext *tc = cg;
2117 while (tc->staticLevel != level)
2118 tc = tc->parent;
2119 JS_ASSERT(tc->flags & TCF_COMPILING);
2120
2121 JSCodeGenerator *evalcg = (JSCodeGenerator *) tc;
2122 JS_ASSERT(evalcg->flags & TCF_COMPILE_N_GO);
2123 JS_ASSERT(caller->fun && caller->varobj == evalcg->scopeChain);
2124
2125 /*
2126 * Don't generate upvars on the left side of a for loop. See
2127 * bug 470758 and bug 520513.
2128 */
2129 if (evalcg->flags & TCF_IN_FOR_INIT)
2130 return JS_TRUE;
2131
2132 if (cg->staticLevel == level) {
2133 pn->pn_op = JSOP_GETUPVAR;
2134 pn->pn_cookie = cookie;
2135 pn->pn_dflags |= PND_BOUND;
2136 return JS_TRUE;
2137 }
2138
2139 return MakeUpvarForEval(pn, cg);
2140 }
2141
2142 uintN skip = cg->staticLevel - level;
2143 if (skip != 0) {
2144 JS_ASSERT(cg->flags & TCF_IN_FUNCTION);
2145 JS_ASSERT_IF(UPVAR_FRAME_SLOT(cookie) != CALLEE_UPVAR_SLOT,
2146 cg->lexdeps.lookup(atom));
2147 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
2148 JS_ASSERT(cg->fun->u.i.skipmin <= skip);
2149
2150 /*
2151 * If op is a mutating opcode, if this upvar's static level is too big
2152 * to index into the display, or if the function is heavyweight, we fall
2153 * back on JSOP_*NAME*.
2154 */
2155 if (op != JSOP_NAME)
2156 return JS_TRUE;
2157 if (level >= JS_DISPLAY_SIZE)
2158 return JS_TRUE;
2159 if (cg->flags & TCF_FUN_HEAVYWEIGHT)
2160 return JS_TRUE;
2161
2162 if (FUN_FLAT_CLOSURE(cg->fun)) {
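/*
 * A flat closure's upvars are copied into its dslots when the closure
 * is created, so they can be read directly via JSOP_GETDSLOT.
 */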
2163 op = JSOP_GETDSLOT;
2164 } else {
2165 /*
2166 * The function we're compiling may not be heavyweight, but if it
2167 * escapes as a funarg, we can't use JSOP_GETUPVAR/JSOP_CALLUPVAR.
2168 * JSCompiler::analyzeFunctions has arranged for this function's
2169 * enclosing functions to be heavyweight, so we can safely stick
2170 * with JSOP_NAME/JSOP_CALLNAME.
2171 */
2172 if (cg->funbox->node->pn_dflags & PND_FUNARG)
2173 return JS_TRUE;
2174
2175 /*
2176 * Generator functions may be resumed from any call stack, which
2177 * defeats the display optimization to static link searching used
2178 * by JSOP_{GET,CALL}UPVAR.
2179 */
2180 if (cg->flags & TCF_FUN_IS_GENERATOR)
2181 return JS_TRUE;
2182
2183 op = JSOP_GETUPVAR;
2184 }
2185
2186 ale = cg->upvarList.lookup(atom);
2187 if (ale) {
2188 index = ALE_INDEX(ale);
2189 } else {
2190 if (!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR))
2191 return JS_FALSE;
2192
2193 ale = cg->upvarList.add(cg->compiler, atom);
2194 if (!ale)
2195 return JS_FALSE;
2196 index = ALE_INDEX(ale);
2197 JS_ASSERT(index == cg->upvarList.count - 1);
2198
2199 uint32 *vector = cg->upvarMap.vector;
2200 if (!vector) {
2201 uint32 length = cg->lexdeps.count;
2202
2203 vector = (uint32 *) js_calloc(length * sizeof *vector);
2204 if (!vector) {
2205 JS_ReportOutOfMemory(cx);
2206 return JS_FALSE;
2207 }
2208 cg->upvarMap.vector = vector;
2209 cg->upvarMap.length = length;
2210 }
2211
2212 uintN slot = UPVAR_FRAME_SLOT(cookie);
2213 if (slot != CALLEE_UPVAR_SLOT && dn_kind != JSDefinition::ARG) {
2214 JSTreeContext *tc = cg;
2215 do {
2216 tc = tc->parent;
2217 } while (tc->staticLevel != level);
2218 if (tc->flags & TCF_IN_FUNCTION)
2219 slot += tc->fun->nargs;
2220 }
2221
2222 vector[index] = MAKE_UPVAR_COOKIE(skip, slot);
2223 }
2224
2225 pn->pn_op = op;
2226 pn->pn_cookie = index;
2227 pn->pn_dflags |= PND_BOUND;
2228 return JS_TRUE;
2229 }
2230
2231 /*
2232 * We are compiling a function body and may be able to optimize a name
2233 * to a stack slot. Look for an argument or variable in the function and
2234 * rewrite pn_op and update pn accordingly.
2235 */
2236 switch (dn_kind) {
2237 case JSDefinition::UNKNOWN:
2238 return JS_TRUE;
2239
2240 case JSDefinition::LET:
2241 switch (op) {
2242 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2243 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2244 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2245 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2246 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2247 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2248 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2249 default: JS_NOT_REACHED("let");
2250 }
2251 break;
2252
2253 case JSDefinition::ARG:
2254 switch (op) {
2255 case JSOP_NAME: op = JSOP_GETARG; break;
2256 case JSOP_SETNAME: op = JSOP_SETARG; break;
2257 case JSOP_INCNAME: op = JSOP_INCARG; break;
2258 case JSOP_NAMEINC: op = JSOP_ARGINC; break;
2259 case JSOP_DECNAME: op = JSOP_DECARG; break;
2260 case JSOP_NAMEDEC: op = JSOP_ARGDEC; break;
2261 case JSOP_FORNAME: op = JSOP_FORARG; break;
2262 default: JS_NOT_REACHED("arg");
2263 }
2264 JS_ASSERT(!pn->isConst());
2265 break;
2266
2267 case JSDefinition::VAR:
2268 if (PN_OP(dn) == JSOP_CALLEE) {
2269 JS_ASSERT(op != JSOP_CALLEE);
2270 JS_ASSERT((cg->fun->flags & JSFUN_LAMBDA) && atom == cg->fun->atom);
2271
2272 switch (op) {
2273 default:
2274 /*
2275 * Leave pn->pn_op == JSOP_NAME if cg->fun is heavyweight, as
2276 * we cannot be sure cg->fun is not something of the form:
2277 *
2278 * var ff = (function f(s) { eval(s); return f; });
2279 *
2280 * where a caller invokes ff("var f = 42"). The result returned
2281 * for such an invocation must be 42, since the callee name is
2282 * lexically bound in an outer declarative environment from the
2283 * function's activation. See jsfun.cpp:call_resolve.
2284 */
2285 JS_ASSERT(op != JSOP_DELNAME);
2286 if (!(cg->flags & TCF_FUN_HEAVYWEIGHT)) {
2287 op = JSOP_CALLEE;
2288 pn->pn_dflags |= PND_CONST;
2289 }
2290 break;
2291 }
2292 pn->pn_op = op;
2293 pn->pn_dflags |= PND_BOUND;
2294 return JS_TRUE;
2295 }
2296 /* FALL THROUGH */
2297
2298 default:
2299 JS_ASSERT_IF(dn_kind != JSDefinition::FUNCTION,
2300 dn_kind == JSDefinition::VAR ||
2301 dn_kind == JSDefinition::CONST);
2302 switch (op) {
2303 case JSOP_NAME: op = JSOP_GETLOCAL; break;
2304 case JSOP_SETNAME: op = JSOP_SETLOCAL; break;
2305 case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
2306 case JSOP_INCNAME: op = JSOP_INCLOCAL; break;
2307 case JSOP_NAMEINC: op = JSOP_LOCALINC; break;
2308 case JSOP_DECNAME: op = JSOP_DECLOCAL; break;
2309 case JSOP_NAMEDEC: op = JSOP_LOCALDEC; break;
2310 case JSOP_FORNAME: op = JSOP_FORLOCAL; break;
2311 default: JS_NOT_REACHED("local");
2312 }
2313 JS_ASSERT_IF(dn_kind == JSDefinition::CONST, pn->pn_dflags & PND_CONST);
2314 break;
2315 }
2316
2317 JS_ASSERT(op != PN_OP(pn));
2318 pn->pn_op = op;
2319 pn->pn_cookie = UPVAR_FRAME_SLOT(cookie);
2320 pn->pn_dflags |= PND_BOUND;
2321 return JS_TRUE;
2322 }
2323
2324 /*
2325 * If pn contains a useful expression, return true with *answer set to true.
2326 * If pn contains a useless expression, return true with *answer set to false.
2327 * Return false on error.
2328 *
2329 * The caller should initialize *answer to false and invoke this function on
2330 * an expression statement or similar subtree to decide whether the tree could
2331 * produce code that has any side effects. For an expression statement, we
2332 * define useless code as code with no side effects, because the main effect,
2333 * the value left on the stack after the code executes, will be discarded by a
2334 * pop bytecode.
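 *
 * For example, the expression statement |v;| where v is a bound local is
 * useless, while |f();| is useful: the call may have side effects.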
2335 */
2336 static JSBool
2337 CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2338 JSBool *answer)
2339 {
2340 JSBool ok;
2341 JSParseNode *pn2;
2342
2343 ok = JS_TRUE;
2344 if (!pn || *answer)
2345 return ok;
2346
2347 switch (pn->pn_arity) {
2348 case PN_FUNC:
2349 /*
2350 * Contrary to ES3, a named function is no longer useful, because we
2351 * bind its name lexically (using JSOP_CALLEE) instead of creating an
2352 * Object instance and binding a readonly, permanent property in it
2353 * (the object and binding can be detected and hijacked or captured).
2354 * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
2355 */
2356 *answer = JS_FALSE;
2357 break;
2358
2359 case PN_LIST:
2360 if (pn->pn_op == JSOP_NOP ||
2361 pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2362 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2363 /*
2364 * Non-operators along with ||, &&, ===, and !== never invoke
2365 * toString or valueOf.
2366 */
2367 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
2368 ok &= CheckSideEffects(cx, cg, pn2, answer);
2369 } else {
2370 /*
2371 * All invocation operations (construct: TOK_NEW, call: TOK_LP)
2372 * are presumed to be useful, because they may have side effects
2373 * even if their main effect (their return value) is discarded.
2374 *
2375 * TOK_LB binary trees of 3 or more nodes are flattened into lists
2376 * to avoid too much recursion. All such lists must be presumed
2377 * to be useful because each index operation could invoke a getter
2378 * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
2379 * does not apply here: arguments[i][j] might invoke a getter).
2380 *
2381 * Likewise, array and object initialisers may call prototype
2382 * setters (the __defineSetter__ built-in, and writable __proto__
2383 * on Array.prototype create this hazard). Initialiser list nodes
2384 * have JSOP_NEWINIT in their pn_op.
2385 */
2386 *answer = JS_TRUE;
2387 }
2388 break;
2389
2390 case PN_TERNARY:
2391 ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
2392 CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
2393 CheckSideEffects(cx, cg, pn->pn_kid3, answer);
2394 break;
2395
2396 case PN_BINARY:
2397 if (pn->pn_type == TOK_ASSIGN) {
2398 /*
2399 * Assignment is presumed to be useful, even if the next operation
2400 * is another assignment overwriting this one's ostensible effect,
2401 * because the left operand may be a property with a setter that
2402 * has side effects.
2403 *
2404 * The only exception is assignment of a useless value to a const
2405 * declared in the function currently being compiled.
2406 */
2407 pn2 = pn->pn_left;
2408 if (pn2->pn_type != TOK_NAME) {
2409 *answer = JS_TRUE;
2410 } else {
2411 if (!BindNameToSlot(cx, cg, pn2))
2412 return JS_FALSE;
2413 if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
2414 return JS_FALSE;
2415 if (!*answer && (pn->pn_op != JSOP_NOP || !pn2->isConst()))
2416 *answer = JS_TRUE;
2417 }
2418 } else {
2419 if (pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
2420 pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
2421 /*
2422 * ||, &&, ===, and !== do not convert their operands via
2423 * toString or valueOf method calls.
2424 */
2425 ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
2426 CheckSideEffects(cx, cg, pn->pn_right, answer);
2427 } else {
2428 /*
2429 * We can't easily prove that neither operand ever denotes an
2430 * object with a toString or valueOf method.
2431 */
2432 *answer = JS_TRUE;
2433 }
2434 }
2435 break;
2436
2437 case PN_UNARY:
2438 switch (pn->pn_type) {
2439 case TOK_DELETE:
2440 pn2 = pn->pn_kid;
2441 switch (pn2->pn_type) {
2442 case TOK_NAME:
2443 if (!BindNameToSlot(cx, cg, pn2))
2444 return JS_FALSE;
2445 if (pn2->isConst()) {
2446 *answer = JS_FALSE;
2447 break;
2448 }
2449 /* FALL THROUGH */
2450 case TOK_DOT:
2451 #if JS_HAS_XML_SUPPORT
2452 case TOK_DBLDOT:
2453 #endif
2454 #if JS_HAS_LVALUE_RETURN
2455 case TOK_LP:
2456 #endif
2457 case TOK_LB:
2458 /* All these delete addressing modes have effects too. */
2459 *answer = JS_TRUE;
2460 break;
2461 default:
2462 ok = CheckSideEffects(cx, cg, pn2, answer);
2463 break;
2464 }
2465 break;
2466
2467 case TOK_UNARYOP:
2468 if (pn->pn_op == JSOP_NOT) {
2469 /* ! does not convert its operand via toString or valueOf. */
2470 ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
2471 break;
2472 }
2473 /* FALL THROUGH */
2474
2475 default:
2476 /*
2477 * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
2478 * have direct effects. Of the remaining unary-arity node types,
2479 * we can't easily prove that the operand never denotes an object
2480 * with a toString or valueOf method.
2481 */
2482 *answer = JS_TRUE;
2483 break;
2484 }
2485 break;
2486
2487 case PN_NAME:
2488 /*
2489 * Take care to avoid trying to bind a label name (labels, both for
2490 * statements and property values in object initialisers, have pn_op
2491 * defaulted to JSOP_NOP).
2492 */
2493 if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
2494 if (!BindNameToSlot(cx, cg, pn))
2495 return JS_FALSE;
2496 if (pn->pn_op != JSOP_ARGUMENTS && pn->pn_op != JSOP_CALLEE &&
2497 pn->pn_cookie == FREE_UPVAR_COOKIE) {
2498 /*
2499 * Not an argument or local variable use, and not a use of an
2500 * unshadowed named function expression's given name, so this
2501 * expression could invoke a getter that has side effects.
2502 */
2503 *answer = JS_TRUE;
2504 }
2505 }
2506 pn2 = pn->maybeExpr();
2507 if (pn->pn_type == TOK_DOT) {
2508 if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
2509 return JS_FALSE;
2510 if (!(pn2->pn_op == JSOP_ARGUMENTS &&
2511 pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
2512 /*
2513 * Any dotted property reference could call a getter, except
2514 * for arguments.length where arguments is unambiguous.
2515 */
2516 *answer = JS_TRUE;
2517 }
2518 }
2519 ok = CheckSideEffects(cx, cg, pn2, answer);
2520 break;
2521
2522 case PN_NAMESET:
2523 ok = CheckSideEffects(cx, cg, pn->pn_tree, answer);
2524 break;
2525
2526 case PN_NULLARY:
2527 if (pn->pn_type == TOK_DEBUGGER)
2528 *answer = JS_TRUE;
2529 break;
2530 }
2531 return ok;
2532 }
2533
2534 static JSBool
2535 EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2536 JSBool callContext)
2537 {
2538 JSOp op;
2539
2540 if (!BindNameToSlot(cx, cg, pn))
2541 return JS_FALSE;
2542 op = PN_OP(pn);
2543
2544 if (callContext) {
2545 switch (op) {
2546 case JSOP_NAME:
2547 op = JSOP_CALLNAME;
2548 break;
2549 case JSOP_GETGVAR:
2550 op = JSOP_CALLGVAR;
2551 break;
2552 case JSOP_GETARG:
2553 op = JSOP_CALLARG;
2554 break;
2555 case JSOP_GETLOCAL:
2556 op = JSOP_CALLLOCAL;
2557 break;
2558 case JSOP_GETUPVAR:
2559 op = JSOP_CALLUPVAR;
2560 break;
2561 case JSOP_GETDSLOT:
2562 op = JSOP_CALLDSLOT;
2563 break;
2564 default:
2565 JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
2566 break;
2567 }
2568 }
2569
2570 if (op == JSOP_ARGUMENTS || op == JSOP_CALLEE) {
2571 if (js_Emit1(cx, cg, op) < 0)
2572 return JS_FALSE;
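/* In a call context, push null as the |this| placeholder for the call. */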
2573 if (callContext && js_Emit1(cx, cg, JSOP_NULL) < 0)
2574 return JS_FALSE;
2575 } else {
2576 if (pn->pn_cookie != FREE_UPVAR_COOKIE) {
2577 EMIT_UINT16_IMM_OP(op, pn->pn_cookie);
2578 } else {
2579 if (!EmitAtomOp(cx, pn, op, cg))
2580 return JS_FALSE;
2581 }
2582 }
2583
2584 return JS_TRUE;
2585 }
2586
2587 #if JS_HAS_XML_SUPPORT
2588 static JSBool
2589 EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2590 {
2591 JSParseNode *pn2;
2592 uintN oldflags;
2593
2594 JS_ASSERT(pn->pn_type == TOK_UNARYOP);
2595 JS_ASSERT(pn->pn_op == JSOP_XMLNAME);
2596 JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);
2597
2598 pn2 = pn->pn_kid;
2599 oldflags = cg->flags;
2600 cg->flags &= ~TCF_IN_FOR_INIT;
2601 if (!js_EmitTree(cx, cg, pn2))
2602 return JS_FALSE;
2603 cg->flags |= oldflags & TCF_IN_FOR_INIT;
2604 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2605 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2606 return JS_FALSE;
2607 }
2608
2609 return js_Emit1(cx, cg, op) >= 0;
2610 }
2611 #endif
2612
2613 static JSBool
2614 EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2615 {
2616 /*
2617 * Special case for obj.__proto__, obj.__parent__, obj.__count__ to
2618 * deoptimize away from fast paths in the interpreter and trace recorder,
2619 * which skip dense array instances by going up to Array.prototype before
2620 * looking up the property name.
2621 */
2622 JSAtomListElement *ale = cg->atomList.add(cg->compiler, pn->pn_atom);
2623 if (!ale)
2624 return JS_FALSE;
2625 if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
2626 return JS_FALSE;
2627 if (js_Emit1(cx, cg, op) < 0)
2628 return JS_FALSE;
2629 return JS_TRUE;
2630 }
2631
2632 static JSBool
2633 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
2634 JSBool callContext)
2635 {
2636 JSParseNode *pn2, *pndot, *pnup, *pndown;
2637 ptrdiff_t top;
2638
2639 JS_ASSERT(pn->pn_arity == PN_NAME);
2640 pn2 = pn->maybeExpr();
2641
2642 /* Special case deoptimization on __proto__, __count__ and __parent__. */
2643 if ((op == JSOP_GETPROP || op == JSOP_CALLPROP) &&
2644 (pn->pn_atom == cx->runtime->atomState.protoAtom ||
2645 pn->pn_atom == cx->runtime->atomState.parentAtom ||
2646 pn->pn_atom == cx->runtime->atomState.countAtom)) {
2647 if (pn2 && !js_EmitTree(cx, cg, pn2))
2648 return JS_FALSE;
2649 return EmitSpecialPropOp(cx, pn, callContext ? JSOP_CALLELEM : JSOP_GETELEM, cg);
2650 }
2651
2652 if (callContext) {
2653 JS_ASSERT(pn->pn_type == TOK_DOT);
2654 JS_ASSERT(op == JSOP_GETPROP);
2655 op = JSOP_CALLPROP;
2656 } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
2657 if (pn2->pn_op == JSOP_THIS) {
2658 if (pn->pn_atom != cx->runtime->atomState.lengthAtom) {
2659 /* Fast path for gets of |this.foo|. */
2660 return EmitAtomOp(cx, pn, JSOP_GETTHISPROP, cg);
2661 }
2662 } else if (pn2->pn_type == TOK_NAME) {
2663 /*
2664 * Try to optimize:
2665 * - arguments.length into JSOP_ARGCNT
2666 * - argname.prop into JSOP_GETARGPROP
2667 * - localname.prop into JSOP_GETLOCALPROP
2668 * but don't do this if the property is 'length' -- prefer to emit
2669 * JSOP_GETARG, etc., and then JSOP_LENGTH.
2670 */
2671 if (!BindNameToSlot(cx, cg, pn2))
2672 return JS_FALSE;
2673 if (pn->pn_atom == cx->runtime->atomState.lengthAtom) {
2674 if (pn2->pn_op == JSOP_ARGUMENTS)
2675 return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
2676 } else {
2677 switch (pn2->pn_op) {
2678 case JSOP_GETARG:
2679 op = JSOP_GETARGPROP;
2680 goto do_indexconst;
2681 case JSOP_GETLOCAL:
2682 op = JSOP_GETLOCALPROP;
2683 do_indexconst: {
2684 JSAtomListElement *ale;
2685 jsatomid atomIndex;
2686
2687 ale = cg->atomList.add(cg->compiler, pn->pn_atom);
2688 if (!ale)
2689 return JS_FALSE;
2690 atomIndex = ALE_INDEX(ale);
2691 return EmitSlotIndexOp(cx, op, pn2->pn_cookie, atomIndex, cg);
2692 }
2693
2694 default:;
2695 }
2696 }
2697 }
2698 }
2699
2700 /*
2701 * If the object operand is also a dotted property reference, reverse the
2702 * list linked via pn_expr temporarily so we can iterate over it from the
2703 * bottom up (reversing again as we go), to avoid excessive recursion.
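 *
 * For example, a.b.c.d is walked down to the primary expression a; code
 * for a is emitted first, then .b, .c, and .d as the links are restored.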
2704 */
2705 if (pn2->pn_type == TOK_DOT) {
2706 pndot = pn2;
2707 pnup = NULL;
2708 top = CG_OFFSET(cg);
2709 for (;;) {
2710 /* Reverse pndot->pn_expr to point up, not down. */
2711 pndot->pn_offset = top;
2712 JS_ASSERT(!pndot->pn_used);
2713 pndown = pndot->pn_expr;
2714 pndot->pn_expr = pnup;
2715 if (pndown->pn_type != TOK_DOT)
2716 break;
2717 pnup = pndot;
2718 pndot = pndown;
2719 }
2720
2721 /* pndown is a primary expression, not a dotted property reference. */
2722 if (!js_EmitTree(cx, cg, pndown))
2723 return JS_FALSE;
2724
2725 do {
2726 /* Walk back up the list, emitting annotated name ops. */
2727 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2728 CG_OFFSET(cg) - pndown->pn_offset) < 0) {
2729 return JS_FALSE;
2730 }
2731
2732 /*
2733 * Special case deoptimization on __proto__, __count__ and
2734 * __parent__, as above.
2735 */
2736 if (pndot->pn_arity == PN_NAME &&
2737 (pndot->pn_atom == cx->runtime->atomState.protoAtom ||
2738 pndot->pn_atom == cx->runtime->atomState.parentAtom ||
2739 pndot->pn_atom == cx->runtime->atomState.countAtom)) {
2740 if (!EmitSpecialPropOp(cx, pndot, JSOP_GETELEM, cg))
2741 return JS_FALSE;
2742 } else if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg)) {
2743 return JS_FALSE;
2744 }
2745
2746 /* Reverse the pn_expr link again. */
2747 pnup = pndot->pn_expr;
2748 pndot->pn_expr = pndown;
2749 pndown = pndot;
2750 } while ((pndot = pnup) != NULL);
2751 } else {
2752 if (!js_EmitTree(cx, cg, pn2))
2753 return JS_FALSE;
2754 }
2755
2756 if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
2757 CG_OFFSET(cg) - pn2->pn_offset) < 0) {
2758 return JS_FALSE;
2759 }
2760
2761 return EmitAtomOp(cx, pn, op, cg);
2762 }
2763
2764 static JSBool
2765 EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
2766 {
2767 ptrdiff_t top;
2768 JSParseNode *left, *right, *next, ltmp, rtmp;
2769 jsint slot;
2770
2771 top = CG_OFFSET(cg);
2772 if (pn->pn_arity == PN_LIST) {
2773 /* Left-associative operator chain to avoid too much recursion. */
2774 JS_ASSERT(pn->pn_op == JSOP_GETELEM);
2775 JS_ASSERT(pn->pn_count >= 3);
2776 left = pn->pn_head;
2777 right = pn->last();
2778 next = left->pn_next;
2779 JS_ASSERT(next != right);
2780
2781 /*
2782 * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
2783 * one or more pairs of index expressions and JSOP_GETELEM ops.
2784 */
2785 if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
2786 if (!BindNameToSlot(cx, cg, left))
2787 return JS_FALSE;
2788 if (left->pn_op == JSOP_ARGUMENTS &&
2789 JSDOUBLE_IS_INT(next->pn_dval, slot) &&
2790 (jsuint)slot < JS_BIT(16)) {
2791 /*
2792 * arguments[i]() requires the arguments object as |this|.
2793 * Check that we never generate a list for that usage.
2794 */
2795 JS_ASSERT(op != JSOP_CALLELEM || next->pn_next);
2796 left->pn_offset = next->pn_offset = top;
2797 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2798 left = next;
2799 next = left->pn_next;
2800 }
2801 }
2802
2803 /*
2804 * Check whether we generated JSOP_ARGSUB, just above, and have only
2805 * one more index expression to emit. Given arguments[0][j], we must
2806 * skip the while loop altogether, falling through to emit code for j
2807 * (in the subtree referenced by right), followed by the annotated op,
2808 * at the bottom of this function.
2809 */
2810 JS_ASSERT(next != right || pn->pn_count == 3);
2811 if (left == pn->pn_head) {
2812 if (!js_EmitTree(cx, cg, left))
2813 return JS_FALSE;
2814 }
2815 while (next != right) {
2816 if (!js_EmitTree(cx, cg, next))
2817 return JS_FALSE;
2818 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2819 return JS_FALSE;
2820 if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
2821 return JS_FALSE;
2822 next = next->pn_next;
2823 }
2824 } else {
2825 if (pn->pn_arity == PN_NAME) {
2826 /*
2827 * Set left and right so pn appears to be a TOK_LB node, instead
2828 * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
2829 * EmitDestructuringOps further below. In the destructuring case,
2830 * the base expression (pn_expr) of the name may be null, which
2831 * means we have to emit a JSOP_BINDNAME.
2832 */
2833 left = pn->maybeExpr();
2834 if (!left) {
2835 left = &ltmp;
2836 left->pn_type = TOK_STRING;
2837 left->pn_op = JSOP_BINDNAME;
2838 left->pn_arity = PN_NULLARY;
2839 left->pn_pos = pn->pn_pos;
2840 left->pn_atom = pn->pn_atom;
2841 }
2842 right = &rtmp;
2843 right->pn_type = TOK_STRING;
2844 JS_ASSERT(ATOM_IS_STRING(pn->pn_atom));
2845 right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
2846 ? JSOP_QNAMEPART
2847 : JSOP_STRING;
2848 right->pn_arity = PN_NULLARY;
2849 right->pn_pos = pn->pn_pos;
2850 right->pn_atom = pn->pn_atom;
2851 } else {
2852 JS_ASSERT(pn->pn_arity == PN_BINARY);
2853 left = pn->pn_left;
2854 right = pn->pn_right;
2855 }
2856
2857 /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
2858 if (op == JSOP_GETELEM &&
2859 left->pn_type == TOK_NAME &&
2860 right->pn_type == TOK_NUMBER) {
2861 if (!BindNameToSlot(cx, cg, left))
2862 return JS_FALSE;
2863 if (left->pn_op == JSOP_ARGUMENTS &&
2864 JSDOUBLE_IS_INT(right->pn_dval, slot) &&
2865 (jsuint)slot < JS_BIT(16)) {
2866 left->pn_offset = right->pn_offset = top;
2867 EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
2868 return JS_TRUE;
2869 }
2870 }
2871
2872 if (!js_EmitTree(cx, cg, left))
2873 return JS_FALSE;
2874 }
2875
2876 /* The right side of the descendant operator is implicitly quoted. */
2877 JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
2878 right->pn_op == JSOP_QNAMEPART);
2879 if (!js_EmitTree(cx, cg, right))
2880 return JS_FALSE;
2881 if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
2882 return JS_FALSE;
2883 return js_Emit1(cx, cg, op) >= 0;
2884 }
2885
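/*
 * Push the number dval using the smallest encoding that fits: JSOP_ZERO or
 * JSOP_ONE, JSOP_INT8, JSOP_UINT16, JSOP_UINT24, or JSOP_INT32 for integers,
 * otherwise an atomized double pushed via JSOP_DOUBLE.
 */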
2886 static JSBool
2887 EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
2888 {
2889 jsint ival;
2890 uint32 u;
2891 ptrdiff_t off;
2892 jsbytecode *pc;
2893 JSAtom *atom;
2894 JSAtomListElement *ale;
2895
2896 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
2897 if (ival == 0)
2898 return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
2899 if (ival == 1)
2900 return js_Emit1(cx, cg, JSOP_ONE) >= 0;
2901 if ((jsint)(int8)ival == ival)
2902 return js_Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;
2903
2904 u = (uint32)ival;
2905 if (u < JS_BIT(16)) {
2906 EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
2907 } else if (u < JS_BIT(24)) {
2908 off = js_EmitN(cx, cg, JSOP_UINT24, 3);
2909 if (off < 0)
2910 return JS_FALSE;
2911 pc = CG_CODE(cg, off);
2912 SET_UINT24(pc, u);
2913 } else {
2914 off = js_EmitN(cx, cg, JSOP_INT32, 4);
2915 if (off < 0)
2916 return JS_FALSE;
2917 pc = CG_CODE(cg, off);
2918 SET_INT32(pc, ival);
2919 }
2920 return JS_TRUE;
2921 }
2922
2923 atom = js_AtomizeDouble(cx, dval);
2924 if (!atom)
2925 return JS_FALSE;
2926
2927 ale = cg->atomList.add(cg->compiler, atom);
2928 if (!ale)
2929 return JS_FALSE;
2930 return EmitIndexOp(cx, JSOP_DOUBLE, ALE_INDEX(ale), cg);
2931 }
2932
2933 static JSBool
2934 EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
2935 JSStmtInfo *stmtInfo)
2936 {
2937 JSOp switchOp;
2938 JSBool ok, hasDefault, constPropagated;
2939 ptrdiff_t top, off, defaultOffset;
2940 JSParseNode *pn2, *pn3, *pn4;
2941 uint32 caseCount, tableLength;
2942 JSParseNode **table;
2943 jsdouble d;
2944 jsint i, low, high;
2945 jsval v;
2946 JSAtom *atom;
2947 JSAtomListElement *ale;
2948 intN noteIndex;
2949 size_t switchSize, tableSize;
2950 jsbytecode *pc, *savepc;
2951 #if JS_HAS_BLOCK_SCOPE
2952 jsint count;
2953 #endif
2954
2955 /* Try for the most optimal switch op; fall back if the cases are not dense ints, per ECMAv2. */
2956 switchOp = JSOP_TABLESWITCH;
2957 ok = JS_TRUE;
2958 hasDefault = constPropagated = JS_FALSE;
2959 defaultOffset = -1;
2960
2961 /*
2962 * If the switch contains let variables scoped by its body, model the
2963 * resulting block on the stack first, before emitting the discriminant's
2964 * bytecode (in case the discriminant contains a stack-model dependency
2965 * such as a let expression).
2966 */
2967 pn2 = pn->pn_right;
2968 #if JS_HAS_BLOCK_SCOPE
2969 if (pn2->pn_type == TOK_LEXICALSCOPE) {
2970 /*
2971 * Push the body's block scope before discriminant code-gen for proper
2972 * static block scope linkage in case the discriminant contains a let
2973 * expression. The block's locals must lie under the discriminant on
2974 * the stack so that case-dispatch bytecodes can find the discriminant
2975 * on top of stack.
2976 */
2977 count = OBJ_BLOCK_COUNT(cx, pn2->pn_objbox->object);
2978 js_PushBlockScope(cg, stmtInfo, pn2->pn_objbox->object, -1);
2979 stmtInfo->type = STMT_SWITCH;
2980
2981 /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
2982 if (!EmitEnterBlock(cx, pn2, cg))
2983 return JS_FALSE;
2984
2985 /*
2986 * Pop the switch's statement info around discriminant code-gen. Note
2987 * how this leaves cg->blockChain referencing the switch's
2988 * block scope object, which is necessary for correct block parenting
2989 * in the case where the discriminant contains a let expression.
2990 */
2991 cg->topStmt = stmtInfo->down;
2992 cg->topScopeStmt = stmtInfo->downScope;
2993 }
2994 #ifdef __GNUC__
2995 else {
2996 count = 0;
2997 }
2998 #endif
2999 #endif
3000
3001 /*
3002 * Emit code for the discriminant first (or nearly first, in the case of a
3003 * switch whose body is a block scope).
3004 */
3005 if (!js_EmitTree(cx, cg, pn->pn_left))
3006 return JS_FALSE;
3007
3008 /* Switch bytecodes run from here till end of final case. */
3009 top = CG_OFFSET(cg);
3010 #if !JS_HAS_BLOCK_SCOPE
3011 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3012 #else
3013 if (pn2->pn_type == TOK_LC) {
3014 js_PushStatement(cg, stmtInfo, STMT_SWITCH, top);
3015 } else {
3016 /* Re-push the switch's statement info record. */
3017 cg->topStmt = cg->topScopeStmt = stmtInfo;
3018
3019 /* Set the statement info record's idea of top. */
3020 stmtInfo->update = top;
3021
3022 /* Advance pn2 to refer to the switch case list. */
3023 pn2 = pn2->expr();
3024 }
3025 #endif
3026
3027 caseCount = pn2->pn_count;
3028 tableLength = 0;
3029 table = NULL;
3030
3031 if (caseCount == 0 ||
3032 (caseCount == 1 &&
3033 (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
3034 caseCount = 0;
3035 low = 0;
3036 high = -1;
3037 } else {
3038 #define INTMAP_LENGTH 256
3039 jsbitmap intmap_space[INTMAP_LENGTH];
3040 jsbitmap *intmap = NULL;
3041 int32 intmap_bitlen = 0;
3042
3043 low = JSVAL_INT_MAX;
3044 high = JSVAL_INT_MIN;
3045
3046 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3047 if (pn3->pn_type == TOK_DEFAULT) {
3048 hasDefault = JS_TRUE;
3049 caseCount--; /* one of the "cases" was the default */
3050 continue;
3051 }
3052
3053 JS_ASSERT(pn3->pn_type == TOK_CASE);
3054 if (switchOp == JSOP_CONDSWITCH)
3055 continue;
3056
3057 pn4 = pn3->pn_left;
3058 while (pn4->pn_type == TOK_RP)
3059 pn4 = pn4->pn_kid;
3060 switch (pn4->pn_type) {
3061 case TOK_NUMBER:
3062 d = pn4->pn_dval;
3063 if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) {
3064 pn3->pn_val = INT_TO_JSVAL(i);
3065 } else {
3066 atom = js_AtomizeDouble(cx, d);
3067 if (!atom) {
3068 ok = JS_FALSE;
3069 goto release;
3070 }
3071 pn3->pn_val = ATOM_KEY(atom);
3072 }
3073 break;
3074 case TOK_STRING:
3075 pn3->pn_val = ATOM_KEY(pn4->pn_atom);
3076 break;
3077 case TOK_NAME:
3078 if (!pn4->maybeExpr()) {
3079 ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v);
3080 if (!ok)
3081 goto release;
3082 if (v != JSVAL_HOLE) {
3083 if (!JSVAL_IS_PRIMITIVE(v)) {
3084 /*
3085 * XXX JSOP_LOOKUPSWITCH does not support const-
3086 * propagated object values, see bug 407186.
3087 */
3088 switchOp = JSOP_CONDSWITCH;
3089 continue;
3090 }
3091 pn3->pn_val = v;
3092 constPropagated = JS_TRUE;
3093 break;
3094 }
3095 }
3096 /* FALL THROUGH */
3097 case TOK_PRIMARY:
3098 if (pn4->pn_op == JSOP_TRUE) {
3099 pn3->pn_val = JSVAL_TRUE;
3100 break;
3101 }
3102 if (pn4->pn_op == JSOP_FALSE) {
3103 pn3->pn_val = JSVAL_FALSE;
3104 break;
3105 }
3106 /* FALL THROUGH */
3107 default:
3108 switchOp = JSOP_CONDSWITCH;
3109 continue;
3110 }
3111
3112 JS_ASSERT(JSVAL_IS_PRIMITIVE(pn3->pn_val));
3113
3114 if (switchOp != JSOP_TABLESWITCH)
3115 continue;
3116 if (!JSVAL_IS_INT(pn3->pn_val)) {
3117 switchOp = JSOP_LOOKUPSWITCH;
3118 continue;
3119 }
3120 i = JSVAL_TO_INT(pn3->pn_val);
3121 if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
3122 switchOp = JSOP_LOOKUPSWITCH;
3123 continue;
3124 }
3125 if (i < low)
3126 low = i;
3127 if (high < i)
3128 high = i;
3129
3130 /*
3131 * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
3132 * We bias i by 65536 if it's negative, and hope that's a rare
3133 * case (because it requires a malloc'd bitmap).
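 * E.g., i == -1 is biased to 65535 and tested as bit 65535 of intmap.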
3134 */
3135 if (i < 0)
3136 i += JS_BIT(16);
3137 if (i >= intmap_bitlen) {
3138 if (!intmap &&
3139 i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
3140 intmap = intmap_space;
3141 intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
3142 } else {
3143 /* Just grab 8K for the worst-case bitmap. */
3144 intmap_bitlen = JS_BIT(16);
3145 intmap = (jsbitmap *)
3146 cx->malloc((JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
3147 * sizeof(jsbitmap));
3148 if (!intmap) {
3149 JS_ReportOutOfMemory(cx);
3150 return JS_FALSE;
3151 }
3152 }
3153 memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
3154 }
3155 if (JS_TEST_BIT(intmap, i)) {
3156 switchOp = JSOP_LOOKUPSWITCH;
3157 continue;
3158 }
3159 JS_SET_BIT(intmap, i);
3160 }
3161
3162 release:
3163 if (intmap && intmap != intmap_space)
3164 cx->free(intmap);
3165 if (!ok)
3166 return JS_FALSE;
3167
3168 /*
3169 * Compute the table length and select a lookup switch instead if the
3170 * table would be overlarge or more than half-sparse.
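 *
 * E.g., cases {0, 1000} yield tableLength 1001 > 2 * caseCount == 4, so
 * JSOP_LOOKUPSWITCH is used instead.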
3171 */
3172 if (switchOp == JSOP_TABLESWITCH) {
3173 tableLength = (uint32)(high - low + 1);
3174 if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
3175 switchOp = JSOP_LOOKUPSWITCH;
3176 } else if (switchOp == JSOP_LOOKUPSWITCH) {
3177 /*
3178 * Lookup switch supports only atom indexes below the 64K limit.
3179 * Conservatively estimate the maximum possible index during
3180 * switch generation and use a conditional switch if it exceeds
3181 * the limit.
3182 */
3183 if (caseCount + cg->atomList.count > JS_BIT(16))
3184 switchOp = JSOP_CONDSWITCH;
3185 }
3186 }
3187
3188 /*
3189 * Emit a note with two offsets: the first gives the total switch code
3190 * length, the second the offset to the first JSOP_CASE if condswitch.
3191 */
3192 noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
3193 if (noteIndex < 0)
3194 return JS_FALSE;
3195
3196 if (switchOp == JSOP_CONDSWITCH) {
3197 /*
3198 * 0 bytes of immediate for unoptimized ECMAv2 switch.
3199 */
3200 switchSize = 0;
3201 } else if (switchOp == JSOP_TABLESWITCH) {
3202 /*
3203 * 3 offsets (len, low, high) before the table, 1 per entry.
3204 */
3205 switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
3206 } else {
3207 /*
3208 * JSOP_LOOKUPSWITCH:
3209 * 1 offset (len) and 1 atom index (npairs) before the table,
3210 * 1 atom index and 1 jump offset per entry.
3211 */
3212 switchSize = (size_t)(JUMP_OFFSET_LEN + INDEX_LEN +
3213 (INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
3214 }
3215
3216 /*
3217 * Emit switchOp followed by switchSize bytes of jump or lookup table.
3218 *
3219 * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
3220 * to emit the immediate operand(s) by which bytecode readers such as
3221 * BuildSpanDepTable discover the length of the switch opcode *before*
3222 * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
3223 * also important to zero all unknown jump offset immediate operands,
3224 * so they can be converted to span dependencies with null targets to
3225 * be computed later (js_EmitN zeros switchSize bytes after switchOp).
3226 */
3227 if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
3228 return JS_FALSE;
3229
3230 off = -1;
3231 if (switchOp == JSOP_CONDSWITCH) {
3232 intN caseNoteIndex = -1;
3233 JSBool beforeCases = JS_TRUE;
3234
3235 /* Emit code for evaluating cases and jumping to case statements. */
3236 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
3237 pn4 = pn3->pn_left;
3238 if (pn4 && !js_EmitTree(cx, cg, pn4))
3239 return JS_FALSE;
3240 if (caseNoteIndex >= 0) {
3241 /* off is the previous JSOP_CASE's bytecode offset. */
3242 if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3243 CG_OFFSET(cg) - off)) {
3244 return JS_FALSE;
3245 }
3246 }
3247 if (!pn4) {
3248 JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
3249 continue;
3250 }
3251 caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
3252 if (caseNoteIndex < 0)
3253 return JS_FALSE;
3254 off = EmitJump(cx, cg, JSOP_CASE, 0);
3255 if (off < 0)
3256 return JS_FALSE;
3257 pn3->pn_offset = off;
3258 if (beforeCases) {
3259 uintN noteCount, noteCountDelta;
3260
3261 /* Switch note's second offset is to first JSOP_CASE. */
3262 noteCount = CG_NOTE_COUNT(cg);
3263 if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
3264 off - top)) {
3265 return JS_FALSE;
3266 }
3267 noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
3268 if (noteCountDelta != 0)
3269 caseNoteIndex += noteCountDelta;
3270 beforeCases = JS_FALSE;
3271 }
3272 }
3273
3274 /*
3275 * If we didn't have an explicit default (which could fall in between
3276 * cases, preventing us from fusing this js_SetSrcNoteOffset with the
3277 * call in the loop above), link the last case to the implicit default
3278 * for the decompiler.
3279 */
3280 if (!hasDefault &&
3281 caseNoteIndex >= 0 &&
3282 !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
3283 CG_OFFSET(cg) - off)) {
3284 return JS_FALSE;
3285 }
3286
3287 /* Emit default even if no explicit default statement. */
3288 defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
3289 if (defaultOffset < 0)
3290 return JS_FALSE;
3291 } else {
3292 pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);
3293
3294 if (switchOp == JSOP_TABLESWITCH) {
3295 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
3296 SET_JUMP_OFFSET(pc, low);
3297 pc += JUMP_OFFSET_LEN;
3298 SET_JUMP_OFFSET(pc, high);
3299 pc += JUMP_OFFSET_LEN;
3300
3301 /*
3302 * Use malloc to avoid arena bloat for programs with many switches.
3303 * We free table if non-null at label out, so all control flow must
3304 * exit this function through goto out or goto bad.
3305 */
3306 if (tableLength != 0